/* Interprocedural analyses.
   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "tree-cfgcleanup.h"

/* Function summary where the parameter infos are actually stored. */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;

/* Traits for a hash table for reusing already existing ipa_bits. */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
    {
      return a->value == b->value && a->mask == b->mask;
    }
  static void
  mark_empty (ipa_bits *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const ipa_bits *p)
    {
      return p == NULL;
    }
  static bool
  is_deleted (const ipa_bits *p)
    {
      return p == reinterpret_cast<const ipa_bits *> (1);
    }
  static void
  mark_deleted (ipa_bits *&p)
    {
      p = reinterpret_cast<ipa_bits *> (1);
    }
};

/* Hash table for avoiding repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
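
/* For illustration, interning through the table above follows the usual
   hash_table pattern; a minimal sketch (assuming TMP is a local ipa_bits
   holding the value/mask pair being looked up) would be

     ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
     if (!*slot)
       {
	 ipa_bits *res = ggc_alloc<ipa_bits> ();
	 *res = tmp;
	 *slot = res;
       }
     return *slot;

   so that equal value/mask pairs are allocated only once.  */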

/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range_base *>
{
  typedef value_range_base *value_type;
  typedef value_range_base *compare_type;
  static hashval_t
  hash (const value_range_base *p)
    {
      inchash::hash hstate (p->kind ());
      inchash::add_expr (p->min (), hstate);
      inchash::add_expr (p->max (), hstate);
      return hstate.end ();
    }
  static bool
  equal (const value_range_base *a, const value_range_base *b)
    {
      return a->equal_p (*b);
    }
  static void
  mark_empty (value_range_base *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const value_range_base *p)
    {
      return p == NULL;
    }
  static bool
  is_deleted (const value_range_base *p)
    {
      return p == reinterpret_cast<const value_range_base *> (1);
    }
  static void
  mark_deleted (value_range_base *&p)
    {
      p = reinterpret_cast<value_range_base *> (1);
    }
};

/* Hash table for avoiding repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;

/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}
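
/* For instance (an illustrative case, not taken from these sources), a
   function declared as

     __attribute__ ((optimize (0))) void f (void);

   carries DECL_FUNCTION_SPECIFIC_OPTIMIZATION with optimization disabled,
   so the predicate above returns true and IPA-CP leaves f alone.  */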

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}
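
/* For example (an illustrative sketch), for

     int f (int a, double b);

   the loop above fills in

     descriptors[0].decl_or_type = <PARM_DECL a>
     descriptors[1].decl_or_type = <PARM_DECL b>

   with move_cost estimated from int and double respectively, and the indices
   match what ipa_get_param_decl_index later returns for each PARM_DECL.  */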

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  Note there is no setter function as the descriptor array is built
   just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}

/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count);
      return true;
    }
  else
    return false;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (f);
	}

      if (jump_func->bits)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}
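
/* Pieced together from the format strings above, the dump produced by these
   functions looks roughly like this (node names and values are made up):

     Jump functions:
       Jump functions of caller  foo/12:
         callsite  foo/12 -> bar/10 :
            param 0: CONST: 4
            param 1: PASS THROUGH: 0, op nop_expr, agg_preserved

   where "op nop_expr" marks a simple pass-through of the caller's first
   formal parameter and "agg_preserved" means the aggregate contents passed
   along with it are known not to be clobbered before the call.  */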

/* Set JFUNC to be a jump function known to convey really nothing.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits = NULL;
  jfunc->m_vr = NULL;
}

/* Set DST to be a copy of jump function SRC (to be used by the jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
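
/* For example (illustrative only), for a call such as

     bar (&foo);

   where foo is a FUNCTION_DECL, the constant jump function for the argument
   records &foo together with a freshly allocated ipa_cst_ref_desc whose
   refcount starts at one, so that the IPA reference to foo can be removed
   once all described uses of it disappear.  */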

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be a unary pass through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
  */
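
/* For illustration (a hand-written sketch, not taken from any testcase),
   given

     struct A { A (); virtual void f (); };
     struct B : A { B (); virtual void f (); };

   a compiled B::B () first calls A::A () on the A sub-object (section 1),
   then stores the address of B's vtable into this->_vptr (section 2), and
   only then runs the user-written constructor body (section 3).  */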

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the type of such instances,
   but they call destructors.  We assume that methods cannot destroy the THIS
   pointer.  Also as a special case, constructors and destructors may change
   the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we do not have an
     easy way to do this, so punt on all non-pure cdtors.
     We could also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}
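
/* For instance (an illustrative sketch), in

     void
     f (A *a)
     {
       a->foo ();
     }

   the argument a is a default definition of a PARM_DECL and f is not a
   method, so as long as f->after_inlining is not set and no inlined
   constructor or destructor appears on the inline stack of the call, the
   function above returns false, i.e. the dynamic type of *a is assumed
   stable from the beginning of f up to the call.  */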

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget + 1);

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it has, return true and fill in the jump function JFUNC with relevant
   type information or set it to unknown.  ARG is the object itself (not a
   pointer to it, unless dereferenced).  BASE is the base of the memory access
   as returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
		    tree comp_type, gcall *call, struct ipa_jump_func *jfunc,
		    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags, in the boolean variable pointed to
   by DATA, that it has been invoked.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far but that does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      modified = true;
      if (fbi)
	fbi->aa_walk_budget = 0;
    }
  else if (fbi)
    fbi->aa_walk_budget -= walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration
   that has not been modified, return the index of the parameter in
   ipa_node_params.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
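
/* For example (an illustrative sketch resembling the examples used further
   below), in

     foo (int a)
     {
       int a.0;

       a.0_2 = a;
       bar (a.0_2);

   the statement a.0_2 = a loads from the PARM_DECL a, and provided nothing
   clobbers a before that statement, load_from_unmodified_param returns the
   index of a in the parameter descriptors.  */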

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->ref_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified before
   the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true
   even if it cannot prove the value has not been modified, in that case it
   will store false to *GUARANTEED_UNMODIFIED, otherwise it will store true
   there.

   FBI and DESCRIPTORS describe parameters of the current function, STMT is the
   load statement.  If the function returns true, *INDEX_P, *OFFSET_P and
   *BY_REF_P are filled with the parameter index, offset within the aggregate
   and whether it is a load from a value passed by reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
      */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters' accesses at
   different stages of IPA optimizations.  PARMS_AINFO contains the information
   that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (is_gimple_assign (stmt)
	      && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
	      && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
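
/* For reference (an Itanium-C++-ABI-style sketch, not lifted from the
   compiler), a pointer to member function such as

     struct S;
     typedef void (S::*memfn) ();

   is lowered into a record with exactly two fields, roughly

     struct
     {
       void (*__pfn) ();   // method pointer or adjusted vtable offset
       long __delta;       // adjustment to the this pointer
     };

   which is the two-field shape the predicate above looks for.  */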

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
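
/* For instance (illustrative only), given the copy chain

     b_2 = a_1;
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3) follows the single-rhs definitions back
   and returns a_1.  */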

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
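
/* A worked example (offsets and sizes in bits, made up for illustration):
   with a list already describing pieces at [0, 32) and [64, 96), asking for
   a place for [32, 64) returns the link between the two existing entries,
   asking for [0, 32) again sets *ALREADY_THERE, and asking for [16, 48)
   returns NULL because the partial overlap cannot be represented.  */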
1506 
1507 /* Build aggregate jump function from LIST, assuming there are exactly
1508    CONST_COUNT constant entries there and that th offset of the passed argument
1509    is ARG_OFFSET and store it into JFUNC.  */
1510 
1511 static void
build_agg_jump_func_from_list(struct ipa_known_agg_contents_list * list,int const_count,HOST_WIDE_INT arg_offset,struct ipa_jump_func * jfunc)1512 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1513 			       int const_count, HOST_WIDE_INT arg_offset,
1514 			       struct ipa_jump_func *jfunc)
1515 {
1516   vec_alloc (jfunc->agg.items, const_count);
1517   while (list)
1518     {
1519       if (list->constant)
1520 	{
1521 	  struct ipa_agg_jf_item item;
1522 	  item.offset = list->offset - arg_offset;
1523 	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1524 	  item.value = unshare_expr_without_location (list->constant);
1525 	  jfunc->agg.items->quick_push (item);
1526 	}
1527       list = list->next;
1528     }
1529 }

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as the
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  bool reverse;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
						  &arg_size, &reverse);
	  if (!arg_base)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
					      &arg_size, &reverse);
      if (!arg_base)
	return;

      ao_ref_init (&r, arg);
    }

  /* The second stage walks back the BB, looks at individual statements and as
     long as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
					      &lhs_size, &reverse);
      if (!lhs_base)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* The third stage just goes over the list and creates an appropriate vector
     of ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
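
/* As an illustration (hypothetical source), for

     struct S s;
     s.a = 1;
     s.b = 2;
     foo (&s);

   the backwards walk from the call finds both stores, records the two
   constants in the list and the resulting aggregate jump function describes
   both of them, provided no statement in between may clobber s.  */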

/* Return the Ith param type of callee associated with call graph
   edge E.  */

tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}
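
/* For instance, for a callee declared as int f (int, long), I == 1 yields
   the type node for long via TYPE_ARG_TYPES; for an unprototyped (K&R)
   callee that walk comes up empty and the DECL_ARGUMENTS fallback above is
   used instead, provided a callee declaration is available.  */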

/* Return ipa_bits with VALUE and MASK values, which can be either a newly
   allocated structure or a previously existing one shared with other jump
   functions and/or transformation summaries.  */

ipa_bits *
ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
{
  ipa_bits tmp;
  tmp.value = value;
  tmp.mask = mask;

  ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
  if (*slot)
    return *slot;

  ipa_bits *res = ggc_alloc<ipa_bits> ();
  res->value = value;
  res->mask = mask;
  *slot = res;

  return res;
}
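
/* A sketch of the sharing this achieves (hypothetical values):

     ipa_bits *a = ipa_get_ipa_bits_for_value (widest_int (4), widest_int (3));
     ipa_bits *b = ipa_get_ipa_bits_for_value (widest_int (4), widest_int (3));

   both calls return a pointer to the same GC-allocated structure.  */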

/* Assign to JF a pointer to ipa_bits structure with VALUE and MASK.  Use hash
   table in order to avoid creating multiple same ipa_bits structures.  */

static void
ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
		    const widest_int &mask)
{
  jf->bits = ipa_get_ipa_bits_for_value (value, mask);
}

/* Return a pointer to a value_range just like *TMP, but either find it in
   ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */

static value_range_base *
ipa_get_value_range (value_range_base *tmp)
{
  value_range_base **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
  if (*slot)
    return *slot;

  value_range_base *vr = ggc_alloc<value_range_base> ();
  *vr = *tmp;
  *slot = vr;

  return vr;
}

/* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
   equiv set.  Use hash table in order to avoid creating multiple same copies
   of value_ranges.  */

static value_range_base *
ipa_get_value_range (enum value_range_kind type, tree min, tree max)
{
  value_range_base tmp (type, min, max);
  return ipa_get_value_range (&tmp);
}
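
/* Likewise for value ranges (hypothetical MIN and MAX trees):

     value_range_base *v1 = ipa_get_value_range (VR_RANGE, min, max);
     value_range_base *v2 = ipa_get_value_range (VR_RANGE, min, max);

   leaves v1 and v2 pointing to the same hash-consed object.  */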

/* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
   a NULL equiv bitmap.  Use hash table in order to avoid creating multiple
   same value_range structures.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
		  tree min, tree max)
{
  jf->m_vr = ipa_get_value_range (type, min, max);
}

/* Assign to JF a pointer to a value_range just like TMP, but either fetch a
   copy from ipa_vr_hash_table or allocate a new one in GC memory.  */

static void
ipa_set_jfunc_vr (ipa_jump_func *jf, value_range_base *tmp)
{
  jf->m_vr = ipa_get_value_range (tmp);
}

/* Compute jump functions for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
				    &fbi->aa_walk_budget);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  bool addr_nonzero = false;
	  bool strict_overflow = false;

	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && get_ptr_nonnull (arg))
	    addr_nonzero = true;
	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
	    addr_nonzero = true;

	  if (addr_nonzero)
	    {
	      tree z = build_int_cst (TREE_TYPE (arg), 0);
	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}
      else
	{
	  wide_int min, max;
	  value_range_kind type;
	  if (TREE_CODE (arg) == SSA_NAME
	      && param_type
	      && (type = get_range_info (arg, &min, &max))
	      && (type == VR_RANGE || type == VR_ANTI_RANGE))
	    {
	      value_range_base resvr;
	      value_range_base tmpvr (type,
				      wide_int_to_tree (TREE_TYPE (arg), min),
				      wide_int_to_tree (TREE_TYPE (arg), max));
	      extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
					     &tmpvr, TREE_TYPE (arg));
	      if (!resvr.undefined_p () && !resvr.varying_p ())
		ipa_set_jfunc_vr (jfunc, &resvr);
	      else
		gcc_assert (!jfunc->m_vr);
	    }
	  else
	    gcc_assert (!jfunc->m_vr);
	}

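      /* Next, record the known bits of the argument.  For example
	 (hypothetical numbers), a pointer known to be 16-byte aligned plus 4
	 bytes is encoded below with value 4 and a mask whose low four bits
	 are cleared, i.e. the low four bits of the pointer are known to be
	 0100.  */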
      if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
	{
	  if (TREE_CODE (arg) == SSA_NAME)
	    ipa_set_jfunc_bits (jfunc, 0,
				widest_int::from (get_nonzero_bits (arg),
						  TYPE_SIGN (TREE_TYPE (arg))));
	  else
	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
	}
      else if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT bitpos;
	  unsigned align;

	  get_pointer_alignment_1 (arg, &align, &bitpos);
	  widest_int mask = wi::bit_and_not
	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
	     align / BITS_PER_UNIT - 1);
	  widest_int value = bitpos / BITS_PER_UNIT;
	  ipa_set_jfunc_bits (jfunc, value, mask);
	}
      else
	gcc_assert (!jfunc->bits);

      if (is_gimple_ip_invariant (arg)
	  || (VAR_P (arg)
	      && is_global_var (arg)
	      && TREE_READONLY (arg)))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     loop.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is a pointer, we cannot use its type to determine the type of
	 the aggregate being passed (because type conversions are ignored in
	 gimple).  Usually we can safely get the type from the function
	 declaration, but in the case of K&R prototypes or variadic functions
	 we can try our luck with the type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object,
	 we may do better to work out the type based on the memory stores we
	 find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	      || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during LTO/WHOPR.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}
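
/* The two accepted shapes correspond, for instance, to loads such as the
   following (hypothetical gimple):

     f$__pfn_24 = f.__pfn;              <- COMPONENT_REF form
     f$__pfn_24 = MEM[(struct  *)&f];   <- MEM_REF form, offset 0

   where f is a PARM_DECL whose type matches type_like_member_ptr_p.  */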

/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}

/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  cs->indirect_info->guaranteed_unmodified = 0;
  return cs;
}

/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
				tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  int index;
  gimple *def = SSA_NAME_DEF_STMT (target);
  bool guaranteed_unmodified;
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg (fbi, info->descriptors, def,
				 gimple_assign_rhs1 (def), &index, &offset,
				 NULL, &by_ref, &guaranteed_unmodified))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple *d1 = SSA_NAME_DEF_STMT (n1);
  gimple *d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple *branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
      cs->indirect_info->guaranteed_unmodified = 1;
    }

  return;
}

/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      struct ipa_jump_func jfunc;
      gimple *stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
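
/* For instance (hypothetical C++ source), in

     int f (A *a) { return a->foo (); }

   the call target is an OBJ_TYPE_REF whose object is the default definition
   of parameter a, so an indirect call note with anc_offset 0 and the
   OBJ_TYPE_REF token is attached to that parameter, unless a type change is
   detected.  */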

/* Analyze call statement CALL, determining whether and how it utilizes formal
   parameters of the caller (described by INFO).  PARMS_AINFO is a pointer to
   a vector containing intermediate information about each formal
   parameter.  */

static void
ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call,
				     &fbi->aa_walk_budget);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}


/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}

/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
{
  struct ipa_node_params *info = (struct ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}

/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}

/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
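
/* For example (hypothetical source), in

     static void g (int *p);
     void f (int *p) { g (p); g (p); }

   the default definition of p is used only as an argument of calls, so its
   controlled-uses count is 2; any other non-debug use would have forced
   IPA_UNDESCRIBED_USE.  */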

/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}

/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct ipa_func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
};

edge
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
  return NULL;
}

/* Release body info FBI.  */

void
ipa_release_body_info (struct ipa_func_body_info *fbi)
{
  int i;
  struct ipa_bb_info *bi;

  FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi->bb_infos.release ();
}

/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct ipa_func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walk_budget = PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  ipa_release_body_info (&fbi);
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}

/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      struct ipa_polymorphic_call_context *dst_ctx
	= ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;
	  struct ipa_polymorphic_call_context *src_ctx
	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      ipa_set_jf_unknown (dst);
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  if (src_ctx && !src_ctx->useless_p ())
	    {
	      struct ipa_polymorphic_call_context ctx = *src_ctx;

	      /* TODO: Make type preserved safe WRT contexts.  */
	      if (!ipa_get_jf_ancestor_type_preserved (dst))
		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
	      ctx.offset_by (dst->value.ancestor.offset);
	      if (!ctx.useless_p ())
		{
		  if (!dst_ctx)
		    {
		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
					     count);
		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
		    }

		  dst_ctx->combine_with (ctx);
		}
	    }

	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  if (src->type == IPA_JF_PASS_THROUGH
	      && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_PASS_THROUGH
		   && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved = false;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
	      struct ipa_polymorphic_call_context *src_ctx
		= ipa_get_ith_polymorhic_call_context (top, dst_fid);

	      if (src_ctx && !src_ctx->useless_p ())
		{
		  struct ipa_polymorphic_call_context ctx = *src_ctx;

		  /* TODO: Make type preserved safe WRT contexts.  */
		  if (!ipa_get_jf_pass_through_type_preserved (dst))
		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
		  if (!ctx.useless_p ())
		    {
		      if (!dst_ctx)
			{
			  vec_safe_grow_cleared (args->polymorphic_call_contexts,
						 count);
			  dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
			}
		      dst_ctx->combine_with (ctx);
		    }
		}
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  ipa_set_jf_unknown (dst);
		  break;
		case IPA_JF_CONST:
		  ipa_set_jf_cst_copy (dst, src);
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
		      }
		    else if (TREE_CODE_CLASS (operation) == tcc_unary)
		      ipa_set_jf_unary_pass_through (dst, formal_id, operation);
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p);
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
    }
}
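
/* A concrete (hypothetical) composition: if B merely forwards its first
   parameter to C, the B->C edge carries a simple pass-through jump function
   for it.  When a call A->B passing the constant 7 in that position is
   inlined, the code above combines the two, so the surviving edge to C ends
   up with a constant jump function with value 7.  */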

/* If TARGET is an addr_expr of a function declaration, make it the
   (speculative) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  /* Member pointer call that goes through a VMT lookup.  */
	  if (ie->indirect_info->member_ptr
	      /* Or if target is not an invariant expression and we do not
		 know whether it will evaluate to a function at runtime.
		 This can happen when folding through &VAR, where &VAR
		 is IP invariant, but VAR itself is not.

		 TODO: Revisit this when GCC 5 is branched.  It seems that
		 member_ptr check is not needed and that we may try to fold
		 the expression and see if VAR is readonly.  */
	      || !is_gimple_ip_invariant (target))
	    {
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
				   "discovered direct call non-invariant %s\n",
				   ie->caller->dump_name ());
		}
	      return NULL;
	    }

	  if (dump_enabled_p ())
	    {
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
			       "discovered direct call to non-function in %s, "
			       "making it __builtin_unreachable\n",
			       ie->caller->dump_name ());
	    }

	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and the vtable may be
     external, we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {
      /* We had better make sure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s -> %s) but cannot refer to it.  Giving up.\n",
		     ie->caller->dump_name (),
		     ie->callee->dump_name ());
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      struct cgraph_edge *e2;
      struct ipa_ref *ref;
      ie->speculative_call_info (e2, ie, ref);
      if (e2->callee->ultimate_alias_target ()
	  != callee->ultimate_alias_target ())
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
		     "target (%s -> %s) but the call is already "
		     "speculated to %s.  Giving up.\n",
		     ie->caller->dump_name (), callee->dump_name (),
		     e2->callee->dump_name ());
	}
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
		     "target (%s -> %s) which agrees with the previous "
		     "speculation.\n",
		     ie->caller->dump_name (), callee->dump_name ());
	}
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We cannot make edges to inline clones.  It is a bug if someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s -> %s), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       ie->caller->dump_name (),
	       callee->dump_name ());
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = ie->make_direct (callee);
      /* If we resolved a speculative edge, the cost is already up to date
	 for the direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
	{
	  ipa_call_summary *es = ipa_call_summaries->get (ie);
	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
				 - eni_size_weights.call_cost);
	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
				 - eni_time_weights.call_cost);
	}
    }
  else
    {
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* make_speculative will update ie's cost to the direct call cost.  */
      ie = ie->make_speculative
	     (callee, ie->count.apply_scale (8, 10));
    }

  return ie;
}

/* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
   CONSTRUCTOR and return it.  Return NULL if the search fails for some
   reason.  */

static tree
find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
{
  tree type = TREE_TYPE (constructor);
  if (TREE_CODE (type) != ARRAY_TYPE
      && TREE_CODE (type) != RECORD_TYPE)
    return NULL;

  unsigned ix;
  tree index, val;
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
    {
      HOST_WIDE_INT elt_offset;
      if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  offset_int off;
	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
	  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);

	  if (index)
	    {
	      if (TREE_CODE (index) == RANGE_EXPR)
		off = wi::to_offset (TREE_OPERAND (index, 0));
	      else
		off = wi::to_offset (index);
	      if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
		{
		  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
		  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
		  off = wi::sext (off - wi::to_offset (low_bound),
				  TYPE_PRECISION (TREE_TYPE (index)));
		}
	      off *= wi::to_offset (unit_size);
	      /* ???  Handle more than just the first index of a
		 RANGE_EXPR.  */
	    }
	  else
	    off = wi::to_offset (unit_size) * ix;

	  off = wi::lshift (off, LOG2_BITS_PER_UNIT);
	  if (!wi::fits_shwi_p (off) || wi::neg_p (off))
	    continue;
	  elt_offset = off.to_shwi ();
	}
      else if (TREE_CODE (type) == RECORD_TYPE)
	{
	  gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
	  if (DECL_BIT_FIELD (index))
	    continue;
	  elt_offset = int_bit_position (index);
	}
      else
	gcc_unreachable ();

      if (elt_offset > req_offset)
	return NULL;

      if (TREE_CODE (val) == CONSTRUCTOR)
	return find_constructor_constant_at_offset (val,
						    req_offset - elt_offset);

      if (elt_offset == req_offset
	  && is_gimple_reg_type (TREE_TYPE (val))
	  && is_gimple_ip_invariant (val))
	return val;
    }
  return NULL;
}
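
/* For example (hypothetical source), given

     struct S { int a; int b; };
     static const struct S s = { 1, 2 };

   a lookup in the CONSTRUCTOR of s with REQ_OFFSET equal to the bit
   position of field b yields the INTEGER_CST 2, while any offset that does
   not match a scalar element exactly yields NULL.  */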

/* Check whether SCALAR could be used to look up an aggregate interprocedural
   invariant from a static constructor and if so, return it.  Otherwise return
   NULL.  */

static tree
ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
{
  if (by_ref)
    {
      if (TREE_CODE (scalar) != ADDR_EXPR)
	return NULL;
      scalar = TREE_OPERAND (scalar, 0);
    }

  if (!VAR_P (scalar)
      || !is_global_var (scalar)
      || !TREE_READONLY (scalar)
      || !DECL_INITIAL (scalar)
      || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
    return NULL;

  return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
}

/* Retrieve value from aggregate jump function AGG or static initializer of
   SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
   none.  BY_REF specifies whether the value has to be passed by reference or
   by value.  If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
   to is set to true if the value comes from an initializer of a constant.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
			    HOST_WIDE_INT offset, bool by_ref,
			    bool *from_global_constant)
{
  struct ipa_agg_jf_item *item;
  int i;

  if (scalar)
    {
      tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
      if (res)
	{
	  if (from_global_constant)
	    *from_global_constant = true;
	  return res;
	}
    }

  if (!agg
      || by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	if (from_global_constant)
	  *from_global_constant = false;
	return item->value;
      }
  return NULL;
}
3103 
3104 /* Remove a reference to SYMBOL from the list of references of a node given by
3105    reference description RDESC.  Return true if the reference has been
3106    successfully found and removed.  */
3107 
3108 static bool
remove_described_reference(symtab_node * symbol,struct ipa_cst_ref_desc * rdesc)3109 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3110 {
3111   struct ipa_ref *to_del;
3112   struct cgraph_edge *origin;
3113 
3114   origin = rdesc->cs;
3115   if (!origin)
3116     return false;
3117   to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3118 					   origin->lto_stmt_uid);
3119   if (!to_del)
3120     return false;
3121 
3122   to_del->remove_reference ();
3123   if (dump_file)
3124     fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3125 	     origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
3126   return true;
3127 }
3128 
3129 /* If JFUNC has a reference description with refcount different from
3130    IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3131    NULL.  JFUNC must be a constant jump function.  */
3132 
3133 static struct ipa_cst_ref_desc *
jfunc_rdesc_usable(struct ipa_jump_func * jfunc)3134 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3135 {
3136   struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3137   if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3138     return rdesc;
3139   else
3140     return NULL;
3141 }
3142 
3143 /* If the value of constant jump function JFUNC is an address of a function
3144    declaration, return the associated call graph node.  Otherwise return
3145    NULL.  */
3146 
3147 static cgraph_node *
3148 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3149 {
3150   gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3151   tree cst = ipa_get_jf_constant (jfunc);
3152   if (TREE_CODE (cst) != ADDR_EXPR
3153       || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3154     return NULL;
3155 
3156   return cgraph_node::get (TREE_OPERAND (cst, 0));
3157 }
3158 
3159 
3160 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3161    refcount and if it hits zero, remove reference to SYMBOL from the caller of
3162    the edge specified in the rdesc.  Return false if either the symbol or the
3163    reference could not be found, otherwise return true.  */
3164 
3165 static bool
3166 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3167 {
3168   struct ipa_cst_ref_desc *rdesc;
3169   if (jfunc->type == IPA_JF_CONST
3170       && (rdesc = jfunc_rdesc_usable (jfunc))
3171       && --rdesc->refcount == 0)
3172     {
3173       symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3174       if (!symbol)
3175 	return false;
3176 
3177       return remove_described_reference (symbol, rdesc);
3178     }
3179   return true;
3180 }
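
/* Note on the return-value convention above (an editorial summary, not
   extra semantics): TRUE is also returned when there was nothing to do,
   e.g. when the jump function is not an IPA_JF_CONST, when the refcount
   stays positive after the decrement, or when the use count was
   IPA_UNDESCRIBED_USE and therefore not tracked at all.  FALSE only
   signals that the bookkeeping failed, i.e. the symbol or the reference
   could not be located.  */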
3181 
3182 /* Try to find a destination for indirect edge IE that corresponds to a simple
3183    call or a call of a member function pointer and where the destination is a
3184    pointer formal parameter described by jump function JFUNC.  TARGET_TYPE is
3185    the type of the parameter to which the result of JFUNC is passed.  If it can
3186    be determined, return the newly direct edge, otherwise return NULL.
3187    NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */
3188 
3189 static struct cgraph_edge *
3190 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3191 				  struct ipa_jump_func *jfunc, tree target_type,
3192 				  struct ipa_node_params *new_root_info)
3193 {
3194   struct cgraph_edge *cs;
3195   tree target;
3196   bool agg_contents = ie->indirect_info->agg_contents;
3197   tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3198   if (agg_contents)
3199     {
3200       bool from_global_constant;
3201       target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3202 					   ie->indirect_info->offset,
3203 					   ie->indirect_info->by_ref,
3204 					   &from_global_constant);
3205       if (target
3206 	  && !from_global_constant
3207 	  && !ie->indirect_info->guaranteed_unmodified)
3208 	return NULL;
3209     }
3210   else
3211     target = scalar;
3212   if (!target)
3213     return NULL;
3214   cs = ipa_make_edge_direct_to_target (ie, target);
3215 
3216   if (cs && !agg_contents)
3217     {
3218       bool ok;
3219       gcc_checking_assert (cs->callee
3220 			   && (cs != ie
3221 			       || jfunc->type != IPA_JF_CONST
3222 			       || !cgraph_node_for_jfunc (jfunc)
3223 			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
3224       ok = try_decrement_rdesc_refcount (jfunc);
3225       gcc_checking_assert (ok);
3226     }
3227 
3228   return cs;
3229 }
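
/* A worked example of the AGG_CONTENTS guard above (hypothetical call
   site): if the function pointer is loaded from a caller-side aggregate,
   e.g.

       void f (struct ops *o) { o->hook (); }

   then a value found in the aggregate jump function items is only trusted
   when the aggregate is provably unmodified between the caller and the
   load (GUARANTEED_UNMODIFIED), or when it comes from the initializer of
   a read-only global, where no intervening store can change it.  */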
3230 
3231 /* Return the target to be used in cases of impossible devirtualization.  IE
3232    and target (the latter can be NULL) are dumped when dumping is enabled.  */
3233 
3234 tree
3235 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3236 {
3237   if (dump_file)
3238     {
3239       if (target)
3240 	fprintf (dump_file,
3241 		 "Type inconsistent devirtualization: %s->%s\n",
3242 		 ie->caller->dump_name (),
3243 		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3244       else
3245 	fprintf (dump_file,
3246 		 "No devirtualization target in %s\n",
3247 		 ie->caller->dump_name ());
3248     }
3249   tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3250   cgraph_node::get_create (new_target);
3251   return new_target;
3252 }
3253 
3254 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3255    call based on a formal parameter which is described by jump function JFUNC
3256    and if it can be determined, make it direct and return the direct edge.
3257    Otherwise, return NULL.  CTX describes the polymorphic context that the
3258    parameter the call is based on brings along with it.  */
3259 
3260 static struct cgraph_edge *
3261 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3262 				   struct ipa_jump_func *jfunc,
3263 				   struct ipa_polymorphic_call_context ctx)
3264 {
3265   tree target = NULL;
3266   bool speculative = false;
3267 
3268   if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3269     return NULL;
3270 
3271   gcc_assert (!ie->indirect_info->by_ref);
3272 
3273   /* Try to do lookup via known virtual table pointer value.  */
3274   if (!ie->indirect_info->vptr_changed
3275       || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3276     {
3277       tree vtable;
3278       unsigned HOST_WIDE_INT offset;
3279       tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3280 	: NULL;
3281       tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3282 					   ie->indirect_info->offset,
3283 					   true);
3284       if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3285 	{
3286 	  bool can_refer;
3287 	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3288 						 vtable, offset, &can_refer);
3289 	  if (can_refer)
3290 	    {
3291 	      if (!t
3292 		  || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3293 		      && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3294 		  || !possible_polymorphic_call_target_p
3295 		       (ie, cgraph_node::get (t)))
3296 		{
3297 		  /* Do not speculate builtin_unreachable, it is stupid!  */
3298 		  if (!ie->indirect_info->vptr_changed)
3299 		    target = ipa_impossible_devirt_target (ie, target);
3300 		  else
3301 		    target = NULL;
3302 		}
3303 	      else
3304 		{
3305 		  target = t;
3306 		  speculative = ie->indirect_info->vptr_changed;
3307 		}
3308 	    }
3309 	}
3310     }
3311 
3312   ipa_polymorphic_call_context ie_context (ie);
3313   vec <cgraph_node *>targets;
3314   bool final;
3315 
3316   ctx.offset_by (ie->indirect_info->offset);
3317   if (ie->indirect_info->vptr_changed)
3318     ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3319 				      ie->indirect_info->otr_type);
3320   ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3321   targets = possible_polymorphic_call_targets
3322     (ie->indirect_info->otr_type,
3323      ie->indirect_info->otr_token,
3324      ctx, &final);
3325   if (final && targets.length () <= 1)
3326     {
3327       speculative = false;
3328       if (targets.length () == 1)
3329 	target = targets[0]->decl;
3330       else
3331 	target = ipa_impossible_devirt_target (ie, NULL_TREE);
3332     }
3333   else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3334 	   && !ie->speculative && ie->maybe_hot_p ())
3335     {
3336       cgraph_node *n;
3337       n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3338 					    ie->indirect_info->otr_token,
3339 					    ie->indirect_info->context);
3340       if (n)
3341 	{
3342 	  target = n->decl;
3343 	  speculative = true;
3344 	}
3345     }
3346 
3347   if (target)
3348     {
3349       if (!possible_polymorphic_call_target_p
3350 	  (ie, cgraph_node::get_create (target)))
3351 	{
3352 	  if (speculative)
3353 	    return NULL;
3354 	  target = ipa_impossible_devirt_target (ie, target);
3355 	}
3356       return ipa_make_edge_direct_to_target (ie, target, speculative);
3357     }
3358   else
3359     return NULL;
3360 }
3361 
3362 /* Update the param called notes associated with NODE when CS is being inlined,
3363    assuming NODE is (potentially indirectly) inlined into CS->callee.
3364    Moreover, if the callee is discovered to be constant, create a new cgraph
3365    edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
3366    unless NEW_EDGES is NULL.  Return true iff new edges were created.  */
3367 
3368 static bool
3369 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3370 				      struct cgraph_node *node,
3371 				      vec<cgraph_edge *> *new_edges)
3372 {
3373   struct ipa_edge_args *top;
3374   struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3375   struct ipa_node_params *new_root_info, *inlined_node_info;
3376   bool res = false;
3377 
3378   ipa_check_create_edge_args ();
3379   top = IPA_EDGE_REF (cs);
3380   new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3381 				? cs->caller->global.inlined_to
3382 				: cs->caller);
3383   inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3384 
3385   for (ie = node->indirect_calls; ie; ie = next_ie)
3386     {
3387       struct cgraph_indirect_call_info *ici = ie->indirect_info;
3388       struct ipa_jump_func *jfunc;
3389       int param_index;
3390       cgraph_node *spec_target = NULL;
3391 
3392       next_ie = ie->next_callee;
3393 
3394       if (ici->param_index == -1)
3395 	continue;
3396 
3397       /* We must check the range due to calls with variable argument counts.  */
3398       if (ici->param_index >= ipa_get_cs_argument_count (top))
3399 	{
3400 	  ici->param_index = -1;
3401 	  continue;
3402 	}
3403 
3404       param_index = ici->param_index;
3405       jfunc = ipa_get_ith_jump_func (top, param_index);
3406 
3407       if (ie->speculative)
3408 	{
3409 	  struct cgraph_edge *de;
3410           struct ipa_ref *ref;
3411 	  ie->speculative_call_info (de, ie, ref);
3412 	  spec_target = de->callee;
3413 	}
3414 
3415       if (!opt_for_fn (node->decl, flag_indirect_inlining))
3416 	new_direct_edge = NULL;
3417       else if (ici->polymorphic)
3418 	{
3419           ipa_polymorphic_call_context ctx;
3420 	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3421 	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3422 	}
3423       else
3424 	{
3425 	  tree target_type =  ipa_get_type (inlined_node_info, param_index);
3426 	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3427 							      target_type,
3428 							      new_root_info);
3429 	}
3430 
3431       /* If speculation was removed, then we need to do nothing.  */
3432       if (new_direct_edge && new_direct_edge != ie
3433 	  && new_direct_edge->callee == spec_target)
3434 	{
3435 	  new_direct_edge->indirect_inlining_edge = 1;
3436 	  top = IPA_EDGE_REF (cs);
3437 	  res = true;
3438 	  if (!new_direct_edge->speculative)
3439 	    continue;
3440 	}
3441       else if (new_direct_edge)
3442 	{
3443 	  new_direct_edge->indirect_inlining_edge = 1;
3444 	  if (new_direct_edge->call_stmt)
3445 	    new_direct_edge->call_stmt_cannot_inline_p
3446 	      = !gimple_check_call_matching_types (
3447 		  new_direct_edge->call_stmt,
3448 		  new_direct_edge->callee->decl, false);
3449 	  if (new_edges)
3450 	    {
3451 	      new_edges->safe_push (new_direct_edge);
3452 	      res = true;
3453 	    }
3454 	  top = IPA_EDGE_REF (cs);
3455 	  /* If speculative edge was introduced we still need to update
3456 	     call info of the indirect edge.  */
3457 	  if (!new_direct_edge->speculative)
3458 	    continue;
3459 	}
3460       if (jfunc->type == IPA_JF_PASS_THROUGH
3461           && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3462 	{
3463 	  if (ici->agg_contents
3464 	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3465 	      && !ici->polymorphic)
3466 	    ici->param_index = -1;
3467 	  else
3468 	    {
3469 	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3470 	      if (ici->polymorphic
3471 		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
3472 		ici->vptr_changed = true;
3473 	    }
3474 	}
3475       else if (jfunc->type == IPA_JF_ANCESTOR)
3476 	{
3477 	  if (ici->agg_contents
3478 	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3479 	      && !ici->polymorphic)
3480 	    ici->param_index = -1;
3481 	  else
3482 	    {
3483 	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3484 	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3485 	      if (ici->polymorphic
3486 		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
3487 		ici->vptr_changed = true;
3488 	    }
3489 	}
3490       else
3491 	/* Either we can find a destination for this edge now or never. */
3492 	ici->param_index = -1;
3493     }
3494 
3495   return res;
3496 }
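
/* A sketch of the formal-id remapping done above for a simple pass-through
   (the indices are hypothetical): suppose the inlined callee makes an
   indirect call through its parameter 0 and the corresponding jump
   function says that parameter 0 is a plain copy of caller parameter 2.
   After inlining, ICI->PARAM_INDEX is rewritten from 0 to 2 so that the
   indirect call is described relative to the new root.  For
   IPA_JF_ANCESTOR the same remapping additionally adds the ancestor
   offset to ICI->OFFSET.  */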
3497 
3498 /* Recursively traverse subtree of NODE (including node) made of inlined
3499    cgraph_edges when CS has been inlined and invoke
3500    update_indirect_edges_after_inlining on all nodes and
3501    update_jump_functions_after_inlining on all non-inlined edges that lead out
3502    of this subtree.  Newly discovered indirect edges will be added to
3503    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
3504    created.  */
3505 
3506 static bool
3507 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3508 				   struct cgraph_node *node,
3509 				   vec<cgraph_edge *> *new_edges)
3510 {
3511   struct cgraph_edge *e;
3512   bool res;
3513 
3514   res = update_indirect_edges_after_inlining (cs, node, new_edges);
3515 
3516   for (e = node->callees; e; e = e->next_callee)
3517     if (!e->inline_failed)
3518       res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3519     else
3520       update_jump_functions_after_inlining (cs, e);
3521   for (e = node->indirect_calls; e; e = e->next_callee)
3522     update_jump_functions_after_inlining (cs, e);
3523 
3524   return res;
3525 }
3526 
3527 /* Combine two controlled uses counts as done during inlining.  */
3528 
3529 static int
3530 combine_controlled_uses_counters (int c, int d)
3531 {
3532   if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3533     return IPA_UNDESCRIBED_USE;
3534   else
3535     return c + d - 1;
3536 }
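
/* The arithmetic above in examples (the values are illustrative):

       combine_controlled_uses_counters (2, 3) == 4
       combine_controlled_uses_counters (IPA_UNDESCRIBED_USE, 3)
	 == IPA_UNDESCRIBED_USE

   One use is subtracted because the use represented by the call site
   being inlined disappears, while the remaining uses of both counters
   are combined.  Unknown counts are contagious.  */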
3537 
3538 /* Propagate the number of controlled users from CS->callee to the new root
3539    of the tree of inlined nodes.  */
3540 
3541 static void
3542 propagate_controlled_uses (struct cgraph_edge *cs)
3543 {
3544   struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3545   struct cgraph_node *new_root = cs->caller->global.inlined_to
3546     ? cs->caller->global.inlined_to : cs->caller;
3547   struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3548   struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3549   int count, i;
3550 
3551   count = MIN (ipa_get_cs_argument_count (args),
3552 	       ipa_get_param_count (old_root_info));
3553   for (i = 0; i < count; i++)
3554     {
3555       struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3556       struct ipa_cst_ref_desc *rdesc;
3557 
3558       if (jf->type == IPA_JF_PASS_THROUGH)
3559 	{
3560 	  int src_idx, c, d;
3561 	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
3562 	  c = ipa_get_controlled_uses (new_root_info, src_idx);
3563 	  d = ipa_get_controlled_uses (old_root_info, i);
3564 
3565 	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3566 			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3567 	  c = combine_controlled_uses_counters (c, d);
3568 	  ipa_set_controlled_uses (new_root_info, src_idx, c);
3569 	  if (c == 0 && new_root_info->ipcp_orig_node)
3570 	    {
3571 	      struct cgraph_node *n;
3572 	      struct ipa_ref *ref;
3573 	      tree t = new_root_info->known_csts[src_idx];
3574 
3575 	      if (t && TREE_CODE (t) == ADDR_EXPR
3576 		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3577 		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3578 		  && (ref = new_root->find_reference (n, NULL, 0)))
3579 		{
3580 		  if (dump_file)
3581 		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
3582 			     "reference from %s to %s.\n",
3583 			     new_root->dump_name (),
3584 			     n->dump_name ());
3585 		  ref->remove_reference ();
3586 		}
3587 	    }
3588 	}
3589       else if (jf->type == IPA_JF_CONST
3590 	       && (rdesc = jfunc_rdesc_usable (jf)))
3591 	{
3592 	  int d = ipa_get_controlled_uses (old_root_info, i);
3593 	  int c = rdesc->refcount;
3594 	  rdesc->refcount = combine_controlled_uses_counters (c, d);
3595 	  if (rdesc->refcount == 0)
3596 	    {
3597 	      tree cst = ipa_get_jf_constant (jf);
3598 	      struct cgraph_node *n;
3599 	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3600 				   && TREE_CODE (TREE_OPERAND (cst, 0))
3601 				   == FUNCTION_DECL);
3602 	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
3603 	      if (n)
3604 		{
3605 		  struct cgraph_node *clone;
3606 		  bool ok;
3607 		  ok = remove_described_reference (n, rdesc);
3608 		  gcc_checking_assert (ok);
3609 
3610 		  clone = cs->caller;
3611 		  while (clone->global.inlined_to
3612 			 && clone != rdesc->cs->caller
3613 			 && IPA_NODE_REF (clone)->ipcp_orig_node)
3614 		    {
3615 		      struct ipa_ref *ref;
3616 		      ref = clone->find_reference (n, NULL, 0);
3617 		      if (ref)
3618 			{
3619 			  if (dump_file)
3620 			    fprintf (dump_file, "ipa-prop: Removing "
3621 				     "cloning-created reference "
3622 				     "from %s to %s.\n",
3623 				     clone->dump_name (),
3624 				     n->dump_name ());
3625 			  ref->remove_reference ();
3626 			}
3627 		      clone = clone->callers->caller;
3628 		    }
3629 		}
3630 	    }
3631 	}
3632     }
3633 
3634   for (i = ipa_get_param_count (old_root_info);
3635        i < ipa_get_cs_argument_count (args);
3636        i++)
3637     {
3638       struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3639 
3640       if (jf->type == IPA_JF_CONST)
3641 	{
3642 	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3643 	  if (rdesc)
3644 	    rdesc->refcount = IPA_UNDESCRIBED_USE;
3645 	}
3646       else if (jf->type == IPA_JF_PASS_THROUGH)
3647 	ipa_set_controlled_uses (new_root_info,
3648 				 jf->value.pass_through.formal_id,
3649 				 IPA_UNDESCRIBED_USE);
3650     }
3651 }
3652 
3653 /* Update jump functions and call note functions on inlining the call site CS.
3654    CS is expected to lead to a node already cloned by
3655    cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
3656    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
3657    created.  */
3658 
3659 bool
3660 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3661 				   vec<cgraph_edge *> *new_edges)
3662 {
3663   bool changed;
3664   /* Do nothing if the preparation phase has not been carried out yet
3665      (i.e. during early inlining).  */
3666   if (!ipa_node_params_sum)
3667     return false;
3668   gcc_assert (ipa_edge_args_sum);
3669 
3670   propagate_controlled_uses (cs);
3671   changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3672 
3673   return changed;
3674 }
3675 
3676 /* Ensure that the array of edge argument infos is big enough to accommodate
3677    a structure for all edges, reallocating it if necessary.  Also allocate
3678    the associated hash tables if they do not already exist.  */
3679 
3680 void
3681 ipa_check_create_edge_args (void)
3682 {
3683   if (!ipa_edge_args_sum)
3684     ipa_edge_args_sum
3685       = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
3686 	 ipa_edge_args_sum_t (symtab, true));
3687   if (!ipa_bits_hash_table)
3688     ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3689   if (!ipa_vr_hash_table)
3690     ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3691 }
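
/* Both this function and ipcp_transformation_initialize lazily allocate
   the shared ipa_bits and value range hash tables, so whichever entry
   point runs first creates them; the argument 37 is merely the initial
   size hint passed to the GC-allocated hash tables.  */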
3692 
3693 /* Free all ipa_edge structures.  */
3694 
3695 void
3696 ipa_free_all_edge_args (void)
3697 {
3698   if (!ipa_edge_args_sum)
3699     return;
3700 
3701   ipa_edge_args_sum->release ();
3702   ipa_edge_args_sum = NULL;
3703 }
3704 
3705 /* Free all ipa_node_params structures.  */
3706 
3707 void
3708 ipa_free_all_node_params (void)
3709 {
3710   ipa_node_params_sum->release ();
3711   ipa_node_params_sum = NULL;
3712 }
3713 
3714 /* Initialize IPA CP transformation summary and also allocate any necessary hash
3715    tables if they do not already exist.  */
3716 
3717 void
3718 ipcp_transformation_initialize (void)
3719 {
3720   if (!ipa_bits_hash_table)
3721     ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3722   if (!ipa_vr_hash_table)
3723     ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3724   if (ipcp_transformation_sum == NULL)
3725     ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
3726 }
3727 
3728 /* Set the aggregate replacements of NODE to be AGGVALS.  */
3729 
3730 void
3731 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3732 			      struct ipa_agg_replacement_value *aggvals)
3733 {
3734   ipcp_transformation_initialize ();
3735   ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
3736   s->agg_values = aggvals;
3737 }
3738 
3739 /* Hook that is called by cgraph.c when an edge is removed.  Adjust reference
3740    count data structures accordingly.  */
3741 
3742 void
3743 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
3744 {
3745   if (args->jump_functions)
3746     {
3747       struct ipa_jump_func *jf;
3748       int i;
3749       FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3750 	{
3751 	  struct ipa_cst_ref_desc *rdesc;
3752 	  try_decrement_rdesc_refcount (jf);
3753 	  if (jf->type == IPA_JF_CONST
3754 	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
3755 	      && rdesc->cs == cs)
3756 	    rdesc->cs = NULL;
3757 	}
3758     }
3759 }
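
/* Clearing RDESC->CS above is what makes later calls to
   remove_described_reference for the same descriptor fail gracefully:
   with a NULL originating edge it simply returns FALSE instead of
   touching the references of a call site that no longer exists.  */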
3760 
3761 /* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
3762    reference count data structures accordingly.  */
3763 
3764 void
3765 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3766 				ipa_edge_args *old_args, ipa_edge_args *new_args)
3767 {
3768   unsigned int i;
3769 
3770   new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3771   if (old_args->polymorphic_call_contexts)
3772     new_args->polymorphic_call_contexts
3773       = vec_safe_copy (old_args->polymorphic_call_contexts);
3774 
3775   for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3776     {
3777       struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3778       struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3779 
3780       dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3781 
3782       if (src_jf->type == IPA_JF_CONST)
3783 	{
3784 	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3785 
3786 	  if (!src_rdesc)
3787 	    dst_jf->value.constant.rdesc = NULL;
3788 	  else if (src->caller == dst->caller)
3789 	    {
3790 	      struct ipa_ref *ref;
3791 	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
3792 	      gcc_checking_assert (n);
3793 	      ref = src->caller->find_reference (n, src->call_stmt,
3794 						 src->lto_stmt_uid);
3795 	      gcc_checking_assert (ref);
3796 	      dst->caller->clone_reference (ref, ref->stmt);
3797 
3798 	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3799 	      dst_rdesc->cs = dst;
3800 	      dst_rdesc->refcount = src_rdesc->refcount;
3801 	      dst_rdesc->next_duplicate = NULL;
3802 	      dst_jf->value.constant.rdesc = dst_rdesc;
3803 	    }
3804 	  else if (src_rdesc->cs == src)
3805 	    {
3806 	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3807 	      dst_rdesc->cs = dst;
3808 	      dst_rdesc->refcount = src_rdesc->refcount;
3809 	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3810 	      src_rdesc->next_duplicate = dst_rdesc;
3811 	      dst_jf->value.constant.rdesc = dst_rdesc;
3812 	    }
3813 	  else
3814 	    {
3815 	      struct ipa_cst_ref_desc *dst_rdesc;
3816 	      /* This can happen during inlining, when a JFUNC can refer to a
3817 		 reference taken in a function up in the tree of inline clones.
3818 		 We need to find the duplicate that refers to our tree of
3819 		 inline clones.  */
3820 
3821 	      gcc_assert (dst->caller->global.inlined_to);
3822 	      for (dst_rdesc = src_rdesc->next_duplicate;
3823 		   dst_rdesc;
3824 		   dst_rdesc = dst_rdesc->next_duplicate)
3825 		{
3826 		  struct cgraph_node *top;
3827 		  top = dst_rdesc->cs->caller->global.inlined_to
3828 		    ? dst_rdesc->cs->caller->global.inlined_to
3829 		    : dst_rdesc->cs->caller;
3830 		  if (dst->caller->global.inlined_to == top)
3831 		    break;
3832 		}
3833 	      gcc_assert (dst_rdesc);
3834 	      dst_jf->value.constant.rdesc = dst_rdesc;
3835 	    }
3836 	}
3837       else if (dst_jf->type == IPA_JF_PASS_THROUGH
3838 	       && src->caller == dst->caller)
3839 	{
3840 	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
3841 	    ? dst->caller->global.inlined_to : dst->caller;
3842 	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3843 	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3844 
3845 	  int c = ipa_get_controlled_uses (root_info, idx);
3846 	  if (c != IPA_UNDESCRIBED_USE)
3847 	    {
3848 	      c++;
3849 	      ipa_set_controlled_uses (root_info, idx, c);
3850 	    }
3851 	}
3852     }
3853 }
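
/* The three IPA_JF_CONST cases above, summarized (an editorial note): a
   duplicate within the same caller gets a fresh rdesc plus a cloned
   reference; when SRC owns its rdesc, the copy is chained into the
   NEXT_DUPLICATE list; otherwise DST must live in a tree of inline clones
   and the matching duplicate is found by walking that list until its
   inline root coincides with DST's.  */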
3854 
3855 /* Analyze a function newly added to the callgraph.  */
3856 
3857 static void
3858 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3859 {
3860   if (node->has_gimple_body_p ())
3861     ipa_analyze_node (node);
3862 }
3863 
3864 /* Hook that is called by summary when a node is duplicated.  */
3865 
3866 void
3867 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3868 			     ipa_node_params *old_info,
3869 			     ipa_node_params *new_info)
3870 {
3871   ipa_agg_replacement_value *old_av, *new_av;
3872 
3873   new_info->descriptors = vec_safe_copy (old_info->descriptors);
3874   new_info->lattices = NULL;
3875   new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3876   new_info->known_csts = old_info->known_csts.copy ();
3877   new_info->known_contexts = old_info->known_contexts.copy ();
3878 
3879   new_info->analysis_done = old_info->analysis_done;
3880   new_info->node_enqueued = old_info->node_enqueued;
3881   new_info->versionable = old_info->versionable;
3882 
3883   old_av = ipa_get_agg_replacements_for_node (src);
3884   if (old_av)
3885     {
3886       new_av = NULL;
3887       while (old_av)
3888 	{
3889 	  struct ipa_agg_replacement_value *v;
3890 
3891 	  v = ggc_alloc<ipa_agg_replacement_value> ();
3892 	  memcpy (v, old_av, sizeof (*v));
3893 	  v->next = new_av;
3894 	  new_av = v;
3895 	  old_av = old_av->next;
3896 	}
3897       ipa_set_node_agg_value_chain (dst, new_av);
3898     }
3899 
3900   ipcp_transformation *src_trans = ipcp_get_transformation_summary (src);
3901 
3902   if (src_trans)
3903     {
3904       ipcp_transformation_initialize ();
3905       src_trans = ipcp_transformation_sum->get_create (src);
3906       ipcp_transformation *dst_trans
3907 	= ipcp_transformation_sum->get_create (dst);
3908 
3909       dst_trans->bits = vec_safe_copy (src_trans->bits);
3910 
3911       const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3912       vec<ipa_vr, va_gc> *&dst_vr
3913 	= ipcp_get_transformation_summary (dst)->m_vr;
3914       if (vec_safe_length (src_trans->m_vr) > 0)
3915 	{
3916 	  vec_safe_reserve_exact (dst_vr, src_vr->length ());
3917 	  for (unsigned i = 0; i < src_vr->length (); ++i)
3918 	    dst_vr->quick_push ((*src_vr)[i]);
3919 	}
3920     }
3921 }
3922 
3923 /* Register our cgraph hooks if they are not already there.  */
3924 
3925 void
3926 ipa_register_cgraph_hooks (void)
3927 {
3928   ipa_check_create_node_params ();
3929   ipa_check_create_edge_args ();
3930 
3931   function_insertion_hook_holder =
3932       symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3933 }
3934 
3935 /* Unregister our cgraph hooks if they are registered.  */
3936 
3937 static void
3938 ipa_unregister_cgraph_hooks (void)
3939 {
3940   symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3941   function_insertion_hook_holder = NULL;
3942 }
3943 
3944 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3945    longer needed after ipa-cp.  */
3946 
3947 void
3948 ipa_free_all_structures_after_ipa_cp (void)
3949 {
3950   if (!optimize && !in_lto_p)
3951     {
3952       ipa_free_all_edge_args ();
3953       ipa_free_all_node_params ();
3954       ipcp_sources_pool.release ();
3955       ipcp_cst_values_pool.release ();
3956       ipcp_poly_ctx_values_pool.release ();
3957       ipcp_agg_lattice_pool.release ();
3958       ipa_unregister_cgraph_hooks ();
3959       ipa_refdesc_pool.release ();
3960     }
3961 }
3962 
3963 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3964    longer needed after indirect inlining.  */
3965 
3966 void
3967 ipa_free_all_structures_after_iinln (void)
3968 {
3969   ipa_free_all_edge_args ();
3970   ipa_free_all_node_params ();
3971   ipa_unregister_cgraph_hooks ();
3972   ipcp_sources_pool.release ();
3973   ipcp_cst_values_pool.release ();
3974   ipcp_poly_ctx_values_pool.release ();
3975   ipcp_agg_lattice_pool.release ();
3976   ipa_refdesc_pool.release ();
3977 }
3978 
3979 /* Print ipa_tree_map data structures of function NODE to F.  */
3981 
3982 void
3983 ipa_print_node_params (FILE *f, struct cgraph_node *node)
3984 {
3985   int i, count;
3986   struct ipa_node_params *info;
3987 
3988   if (!node->definition)
3989     return;
3990   info = IPA_NODE_REF (node);
3991   fprintf (f, "  function  %s parameter descriptors:\n", node->dump_name ());
3992   count = ipa_get_param_count (info);
3993   for (i = 0; i < count; i++)
3994     {
3995       int c;
3996 
3997       fprintf (f, "    ");
3998       ipa_dump_param (f, info, i);
3999       if (ipa_is_param_used (info, i))
4000 	fprintf (f, " used");
4001       c = ipa_get_controlled_uses (info, i);
4002       if (c == IPA_UNDESCRIBED_USE)
4003 	fprintf (f, " undescribed_use");
4004       else
4005 	fprintf (f, "  controlled_uses=%i", c);
4006       fprintf (f, "\n");
4007     }
4008 }
4009 
4010 /* Print ipa_tree_map data structures of all functions in the
4011    callgraph to F.  */
4012 
4013 void
4014 ipa_print_all_params (FILE * f)
4015 {
4016   struct cgraph_node *node;
4017 
4018   fprintf (f, "\nFunction parameters:\n");
4019   FOR_EACH_FUNCTION (node)
4020     ipa_print_node_params (f, node);
4021 }
4022 
4023 /* Dump the AV linked list.  */
4024 
4025 void
4026 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4027 {
4028   bool comma = false;
4029   fprintf (f, "     Aggregate replacements:");
4030   for (; av; av = av->next)
4031     {
4032       fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4033 	       av->index, av->offset);
4034       print_generic_expr (f, av->value);
4035       comma = true;
4036     }
4037   fprintf (f, "\n");
4038 }
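
/* For reference, the dump produced above looks like this (the values are
   made up):

       Aggregate replacements: 0[0]=4, 1[32]=7

   i.e. parameter 0 at bit offset 0 is replaced by the constant 4 and
   parameter 1 at bit offset 32 by the constant 7.  */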
4039 
4040 /* Stream out jump function JUMP_FUNC to OB.  */
4041 
4042 static void
4043 ipa_write_jump_function (struct output_block *ob,
4044 			 struct ipa_jump_func *jump_func)
4045 {
4046   struct ipa_agg_jf_item *item;
4047   struct bitpack_d bp;
4048   int i, count;
4049   int flag = 0;
4050 
4051   /* ADDR_EXPRs are very common IP invariants; save some streamer data
4052      as well as WPA memory by handling them specially.  */
4053   if (jump_func->type == IPA_JF_CONST
4054       && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4055     flag = 1;
4056 
4057   streamer_write_uhwi (ob, jump_func->type * 2 + flag);
4058   switch (jump_func->type)
4059     {
4060     case IPA_JF_UNKNOWN:
4061       break;
4062     case IPA_JF_CONST:
4063       gcc_assert (
4064 	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4065       stream_write_tree (ob,
4066 			 flag
4067 			 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4068 			 : jump_func->value.constant.value, true);
4069       break;
4070     case IPA_JF_PASS_THROUGH:
4071       streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4072       if (jump_func->value.pass_through.operation == NOP_EXPR)
4073 	{
4074 	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4075 	  bp = bitpack_create (ob->main_stream);
4076 	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4077 	  streamer_write_bitpack (&bp);
4078 	}
4079       else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4080 	       == tcc_unary)
4081 	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4082       else
4083 	{
4084 	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4085 	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4086 	}
4087       break;
4088     case IPA_JF_ANCESTOR:
4089       streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4090       streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4091       bp = bitpack_create (ob->main_stream);
4092       bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4093       streamer_write_bitpack (&bp);
4094       break;
4095     }
4096 
4097   count = vec_safe_length (jump_func->agg.items);
4098   streamer_write_uhwi (ob, count);
4099   if (count)
4100     {
4101       bp = bitpack_create (ob->main_stream);
4102       bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4103       streamer_write_bitpack (&bp);
4104     }
4105 
4106   FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4107     {
4108       streamer_write_uhwi (ob, item->offset);
4109       stream_write_tree (ob, item->value, true);
4110     }
4111 
4112   bp = bitpack_create (ob->main_stream);
4113   bp_pack_value (&bp, !!jump_func->bits, 1);
4114   streamer_write_bitpack (&bp);
4115   if (jump_func->bits)
4116     {
4117       streamer_write_widest_int (ob, jump_func->bits->value);
4118       streamer_write_widest_int (ob, jump_func->bits->mask);
4119     }
4120   bp_pack_value (&bp, !!jump_func->m_vr, 1);
4121   streamer_write_bitpack (&bp);
4122   if (jump_func->m_vr)
4123     {
4124       streamer_write_enum (ob->main_stream, value_rang_type,
4125 			   VR_LAST, jump_func->m_vr->kind ());
4126       stream_write_tree (ob, jump_func->m_vr->min (), true);
4127       stream_write_tree (ob, jump_func->m_vr->max (), true);
4128     }
4129 }
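
/* The tag written first above packs the jump function type together with
   the ADDR_EXPR flag as TYPE * 2 + FLAG.  With the current jump_func_type
   layout a constant jump function whose value is an ADDR_EXPR is
   therefore announced as

       streamer_write_uhwi (ob, IPA_JF_CONST * 2 + 1);

   that is, the value 3, followed by the pointed-to declaration instead of
   the ADDR_EXPR itself, saving one streamed tree per invariant address.  */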
4130 
4131 /* Read in jump function JUMP_FUNC from IB.  */
4132 
4133 static void
4134 ipa_read_jump_function (struct lto_input_block *ib,
4135 			struct ipa_jump_func *jump_func,
4136 			struct cgraph_edge *cs,
4137 			struct data_in *data_in,
4138 			bool prevails)
4139 {
4140   enum jump_func_type jftype;
4141   enum tree_code operation;
4142   int i, count;
4143   int val = streamer_read_uhwi (ib);
4144   bool flag = val & 1;
4145 
4146   jftype = (enum jump_func_type) (val / 2);
4147   switch (jftype)
4148     {
4149     case IPA_JF_UNKNOWN:
4150       ipa_set_jf_unknown (jump_func);
4151       break;
4152     case IPA_JF_CONST:
4153       {
4154 	tree t = stream_read_tree (ib, data_in);
4155 	if (flag && prevails)
4156 	  t = build_fold_addr_expr (t);
4157 	ipa_set_jf_constant (jump_func, t, cs);
4158       }
4159       break;
4160     case IPA_JF_PASS_THROUGH:
4161       operation = (enum tree_code) streamer_read_uhwi (ib);
4162       if (operation == NOP_EXPR)
4163 	{
4164 	  int formal_id =  streamer_read_uhwi (ib);
4165 	  struct bitpack_d bp = streamer_read_bitpack (ib);
4166 	  bool agg_preserved = bp_unpack_value (&bp, 1);
4167 	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4168 	}
4169       else if (TREE_CODE_CLASS (operation) == tcc_unary)
4170 	{
4171 	  int formal_id =  streamer_read_uhwi (ib);
4172 	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4173 	}
4174       else
4175 	{
4176 	  tree operand = stream_read_tree (ib, data_in);
4177 	  int formal_id =  streamer_read_uhwi (ib);
4178 	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4179 					 operation);
4180 	}
4181       break;
4182     case IPA_JF_ANCESTOR:
4183       {
4184 	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4185 	int formal_id = streamer_read_uhwi (ib);
4186 	struct bitpack_d bp = streamer_read_bitpack (ib);
4187 	bool agg_preserved = bp_unpack_value (&bp, 1);
4188 	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4189 	break;
4190       }
4191     default:
4192       fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4193     }
4194 
4195   count = streamer_read_uhwi (ib);
4196   if (prevails)
4197     vec_alloc (jump_func->agg.items, count);
4198   if (count)
4199     {
4200       struct bitpack_d bp = streamer_read_bitpack (ib);
4201       jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4202     }
4203   for (i = 0; i < count; i++)
4204     {
4205       struct ipa_agg_jf_item item;
4206       item.offset = streamer_read_uhwi (ib);
4207       item.value = stream_read_tree (ib, data_in);
4208       if (prevails)
4209         jump_func->agg.items->quick_push (item);
4210     }
4211 
4212   struct bitpack_d bp = streamer_read_bitpack (ib);
4213   bool bits_known = bp_unpack_value (&bp, 1);
4214   if (bits_known)
4215     {
4216       widest_int value = streamer_read_widest_int (ib);
4217       widest_int mask = streamer_read_widest_int (ib);
4218       if (prevails)
4219         ipa_set_jfunc_bits (jump_func, value, mask);
4220     }
4221   else
4222     jump_func->bits = NULL;
4223 
4224   struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4225   bool vr_known = bp_unpack_value (&vr_bp, 1);
4226   if (vr_known)
4227     {
4228       enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
4229 						       VR_LAST);
4230       tree min = stream_read_tree (ib, data_in);
4231       tree max = stream_read_tree (ib, data_in);
4232       if (prevails)
4233         ipa_set_jfunc_vr (jump_func, type, min, max);
4234     }
4235   else
4236     jump_func->m_vr = NULL;
4237 }
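
/* Decoding mirrors the encoding in ipa_write_jump_function: VAL & 1
   recovers the ADDR_EXPR flag and VAL / 2 the jump function type, so the
   tag 3 from the example above reads back as an IPA_JF_CONST whose
   streamed declaration is re-wrapped by build_fold_addr_expr, but only
   when the node PREVAILS and its trees are therefore kept.  */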
4238 
4239 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4240    relevant to indirect inlining to OB.  */
4241 
4242 static void
4243 ipa_write_indirect_edge_info (struct output_block *ob,
4244 			      struct cgraph_edge *cs)
4245 {
4246   struct cgraph_indirect_call_info *ii = cs->indirect_info;
4247   struct bitpack_d bp;
4248 
4249   streamer_write_hwi (ob, ii->param_index);
4250   bp = bitpack_create (ob->main_stream);
4251   bp_pack_value (&bp, ii->polymorphic, 1);
4252   bp_pack_value (&bp, ii->agg_contents, 1);
4253   bp_pack_value (&bp, ii->member_ptr, 1);
4254   bp_pack_value (&bp, ii->by_ref, 1);
4255   bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4256   bp_pack_value (&bp, ii->vptr_changed, 1);
4257   streamer_write_bitpack (&bp);
4258   if (ii->agg_contents || ii->polymorphic)
4259     streamer_write_hwi (ob, ii->offset);
4260   else
4261     gcc_assert (ii->offset == 0);
4262 
4263   if (ii->polymorphic)
4264     {
4265       streamer_write_hwi (ob, ii->otr_token);
4266       stream_write_tree (ob, ii->otr_type, true);
4267       ii->context.stream_out (ob);
4268     }
4269 }
4270 
4271 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4272    relevant to indirect inlining from IB.  */
4273 
4274 static void
4275 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4276 			     struct data_in *data_in,
4277 			     struct cgraph_edge *cs)
4278 {
4279   struct cgraph_indirect_call_info *ii = cs->indirect_info;
4280   struct bitpack_d bp;
4281 
4282   ii->param_index = (int) streamer_read_hwi (ib);
4283   bp = streamer_read_bitpack (ib);
4284   ii->polymorphic = bp_unpack_value (&bp, 1);
4285   ii->agg_contents = bp_unpack_value (&bp, 1);
4286   ii->member_ptr = bp_unpack_value (&bp, 1);
4287   ii->by_ref = bp_unpack_value (&bp, 1);
4288   ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4289   ii->vptr_changed = bp_unpack_value (&bp, 1);
4290   if (ii->agg_contents || ii->polymorphic)
4291     ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4292   else
4293     ii->offset = 0;
4294   if (ii->polymorphic)
4295     {
4296       ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4297       ii->otr_type = stream_read_tree (ib, data_in);
4298       ii->context.stream_in (ib, data_in);
4299     }
4300 }
4301 
4302 /* Stream out NODE info to OB.  */
4303 
4304 static void
4305 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4306 {
4307   int node_ref;
4308   lto_symtab_encoder_t encoder;
4309   struct ipa_node_params *info = IPA_NODE_REF (node);
4310   int j;
4311   struct cgraph_edge *e;
4312   struct bitpack_d bp;
4313 
4314   encoder = ob->decl_state->symtab_node_encoder;
4315   node_ref = lto_symtab_encoder_encode (encoder, node);
4316   streamer_write_uhwi (ob, node_ref);
4317 
4318   streamer_write_uhwi (ob, ipa_get_param_count (info));
4319   for (j = 0; j < ipa_get_param_count (info); j++)
4320     streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4321   bp = bitpack_create (ob->main_stream);
4322   gcc_assert (info->analysis_done
4323 	      || ipa_get_param_count (info) == 0);
4324   gcc_assert (!info->node_enqueued);
4325   gcc_assert (!info->ipcp_orig_node);
4326   for (j = 0; j < ipa_get_param_count (info); j++)
4327     bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4328   streamer_write_bitpack (&bp);
4329   for (j = 0; j < ipa_get_param_count (info); j++)
4330     {
4331       streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4332       stream_write_tree (ob, ipa_get_type (info, j), true);
4333     }
4334   for (e = node->callees; e; e = e->next_callee)
4335     {
4336       struct ipa_edge_args *args = IPA_EDGE_REF (e);
4337 
4338       streamer_write_uhwi (ob,
4339 			   ipa_get_cs_argument_count (args) * 2
4340 			   + (args->polymorphic_call_contexts != NULL));
4341       for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4342 	{
4343 	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4344 	  if (args->polymorphic_call_contexts != NULL)
4345 	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4346 	}
4347     }
4348   for (e = node->indirect_calls; e; e = e->next_callee)
4349     {
4350       struct ipa_edge_args *args = IPA_EDGE_REF (e);
4351 
4352       streamer_write_uhwi (ob,
4353 			   ipa_get_cs_argument_count (args) * 2
4354 			   + (args->polymorphic_call_contexts != NULL));
4355       for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4356 	{
4357 	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4358 	  if (args->polymorphic_call_contexts != NULL)
4359 	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4360 	}
4361       ipa_write_indirect_edge_info (ob, e);
4362     }
4363 }
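
/* The per-edge counts above use the same tagging trick as the jump
   function type: ARGUMENT_COUNT * 2 plus one bit saying whether a
   polymorphic call context follows each jump function.  For instance, an
   edge with three arguments and computed contexts is announced as the
   uhwi 7 (these numbers are only illustrative).  */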
4364 
4365 /* Stream in edge E from IB.  */
4366 
4367 static void
4368 ipa_read_edge_info (struct lto_input_block *ib,
4369 		    struct data_in *data_in,
4370 		    struct cgraph_edge *e, bool prevails)
4371 {
4372   int count = streamer_read_uhwi (ib);
4373   bool contexts_computed = count & 1;
4374 
4375   count /= 2;
4376   if (!count)
4377     return;
4378   if (prevails && e->possibly_call_in_translation_unit_p ())
4379     {
4380       struct ipa_edge_args *args = IPA_EDGE_REF (e);
4381       vec_safe_grow_cleared (args->jump_functions, count);
4382       if (contexts_computed)
4383 	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4384       for (int k = 0; k < count; k++)
4385 	{
4386 	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4387 				  data_in, prevails);
4388 	  if (contexts_computed)
4389 	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in
4390 							     (ib, data_in);
4391 	}
4392     }
4393   else
4394     {
4395       for (int k = 0; k < count; k++)
4396 	{
4397 	  struct ipa_jump_func dummy;
4398 	  ipa_read_jump_function (ib, &dummy, e,
4399 				  data_in, prevails);
4400 	  if (contexts_computed)
4401 	    {
4402 	      struct ipa_polymorphic_call_context ctx;
4403 	      ctx.stream_in (ib, data_in);
4404 	    }
4405 	}
4406     }
4407 }
4408 
4409 /* Stream in NODE info from IB.  */
4410 
4411 static void
4412 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4413 		    struct data_in *data_in)
4414 {
4415   int k;
4416   struct cgraph_edge *e;
4417   struct bitpack_d bp;
4418   bool prevails = node->prevailing_p ();
4419   struct ipa_node_params *info = prevails ? IPA_NODE_REF (node) : NULL;
4420 
4421   int param_count = streamer_read_uhwi (ib);
4422   if (prevails)
4423     {
4424       ipa_alloc_node_params (node, param_count);
4425       for (k = 0; k < param_count; k++)
4426         (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
4427       if (ipa_get_param_count (info) != 0)
4428 	info->analysis_done = true;
4429       info->node_enqueued = false;
4430     }
4431   else
4432     for (k = 0; k < param_count; k++)
4433       streamer_read_uhwi (ib);
4434 
4435   bp = streamer_read_bitpack (ib);
4436   for (k = 0; k < param_count; k++)
4437     {
4438       bool used = bp_unpack_value (&bp, 1);
4439 
4440       if (prevails)
4441         ipa_set_param_used (info, k, used);
4442     }
4443   for (k = 0; k < param_count; k++)
4444     {
4445       int nuses = streamer_read_hwi (ib);
4446       tree type = stream_read_tree (ib, data_in);
4447 
4448       if (prevails)
4449 	{
4450 	  ipa_set_controlled_uses (info, k, nuses);
4451 	  (*info->descriptors)[k].decl_or_type = type;
4452 	}
4453     }
4454   for (e = node->callees; e; e = e->next_callee)
4455     ipa_read_edge_info (ib, data_in, e, prevails);
4456   for (e = node->indirect_calls; e; e = e->next_callee)
4457     {
4458       ipa_read_edge_info (ib, data_in, e, prevails);
4459       ipa_read_indirect_edge_info (ib, data_in, e);
4460     }
4461 }
4462 
4463 /* Write jump functions for all functions in the current LTO partition.  */
4464 
4465 void
4466 ipa_prop_write_jump_functions (void)
4467 {
4468   struct cgraph_node *node;
4469   struct output_block *ob;
4470   unsigned int count = 0;
4471   lto_symtab_encoder_iterator lsei;
4472   lto_symtab_encoder_t encoder;
4473 
4474   if (!ipa_node_params_sum || !ipa_edge_args_sum)
4475     return;
4476 
4477   ob = create_output_block (LTO_section_jump_functions);
4478   encoder = ob->decl_state->symtab_node_encoder;
4479   ob->symbol = NULL;
4480   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4481        lsei_next_function_in_partition (&lsei))
4482     {
4483       node = lsei_cgraph_node (lsei);
4484       if (node->has_gimple_body_p ()
4485 	  && IPA_NODE_REF (node) != NULL)
4486 	count++;
4487     }
4488 
4489   streamer_write_uhwi (ob, count);
4490 
4491   /* Process all of the functions.  */
4492   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4493        lsei_next_function_in_partition (&lsei))
4494     {
4495       node = lsei_cgraph_node (lsei);
4496       if (node->has_gimple_body_p ()
4497 	  && IPA_NODE_REF (node) != NULL)
4498         ipa_write_node_info (ob, node);
4499     }
4500   streamer_write_char_stream (ob->main_stream, 0);
4501   produce_asm (ob, NULL);
4502   destroy_output_block (ob);
4503 }
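
/* The two identical partition walks above implement the usual
   count-then-emit streaming pattern: the number of records must precede
   them in the section and the output block cannot be rewound, so the
   nodes are filtered once to compute COUNT and a second time to write
   their info.  */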
4504 
4505 /* Read section in file FILE_DATA of length LEN with data DATA.  */
4506 
4507 static void
4508 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4509 		       size_t len)
4510 {
4511   const struct lto_function_header *header =
4512     (const struct lto_function_header *) data;
4513   const int cfg_offset = sizeof (struct lto_function_header);
4514   const int main_offset = cfg_offset + header->cfg_size;
4515   const int string_offset = main_offset + header->main_size;
4516   struct data_in *data_in;
4517   unsigned int i;
4518   unsigned int count;
4519 
4520   lto_input_block ib_main ((const char *) data + main_offset,
4521 			   header->main_size, file_data->mode_table);
4522 
4523   data_in =
4524     lto_data_in_create (file_data, (const char *) data + string_offset,
4525 			header->string_size, vNULL);
4526   count = streamer_read_uhwi (&ib_main);
4527 
4528   for (i = 0; i < count; i++)
4529     {
4530       unsigned int index;
4531       struct cgraph_node *node;
4532       lto_symtab_encoder_t encoder;
4533 
4534       index = streamer_read_uhwi (&ib_main);
4535       encoder = file_data->symtab_node_encoder;
4536       node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4537 								index));
4538       gcc_assert (node->definition);
4539       ipa_read_node_info (&ib_main, node, data_in);
4540     }
4541   lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4542 			 len);
4543   lto_data_in_delete (data_in);
4544 }
4545 
4546 /* Read ipcp jump functions.  */
4547 
4548 void
4549 ipa_prop_read_jump_functions (void)
4550 {
4551   struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4552   struct lto_file_decl_data *file_data;
4553   unsigned int j = 0;
4554 
4555   ipa_check_create_node_params ();
4556   ipa_check_create_edge_args ();
4557   ipa_register_cgraph_hooks ();
4558 
4559   while ((file_data = file_data_vec[j++]))
4560     {
4561       size_t len;
4562       const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4563 
4564       if (data)
4565         ipa_prop_read_section (file_data, data, len);
4566     }
4567 }
4568 
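/* Stream out the IPA-CP transformation data of NODE, i.e. its aggregate
   replacement values, known value ranges and known bits, to OB.  */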
4569 void
4570 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4571 {
4572   int node_ref;
4573   unsigned int count = 0;
4574   lto_symtab_encoder_t encoder;
4575   struct ipa_agg_replacement_value *aggvals, *av;
4576 
4577   aggvals = ipa_get_agg_replacements_for_node (node);
4578   encoder = ob->decl_state->symtab_node_encoder;
4579   node_ref = lto_symtab_encoder_encode (encoder, node);
4580   streamer_write_uhwi (ob, node_ref);
4581 
4582   for (av = aggvals; av; av = av->next)
4583     count++;
4584   streamer_write_uhwi (ob, count);
4585 
4586   for (av = aggvals; av; av = av->next)
4587     {
4588       struct bitpack_d bp;
4589 
4590       streamer_write_uhwi (ob, av->offset);
4591       streamer_write_uhwi (ob, av->index);
4592       stream_write_tree (ob, av->value, true);
4593 
4594       bp = bitpack_create (ob->main_stream);
4595       bp_pack_value (&bp, av->by_ref, 1);
4596       streamer_write_bitpack (&bp);
4597     }
4598 
4599   ipcp_transformation *ts = ipcp_get_transformation_summary (node);
4600   if (ts && vec_safe_length (ts->m_vr) > 0)
4601     {
4602       count = ts->m_vr->length ();
4603       streamer_write_uhwi (ob, count);
4604       for (unsigned i = 0; i < count; ++i)
4605 	{
4606 	  struct bitpack_d bp;
4607 	  ipa_vr *parm_vr = &(*ts->m_vr)[i];
4608 	  bp = bitpack_create (ob->main_stream);
4609 	  bp_pack_value (&bp, parm_vr->known, 1);
4610 	  streamer_write_bitpack (&bp);
4611 	  if (parm_vr->known)
4612 	    {
4613 	      streamer_write_enum (ob->main_stream, value_rang_type,
4614 				   VR_LAST, parm_vr->type);
4615 	      streamer_write_wide_int (ob, parm_vr->min);
4616 	      streamer_write_wide_int (ob, parm_vr->max);
4617 	    }
4618 	}
4619     }
4620   else
4621     streamer_write_uhwi (ob, 0);
4622 
4623   if (ts && vec_safe_length (ts->bits) > 0)
4624     {
4625       count = ts->bits->length ();
4626       streamer_write_uhwi (ob, count);
4627 
4628       for (unsigned i = 0; i < count; ++i)
4629 	{
4630 	  const ipa_bits *bits_jfunc = (*ts->bits)[i];
4631 	  struct bitpack_d bp = bitpack_create (ob->main_stream);
4632 	  bp_pack_value (&bp, !!bits_jfunc, 1);
4633 	  streamer_write_bitpack (&bp);
4634 	  if (bits_jfunc)
4635 	    {
4636 	      streamer_write_widest_int (ob, bits_jfunc->value);
4637 	      streamer_write_widest_int (ob, bits_jfunc->mask);
4638 	    }
4639 	}
4640     }
4641   else
4642     streamer_write_uhwi (ob, 0);
4643 }
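
/* The record emitted above therefore has a fixed shape: the node
   reference, the aggregate replacement count and records, the value range
   count and records, and the known-bits count and records, with an
   explicit zero written for any absent part so that the reader in
   read_ipcp_transformation_info stays in sync.  */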
4644 
4645 /* Stream in the IPA-CP transformation data (aggregate replacement values,
   value ranges and known bits) of NODE from IB.  */
4646 
4647 static void
4648 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4649 			       data_in *data_in)
4650 {
4651   struct ipa_agg_replacement_value *aggvals = NULL;
4652   unsigned int count, i;
4653 
4654   count = streamer_read_uhwi (ib);
4655   for (i = 0; i < count; i++)
4656     {
4657       struct ipa_agg_replacement_value *av;
4658       struct bitpack_d bp;
4659 
4660       av = ggc_alloc<ipa_agg_replacement_value> ();
4661       av->offset = streamer_read_uhwi (ib);
4662       av->index = streamer_read_uhwi (ib);
4663       av->value = stream_read_tree (ib, data_in);
4664       bp = streamer_read_bitpack (ib);
4665       av->by_ref = bp_unpack_value (&bp, 1);
4666       av->next = aggvals;
4667       aggvals = av;
4668     }
4669   ipa_set_node_agg_value_chain (node, aggvals);
4670 
4671   count = streamer_read_uhwi (ib);
4672   if (count > 0)
4673     {
4674       ipcp_transformation_initialize ();
4675       ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
4676       vec_safe_grow_cleared (ts->m_vr, count);
4677       for (i = 0; i < count; i++)
4678 	{
4679 	  ipa_vr *parm_vr;
4680 	  parm_vr = &(*ts->m_vr)[i];
4681 	  struct bitpack_d bp;
4682 	  bp = streamer_read_bitpack (ib);
4683 	  parm_vr->known = bp_unpack_value (&bp, 1);
4684 	  if (parm_vr->known)
4685 	    {
4686 	      parm_vr->type = streamer_read_enum (ib, value_range_kind,
4687 						  VR_LAST);
4688 	      parm_vr->min = streamer_read_wide_int (ib);
4689 	      parm_vr->max = streamer_read_wide_int (ib);
4690 	    }
4691 	}
4692     }
4693   count = streamer_read_uhwi (ib);
4694   if (count > 0)
4695     {
4696       ipcp_transformation_initialize ();
4697       ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
4698       vec_safe_grow_cleared (ts->bits, count);
4699 
4700       for (i = 0; i < count; i++)
4701 	{
4702 	  struct bitpack_d bp = streamer_read_bitpack (ib);
4703 	  bool known = bp_unpack_value (&bp, 1);
4704 	  if (known)
4705 	    {
4706 	      ipa_bits *bits
4707 		= ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
4708 					      streamer_read_widest_int (ib));
4709 	      (*ts->bits)[i] = bits;
4710 	    }
4711 	}
4712     }
4713 }
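
/* The value/mask pair read above follows the CCP convention: a bit set
   in MASK is unknown, a bit clear in MASK is known to equal the
   corresponding bit of VALUE.  For example (illustrative numbers
   only), value 0x4 with mask ~0xf (all higher bits unknown) says the
   low four bits of the parameter are known to be 0100.  */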

/* Write all aggregate replacements for the nodes in the set.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
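
/* The section produced here is, roughly (a sketch only; the per-node
   record layout is defined by write_ipcp_transformation_info):

       uhwi N			number of function records
       N x { node reference, aggregate values, ranges, bits }
       char 0			terminating byte

   read_replacements_section below walks the same structure.  */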

/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  /* Free the data with the same section type it was acquired with.  */
  lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}

/* Read IPA-CP transformation summaries (aggregate replacements, value
   ranges and bits information).  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
					       NULL, &len);
      if (data)
        read_replacements_section (file_data, data, len);
    }
}

/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
	c = v->index;
    }
  c++;

  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}
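
/* A worked example of the remapping above (illustrative): with four
   original parameters and combined_args_to_skip = { 1 }, the table is

       adj[0] = 0,  adj[1] = -1,  adj[2] = 1,  adj[3] = 2

   so a replacement recorded for original parameter 2 now refers to
   parameter 1 of the clone.  */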

/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};
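
/* Typical use, as in ipcp_transform_function below:

       ipcp_modif_dom_walker (&fbi, descriptors, aggval,
			      &something_changed, &cfg_changed)
	 .walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

   which visits every block dominated by the entry block once and
   rewrites matching aggregate loads in place.  */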

edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset, size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.
	     Check T rather than RHS so that nested components are
	     caught too.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
	continue;
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
  return NULL;
}
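
/* Hypothetical illustration of the rewrite performed above: if IPA-CP
   determined that the aggregate pointed to by parameter p always holds
   16 at offset 0, a matched load

       x_1 = p_2(D)->f;

   becomes

       x_1 = 16;

   after which the statement's EH edges may become dead and get
   purged.  */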

/* Update bits info of formal parameters as described in
   ipcp_transformation.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  tree parm = DECL_ARGUMENTS (node->decl);
  tree next_parm = parm;
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;

  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;

      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n",
			 align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);

	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n", old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}
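
/* Worked example for the pointer case above (illustrative numbers):
   mask 0xfffffff0 with value 0x4 yields tem & -tem == 0x10, i.e.
   align == 16 and misalign == 0x4 & 15 == 4, so the pointer is known
   to point 4 bytes past a 16-byte boundary.  */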

/* Update value range of formal parameters as described in
   ipcp_transformation.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u ", i);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].type == VR_ANTI_RANGE
		   && wi::eq_p (vr[i].min, 0)
		   && wi::eq_p (vr[i].max, 0))
	    {
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}
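
/* E.g. (illustrative values): a streamed VR_RANGE [0, 255] on an
   integer parameter becomes set_range_info on its default-def SSA
   name, whereas the anti-range ~[0, 0] on a pointer is the streamed
   encoding of "known non-NULL" and is translated into
   set_ptr_nonnull.  */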

/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walk_budget = PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);

  vec_safe_grow_cleared (descriptors, param_count);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);

  ipcp_transformation *s = ipcp_transformation_sum->get (node);
  s->agg_values = NULL;
  s->bits = NULL;
  s->m_vr = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;

  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return TODO_update_ssa_only_virtuals;
}

#include "gt-ipa-prop.h"