/* Interprocedural analyses.
   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"
#include "tree-cfgcleanup.h"

/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;

function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;

/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;

/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
    {
      return a->value == b->value && a->mask == b->mask;
    }
  static void
  mark_empty (ipa_bits *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const ipa_bits *p)
    {
      return p == NULL;
    }
  static bool
  is_deleted (const ipa_bits *p)
    {
      return p == reinterpret_cast<const ipa_bits *> (1);
    }
  static void
  mark_deleted (ipa_bits *&p)
    {
      p = reinterpret_cast<ipa_bits *> (1);
    }
};
/* Hash table for avoiding repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
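
/* As an illustration of how this cache is meant to be used (a hedged sketch
   only; the canonicalizing allocator used by GCC for this table is
   ipa_get_ipa_bits_for_value), a lookup would go roughly like this:

     ipa_bits tmp;
     tmp.value = value;
     tmp.mask = mask;
     ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
     if (!*slot)
       *slot = <GGC-allocated copy of tmp>;
     return *slot;

   so all jump functions with identical known bits share a single ipa_bits
   instance.  */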

/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range_base *>
{
  typedef value_range_base *value_type;
  typedef value_range_base *compare_type;
  static hashval_t
  hash (const value_range_base *p)
    {
      inchash::hash hstate (p->kind ());
      inchash::add_expr (p->min (), hstate);
      inchash::add_expr (p->max (), hstate);
      return hstate.end ();
    }
  static bool
  equal (const value_range_base *a, const value_range_base *b)
    {
      return (a->equal_p (*b)
	      && types_compatible_p (a->type (), b->type ()));
    }
  static void
  mark_empty (value_range_base *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const value_range_base *p)
    {
      return p == NULL;
    }
  static bool
  is_deleted (const value_range_base *p)
    {
      return p == reinterpret_cast<const value_range_base *> (1);
    }
  static void
  mark_deleted (value_range_base *&p)
    {
      p = reinterpret_cast<value_range_base *> (1);
    }
};
/* Hash table for avoiding repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;

/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
}
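
/* For example, a function compiled with function-specific options such as

     void __attribute__ ((optimize ("O0")))
     foo (void)
     {
     }

   has a non-NULL DECL_FUNCTION_SPECIFIC_OPTIMIZATION with optimize being
   zero, so the predicate above returns true and IPA-CP leaves foo alone
   (the function name is only illustrative).  */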

/* Return index of the formal whose tree is PTREE among the parameter
   DESCRIPTORS.  Return -1 if it is not found.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
			    tree ptree)
{
  int i, count;

  count = vec_safe_length (descriptors);
  for (i = 0; i < count; i++)
    if ((*descriptors)[i].decl_or_type == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the decl_or_type fields in parameter DESCRIPTORS that correspond
   to NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor, va_gc> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl_or_type = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump parameter I of the function corresponding to INFO to FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}

/* If necessary, allocate the vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count);
      return true;
    }
  else
    return false;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   decl_or_type fields.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name(jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f, jump_func->value.pass_through.operand);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (f);
	}

      if (jump_func->bits)
	{
	  fprintf (f, "         value: ");
	  print_hex (jump_func->bits->value, f);
	  fprintf (f, ", mask: ");
	  print_hex (jump_func->bits->mask, f);
	  fprintf (f, "\n");
	}
      else
	fprintf (f, "         Unknown bits\n");

      if (jump_func->m_vr)
	{
	  fprintf (f, "         VR  ");
	  fprintf (f, "%s[",
		   (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
	  print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
	  fprintf (f, ", ");
	  print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
	  fprintf (f, "]\n");
	}
      else
	fprintf (f, "         Unknown VR\n");
    }
}
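
/* The output produced above looks roughly like the following for a call
   passing an unmodified parameter and the constant 7 (an illustrative
   sample, not verbatim dump output):

       param 0: PASS THROUGH: 0, op nop_expr, agg_preserved
         Unknown bits
         Unknown VR
       param 1: CONST: 7
         value: 0x7, mask: 0x0
         VR  [7, 7]
*/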


/* Print the jump functions of all arguments on all call graph edges going
   out of NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s -> %s : \n",
	       node->dump_name (),
	       cs->callee->dump_name ());
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a jump function recording that we know nothing about the
   actual argument value.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits = NULL;
  jfunc->m_vr = NULL;
}

/* Set JFUNC to be a copy of another jump function (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be a unary pass-through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
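
/* To illustrate the setters above, in a caller such as

     void
     caller (int x, struct B *b)
     {
       callee (x, x + 4, &b->ancestor_field);
     }

   the first argument would typically get a simple pass-through jump
   function, the second an arithmetic pass-through (PLUS_EXPR with operand 4)
   and the third an ancestor jump function holding the offset of
   ancestor_field in bits (a simplified sketch with illustrative
   identifiers).  */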

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_ref_base_and_extent to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
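
/* As an example of the assumptions described above, given a hypothetical
   hierarchy

     struct A { virtual void f (); };
     struct B : A { B (); virtual void f (); };

   the body of B::B first calls the constructor of the A sub-object, then
   stores the VMT pointer for B and only then executes user-written code.
   A backward walk from a statement in that user code may therefore skip
   over call statements until it reaches the VMT stores.  */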

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally, functions are not allowed to change the dynamic type of such
   instances, but they may call destructors.  We assume that methods cannot
   destroy the THIS pointer.  Also, as a special case, constructors and
   destructors may change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that would require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we do not have an
     easy way to tie it to this, so punt on all non-pure cdtors).
     We may also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
				       tree base, tree comp_type, gcall *call,
				       struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  int walked
    = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
			  &tci, NULL, NULL, fbi->aa_walk_budget + 1);

  if (walked >= 0 && !tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
		    tree comp_type, gcall *call, struct ipa_jump_func *jfunc,
		    HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
						call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
						call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Sets the boolean variable pointed to by
   DATA to true, flagging that the callback has been invoked.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function that we have
   gathered so far but that does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
			      gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      modified = true;
      if (fbi)
	fbi->aa_walk_budget = 0;
    }
  else if (fbi)
    fbi->aa_walk_budget -= walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
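
/* For instance, in a function like

     int
     foo (int a)
     {
       escape (&a);
       int t = a;     <-- the PARM_LOAD examined above
       ...
     }

   the walk over virtual definitions between the load and the function entry
   determines whether any aliased store could have clobbered A; only if none
   is found is the load known to see the passed value (a simplified
   illustration with made-up identifiers).  */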

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params, provided the value
   has not been modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
			    vec<ipa_param_descriptor, va_gc> *descriptors,
			    gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
			   int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (fbi);
  paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
  if (paa->ref_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      modified = true;
      fbi->aa_walk_budget = 0;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
			      gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							      gimple_bb (call),
							      index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL, NULL,
				   fbi->aa_walk_budget + 1);
  if (walked < 0)
    {
      fbi->aa_walk_budget = 0;
      modified = true;
    }
  else
    fbi->aa_walk_budget -= walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified
   before the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will
   return true even if it cannot prove the value has not been modified; in
   that case it will store false to *GUARANTEED_UNMODIFIED, otherwise it will
   store true there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter
   index, the offset within the aggregate and whether it is a load from a
   value passed by reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
			vec<ipa_param_descriptor, va_gc> *descriptors,
			gimple *stmt, tree op, int *index_p,
			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  if (guaranteed_unmodified)
	    *guaranteed_unmodified = true;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	 void (*<T2e4>) (struct S *) D.1867;
	 struct S * p.1;

	 <bb 2>:
	 p.1_1 = p;
	 D.1867_2 = p.1_1->f;
	 D.1867_2 ();
	 gdp = &p;
      */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
	return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      if (guaranteed_unmodified)
	*guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}
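
/* As a usage sketch, for a load such as

     int
     foo (struct S *p)
     {
       return p->fld;
     }

   the function above would return true with *INDEX_P set to the index of P,
   *OFFSET_P to the bit offset of FLD within struct S and *BY_REF_P to true,
   provided alias analysis can show the pointed-to memory is unmodified
   (identifiers are only illustrative).  */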

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   is also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters across different
   stages of IPA optimizations.  PARMS_AINFO contains the information that is
   only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple *stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
	{
	case GIMPLE_BINARY_RHS:
	  {
	    tree op2 = gimple_assign_rhs2 (stmt);
	    if (!is_gimple_ip_invariant (op2)
		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
		     != tcc_comparison)
		    && !useless_type_conversion_p (TREE_TYPE (name),
						   TREE_TYPE (op1))))
	      return;

	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
					   gimple_assign_rhs_code (stmt));
	    break;
	  }
	case GIMPLE_SINGLE_RHS:
	  {
	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
						       tc_ssa);
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	    break;
	  }
	case GIMPLE_UNARY_RHS:
	  if (is_gimple_assign (stmt)
	      && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
	      && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
	    ipa_set_jf_unary_pass_through (jfunc, index,
					   gimple_assign_rhs_code (stmt));
	  break;
	default:;
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
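
/* The layout being matched corresponds to the common representation of a
   C++ pointer to member function, conceptually

     struct
     {
       void (A::*pfn) ();     <-- POINTER_TYPE to a METHOD_TYPE
       long delta;            <-- integral this-adjustment
     };

   though the exact field types are ABI-dependent (a simplified
   illustration).  */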

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}
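
/* For instance, given the copy chain

     b_2 = a_1(D);
     c_3 = b_2;

   calling the function above on c_3 skips over both copies and returns
   a_1(D) (an illustrative example).  */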

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in the linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE.
   If there is a partial overlap, return NULL; if such an element is already
   there, set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
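
/* For example, if the list already describes bits [0, 32) and [64, 96),
   a new entry for [32, 64) is linked in between the two, a repeated store
   to [0, 32) sets *ALREADY_THERE, and a store to [16, 48) makes the
   function return NULL because such a partial overlap cannot be
   represented (an illustrative sketch).  */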

/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */
1511 
1512 static void
build_agg_jump_func_from_list(struct ipa_known_agg_contents_list * list,int const_count,HOST_WIDE_INT arg_offset,struct ipa_jump_func * jfunc)1513 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1514 			       int const_count, HOST_WIDE_INT arg_offset,
1515 			       struct ipa_jump_func *jfunc)
1516 {
1517   vec_alloc (jfunc->agg.items, const_count);
1518   while (list)
1519     {
1520       if (list->constant)
1521 	{
1522 	  struct ipa_agg_jf_item item;
1523 	  item.offset = list->offset - arg_offset;
1524 	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1525 	  item.value = unshare_expr_without_location (list->constant);
1526 	  jfunc->agg.items->quick_push (item);
1527 	}
1528       list = list->next;
1529     }
1530 }
1531 
1532 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1533    in ARG is filled in with constant values.  ARG can either be an aggregate
1534    expression or a pointer to an aggregate.  ARG_TYPE is the type of the
1535    aggregate.  JFUNC is the jump function into which the constants are
1536    subsequently stored.  */
1537 
1538 static void
determine_locally_known_aggregate_parts(gcall * call,tree arg,tree arg_type,struct ipa_jump_func * jfunc)1539 determine_locally_known_aggregate_parts (gcall *call, tree arg,
1540 					 tree arg_type,
1541 					 struct ipa_jump_func *jfunc)
1542 {
1543   struct ipa_known_agg_contents_list *list = NULL;
1544   int item_count = 0, const_count = 0;
1545   HOST_WIDE_INT arg_offset, arg_size;
1546   gimple_stmt_iterator gsi;
1547   tree arg_base;
1548   bool check_ref, by_ref;
1549   ao_ref r;
1550 
1551   if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
1552     return;
1553 
1554   /* The function operates in three stages.  First, we prepare check_ref, r,
1555      arg_base and arg_offset based on what is actually passed as the actual
1556      argument.  */
1557 
1558   if (POINTER_TYPE_P (arg_type))
1559     {
1560       by_ref = true;
1561       if (TREE_CODE (arg) == SSA_NAME)
1562 	{
1563 	  tree type_size;
1564           if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
1565 	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
1566             return;
1567 	  check_ref = true;
1568 	  arg_base = arg;
1569 	  arg_offset = 0;
1570 	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1571 	  arg_size = tree_to_uhwi (type_size);
1572 	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1573 	}
1574       else if (TREE_CODE (arg) == ADDR_EXPR)
1575 	{
1576 	  bool reverse;
1577 
1578 	  arg = TREE_OPERAND (arg, 0);
1579 	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1580 						  &arg_size, &reverse);
1581 	  if (!arg_base)
1582 	    return;
1583 	  if (DECL_P (arg_base))
1584 	    {
1585 	      check_ref = false;
1586 	      ao_ref_init (&r, arg_base);
1587 	    }
1588 	  else
1589 	    return;
1590 	}
1591       else
1592 	return;
1593     }
1594   else
1595     {
1596       bool reverse;
1597 
1598       gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1599 
1600       by_ref = false;
1601       check_ref = false;
1602       arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1603 					      &arg_size, &reverse);
1604       if (!arg_base)
1605 	return;
1606 
1607       ao_ref_init (&r, arg);
1608     }
1609 
1610   /* Second stage walks back the BB, looks at individual statements and, as
1611      long as it is confident about how the statements affect contents of the
1612      aggregate, builds a sorted linked list of ipa_known_agg_contents_list
1613      structures describing it.  */
1614   gsi = gsi_for_stmt (call);
1615   gsi_prev (&gsi);
1616   for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1617     {
1618       struct ipa_known_agg_contents_list *n, **p;
1619       gimple *stmt = gsi_stmt (gsi);
1620       HOST_WIDE_INT lhs_offset, lhs_size;
1621       tree lhs, rhs, lhs_base;
1622       bool reverse;
1623 
1624       if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1625 	continue;
1626       if (!gimple_assign_single_p (stmt))
1627 	break;
1628 
1629       lhs = gimple_assign_lhs (stmt);
1630       rhs = gimple_assign_rhs1 (stmt);
1631       if (!is_gimple_reg_type (TREE_TYPE (rhs))
1632 	  || TREE_CODE (lhs) == BIT_FIELD_REF
1633 	  || contains_bitfld_component_ref_p (lhs))
1634 	break;
1635 
1636       lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
1637 					      &lhs_size, &reverse);
1638       if (!lhs_base)
1639 	break;
1640 
1641       if (check_ref)
1642 	{
1643 	  if (TREE_CODE (lhs_base) != MEM_REF
1644 	      || TREE_OPERAND (lhs_base, 0) != arg_base
1645 	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1646 	    break;
1647 	}
1648       else if (lhs_base != arg_base)
1649 	{
1650 	  if (DECL_P (lhs_base))
1651 	    continue;
1652 	  else
1653 	    break;
1654 	}
1655 
1656       bool already_there = false;
1657       p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1658 					  &already_there);
1659       if (!p)
1660 	break;
1661       if (already_there)
1662 	continue;
1663 
1664       rhs = get_ssa_def_if_simple_copy (rhs);
1665       n = XALLOCA (struct ipa_known_agg_contents_list);
1666       n->size = lhs_size;
1667       n->offset = lhs_offset;
1668       if (is_gimple_ip_invariant (rhs))
1669 	{
1670 	  n->constant = rhs;
1671 	  const_count++;
1672 	}
1673       else
1674 	n->constant = NULL_TREE;
1675       n->next = *p;
1676       *p = n;
1677 
1678       item_count++;
1679       if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1680 	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1681 	break;
1682     }
1683 
1684   /* Third stage just goes over the list and creates an appropriate vector of
1685      ipa_agg_jf_item structures out of it, of course only if there are
1686      any known constants to begin with.  */
1687 
1688   if (const_count)
1689     {
1690       jfunc->agg.by_ref = by_ref;
1691       build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1692     }
1693 }
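
/* To illustrate the three stages on a hypothetical caller (assuming 32-bit
   ints and the usual field layout):

     void caller (void)
     {
       struct S s;
       s.x = 7;	      <- constant store, recorded with constant == 7
       s.y = get ();  <- value unknown, recorded with constant == NULL_TREE
       callee (&s);
     }

   the walk back from the call records both stores, const_count ends up as 1,
   and the resulting aggregate jump function describes a by-reference
   aggregate with the constant 7 at offset 0.  */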
1694 
1695 /* Return the Ith param type of callee associated with call graph
1696    edge E.  */
1697 
1698 tree
1699 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1700 {
1701   int n;
1702   tree type = (e->callee
1703 	       ? TREE_TYPE (e->callee->decl)
1704 	       : gimple_call_fntype (e->call_stmt));
1705   tree t = TYPE_ARG_TYPES (type);
1706 
1707   for (n = 0; n < i; n++)
1708     {
1709       if (!t)
1710         break;
1711       t = TREE_CHAIN (t);
1712     }
1713   if (t)
1714     return TREE_VALUE (t);
1715   if (!e->callee)
1716     return NULL;
1717   t = DECL_ARGUMENTS (e->callee->decl);
1718   for (n = 0; n < i; n++)
1719     {
1720       if (!t)
1721 	return NULL;
1722       t = TREE_CHAIN (t);
1723     }
1724   if (t)
1725     return TREE_TYPE (t);
1726   return NULL;
1727 }
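
/* For example, for a callee declared as int f (int a, double b), asking for
   the parameter type at index 1 yields double from TYPE_ARG_TYPES; for an
   unprototyped (K&R) callee the second loop falls back to the types of
   DECL_ARGUMENTS instead.  */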
1728 
1729 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
1730    allocated structure or a previously existing one shared with other jump
1731    functions and/or transformation summaries.  */
1732 
1733 ipa_bits *
1734 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
1735 {
1736   ipa_bits tmp;
1737   tmp.value = value;
1738   tmp.mask = mask;
1739 
1740   ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
1741   if (*slot)
1742     return *slot;
1743 
1744   ipa_bits *res = ggc_alloc<ipa_bits> ();
1745   res->value = value;
1746   res->mask = mask;
1747   *slot = res;
1748 
1749   return res;
1750 }
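
/* Note that the function above hash-conses its result: two calls with equal
   VALUE and MASK return the same pointer, as in this hypothetical use

     ipa_bits *a = ipa_get_ipa_bits_for_value (4, -8);
     ipa_bits *b = ipa_get_ipa_bits_for_value (4, -8);
     gcc_checking_assert (a == b);

   which is what makes sharing ipa_bits between jump functions and
   transformation summaries cheap.  */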
1751 
1752 /* Assign to JF a pointer to an ipa_bits structure with VALUE and MASK.  Use
1753    a hash table to avoid creating multiple identical ipa_bits structures.  */
1754 
1755 static void
1756 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
1757 		    const widest_int &mask)
1758 {
1759   jf->bits = ipa_get_ipa_bits_for_value (value, mask);
1760 }
1761 
1762 /* Return a pointer to a value_range just like *TMP, but either find it in
1763    ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */
1764 
1765 static value_range_base *
1766 ipa_get_value_range (value_range_base *tmp)
1767 {
1768   value_range_base **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
1769   if (*slot)
1770     return *slot;
1771 
1772   value_range_base *vr = ggc_alloc<value_range_base> ();
1773   *vr = *tmp;
1774   *slot = vr;
1775 
1776   return vr;
1777 }
1778 
1779 /* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
1780    equiv set.  Use a hash table to avoid creating multiple identical copies of
1781    value_ranges.  */
1782 
1783 static value_range_base *
1784 ipa_get_value_range (enum value_range_kind type, tree min, tree max)
1785 {
1786   value_range_base tmp (type, min, max);
1787   return ipa_get_value_range (&tmp);
1788 }
1789 
1790 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX
1791    and a NULL equiv bitmap.  Use a hash table to avoid creating multiple
1792    identical value_range structures.  */
1793 
1794 static void
1795 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
1796 		  tree min, tree max)
1797 {
1798   jf->m_vr = ipa_get_value_range (type, min, max);
1799 }
1800 
1801 /* Assign to JF a pointer to a value_range just like TMP, but either fetch a
1802    copy from ipa_vr_hash_table or allocate a new one in GC memory.  */
1803 
1804 static void
1805 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range_base *tmp)
1806 {
1807   jf->m_vr = ipa_get_value_range (tmp);
1808 }
1809 
1810 /* Compute jump function for all arguments of callsite CS and insert the
1811    information in the jump_functions array in the ipa_edge_args corresponding
1812    to this callsite.  */
1813 
1814 static void
1815 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1816 				     struct cgraph_edge *cs)
1817 {
1818   struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1819   struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1820   gcall *call = cs->call_stmt;
1821   int n, arg_num = gimple_call_num_args (call);
1822   bool useful_context = false;
1823 
1824   if (arg_num == 0 || args->jump_functions)
1825     return;
1826   vec_safe_grow_cleared (args->jump_functions, arg_num);
1827   if (flag_devirtualize)
1828     vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1829 
1830   if (gimple_call_internal_p (call))
1831     return;
1832   if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1833     return;
1834 
1835   for (n = 0; n < arg_num; n++)
1836     {
1837       struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1838       tree arg = gimple_call_arg (call, n);
1839       tree param_type = ipa_get_callee_param_type (cs, n);
1840       if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1841 	{
1842 	  tree instance;
1843 	  struct ipa_polymorphic_call_context context (cs->caller->decl,
1844 						       arg, cs->call_stmt,
1845 						       &instance);
1846 	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
1847 				    &fbi->aa_walk_budget);
1848 	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
1849 	  if (!context.useless_p ())
1850 	    useful_context = true;
1851 	}
1852 
1853       if (POINTER_TYPE_P (TREE_TYPE (arg)))
1854 	{
1855 	  bool addr_nonzero = false;
1856 	  bool strict_overflow = false;
1857 
1858 	  if (TREE_CODE (arg) == SSA_NAME
1859 	      && param_type
1860 	      && get_ptr_nonnull (arg))
1861 	    addr_nonzero = true;
1862 	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1863 	    addr_nonzero = true;
1864 
1865 	  if (addr_nonzero)
1866 	    {
1867 	      tree z = build_int_cst (TREE_TYPE (arg), 0);
1868 	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
1869 	    }
1870 	  else
1871 	    gcc_assert (!jfunc->m_vr);
1872 	}
1873       else
1874 	{
1875 	  wide_int min, max;
1876 	  value_range_kind type;
1877 	  if (TREE_CODE (arg) == SSA_NAME
1878 	      && param_type
1879 	      && (type = get_range_info (arg, &min, &max))
1880 	      && (type == VR_RANGE || type == VR_ANTI_RANGE))
1881 	    {
1882 	      value_range_base resvr;
1883 	      value_range_base tmpvr (type,
1884 				      wide_int_to_tree (TREE_TYPE (arg), min),
1885 				      wide_int_to_tree (TREE_TYPE (arg), max));
1886 	      extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
1887 					     &tmpvr, TREE_TYPE (arg));
1888 	      if (!resvr.undefined_p () && !resvr.varying_p ())
1889 		ipa_set_jfunc_vr (jfunc, &resvr);
1890 	      else
1891 		gcc_assert (!jfunc->m_vr);
1892 	    }
1893 	  else
1894 	    gcc_assert (!jfunc->m_vr);
1895 	}
1896 
1897       if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1898 	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1899 	{
1900 	  if (TREE_CODE (arg) == SSA_NAME)
1901 	    ipa_set_jfunc_bits (jfunc, 0,
1902 				widest_int::from (get_nonzero_bits (arg),
1903 						  TYPE_SIGN (TREE_TYPE (arg))));
1904 	  else
1905 	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
1906 	}
1907       else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1908 	{
1909 	  unsigned HOST_WIDE_INT bitpos;
1910 	  unsigned align;
1911 
1912 	  get_pointer_alignment_1 (arg, &align, &bitpos);
1913 	  widest_int mask = wi::bit_and_not
1914 	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
1915 	     align / BITS_PER_UNIT - 1);
1916 	  widest_int value = bitpos / BITS_PER_UNIT;
1917 	  ipa_set_jfunc_bits (jfunc, value, mask);
1918 	}
1919       else
1920 	gcc_assert (!jfunc->bits);
1921 
1922       if (is_gimple_ip_invariant (arg)
1923 	  || (VAR_P (arg)
1924 	      && is_global_var (arg)
1925 	      && TREE_READONLY (arg)))
1926 	ipa_set_jf_constant (jfunc, arg, cs);
1927       else if (!is_gimple_reg_type (TREE_TYPE (arg))
1928 	       && TREE_CODE (arg) == PARM_DECL)
1929 	{
1930 	  int index = ipa_get_param_decl_index (info, arg);
1931 
1932 	  gcc_assert (index >= 0);
1933 	  /* Aggregate passed by value, check for pass-through, otherwise we
1934 	     will attempt to fill in aggregate contents later in this
1935 	     for cycle.  */
1936 	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1937 	    {
1938 	      ipa_set_jf_simple_pass_through (jfunc, index, false);
1939 	      continue;
1940 	    }
1941 	}
1942       else if (TREE_CODE (arg) == SSA_NAME)
1943 	{
1944 	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
1945 	    {
1946 	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1947 	      if (index >= 0)
1948 		{
1949 		  bool agg_p;
1950 		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1951 		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1952 		}
1953 	    }
1954 	  else
1955 	    {
1956 	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
1957 	      if (is_gimple_assign (stmt))
1958 		compute_complex_assign_jump_func (fbi, info, jfunc,
1959 						  call, stmt, arg, param_type);
1960 	      else if (gimple_code (stmt) == GIMPLE_PHI)
1961 		compute_complex_ancestor_jump_func (fbi, info, jfunc,
1962 						    call,
1963 						    as_a <gphi *> (stmt));
1964 	    }
1965 	}
1966 
1967       /* If ARG is a pointer, we cannot use its type to determine the type of
1968 	 the aggregate passed (because type conversions are ignored in gimple).
1969 	 Usually we can safely get the type from the function declaration, but
1970 	 for K&R prototypes or variadic functions we can try our luck with the
1971 	 type of the pointer passed.  TODO: Since we look for actual initialization
1972 	 of the memory object, we may better work out the type from the memory stores we find.  */
1973       if (!param_type)
1974 	param_type = TREE_TYPE (arg);
1975 
1976       if ((jfunc->type != IPA_JF_PASS_THROUGH
1977 	      || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1978 	  && (jfunc->type != IPA_JF_ANCESTOR
1979 	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1980 	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1981 	      || POINTER_TYPE_P (param_type)))
1982 	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1983     }
1984   if (!useful_context)
1985     vec_free (args->polymorphic_call_contexts);
1986 }
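
/* A worked example of the pointer-alignment case in the function above (the
   numbers are assumed, not taken from a testcase): if get_pointer_alignment_1
   reports align == 64 and bitpos == 32, i.e. the argument points 4 bytes past
   an 8-byte boundary, the recorded ipa_bits have the low three bits of the
   mask cleared (so those bits are known) and value == 4, meaning the low
   three bits of the pointer are known to be 100 in binary while all other
   bits are unknown.  */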
1987 
1988 /* Compute jump functions for all edges - both direct and indirect - outgoing
1989    from BB.  */
1990 
1991 static void
1992 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
1993 {
1994   struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1995   int i;
1996   struct cgraph_edge *cs;
1997 
1998   FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
1999     {
2000       struct cgraph_node *callee = cs->callee;
2001 
2002       if (callee)
2003 	{
2004 	  callee->ultimate_alias_target ();
2005 	  /* We do not need to bother analyzing calls to unknown functions
2006 	     unless they may become known during lto/whopr.  */
2007 	  if (!callee->definition && !flag_lto)
2008 	    continue;
2009 	}
2010       ipa_compute_jump_functions_for_edge (fbi, cs);
2011     }
2012 }
2013 
2014 /* If STMT looks like a statement loading a value from a member pointer formal
2015    parameter, return that parameter and store the offset of the field to
2016    *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
2017    might be clobbered).  If USE_DELTA, then we look for a use of the delta
2018    field rather than the pfn.  */
2019 
2020 static tree
2021 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2022 				    HOST_WIDE_INT *offset_p)
2023 {
2024   tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2025 
2026   if (!gimple_assign_single_p (stmt))
2027     return NULL_TREE;
2028 
2029   rhs = gimple_assign_rhs1 (stmt);
2030   if (TREE_CODE (rhs) == COMPONENT_REF)
2031     {
2032       ref_field = TREE_OPERAND (rhs, 1);
2033       rhs = TREE_OPERAND (rhs, 0);
2034     }
2035   else
2036     ref_field = NULL_TREE;
2037   if (TREE_CODE (rhs) != MEM_REF)
2038     return NULL_TREE;
2039   rec = TREE_OPERAND (rhs, 0);
2040   if (TREE_CODE (rec) != ADDR_EXPR)
2041     return NULL_TREE;
2042   rec = TREE_OPERAND (rec, 0);
2043   if (TREE_CODE (rec) != PARM_DECL
2044       || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2045     return NULL_TREE;
2046   ref_offset = TREE_OPERAND (rhs, 1);
2047 
2048   if (use_delta)
2049     fld = delta_field;
2050   else
2051     fld = ptr_field;
2052   if (offset_p)
2053     *offset_p = int_bit_position (fld);
2054 
2055   if (ref_field)
2056     {
2057       if (integer_nonzerop (ref_offset))
2058 	return NULL_TREE;
2059       return ref_field == fld ? rec : NULL_TREE;
2060     }
2061   else
2062     return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2063       : NULL_TREE;
2064 }
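
/* The two load shapes accepted above, shown on a hypothetical member pointer
   parameter F, are

     tmp = f.__pfn;		      <- COMPONENT_REF, matched via REF_FIELD
     tmp = MEM[(struct  *)&f + 4B];   <- MEM_REF, matched via the byte offset

   where the second form is what such a load typically looks like after early
   lowering.  */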
2065 
2066 /* Returns true iff T is an SSA_NAME defined by a statement.  */
2067 
2068 static bool
2069 ipa_is_ssa_with_stmt_def (tree t)
2070 {
2071   if (TREE_CODE (t) == SSA_NAME
2072       && !SSA_NAME_IS_DEFAULT_DEF (t))
2073     return true;
2074   else
2075     return false;
2076 }
2077 
2078 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2079    call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
2080    indirect call graph edge.  */
2081 
2082 static struct cgraph_edge *
2083 ipa_note_param_call (struct cgraph_node *node, int param_index,
2084 		     gcall *stmt)
2085 {
2086   struct cgraph_edge *cs;
2087 
2088   cs = node->get_edge (stmt);
2089   cs->indirect_info->param_index = param_index;
2090   cs->indirect_info->agg_contents = 0;
2091   cs->indirect_info->member_ptr = 0;
2092   cs->indirect_info->guaranteed_unmodified = 0;
2093   return cs;
2094 }
2095 
2096 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2097    (described by INFO).  PARMS_AINFO is a pointer to a vector containing
2098    intermediate information about each formal parameter.  Currently it checks
2099    whether the call calls a pointer that is a formal parameter and if so, the
2100    parameter is marked with the called flag and an indirect call graph edge
2101    describing the call is created.  This is very simple for ordinary pointers
2102    represented in SSA but not-so-nice when it comes to member pointers.  The
2103    ugly part of this function does nothing more than trying to match the
2104    pattern of such a call.  An example of such a pattern is the gimple dump
2105    below; the call is on the last line:
2106 
2107      <bb 2>:
2108        f$__delta_5 = f.__delta;
2109        f$__pfn_24 = f.__pfn;
2110 
2111    or
2112      <bb 2>:
2113        f$__delta_5 = MEM[(struct  *)&f];
2114        f$__pfn_24 = MEM[(struct  *)&f + 4B];
2115 
2116    and a few lines below:
2117 
2118      <bb 5>
2119        D.2496_3 = (int) f$__pfn_24;
2120        D.2497_4 = D.2496_3 & 1;
2121        if (D.2497_4 != 0)
2122          goto <bb 3>;
2123        else
2124          goto <bb 4>;
2125 
2126      <bb 6>:
2127        D.2500_7 = (unsigned int) f$__delta_5;
2128        D.2501_8 = &S + D.2500_7;
2129        D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2130        D.2503_10 = *D.2502_9;
2131        D.2504_12 = f$__pfn_24 + -1;
2132        D.2505_13 = (unsigned int) D.2504_12;
2133        D.2506_14 = D.2503_10 + D.2505_13;
2134        D.2507_15 = *D.2506_14;
2135        iftmp.11_16 = (String:: *) D.2507_15;
2136 
2137      <bb 7>:
2138        # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2139        D.2500_19 = (unsigned int) f$__delta_5;
2140        D.2508_20 = &S + D.2500_19;
2141        D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2142 
2143    Such patterns are results of simple calls to a member pointer:
2144 
2145      int doprinting (int (MyString::* f)(int) const)
2146      {
2147        MyString S ("somestring");
2148 
2149        return (S.*f)(4);
2150      }
2151 
2152    Moreover, the function also looks for called pointers loaded from aggregates
2153    passed by value or reference.  */
2154 
2155 static void
2156 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2157 				tree target)
2158 {
2159   struct ipa_node_params *info = fbi->info;
2160   HOST_WIDE_INT offset;
2161   bool by_ref;
2162 
2163   if (SSA_NAME_IS_DEFAULT_DEF (target))
2164     {
2165       tree var = SSA_NAME_VAR (target);
2166       int index = ipa_get_param_decl_index (info, var);
2167       if (index >= 0)
2168 	ipa_note_param_call (fbi->node, index, call);
2169       return;
2170     }
2171 
2172   int index;
2173   gimple *def = SSA_NAME_DEF_STMT (target);
2174   bool guaranteed_unmodified;
2175   if (gimple_assign_single_p (def)
2176       && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2177 				 gimple_assign_rhs1 (def), &index, &offset,
2178 				 NULL, &by_ref, &guaranteed_unmodified))
2179     {
2180       struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2181       cs->indirect_info->offset = offset;
2182       cs->indirect_info->agg_contents = 1;
2183       cs->indirect_info->by_ref = by_ref;
2184       cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2185       return;
2186     }
2187 
2188   /* Now we need to try to match the complex pattern of calling a member
2189      pointer. */
2190   if (gimple_code (def) != GIMPLE_PHI
2191       || gimple_phi_num_args (def) != 2
2192       || !POINTER_TYPE_P (TREE_TYPE (target))
2193       || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2194     return;
2195 
2196   /* First, we need to check whether one of these is a load from a member
2197      pointer that is a parameter to this function. */
2198   tree n1 = PHI_ARG_DEF (def, 0);
2199   tree n2 = PHI_ARG_DEF (def, 1);
2200   if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2201     return;
2202   gimple *d1 = SSA_NAME_DEF_STMT (n1);
2203   gimple *d2 = SSA_NAME_DEF_STMT (n2);
2204 
2205   tree rec;
2206   basic_block bb, virt_bb;
2207   basic_block join = gimple_bb (def);
2208   if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2209     {
2210       if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2211 	return;
2212 
2213       bb = EDGE_PRED (join, 0)->src;
2214       virt_bb = gimple_bb (d2);
2215     }
2216   else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2217     {
2218       bb = EDGE_PRED (join, 1)->src;
2219       virt_bb = gimple_bb (d1);
2220     }
2221   else
2222     return;
2223 
2224   /* Second, we need to check that the basic blocks are laid out in the way
2225      corresponding to the pattern. */
2226 
2227   if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2228       || single_pred (virt_bb) != bb
2229       || single_succ (virt_bb) != join)
2230     return;
2231 
2232   /* Third, let's see that the branching is done depending on the least
2233      significant bit of the pfn. */
2234 
2235   gimple *branch = last_stmt (bb);
2236   if (!branch || gimple_code (branch) != GIMPLE_COND)
2237     return;
2238 
2239   if ((gimple_cond_code (branch) != NE_EXPR
2240        && gimple_cond_code (branch) != EQ_EXPR)
2241       || !integer_zerop (gimple_cond_rhs (branch)))
2242     return;
2243 
2244   tree cond = gimple_cond_lhs (branch);
2245   if (!ipa_is_ssa_with_stmt_def (cond))
2246     return;
2247 
2248   def = SSA_NAME_DEF_STMT (cond);
2249   if (!is_gimple_assign (def)
2250       || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2251       || !integer_onep (gimple_assign_rhs2 (def)))
2252     return;
2253 
2254   cond = gimple_assign_rhs1 (def);
2255   if (!ipa_is_ssa_with_stmt_def (cond))
2256     return;
2257 
2258   def = SSA_NAME_DEF_STMT (cond);
2259 
2260   if (is_gimple_assign (def)
2261       && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2262     {
2263       cond = gimple_assign_rhs1 (def);
2264       if (!ipa_is_ssa_with_stmt_def (cond))
2265 	return;
2266       def = SSA_NAME_DEF_STMT (cond);
2267     }
2268 
2269   tree rec2;
2270   rec2 = ipa_get_stmt_member_ptr_load_param (def,
2271 					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
2272 					      == ptrmemfunc_vbit_in_delta),
2273 					     NULL);
2274   if (rec != rec2)
2275     return;
2276 
2277   index = ipa_get_param_decl_index (info, rec);
2278   if (index >= 0
2279       && parm_preserved_before_stmt_p (fbi, index, call, rec))
2280     {
2281       struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2282       cs->indirect_info->offset = offset;
2283       cs->indirect_info->agg_contents = 1;
2284       cs->indirect_info->member_ptr = 1;
2285       cs->indirect_info->guaranteed_unmodified = 1;
2286     }
2287 
2288   return;
2289 }
2290 
2291 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2292    object referenced in the expression is a formal parameter of the caller
2293    FBI->node (described by FBI->info), create a call note for the
2294    statement.  */
2295 
2296 static void
2297 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2298 			       gcall *call, tree target)
2299 {
2300   tree obj = OBJ_TYPE_REF_OBJECT (target);
2301   int index;
2302   HOST_WIDE_INT anc_offset;
2303 
2304   if (!flag_devirtualize)
2305     return;
2306 
2307   if (TREE_CODE (obj) != SSA_NAME)
2308     return;
2309 
2310   struct ipa_node_params *info = fbi->info;
2311   if (SSA_NAME_IS_DEFAULT_DEF (obj))
2312     {
2313       struct ipa_jump_func jfunc;
2314       if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2315 	return;
2316 
2317       anc_offset = 0;
2318       index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2319       gcc_assert (index >= 0);
2320       if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2321 				  call, &jfunc))
2322 	return;
2323     }
2324   else
2325     {
2326       struct ipa_jump_func jfunc;
2327       gimple *stmt = SSA_NAME_DEF_STMT (obj);
2328       tree expr;
2329 
2330       expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2331       if (!expr)
2332 	return;
2333       index = ipa_get_param_decl_index (info,
2334 					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2335       gcc_assert (index >= 0);
2336       if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2337 			      call, &jfunc, anc_offset))
2338 	return;
2339     }
2340 
2341   struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2342   struct cgraph_indirect_call_info *ii = cs->indirect_info;
2343   ii->offset = anc_offset;
2344   ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2345   ii->otr_type = obj_type_ref_class (target);
2346   ii->polymorphic = 1;
2347 }
2348 
2349 /* Analyze a call statement CALL to see whether and how it utilizes formal
2350    parameters of the caller (described by INFO).  PARMS_AINFO is a pointer to
2351    a vector containing intermediate information about each formal parameter.  */
2352 
2353 static void
2354 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2355 {
2356   tree target = gimple_call_fn (call);
2357 
2358   if (!target
2359       || (TREE_CODE (target) != SSA_NAME
2360           && !virtual_method_call_p (target)))
2361     return;
2362 
2363   struct cgraph_edge *cs = fbi->node->get_edge (call);
2364   /* If we previously turned the call into a direct call, there is
2365      no need to analyze.  */
2366   if (cs && !cs->indirect_unknown_callee)
2367     return;
2368 
2369   if (cs->indirect_info->polymorphic && flag_devirtualize)
2370     {
2371       tree instance;
2372       tree target = gimple_call_fn (call);
2373       ipa_polymorphic_call_context context (current_function_decl,
2374 					    target, call, &instance);
2375 
2376       gcc_checking_assert (cs->indirect_info->otr_type
2377 			   == obj_type_ref_class (target));
2378       gcc_checking_assert (cs->indirect_info->otr_token
2379 			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2380 
2381       cs->indirect_info->vptr_changed
2382 	= !context.get_dynamic_type (instance,
2383 				     OBJ_TYPE_REF_OBJECT (target),
2384 				     obj_type_ref_class (target), call,
2385 				     &fbi->aa_walk_budget);
2386       cs->indirect_info->context = context;
2387     }
2388 
2389   if (TREE_CODE (target) == SSA_NAME)
2390     ipa_analyze_indirect_call_uses (fbi, call, target);
2391   else if (virtual_method_call_p (target))
2392     ipa_analyze_virtual_call_uses (fbi, call, target);
2393 }
2394 
2395 
2396 /* Analyze the call statement STMT with respect to formal parameters
2397    (described in INFO) of the caller given by FBI->NODE.  Currently it only
2398    checks whether formal parameters are called.  */
2399 
2400 static void
2401 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2402 {
2403   if (is_gimple_call (stmt))
2404     ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2405 }
2406 
2407 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2408    If OP is a parameter declaration, mark it as used in the info structure
2409    passed in DATA.  */
2410 
2411 static bool
2412 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2413 {
2414   struct ipa_node_params *info = (struct ipa_node_params *) data;
2415 
2416   op = get_base_address (op);
2417   if (op
2418       && TREE_CODE (op) == PARM_DECL)
2419     {
2420       int index = ipa_get_param_decl_index (info, op);
2421       gcc_assert (index >= 0);
2422       ipa_set_param_used (info, index, true);
2423     }
2424 
2425   return false;
2426 }
2427 
2428 /* Scan the statements in BB and inspect the uses of formal parameters.  Store
2429    the findings in various structures of the associated ipa_node_params
2430    structure, such as parameter flags, notes etc.  FBI holds various data about
2431    the function being analyzed.  */
2432 
2433 static void
2434 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2435 {
2436   gimple_stmt_iterator gsi;
2437   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2438     {
2439       gimple *stmt = gsi_stmt (gsi);
2440 
2441       if (is_gimple_debug (stmt))
2442 	continue;
2443 
2444       ipa_analyze_stmt_uses (fbi, stmt);
2445       walk_stmt_load_store_addr_ops (stmt, fbi->info,
2446 				     visit_ref_for_mod_analysis,
2447 				     visit_ref_for_mod_analysis,
2448 				     visit_ref_for_mod_analysis);
2449     }
2450   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2451     walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2452 				   visit_ref_for_mod_analysis,
2453 				   visit_ref_for_mod_analysis,
2454 				   visit_ref_for_mod_analysis);
2455 }
2456 
2457 /* Calculate controlled uses of parameters of NODE.  */
2458 
2459 static void
2460 ipa_analyze_controlled_uses (struct cgraph_node *node)
2461 {
2462   struct ipa_node_params *info = IPA_NODE_REF (node);
2463 
2464   for (int i = 0; i < ipa_get_param_count (info); i++)
2465     {
2466       tree parm = ipa_get_param (info, i);
2467       int controlled_uses = 0;
2468 
2469       /* For SSA regs see if parameter is used.  For non-SSA we compute
2470 	 the flag during modification analysis.  */
2471       if (is_gimple_reg (parm))
2472 	{
2473 	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2474 				       parm);
2475 	  if (ddef && !has_zero_uses (ddef))
2476 	    {
2477 	      imm_use_iterator imm_iter;
2478 	      use_operand_p use_p;
2479 
2480 	      ipa_set_param_used (info, i, true);
2481 	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2482 		if (!is_gimple_call (USE_STMT (use_p)))
2483 		  {
2484 		    if (!is_gimple_debug (USE_STMT (use_p)))
2485 		      {
2486 			controlled_uses = IPA_UNDESCRIBED_USE;
2487 			break;
2488 		      }
2489 		  }
2490 		else
2491 		  controlled_uses++;
2492 	    }
2493 	  else
2494 	    controlled_uses = 0;
2495 	}
2496       else
2497 	controlled_uses = IPA_UNDESCRIBED_USE;
2498       ipa_set_controlled_uses (info, i, controlled_uses);
2499     }
2500 }
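
/* For example (hypothetical uses of a parameter P in SSA form): if every use
   of the default definition of P is a call argument, as in foo (p_1(D)),
   each such use bumps the counter and the number of controlled uses is
   exact.  Any other non-debug use, such as q_2 = p_1(D) + 1, makes the uses
   untrackable and the parameter gets IPA_UNDESCRIBED_USE.  */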
2501 
2502 /* Free the vectors held in BI.  */
2503 
2504 static void
2505 free_ipa_bb_info (struct ipa_bb_info *bi)
2506 {
2507   bi->cg_edges.release ();
2508   bi->param_aa_statuses.release ();
2509 }
2510 
2511 /* Dominator walker driving the analysis.  */
2512 
2513 class analysis_dom_walker : public dom_walker
2514 {
2515 public:
2516   analysis_dom_walker (struct ipa_func_body_info *fbi)
2517     : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2518 
2519   virtual edge before_dom_children (basic_block);
2520 
2521 private:
2522   struct ipa_func_body_info *m_fbi;
2523 };
2524 
2525 edge
2526 analysis_dom_walker::before_dom_children (basic_block bb)
2527 {
2528   ipa_analyze_params_uses_in_bb (m_fbi, bb);
2529   ipa_compute_jump_functions_for_bb (m_fbi, bb);
2530   return NULL;
2531 }
2532 
2533 /* Release body info FBI.  */
2534 
2535 void
2536 ipa_release_body_info (struct ipa_func_body_info *fbi)
2537 {
2538   int i;
2539   struct ipa_bb_info *bi;
2540 
2541   FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2542     free_ipa_bb_info (bi);
2543   fbi->bb_infos.release ();
2544 }
2545 
2546 /* Initialize the array describing properties of formal parameters
2547    of NODE, analyze their uses and compute jump functions associated
2548    with actual arguments of calls from within NODE.  */
2549 
2550 void
2551 ipa_analyze_node (struct cgraph_node *node)
2552 {
2553   struct ipa_func_body_info fbi;
2554   struct ipa_node_params *info;
2555 
2556   ipa_check_create_node_params ();
2557   ipa_check_create_edge_args ();
2558   info = IPA_NODE_REF (node);
2559 
2560   if (info->analysis_done)
2561     return;
2562   info->analysis_done = 1;
2563 
2564   if (ipa_func_spec_opts_forbid_analysis_p (node))
2565     {
2566       for (int i = 0; i < ipa_get_param_count (info); i++)
2567 	{
2568 	  ipa_set_param_used (info, i, true);
2569 	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2570 	}
2571       return;
2572     }
2573 
2574   struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2575   push_cfun (func);
2576   calculate_dominance_info (CDI_DOMINATORS);
2577   ipa_initialize_node_params (node);
2578   ipa_analyze_controlled_uses (node);
2579 
2580   fbi.node = node;
2581   fbi.info = IPA_NODE_REF (node);
2582   fbi.bb_infos = vNULL;
2583   fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2584   fbi.param_count = ipa_get_param_count (info);
2585   fbi.aa_walk_budget = PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
2586 
2587   for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2588     {
2589       ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2590       bi->cg_edges.safe_push (cs);
2591     }
2592 
2593   for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2594     {
2595       ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2596       bi->cg_edges.safe_push (cs);
2597     }
2598 
2599   analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2600 
2601   ipa_release_body_info (&fbi);
2602   free_dominance_info (CDI_DOMINATORS);
2603   pop_cfun ();
2604 }
2605 
2606 /* Update the jump functions associated with call graph edge E when the call
2607    graph edge CS is being inlined, assuming that E->caller is already (possibly
2608    indirectly) inlined into CS->callee and that E has not been inlined.  */
2609 
2610 static void
2611 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2612 				      struct cgraph_edge *e)
2613 {
2614   struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2615   struct ipa_edge_args *args = IPA_EDGE_REF (e);
2616   int count = ipa_get_cs_argument_count (args);
2617   int i;
2618 
2619   for (i = 0; i < count; i++)
2620     {
2621       struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2622       struct ipa_polymorphic_call_context *dst_ctx
2623 	= ipa_get_ith_polymorhic_call_context (args, i);
2624 
2625       if (dst->type == IPA_JF_ANCESTOR)
2626 	{
2627 	  struct ipa_jump_func *src;
2628 	  int dst_fid = dst->value.ancestor.formal_id;
2629 	  struct ipa_polymorphic_call_context *src_ctx
2630 	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2631 
2632 	  /* A variable number of arguments can cause havoc if we try to access
2633 	     an argument that does not exist in the inlined edge.  So make sure
2634 	     we don't.  */
2635 	  if (dst_fid >= ipa_get_cs_argument_count (top))
2636 	    {
2637 	      ipa_set_jf_unknown (dst);
2638 	      continue;
2639 	    }
2640 
2641 	  src = ipa_get_ith_jump_func (top, dst_fid);
2642 
2643 	  if (src_ctx && !src_ctx->useless_p ())
2644 	    {
2645 	      struct ipa_polymorphic_call_context ctx = *src_ctx;
2646 
2647 	      /* TODO: Make type preserved safe WRT contexts.  */
2648 	      if (!ipa_get_jf_ancestor_type_preserved (dst))
2649 		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2650 	      ctx.offset_by (dst->value.ancestor.offset);
2651 	      if (!ctx.useless_p ())
2652 		{
2653 		  if (!dst_ctx)
2654 		    {
2655 		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
2656 					     count);
2657 		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2658 		    }
2659 
2660 		  dst_ctx->combine_with (ctx);
2661 		}
2662 	    }
2663 
2664 	  if (src->agg.items
2665 	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2666 	    {
2667 	      struct ipa_agg_jf_item *item;
2668 	      int j;
2669 
2670 	      /* Currently we do not produce clobber aggregate jump functions,
2671 		 replace with merging when we do.  */
2672 	      gcc_assert (!dst->agg.items);
2673 
2674 	      dst->agg.items = vec_safe_copy (src->agg.items);
2675 	      dst->agg.by_ref = src->agg.by_ref;
2676 	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2677 		item->offset -= dst->value.ancestor.offset;
2678 	    }
2679 
2680 	  if (src->type == IPA_JF_PASS_THROUGH
2681 	      && src->value.pass_through.operation == NOP_EXPR)
2682 	    {
2683 	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2684 	      dst->value.ancestor.agg_preserved &=
2685 		src->value.pass_through.agg_preserved;
2686 	    }
2687 	  else if (src->type == IPA_JF_PASS_THROUGH
2688 		   && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2689 	    {
2690 	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2691 	      dst->value.ancestor.agg_preserved = false;
2692 	    }
2693 	  else if (src->type == IPA_JF_ANCESTOR)
2694 	    {
2695 	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2696 	      dst->value.ancestor.offset += src->value.ancestor.offset;
2697 	      dst->value.ancestor.agg_preserved &=
2698 		src->value.ancestor.agg_preserved;
2699 	    }
2700 	  else
2701 	    ipa_set_jf_unknown (dst);
2702 	}
2703       else if (dst->type == IPA_JF_PASS_THROUGH)
2704 	{
2705 	  struct ipa_jump_func *src;
2706 	  /* We must check the range due to calls with a variable number of
2707 	     arguments, and we cannot combine jump functions with operations.  */
2708 	  if (dst->value.pass_through.operation == NOP_EXPR
2709 	      && (dst->value.pass_through.formal_id
2710 		  < ipa_get_cs_argument_count (top)))
2711 	    {
2712 	      int dst_fid = dst->value.pass_through.formal_id;
2713 	      src = ipa_get_ith_jump_func (top, dst_fid);
2714 	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2715 	      struct ipa_polymorphic_call_context *src_ctx
2716 		= ipa_get_ith_polymorhic_call_context (top, dst_fid);
2717 
2718 	      if (src_ctx && !src_ctx->useless_p ())
2719 		{
2720 		  struct ipa_polymorphic_call_context ctx = *src_ctx;
2721 
2722 		  /* TODO: Make type preserved safe WRT contexts.  */
2723 		  if (!ipa_get_jf_pass_through_type_preserved (dst))
2724 		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2725 		  if (!ctx.useless_p ())
2726 		    {
2727 		      if (!dst_ctx)
2728 			{
2729 			  vec_safe_grow_cleared (args->polymorphic_call_contexts,
2730 					         count);
2731 			  dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2732 			}
2733 		      dst_ctx->combine_with (ctx);
2734 		    }
2735 		}
2736 	      switch (src->type)
2737 		{
2738 		case IPA_JF_UNKNOWN:
2739 		  ipa_set_jf_unknown (dst);
2740 		  break;
2741 		case IPA_JF_CONST:
2742 		  ipa_set_jf_cst_copy (dst, src);
2743 		  break;
2744 
2745 		case IPA_JF_PASS_THROUGH:
2746 		  {
2747 		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
2748 		    enum tree_code operation;
2749 		    operation = ipa_get_jf_pass_through_operation (src);
2750 
2751 		    if (operation == NOP_EXPR)
2752 		      {
2753 			bool agg_p;
2754 			agg_p = dst_agg_p
2755 			  && ipa_get_jf_pass_through_agg_preserved (src);
2756 			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2757 		      }
2758 		    else if (TREE_CODE_CLASS (operation) == tcc_unary)
2759 		      ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2760 		    else
2761 		      {
2762 			tree operand = ipa_get_jf_pass_through_operand (src);
2763 			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2764 						       operation);
2765 		      }
2766 		    break;
2767 		  }
2768 		case IPA_JF_ANCESTOR:
2769 		  {
2770 		    bool agg_p;
2771 		    agg_p = dst_agg_p
2772 		      && ipa_get_jf_ancestor_agg_preserved (src);
2773 		    ipa_set_ancestor_jf (dst,
2774 					 ipa_get_jf_ancestor_offset (src),
2775 					 ipa_get_jf_ancestor_formal_id (src),
2776 					 agg_p);
2777 		    break;
2778 		  }
2779 		default:
2780 		  gcc_unreachable ();
2781 		}
2782 
2783 	      if (src->agg.items
2784 		  && (dst_agg_p || !src->agg.by_ref))
2785 		{
2786 		  /* Currently we do not produce clobber aggregate jump
2787 		     functions, replace with merging when we do.  */
2788 		  gcc_assert (!dst->agg.items);
2789 
2790 		  dst->agg.by_ref = src->agg.by_ref;
2791 		  dst->agg.items = vec_safe_copy (src->agg.items);
2792 		}
2793 	    }
2794 	  else
2795 	    ipa_set_jf_unknown (dst);
2796 	}
2797     }
2798 }
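
/* A small example of the combination performed above (a hypothetical chain of
   functions A, B and C): if B passes its first parameter to C unchanged, the
   edge B->C carries a simple pass-through jump function for formal 0.  When
   the edge A->B is inlined and A passed the constant 7 in that position, TOP
   describes argument 0 as the constant 7 and the IPA_JF_CONST case above
   turns the B->C pass-through into a constant jump function for 7 too.  */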
2799 
2800 /* If TARGET is an addr_expr of a function declaration, make it the
2801    (SPECULATIVE) destination of an indirect edge IE and return the edge.
2802    Otherwise, return NULL.  */
2803 
2804 struct cgraph_edge *
2805 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2806 				bool speculative)
2807 {
2808   struct cgraph_node *callee;
2809   bool unreachable = false;
2810 
2811   if (TREE_CODE (target) == ADDR_EXPR)
2812     target = TREE_OPERAND (target, 0);
2813   if (TREE_CODE (target) != FUNCTION_DECL)
2814     {
2815       target = canonicalize_constructor_val (target, NULL);
2816       if (!target || TREE_CODE (target) != FUNCTION_DECL)
2817 	{
2818 	  /* Member pointer call that goes through a VMT lookup.  */
2819 	  if (ie->indirect_info->member_ptr
2820 	      /* Or if target is not an invariant expression and we do not
2821 		 know if it will evaluate to a function at runtime.
2822 		 This can happen when folding through &VAR, where &VAR
2823 		 is IP invariant, but VAR itself is not.
2824 
2825 		 TODO: Revisit this when GCC 5 is branched.  It seems that
2826 		 member_ptr check is not needed and that we may try to fold
2827 		 the expression and see if VAR is readonly.  */
2828 	      || !is_gimple_ip_invariant (target))
2829 	    {
2830 	      if (dump_enabled_p ())
2831 		{
2832 		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2833 				   "discovered direct call to a non-invariant in %s\n",
2834 				   ie->caller->dump_name ());
2835 		}
2836 	      return NULL;
2837 	    }
2838 
2839 
2840           if (dump_enabled_p ())
2841 	    {
2842 	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2843 			       "discovered direct call to non-function in %s, "
2844 			       "making it __builtin_unreachable\n",
2845 			       ie->caller->dump_name ());
2846 	    }
2847 
2848 	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2849 	  callee = cgraph_node::get_create (target);
2850 	  unreachable = true;
2851 	}
2852       else
2853 	callee = cgraph_node::get (target);
2854     }
2855   else
2856     callee = cgraph_node::get (target);
2857 
2858   /* Because may-edges are not explicitly represented and the vtable may be
2859      external, we may create the first reference to the object in the unit.  */
2860   if (!callee || callee->global.inlined_to)
2861     {
2862 
2863       /* We had better ensure we can refer to it.
2864 	 In the case of a static function we are out of luck, since we have
2865 	 already removed its body.  In the case of public functions we may or may
2866 	 not introduce the reference.  */
2867       if (!canonicalize_constructor_val (target, NULL)
2868 	  || !TREE_PUBLIC (target))
2869 	{
2870 	  if (dump_file)
2871 	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2872 		     "(%s -> %s) but cannot refer to it. Giving up.\n",
2873 		     ie->caller->dump_name (),
2874 		     ie->callee->dump_name ());
2875 	  return NULL;
2876 	}
2877       callee = cgraph_node::get_create (target);
2878     }
2879 
2880   /* If the edge is already speculated, check whether it agrees.  */
2881   if (speculative && ie->speculative)
2882     {
2883       struct cgraph_edge *e2;
2884       struct ipa_ref *ref;
2885       ie->speculative_call_info (e2, ie, ref);
2886       if (e2->callee->ultimate_alias_target ()
2887 	  != callee->ultimate_alias_target ())
2888 	{
2889 	  if (dump_file)
2890 	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
2891 		     "target (%s -> %s) but the call is already "
2892 		     "speculated to %s. Giving up.\n",
2893 		     ie->caller->dump_name (), callee->dump_name (),
2894 		     e2->callee->dump_name ());
2895 	}
2896       else
2897 	{
2898 	  if (dump_file)
2899 	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2900 		     "(%s -> %s); this agrees with the previous speculation.\n",
2901 		     ie->caller->dump_name (), callee->dump_name ());
2902 	}
2903       return NULL;
2904     }
2905 
2906   if (!dbg_cnt (devirt))
2907     return NULL;
2908 
2909   ipa_check_create_node_params ();
2910 
2911   /* We cannot make edges to inline clones.  It is a bug that someone removed
2912      the cgraph node too early.  */
2913   gcc_assert (!callee->global.inlined_to);
2914 
2915   if (dump_file && !unreachable)
2916     {
2917       fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2918 	       "(%s -> %s), for stmt ",
2919 	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2920 	       speculative ? "speculative" : "known",
2921 	       ie->caller->dump_name (),
2922 	       callee->dump_name ());
2923       if (ie->call_stmt)
2924 	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2925       else
2926 	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2927      }
2928   if (dump_enabled_p ())
2929     {
2930       dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
2931 		       "converting indirect call in %s to direct call to %s\n",
2932 		       ie->caller->name (), callee->name ());
2933     }
2934   if (!speculative)
2935     {
2936       struct cgraph_edge *orig = ie;
2937       ie = ie->make_direct (callee);
2938       /* If we resolved a speculative edge, the cost is already up to date for
2939 	 the direct call (adjusted by inline_edge_duplication_hook).  */
2940       if (ie == orig)
2941 	{
2942 	  ipa_call_summary *es = ipa_call_summaries->get (ie);
2943 	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2944 				 - eni_size_weights.call_cost);
2945 	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2946 				 - eni_time_weights.call_cost);
2947 	}
2948     }
2949   else
2950     {
2951       if (!callee->can_be_discarded_p ())
2952 	{
2953 	  cgraph_node *alias;
2954 	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2955 	  if (alias)
2956 	    callee = alias;
2957 	}
2958       /* make_speculative will update ie's cost to direct call cost. */
2959       ie = ie->make_speculative
2960 	     (callee, ie->count.apply_scale (8, 10));
2961     }
2962 
2963   return ie;
2964 }
2965 
2966 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2967    CONSTRUCTOR and return it.  Return NULL if the search fails for some
2968    reason.  */
2969 
2970 static tree
2971 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2972 {
2973   tree type = TREE_TYPE (constructor);
2974   if (TREE_CODE (type) != ARRAY_TYPE
2975       && TREE_CODE (type) != RECORD_TYPE)
2976     return NULL;
2977 
2978   unsigned ix;
2979   tree index, val;
2980   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2981     {
2982       HOST_WIDE_INT elt_offset;
2983       if (TREE_CODE (type) == ARRAY_TYPE)
2984        {
2985          offset_int off;
2986          tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
2987          gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2988 
2989          if (index)
2990            {
2991 	     if (TREE_CODE (index) == RANGE_EXPR)
2992 	       off = wi::to_offset (TREE_OPERAND (index, 0));
2993 	     else
2994 	       off = wi::to_offset (index);
2995              if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
2996                {
2997                  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
2998                  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
2999                  off = wi::sext (off - wi::to_offset (low_bound),
3000                                  TYPE_PRECISION (TREE_TYPE (index)));
3001                }
3002              off *= wi::to_offset (unit_size);
3003 	     /* ???  Handle more than just the first index of a
3004 	        RANGE_EXPR.  */
3005            }
3006          else
3007            off = wi::to_offset (unit_size) * ix;
3008 
3009          off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3010          if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3011            continue;
3012          elt_offset = off.to_shwi ();
3013        }
3014       else if (TREE_CODE (type) == RECORD_TYPE)
3015        {
3016          gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3017          if (DECL_BIT_FIELD (index))
3018            continue;
3019          elt_offset = int_bit_position (index);
3020        }
3021       else
3022        gcc_unreachable ();
3023 
3024       if (elt_offset > req_offset)
3025 	return NULL;
3026 
3027       if (TREE_CODE (val) == CONSTRUCTOR)
3028 	return find_constructor_constant_at_offset (val,
3029 						    req_offset - elt_offset);
3030 
3031       if (elt_offset == req_offset
3032 	  && is_gimple_reg_type (TREE_TYPE (val))
3033 	  && is_gimple_ip_invariant (val))
3034 	return val;
3035     }
3036   return NULL;
3037 }
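
/* For instance (a hypothetical initializer), given

     static const int a[4] = { 1, 2, 3, 4 };

   and assuming 32-bit ints, a REQ_OFFSET of 64 bits selects index 2 and the
   function returns the constant 3, whereas an offset that does not fall
   exactly on a scalar element yields NULL.  */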
3038 
3039 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3040    invariant from a static constructor and if so, return it.  Otherwise return
3041    NULL. */
3042 
3043 static tree
3044 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3045 {
3046   if (by_ref)
3047     {
3048       if (TREE_CODE (scalar) != ADDR_EXPR)
3049 	return NULL;
3050       scalar = TREE_OPERAND (scalar, 0);
3051     }
3052 
3053   if (!VAR_P (scalar)
3054       || !is_global_var (scalar)
3055       || !TREE_READONLY (scalar)
3056       || !DECL_INITIAL (scalar)
3057       || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3058     return NULL;
3059 
3060   return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3061 }
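
/* E.g. (again hypothetical), with

     static const struct S { int x; int y; } s = { 19, 23 };

   a by-reference SCALAR of &s together with OFFSET 32 (assuming a 32-bit int)
   digs the constant 23 out of DECL_INITIAL via
   find_constructor_constant_at_offset above.  */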
3062 
3063 /* Retrieve value from aggregate jump function AGG or static initializer of
3064    SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3065    none.  BY_REF specifies whether the value has to be passed by reference or
3066    by value.  If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3067    to is set to true if the value comes from an initializer of a constant.  */
3068 
3069 tree
3070 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3071 			    HOST_WIDE_INT offset, bool by_ref,
3072 			    bool *from_global_constant)
3073 {
3074   struct ipa_agg_jf_item *item;
3075   int i;
3076 
3077   if (scalar)
3078     {
3079       tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3080       if (res)
3081 	{
3082 	  if (from_global_constant)
3083 	    *from_global_constant = true;
3084 	  return res;
3085 	}
3086     }
3087 
3088   if (!agg
3089       || by_ref != agg->by_ref)
3090     return NULL;
3091 
3092   FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
3093     if (item->offset == offset)
3094       {
3095 	/* Currently we do not have clobber values, return NULL for them once
3096 	   we do.  */
3097 	gcc_checking_assert (is_gimple_ip_invariant (item->value));
3098 	if (from_global_constant)
3099 	  *from_global_constant = false;
3100 	return item->value;
3101       }
3102   return NULL;
3103 }
3104 
3105 /* Remove a reference to SYMBOL from the list of references of a node given by
3106    reference description RDESC.  Return true if the reference has been
3107    successfully found and removed.  */
3108 
3109 static bool
3110 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3111 {
3112   struct ipa_ref *to_del;
3113   struct cgraph_edge *origin;
3114 
3115   origin = rdesc->cs;
3116   if (!origin)
3117     return false;
3118   to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3119 					   origin->lto_stmt_uid);
3120   if (!to_del)
3121     return false;
3122 
3123   to_del->remove_reference ();
3124   if (dump_file)
3125     fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3126 	     origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
3127   return true;
3128 }
3129 
3130 /* If JFUNC has a reference description with refcount different from
3131    IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3132    NULL.  JFUNC must be a constant jump function.  */
3133 
3134 static struct ipa_cst_ref_desc *
3135 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3136 {
3137   struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3138   if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3139     return rdesc;
3140   else
3141     return NULL;
3142 }
3143 
3144 /* If the value of constant jump function JFUNC is an address of a function
3145    declaration, return the associated call graph node.  Otherwise return
3146    NULL.  */
3147 
3148 static cgraph_node *
cgraph_node_for_jfunc(struct ipa_jump_func * jfunc)3149 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3150 {
3151   gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3152   tree cst = ipa_get_jf_constant (jfunc);
3153   if (TREE_CODE (cst) != ADDR_EXPR
3154       || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3155     return NULL;
3156 
3157   return cgraph_node::get (TREE_OPERAND (cst, 0));
3158 }
3159 
3160 
3161 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3162    refcount and if it hits zero, remove reference to SYMBOL from the caller of
3163    the edge specified in the rdesc.  Return false if either the symbol or the
3164    reference could not be found, otherwise return true.  */
3165 
3166 static bool
try_decrement_rdesc_refcount(struct ipa_jump_func * jfunc)3167 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3168 {
3169   struct ipa_cst_ref_desc *rdesc;
3170   if (jfunc->type == IPA_JF_CONST
3171       && (rdesc = jfunc_rdesc_usable (jfunc))
3172       && --rdesc->refcount == 0)
3173     {
3174       symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3175       if (!symbol)
3176 	return false;
3177 
3178       return remove_described_reference (symbol, rdesc);
3179     }
3180   return true;
3181 }
3182 
3183 /* Try to find a destination for indirect edge IE that corresponds to a simple
3184    call or a call of a member function pointer and where the destination is a
3185    pointer formal parameter described by jump function JFUNC.  TARGET_TYPE is
3186    the type of the parameter to which the result of JFUNC is passed.  If it can
3187    be determined, return the newly direct edge, otherwise return NULL.
3188    NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */
3189 
3190 static struct cgraph_edge *
try_make_edge_direct_simple_call(struct cgraph_edge * ie,struct ipa_jump_func * jfunc,tree target_type,struct ipa_node_params * new_root_info)3191 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3192 				  struct ipa_jump_func *jfunc, tree target_type,
3193 				  struct ipa_node_params *new_root_info)
3194 {
3195   struct cgraph_edge *cs;
3196   tree target;
3197   bool agg_contents = ie->indirect_info->agg_contents;
3198   tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3199   if (agg_contents)
3200     {
3201       bool from_global_constant;
3202       target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3203 					   ie->indirect_info->offset,
3204 					   ie->indirect_info->by_ref,
3205 					   &from_global_constant);
3206       if (target
3207 	  && !from_global_constant
3208 	  && !ie->indirect_info->guaranteed_unmodified)
3209 	return NULL;
3210     }
3211   else
3212     target = scalar;
3213   if (!target)
3214     return NULL;
3215   cs = ipa_make_edge_direct_to_target (ie, target);
3216 
3217   if (cs && !agg_contents)
3218     {
3219       bool ok;
3220       gcc_checking_assert (cs->callee
3221 			   && (cs != ie
3222 			       || jfunc->type != IPA_JF_CONST
3223 			       || !cgraph_node_for_jfunc (jfunc)
3224 			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
3225       ok = try_decrement_rdesc_refcount (jfunc);
3226       gcc_checking_assert (ok);
3227     }
3228 
3229   return cs;
3230 }
3231 
3232 /* Return the target to be used in cases of impossible devirtualization.  IE
3233    and target (the latter can be NULL) are dumped when dumping is enabled.  */
3234 
3235 tree
ipa_impossible_devirt_target(struct cgraph_edge * ie,tree target)3236 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3237 {
3238   if (dump_file)
3239     {
3240       if (target)
3241 	fprintf (dump_file,
3242 		 "Type inconsistent devirtualization: %s->%s\n",
3243 		 ie->caller->dump_name (),
3244 		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3245       else
3246 	fprintf (dump_file,
3247 		 "No devirtualization target in %s\n",
3248 		 ie->caller->dump_name ());
3249     }
3250   tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3251   cgraph_node::get_create (new_target);
3252   return new_target;
3253 }
3254 
3255 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3256    call based on a formal parameter which is described by jump function JFUNC
3257    and if it can be determined, make it direct and return the direct edge.
3258    Otherwise, return NULL.  CTX describes the polymorphic context that the
3259    parameter the call is based on brings along with it.  */
3260 
3261 static struct cgraph_edge *
try_make_edge_direct_virtual_call(struct cgraph_edge * ie,struct ipa_jump_func * jfunc,struct ipa_polymorphic_call_context ctx)3262 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3263 				   struct ipa_jump_func *jfunc,
3264 				   struct ipa_polymorphic_call_context ctx)
3265 {
3266   tree target = NULL;
3267   bool speculative = false;
3268 
3269   if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3270     return NULL;
3271 
3272   gcc_assert (!ie->indirect_info->by_ref);
3273 
3274   /* Try to do lookup via known virtual table pointer value.  */
3275   if (!ie->indirect_info->vptr_changed
3276       || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3277     {
3278       tree vtable;
3279       unsigned HOST_WIDE_INT offset;
3280       tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3281 	: NULL;
3282       tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3283 					   ie->indirect_info->offset,
3284 					   true);
3285       if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3286 	{
3287 	  bool can_refer;
3288 	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3289 						 vtable, offset, &can_refer);
3290 	  if (can_refer)
3291 	    {
3292 	      if (!t
3293 		  || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3294 		      && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3295 		  || !possible_polymorphic_call_target_p
3296 		       (ie, cgraph_node::get (t)))
3297 		{
3298 		  /* Do not speculate builtin_unreachable, it is stupid!  */
3299 		  if (!ie->indirect_info->vptr_changed)
3300 		    target = ipa_impossible_devirt_target (ie, target);
3301 		  else
3302 		    target = NULL;
3303 		}
3304 	      else
3305 		{
3306 		  target = t;
3307 		  speculative = ie->indirect_info->vptr_changed;
3308 		}
3309 	    }
3310 	}
3311     }
3312 
3313   ipa_polymorphic_call_context ie_context (ie);
3314   vec <cgraph_node *>targets;
3315   bool final;
3316 
3317   ctx.offset_by (ie->indirect_info->offset);
3318   if (ie->indirect_info->vptr_changed)
3319     ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3320 				      ie->indirect_info->otr_type);
3321   ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3322   targets = possible_polymorphic_call_targets
3323     (ie->indirect_info->otr_type,
3324      ie->indirect_info->otr_token,
3325      ctx, &final);
3326   if (final && targets.length () <= 1)
3327     {
3328       speculative = false;
3329       if (targets.length () == 1)
3330 	target = targets[0]->decl;
3331       else
3332 	target = ipa_impossible_devirt_target (ie, NULL_TREE);
3333     }
3334   else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3335 	   && !ie->speculative && ie->maybe_hot_p ())
3336     {
3337       cgraph_node *n;
3338       n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3339 					    ie->indirect_info->otr_token,
3340 					    ie->indirect_info->context);
3341       if (n)
3342 	{
3343 	  target = n->decl;
3344 	  speculative = true;
3345 	}
3346     }
3347 
3348   if (target)
3349     {
3350       if (!possible_polymorphic_call_target_p
3351 	  (ie, cgraph_node::get_create (target)))
3352 	{
3353 	  if (speculative)
3354 	    return NULL;
3355 	  target = ipa_impossible_devirt_target (ie, target);
3356 	}
3357       return ipa_make_edge_direct_to_target (ie, target, speculative);
3358     }
3359   else
3360     return NULL;
3361 }
3362 
3363 /* Update the param called notes associated with NODE when CS is being inlined,
3364    assuming NODE is (potentially indirectly) inlined into CS->callee.
3365    Moreover, if the callee is discovered to be constant, create a new cgraph
3366    edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
3367    unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */
3368 
3369 static bool
update_indirect_edges_after_inlining(struct cgraph_edge * cs,struct cgraph_node * node,vec<cgraph_edge * > * new_edges)3370 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3371 				      struct cgraph_node *node,
3372 				      vec<cgraph_edge *> *new_edges)
3373 {
3374   struct ipa_edge_args *top;
3375   struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3376   struct ipa_node_params *new_root_info, *inlined_node_info;
3377   bool res = false;
3378 
3379   ipa_check_create_edge_args ();
3380   top = IPA_EDGE_REF (cs);
3381   new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3382 				? cs->caller->global.inlined_to
3383 				: cs->caller);
3384   inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3385 
3386   for (ie = node->indirect_calls; ie; ie = next_ie)
3387     {
3388       struct cgraph_indirect_call_info *ici = ie->indirect_info;
3389       struct ipa_jump_func *jfunc;
3390       int param_index;
3391       cgraph_node *spec_target = NULL;
3392 
3393       next_ie = ie->next_callee;
3394 
3395       if (ici->param_index == -1)
3396 	continue;
3397 
3398       /* We must check range due to calls with variable number of arguments:  */
3399       if (ici->param_index >= ipa_get_cs_argument_count (top))
3400 	{
3401 	  ici->param_index = -1;
3402 	  continue;
3403 	}
3404 
3405       param_index = ici->param_index;
3406       jfunc = ipa_get_ith_jump_func (top, param_index);
3407 
3408       if (ie->speculative)
3409 	{
3410 	  struct cgraph_edge *de;
3411           struct ipa_ref *ref;
3412 	  ie->speculative_call_info (de, ie, ref);
3413 	  spec_target = de->callee;
3414 	}
3415 
3416       if (!opt_for_fn (node->decl, flag_indirect_inlining))
3417 	new_direct_edge = NULL;
3418       else if (ici->polymorphic)
3419 	{
3420           ipa_polymorphic_call_context ctx;
3421 	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3422 	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3423 	}
3424       else
3425 	{
3426 	  tree target_type =  ipa_get_type (inlined_node_info, param_index);
3427 	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3428 							      target_type,
3429 							      new_root_info);
3430 	}
3431 
3432       /* If speculation was removed, then we need to do nothing.  */
3433       if (new_direct_edge && new_direct_edge != ie
3434 	  && new_direct_edge->callee == spec_target)
3435 	{
3436 	  new_direct_edge->indirect_inlining_edge = 1;
3437 	  top = IPA_EDGE_REF (cs);
3438 	  res = true;
3439 	  if (!new_direct_edge->speculative)
3440 	    continue;
3441 	}
3442       else if (new_direct_edge)
3443 	{
3444 	  new_direct_edge->indirect_inlining_edge = 1;
3445 	  if (new_direct_edge->call_stmt)
3446 	    new_direct_edge->call_stmt_cannot_inline_p
3447 	      = !gimple_check_call_matching_types (
3448 		  new_direct_edge->call_stmt,
3449 		  new_direct_edge->callee->decl, false);
3450 	  if (new_edges)
3451 	    {
3452 	      new_edges->safe_push (new_direct_edge);
3453 	      res = true;
3454 	    }
3455 	  top = IPA_EDGE_REF (cs);
3456 	  /* If speculative edge was introduced we still need to update
3457 	     call info of the indirect edge.  */
3458 	  if (!new_direct_edge->speculative)
3459 	    continue;
3460 	}
3461       if (jfunc->type == IPA_JF_PASS_THROUGH
3462           && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3463 	{
3464 	  if (ici->agg_contents
3465 	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3466 	      && !ici->polymorphic)
3467 	    ici->param_index = -1;
3468 	  else
3469 	    {
3470 	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3471 	      if (ici->polymorphic
3472 		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
3473 		ici->vptr_changed = true;
3474 	    }
3475 	}
3476       else if (jfunc->type == IPA_JF_ANCESTOR)
3477 	{
3478 	  if (ici->agg_contents
3479 	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3480 	      && !ici->polymorphic)
3481 	    ici->param_index = -1;
3482 	  else
3483 	    {
3484 	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3485 	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3486 	      if (ici->polymorphic
3487 		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
3488 		ici->vptr_changed = true;
3489 	    }
3490 	}
3491       else
3492 	/* Either we can find a destination for this edge now or never. */
3493 	ici->param_index = -1;
3494     }
3495 
3496   return res;
3497 }
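
/* An illustration of the index rewriting above (hypothetical functions, not
   code from this file): if B makes an indirect call through its second
   formal parameter and A passes its first parameter to B unchanged (a
   NOP_EXPR pass-through jump function with formal_id 0), then after B is
   inlined into A the indirect call is described relative to A, i.e.
   ici->param_index becomes 0.  For any other kind of jump function the
   index is set to -1, as the destination can no longer be derived from a
   formal parameter of the new root.  */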

/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}

/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d - 1;
}
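
/* A worked example with hypothetical counts: if a formal parameter of the
   new root has c = 3 described uses and the corresponding parameter of the
   inlined callee had d = 2, the combined count is 3 + 2 - 1 = 4, because
   the argument of the now-inlined call site accounted for one use in each
   count and must not be counted twice.  If either count is
   IPA_UNDESCRIBED_USE, nothing useful is known and the combination is
   IPA_UNDESCRIBED_USE as well.  */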

/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s to %s.\n",
			     new_root->dump_name (),
			     n->dump_name ());
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s to %s.\n",
				     clone->dump_name (),
				     n->dump_name ());
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}

/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_sum);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  return changed;
}

/* Ensure that array of edge arguments infos is big enough to accommodate a
   structure for all edges and reallocate it if not.  Also, allocate
   associated hash tables if they do not already exist.  */

void
ipa_check_create_edge_args (void)
{
  if (!ipa_edge_args_sum)
    ipa_edge_args_sum
      = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
	 ipa_edge_args_sum_t (symtab, true));
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
}

/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  if (!ipa_edge_args_sum)
    return;

  ipa_edge_args_sum->release ();
  ipa_edge_args_sum = NULL;
}

/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  ipa_node_params_sum->release ();
  ipa_node_params_sum = NULL;
}

/* Initialize IPA CP transformation summary and also allocate any necessary hash
   tables if they do not already exist.  */

void
ipcp_transformation_initialize (void)
{
  if (!ipa_bits_hash_table)
    ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
  if (!ipa_vr_hash_table)
    ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
  if (ipcp_transformation_sum == NULL)
    ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
}

/* Release the IPA CP transformation summary.  */

void
ipcp_free_transformation_sum (void)
{
  if (!ipcp_transformation_sum)
    return;

  ipcp_transformation_sum->release ();
  ipcp_transformation_sum = NULL;
}

/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  ipcp_transformation_initialize ();
  ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
  s->agg_values = aggvals;
}

/* Hook that is called by cgraph.c when an edge is removed.  Adjust reference
   count data structures accordingly.  */

void
ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
{
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }
}

/* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
   reference count data structures accordingly.  */

void
ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
				ipa_edge_args *old_args, ipa_edge_args *new_args)
{
  unsigned int i;

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
	    ? dst->caller->global.inlined_to : dst->caller;
	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}

/* Analyze a newly added function into the callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}

/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = vec_safe_copy (old_info->descriptors);
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;
  new_info->known_csts = old_info->known_csts.copy ();
  new_info->known_contexts = old_info->known_contexts.copy ();

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;
  new_info->versionable = old_info->versionable;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }

  ipcp_transformation *src_trans = ipcp_get_transformation_summary (src);

  if (src_trans)
    {
      ipcp_transformation_initialize ();
      src_trans = ipcp_transformation_sum->get_create (src);
      ipcp_transformation *dst_trans
	= ipcp_transformation_sum->get_create (dst);

      dst_trans->bits = vec_safe_copy (src_trans->bits);

      const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
      vec<ipa_vr, va_gc> *&dst_vr
	= ipcp_get_transformation_summary (dst)->m_vr;
      if (vec_safe_length (src_trans->m_vr) > 0)
	{
	  vec_safe_reserve_exact (dst_vr, src_vr->length ());
	  for (unsigned i = 0; i < src_vr->length (); ++i)
	    dst_vr->quick_push ((*src_vr)[i]);
	}
    }
}

/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}

/* Unregister our cgraph hooks.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}

/* Print ipa_tree_map data structures of function NODE to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s parameter descriptors:\n", node->dump_name ());
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, "  controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}

/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}

/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, "     Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value);
      comma = true;
    }
  fprintf (f, "\n");
}
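
/* For illustration, with two replacements for parameter 1 the line printed
   above would look like (hypothetical values):

     Aggregate replacements: 1[0]=4, 1[64]=&impl

   i.e. the aggregate passed as parameter 1 is known to contain the constant
   4 at offset 0 and the address of impl at offset 64.  */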

/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;
  int flag = 0;

  /* ADDR_EXPRs are very common IP invariants; save some streamer data
     as well as WPA memory by handling them specially.  */
  if (jump_func->type == IPA_JF_CONST
      && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
    flag = 1;

  streamer_write_uhwi (ob, jump_func->type * 2 + flag);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob,
			 flag
			 ? TREE_OPERAND (jump_func->value.constant.value, 0)
			 : jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
	       == tcc_unary)
	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, !!jump_func->bits, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->bits)
    {
      streamer_write_widest_int (ob, jump_func->bits->value);
      streamer_write_widest_int (ob, jump_func->bits->mask);
    }
  bp_pack_value (&bp, !!jump_func->m_vr, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->m_vr)
    {
      streamer_write_enum (ob->main_stream, value_rang_type,
			   VR_LAST, jump_func->m_vr->kind ());
      stream_write_tree (ob, jump_func->m_vr->min (), true);
      stream_write_tree (ob, jump_func->m_vr->max (), true);
    }
}
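
/* For illustration: the discriminant written above is
   jump_func->type * 2 + flag, so e.g. an IPA_JF_CONST whose constant is an
   ADDR_EXPR is streamed with the ADDR_EXPR stripped and flag set to 1.
   The reader below undoes the packing from a single uhwi:

     int val = streamer_read_uhwi (ib);
     bool flag = val & 1;			  /+ operand was stripped +/
     jftype = (enum jump_func_type) (val / 2);	  /+ jump function kind +/

   (comment delimiters shown as /+ +/ to keep this comment valid) and, for
   a prevailing node, rebuilds the address with build_fold_addr_expr.  */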

/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in,
			bool prevails)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;
  int val = streamer_read_uhwi (ib);
  bool flag = val & 1;

  jftype = (enum jump_func_type) (val / 2);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      {
	tree t = stream_read_tree (ib, data_in);
	if (flag && prevails)
	  t = build_fold_addr_expr (t);
	ipa_set_jf_constant (jump_func, t, cs);
      }
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else if (TREE_CODE_CLASS (operation) == tcc_unary)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    default:
      fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
    }

  count = streamer_read_uhwi (ib);
  if (prevails)
    vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      if (prevails)
	jump_func->agg.items->quick_push (item);
    }

  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool bits_known = bp_unpack_value (&bp, 1);
  if (bits_known)
    {
      widest_int value = streamer_read_widest_int (ib);
      widest_int mask = streamer_read_widest_int (ib);
      if (prevails)
	ipa_set_jfunc_bits (jump_func, value, mask);
    }
  else
    jump_func->bits = NULL;

  struct bitpack_d vr_bp = streamer_read_bitpack (ib);
  bool vr_known = bp_unpack_value (&vr_bp, 1);
  if (vr_known)
    {
      enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
						       VR_LAST);
      tree min = stream_read_tree (ib, data_in);
      tree max = stream_read_tree (ib, data_in);
      if (prevails)
	ipa_set_jfunc_vr (jump_func, type, min, max);
    }
  else
    jump_func->m_vr = NULL;
}

/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
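
/* For illustration: the six flags above share one bit-packed word and the
   reader below must unpack them in exactly the same order.  A hypothetical
   edge with only agg_contents set would stream the bits
   polymorphic=0, agg_contents=1, member_ptr=0, by_ref=0,
   guaranteed_unmodified=0, vptr_changed=0, followed by its offset, since
   agg_contents makes the offset field meaningful.  */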

/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}

/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    {
      streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
      stream_write_tree (ob, ipa_get_type (info, j), true);
    }
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}

/* Stream in edge E from IB.  */

static void
ipa_read_edge_info (struct lto_input_block *ib,
		    struct data_in *data_in,
		    struct cgraph_edge *e, bool prevails)
{
  int count = streamer_read_uhwi (ib);
  bool contexts_computed = count & 1;

  count /= 2;
  if (!count)
    return;
  if (prevails && e->possibly_call_in_translation_unit_p ())
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
      for (int k = 0; k < count; k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in, prevails);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in
							     (ib, data_in);
	}
    }
  else
    {
      for (int k = 0; k < count; k++)
	{
	  struct ipa_jump_func dummy;
	  ipa_read_jump_function (ib, &dummy, e,
				  data_in, prevails);
	  if (contexts_computed)
	    {
	      struct ipa_polymorphic_call_context ctx;
	      ctx.stream_in (ib, data_in);
	    }
	}
    }
}

/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;
  bool prevails = node->prevailing_p ();
  struct ipa_node_params *info = prevails ? IPA_NODE_REF (node) : NULL;

  int param_count = streamer_read_uhwi (ib);
  if (prevails)
    {
      ipa_alloc_node_params (node, param_count);
      for (k = 0; k < param_count; k++)
	(*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
      if (ipa_get_param_count (info) != 0)
	info->analysis_done = true;
      info->node_enqueued = false;
    }
  else
    for (k = 0; k < param_count; k++)
      streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  for (k = 0; k < param_count; k++)
    {
      bool used = bp_unpack_value (&bp, 1);

      if (prevails)
	ipa_set_param_used (info, k, used);
    }
  for (k = 0; k < param_count; k++)
    {
      int nuses = streamer_read_hwi (ib);
      tree type = stream_read_tree (ib, data_in);

      if (prevails)
	{
	  ipa_set_controlled_uses (info, k, nuses);
	  (*info->descriptors)[k].decl_or_type = type;
	}
    }
  for (e = node->callees; e; e = e->next_callee)
    ipa_read_edge_info (ib, data_in, e, prevails);
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      ipa_read_edge_info (ib, data_in, e, prevails);
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}

/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum || !ipa_edge_args_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}

/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);

      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}

/* Stream out the IPA CP transformation summary (aggregate replacement
   values, value ranges and known bits) for NODE to OB.  */

void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->m_vr) > 0)
    {
      count = ts->m_vr->length ();
      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  struct bitpack_d bp;
	  ipa_vr *parm_vr = &(*ts->m_vr)[i];
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_vr->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_vr->known)
	    {
	      streamer_write_enum (ob->main_stream, value_rang_type,
				   VR_LAST, parm_vr->type);
	      streamer_write_wide_int (ob, parm_vr->min);
	      streamer_write_wide_int (ob, parm_vr->max);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);

  if (ts && vec_safe_length (ts->bits) > 0)
    {
      count = ts->bits->length ();
      streamer_write_uhwi (ob, count);

      for (unsigned i = 0; i < count; ++i)
	{
	  const ipa_bits *bits_jfunc = (*ts->bits)[i];
	  struct bitpack_d bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, !!bits_jfunc, 1);
	  streamer_write_bitpack (&bp);
	  if (bits_jfunc)
	    {
	      streamer_write_widest_int (ob, bits_jfunc->value);
	      streamer_write_widest_int (ob, bits_jfunc->mask);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
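
/* For illustration, the record written above for one node is, in order
   (hypothetical counts): the node reference, the number of aggregate
   replacement values followed by each value as offset/index/tree plus a
   by_ref bit, then a count of value ranges (0 when there are none) and a
   count of known-bits entries (0 when there are none).
   read_ipcp_transformation_info below consumes the fields in the same
   order.  */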
4657 
4658 /* Stream in the aggregate value replacement chain for NODE from IB.  */
4659 
4660 static void
read_ipcp_transformation_info(lto_input_block * ib,cgraph_node * node,data_in * data_in)4661 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4662 			       data_in *data_in)
4663 {
4664   struct ipa_agg_replacement_value *aggvals = NULL;
4665   unsigned int count, i;
4666 
4667   count = streamer_read_uhwi (ib);
4668   for (i = 0; i <count; i++)
4669     {
4670       struct ipa_agg_replacement_value *av;
4671       struct bitpack_d bp;
4672 
4673       av = ggc_alloc<ipa_agg_replacement_value> ();
4674       av->offset = streamer_read_uhwi (ib);
4675       av->index = streamer_read_uhwi (ib);
4676       av->value = stream_read_tree (ib, data_in);
4677       bp = streamer_read_bitpack (ib);
4678       av->by_ref = bp_unpack_value (&bp, 1);
4679       av->next = aggvals;
4680       aggvals = av;
4681     }
4682   ipa_set_node_agg_value_chain (node, aggvals);
4683 
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->m_vr, count);
      for (i = 0; i < count; i++)
	{
	  ipa_vr *parm_vr;
	  parm_vr = &(*ts->m_vr)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_vr->known = bp_unpack_value (&bp, 1);
	  if (parm_vr->known)
	    {
	      parm_vr->type = streamer_read_enum (ib, value_range_kind,
						  VR_LAST);
	      parm_vr->min = streamer_read_wide_int (ib);
	      parm_vr->max = streamer_read_wide_int (ib);
	    }
	}
    }
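  /* Finally read the known-bits information; equal value/mask pairs are
     shared via ipa_get_ipa_bits_for_value.  */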
  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_transformation_initialize ();
      ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
      vec_safe_grow_cleared (ts->bits, count);

      for (i = 0; i < count; i++)
	{
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool known = bp_unpack_value (&bp, 1);
	  if (known)
	    {
	      const widest_int value = streamer_read_widest_int (ib);
	      const widest_int mask = streamer_read_widest_int (ib);
	      ipa_bits *bits
		= ipa_get_ipa_bits_for_value (value, mask);
	      (*ts->bits)[i] = bits;
	    }
	}
    }
}

/* Write transformation summaries (aggregate replacements, value ranges and
   known bits) for all functions in the current partition.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

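  /* The section records the number of functions with a gimple body in this
     partition, followed by one record for each such function.  */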
  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

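  /* The main stream sits between the CFG part (which is not read here) and
     the string table, each sized by the section header.  */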
  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}

/* Read IPA-CP transformation summaries (aggregate replacements, value
   ranges and known bits).  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
					       NULL, &len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}

/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
	c = v->index;
    }
  c++;

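  /* Build a map from pre-clone parameter indices to the indices of the
     parameters surviving in this clone; skipped parameters map to -1.
     E.g. with four parameters of which number 1 was skipped, the map is
     {0, -1, 1, 2}.  */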
  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}

/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
			 vec<ipa_param_descriptor, va_gc> *descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual edge before_dom_children (basic_block);

private:
  struct ipa_func_body_info *m_fbi;
  vec<ipa_param_descriptor, va_gc> *m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};

edge
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple *stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset, size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
				   &offset, &size, &by_ref))
	continue;
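      /* Look for a recorded replacement for this parameter at this offset;
	 to be usable it must also match the load in by-ref-ness and
	 size.  */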
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
  return NULL;
}

/* Update bits info of formal parameters as described in
   ipcp_transformation.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  tree parm = DECL_ARGUMENTS (node->decl);
  tree next_parm = parm;
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;

  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();

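  /* TS->bits is indexed by the parameter numbering of the original function,
     so advance PARM only for parameters that survived in this clone; skipped
     indices must leave PARM in place.  */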
  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;

      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!bits[i]
	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
	       || POINTER_TYPE_P (TREE_TYPE (parm)))
	  || !is_gimple_reg (parm))
	continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	{
	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
	  print_hex (bits[i]->mask, dump_file);
	  fprintf (dump_file, "\n");
	}

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	{
	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
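	  /* For pointers the value/mask pair encodes alignment: the lowest
	     set bit of the mask (isolated by TEM & -TEM) is the known
	     alignment, and the low bits of the value give the misalignment
	     within it.  */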
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n",
			 align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align,
						       &old_misalign);

	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n",
			       old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file,
				 "old_misalign (%u) and misalign (%u) "
				 "mismatch\n", old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file,
			 "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}

/* Update value range of formal parameters as described in
   ipcp_transformation.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;

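      /* Only VR_RANGE and VR_ANTI_RANGE are stored: an integral range
	 becomes SSA_NAME range info, while the anti-range ~[0, 0] on a
	 pointer means the parameter is known to be non-NULL.  */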
      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u ", i);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].type == VR_ANTI_RANGE
		   && wi::eq_p (vr[i].min, 0)
		   && wi::eq_p (vr[i].max, 0))
	    {
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}

/* IPCP transformation phase applying the propagated aggregate values, value
   ranges and known bits to the body of NODE.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

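  /* Set up the function body info used by ipa_load_from_parm_agg when the
     dominator walk below examines loads from parameter aggregates.  */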
  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walk_budget = PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);

  vec_safe_grow_cleared (descriptors, param_count);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);

  ipcp_transformation *s = ipcp_transformation_sum->get (node);
  s->agg_values = NULL;
  s->bits = NULL;
  s->m_vr = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;

  if (cfg_changed)
    delete_unreachable_blocks_update_callgraph (node, false);

  return TODO_update_ssa_only_virtuals;
}

#include "gt-ipa-prop.h"