1 /* Interprocedural analyses.
2    Copyright (C) 2005-2020 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "ipa-utils.h"
51 #include "dbgcnt.h"
52 #include "domwalk.h"
53 #include "builtins.h"
54 #include "tree-cfgcleanup.h"
55 
56 /* Function summary where the parameter infos are actually stored. */
57 ipa_node_params_t *ipa_node_params_sum = NULL;
58 
59 function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;
60 
61 /* Edge summary for IPA-CP edge information.  */
62 ipa_edge_args_sum_t *ipa_edge_args_sum;
63 
64 /* Traits for a hash table for reusing already existing ipa_bits. */
65 
66 struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
67 {
68   typedef ipa_bits *value_type;
69   typedef ipa_bits *compare_type;
70   static hashval_t
hashipa_bit_ggc_hash_traits71   hash (const ipa_bits *p)
72   {
73     hashval_t t = (hashval_t) p->value.to_shwi ();
74     return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
75   }
76   static bool
equalipa_bit_ggc_hash_traits77   equal (const ipa_bits *a, const ipa_bits *b)
78     {
79       return a->value == b->value && a->mask == b->mask;
80     }
81   static const bool empty_zero_p = true;
82   static void
mark_emptyipa_bit_ggc_hash_traits83   mark_empty (ipa_bits *&p)
84     {
85       p = NULL;
86     }
87   static bool
is_emptyipa_bit_ggc_hash_traits88   is_empty (const ipa_bits *p)
89     {
90       return p == NULL;
91     }
92   static bool
is_deletedipa_bit_ggc_hash_traits93   is_deleted (const ipa_bits *p)
94     {
95       return p == reinterpret_cast<const ipa_bits *> (1);
96     }
97   static void
mark_deletedipa_bit_ggc_hash_traits98   mark_deleted (ipa_bits *&p)
99     {
100       p = reinterpret_cast<ipa_bits *> (1);
101     }
102 };
103 
104 /* Hash table for avoid repeated allocations of equal ipa_bits.  */
105 static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
106 
107 /* Traits for a hash table for reusing value_ranges used for IPA.  Note that
108    the equiv bitmap is not hashed and is expected to be NULL.  */
109 
110 struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
111 {
112   typedef value_range *value_type;
113   typedef value_range *compare_type;
114   static hashval_t
hashipa_vr_ggc_hash_traits115   hash (const value_range *p)
116     {
117       inchash::hash hstate (p->kind ());
118       inchash::add_expr (p->min (), hstate);
119       inchash::add_expr (p->max (), hstate);
120       return hstate.end ();
121     }
122   static bool
equalipa_vr_ggc_hash_traits123   equal (const value_range *a, const value_range *b)
124     {
125       return (a->equal_p (*b)
126 	      && types_compatible_p (a->type (), b->type ()));
127     }
128   static const bool empty_zero_p = true;
129   static void
mark_emptyipa_vr_ggc_hash_traits130   mark_empty (value_range *&p)
131     {
132       p = NULL;
133     }
134   static bool
is_emptyipa_vr_ggc_hash_traits135   is_empty (const value_range *p)
136     {
137       return p == NULL;
138     }
139   static bool
is_deletedipa_vr_ggc_hash_traits140   is_deleted (const value_range *p)
141     {
142       return p == reinterpret_cast<const value_range *> (1);
143     }
144   static void
mark_deletedipa_vr_ggc_hash_traits145   mark_deleted (value_range *&p)
146     {
147       p = reinterpret_cast<value_range *> (1);
148     }
149 };
150 
151 /* Hash table for avoid repeated allocations of equal value_ranges.  */
152 static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
153 
154 /* Holders of ipa cgraph hooks: */
155 static struct cgraph_node_hook_list *function_insertion_hook_holder;
156 
157 /* Description of a reference to an IPA constant.  */
158 struct ipa_cst_ref_desc
159 {
160   /* Edge that corresponds to the statement which took the reference.  */
161   struct cgraph_edge *cs;
162   /* Linked list of duplicates created when call graph edges are cloned.  */
163   struct ipa_cst_ref_desc *next_duplicate;
164   /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
165      if out of control.  */
166   int refcount;
167 };
168 
169 /* Allocation pool for reference descriptions.  */
170 
171 static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
172   ("IPA-PROP ref descriptions");
173 
174 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
175    with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */
176 
177 static bool
ipa_func_spec_opts_forbid_analysis_p(struct cgraph_node * node)178 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
179 {
180   tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
181 
182   if (!fs_opts)
183     return false;
184   return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
185 }
186 
187 /* Return index of the formal whose tree is PTREE in function which corresponds
188    to INFO.  */
189 
190 static int
ipa_get_param_decl_index_1(vec<ipa_param_descriptor,va_gc> * descriptors,tree ptree)191 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
192 			    tree ptree)
193 {
194   int i, count;
195 
196   count = vec_safe_length (descriptors);
197   for (i = 0; i < count; i++)
198     if ((*descriptors)[i].decl_or_type == ptree)
199       return i;
200 
201   return -1;
202 }
203 
204 /* Return index of the formal whose tree is PTREE in function which corresponds
205    to INFO.  */
206 
207 int
ipa_get_param_decl_index(class ipa_node_params * info,tree ptree)208 ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
209 {
210   return ipa_get_param_decl_index_1 (info->descriptors, ptree);
211 }
212 
213 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
214    NODE.  */
215 
216 static void
ipa_populate_param_decls(struct cgraph_node * node,vec<ipa_param_descriptor,va_gc> & descriptors)217 ipa_populate_param_decls (struct cgraph_node *node,
218 			  vec<ipa_param_descriptor, va_gc> &descriptors)
219 {
220   tree fndecl;
221   tree fnargs;
222   tree parm;
223   int param_num;
224 
225   fndecl = node->decl;
226   gcc_assert (gimple_has_body_p (fndecl));
227   fnargs = DECL_ARGUMENTS (fndecl);
228   param_num = 0;
229   for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
230     {
231       descriptors[param_num].decl_or_type = parm;
232       unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
233       descriptors[param_num].move_cost = cost;
234       /* Watch overflow, move_cost is a bitfield.  */
235       gcc_checking_assert (cost == descriptors[param_num].move_cost);
236       param_num++;
237     }
238 }
239 
240 /* Return how many formal parameters FNDECL has.  */
241 
242 int
count_formal_params(tree fndecl)243 count_formal_params (tree fndecl)
244 {
245   tree parm;
246   int count = 0;
247   gcc_assert (gimple_has_body_p (fndecl));
248 
249   for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
250     count++;
251 
252   return count;
253 }
254 
255 /* Return the declaration of Ith formal parameter of the function corresponding
256    to INFO.  Note there is no setter function as this array is built just once
257    using ipa_initialize_node_params. */
258 
259 void
ipa_dump_param(FILE * file,class ipa_node_params * info,int i)260 ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
261 {
262   fprintf (file, "param #%i", i);
263   if ((*info->descriptors)[i].decl_or_type)
264     {
265       fprintf (file, " ");
266       print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
267     }
268 }
269 
270 /* If necessary, allocate vector of parameter descriptors in info of NODE.
271    Return true if they were allocated, false if not.  */
272 
273 static bool
ipa_alloc_node_params(struct cgraph_node * node,int param_count)274 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
275 {
276   class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);
277 
278   if (!info->descriptors && param_count)
279     {
280       vec_safe_grow_cleared (info->descriptors, param_count);
281       return true;
282     }
283   else
284     return false;
285 }
286 
287 /* Initialize the ipa_node_params structure associated with NODE by counting
288    the function parameters, creating the descriptors and populating their
289    param_decls.  */
290 
291 void
ipa_initialize_node_params(struct cgraph_node * node)292 ipa_initialize_node_params (struct cgraph_node *node)
293 {
294   class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);
295 
296   if (!info->descriptors
297       && ipa_alloc_node_params (node, count_formal_params (node->decl)))
298     ipa_populate_param_decls (node, *info->descriptors);
299 }
300 
301 /* Print the jump functions associated with call graph edge CS to file F.  */
302 
303 static void
ipa_print_node_jump_functions_for_edge(FILE * f,struct cgraph_edge * cs)304 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
305 {
306   int i, count;
307 
308   count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
309   for (i = 0; i < count; i++)
310     {
311       struct ipa_jump_func *jump_func;
312       enum jump_func_type type;
313 
314       jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
315       type = jump_func->type;
316 
317       fprintf (f, "       param %d: ", i);
318       if (type == IPA_JF_UNKNOWN)
319 	fprintf (f, "UNKNOWN\n");
320       else if (type == IPA_JF_CONST)
321 	{
322 	  tree val = jump_func->value.constant.value;
323 	  fprintf (f, "CONST: ");
324 	  print_generic_expr (f, val);
325 	  if (TREE_CODE (val) == ADDR_EXPR
326 	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
327 	    {
328 	      fprintf (f, " -> ");
329 	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
330 	    }
331 	  fprintf (f, "\n");
332 	}
333       else if (type == IPA_JF_PASS_THROUGH)
334 	{
335 	  fprintf (f, "PASS THROUGH: ");
336 	  fprintf (f, "%d, op %s",
337 		   jump_func->value.pass_through.formal_id,
338 		   get_tree_code_name(jump_func->value.pass_through.operation));
339 	  if (jump_func->value.pass_through.operation != NOP_EXPR)
340 	    {
341 	      fprintf (f, " ");
342 	      print_generic_expr (f, jump_func->value.pass_through.operand);
343 	    }
344 	  if (jump_func->value.pass_through.agg_preserved)
345 	    fprintf (f, ", agg_preserved");
346 	  fprintf (f, "\n");
347 	}
348       else if (type == IPA_JF_ANCESTOR)
349 	{
350 	  fprintf (f, "ANCESTOR: ");
351 	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
352 		   jump_func->value.ancestor.formal_id,
353 		   jump_func->value.ancestor.offset);
354 	  if (jump_func->value.ancestor.agg_preserved)
355 	    fprintf (f, ", agg_preserved");
356 	  fprintf (f, "\n");
357 	}
358 
359       if (jump_func->agg.items)
360 	{
361 	  struct ipa_agg_jf_item *item;
362 	  int j;
363 
364 	  fprintf (f, "         Aggregate passed by %s:\n",
365 		   jump_func->agg.by_ref ? "reference" : "value");
366 	  FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
367 	    {
368 	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
369 		       item->offset);
370 	      fprintf (f, "type: ");
371 	      print_generic_expr (f, item->type);
372 	      fprintf (f, ", ");
373 	      if (item->jftype == IPA_JF_PASS_THROUGH)
374 		fprintf (f, "PASS THROUGH: %d,",
375 			 item->value.pass_through.formal_id);
376 	      else if (item->jftype == IPA_JF_LOAD_AGG)
377 		{
378 		  fprintf (f, "LOAD AGG: %d",
379 			   item->value.pass_through.formal_id);
380 		  fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
381 			   item->value.load_agg.offset,
382 			   item->value.load_agg.by_ref ? "reference"
383 						       : "value");
384 		}
385 
386 	      if (item->jftype == IPA_JF_PASS_THROUGH
387 		  || item->jftype == IPA_JF_LOAD_AGG)
388 		{
389 		  fprintf (f, " op %s",
390 		     get_tree_code_name (item->value.pass_through.operation));
391 		  if (item->value.pass_through.operation != NOP_EXPR)
392 		    {
393 		      fprintf (f, " ");
394 		      print_generic_expr (f, item->value.pass_through.operand);
395 		    }
396 		}
397 	      else if (item->jftype == IPA_JF_CONST)
398 		{
399 		  fprintf (f, "CONST: ");
400 		  print_generic_expr (f, item->value.constant);
401 		}
402 	      else if (item->jftype == IPA_JF_UNKNOWN)
403 		fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
404 			 tree_to_uhwi (TYPE_SIZE (item->type)));
405 	      fprintf (f, "\n");
406 	    }
407 	}
408 
409       class ipa_polymorphic_call_context *ctx
410 	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
411       if (ctx && !ctx->useless_p ())
412 	{
413 	  fprintf (f, "         Context: ");
414 	  ctx->dump (dump_file);
415 	}
416 
417       if (jump_func->bits)
418 	{
419 	  fprintf (f, "         value: ");
420 	  print_hex (jump_func->bits->value, f);
421 	  fprintf (f, ", mask: ");
422 	  print_hex (jump_func->bits->mask, f);
423 	  fprintf (f, "\n");
424 	}
425       else
426 	fprintf (f, "         Unknown bits\n");
427 
428       if (jump_func->m_vr)
429 	{
430 	  fprintf (f, "         VR  ");
431 	  fprintf (f, "%s[",
432 		   (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
433 	  print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
434 	  fprintf (f, ", ");
435 	  print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
436 	  fprintf (f, "]\n");
437 	}
438       else
439 	fprintf (f, "         Unknown VR\n");
440     }
441 }
442 
443 
444 /* Print the jump functions of all arguments on all call graph edges going from
445    NODE to file F.  */
446 
447 void
ipa_print_node_jump_functions(FILE * f,struct cgraph_node * node)448 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
449 {
450   struct cgraph_edge *cs;
451 
452   fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
453   for (cs = node->callees; cs; cs = cs->next_callee)
454     {
455 
456       fprintf (f, "    callsite  %s -> %s : \n",
457 	       node->dump_name (),
458 	       cs->callee->dump_name ());
459       if (!ipa_edge_args_info_available_for_edge_p (cs))
460 	fprintf (f, "       no arg info\n");
461       else
462         ipa_print_node_jump_functions_for_edge (f, cs);
463     }
464 
465   for (cs = node->indirect_calls; cs; cs = cs->next_callee)
466     {
467       class cgraph_indirect_call_info *ii;
468 
469       ii = cs->indirect_info;
470       if (ii->agg_contents)
471 	fprintf (f, "    indirect %s callsite, calling param %i, "
472 		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
473 		 ii->member_ptr ? "member ptr" : "aggregate",
474 		 ii->param_index, ii->offset,
475 		 ii->by_ref ? "by reference" : "by_value");
476       else
477 	fprintf (f, "    indirect %s callsite, calling param %i, "
478 		 "offset " HOST_WIDE_INT_PRINT_DEC,
479 		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
480 		 ii->offset);
481 
482       if (cs->call_stmt)
483 	{
484 	  fprintf (f, ", for stmt ");
485 	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
486 	}
487       else
488 	fprintf (f, "\n");
489       if (ii->polymorphic)
490 	ii->context.dump (f);
491       if (!ipa_edge_args_info_available_for_edge_p (cs))
492 	fprintf (f, "       no arg info\n");
493       else
494         ipa_print_node_jump_functions_for_edge (f, cs);
495     }
496 }
497 
498 /* Print ipa_jump_func data structures of all nodes in the call graph to F.  */
499 
500 void
ipa_print_all_jump_functions(FILE * f)501 ipa_print_all_jump_functions (FILE *f)
502 {
503   struct cgraph_node *node;
504 
505   fprintf (f, "\nJump functions:\n");
506   FOR_EACH_FUNCTION (node)
507     {
508       ipa_print_node_jump_functions (f, node);
509     }
510 }
511 
512 /* Set jfunc to be a know-really nothing jump function.  */
513 
514 static void
ipa_set_jf_unknown(struct ipa_jump_func * jfunc)515 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
516 {
517   jfunc->type = IPA_JF_UNKNOWN;
518 }
519 
520 /* Set JFUNC to be a copy of another jmp (to be used by jump function
521    combination code).  The two functions will share their rdesc.  */
522 
523 static void
ipa_set_jf_cst_copy(struct ipa_jump_func * dst,struct ipa_jump_func * src)524 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
525 		     struct ipa_jump_func *src)
526 
527 {
528   gcc_checking_assert (src->type == IPA_JF_CONST);
529   dst->type = IPA_JF_CONST;
530   dst->value.constant = src->value.constant;
531 }
532 
533 /* Set JFUNC to be a constant jmp function.  */
534 
535 static void
ipa_set_jf_constant(struct ipa_jump_func * jfunc,tree constant,struct cgraph_edge * cs)536 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
537 		     struct cgraph_edge *cs)
538 {
539   jfunc->type = IPA_JF_CONST;
540   jfunc->value.constant.value = unshare_expr_without_location (constant);
541 
542   if (TREE_CODE (constant) == ADDR_EXPR
543       && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
544     {
545       struct ipa_cst_ref_desc *rdesc;
546 
547       rdesc = ipa_refdesc_pool.allocate ();
548       rdesc->cs = cs;
549       rdesc->next_duplicate = NULL;
550       rdesc->refcount = 1;
551       jfunc->value.constant.rdesc = rdesc;
552     }
553   else
554     jfunc->value.constant.rdesc = NULL;
555 }
556 
557 /* Set JFUNC to be a simple pass-through jump function.  */
558 static void
ipa_set_jf_simple_pass_through(struct ipa_jump_func * jfunc,int formal_id,bool agg_preserved)559 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
560 				bool agg_preserved)
561 {
562   jfunc->type = IPA_JF_PASS_THROUGH;
563   jfunc->value.pass_through.operand = NULL_TREE;
564   jfunc->value.pass_through.formal_id = formal_id;
565   jfunc->value.pass_through.operation = NOP_EXPR;
566   jfunc->value.pass_through.agg_preserved = agg_preserved;
567 }
568 
569 /* Set JFUNC to be an unary pass through jump function.  */
570 
571 static void
ipa_set_jf_unary_pass_through(struct ipa_jump_func * jfunc,int formal_id,enum tree_code operation)572 ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
573 			       enum tree_code operation)
574 {
575   jfunc->type = IPA_JF_PASS_THROUGH;
576   jfunc->value.pass_through.operand = NULL_TREE;
577   jfunc->value.pass_through.formal_id = formal_id;
578   jfunc->value.pass_through.operation = operation;
579   jfunc->value.pass_through.agg_preserved = false;
580 }
581 /* Set JFUNC to be an arithmetic pass through jump function.  */
582 
583 static void
ipa_set_jf_arith_pass_through(struct ipa_jump_func * jfunc,int formal_id,tree operand,enum tree_code operation)584 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
585 			       tree operand, enum tree_code operation)
586 {
587   jfunc->type = IPA_JF_PASS_THROUGH;
588   jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
589   jfunc->value.pass_through.formal_id = formal_id;
590   jfunc->value.pass_through.operation = operation;
591   jfunc->value.pass_through.agg_preserved = false;
592 }
593 
594 /* Set JFUNC to be an ancestor jump function.  */
595 
596 static void
ipa_set_ancestor_jf(struct ipa_jump_func * jfunc,HOST_WIDE_INT offset,int formal_id,bool agg_preserved)597 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
598 		     int formal_id, bool agg_preserved)
599 {
600   jfunc->type = IPA_JF_ANCESTOR;
601   jfunc->value.ancestor.formal_id = formal_id;
602   jfunc->value.ancestor.offset = offset;
603   jfunc->value.ancestor.agg_preserved = agg_preserved;
604 }
605 
606 /* Get IPA BB information about the given BB.  FBI is the context of analyzis
607    of this function body.  */
608 
609 static struct ipa_bb_info *
ipa_get_bb_info(struct ipa_func_body_info * fbi,basic_block bb)610 ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
611 {
612   gcc_checking_assert (fbi);
613   return &fbi->bb_infos[bb->index];
614 }
615 
616 /* Structure to be passed in between detect_type_change and
617    check_stmt_for_type_change.  */
618 
619 struct prop_type_change_info
620 {
621   /* Offset into the object where there is the virtual method pointer we are
622      looking for.  */
623   HOST_WIDE_INT offset;
624   /* The declaration or SSA_NAME pointer of the base that we are checking for
625      type change.  */
626   tree object;
627   /* Set to true if dynamic type change has been detected.  */
628   bool type_maybe_changed;
629 };
630 
631 /* Return true if STMT can modify a virtual method table pointer.
632 
633    This function makes special assumptions about both constructors and
634    destructors which are all the functions that are allowed to alter the VMT
635    pointers.  It assumes that destructors begin with assignment into all VMT
636    pointers and that constructors essentially look in the following way:
637 
638    1) The very first thing they do is that they call constructors of ancestor
639    sub-objects that have them.
640 
641    2) Then VMT pointers of this and all its ancestors is set to new values
642    corresponding to the type corresponding to the constructor.
643 
644    3) Only afterwards, other stuff such as constructor of member sub-objects
645    and the code written by the user is run.  Only this may include calling
646    virtual functions, directly or indirectly.
647 
648    There is no way to call a constructor of an ancestor sub-object in any
649    other way.
650 
651    This means that we do not have to care whether constructors get the correct
652    type information because they will always change it (in fact, if we define
653    the type to be given by the VMT pointer, it is undefined).
654 
655    The most important fact to derive from the above is that if, for some
656    statement in the section 3, we try to detect whether the dynamic type has
657    changed, we can safely ignore all calls as we examine the function body
658    backwards until we reach statements in section 2 because these calls cannot
659    be ancestor constructors or destructors (if the input is not bogus) and so
660    do not change the dynamic type (this holds true only for automatically
661    allocated objects but at the moment we devirtualize only these).  We then
662    must detect that statements in section 2 change the dynamic type and can try
663    to derive the new type.  That is enough and we can stop, we will never see
664    the calls into constructors of sub-objects in this code.  Therefore we can
665    safely ignore all call statements that we traverse.
666   */
667 
668 static bool
stmt_may_be_vtbl_ptr_store(gimple * stmt)669 stmt_may_be_vtbl_ptr_store (gimple *stmt)
670 {
671   if (is_gimple_call (stmt))
672     return false;
673   if (gimple_clobber_p (stmt))
674     return false;
675   else if (is_gimple_assign (stmt))
676     {
677       tree lhs = gimple_assign_lhs (stmt);
678 
679       if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
680 	{
681 	  if (flag_strict_aliasing
682 	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
683 	    return false;
684 
685 	  if (TREE_CODE (lhs) == COMPONENT_REF
686 	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
687 	    return false;
688 	  /* In the future we might want to use get_ref_base_and_extent to find
689 	     if there is a field corresponding to the offset and if so, proceed
690 	     almost like if it was a component ref.  */
691 	}
692     }
693   return true;
694 }
695 
696 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
697    to check whether a particular statement may modify the virtual table
698    pointerIt stores its result into DATA, which points to a
699    prop_type_change_info structure.  */
700 
701 static bool
check_stmt_for_type_change(ao_ref * ao ATTRIBUTE_UNUSED,tree vdef,void * data)702 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
703 {
704   gimple *stmt = SSA_NAME_DEF_STMT (vdef);
705   struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
706 
707   if (stmt_may_be_vtbl_ptr_store (stmt))
708     {
709       tci->type_maybe_changed = true;
710       return true;
711     }
712   else
713     return false;
714 }
715 
716 /* See if ARG is PARAM_DECl describing instance passed by pointer
717    or reference in FUNCTION.  Return false if the dynamic type may change
718    in between beggining of the function until CALL is invoked.
719 
720    Generally functions are not allowed to change type of such instances,
721    but they call destructors.  We assume that methods cannot destroy the THIS
722    pointer.  Also as a special cases, constructor and destructors may change
723    type of the THIS pointer.  */
724 
725 static bool
param_type_may_change_p(tree function,tree arg,gimple * call)726 param_type_may_change_p (tree function, tree arg, gimple *call)
727 {
728   /* Pure functions cannot do any changes on the dynamic type;
729      that require writting to memory.  */
730   if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
731     return false;
732   /* We need to check if we are within inlined consturctor
733      or destructor (ideally we would have way to check that the
734      inline cdtor is actually working on ARG, but we don't have
735      easy tie on this, so punt on all non-pure cdtors.
736      We may also record the types of cdtors and once we know type
737      of the instance match them.
738 
739      Also code unification optimizations may merge calls from
740      different blocks making return values unreliable.  So
741      do nothing during late optimization.  */
742   if (DECL_STRUCT_FUNCTION (function)->after_inlining)
743     return true;
744   if (TREE_CODE (arg) == SSA_NAME
745       && SSA_NAME_IS_DEFAULT_DEF (arg)
746       && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
747     {
748       /* Normal (non-THIS) argument.  */
749       if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
750 	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
751 	  /* THIS pointer of an method - here we want to watch constructors
752 	     and destructors as those definitely may change the dynamic
753 	     type.  */
754 	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
755 	      && !DECL_CXX_CONSTRUCTOR_P (function)
756 	      && !DECL_CXX_DESTRUCTOR_P (function)
757 	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
758 	{
759 	  /* Walk the inline stack and watch out for ctors/dtors.  */
760 	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
761 	       block = BLOCK_SUPERCONTEXT (block))
762 	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
763 	      return true;
764 	  return false;
765 	}
766     }
767   return true;
768 }
769 
770 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
771    callsite CALL) by looking for assignments to its virtual table pointer.  If
772    it is, return true.  ARG is the object itself (not a pointer
773    to it, unless dereferenced).  BASE is the base of the memory access as
774    returned by get_ref_base_and_extent, as is the offset.
775 
776    This is helper function for detect_type_change and detect_type_change_ssa
777    that does the heavy work which is usually unnecesary.  */
778 
779 static bool
detect_type_change_from_memory_writes(ipa_func_body_info * fbi,tree arg,tree base,tree comp_type,gcall * call,HOST_WIDE_INT offset)780 detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
781 				       tree base, tree comp_type, gcall *call,
782 				       HOST_WIDE_INT offset)
783 {
784   struct prop_type_change_info tci;
785   ao_ref ao;
786 
787   gcc_checking_assert (DECL_P (arg)
788 		       || TREE_CODE (arg) == MEM_REF
789 		       || handled_component_p (arg));
790 
791   comp_type = TYPE_MAIN_VARIANT (comp_type);
792 
793   /* Const calls cannot call virtual methods through VMT and so type changes do
794      not matter.  */
795   if (!flag_devirtualize || !gimple_vuse (call)
796       /* Be sure expected_type is polymorphic.  */
797       || !comp_type
798       || TREE_CODE (comp_type) != RECORD_TYPE
799       || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
800       || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
801     return true;
802 
803   ao_ref_init (&ao, arg);
804   ao.base = base;
805   ao.offset = offset;
806   ao.size = POINTER_SIZE;
807   ao.max_size = ao.size;
808 
809   tci.offset = offset;
810   tci.object = get_base_address (arg);
811   tci.type_maybe_changed = false;
812 
813   int walked
814     = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
815 			  &tci, NULL, NULL, fbi->aa_walk_budget + 1);
816 
817   if (walked >= 0 && !tci.type_maybe_changed)
818     return false;
819 
820   return true;
821 }
822 
823 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
824    If it is, return true.  ARG is the object itself (not a pointer
825    to it, unless dereferenced).  BASE is the base of the memory access as
826    returned by get_ref_base_and_extent, as is the offset.  */
827 
828 static bool
detect_type_change(ipa_func_body_info * fbi,tree arg,tree base,tree comp_type,gcall * call,HOST_WIDE_INT offset)829 detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
830 		    tree comp_type, gcall *call,
831 		    HOST_WIDE_INT offset)
832 {
833   if (!flag_devirtualize)
834     return false;
835 
836   if (TREE_CODE	(base) == MEM_REF
837       && !param_type_may_change_p (current_function_decl,
838 				   TREE_OPERAND (base, 0),
839 				   call))
840     return false;
841   return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
842 						call, offset);
843 }
844 
845 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
846    SSA name (its dereference will become the base and the offset is assumed to
847    be zero).  */
848 
849 static bool
detect_type_change_ssa(ipa_func_body_info * fbi,tree arg,tree comp_type,gcall * call)850 detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
851 			gcall *call)
852 {
853   gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
854   if (!flag_devirtualize
855       || !POINTER_TYPE_P (TREE_TYPE (arg)))
856     return false;
857 
858   if (!param_type_may_change_p (current_function_decl, arg, call))
859     return false;
860 
861   arg = build2 (MEM_REF, ptr_type_node, arg,
862 		build_int_cst (ptr_type_node, 0));
863 
864   return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
865 						call, 0);
866 }
867 
868 /* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
869    boolean variable pointed to by DATA.  */
870 
871 static bool
mark_modified(ao_ref * ao ATTRIBUTE_UNUSED,tree vdef ATTRIBUTE_UNUSED,void * data)872 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
873 		     void *data)
874 {
875   bool *b = (bool *) data;
876   *b = true;
877   return true;
878 }
879 
880 /* Find the nearest valid aa status for parameter specified by INDEX that
881    dominates BB.  */
882 
883 static struct ipa_param_aa_status *
find_dominating_aa_status(struct ipa_func_body_info * fbi,basic_block bb,int index)884 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
885 			   int index)
886 {
887   while (true)
888     {
889       bb = get_immediate_dominator (CDI_DOMINATORS, bb);
890       if (!bb)
891 	return NULL;
892       struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
893       if (!bi->param_aa_statuses.is_empty ()
894 	  && bi->param_aa_statuses[index].valid)
895 	return &bi->param_aa_statuses[index];
896     }
897 }
898 
899 /* Get AA status structure for the given BB and parameter with INDEX.  Allocate
900    structures and/or intialize the result with a dominating description as
901    necessary.  */
902 
903 static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb(struct ipa_func_body_info * fbi,basic_block bb,int index)904 parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
905 			  int index)
906 {
907   gcc_checking_assert (fbi);
908   struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
909   if (bi->param_aa_statuses.is_empty ())
910     bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
911   struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
912   if (!paa->valid)
913     {
914       gcc_checking_assert (!paa->parm_modified
915 			   && !paa->ref_modified
916 			   && !paa->pt_modified);
917       struct ipa_param_aa_status *dom_paa;
918       dom_paa = find_dominating_aa_status (fbi, bb, index);
919       if (dom_paa)
920 	*paa = *dom_paa;
921       else
922 	paa->valid = true;
923     }
924 
925   return paa;
926 }
927 
928 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
929    a value known not to be modified in this function before reaching the
930    statement STMT.  FBI holds information about the function we have so far
931    gathered but do not survive the summary building stage.  */
932 
933 static bool
parm_preserved_before_stmt_p(struct ipa_func_body_info * fbi,int index,gimple * stmt,tree parm_load)934 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
935 			      gimple *stmt, tree parm_load)
936 {
937   struct ipa_param_aa_status *paa;
938   bool modified = false;
939   ao_ref refd;
940 
941   tree base = get_base_address (parm_load);
942   gcc_assert (TREE_CODE (base) == PARM_DECL);
943   if (TREE_READONLY (base))
944     return true;
945 
946   gcc_checking_assert (fbi);
947   paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
948   if (paa->parm_modified)
949     return false;
950 
951   gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
952   ao_ref_init (&refd, parm_load);
953   int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
954 				   &modified, NULL, NULL,
955 				   fbi->aa_walk_budget + 1);
956   if (walked < 0)
957     {
958       modified = true;
959       if (fbi)
960 	fbi->aa_walk_budget = 0;
961     }
962   else if (fbi)
963     fbi->aa_walk_budget -= walked;
964   if (paa && modified)
965     paa->parm_modified = true;
966   return !modified;
967 }
968 
969 /* If STMT is an assignment that loads a value from an parameter declaration,
970    return the index of the parameter in ipa_node_params which has not been
971    modified.  Otherwise return -1.  */
972 
973 static int
load_from_unmodified_param(struct ipa_func_body_info * fbi,vec<ipa_param_descriptor,va_gc> * descriptors,gimple * stmt)974 load_from_unmodified_param (struct ipa_func_body_info *fbi,
975 			    vec<ipa_param_descriptor, va_gc> *descriptors,
976 			    gimple *stmt)
977 {
978   int index;
979   tree op1;
980 
981   if (!gimple_assign_single_p (stmt))
982     return -1;
983 
984   op1 = gimple_assign_rhs1 (stmt);
985   if (TREE_CODE (op1) != PARM_DECL)
986     return -1;
987 
988   index = ipa_get_param_decl_index_1 (descriptors, op1);
989   if (index < 0
990       || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
991     return -1;
992 
993   return index;
994 }
995 
996 /* Return true if memory reference REF (which must be a load through parameter
997    with INDEX) loads data that are known to be unmodified in this function
998    before reaching statement STMT.  */
999 
1000 static bool
parm_ref_data_preserved_p(struct ipa_func_body_info * fbi,int index,gimple * stmt,tree ref)1001 parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
1002 			   int index, gimple *stmt, tree ref)
1003 {
1004   struct ipa_param_aa_status *paa;
1005   bool modified = false;
1006   ao_ref refd;
1007 
1008   gcc_checking_assert (fbi);
1009   paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1010   if (paa->ref_modified)
1011     return false;
1012 
1013   gcc_checking_assert (gimple_vuse (stmt));
1014   ao_ref_init (&refd, ref);
1015   int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1016 				   &modified, NULL, NULL,
1017 				   fbi->aa_walk_budget + 1);
1018   if (walked < 0)
1019     {
1020       modified = true;
1021       fbi->aa_walk_budget = 0;
1022     }
1023   else
1024     fbi->aa_walk_budget -= walked;
1025   if (modified)
1026     paa->ref_modified = true;
1027   return !modified;
1028 }
1029 
1030 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1031    is known to be unmodified in this function before reaching call statement
1032    CALL into which it is passed.  FBI describes the function body.  */
1033 
1034 static bool
parm_ref_data_pass_through_p(struct ipa_func_body_info * fbi,int index,gimple * call,tree parm)1035 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
1036 			      gimple *call, tree parm)
1037 {
1038   bool modified = false;
1039   ao_ref refd;
1040 
1041   /* It's unnecessary to calculate anything about memory contnets for a const
1042      function because it is not goin to use it.  But do not cache the result
1043      either.  Also, no such calculations for non-pointers.  */
1044   if (!gimple_vuse (call)
1045       || !POINTER_TYPE_P (TREE_TYPE (parm)))
1046     return false;
1047 
1048   struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1049 							      gimple_bb (call),
1050 							      index);
1051   if (paa->pt_modified)
1052     return false;
1053 
1054   ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1055   int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1056 				   &modified, NULL, NULL,
1057 				   fbi->aa_walk_budget + 1);
1058   if (walked < 0)
1059     {
1060       fbi->aa_walk_budget = 0;
1061       modified = true;
1062     }
1063   else
1064     fbi->aa_walk_budget -= walked;
1065   if (modified)
1066     paa->pt_modified = true;
1067   return !modified;
1068 }
1069 
1070 /* Return true if we can prove that OP is a memory reference loading
1071    data from an aggregate passed as a parameter.
1072 
1073    The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it return
1074    false if it cannot prove that the value has not been modified before the
1075    load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1076    if it cannot prove the value has not been modified, in that case it will
1077    store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1078 
1079    INFO and PARMS_AINFO describe parameters of the current function (but the
1080    latter can be NULL), STMT is the load statement.  If function returns true,
1081    *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1082    within the aggregate and whether it is a load from a value passed by
1083    reference respectively.  */
1084 
1085 bool
ipa_load_from_parm_agg(struct ipa_func_body_info * fbi,vec<ipa_param_descriptor,va_gc> * descriptors,gimple * stmt,tree op,int * index_p,HOST_WIDE_INT * offset_p,poly_int64 * size_p,bool * by_ref_p,bool * guaranteed_unmodified)1086 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
1087 			vec<ipa_param_descriptor, va_gc> *descriptors,
1088 			gimple *stmt, tree op, int *index_p,
1089 			HOST_WIDE_INT *offset_p, poly_int64 *size_p,
1090 			bool *by_ref_p, bool *guaranteed_unmodified)
1091 {
1092   int index;
1093   HOST_WIDE_INT size;
1094   bool reverse;
1095   tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
1096 
1097   if (!base)
1098     return false;
1099 
1100   if (DECL_P (base))
1101     {
1102       int index = ipa_get_param_decl_index_1 (descriptors, base);
1103       if (index >= 0
1104 	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1105 	{
1106 	  *index_p = index;
1107 	  *by_ref_p = false;
1108 	  if (size_p)
1109 	    *size_p = size;
1110 	  if (guaranteed_unmodified)
1111 	    *guaranteed_unmodified = true;
1112 	  return true;
1113 	}
1114       return false;
1115     }
1116 
1117   if (TREE_CODE (base) != MEM_REF
1118 	   || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1119 	   || !integer_zerop (TREE_OPERAND (base, 1)))
1120     return false;
1121 
1122   if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1123     {
1124       tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1125       index = ipa_get_param_decl_index_1 (descriptors, parm);
1126     }
1127   else
1128     {
1129       /* This branch catches situations where a pointer parameter is not a
1130 	 gimple register, for example:
1131 
1132 	 void hip7(S*) (struct S * p)
1133 	 {
1134 	 void (*<T2e4>) (struct S *) D.1867;
1135 	 struct S * p.1;
1136 
1137 	 <bb 2>:
1138 	 p.1_1 = p;
1139 	 D.1867_2 = p.1_1->f;
1140 	 D.1867_2 ();
1141 	 gdp = &p;
1142       */
1143 
1144       gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1145       index = load_from_unmodified_param (fbi, descriptors, def);
1146     }
1147 
1148   if (index >= 0)
1149     {
1150       bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1151       if (!data_preserved && !guaranteed_unmodified)
1152 	return false;
1153 
1154       *index_p = index;
1155       *by_ref_p = true;
1156       if (size_p)
1157 	*size_p = size;
1158       if (guaranteed_unmodified)
1159 	*guaranteed_unmodified = data_preserved;
1160       return true;
1161     }
1162   return false;
1163 }
1164 
1165 /* If STMT is an assignment that loads a value from a parameter declaration,
1166    or from an aggregate passed as the parameter either by value or reference,
1167    return the index of the parameter in ipa_node_params.  Otherwise return -1.
1168 
1169    FBI holds gathered information about the function.  INFO describes
1170    parameters of the function, STMT is the assignment statement.  If it is a
1171    memory load from an aggregate, *OFFSET_P is filled with offset within the
1172    aggregate, and *BY_REF_P specifies whether the aggregate is passed by
1173    reference.  */
1174 
1175 static int
load_from_unmodified_param_or_agg(struct ipa_func_body_info * fbi,class ipa_node_params * info,gimple * stmt,HOST_WIDE_INT * offset_p,bool * by_ref_p)1176 load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
1177 				   class ipa_node_params *info,
1178 				   gimple *stmt,
1179 				   HOST_WIDE_INT *offset_p,
1180 				   bool *by_ref_p)
1181 {
1182   int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1183   poly_int64 size;
1184 
1185   /* Load value from a parameter declaration.  */
1186   if (index >= 0)
1187     {
1188       *offset_p = -1;
1189       return index;
1190     }
1191 
1192   if (!gimple_assign_load_p (stmt))
1193     return -1;
1194 
1195   tree rhs = gimple_assign_rhs1 (stmt);
1196 
1197   /* Skip memory reference containing VIEW_CONVERT_EXPR.  */
1198   for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
1199     if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
1200       return -1;
1201 
1202   /* Skip memory reference containing bit-field.  */
1203   if (TREE_CODE (rhs) == BIT_FIELD_REF
1204       || contains_bitfld_component_ref_p (rhs))
1205     return -1;
1206 
1207   if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
1208 			       offset_p, &size, by_ref_p))
1209     return -1;
1210 
1211   gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
1212 			 size));
1213   if (!*by_ref_p)
1214     {
1215       tree param_type = ipa_get_type (info, index);
1216 
1217       if (!param_type || !AGGREGATE_TYPE_P (param_type))
1218 	return -1;
1219     }
1220   else if (TREE_THIS_VOLATILE (rhs))
1221     return -1;
1222 
1223   return index;
1224 }
1225 
1226 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1227    of an assignment statement STMT, try to determine whether we are actually
1228    handling any of the following cases and construct an appropriate jump
1229    function into JFUNC if so:
1230 
1231    1) The passed value is loaded from a formal parameter which is not a gimple
1232    register (most probably because it is addressable, the value has to be
1233    scalar) and we can guarantee the value has not changed.  This case can
1234    therefore be described by a simple pass-through jump function.  For example:
1235 
1236       foo (int a)
1237       {
1238         int a.0;
1239 
1240         a.0_2 = a;
1241         bar (a.0_2);
1242 
1243    2) The passed value can be described by a simple arithmetic pass-through
1244    jump function. E.g.
1245 
1246       foo (int a)
1247       {
1248         int D.2064;
1249 
1250         D.2064_4 = a.1(D) + 4;
1251         bar (D.2064_4);
1252 
1253    This case can also occur in combination of the previous one, e.g.:
1254 
1255       foo (int a, int z)
1256       {
1257         int a.0;
1258         int D.2064;
1259 
1260 	a.0_3 = a;
1261 	D.2064_4 = a.0_3 + 4;
1262 	foo (D.2064_4);
1263 
1264    3) The passed value is an address of an object within another one (which
1265    also passed by reference).  Such situations are described by an ancestor
1266    jump function and describe situations such as:
1267 
1268      B::foo() (struct B * const this)
1269      {
1270        struct A * D.1845;
1271 
1272        D.1845_2 = &this_1(D)->D.1748;
1273        A::bar (D.1845_2);
1274 
1275    INFO is the structure describing individual parameters access different
1276    stages of IPA optimizations.  PARMS_AINFO contains the information that is
1277    only needed for intraprocedural analysis.  */
1278 
1279 static void
compute_complex_assign_jump_func(struct ipa_func_body_info * fbi,class ipa_node_params * info,struct ipa_jump_func * jfunc,gcall * call,gimple * stmt,tree name,tree param_type)1280 compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1281 				  class ipa_node_params *info,
1282 				  struct ipa_jump_func *jfunc,
1283 				  gcall *call, gimple *stmt, tree name,
1284 				  tree param_type)
1285 {
1286   HOST_WIDE_INT offset, size;
1287   tree op1, tc_ssa, base, ssa;
1288   bool reverse;
1289   int index;
1290 
1291   op1 = gimple_assign_rhs1 (stmt);
1292 
1293   if (TREE_CODE (op1) == SSA_NAME)
1294     {
1295       if (SSA_NAME_IS_DEFAULT_DEF (op1))
1296 	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1297       else
1298 	index = load_from_unmodified_param (fbi, info->descriptors,
1299 					    SSA_NAME_DEF_STMT (op1));
1300       tc_ssa = op1;
1301     }
1302   else
1303     {
1304       index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1305       tc_ssa = gimple_assign_lhs (stmt);
1306     }
1307 
1308   if (index >= 0)
1309     {
1310       switch (gimple_assign_rhs_class (stmt))
1311 	{
1312 	case GIMPLE_BINARY_RHS:
1313 	  {
1314 	    tree op2 = gimple_assign_rhs2 (stmt);
1315 	    if (!is_gimple_ip_invariant (op2)
1316 		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1317 		     != tcc_comparison)
1318 		    && !useless_type_conversion_p (TREE_TYPE (name),
1319 						   TREE_TYPE (op1))))
1320 	      return;
1321 
1322 	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
1323 					   gimple_assign_rhs_code (stmt));
1324 	    break;
1325 	  }
1326 	case GIMPLE_SINGLE_RHS:
1327 	  {
1328 	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1329 						       tc_ssa);
1330 	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1331 	    break;
1332 	  }
1333 	case GIMPLE_UNARY_RHS:
1334 	  if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
1335 	    ipa_set_jf_unary_pass_through (jfunc, index,
1336 					   gimple_assign_rhs_code (stmt));
1337 	default:;
1338 	}
1339       return;
1340     }
1341 
1342   if (TREE_CODE (op1) != ADDR_EXPR)
1343     return;
1344   op1 = TREE_OPERAND (op1, 0);
1345   if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1346     return;
1347   base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
1348   offset_int mem_offset;
1349   if (!base
1350       || TREE_CODE (base) != MEM_REF
1351       || !mem_ref_offset (base).is_constant (&mem_offset))
1352     return;
1353   offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1354   ssa = TREE_OPERAND (base, 0);
1355   if (TREE_CODE (ssa) != SSA_NAME
1356       || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1357       || offset < 0)
1358     return;
1359 
1360   /* Dynamic types are changed in constructors and destructors.  */
1361   index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1362   if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1363     ipa_set_ancestor_jf (jfunc, offset,  index,
1364 			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1365 }
1366 
1367 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1368    it looks like:
1369 
1370    iftmp.1_3 = &obj_2(D)->D.1762;
1371 
1372    The base of the MEM_REF must be a default definition SSA NAME of a
1373    parameter.  Return NULL_TREE if it looks otherwise.  If case of success, the
1374    whole MEM_REF expression is returned and the offset calculated from any
1375    handled components and the MEM_REF itself is stored into *OFFSET.  The whole
1376    RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */
1377 
1378 static tree
get_ancestor_addr_info(gimple * assign,tree * obj_p,HOST_WIDE_INT * offset)1379 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1380 {
1381   HOST_WIDE_INT size;
1382   tree expr, parm, obj;
1383   bool reverse;
1384 
1385   if (!gimple_assign_single_p (assign))
1386     return NULL_TREE;
1387   expr = gimple_assign_rhs1 (assign);
1388 
1389   if (TREE_CODE (expr) != ADDR_EXPR)
1390     return NULL_TREE;
1391   expr = TREE_OPERAND (expr, 0);
1392   obj = expr;
1393   expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
1394 
1395   offset_int mem_offset;
1396   if (!expr
1397       || TREE_CODE (expr) != MEM_REF
1398       || !mem_ref_offset (expr).is_constant (&mem_offset))
1399     return NULL_TREE;
1400   parm = TREE_OPERAND (expr, 0);
1401   if (TREE_CODE (parm) != SSA_NAME
1402       || !SSA_NAME_IS_DEFAULT_DEF (parm)
1403       || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1404     return NULL_TREE;
1405 
1406   *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1407   *obj_p = obj;
1408   return expr;
1409 }
1410 
1411 
1412 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1413    statement PHI, try to find out whether NAME is in fact a
1414    multiple-inheritance typecast from a descendant into an ancestor of a formal
1415    parameter and thus can be described by an ancestor jump function and if so,
1416    write the appropriate function into JFUNC.
1417 
1418    Essentially we want to match the following pattern:
1419 
1420      if (obj_2(D) != 0B)
1421        goto <bb 3>;
1422      else
1423        goto <bb 4>;
1424 
1425    <bb 3>:
1426      iftmp.1_3 = &obj_2(D)->D.1762;
1427 
1428    <bb 4>:
1429      # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1430      D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1431      return D.1879_6;  */
1432 
1433 static void
compute_complex_ancestor_jump_func(struct ipa_func_body_info * fbi,class ipa_node_params * info,struct ipa_jump_func * jfunc,gcall * call,gphi * phi)1434 compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1435 				    class ipa_node_params *info,
1436 				    struct ipa_jump_func *jfunc,
1437 				    gcall *call, gphi *phi)
1438 {
1439   HOST_WIDE_INT offset;
1440   gimple *assign, *cond;
1441   basic_block phi_bb, assign_bb, cond_bb;
1442   tree tmp, parm, expr, obj;
1443   int index, i;
1444 
1445   if (gimple_phi_num_args (phi) != 2)
1446     return;
1447 
1448   if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1449     tmp = PHI_ARG_DEF (phi, 0);
1450   else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1451     tmp = PHI_ARG_DEF (phi, 1);
1452   else
1453     return;
1454   if (TREE_CODE (tmp) != SSA_NAME
1455       || SSA_NAME_IS_DEFAULT_DEF (tmp)
1456       || !POINTER_TYPE_P (TREE_TYPE (tmp))
1457       || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1458     return;
1459 
1460   assign = SSA_NAME_DEF_STMT (tmp);
1461   assign_bb = gimple_bb (assign);
1462   if (!single_pred_p (assign_bb))
1463     return;
1464   expr = get_ancestor_addr_info (assign, &obj, &offset);
1465   if (!expr)
1466     return;
1467   parm = TREE_OPERAND (expr, 0);
1468   index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1469   if (index < 0)
1470     return;
1471 
1472   cond_bb = single_pred (assign_bb);
1473   cond = last_stmt (cond_bb);
1474   if (!cond
1475       || gimple_code (cond) != GIMPLE_COND
1476       || gimple_cond_code (cond) != NE_EXPR
1477       || gimple_cond_lhs (cond) != parm
1478       || !integer_zerop (gimple_cond_rhs (cond)))
1479     return;
1480 
1481   phi_bb = gimple_bb (phi);
1482   for (i = 0; i < 2; i++)
1483     {
1484       basic_block pred = EDGE_PRED (phi_bb, i)->src;
1485       if (pred != assign_bb && pred != cond_bb)
1486 	return;
1487     }
1488 
1489   ipa_set_ancestor_jf (jfunc, offset, index,
1490 		       parm_ref_data_pass_through_p (fbi, index, call, parm));
1491 }
1492 
1493 /* Inspect the given TYPE and return true iff it has the same structure (the
1494    same number of fields of the same types) as a C++ member pointer.  If
1495    METHOD_PTR and DELTA are non-NULL, store the trees representing the
1496    corresponding fields there.  */
1497 
1498 static bool
type_like_member_ptr_p(tree type,tree * method_ptr,tree * delta)1499 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1500 {
1501   tree fld;
1502 
1503   if (TREE_CODE (type) != RECORD_TYPE)
1504     return false;
1505 
1506   fld = TYPE_FIELDS (type);
1507   if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1508       || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1509       || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1510     return false;
1511 
1512   if (method_ptr)
1513     *method_ptr = fld;
1514 
1515   fld = DECL_CHAIN (fld);
1516   if (!fld || INTEGRAL_TYPE_P (fld)
1517       || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1518     return false;
1519   if (delta)
1520     *delta = fld;
1521 
1522   if (DECL_CHAIN (fld))
1523     return false;
1524 
1525   return true;
1526 }
1527 
1528 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1529    return the rhs of its defining statement, and this statement is stored in
1530    *RHS_STMT.  Otherwise return RHS as it is.  */
1531 
1532 static inline tree
get_ssa_def_if_simple_copy(tree rhs,gimple ** rhs_stmt)1533 get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
1534 {
1535   while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1536     {
1537       gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1538 
1539       if (gimple_assign_single_p (def_stmt))
1540 	rhs = gimple_assign_rhs1 (def_stmt);
1541       else
1542 	break;
1543       *rhs_stmt = def_stmt;
1544     }
1545   return rhs;
1546 }
1547 
1548 /* Simple linked list, describing contents of an aggregate before call.  */
1549 
1550 struct ipa_known_agg_contents_list
1551 {
1552   /* Offset and size of the described part of the aggregate.  */
1553   HOST_WIDE_INT offset, size;
1554 
1555   /* Type of the described part of the aggregate.  */
1556   tree type;
1557 
1558   /* Known constant value or jump function data describing contents.  */
1559   struct ipa_load_agg_data value;
1560 
1561   /* Pointer to the next structure in the list.  */
1562   struct ipa_known_agg_contents_list *next;
1563 };
1564 
1565 /* Add an aggregate content item into a linked list of
1566    ipa_known_agg_contents_list structure, in which all elements
1567    are sorted ascendingly by offset.  */
1568 
1569 static inline void
add_to_agg_contents_list(struct ipa_known_agg_contents_list ** plist,struct ipa_known_agg_contents_list * item)1570 add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1571 			  struct ipa_known_agg_contents_list *item)
1572 {
1573   struct ipa_known_agg_contents_list *list = *plist;
1574 
1575   for (; list; list = list->next)
1576     {
1577       if (list->offset >= item->offset)
1578 	break;
1579 
1580       plist = &list->next;
1581     }
1582 
1583   item->next = list;
1584   *plist = item;
1585 }
1586 
1587 /* Check whether a given aggregate content is clobbered by certain element in
1588    a linked list of ipa_known_agg_contents_list.  */
1589 
1590 static inline bool
clobber_by_agg_contents_list_p(struct ipa_known_agg_contents_list * list,struct ipa_known_agg_contents_list * item)1591 clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1592 				struct ipa_known_agg_contents_list *item)
1593 {
1594   for (; list; list = list->next)
1595     {
1596       if (list->offset >= item->offset)
1597 	return list->offset < item->offset + item->size;
1598 
1599       if (list->offset + list->size > item->offset)
1600 	return true;
1601     }
1602 
1603   return false;
1604 }
1605 
1606 /* Build aggregate jump function from LIST, assuming there are exactly
1607    VALUE_COUNT entries there and that offset of the passed argument
1608    is ARG_OFFSET and store it into JFUNC.  */
1609 
1610 static void
build_agg_jump_func_from_list(struct ipa_known_agg_contents_list * list,int value_count,HOST_WIDE_INT arg_offset,struct ipa_jump_func * jfunc)1611 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1612 			       int value_count, HOST_WIDE_INT arg_offset,
1613 			       struct ipa_jump_func *jfunc)
1614 {
1615   vec_alloc (jfunc->agg.items, value_count);
1616   for (; list; list = list->next)
1617     {
1618       struct ipa_agg_jf_item item;
1619       tree operand = list->value.pass_through.operand;
1620 
1621       if (list->value.pass_through.formal_id >= 0)
1622 	{
1623 	  /* Content value is derived from some formal paramerter.  */
1624 	  if (list->value.offset >= 0)
1625 	    item.jftype = IPA_JF_LOAD_AGG;
1626 	  else
1627 	    item.jftype = IPA_JF_PASS_THROUGH;
1628 
1629 	  item.value.load_agg = list->value;
1630 	  if (operand)
1631 	    item.value.pass_through.operand
1632 	      = unshare_expr_without_location (operand);
1633 	}
1634       else if (operand)
1635 	{
1636 	  /* Content value is known constant.  */
1637 	  item.jftype = IPA_JF_CONST;
1638 	  item.value.constant = unshare_expr_without_location (operand);
1639 	}
1640       else
1641 	continue;
1642 
1643       item.type = list->type;
1644       gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);
1645 
1646       item.offset = list->offset - arg_offset;
1647       gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1648 
1649       jfunc->agg.items->quick_push (item);
1650     }
1651 }
1652 
1653 /* Given an assignment statement STMT, try to collect information into
1654    AGG_VALUE that will be used to construct jump function for RHS of the
1655    assignment, from which content value of an aggregate part comes.
1656 
1657    Besides constant and simple pass-through jump functions, also try to
1658    identify whether it matches the following pattern that can be described by
1659    a load-value-from-aggregate jump function, which is a derivative of simple
1660    pass-through jump function.
1661 
1662      foo (int *p)
1663      {
1664        ...
1665 
1666        *(q_5 + 4) = *(p_3(D) + 28) op 1;
1667        bar (q_5);
1668      }
1669 
1670    Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
1671    constant, simple pass-through and load-vale-from-aggregate. If value
1672    is constant, it will be kept in field OPERAND, and field FORMAL_ID is
1673    set to -1. For simple pass-through and load-value-from-aggregate, field
1674    FORMAL_ID specifies the related formal parameter index, and field
1675    OFFSET can be used to distinguish them, -1 means simple pass-through,
1676    otherwise means load-value-from-aggregate.  */
1677 
1678 static void
analyze_agg_content_value(struct ipa_func_body_info * fbi,struct ipa_load_agg_data * agg_value,gimple * stmt)1679 analyze_agg_content_value (struct ipa_func_body_info *fbi,
1680 			   struct ipa_load_agg_data *agg_value,
1681 			   gimple *stmt)
1682 {
1683   tree lhs = gimple_assign_lhs (stmt);
1684   tree rhs1 = gimple_assign_rhs1 (stmt);
1685   enum tree_code code;
1686   int index = -1;
1687 
1688   /* Initialize jump function data for the aggregate part.  */
1689   memset (agg_value, 0, sizeof (*agg_value));
1690   agg_value->pass_through.operation = NOP_EXPR;
1691   agg_value->pass_through.formal_id = -1;
1692   agg_value->offset = -1;
1693 
1694   if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))  /* TODO: Support aggregate type.  */
1695       || TREE_THIS_VOLATILE (lhs)
1696       || TREE_CODE (lhs) == BIT_FIELD_REF
1697       || contains_bitfld_component_ref_p (lhs))
1698     return;
1699 
1700   /* Skip SSA copies.  */
1701   while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
1702     {
1703       if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
1704 	break;
1705 
1706       stmt = SSA_NAME_DEF_STMT (rhs1);
1707       if (!is_gimple_assign (stmt))
1708 	return;
1709 
1710       rhs1 = gimple_assign_rhs1 (stmt);
1711     }
1712 
1713   code = gimple_assign_rhs_code (stmt);
1714   switch (gimple_assign_rhs_class (stmt))
1715     {
1716     case GIMPLE_SINGLE_RHS:
1717       if (is_gimple_ip_invariant (rhs1))
1718 	{
1719 	  agg_value->pass_through.operand = rhs1;
1720 	  return;
1721 	}
1722       code = NOP_EXPR;
1723       break;
1724 
1725     case GIMPLE_UNARY_RHS:
1726       /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
1727 	 (truth_not_expr is example), GIMPLE_BINARY_RHS does not imply
1728 	 tcc_binary, this subtleness is somewhat misleading.
1729 
1730 	 Since tcc_unary is widely used in IPA-CP code to check an operation
1731 	 with one operand, here we only allow tc_unary operation to avoid
1732 	 possible problem.  Then we can use (opclass == tc_unary) or not to
1733 	 distinguish unary and binary.  */
1734       if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
1735 	return;
1736 
1737       rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
1738       break;
1739 
1740     case GIMPLE_BINARY_RHS:
1741       {
1742 	gimple *rhs1_stmt = stmt;
1743 	gimple *rhs2_stmt = stmt;
1744 	tree rhs2 = gimple_assign_rhs2 (stmt);
1745 
1746 	rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
1747 	rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);
1748 
1749 	if (is_gimple_ip_invariant (rhs2))
1750 	  {
1751 	    agg_value->pass_through.operand = rhs2;
1752 	    stmt = rhs1_stmt;
1753 	  }
1754 	else if (is_gimple_ip_invariant (rhs1))
1755 	  {
1756 	    if (TREE_CODE_CLASS (code) == tcc_comparison)
1757 	      code = swap_tree_comparison (code);
1758 	    else if (!commutative_tree_code (code))
1759 	      return;
1760 
1761 	    agg_value->pass_through.operand = rhs1;
1762 	    stmt = rhs2_stmt;
1763 	    rhs1 = rhs2;
1764 	  }
1765 	else
1766 	  return;
1767 
1768 	if (TREE_CODE_CLASS (code) != tcc_comparison
1769 	    && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs1)))
1770 	  return;
1771       }
1772       break;
1773 
1774     default:
1775       return;
1776   }
1777 
1778   if (TREE_CODE (rhs1) != SSA_NAME)
1779     index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
1780 					       &agg_value->offset,
1781 					       &agg_value->by_ref);
1782   else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
1783     index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));
1784 
1785   if (index >= 0)
1786     {
1787       if (agg_value->offset >= 0)
1788 	agg_value->type = TREE_TYPE (rhs1);
1789       agg_value->pass_through.formal_id = index;
1790       agg_value->pass_through.operation = code;
1791     }
1792   else
1793     agg_value->pass_through.operand = NULL_TREE;
1794 }
1795 
1796 /* If STMT is a memory store to the object whose address is BASE, extract
1797    information (offset, size, and value) into CONTENT, and return true,
1798    otherwise we conservatively assume the whole object is modified with
1799    unknown content, and return false.  CHECK_REF means that access to object
1800    is expected to be in form of MEM_REF expression.  */
1801 
1802 static bool
extract_mem_content(struct ipa_func_body_info * fbi,gimple * stmt,tree base,bool check_ref,struct ipa_known_agg_contents_list * content)1803 extract_mem_content (struct ipa_func_body_info *fbi,
1804 		     gimple *stmt, tree base, bool check_ref,
1805 		     struct ipa_known_agg_contents_list *content)
1806 {
1807   HOST_WIDE_INT lhs_offset, lhs_size;
1808   bool reverse;
1809 
1810   if (!is_gimple_assign (stmt))
1811     return false;
1812 
1813   tree lhs = gimple_assign_lhs (stmt);
1814   tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
1815 					       &reverse);
1816   if (!lhs_base)
1817     return false;
1818 
1819   if (check_ref)
1820     {
1821       if (TREE_CODE (lhs_base) != MEM_REF
1822 	  || TREE_OPERAND (lhs_base, 0) != base
1823 	  || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1824 	return false;
1825     }
1826   else if (lhs_base != base)
1827     return false;
1828 
1829   content->offset = lhs_offset;
1830   content->size = lhs_size;
1831   content->type = TREE_TYPE (lhs);
1832   content->next = NULL;
1833 
1834   analyze_agg_content_value (fbi, &content->value, stmt);
1835   return true;
1836 }
1837 
1838 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1839    in ARG is filled in constants or values that are derived from caller's
1840    formal parameter in the way described by some kinds of jump functions.  FBI
1841    is the context of the caller function for interprocedural analysis.  ARG can
1842    either be an aggregate expression or a pointer to an aggregate.  ARG_TYPE is
1843    the type of the aggregate, JFUNC is the jump function for the aggregate.  */
1844 
1845 static void
determine_known_aggregate_parts(struct ipa_func_body_info * fbi,gcall * call,tree arg,tree arg_type,struct ipa_jump_func * jfunc)1846 determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
1847 				 gcall *call, tree arg,
1848 				 tree arg_type,
1849 				 struct ipa_jump_func *jfunc)
1850 {
1851   struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
1852   bitmap visited = NULL;
1853   int item_count = 0, value_count = 0;
1854   HOST_WIDE_INT arg_offset, arg_size;
1855   tree arg_base;
1856   bool check_ref, by_ref;
1857   ao_ref r;
1858   int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);
1859 
1860   if (max_agg_items == 0)
1861     return;
1862 
1863   /* The function operates in three stages.  First, we prepare check_ref, r,
1864      arg_base and arg_offset based on what is actually passed as an actual
1865      argument.  */
1866 
1867   if (POINTER_TYPE_P (arg_type))
1868     {
1869       by_ref = true;
1870       if (TREE_CODE (arg) == SSA_NAME)
1871 	{
1872 	  tree type_size;
1873           if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
1874 	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
1875             return;
1876 	  check_ref = true;
1877 	  arg_base = arg;
1878 	  arg_offset = 0;
1879 	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1880 	  arg_size = tree_to_uhwi (type_size);
1881 	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1882 	}
1883       else if (TREE_CODE (arg) == ADDR_EXPR)
1884 	{
1885 	  bool reverse;
1886 
1887 	  arg = TREE_OPERAND (arg, 0);
1888 	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1889 						  &arg_size, &reverse);
1890 	  if (!arg_base)
1891 	    return;
1892 	  if (DECL_P (arg_base))
1893 	    {
1894 	      check_ref = false;
1895 	      ao_ref_init (&r, arg_base);
1896 	    }
1897 	  else
1898 	    return;
1899 	}
1900       else
1901 	return;
1902     }
1903   else
1904     {
1905       bool reverse;
1906 
1907       gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1908 
1909       by_ref = false;
1910       check_ref = false;
1911       arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1912 					      &arg_size, &reverse);
1913       if (!arg_base)
1914 	return;
1915 
1916       ao_ref_init (&r, arg);
1917     }
1918 
1919   /* Second stage traverses virtual SSA web backwards starting from the call
1920      statement, only looks at individual dominating virtual operand (its
1921      definition dominates the call), as long as it is confident that content
1922      of the aggregate is affected by definition of the virtual operand, it
1923      builds a sorted linked list of ipa_agg_jf_list describing that.  */
1924 
1925   for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
1926     {
1927       gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);
1928 
1929       if (gimple_code (stmt) == GIMPLE_PHI)
1930 	{
1931 	  dom_vuse = get_continuation_for_phi (stmt, &r, true,
1932 					       fbi->aa_walk_budget,
1933 					       &visited, false, NULL, NULL);
1934 	  continue;
1935 	}
1936 
1937       if (stmt_may_clobber_ref_p_1 (stmt, &r))
1938 	{
1939 	  struct ipa_known_agg_contents_list *content
1940 			= XALLOCA (struct ipa_known_agg_contents_list);
1941 
1942 	  if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
1943 	    break;
1944 
1945 	  /* Now we get a dominating virtual operand, and need to check
1946 	     whether its value is clobbered any other dominating one.  */
1947 	  if ((content->value.pass_through.formal_id >= 0
1948 	       || content->value.pass_through.operand)
1949 	      && !clobber_by_agg_contents_list_p (all_list, content))
1950 	    {
1951 	      struct ipa_known_agg_contents_list *copy
1952 			= XALLOCA (struct ipa_known_agg_contents_list);
1953 
1954 	      /* Add to the list consisting of only dominating virtual
1955 		 operands, whose definitions can finally reach the call.  */
1956 	      add_to_agg_contents_list (&list, (*copy = *content, copy));
1957 
1958 	      if (++value_count == max_agg_items)
1959 		break;
1960 	    }
1961 
1962 	  /* Add to the list consisting of all dominating virtual operands.  */
1963 	  add_to_agg_contents_list (&all_list, content);
1964 
1965 	  if (++item_count == 2 * max_agg_items)
1966 	    break;
1967 	}
1968       dom_vuse = gimple_vuse (stmt);
1969    }
1970 
1971   if (visited)
1972     BITMAP_FREE (visited);
1973 
1974   /* Third stage just goes over the list and creates an appropriate vector of
1975      ipa_agg_jf_item structures out of it, of course only if there are
1976      any meaningful items to begin with.  */
1977 
1978   if (value_count)
1979     {
1980       jfunc->agg.by_ref = by_ref;
1981       build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
1982     }
1983 }
1984 
1985 
1986 /* Return the Ith param type of callee associated with call graph
1987    edge E.  */
1988 
1989 tree
ipa_get_callee_param_type(struct cgraph_edge * e,int i)1990 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1991 {
1992   int n;
1993   tree type = (e->callee
1994 	       ? TREE_TYPE (e->callee->decl)
1995 	       : gimple_call_fntype (e->call_stmt));
1996   tree t = TYPE_ARG_TYPES (type);
1997 
1998   for (n = 0; n < i; n++)
1999     {
2000       if (!t)
2001         break;
2002       t = TREE_CHAIN (t);
2003     }
2004   if (t)
2005     return TREE_VALUE (t);
2006   if (!e->callee)
2007     return NULL;
2008   t = DECL_ARGUMENTS (e->callee->decl);
2009   for (n = 0; n < i; n++)
2010     {
2011       if (!t)
2012 	return NULL;
2013       t = TREE_CHAIN (t);
2014     }
2015   if (t)
2016     return TREE_TYPE (t);
2017   return NULL;
2018 }
2019 
2020 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
2021    allocated structure or a previously existing one shared with other jump
2022    functions and/or transformation summaries.  */
2023 
2024 ipa_bits *
ipa_get_ipa_bits_for_value(const widest_int & value,const widest_int & mask)2025 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
2026 {
2027   ipa_bits tmp;
2028   tmp.value = value;
2029   tmp.mask = mask;
2030 
2031   ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
2032   if (*slot)
2033     return *slot;
2034 
2035   ipa_bits *res = ggc_alloc<ipa_bits> ();
2036   res->value = value;
2037   res->mask = mask;
2038   *slot = res;
2039 
2040   return res;
2041 }
2042 
2043 /* Assign to JF a pointer to ipa_bits structure with VALUE and MASK.  Use hash
2044    table in order to avoid creating multiple same ipa_bits structures.  */
2045 
2046 static void
ipa_set_jfunc_bits(ipa_jump_func * jf,const widest_int & value,const widest_int & mask)2047 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
2048 		    const widest_int &mask)
2049 {
2050   jf->bits = ipa_get_ipa_bits_for_value (value, mask);
2051 }
2052 
2053 /* Return a pointer to a value_range just like *TMP, but either find it in
2054    ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */
2055 
2056 static value_range *
ipa_get_value_range(value_range * tmp)2057 ipa_get_value_range (value_range *tmp)
2058 {
2059   value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
2060   if (*slot)
2061     return *slot;
2062 
2063   value_range *vr = ggc_alloc<value_range> ();
2064   *vr = *tmp;
2065   *slot = vr;
2066 
2067   return vr;
2068 }
2069 
2070 /* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
2071    equiv set. Use hash table in order to avoid creating multiple same copies of
2072    value_ranges.  */
2073 
2074 static value_range *
ipa_get_value_range(enum value_range_kind kind,tree min,tree max)2075 ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
2076 {
2077   value_range tmp (min, max, kind);
2078   return ipa_get_value_range (&tmp);
2079 }
2080 
2081 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
2082    a NULL equiv bitmap.  Use hash table in order to avoid creating multiple
2083    same value_range structures.  */
2084 
2085 static void
ipa_set_jfunc_vr(ipa_jump_func * jf,enum value_range_kind type,tree min,tree max)2086 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
2087 		  tree min, tree max)
2088 {
2089   jf->m_vr = ipa_get_value_range (type, min, max);
2090 }
2091 
2092 /* Assign to JF a pointer to a value_range just like TMP but either fetch a
2093    copy from ipa_vr_hash_table or allocate a new on in GC memory.  */
2094 
2095 static void
ipa_set_jfunc_vr(ipa_jump_func * jf,value_range * tmp)2096 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
2097 {
2098   jf->m_vr = ipa_get_value_range (tmp);
2099 }
2100 
2101 /* Compute jump function for all arguments of callsite CS and insert the
2102    information in the jump_functions array in the ipa_edge_args corresponding
2103    to this callsite.  */
2104 
2105 static void
ipa_compute_jump_functions_for_edge(struct ipa_func_body_info * fbi,struct cgraph_edge * cs)2106 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
2107 				     struct cgraph_edge *cs)
2108 {
2109   class ipa_node_params *info = IPA_NODE_REF (cs->caller);
2110   class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (cs);
2111   gcall *call = cs->call_stmt;
2112   int n, arg_num = gimple_call_num_args (call);
2113   bool useful_context = false;
2114 
2115   if (arg_num == 0 || args->jump_functions)
2116     return;
2117   vec_safe_grow_cleared (args->jump_functions, arg_num);
2118   if (flag_devirtualize)
2119     vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
2120 
2121   if (gimple_call_internal_p (call))
2122     return;
2123   if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
2124     return;
2125 
2126   for (n = 0; n < arg_num; n++)
2127     {
2128       struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
2129       tree arg = gimple_call_arg (call, n);
2130       tree param_type = ipa_get_callee_param_type (cs, n);
2131       if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
2132 	{
2133 	  tree instance;
2134 	  class ipa_polymorphic_call_context context (cs->caller->decl,
2135 						       arg, cs->call_stmt,
2136 						       &instance);
2137 	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
2138 				    &fbi->aa_walk_budget);
2139 	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
2140 	  if (!context.useless_p ())
2141 	    useful_context = true;
2142 	}
2143 
2144       if (POINTER_TYPE_P (TREE_TYPE (arg)))
2145 	{
2146 	  bool addr_nonzero = false;
2147 	  bool strict_overflow = false;
2148 
2149 	  if (TREE_CODE (arg) == SSA_NAME
2150 	      && param_type
2151 	      && get_ptr_nonnull (arg))
2152 	    addr_nonzero = true;
2153 	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
2154 	    addr_nonzero = true;
2155 
2156 	  if (addr_nonzero)
2157 	    {
2158 	      tree z = build_int_cst (TREE_TYPE (arg), 0);
2159 	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
2160 	    }
2161 	  else
2162 	    gcc_assert (!jfunc->m_vr);
2163 	}
2164       else
2165 	{
2166 	  wide_int min, max;
2167 	  value_range_kind kind;
2168 	  if (TREE_CODE (arg) == SSA_NAME
2169 	      && param_type
2170 	      && (kind = get_range_info (arg, &min, &max))
2171 	      && (kind == VR_RANGE || kind == VR_ANTI_RANGE))
2172 	    {
2173 	      value_range resvr;
2174 	      value_range tmpvr (wide_int_to_tree (TREE_TYPE (arg), min),
2175 				 wide_int_to_tree (TREE_TYPE (arg), max),
2176 				 kind);
2177 	      range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
2178 				     &tmpvr, TREE_TYPE (arg));
2179 	      if (!resvr.undefined_p () && !resvr.varying_p ())
2180 		ipa_set_jfunc_vr (jfunc, &resvr);
2181 	      else
2182 		gcc_assert (!jfunc->m_vr);
2183 	    }
2184 	  else
2185 	    gcc_assert (!jfunc->m_vr);
2186 	}
2187 
2188       if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
2189 	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
2190 	{
2191 	  if (TREE_CODE (arg) == SSA_NAME)
2192 	    ipa_set_jfunc_bits (jfunc, 0,
2193 				widest_int::from (get_nonzero_bits (arg),
2194 						  TYPE_SIGN (TREE_TYPE (arg))));
2195 	  else
2196 	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
2197 	}
2198       else if (POINTER_TYPE_P (TREE_TYPE (arg)))
2199 	{
2200 	  unsigned HOST_WIDE_INT bitpos;
2201 	  unsigned align;
2202 
2203 	  get_pointer_alignment_1 (arg, &align, &bitpos);
2204 	  widest_int mask = wi::bit_and_not
2205 	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
2206 	     align / BITS_PER_UNIT - 1);
2207 	  widest_int value = bitpos / BITS_PER_UNIT;
2208 	  ipa_set_jfunc_bits (jfunc, value, mask);
2209 	}
2210       else
2211 	gcc_assert (!jfunc->bits);
2212 
2213       if (is_gimple_ip_invariant (arg)
2214 	  || (VAR_P (arg)
2215 	      && is_global_var (arg)
2216 	      && TREE_READONLY (arg)))
2217 	ipa_set_jf_constant (jfunc, arg, cs);
2218       else if (!is_gimple_reg_type (TREE_TYPE (arg))
2219 	       && TREE_CODE (arg) == PARM_DECL)
2220 	{
2221 	  int index = ipa_get_param_decl_index (info, arg);
2222 
2223 	  gcc_assert (index >=0);
2224 	  /* Aggregate passed by value, check for pass-through, otherwise we
2225 	     will attempt to fill in aggregate contents later in this
2226 	     for cycle.  */
2227 	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
2228 	    {
2229 	      ipa_set_jf_simple_pass_through (jfunc, index, false);
2230 	      continue;
2231 	    }
2232 	}
2233       else if (TREE_CODE (arg) == SSA_NAME)
2234 	{
2235 	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
2236 	    {
2237 	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
2238 	      if (index >= 0)
2239 		{
2240 		  bool agg_p;
2241 		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
2242 		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
2243 		}
2244 	    }
2245 	  else
2246 	    {
2247 	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
2248 	      if (is_gimple_assign (stmt))
2249 		compute_complex_assign_jump_func (fbi, info, jfunc,
2250 						  call, stmt, arg, param_type);
2251 	      else if (gimple_code (stmt) == GIMPLE_PHI)
2252 		compute_complex_ancestor_jump_func (fbi, info, jfunc,
2253 						    call,
2254 						    as_a <gphi *> (stmt));
2255 	    }
2256 	}
2257 
2258       /* If ARG is pointer, we cannot use its type to determine the type of aggregate
2259 	 passed (because type conversions are ignored in gimple).  Usually we can
2260 	 safely get type from function declaration, but in case of K&R prototypes or
2261 	 variadic functions we can try our luck with type of the pointer passed.
2262 	 TODO: Since we look for actual initialization of the memory object, we may better
2263 	 work out the type based on the memory stores we find.  */
2264       if (!param_type)
2265 	param_type = TREE_TYPE (arg);
2266 
2267       if ((jfunc->type != IPA_JF_PASS_THROUGH
2268 	      || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2269 	  && (jfunc->type != IPA_JF_ANCESTOR
2270 	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2271 	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2272 	      || POINTER_TYPE_P (param_type)))
2273 	determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
2274     }
2275   if (!useful_context)
2276     vec_free (args->polymorphic_call_contexts);
2277 }
2278 
2279 /* Compute jump functions for all edges - both direct and indirect - outgoing
2280    from BB.  */
2281 
2282 static void
ipa_compute_jump_functions_for_bb(struct ipa_func_body_info * fbi,basic_block bb)2283 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2284 {
2285   struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2286   int i;
2287   struct cgraph_edge *cs;
2288 
2289   FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2290     {
2291       struct cgraph_node *callee = cs->callee;
2292 
2293       if (callee)
2294 	{
2295 	  callee = callee->ultimate_alias_target ();
2296 	  /* We do not need to bother analyzing calls to unknown functions
2297 	     unless they may become known during lto/whopr.  */
2298 	  if (!callee->definition && !flag_lto)
2299 	    continue;
2300 	}
2301       ipa_compute_jump_functions_for_edge (fbi, cs);
2302     }
2303 }
2304 
2305 /* If STMT looks like a statement loading a value from a member pointer formal
2306    parameter, return that parameter and store the offset of the field to
2307    *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
2308    might be clobbered).  If USE_DELTA, then we look for a use of the delta
2309    field rather than the pfn.  */
2310 
2311 static tree
ipa_get_stmt_member_ptr_load_param(gimple * stmt,bool use_delta,HOST_WIDE_INT * offset_p)2312 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2313 				    HOST_WIDE_INT *offset_p)
2314 {
2315   tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2316 
2317   if (!gimple_assign_single_p (stmt))
2318     return NULL_TREE;
2319 
2320   rhs = gimple_assign_rhs1 (stmt);
2321   if (TREE_CODE (rhs) == COMPONENT_REF)
2322     {
2323       ref_field = TREE_OPERAND (rhs, 1);
2324       rhs = TREE_OPERAND (rhs, 0);
2325     }
2326   else
2327     ref_field = NULL_TREE;
2328   if (TREE_CODE (rhs) != MEM_REF)
2329     return NULL_TREE;
2330   rec = TREE_OPERAND (rhs, 0);
2331   if (TREE_CODE (rec) != ADDR_EXPR)
2332     return NULL_TREE;
2333   rec = TREE_OPERAND (rec, 0);
2334   if (TREE_CODE (rec) != PARM_DECL
2335       || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2336     return NULL_TREE;
2337   ref_offset = TREE_OPERAND (rhs, 1);
2338 
2339   if (use_delta)
2340     fld = delta_field;
2341   else
2342     fld = ptr_field;
2343   if (offset_p)
2344     *offset_p = int_bit_position (fld);
2345 
2346   if (ref_field)
2347     {
2348       if (integer_nonzerop (ref_offset))
2349 	return NULL_TREE;
2350       return ref_field == fld ? rec : NULL_TREE;
2351     }
2352   else
2353     return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2354       : NULL_TREE;
2355 }
2356 
2357 /* Returns true iff T is an SSA_NAME defined by a statement.  */
2358 
2359 static bool
ipa_is_ssa_with_stmt_def(tree t)2360 ipa_is_ssa_with_stmt_def (tree t)
2361 {
2362   if (TREE_CODE (t) == SSA_NAME
2363       && !SSA_NAME_IS_DEFAULT_DEF (t))
2364     return true;
2365   else
2366     return false;
2367 }
2368 
2369 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2370    call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
2371    indirect call graph edge.
2372    If POLYMORPHIC is true record is as a destination of polymorphic call.  */
2373 
2374 static struct cgraph_edge *
ipa_note_param_call(struct cgraph_node * node,int param_index,gcall * stmt,bool polymorphic)2375 ipa_note_param_call (struct cgraph_node *node, int param_index,
2376 		     gcall *stmt, bool polymorphic)
2377 {
2378   struct cgraph_edge *cs;
2379 
2380   cs = node->get_edge (stmt);
2381   cs->indirect_info->param_index = param_index;
2382   cs->indirect_info->agg_contents = 0;
2383   cs->indirect_info->member_ptr = 0;
2384   cs->indirect_info->guaranteed_unmodified = 0;
2385   ipa_set_param_used_by_indirect_call (IPA_NODE_REF (node),
2386 					  param_index, true);
2387   if (cs->indirect_info->polymorphic || polymorphic)
2388     ipa_set_param_used_by_polymorphic_call
2389 	    (IPA_NODE_REF (node), param_index, true);
2390   return cs;
2391 }
2392 
2393 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2394    (described by INFO).  PARMS_AINFO is a pointer to a vector containing
2395    intermediate information about each formal parameter.  Currently it checks
2396    whether the call calls a pointer that is a formal parameter and if so, the
2397    parameter is marked with the called flag and an indirect call graph edge
2398    describing the call is created.  This is very simple for ordinary pointers
2399    represented in SSA but not-so-nice when it comes to member pointers.  The
2400    ugly part of this function does nothing more than trying to match the
2401    pattern of such a call.  An example of such a pattern is the gimple dump
2402    below, the call is on the last line:
2403 
2404      <bb 2>:
2405        f$__delta_5 = f.__delta;
2406        f$__pfn_24 = f.__pfn;
2407 
2408    or
2409      <bb 2>:
2410        f$__delta_5 = MEM[(struct  *)&f];
2411        f$__pfn_24 = MEM[(struct  *)&f + 4B];
2412 
2413    and a few lines below:
2414 
2415      <bb 5>
2416        D.2496_3 = (int) f$__pfn_24;
2417        D.2497_4 = D.2496_3 & 1;
2418        if (D.2497_4 != 0)
2419          goto <bb 3>;
2420        else
2421          goto <bb 4>;
2422 
2423      <bb 6>:
2424        D.2500_7 = (unsigned int) f$__delta_5;
2425        D.2501_8 = &S + D.2500_7;
2426        D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2427        D.2503_10 = *D.2502_9;
2428        D.2504_12 = f$__pfn_24 + -1;
2429        D.2505_13 = (unsigned int) D.2504_12;
2430        D.2506_14 = D.2503_10 + D.2505_13;
2431        D.2507_15 = *D.2506_14;
2432        iftmp.11_16 = (String:: *) D.2507_15;
2433 
2434      <bb 7>:
2435        # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2436        D.2500_19 = (unsigned int) f$__delta_5;
2437        D.2508_20 = &S + D.2500_19;
2438        D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2439 
2440    Such patterns are results of simple calls to a member pointer:
2441 
2442      int doprinting (int (MyString::* f)(int) const)
2443      {
2444        MyString S ("somestring");
2445 
2446        return (S.*f)(4);
2447      }
2448 
2449    Moreover, the function also looks for called pointers loaded from aggregates
2450    passed by value or reference.  */
2451 
2452 static void
ipa_analyze_indirect_call_uses(struct ipa_func_body_info * fbi,gcall * call,tree target)2453 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2454 				tree target)
2455 {
2456   class ipa_node_params *info = fbi->info;
2457   HOST_WIDE_INT offset;
2458   bool by_ref;
2459 
2460   if (SSA_NAME_IS_DEFAULT_DEF (target))
2461     {
2462       tree var = SSA_NAME_VAR (target);
2463       int index = ipa_get_param_decl_index (info, var);
2464       if (index >= 0)
2465 	ipa_note_param_call (fbi->node, index, call, false);
2466       return;
2467     }
2468 
2469   int index;
2470   gimple *def = SSA_NAME_DEF_STMT (target);
2471   bool guaranteed_unmodified;
2472   if (gimple_assign_single_p (def)
2473       && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2474 				 gimple_assign_rhs1 (def), &index, &offset,
2475 				 NULL, &by_ref, &guaranteed_unmodified))
2476     {
2477       struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2478 	 					    call, false);
2479       cs->indirect_info->offset = offset;
2480       cs->indirect_info->agg_contents = 1;
2481       cs->indirect_info->by_ref = by_ref;
2482       cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2483       return;
2484     }
2485 
2486   /* Now we need to try to match the complex pattern of calling a member
2487      pointer. */
2488   if (gimple_code (def) != GIMPLE_PHI
2489       || gimple_phi_num_args (def) != 2
2490       || !POINTER_TYPE_P (TREE_TYPE (target))
2491       || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2492     return;
2493 
2494   /* First, we need to check whether one of these is a load from a member
2495      pointer that is a parameter to this function. */
2496   tree n1 = PHI_ARG_DEF (def, 0);
2497   tree n2 = PHI_ARG_DEF (def, 1);
2498   if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2499     return;
2500   gimple *d1 = SSA_NAME_DEF_STMT (n1);
2501   gimple *d2 = SSA_NAME_DEF_STMT (n2);
2502 
2503   tree rec;
2504   basic_block bb, virt_bb;
2505   basic_block join = gimple_bb (def);
2506   if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2507     {
2508       if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2509 	return;
2510 
2511       bb = EDGE_PRED (join, 0)->src;
2512       virt_bb = gimple_bb (d2);
2513     }
2514   else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2515     {
2516       bb = EDGE_PRED (join, 1)->src;
2517       virt_bb = gimple_bb (d1);
2518     }
2519   else
2520     return;
2521 
2522   /* Second, we need to check that the basic blocks are laid out in the way
2523      corresponding to the pattern. */
2524 
2525   if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2526       || single_pred (virt_bb) != bb
2527       || single_succ (virt_bb) != join)
2528     return;
2529 
2530   /* Third, let's see that the branching is done depending on the least
2531      significant bit of the pfn. */
2532 
2533   gimple *branch = last_stmt (bb);
2534   if (!branch || gimple_code (branch) != GIMPLE_COND)
2535     return;
2536 
2537   if ((gimple_cond_code (branch) != NE_EXPR
2538        && gimple_cond_code (branch) != EQ_EXPR)
2539       || !integer_zerop (gimple_cond_rhs (branch)))
2540     return;
2541 
2542   tree cond = gimple_cond_lhs (branch);
2543   if (!ipa_is_ssa_with_stmt_def (cond))
2544     return;
2545 
2546   def = SSA_NAME_DEF_STMT (cond);
2547   if (!is_gimple_assign (def)
2548       || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2549       || !integer_onep (gimple_assign_rhs2 (def)))
2550     return;
2551 
2552   cond = gimple_assign_rhs1 (def);
2553   if (!ipa_is_ssa_with_stmt_def (cond))
2554     return;
2555 
2556   def = SSA_NAME_DEF_STMT (cond);
2557 
2558   if (is_gimple_assign (def)
2559       && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2560     {
2561       cond = gimple_assign_rhs1 (def);
2562       if (!ipa_is_ssa_with_stmt_def (cond))
2563 	return;
2564       def = SSA_NAME_DEF_STMT (cond);
2565     }
2566 
2567   tree rec2;
2568   rec2 = ipa_get_stmt_member_ptr_load_param (def,
2569 					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
2570 					      == ptrmemfunc_vbit_in_delta),
2571 					     NULL);
2572   if (rec != rec2)
2573     return;
2574 
2575   index = ipa_get_param_decl_index (info, rec);
2576   if (index >= 0
2577       && parm_preserved_before_stmt_p (fbi, index, call, rec))
2578     {
2579       struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2580 	 					    call, false);
2581       cs->indirect_info->offset = offset;
2582       cs->indirect_info->agg_contents = 1;
2583       cs->indirect_info->member_ptr = 1;
2584       cs->indirect_info->guaranteed_unmodified = 1;
2585     }
2586 
2587   return;
2588 }
2589 
2590 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2591    object referenced in the expression is a formal parameter of the caller
2592    FBI->node (described by FBI->info), create a call note for the
2593    statement.  */
2594 
2595 static void
ipa_analyze_virtual_call_uses(struct ipa_func_body_info * fbi,gcall * call,tree target)2596 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2597 			       gcall *call, tree target)
2598 {
2599   tree obj = OBJ_TYPE_REF_OBJECT (target);
2600   int index;
2601   HOST_WIDE_INT anc_offset;
2602 
2603   if (!flag_devirtualize)
2604     return;
2605 
2606   if (TREE_CODE (obj) != SSA_NAME)
2607     return;
2608 
2609   class ipa_node_params *info = fbi->info;
2610   if (SSA_NAME_IS_DEFAULT_DEF (obj))
2611     {
2612       if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2613 	return;
2614 
2615       anc_offset = 0;
2616       index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2617       gcc_assert (index >= 0);
2618       if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2619 				  call))
2620 	return;
2621     }
2622   else
2623     {
2624       gimple *stmt = SSA_NAME_DEF_STMT (obj);
2625       tree expr;
2626 
2627       expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2628       if (!expr)
2629 	return;
2630       index = ipa_get_param_decl_index (info,
2631 					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2632       gcc_assert (index >= 0);
2633       if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2634 			      call, anc_offset))
2635 	return;
2636     }
2637 
2638   struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2639      						call, true);
2640   class cgraph_indirect_call_info *ii = cs->indirect_info;
2641   ii->offset = anc_offset;
2642   ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2643   ii->otr_type = obj_type_ref_class (target);
2644   ii->polymorphic = 1;
2645 }
2646 
2647 /* Analyze a call statement CALL whether and how it utilizes formal parameters
2648    of the caller (described by INFO).  PARMS_AINFO is a pointer to a vector
2649    containing intermediate information about each formal parameter.  */
2650 
2651 static void
ipa_analyze_call_uses(struct ipa_func_body_info * fbi,gcall * call)2652 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2653 {
2654   tree target = gimple_call_fn (call);
2655 
2656   if (!target
2657       || (TREE_CODE (target) != SSA_NAME
2658           && !virtual_method_call_p (target)))
2659     return;
2660 
2661   struct cgraph_edge *cs = fbi->node->get_edge (call);
2662   /* If we previously turned the call into a direct call, there is
2663      no need to analyze.  */
2664   if (cs && !cs->indirect_unknown_callee)
2665     return;
2666 
2667   if (cs->indirect_info->polymorphic && flag_devirtualize)
2668     {
2669       tree instance;
2670       tree target = gimple_call_fn (call);
2671       ipa_polymorphic_call_context context (current_function_decl,
2672 					    target, call, &instance);
2673 
2674       gcc_checking_assert (cs->indirect_info->otr_type
2675 			   == obj_type_ref_class (target));
2676       gcc_checking_assert (cs->indirect_info->otr_token
2677 			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2678 
2679       cs->indirect_info->vptr_changed
2680 	= !context.get_dynamic_type (instance,
2681 				     OBJ_TYPE_REF_OBJECT (target),
2682 				     obj_type_ref_class (target), call,
2683 				     &fbi->aa_walk_budget);
2684       cs->indirect_info->context = context;
2685     }
2686 
2687   if (TREE_CODE (target) == SSA_NAME)
2688     ipa_analyze_indirect_call_uses (fbi, call, target);
2689   else if (virtual_method_call_p (target))
2690     ipa_analyze_virtual_call_uses (fbi, call, target);
2691 }
2692 
2693 
2694 /* Analyze the call statement STMT with respect to formal parameters (described
2695    in INFO) of caller given by FBI->NODE.  Currently it only checks whether
2696    formal parameters are called.  */
2697 
2698 static void
ipa_analyze_stmt_uses(struct ipa_func_body_info * fbi,gimple * stmt)2699 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2700 {
2701   if (is_gimple_call (stmt))
2702     ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2703 }
2704 
2705 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2706    If OP is a parameter declaration, mark it as used in the info structure
2707    passed in DATA.  */
2708 
2709 static bool
visit_ref_for_mod_analysis(gimple *,tree op,tree,void * data)2710 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2711 {
2712   class ipa_node_params *info = (class ipa_node_params *) data;
2713 
2714   op = get_base_address (op);
2715   if (op
2716       && TREE_CODE (op) == PARM_DECL)
2717     {
2718       int index = ipa_get_param_decl_index (info, op);
2719       gcc_assert (index >= 0);
2720       ipa_set_param_used (info, index, true);
2721     }
2722 
2723   return false;
2724 }
2725 
2726 /* Scan the statements in BB and inspect the uses of formal parameters.  Store
2727    the findings in various structures of the associated ipa_node_params
2728    structure, such as parameter flags, notes etc.  FBI holds various data about
2729    the function being analyzed.  */
2730 
2731 static void
ipa_analyze_params_uses_in_bb(struct ipa_func_body_info * fbi,basic_block bb)2732 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2733 {
2734   gimple_stmt_iterator gsi;
2735   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2736     {
2737       gimple *stmt = gsi_stmt (gsi);
2738 
2739       if (is_gimple_debug (stmt))
2740 	continue;
2741 
2742       ipa_analyze_stmt_uses (fbi, stmt);
2743       walk_stmt_load_store_addr_ops (stmt, fbi->info,
2744 				     visit_ref_for_mod_analysis,
2745 				     visit_ref_for_mod_analysis,
2746 				     visit_ref_for_mod_analysis);
2747     }
2748   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2749     walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2750 				   visit_ref_for_mod_analysis,
2751 				   visit_ref_for_mod_analysis,
2752 				   visit_ref_for_mod_analysis);
2753 }
2754 
2755 /* Calculate controlled uses of parameters of NODE.  */
2756 
2757 static void
ipa_analyze_controlled_uses(struct cgraph_node * node)2758 ipa_analyze_controlled_uses (struct cgraph_node *node)
2759 {
2760   class ipa_node_params *info = IPA_NODE_REF (node);
2761 
2762   for (int i = 0; i < ipa_get_param_count (info); i++)
2763     {
2764       tree parm = ipa_get_param (info, i);
2765       int controlled_uses = 0;
2766 
2767       /* For SSA regs see if parameter is used.  For non-SSA we compute
2768 	 the flag during modification analysis.  */
2769       if (is_gimple_reg (parm))
2770 	{
2771 	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2772 				       parm);
2773 	  if (ddef && !has_zero_uses (ddef))
2774 	    {
2775 	      imm_use_iterator imm_iter;
2776 	      use_operand_p use_p;
2777 
2778 	      ipa_set_param_used (info, i, true);
2779 	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2780 		if (!is_gimple_call (USE_STMT (use_p)))
2781 		  {
2782 		    if (!is_gimple_debug (USE_STMT (use_p)))
2783 		      {
2784 			controlled_uses = IPA_UNDESCRIBED_USE;
2785 			break;
2786 		      }
2787 		  }
2788 		else
2789 		  controlled_uses++;
2790 	    }
2791 	  else
2792 	    controlled_uses = 0;
2793 	}
2794       else
2795 	controlled_uses = IPA_UNDESCRIBED_USE;
2796       ipa_set_controlled_uses (info, i, controlled_uses);
2797     }
2798 }
2799 
2800 /* Free stuff in BI.  */
2801 
2802 static void
free_ipa_bb_info(struct ipa_bb_info * bi)2803 free_ipa_bb_info (struct ipa_bb_info *bi)
2804 {
2805   bi->cg_edges.release ();
2806   bi->param_aa_statuses.release ();
2807 }
2808 
2809 /* Dominator walker driving the analysis.  */
2810 
2811 class analysis_dom_walker : public dom_walker
2812 {
2813 public:
analysis_dom_walker(struct ipa_func_body_info * fbi)2814   analysis_dom_walker (struct ipa_func_body_info *fbi)
2815     : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2816 
2817   virtual edge before_dom_children (basic_block);
2818 
2819 private:
2820   struct ipa_func_body_info *m_fbi;
2821 };
2822 
2823 edge
before_dom_children(basic_block bb)2824 analysis_dom_walker::before_dom_children (basic_block bb)
2825 {
2826   ipa_analyze_params_uses_in_bb (m_fbi, bb);
2827   ipa_compute_jump_functions_for_bb (m_fbi, bb);
2828   return NULL;
2829 }
2830 
2831 /* Release body info FBI.  */
2832 
2833 void
ipa_release_body_info(struct ipa_func_body_info * fbi)2834 ipa_release_body_info (struct ipa_func_body_info *fbi)
2835 {
2836   int i;
2837   struct ipa_bb_info *bi;
2838 
2839   FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2840     free_ipa_bb_info (bi);
2841   fbi->bb_infos.release ();
2842 }
2843 
2844 /* Initialize the array describing properties of formal parameters
2845    of NODE, analyze their uses and compute jump functions associated
2846    with actual arguments of calls from within NODE.  */
2847 
2848 void
ipa_analyze_node(struct cgraph_node * node)2849 ipa_analyze_node (struct cgraph_node *node)
2850 {
2851   struct ipa_func_body_info fbi;
2852   class ipa_node_params *info;
2853 
2854   ipa_check_create_node_params ();
2855   ipa_check_create_edge_args ();
2856   info = IPA_NODE_REF_GET_CREATE (node);
2857 
2858   if (info->analysis_done)
2859     return;
2860   info->analysis_done = 1;
2861 
2862   if (ipa_func_spec_opts_forbid_analysis_p (node))
2863     {
2864       for (int i = 0; i < ipa_get_param_count (info); i++)
2865 	{
2866 	  ipa_set_param_used (info, i, true);
2867 	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2868 	}
2869       return;
2870     }
2871 
2872   struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2873   push_cfun (func);
2874   calculate_dominance_info (CDI_DOMINATORS);
2875   ipa_initialize_node_params (node);
2876   ipa_analyze_controlled_uses (node);
2877 
2878   fbi.node = node;
2879   fbi.info = IPA_NODE_REF (node);
2880   fbi.bb_infos = vNULL;
2881   fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2882   fbi.param_count = ipa_get_param_count (info);
2883   fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
2884 
2885   for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2886     {
2887       ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2888       bi->cg_edges.safe_push (cs);
2889     }
2890 
2891   for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2892     {
2893       ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2894       bi->cg_edges.safe_push (cs);
2895     }
2896 
2897   analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2898 
2899   ipa_release_body_info (&fbi);
2900   free_dominance_info (CDI_DOMINATORS);
2901   pop_cfun ();
2902 }
2903 
2904 /* Update the jump functions associated with call graph edge E when the call
2905    graph edge CS is being inlined, assuming that E->caller is already (possibly
2906    indirectly) inlined into CS->callee and that E has not been inlined.  */
2907 
2908 static void
update_jump_functions_after_inlining(struct cgraph_edge * cs,struct cgraph_edge * e)2909 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2910 				      struct cgraph_edge *e)
2911 {
2912   class ipa_edge_args *top = IPA_EDGE_REF (cs);
2913   class ipa_edge_args *args = IPA_EDGE_REF (e);
2914   if (!args)
2915     return;
2916   int count = ipa_get_cs_argument_count (args);
2917   int i;
2918 
2919   for (i = 0; i < count; i++)
2920     {
2921       struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2922       class ipa_polymorphic_call_context *dst_ctx
2923 	= ipa_get_ith_polymorhic_call_context (args, i);
2924 
2925       if (dst->agg.items)
2926 	{
2927 	  struct ipa_agg_jf_item *item;
2928 	  int j;
2929 
2930 	  FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
2931 	    {
2932 	      int dst_fid;
2933 	      struct ipa_jump_func *src;
2934 
2935 	      if (item->jftype != IPA_JF_PASS_THROUGH
2936 		  && item->jftype != IPA_JF_LOAD_AGG)
2937 		continue;
2938 
2939 	      dst_fid = item->value.pass_through.formal_id;
2940 	      if (!top || dst_fid >= ipa_get_cs_argument_count (top))
2941 		{
2942 		  item->jftype = IPA_JF_UNKNOWN;
2943 		  continue;
2944 		}
2945 
2946 	      item->value.pass_through.formal_id = -1;
2947 	      src = ipa_get_ith_jump_func (top, dst_fid);
2948 	      if (src->type == IPA_JF_CONST)
2949 		{
2950 		  if (item->jftype == IPA_JF_PASS_THROUGH
2951 		      && item->value.pass_through.operation == NOP_EXPR)
2952 		    {
2953 		      item->jftype = IPA_JF_CONST;
2954 		      item->value.constant = src->value.constant.value;
2955 		      continue;
2956 		    }
2957 		}
2958 	      else if (src->type == IPA_JF_PASS_THROUGH
2959 		       && src->value.pass_through.operation == NOP_EXPR)
2960 		{
2961 		  if (item->jftype == IPA_JF_PASS_THROUGH
2962 		      || !item->value.load_agg.by_ref
2963 		      || src->value.pass_through.agg_preserved)
2964 		    item->value.pass_through.formal_id
2965 				= src->value.pass_through.formal_id;
2966 		}
2967 	      else if (src->type == IPA_JF_ANCESTOR)
2968 		{
2969 		  if (item->jftype == IPA_JF_PASS_THROUGH)
2970 		    {
2971 		      if (!src->value.ancestor.offset)
2972 			item->value.pass_through.formal_id
2973 				= src->value.ancestor.formal_id;
2974 		    }
2975 		  else if (src->value.ancestor.agg_preserved)
2976 		    {
2977 		      gcc_checking_assert (item->value.load_agg.by_ref);
2978 
2979 		      item->value.pass_through.formal_id
2980 				 = src->value.ancestor.formal_id;
2981 		      item->value.load_agg.offset
2982 				+= src->value.ancestor.offset;
2983 		    }
2984 		}
2985 
2986 	      if (item->value.pass_through.formal_id < 0)
2987 		item->jftype = IPA_JF_UNKNOWN;
2988 	    }
2989 	}
2990 
2991       if (!top)
2992 	{
2993 	  ipa_set_jf_unknown (dst);
2994 	  continue;
2995 	}
2996 
2997       if (dst->type == IPA_JF_ANCESTOR)
2998 	{
2999 	  struct ipa_jump_func *src;
3000 	  int dst_fid = dst->value.ancestor.formal_id;
3001 	  class ipa_polymorphic_call_context *src_ctx
3002 	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3003 
3004 	  /* Variable number of arguments can cause havoc if we try to access
3005 	     one that does not exist in the inlined edge.  So make sure we
3006 	     don't.  */
3007 	  if (dst_fid >= ipa_get_cs_argument_count (top))
3008 	    {
3009 	      ipa_set_jf_unknown (dst);
3010 	      continue;
3011 	    }
3012 
3013 	  src = ipa_get_ith_jump_func (top, dst_fid);
3014 
3015 	  if (src_ctx && !src_ctx->useless_p ())
3016 	    {
3017 	      class ipa_polymorphic_call_context ctx = *src_ctx;
3018 
3019 	      /* TODO: Make type preserved safe WRT contexts.  */
3020 	      if (!ipa_get_jf_ancestor_type_preserved (dst))
3021 		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3022 	      ctx.offset_by (dst->value.ancestor.offset);
3023 	      if (!ctx.useless_p ())
3024 		{
3025 		  if (!dst_ctx)
3026 		    {
3027 		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
3028 					     count);
3029 		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3030 		    }
3031 
3032 		  dst_ctx->combine_with (ctx);
3033 		}
3034 	    }
3035 
3036 	  /* Parameter and argument in ancestor jump function must be pointer
3037 	     type, which means access to aggregate must be by-reference.  */
3038 	  gcc_assert (!src->agg.items || src->agg.by_ref);
3039 
3040 	  if (src->agg.items && dst->value.ancestor.agg_preserved)
3041 	    {
3042 	      struct ipa_agg_jf_item *item;
3043 	      int j;
3044 
3045 	      /* Currently we do not produce clobber aggregate jump functions,
3046 		 replace with merging when we do.  */
3047 	      gcc_assert (!dst->agg.items);
3048 
3049 	      dst->agg.items = vec_safe_copy (src->agg.items);
3050 	      dst->agg.by_ref = src->agg.by_ref;
3051 	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
3052 		item->offset -= dst->value.ancestor.offset;
3053 	    }
3054 
3055 	  if (src->type == IPA_JF_PASS_THROUGH
3056 	      && src->value.pass_through.operation == NOP_EXPR)
3057 	    {
3058 	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3059 	      dst->value.ancestor.agg_preserved &=
3060 		src->value.pass_through.agg_preserved;
3061 	    }
3062 	  else if (src->type == IPA_JF_ANCESTOR)
3063 	    {
3064 	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3065 	      dst->value.ancestor.offset += src->value.ancestor.offset;
3066 	      dst->value.ancestor.agg_preserved &=
3067 		src->value.ancestor.agg_preserved;
3068 	    }
3069 	  else
3070 	    ipa_set_jf_unknown (dst);
3071 	}
3072       else if (dst->type == IPA_JF_PASS_THROUGH)
3073 	{
3074 	  struct ipa_jump_func *src;
3075 	  /* We must check range due to calls with variable number of arguments
3076 	     and we cannot combine jump functions with operations.  */
3077 	  if (dst->value.pass_through.operation == NOP_EXPR
3078 	      && (top && dst->value.pass_through.formal_id
3079 		  < ipa_get_cs_argument_count (top)))
3080 	    {
3081 	      int dst_fid = dst->value.pass_through.formal_id;
3082 	      src = ipa_get_ith_jump_func (top, dst_fid);
3083 	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
3084 	      class ipa_polymorphic_call_context *src_ctx
3085 		= ipa_get_ith_polymorhic_call_context (top, dst_fid);
3086 
3087 	      if (src_ctx && !src_ctx->useless_p ())
3088 		{
3089 		  class ipa_polymorphic_call_context ctx = *src_ctx;
3090 
3091 		  /* TODO: Make type preserved safe WRT contexts.  */
3092 		  if (!ipa_get_jf_pass_through_type_preserved (dst))
3093 		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3094 		  if (!ctx.useless_p ())
3095 		    {
3096 		      if (!dst_ctx)
3097 			{
3098 			  vec_safe_grow_cleared (args->polymorphic_call_contexts,
3099 					         count);
3100 			  dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3101 			}
3102 		      dst_ctx->combine_with (ctx);
3103 		    }
3104 		}
3105 	      switch (src->type)
3106 		{
3107 		case IPA_JF_UNKNOWN:
3108 		  ipa_set_jf_unknown (dst);
3109 		  break;
3110 		case IPA_JF_CONST:
3111 		  ipa_set_jf_cst_copy (dst, src);
3112 		  break;
3113 
3114 		case IPA_JF_PASS_THROUGH:
3115 		  {
3116 		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
3117 		    enum tree_code operation;
3118 		    operation = ipa_get_jf_pass_through_operation (src);
3119 
3120 		    if (operation == NOP_EXPR)
3121 		      {
3122 			bool agg_p;
3123 			agg_p = dst_agg_p
3124 			  && ipa_get_jf_pass_through_agg_preserved (src);
3125 			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
3126 		      }
3127 		    else if (TREE_CODE_CLASS (operation) == tcc_unary)
3128 		      ipa_set_jf_unary_pass_through (dst, formal_id, operation);
3129 		    else
3130 		      {
3131 			tree operand = ipa_get_jf_pass_through_operand (src);
3132 			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3133 						       operation);
3134 		      }
3135 		    break;
3136 		  }
3137 		case IPA_JF_ANCESTOR:
3138 		  {
3139 		    bool agg_p;
3140 		    agg_p = dst_agg_p
3141 		      && ipa_get_jf_ancestor_agg_preserved (src);
3142 		    ipa_set_ancestor_jf (dst,
3143 					 ipa_get_jf_ancestor_offset (src),
3144 					 ipa_get_jf_ancestor_formal_id (src),
3145 					 agg_p);
3146 		    break;
3147 		  }
3148 		default:
3149 		  gcc_unreachable ();
3150 		}
3151 
3152 	      if (src->agg.items
3153 		  && (dst_agg_p || !src->agg.by_ref))
3154 		{
3155 		  /* Currently we do not produce clobber aggregate jump
3156 		     functions, replace with merging when we do.  */
3157 		  gcc_assert (!dst->agg.items);
3158 
3159 		  dst->agg.by_ref = src->agg.by_ref;
3160 		  dst->agg.items = vec_safe_copy (src->agg.items);
3161 		}
3162 	    }
3163 	  else
3164 	    ipa_set_jf_unknown (dst);
3165 	}
3166     }
3167 }
3168 
3169 /* If TARGET is an addr_expr of a function declaration, make it the
3170    (SPECULATIVE)destination of an indirect edge IE and return the edge.
3171    Otherwise, return NULL.  */
3172 
3173 struct cgraph_edge *
ipa_make_edge_direct_to_target(struct cgraph_edge * ie,tree target,bool speculative)3174 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3175 				bool speculative)
3176 {
3177   struct cgraph_node *callee;
3178   bool unreachable = false;
3179 
3180   if (TREE_CODE (target) == ADDR_EXPR)
3181     target = TREE_OPERAND (target, 0);
3182   if (TREE_CODE (target) != FUNCTION_DECL)
3183     {
3184       target = canonicalize_constructor_val (target, NULL);
3185       if (!target || TREE_CODE (target) != FUNCTION_DECL)
3186 	{
3187 	  /* Member pointer call that goes through a VMT lookup.  */
3188 	  if (ie->indirect_info->member_ptr
3189 	      /* Or if target is not an invariant expression and we do not
3190 		 know if it will evaulate to function at runtime.
3191 		 This can happen when folding through &VAR, where &VAR
3192 		 is IP invariant, but VAR itself is not.
3193 
3194 		 TODO: Revisit this when GCC 5 is branched.  It seems that
3195 		 member_ptr check is not needed and that we may try to fold
3196 		 the expression and see if VAR is readonly.  */
3197 	      || !is_gimple_ip_invariant (target))
3198 	    {
3199 	      if (dump_enabled_p ())
3200 		{
3201 		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3202 				   "discovered direct call non-invariant %s\n",
3203 				   ie->caller->dump_name ());
3204 		}
3205 	      return NULL;
3206 	    }
3207 
3208 
3209           if (dump_enabled_p ())
3210 	    {
3211 	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3212 			       "discovered direct call to non-function in %s, "
3213 			       "making it __builtin_unreachable\n",
3214 			       ie->caller->dump_name ());
3215 	    }
3216 
3217 	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3218 	  callee = cgraph_node::get_create (target);
3219 	  unreachable = true;
3220 	}
3221       else
3222 	callee = cgraph_node::get (target);
3223     }
3224   else
3225     callee = cgraph_node::get (target);
3226 
3227   /* Because may-edges are not explicitely represented and vtable may be external,
3228      we may create the first reference to the object in the unit.  */
3229   if (!callee || callee->inlined_to)
3230     {
3231 
3232       /* We are better to ensure we can refer to it.
3233 	 In the case of static functions we are out of luck, since we already
3234 	 removed its body.  In the case of public functions we may or may
3235 	 not introduce the reference.  */
3236       if (!canonicalize_constructor_val (target, NULL)
3237 	  || !TREE_PUBLIC (target))
3238 	{
3239 	  if (dump_file)
3240 	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
3241 		     "(%s -> %s) but cannot refer to it.  Giving up.\n",
3242 		     ie->caller->dump_name (),
3243 		     ie->callee->dump_name ());
3244 	  return NULL;
3245 	}
3246       callee = cgraph_node::get_create (target);
3247     }
3248 
3249   /* If the edge is already speculated.  */
3250   if (speculative && ie->speculative)
3251     {
3252       if (dump_file)
3253 	{
3254 	  cgraph_edge *e2 = ie->speculative_call_for_target (callee);
3255 	  if (!e2)
3256 	    {
3257 	      if (dump_file)
3258 		fprintf (dump_file, "ipa-prop: Discovered call to a "
3259 			 "speculative target (%s -> %s) but the call is "
3260 			 "already speculated to different target.  "
3261 			 "Giving up.\n",
3262 			 ie->caller->dump_name (), callee->dump_name ());
3263 	    }
3264 	  else
3265 	    {
3266 	      if (dump_file)
3267 		fprintf (dump_file,
3268 			 "ipa-prop: Discovered call to a speculative target "
3269 			 "(%s -> %s) this agree with previous speculation.\n",
3270 			 ie->caller->dump_name (), callee->dump_name ());
3271 	    }
3272 	}
3273       return NULL;
3274     }
3275 
3276   if (!dbg_cnt (devirt))
3277     return NULL;
3278 
3279   ipa_check_create_node_params ();
3280 
3281   /* We cannot make edges to inline clones.  It is bug that someone removed
3282      the cgraph node too early.  */
3283   gcc_assert (!callee->inlined_to);
3284 
3285   if (dump_file && !unreachable)
3286     {
3287       fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
3288 	       "(%s -> %s), for stmt ",
3289 	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
3290 	       speculative ? "speculative" : "known",
3291 	       ie->caller->dump_name (),
3292 	       callee->dump_name ());
3293       if (ie->call_stmt)
3294 	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3295       else
3296 	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
3297      }
3298   if (dump_enabled_p ())
3299     {
3300       dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3301 		       "converting indirect call in %s to direct call to %s\n",
3302 		       ie->caller->dump_name (), callee->dump_name ());
3303     }
3304   if (!speculative)
3305     {
3306       struct cgraph_edge *orig = ie;
3307       ie = cgraph_edge::make_direct (ie, callee);
3308       /* If we resolved speculative edge the cost is already up to date
3309 	 for direct call (adjusted by inline_edge_duplication_hook).  */
3310       if (ie == orig)
3311 	{
3312 	  ipa_call_summary *es = ipa_call_summaries->get (ie);
3313 	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3314 				 - eni_size_weights.call_cost);
3315 	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3316 				 - eni_time_weights.call_cost);
3317 	}
3318     }
3319   else
3320     {
3321       if (!callee->can_be_discarded_p ())
3322 	{
3323 	  cgraph_node *alias;
3324 	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3325 	  if (alias)
3326 	    callee = alias;
3327 	}
3328       /* make_speculative will update ie's cost to direct call cost. */
3329       ie = ie->make_speculative
3330 	     (callee, ie->count.apply_scale (8, 10));
3331     }
3332 
3333   return ie;
3334 }
3335 
3336 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3337    CONSTRUCTOR and return it.  Return NULL if the search fails for some
3338    reason.  */
3339 
3340 static tree
find_constructor_constant_at_offset(tree constructor,HOST_WIDE_INT req_offset)3341 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3342 {
3343   tree type = TREE_TYPE (constructor);
3344   if (TREE_CODE (type) != ARRAY_TYPE
3345       && TREE_CODE (type) != RECORD_TYPE)
3346     return NULL;
3347 
3348   unsigned ix;
3349   tree index, val;
3350   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3351     {
3352       HOST_WIDE_INT elt_offset;
3353       if (TREE_CODE (type) == ARRAY_TYPE)
3354        {
3355          offset_int off;
3356          tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3357          gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3358 
3359          if (index)
3360            {
3361 	     if (TREE_CODE (index) == RANGE_EXPR)
3362 	       off = wi::to_offset (TREE_OPERAND (index, 0));
3363 	     else
3364 	       off = wi::to_offset (index);
3365              if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3366                {
3367                  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3368                  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3369                  off = wi::sext (off - wi::to_offset (low_bound),
3370                                  TYPE_PRECISION (TREE_TYPE (index)));
3371                }
3372              off *= wi::to_offset (unit_size);
3373 	     /* ???  Handle more than just the first index of a
3374 	        RANGE_EXPR.  */
3375            }
3376          else
3377            off = wi::to_offset (unit_size) * ix;
3378 
3379          off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3380          if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3381            continue;
3382          elt_offset = off.to_shwi ();
3383        }
3384       else if (TREE_CODE (type) == RECORD_TYPE)
3385        {
3386          gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3387          if (DECL_BIT_FIELD (index))
3388            continue;
3389          elt_offset = int_bit_position (index);
3390        }
3391       else
3392        gcc_unreachable ();
3393 
3394       if (elt_offset > req_offset)
3395 	return NULL;
3396 
3397       if (TREE_CODE (val) == CONSTRUCTOR)
3398 	return find_constructor_constant_at_offset (val,
3399 						    req_offset - elt_offset);
3400 
3401       if (elt_offset == req_offset
3402 	  && is_gimple_reg_type (TREE_TYPE (val))
3403 	  && is_gimple_ip_invariant (val))
3404 	return val;
3405     }
3406   return NULL;
3407 }
3408 
3409 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3410    invariant from a static constructor and if so, return it.  Otherwise return
3411    NULL. */
3412 
3413 static tree
ipa_find_agg_cst_from_init(tree scalar,HOST_WIDE_INT offset,bool by_ref)3414 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3415 {
3416   if (by_ref)
3417     {
3418       if (TREE_CODE (scalar) != ADDR_EXPR)
3419 	return NULL;
3420       scalar = TREE_OPERAND (scalar, 0);
3421     }
3422 
3423   if (!VAR_P (scalar)
3424       || !is_global_var (scalar)
3425       || !TREE_READONLY (scalar)
3426       || !DECL_INITIAL (scalar)
3427       || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3428     return NULL;
3429 
3430   return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3431 }
3432 
3433 /* Retrieve value from AGG, a set of known offset/value for an aggregate or
3434    static initializer of SCALAR (which can be NULL) for the given OFFSET or
3435    return NULL if there is none.  BY_REF specifies whether the value has to be
3436    passed by reference or by value.  If FROM_GLOBAL_CONSTANT is non-NULL, then
3437    the boolean it points to is set to true if the value comes from an
3438    initializer of a constant.  */
3439 
3440 tree
ipa_find_agg_cst_for_param(struct ipa_agg_value_set * agg,tree scalar,HOST_WIDE_INT offset,bool by_ref,bool * from_global_constant)3441 ipa_find_agg_cst_for_param (struct ipa_agg_value_set *agg, tree scalar,
3442 			    HOST_WIDE_INT offset, bool by_ref,
3443 			    bool *from_global_constant)
3444 {
3445   struct ipa_agg_value *item;
3446   int i;
3447 
3448   if (scalar)
3449     {
3450       tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3451       if (res)
3452 	{
3453 	  if (from_global_constant)
3454 	    *from_global_constant = true;
3455 	  return res;
3456 	}
3457     }
3458 
3459   if (!agg
3460       || by_ref != agg->by_ref)
3461     return NULL;
3462 
3463   FOR_EACH_VEC_ELT (agg->items, i, item)
3464     if (item->offset == offset)
3465       {
3466 	/* Currently we do not have clobber values, return NULL for them once
3467 	   we do.  */
3468 	gcc_checking_assert (is_gimple_ip_invariant (item->value));
3469 	if (from_global_constant)
3470 	  *from_global_constant = false;
3471 	return item->value;
3472       }
3473   return NULL;
3474 }
3475 
3476 /* Remove a reference to SYMBOL from the list of references of a node given by
3477    reference description RDESC.  Return true if the reference has been
3478    successfully found and removed.  */
3479 
3480 static bool
remove_described_reference(symtab_node * symbol,struct ipa_cst_ref_desc * rdesc)3481 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3482 {
3483   struct ipa_ref *to_del;
3484   struct cgraph_edge *origin;
3485 
3486   origin = rdesc->cs;
3487   if (!origin)
3488     return false;
3489   to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3490 					   origin->lto_stmt_uid);
3491   if (!to_del)
3492     return false;
3493 
3494   to_del->remove_reference ();
3495   if (dump_file)
3496     fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3497 	     origin->caller->dump_name (), symbol->dump_name ());
3498   return true;
3499 }
3500 
3501 /* If JFUNC has a reference description with refcount different from
3502    IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3503    NULL.  JFUNC must be a constant jump function.  */
3504 
3505 static struct ipa_cst_ref_desc *
jfunc_rdesc_usable(struct ipa_jump_func * jfunc)3506 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3507 {
3508   struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3509   if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3510     return rdesc;
3511   else
3512     return NULL;
3513 }
3514 
3515 /* If the value of constant jump function JFUNC is an address of a function
3516    declaration, return the associated call graph node.  Otherwise return
3517    NULL.  */
3518 
3519 static cgraph_node *
cgraph_node_for_jfunc(struct ipa_jump_func * jfunc)3520 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3521 {
3522   gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3523   tree cst = ipa_get_jf_constant (jfunc);
3524   if (TREE_CODE (cst) != ADDR_EXPR
3525       || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3526     return NULL;
3527 
3528   return cgraph_node::get (TREE_OPERAND (cst, 0));
3529 }
3530 
3531 
3532 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3533    refcount and if it hits zero, remove reference to SYMBOL from the caller of
3534    the edge specified in the rdesc.  Return false if either the symbol or the
3535    reference could not be found, otherwise return true.  */
3536 
3537 static bool
try_decrement_rdesc_refcount(struct ipa_jump_func * jfunc)3538 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3539 {
3540   struct ipa_cst_ref_desc *rdesc;
3541   if (jfunc->type == IPA_JF_CONST
3542       && (rdesc = jfunc_rdesc_usable (jfunc))
3543       && --rdesc->refcount == 0)
3544     {
3545       symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3546       if (!symbol)
3547 	return false;
3548 
3549       return remove_described_reference (symbol, rdesc);
3550     }
3551   return true;
3552 }
3553 
3554 /* Try to find a destination for indirect edge IE that corresponds to a simple
3555    call or a call of a member function pointer and where the destination is a
3556    pointer formal parameter described by jump function JFUNC.  TARGET_TYPE is
3557    the type of the parameter to which the result of JFUNC is passed.  If it can
3558    be determined, return the newly direct edge, otherwise return NULL.
3559    NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
3560    relative to.  */
3561 
3562 static struct cgraph_edge *
try_make_edge_direct_simple_call(struct cgraph_edge * ie,struct ipa_jump_func * jfunc,tree target_type,struct cgraph_node * new_root,class ipa_node_params * new_root_info)3563 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3564 				  struct ipa_jump_func *jfunc, tree target_type,
3565 				  struct cgraph_node *new_root,
3566 				  class ipa_node_params *new_root_info)
3567 {
3568   struct cgraph_edge *cs;
3569   tree target;
3570   bool agg_contents = ie->indirect_info->agg_contents;
3571   tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3572   if (agg_contents)
3573     {
3574       bool from_global_constant;
3575       ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3576 							    new_root,
3577 							    &jfunc->agg);
3578       target = ipa_find_agg_cst_for_param (&agg, scalar,
3579 					   ie->indirect_info->offset,
3580 					   ie->indirect_info->by_ref,
3581 					   &from_global_constant);
3582       agg.release ();
3583       if (target
3584 	  && !from_global_constant
3585 	  && !ie->indirect_info->guaranteed_unmodified)
3586 	return NULL;
3587     }
3588   else
3589     target = scalar;
3590   if (!target)
3591     return NULL;
3592   cs = ipa_make_edge_direct_to_target (ie, target);
3593 
3594   if (cs && !agg_contents)
3595     {
3596       bool ok;
3597       gcc_checking_assert (cs->callee
3598 			   && (cs != ie
3599 			       || jfunc->type != IPA_JF_CONST
3600 			       || !cgraph_node_for_jfunc (jfunc)
3601 			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
3602       ok = try_decrement_rdesc_refcount (jfunc);
3603       gcc_checking_assert (ok);
3604     }
3605 
3606   return cs;
3607 }
3608 
3609 /* Return the target to be used in cases of impossible devirtualization.  IE
3610    and target (the latter can be NULL) are dumped when dumping is enabled.  */
3611 
3612 tree
ipa_impossible_devirt_target(struct cgraph_edge * ie,tree target)3613 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3614 {
3615   if (dump_file)
3616     {
3617       if (target)
3618 	fprintf (dump_file,
3619 		 "Type inconsistent devirtualization: %s->%s\n",
3620 		 ie->caller->dump_name (),
3621 		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3622       else
3623 	fprintf (dump_file,
3624 		 "No devirtualization target in %s\n",
3625 		 ie->caller->dump_name ());
3626     }
3627   tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3628   cgraph_node::get_create (new_target);
3629   return new_target;
3630 }
3631 
3632 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3633    call based on a formal parameter which is described by jump function JFUNC
3634    and if it can be determined, make it direct and return the direct edge.
3635    Otherwise, return NULL.  CTX describes the polymorphic context that the
3636    parameter the call is based on brings along with it.  NEW_ROOT and
3637    NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
3638    to.  */
3639 
3640 static struct cgraph_edge *
try_make_edge_direct_virtual_call(struct cgraph_edge * ie,struct ipa_jump_func * jfunc,class ipa_polymorphic_call_context ctx,struct cgraph_node * new_root,class ipa_node_params * new_root_info)3641 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3642 				   struct ipa_jump_func *jfunc,
3643 				   class ipa_polymorphic_call_context ctx,
3644 				   struct cgraph_node *new_root,
3645 				   class ipa_node_params *new_root_info)
3646 {
3647   tree target = NULL;
3648   bool speculative = false;
3649 
3650   if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3651     return NULL;
3652 
3653   gcc_assert (!ie->indirect_info->by_ref);
3654 
3655   /* Try to do lookup via known virtual table pointer value.  */
3656   if (!ie->indirect_info->vptr_changed
3657       || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3658     {
3659       tree vtable;
3660       unsigned HOST_WIDE_INT offset;
3661       tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3662 	: NULL;
3663       ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3664 							    new_root,
3665 							    &jfunc->agg);
3666       tree t = ipa_find_agg_cst_for_param (&agg, scalar,
3667 					   ie->indirect_info->offset,
3668 					   true);
3669       agg.release ();
3670       if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3671 	{
3672 	  bool can_refer;
3673 	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3674 						 vtable, offset, &can_refer);
3675 	  if (can_refer)
3676 	    {
3677 	      if (!t
3678 		  || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
3679 		  || !possible_polymorphic_call_target_p
3680 		       (ie, cgraph_node::get (t)))
3681 		{
3682 		  /* Do not speculate builtin_unreachable, it is stupid!  */
3683 		  if (!ie->indirect_info->vptr_changed)
3684 		    target = ipa_impossible_devirt_target (ie, target);
3685 		  else
3686 		    target = NULL;
3687 		}
3688 	      else
3689 		{
3690 		  target = t;
3691 		  speculative = ie->indirect_info->vptr_changed;
3692 		}
3693 	    }
3694 	}
3695     }
3696 
3697   ipa_polymorphic_call_context ie_context (ie);
3698   vec <cgraph_node *>targets;
3699   bool final;
3700 
3701   ctx.offset_by (ie->indirect_info->offset);
3702   if (ie->indirect_info->vptr_changed)
3703     ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3704 				      ie->indirect_info->otr_type);
3705   ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3706   targets = possible_polymorphic_call_targets
3707     (ie->indirect_info->otr_type,
3708      ie->indirect_info->otr_token,
3709      ctx, &final);
3710   if (final && targets.length () <= 1)
3711     {
3712       speculative = false;
3713       if (targets.length () == 1)
3714 	target = targets[0]->decl;
3715       else
3716 	target = ipa_impossible_devirt_target (ie, NULL_TREE);
3717     }
3718   else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3719 	   && !ie->speculative && ie->maybe_hot_p ())
3720     {
3721       cgraph_node *n;
3722       n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3723 					    ie->indirect_info->otr_token,
3724 					    ie->indirect_info->context);
3725       if (n)
3726 	{
3727 	  target = n->decl;
3728 	  speculative = true;
3729 	}
3730     }
3731 
3732   if (target)
3733     {
3734       if (!possible_polymorphic_call_target_p
3735 	  (ie, cgraph_node::get_create (target)))
3736 	{
3737 	  if (speculative)
3738 	    return NULL;
3739 	  target = ipa_impossible_devirt_target (ie, target);
3740 	}
3741       return ipa_make_edge_direct_to_target (ie, target, speculative);
3742     }
3743   else
3744     return NULL;
3745 }
3746 
3747 /* Update the param called notes associated with NODE when CS is being inlined,
3748    assuming NODE is (potentially indirectly) inlined into CS->callee.
3749    Moreover, if the callee is discovered to be constant, create a new cgraph
3750    edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
3751    unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */
3752 
3753 static bool
update_indirect_edges_after_inlining(struct cgraph_edge * cs,struct cgraph_node * node,vec<cgraph_edge * > * new_edges)3754 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3755 				      struct cgraph_node *node,
3756 				      vec<cgraph_edge *> *new_edges)
3757 {
3758   class ipa_edge_args *top;
3759   struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3760   struct cgraph_node *new_root;
3761   class ipa_node_params *new_root_info, *inlined_node_info;
3762   bool res = false;
3763 
3764   ipa_check_create_edge_args ();
3765   top = IPA_EDGE_REF (cs);
3766   new_root = cs->caller->inlined_to
3767 		? cs->caller->inlined_to : cs->caller;
3768   new_root_info = IPA_NODE_REF (new_root);
3769   inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3770 
3771   for (ie = node->indirect_calls; ie; ie = next_ie)
3772     {
3773       class cgraph_indirect_call_info *ici = ie->indirect_info;
3774       struct ipa_jump_func *jfunc;
3775       int param_index;
3776 
3777       next_ie = ie->next_callee;
3778 
3779       if (ici->param_index == -1)
3780 	continue;
3781 
3782       /* We must check range due to calls with variable number of arguments:  */
3783       if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
3784 	{
3785 	  ici->param_index = -1;
3786 	  continue;
3787 	}
3788 
3789       param_index = ici->param_index;
3790       jfunc = ipa_get_ith_jump_func (top, param_index);
3791 
3792       auto_vec<cgraph_node *, 4> spec_targets;
3793       if (ie->speculative)
3794 	for (cgraph_edge *direct = ie->first_speculative_call_target ();
3795 	     direct;
3796 	     direct = direct->next_speculative_call_target ())
3797 	  spec_targets.safe_push (direct->callee);
3798 
3799       if (!opt_for_fn (node->decl, flag_indirect_inlining))
3800 	new_direct_edge = NULL;
3801       else if (ici->polymorphic)
3802 	{
3803           ipa_polymorphic_call_context ctx;
3804 	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3805 	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
3806 							       new_root,
3807 							       new_root_info);
3808 	}
3809       else
3810 	{
3811 	  tree target_type =  ipa_get_type (inlined_node_info, param_index);
3812 	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3813 							      target_type,
3814 							      new_root,
3815 							      new_root_info);
3816 	}
3817 
3818       /* If speculation was removed, then we need to do nothing.  */
3819       if (new_direct_edge && new_direct_edge != ie
3820 	  && spec_targets.contains (new_direct_edge->callee))
3821 	{
3822 	  new_direct_edge->indirect_inlining_edge = 1;
3823 	  top = IPA_EDGE_REF (cs);
3824 	  res = true;
3825 	  if (!new_direct_edge->speculative)
3826 	    continue;
3827 	}
3828       else if (new_direct_edge)
3829 	{
3830 	  new_direct_edge->indirect_inlining_edge = 1;
3831 	  if (new_edges)
3832 	    {
3833 	      new_edges->safe_push (new_direct_edge);
3834 	      res = true;
3835 	    }
3836 	  top = IPA_EDGE_REF (cs);
3837 	  /* If speculative edge was introduced we still need to update
3838 	     call info of the indirect edge.  */
3839 	  if (!new_direct_edge->speculative)
3840 	    continue;
3841 	}
3842       if (jfunc->type == IPA_JF_PASS_THROUGH
3843           && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3844 	{
3845 	  if (ici->agg_contents
3846 	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3847 	      && !ici->polymorphic)
3848 	    ici->param_index = -1;
3849 	  else
3850 	    {
3851 	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3852 	      if (ici->polymorphic
3853 		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
3854 		ici->vptr_changed = true;
3855 	      ipa_set_param_used_by_indirect_call (new_root_info,
3856 			     			   ici->param_index, true);
3857 	      if (ici->polymorphic)
3858 		ipa_set_param_used_by_polymorphic_call (new_root_info,
3859 						        ici->param_index, true);
3860 	    }
3861 	}
3862       else if (jfunc->type == IPA_JF_ANCESTOR)
3863 	{
3864 	  if (ici->agg_contents
3865 	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3866 	      && !ici->polymorphic)
3867 	    ici->param_index = -1;
3868 	  else
3869 	    {
3870 	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3871 	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3872 	      if (ici->polymorphic
3873 		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
3874 		ici->vptr_changed = true;
3875 	      ipa_set_param_used_by_indirect_call (new_root_info,
3876 			     			   ici->param_index, true);
3877 	      if (ici->polymorphic)
3878 		ipa_set_param_used_by_polymorphic_call (new_root_info,
3879 						        ici->param_index, true);
3880 	    }
3881 	}
3882       else
3883 	/* Either we can find a destination for this edge now or never. */
3884 	ici->param_index = -1;
3885     }
3886 
3887   return res;
3888 }
3889 
3890 /* Recursively traverse subtree of NODE (including node) made of inlined
3891    cgraph_edges when CS has been inlined and invoke
3892    update_indirect_edges_after_inlining on all nodes and
3893    update_jump_functions_after_inlining on all non-inlined edges that lead out
3894    of this subtree.  Newly discovered indirect edges will be added to
3895    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
3896    created.  */
3897 
3898 static bool
propagate_info_to_inlined_callees(struct cgraph_edge * cs,struct cgraph_node * node,vec<cgraph_edge * > * new_edges)3899 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3900 				   struct cgraph_node *node,
3901 				   vec<cgraph_edge *> *new_edges)
3902 {
3903   struct cgraph_edge *e;
3904   bool res;
3905 
3906   res = update_indirect_edges_after_inlining (cs, node, new_edges);
3907 
3908   for (e = node->callees; e; e = e->next_callee)
3909     if (!e->inline_failed)
3910       res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3911     else
3912       update_jump_functions_after_inlining (cs, e);
3913   for (e = node->indirect_calls; e; e = e->next_callee)
3914     update_jump_functions_after_inlining (cs, e);
3915 
3916   return res;
3917 }
3918 
3919 /* Combine two controlled uses counts as done during inlining.  */
3920 
3921 static int
combine_controlled_uses_counters(int c,int d)3922 combine_controlled_uses_counters (int c, int d)
3923 {
3924   if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3925     return IPA_UNDESCRIBED_USE;
3926   else
3927     return c + d - 1;
3928 }
3929 
3930 /* Propagate number of controlled users from CS->caleee to the new root of the
3931    tree of inlined nodes.  */
3932 
3933 static void
propagate_controlled_uses(struct cgraph_edge * cs)3934 propagate_controlled_uses (struct cgraph_edge *cs)
3935 {
3936   class ipa_edge_args *args = IPA_EDGE_REF (cs);
3937   if (!args)
3938     return;
3939   struct cgraph_node *new_root = cs->caller->inlined_to
3940     ? cs->caller->inlined_to : cs->caller;
3941   class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3942   class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3943   int count, i;
3944 
3945   if (!old_root_info)
3946     return;
3947 
3948   count = MIN (ipa_get_cs_argument_count (args),
3949 	       ipa_get_param_count (old_root_info));
3950   for (i = 0; i < count; i++)
3951     {
3952       struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3953       struct ipa_cst_ref_desc *rdesc;
3954 
3955       if (jf->type == IPA_JF_PASS_THROUGH)
3956 	{
3957 	  int src_idx, c, d;
3958 	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
3959 	  c = ipa_get_controlled_uses (new_root_info, src_idx);
3960 	  d = ipa_get_controlled_uses (old_root_info, i);
3961 
3962 	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3963 			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3964 	  c = combine_controlled_uses_counters (c, d);
3965 	  ipa_set_controlled_uses (new_root_info, src_idx, c);
3966 	  if (c == 0 && new_root_info->ipcp_orig_node)
3967 	    {
3968 	      struct cgraph_node *n;
3969 	      struct ipa_ref *ref;
3970 	      tree t = new_root_info->known_csts[src_idx];
3971 
3972 	      if (t && TREE_CODE (t) == ADDR_EXPR
3973 		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3974 		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3975 		  && (ref = new_root->find_reference (n, NULL, 0)))
3976 		{
3977 		  if (dump_file)
3978 		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
3979 			     "reference from %s to %s.\n",
3980 			     new_root->dump_name (),
3981 			     n->dump_name ());
3982 		  ref->remove_reference ();
3983 		}
3984 	    }
3985 	}
3986       else if (jf->type == IPA_JF_CONST
3987 	       && (rdesc = jfunc_rdesc_usable (jf)))
3988 	{
3989 	  int d = ipa_get_controlled_uses (old_root_info, i);
3990 	  int c = rdesc->refcount;
3991 	  rdesc->refcount = combine_controlled_uses_counters (c, d);
3992 	  if (rdesc->refcount == 0)
3993 	    {
3994 	      tree cst = ipa_get_jf_constant (jf);
3995 	      struct cgraph_node *n;
3996 	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3997 				   && TREE_CODE (TREE_OPERAND (cst, 0))
3998 				   == FUNCTION_DECL);
3999 	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
4000 	      if (n)
4001 		{
4002 		  struct cgraph_node *clone;
4003 		  bool ok;
4004 		  ok = remove_described_reference (n, rdesc);
4005 		  gcc_checking_assert (ok);
4006 
4007 		  clone = cs->caller;
4008 		  while (clone->inlined_to
4009 			 && clone->ipcp_clone
4010 			 && clone != rdesc->cs->caller)
4011 		    {
4012 		      struct ipa_ref *ref;
4013 		      ref = clone->find_reference (n, NULL, 0);
4014 		      if (ref)
4015 			{
4016 			  if (dump_file)
4017 			    fprintf (dump_file, "ipa-prop: Removing "
4018 				     "cloning-created reference "
4019 				     "from %s to %s.\n",
4020 				     clone->dump_name (),
4021 				     n->dump_name ());
4022 			  ref->remove_reference ();
4023 			}
4024 		      clone = clone->callers->caller;
4025 		    }
4026 		}
4027 	    }
4028 	}
4029     }
4030 
4031   for (i = ipa_get_param_count (old_root_info);
4032        i < ipa_get_cs_argument_count (args);
4033        i++)
4034     {
4035       struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4036 
4037       if (jf->type == IPA_JF_CONST)
4038 	{
4039 	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
4040 	  if (rdesc)
4041 	    rdesc->refcount = IPA_UNDESCRIBED_USE;
4042 	}
4043       else if (jf->type == IPA_JF_PASS_THROUGH)
4044 	ipa_set_controlled_uses (new_root_info,
4045 				 jf->value.pass_through.formal_id,
4046 				 IPA_UNDESCRIBED_USE);
4047     }
4048 }
4049 
4050 /* Update jump functions and call note functions on inlining the call site CS.
4051    CS is expected to lead to a node already cloned by
4052    cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
4053    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were +
4054    created.  */
4055 
4056 bool
ipa_propagate_indirect_call_infos(struct cgraph_edge * cs,vec<cgraph_edge * > * new_edges)4057 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
4058 				   vec<cgraph_edge *> *new_edges)
4059 {
4060   bool changed;
4061   /* Do nothing if the preparation phase has not been carried out yet
4062      (i.e. during early inlining).  */
4063   if (!ipa_node_params_sum)
4064     return false;
4065   gcc_assert (ipa_edge_args_sum);
4066 
4067   propagate_controlled_uses (cs);
4068   changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
4069   ipa_node_params_sum->remove (cs->callee);
4070 
4071   class ipa_edge_args *args = IPA_EDGE_REF (cs);
4072   if (args)
4073     {
4074       bool ok = true;
4075       if (args->jump_functions)
4076 	{
4077 	  struct ipa_jump_func *jf;
4078 	  int i;
4079 	  FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4080 	    if (jf->type == IPA_JF_CONST
4081 		&& ipa_get_jf_constant_rdesc (jf))
4082 	      {
4083 		ok = false;
4084 		break;
4085 	      }
4086 	}
4087       if (ok)
4088         ipa_edge_args_sum->remove (cs);
4089     }
4090   if (ipcp_transformation_sum)
4091     ipcp_transformation_sum->remove (cs->callee);
4092 
4093   return changed;
4094 }
4095 
4096 /* Ensure that array of edge arguments infos is big enough to accommodate a
4097    structure for all edges and reallocates it if not.  Also, allocate
4098    associated hash tables is they do not already exist.  */
4099 
4100 void
ipa_check_create_edge_args(void)4101 ipa_check_create_edge_args (void)
4102 {
4103   if (!ipa_edge_args_sum)
4104     ipa_edge_args_sum
4105       = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
4106 	 ipa_edge_args_sum_t (symtab, true));
4107   if (!ipa_bits_hash_table)
4108     ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4109   if (!ipa_vr_hash_table)
4110     ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4111 }
4112 
4113 /* Free all ipa_edge structures.  */
4114 
4115 void
ipa_free_all_edge_args(void)4116 ipa_free_all_edge_args (void)
4117 {
4118   if (!ipa_edge_args_sum)
4119     return;
4120 
4121   ggc_delete (ipa_edge_args_sum);
4122   ipa_edge_args_sum = NULL;
4123 }
4124 
4125 /* Free all ipa_node_params structures.  */
4126 
4127 void
ipa_free_all_node_params(void)4128 ipa_free_all_node_params (void)
4129 {
4130   ggc_delete (ipa_node_params_sum);
4131   ipa_node_params_sum = NULL;
4132 }
4133 
4134 /* Initialize IPA CP transformation summary and also allocate any necessary hash
4135    tables if they do not already exist.  */
4136 
4137 void
ipcp_transformation_initialize(void)4138 ipcp_transformation_initialize (void)
4139 {
4140   if (!ipa_bits_hash_table)
4141     ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4142   if (!ipa_vr_hash_table)
4143     ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4144   if (ipcp_transformation_sum == NULL)
4145     ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
4146 }
4147 
4148 /* Release the IPA CP transformation summary.  */
4149 
4150 void
ipcp_free_transformation_sum(void)4151 ipcp_free_transformation_sum (void)
4152 {
4153   if (!ipcp_transformation_sum)
4154     return;
4155 
4156   ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4157   ggc_free (ipcp_transformation_sum);
4158   ipcp_transformation_sum = NULL;
4159 }
4160 
4161 /* Set the aggregate replacements of NODE to be AGGVALS.  */
4162 
4163 void
ipa_set_node_agg_value_chain(struct cgraph_node * node,struct ipa_agg_replacement_value * aggvals)4164 ipa_set_node_agg_value_chain (struct cgraph_node *node,
4165 			      struct ipa_agg_replacement_value *aggvals)
4166 {
4167   ipcp_transformation_initialize ();
4168   ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
4169   s->agg_values = aggvals;
4170 }
4171 
4172 /* Hook that is called by cgraph.c when an edge is removed.  Adjust reference
4173    count data structures accordingly.  */
4174 
4175 void
remove(cgraph_edge * cs,ipa_edge_args * args)4176 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
4177 {
4178   if (args->jump_functions)
4179     {
4180       struct ipa_jump_func *jf;
4181       int i;
4182       FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4183 	{
4184 	  struct ipa_cst_ref_desc *rdesc;
4185 	  try_decrement_rdesc_refcount (jf);
4186 	  if (jf->type == IPA_JF_CONST
4187 	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
4188 	      && rdesc->cs == cs)
4189 	    rdesc->cs = NULL;
4190 	}
4191     }
4192 }
4193 
4194 /* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
4195    reference count data strucutres accordingly.  */
4196 
4197 void
duplicate(cgraph_edge * src,cgraph_edge * dst,ipa_edge_args * old_args,ipa_edge_args * new_args)4198 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
4199 				ipa_edge_args *old_args, ipa_edge_args *new_args)
4200 {
4201   unsigned int i;
4202 
4203   new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
4204   if (old_args->polymorphic_call_contexts)
4205     new_args->polymorphic_call_contexts
4206       = vec_safe_copy (old_args->polymorphic_call_contexts);
4207 
4208   for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4209     {
4210       struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
4211       struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
4212 
4213       dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
4214 
4215       if (src_jf->type == IPA_JF_CONST)
4216 	{
4217 	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
4218 
4219 	  if (!src_rdesc)
4220 	    dst_jf->value.constant.rdesc = NULL;
4221 	  else if (src->caller == dst->caller)
4222 	    {
4223 	      struct ipa_ref *ref;
4224 	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
4225 	      gcc_checking_assert (n);
4226 	      ref = src->caller->find_reference (n, src->call_stmt,
4227 						 src->lto_stmt_uid);
4228 	      gcc_checking_assert (ref);
4229 	      dst->caller->clone_reference (ref, ref->stmt);
4230 
4231 	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4232 	      dst_rdesc->cs = dst;
4233 	      dst_rdesc->refcount = src_rdesc->refcount;
4234 	      dst_rdesc->next_duplicate = NULL;
4235 	      dst_jf->value.constant.rdesc = dst_rdesc;
4236 	    }
4237 	  else if (src_rdesc->cs == src)
4238 	    {
4239 	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4240 	      dst_rdesc->cs = dst;
4241 	      dst_rdesc->refcount = src_rdesc->refcount;
4242 	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
4243 	      src_rdesc->next_duplicate = dst_rdesc;
4244 	      dst_jf->value.constant.rdesc = dst_rdesc;
4245 	    }
4246 	  else
4247 	    {
4248 	      struct ipa_cst_ref_desc *dst_rdesc;
4249 	      /* This can happen during inlining, when a JFUNC can refer to a
4250 		 reference taken in a function up in the tree of inline clones.
4251 		 We need to find the duplicate that refers to our tree of
4252 		 inline clones.  */
4253 
4254 	      gcc_assert (dst->caller->inlined_to);
4255 	      for (dst_rdesc = src_rdesc->next_duplicate;
4256 		   dst_rdesc;
4257 		   dst_rdesc = dst_rdesc->next_duplicate)
4258 		{
4259 		  struct cgraph_node *top;
4260 		  top = dst_rdesc->cs->caller->inlined_to
4261 		    ? dst_rdesc->cs->caller->inlined_to
4262 		    : dst_rdesc->cs->caller;
4263 		  if (dst->caller->inlined_to == top)
4264 		    break;
4265 		}
4266 	      gcc_assert (dst_rdesc);
4267 	      dst_jf->value.constant.rdesc = dst_rdesc;
4268 	    }
4269 	}
4270       else if (dst_jf->type == IPA_JF_PASS_THROUGH
4271 	       && src->caller == dst->caller)
4272 	{
4273 	  struct cgraph_node *inline_root = dst->caller->inlined_to
4274 	    ? dst->caller->inlined_to : dst->caller;
4275 	  class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
4276 	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
4277 
4278 	  int c = ipa_get_controlled_uses (root_info, idx);
4279 	  if (c != IPA_UNDESCRIBED_USE)
4280 	    {
4281 	      c++;
4282 	      ipa_set_controlled_uses (root_info, idx, c);
4283 	    }
4284 	}
4285     }
4286 }
4287 
4288 /* Analyze newly added function into callgraph.  */
4289 
4290 static void
ipa_add_new_function(cgraph_node * node,void * data ATTRIBUTE_UNUSED)4291 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
4292 {
4293   if (node->has_gimple_body_p ())
4294     ipa_analyze_node (node);
4295 }
4296 
4297 /* Hook that is called by summary when a node is duplicated.  */
4298 
4299 void
duplicate(cgraph_node * src,cgraph_node * dst,ipa_node_params * old_info,ipa_node_params * new_info)4300 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
4301 			     ipa_node_params *old_info,
4302 			     ipa_node_params *new_info)
4303 {
4304   ipa_agg_replacement_value *old_av, *new_av;
4305 
4306   new_info->descriptors = vec_safe_copy (old_info->descriptors);
4307   new_info->lattices = NULL;
4308   new_info->ipcp_orig_node = old_info->ipcp_orig_node;
4309   new_info->known_csts = old_info->known_csts.copy ();
4310   new_info->known_contexts = old_info->known_contexts.copy ();
4311 
4312   new_info->analysis_done = old_info->analysis_done;
4313   new_info->node_enqueued = old_info->node_enqueued;
4314   new_info->versionable = old_info->versionable;
4315 
4316   old_av = ipa_get_agg_replacements_for_node (src);
4317   if (old_av)
4318     {
4319       new_av = NULL;
4320       while (old_av)
4321 	{
4322 	  struct ipa_agg_replacement_value *v;
4323 
4324 	  v = ggc_alloc<ipa_agg_replacement_value> ();
4325 	  memcpy (v, old_av, sizeof (*v));
4326 	  v->next = new_av;
4327 	  new_av = v;
4328 	  old_av = old_av->next;
4329 	}
4330       ipa_set_node_agg_value_chain (dst, new_av);
4331     }
4332 }
4333 
4334 /* Duplication of ipcp transformation summaries.  */
4335 
4336 void
duplicate(cgraph_node *,cgraph_node * dst,ipcp_transformation * src_trans,ipcp_transformation * dst_trans)4337 ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
4338 			         ipcp_transformation *src_trans,
4339 			         ipcp_transformation *dst_trans)
4340 {
4341   /* Avoid redundant work of duplicating vectors we will never use.  */
4342   if (dst->inlined_to)
4343     return;
4344   dst_trans->bits = vec_safe_copy (src_trans->bits);
4345   dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
4346   ipa_agg_replacement_value *agg = src_trans->agg_values,
4347 			    **aggptr = &dst_trans->agg_values;
4348   while (agg)
4349     {
4350       *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
4351       **aggptr = *agg;
4352       agg = agg->next;
4353       aggptr = &(*aggptr)->next;
4354     }
4355 }
4356 
4357 /* Register our cgraph hooks if they are not already there.  */
4358 
4359 void
ipa_register_cgraph_hooks(void)4360 ipa_register_cgraph_hooks (void)
4361 {
4362   ipa_check_create_node_params ();
4363   ipa_check_create_edge_args ();
4364 
4365   function_insertion_hook_holder =
4366       symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
4367 }
4368 
4369 /* Unregister our cgraph hooks if they are not already there.  */
4370 
4371 static void
ipa_unregister_cgraph_hooks(void)4372 ipa_unregister_cgraph_hooks (void)
4373 {
4374   symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
4375   function_insertion_hook_holder = NULL;
4376 }
4377 
4378 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4379    longer needed after ipa-cp.  */
4380 
4381 void
ipa_free_all_structures_after_ipa_cp(void)4382 ipa_free_all_structures_after_ipa_cp (void)
4383 {
4384   if (!optimize && !in_lto_p)
4385     {
4386       ipa_free_all_edge_args ();
4387       ipa_free_all_node_params ();
4388       ipcp_sources_pool.release ();
4389       ipcp_cst_values_pool.release ();
4390       ipcp_poly_ctx_values_pool.release ();
4391       ipcp_agg_lattice_pool.release ();
4392       ipa_unregister_cgraph_hooks ();
4393       ipa_refdesc_pool.release ();
4394     }
4395 }
4396 
4397 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4398    longer needed after indirect inlining.  */
4399 
4400 void
ipa_free_all_structures_after_iinln(void)4401 ipa_free_all_structures_after_iinln (void)
4402 {
4403   ipa_free_all_edge_args ();
4404   ipa_free_all_node_params ();
4405   ipa_unregister_cgraph_hooks ();
4406   ipcp_sources_pool.release ();
4407   ipcp_cst_values_pool.release ();
4408   ipcp_poly_ctx_values_pool.release ();
4409   ipcp_agg_lattice_pool.release ();
4410   ipa_refdesc_pool.release ();
4411 }
4412 
4413 /* Print ipa_tree_map data structures of all functions in the
4414    callgraph to F.  */
4415 
4416 void
ipa_print_node_params(FILE * f,struct cgraph_node * node)4417 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4418 {
4419   int i, count;
4420   class ipa_node_params *info;
4421 
4422   if (!node->definition)
4423     return;
4424   info = IPA_NODE_REF (node);
4425   fprintf (f, "  function  %s parameter descriptors:\n", node->dump_name ());
4426   if (!info)
4427     {
4428       fprintf (f, " no params return\n");
4429       return;
4430     }
4431   count = ipa_get_param_count (info);
4432   for (i = 0; i < count; i++)
4433     {
4434       int c;
4435 
4436       fprintf (f, "    ");
4437       ipa_dump_param (f, info, i);
4438       if (ipa_is_param_used (info, i))
4439 	fprintf (f, " used");
4440       if (ipa_is_param_used_by_ipa_predicates (info, i))
4441 	fprintf (f, " used_by_ipa_predicates");
4442       if (ipa_is_param_used_by_indirect_call (info, i))
4443 	fprintf (f, " used_by_indirect_call");
4444       if (ipa_is_param_used_by_polymorphic_call (info, i))
4445 	fprintf (f, " used_by_polymorphic_call");
4446       c = ipa_get_controlled_uses (info, i);
4447       if (c == IPA_UNDESCRIBED_USE)
4448 	fprintf (f, " undescribed_use");
4449       else
4450 	fprintf (f, "  controlled_uses=%i", c);
4451       fprintf (f, "\n");
4452     }
4453 }
4454 
4455 /* Print ipa_tree_map data structures of all functions in the
4456    callgraph to F.  */
4457 
4458 void
ipa_print_all_params(FILE * f)4459 ipa_print_all_params (FILE * f)
4460 {
4461   struct cgraph_node *node;
4462 
4463   fprintf (f, "\nFunction parameters:\n");
4464   FOR_EACH_FUNCTION (node)
4465     ipa_print_node_params (f, node);
4466 }
4467 
4468 /* Dump the AV linked list.  */
4469 
4470 void
ipa_dump_agg_replacement_values(FILE * f,struct ipa_agg_replacement_value * av)4471 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4472 {
4473   bool comma = false;
4474   fprintf (f, "     Aggregate replacements:");
4475   for (; av; av = av->next)
4476     {
4477       fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4478 	       av->index, av->offset);
4479       print_generic_expr (f, av->value);
4480       comma = true;
4481     }
4482   fprintf (f, "\n");
4483 }
4484 
4485 /* Stream out jump function JUMP_FUNC to OB.  */
4486 
4487 static void
ipa_write_jump_function(struct output_block * ob,struct ipa_jump_func * jump_func)4488 ipa_write_jump_function (struct output_block *ob,
4489 			 struct ipa_jump_func *jump_func)
4490 {
4491   struct ipa_agg_jf_item *item;
4492   struct bitpack_d bp;
4493   int i, count;
4494   int flag = 0;
4495 
4496   /* ADDR_EXPRs are very comon IP invariants; save some streamer data
4497      as well as WPA memory by handling them specially.  */
4498   if (jump_func->type == IPA_JF_CONST
4499       && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4500     flag = 1;
4501 
4502   streamer_write_uhwi (ob, jump_func->type * 2 + flag);
4503   switch (jump_func->type)
4504     {
4505     case IPA_JF_UNKNOWN:
4506       break;
4507     case IPA_JF_CONST:
4508       gcc_assert (
4509 	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4510       stream_write_tree (ob,
4511 			 flag
4512 			 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4513 			 : jump_func->value.constant.value, true);
4514       break;
4515     case IPA_JF_PASS_THROUGH:
4516       streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4517       if (jump_func->value.pass_through.operation == NOP_EXPR)
4518 	{
4519 	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4520 	  bp = bitpack_create (ob->main_stream);
4521 	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4522 	  streamer_write_bitpack (&bp);
4523 	}
4524       else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4525 	       == tcc_unary)
4526 	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4527       else
4528 	{
4529 	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4530 	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4531 	}
4532       break;
4533     case IPA_JF_ANCESTOR:
4534       streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4535       streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4536       bp = bitpack_create (ob->main_stream);
4537       bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4538       streamer_write_bitpack (&bp);
4539       break;
4540     default:
4541       fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4542     }
4543 
4544   count = vec_safe_length (jump_func->agg.items);
4545   streamer_write_uhwi (ob, count);
4546   if (count)
4547     {
4548       bp = bitpack_create (ob->main_stream);
4549       bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4550       streamer_write_bitpack (&bp);
4551     }
4552 
4553   FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4554     {
4555       stream_write_tree (ob, item->type, true);
4556       streamer_write_uhwi (ob, item->offset);
4557       streamer_write_uhwi (ob, item->jftype);
4558       switch (item->jftype)
4559 	{
4560 	case IPA_JF_UNKNOWN:
4561 	  break;
4562 	case IPA_JF_CONST:
4563 	  stream_write_tree (ob, item->value.constant, true);
4564 	  break;
4565 	case IPA_JF_PASS_THROUGH:
4566 	case IPA_JF_LOAD_AGG:
4567 	  streamer_write_uhwi (ob, item->value.pass_through.operation);
4568 	  streamer_write_uhwi (ob, item->value.pass_through.formal_id);
4569 	  if (TREE_CODE_CLASS (item->value.pass_through.operation)
4570 							!= tcc_unary)
4571 	    stream_write_tree (ob, item->value.pass_through.operand, true);
4572 	  if (item->jftype == IPA_JF_LOAD_AGG)
4573 	    {
4574 	      stream_write_tree (ob, item->value.load_agg.type, true);
4575 	      streamer_write_uhwi (ob, item->value.load_agg.offset);
4576 	      bp = bitpack_create (ob->main_stream);
4577 	      bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
4578 	      streamer_write_bitpack (&bp);
4579 	    }
4580 	  break;
4581 	default:
4582 	  fatal_error (UNKNOWN_LOCATION,
4583 		       "invalid jump function in LTO stream");
4584 	}
4585     }
4586 
4587   bp = bitpack_create (ob->main_stream);
4588   bp_pack_value (&bp, !!jump_func->bits, 1);
4589   streamer_write_bitpack (&bp);
4590   if (jump_func->bits)
4591     {
4592       streamer_write_widest_int (ob, jump_func->bits->value);
4593       streamer_write_widest_int (ob, jump_func->bits->mask);
4594     }
4595   bp_pack_value (&bp, !!jump_func->m_vr, 1);
4596   streamer_write_bitpack (&bp);
4597   if (jump_func->m_vr)
4598     {
4599       streamer_write_enum (ob->main_stream, value_rang_type,
4600 			   VR_LAST, jump_func->m_vr->kind ());
4601       stream_write_tree (ob, jump_func->m_vr->min (), true);
4602       stream_write_tree (ob, jump_func->m_vr->max (), true);
4603     }
4604 }
4605 
4606 /* Read in jump function JUMP_FUNC from IB.  */
4607 
4608 static void
ipa_read_jump_function(class lto_input_block * ib,struct ipa_jump_func * jump_func,struct cgraph_edge * cs,class data_in * data_in,bool prevails)4609 ipa_read_jump_function (class lto_input_block *ib,
4610 			struct ipa_jump_func *jump_func,
4611 			struct cgraph_edge *cs,
4612 			class data_in *data_in,
4613 			bool prevails)
4614 {
4615   enum jump_func_type jftype;
4616   enum tree_code operation;
4617   int i, count;
4618   int val = streamer_read_uhwi (ib);
4619   bool flag = val & 1;
4620 
4621   jftype = (enum jump_func_type) (val / 2);
4622   switch (jftype)
4623     {
4624     case IPA_JF_UNKNOWN:
4625       ipa_set_jf_unknown (jump_func);
4626       break;
4627     case IPA_JF_CONST:
4628       {
4629 	tree t = stream_read_tree (ib, data_in);
4630 	if (flag && prevails)
4631 	  t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
4632 	ipa_set_jf_constant (jump_func, t, cs);
4633       }
4634       break;
4635     case IPA_JF_PASS_THROUGH:
4636       operation = (enum tree_code) streamer_read_uhwi (ib);
4637       if (operation == NOP_EXPR)
4638 	{
4639 	  int formal_id =  streamer_read_uhwi (ib);
4640 	  struct bitpack_d bp = streamer_read_bitpack (ib);
4641 	  bool agg_preserved = bp_unpack_value (&bp, 1);
4642 	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4643 	}
4644       else if (TREE_CODE_CLASS (operation) == tcc_unary)
4645 	{
4646 	  int formal_id =  streamer_read_uhwi (ib);
4647 	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4648 	}
4649       else
4650 	{
4651 	  tree operand = stream_read_tree (ib, data_in);
4652 	  int formal_id =  streamer_read_uhwi (ib);
4653 	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4654 					 operation);
4655 	}
4656       break;
4657     case IPA_JF_ANCESTOR:
4658       {
4659 	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4660 	int formal_id = streamer_read_uhwi (ib);
4661 	struct bitpack_d bp = streamer_read_bitpack (ib);
4662 	bool agg_preserved = bp_unpack_value (&bp, 1);
4663 	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4664 	break;
4665       }
4666     default:
4667       fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4668     }
4669 
4670   count = streamer_read_uhwi (ib);
4671   if (prevails)
4672     vec_alloc (jump_func->agg.items, count);
4673   if (count)
4674     {
4675       struct bitpack_d bp = streamer_read_bitpack (ib);
4676       jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4677     }
4678   for (i = 0; i < count; i++)
4679     {
4680       struct ipa_agg_jf_item item;
4681       item.type = stream_read_tree (ib, data_in);
4682       item.offset = streamer_read_uhwi (ib);
4683       item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4684 
4685       switch (item.jftype)
4686 	{
4687 	case IPA_JF_UNKNOWN:
4688 	  break;
4689 	case IPA_JF_CONST:
4690 	  item.value.constant = stream_read_tree (ib, data_in);
4691 	  break;
4692 	case IPA_JF_PASS_THROUGH:
4693 	case IPA_JF_LOAD_AGG:
4694 	  operation = (enum tree_code) streamer_read_uhwi (ib);
4695 	  item.value.pass_through.operation = operation;
4696 	  item.value.pass_through.formal_id = streamer_read_uhwi (ib);
4697 	  if (TREE_CODE_CLASS (operation) == tcc_unary)
4698 	    item.value.pass_through.operand = NULL_TREE;
4699 	  else
4700 	    item.value.pass_through.operand = stream_read_tree (ib, data_in);
4701 	  if (item.jftype == IPA_JF_LOAD_AGG)
4702 	    {
4703 	      struct bitpack_d bp;
4704 	      item.value.load_agg.type = stream_read_tree (ib, data_in);
4705 	      item.value.load_agg.offset = streamer_read_uhwi (ib);
4706 	      bp = streamer_read_bitpack (ib);
4707 	      item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
4708 	    }
4709 	  break;
4710 	default:
4711 	  fatal_error (UNKNOWN_LOCATION,
4712 		       "invalid jump function in LTO stream");
4713 	}
4714       if (prevails)
4715         jump_func->agg.items->quick_push (item);
4716     }
4717 
4718   struct bitpack_d bp = streamer_read_bitpack (ib);
4719   bool bits_known = bp_unpack_value (&bp, 1);
4720   if (bits_known)
4721     {
4722       widest_int value = streamer_read_widest_int (ib);
4723       widest_int mask = streamer_read_widest_int (ib);
4724       if (prevails)
4725         ipa_set_jfunc_bits (jump_func, value, mask);
4726     }
4727   else
4728     jump_func->bits = NULL;
4729 
4730   struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4731   bool vr_known = bp_unpack_value (&vr_bp, 1);
4732   if (vr_known)
4733     {
4734       enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
4735 						       VR_LAST);
4736       tree min = stream_read_tree (ib, data_in);
4737       tree max = stream_read_tree (ib, data_in);
4738       if (prevails)
4739         ipa_set_jfunc_vr (jump_func, type, min, max);
4740     }
4741   else
4742     jump_func->m_vr = NULL;
4743 }
4744 
4745 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4746    relevant to indirect inlining to OB.  */
4747 
4748 static void
ipa_write_indirect_edge_info(struct output_block * ob,struct cgraph_edge * cs)4749 ipa_write_indirect_edge_info (struct output_block *ob,
4750 			      struct cgraph_edge *cs)
4751 {
4752   class cgraph_indirect_call_info *ii = cs->indirect_info;
4753   struct bitpack_d bp;
4754 
4755   streamer_write_hwi (ob, ii->param_index);
4756   bp = bitpack_create (ob->main_stream);
4757   bp_pack_value (&bp, ii->polymorphic, 1);
4758   bp_pack_value (&bp, ii->agg_contents, 1);
4759   bp_pack_value (&bp, ii->member_ptr, 1);
4760   bp_pack_value (&bp, ii->by_ref, 1);
4761   bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4762   bp_pack_value (&bp, ii->vptr_changed, 1);
4763   streamer_write_bitpack (&bp);
4764   if (ii->agg_contents || ii->polymorphic)
4765     streamer_write_hwi (ob, ii->offset);
4766   else
4767     gcc_assert (ii->offset == 0);
4768 
4769   if (ii->polymorphic)
4770     {
4771       streamer_write_hwi (ob, ii->otr_token);
4772       stream_write_tree (ob, ii->otr_type, true);
4773       ii->context.stream_out (ob);
4774     }
4775 }
4776 
4777 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4778    relevant to indirect inlining from IB.  */
4779 
4780 static void
ipa_read_indirect_edge_info(class lto_input_block * ib,class data_in * data_in,struct cgraph_edge * cs,class ipa_node_params * info)4781 ipa_read_indirect_edge_info (class lto_input_block *ib,
4782 			     class data_in *data_in,
4783 			     struct cgraph_edge *cs,
4784 			     class ipa_node_params *info)
4785 {
4786   class cgraph_indirect_call_info *ii = cs->indirect_info;
4787   struct bitpack_d bp;
4788 
4789   ii->param_index = (int) streamer_read_hwi (ib);
4790   bp = streamer_read_bitpack (ib);
4791   ii->polymorphic = bp_unpack_value (&bp, 1);
4792   ii->agg_contents = bp_unpack_value (&bp, 1);
4793   ii->member_ptr = bp_unpack_value (&bp, 1);
4794   ii->by_ref = bp_unpack_value (&bp, 1);
4795   ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4796   ii->vptr_changed = bp_unpack_value (&bp, 1);
4797   if (ii->agg_contents || ii->polymorphic)
4798     ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4799   else
4800     ii->offset = 0;
4801   if (ii->polymorphic)
4802     {
4803       ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4804       ii->otr_type = stream_read_tree (ib, data_in);
4805       ii->context.stream_in (ib, data_in);
4806     }
4807   if (info && ii->param_index >= 0)
4808     {
4809       if (ii->polymorphic)
4810 	ipa_set_param_used_by_polymorphic_call (info,
4811 						ii->param_index , true);
4812       ipa_set_param_used_by_indirect_call (info,
4813 					   ii->param_index, true);
4814     }
4815 }
4816 
4817 /* Stream out NODE info to OB.  */
4818 
4819 static void
ipa_write_node_info(struct output_block * ob,struct cgraph_node * node)4820 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4821 {
4822   int node_ref;
4823   lto_symtab_encoder_t encoder;
4824   class ipa_node_params *info = IPA_NODE_REF (node);
4825   int j;
4826   struct cgraph_edge *e;
4827   struct bitpack_d bp;
4828 
4829   encoder = ob->decl_state->symtab_node_encoder;
4830   node_ref = lto_symtab_encoder_encode (encoder, node);
4831   streamer_write_uhwi (ob, node_ref);
4832 
4833   streamer_write_uhwi (ob, ipa_get_param_count (info));
4834   for (j = 0; j < ipa_get_param_count (info); j++)
4835     streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4836   bp = bitpack_create (ob->main_stream);
4837   gcc_assert (info->analysis_done
4838 	      || ipa_get_param_count (info) == 0);
4839   gcc_assert (!info->node_enqueued);
4840   gcc_assert (!info->ipcp_orig_node);
4841   for (j = 0; j < ipa_get_param_count (info); j++)
4842     bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4843   streamer_write_bitpack (&bp);
4844   for (j = 0; j < ipa_get_param_count (info); j++)
4845     {
4846       streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4847       stream_write_tree (ob, ipa_get_type (info, j), true);
4848     }
4849   for (e = node->callees; e; e = e->next_callee)
4850     {
4851       class ipa_edge_args *args = IPA_EDGE_REF (e);
4852 
4853       if (!args)
4854 	{
4855 	  streamer_write_uhwi (ob, 0);
4856 	  continue;
4857 	}
4858 
4859       streamer_write_uhwi (ob,
4860 			   ipa_get_cs_argument_count (args) * 2
4861 			   + (args->polymorphic_call_contexts != NULL));
4862       for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4863 	{
4864 	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4865 	  if (args->polymorphic_call_contexts != NULL)
4866 	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4867 	}
4868     }
4869   for (e = node->indirect_calls; e; e = e->next_callee)
4870     {
4871       class ipa_edge_args *args = IPA_EDGE_REF (e);
4872       if (!args)
4873 	streamer_write_uhwi (ob, 0);
4874       else
4875 	{
4876 	  streamer_write_uhwi (ob,
4877 			       ipa_get_cs_argument_count (args) * 2
4878 			       + (args->polymorphic_call_contexts != NULL));
4879 	  for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4880 	    {
4881 	      ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4882 	      if (args->polymorphic_call_contexts != NULL)
4883 		ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4884 	    }
4885 	}
4886       ipa_write_indirect_edge_info (ob, e);
4887     }
4888 }
4889 
4890 /* Stream in edge E from IB.  */
4891 
4892 static void
ipa_read_edge_info(class lto_input_block * ib,class data_in * data_in,struct cgraph_edge * e,bool prevails)4893 ipa_read_edge_info (class lto_input_block *ib,
4894 		    class data_in *data_in,
4895 		    struct cgraph_edge *e, bool prevails)
4896 {
4897   int count = streamer_read_uhwi (ib);
4898   bool contexts_computed = count & 1;
4899 
4900   count /= 2;
4901   if (!count)
4902     return;
4903   if (prevails && e->possibly_call_in_translation_unit_p ())
4904     {
4905       class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (e);
4906       vec_safe_grow_cleared (args->jump_functions, count);
4907       if (contexts_computed)
4908 	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4909       for (int k = 0; k < count; k++)
4910 	{
4911 	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4912 				  data_in, prevails);
4913 	  if (contexts_computed)
4914 	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in
4915 							     (ib, data_in);
4916 	}
4917     }
4918   else
4919     {
4920       for (int k = 0; k < count; k++)
4921 	{
4922 	  struct ipa_jump_func dummy;
4923 	  ipa_read_jump_function (ib, &dummy, e,
4924 				  data_in, prevails);
4925 	  if (contexts_computed)
4926 	    {
4927 	      class ipa_polymorphic_call_context ctx;
4928 	      ctx.stream_in (ib, data_in);
4929 	    }
4930 	}
4931     }
4932 }
4933 
4934 /* Stream in NODE info from IB.  */
4935 
4936 static void
ipa_read_node_info(class lto_input_block * ib,struct cgraph_node * node,class data_in * data_in)4937 ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
4938 		    class data_in *data_in)
4939 {
4940   int k;
4941   struct cgraph_edge *e;
4942   struct bitpack_d bp;
4943   bool prevails = node->prevailing_p ();
4944   class ipa_node_params *info = prevails
4945 				? IPA_NODE_REF_GET_CREATE (node) : NULL;
4946 
4947   int param_count = streamer_read_uhwi (ib);
4948   if (prevails)
4949     {
4950       ipa_alloc_node_params (node, param_count);
4951       for (k = 0; k < param_count; k++)
4952         (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
4953       if (ipa_get_param_count (info) != 0)
4954 	info->analysis_done = true;
4955       info->node_enqueued = false;
4956     }
4957   else
4958     for (k = 0; k < param_count; k++)
4959       streamer_read_uhwi (ib);
4960 
4961   bp = streamer_read_bitpack (ib);
4962   for (k = 0; k < param_count; k++)
4963     {
4964       bool used = bp_unpack_value (&bp, 1);
4965 
4966       if (prevails)
4967         ipa_set_param_used (info, k, used);
4968     }
4969   for (k = 0; k < param_count; k++)
4970     {
4971       int nuses = streamer_read_hwi (ib);
4972       tree type = stream_read_tree (ib, data_in);
4973 
4974       if (prevails)
4975 	{
4976 	  ipa_set_controlled_uses (info, k, nuses);
4977 	  (*info->descriptors)[k].decl_or_type = type;
4978 	}
4979     }
4980   for (e = node->callees; e; e = e->next_callee)
4981     ipa_read_edge_info (ib, data_in, e, prevails);
4982   for (e = node->indirect_calls; e; e = e->next_callee)
4983     {
4984       ipa_read_edge_info (ib, data_in, e, prevails);
4985       ipa_read_indirect_edge_info (ib, data_in, e, info);
4986     }
4987 }
4988 
4989 /* Write jump functions for nodes in SET.  */
4990 
4991 void
ipa_prop_write_jump_functions(void)4992 ipa_prop_write_jump_functions (void)
4993 {
4994   struct cgraph_node *node;
4995   struct output_block *ob;
4996   unsigned int count = 0;
4997   lto_symtab_encoder_iterator lsei;
4998   lto_symtab_encoder_t encoder;
4999 
5000   if (!ipa_node_params_sum || !ipa_edge_args_sum)
5001     return;
5002 
5003   ob = create_output_block (LTO_section_jump_functions);
5004   encoder = ob->decl_state->symtab_node_encoder;
5005   ob->symbol = NULL;
5006   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5007        lsei_next_function_in_partition (&lsei))
5008     {
5009       node = lsei_cgraph_node (lsei);
5010       if (node->has_gimple_body_p ()
5011 	  && IPA_NODE_REF (node) != NULL)
5012 	count++;
5013     }
5014 
5015   streamer_write_uhwi (ob, count);
5016 
5017   /* Process all of the functions.  */
5018   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5019        lsei_next_function_in_partition (&lsei))
5020     {
5021       node = lsei_cgraph_node (lsei);
5022       if (node->has_gimple_body_p ()
5023 	  && IPA_NODE_REF (node) != NULL)
5024         ipa_write_node_info (ob, node);
5025     }
5026   streamer_write_char_stream (ob->main_stream, 0);
5027   produce_asm (ob, NULL);
5028   destroy_output_block (ob);
5029 }
5030 
5031 /* Read section in file FILE_DATA of length LEN with data DATA.  */
5032 
5033 static void
ipa_prop_read_section(struct lto_file_decl_data * file_data,const char * data,size_t len)5034 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5035 		       size_t len)
5036 {
5037   const struct lto_function_header *header =
5038     (const struct lto_function_header *) data;
5039   const int cfg_offset = sizeof (struct lto_function_header);
5040   const int main_offset = cfg_offset + header->cfg_size;
5041   const int string_offset = main_offset + header->main_size;
5042   class data_in *data_in;
5043   unsigned int i;
5044   unsigned int count;
5045 
5046   lto_input_block ib_main ((const char *) data + main_offset,
5047 			   header->main_size, file_data->mode_table);
5048 
5049   data_in =
5050     lto_data_in_create (file_data, (const char *) data + string_offset,
5051 			header->string_size, vNULL);
5052   count = streamer_read_uhwi (&ib_main);
5053 
5054   for (i = 0; i < count; i++)
5055     {
5056       unsigned int index;
5057       struct cgraph_node *node;
5058       lto_symtab_encoder_t encoder;
5059 
5060       index = streamer_read_uhwi (&ib_main);
5061       encoder = file_data->symtab_node_encoder;
5062       node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5063 								index));
5064       gcc_assert (node->definition);
5065       ipa_read_node_info (&ib_main, node, data_in);
5066     }
5067   lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5068 			 len);
5069   lto_data_in_delete (data_in);
5070 }
5071 
5072 /* Read ipcp jump functions.  */
5073 
5074 void
ipa_prop_read_jump_functions(void)5075 ipa_prop_read_jump_functions (void)
5076 {
5077   struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5078   struct lto_file_decl_data *file_data;
5079   unsigned int j = 0;
5080 
5081   ipa_check_create_node_params ();
5082   ipa_check_create_edge_args ();
5083   ipa_register_cgraph_hooks ();
5084 
5085   while ((file_data = file_data_vec[j++]))
5086     {
5087       size_t len;
5088       const char *data
5089 	= lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5090 					&len);
5091       if (data)
5092         ipa_prop_read_section (file_data, data, len);
5093     }
5094 }
5095 
5096 void
write_ipcp_transformation_info(output_block * ob,cgraph_node * node)5097 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5098 {
5099   int node_ref;
5100   unsigned int count = 0;
5101   lto_symtab_encoder_t encoder;
5102   struct ipa_agg_replacement_value *aggvals, *av;
5103 
5104   aggvals = ipa_get_agg_replacements_for_node (node);
5105   encoder = ob->decl_state->symtab_node_encoder;
5106   node_ref = lto_symtab_encoder_encode (encoder, node);
5107   streamer_write_uhwi (ob, node_ref);
5108 
5109   for (av = aggvals; av; av = av->next)
5110     count++;
5111   streamer_write_uhwi (ob, count);
5112 
5113   for (av = aggvals; av; av = av->next)
5114     {
5115       struct bitpack_d bp;
5116 
5117       streamer_write_uhwi (ob, av->offset);
5118       streamer_write_uhwi (ob, av->index);
5119       stream_write_tree (ob, av->value, true);
5120 
5121       bp = bitpack_create (ob->main_stream);
5122       bp_pack_value (&bp, av->by_ref, 1);
5123       streamer_write_bitpack (&bp);
5124     }
5125 
5126   ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5127   if (ts && vec_safe_length (ts->m_vr) > 0)
5128     {
5129       count = ts->m_vr->length ();
5130       streamer_write_uhwi (ob, count);
5131       for (unsigned i = 0; i < count; ++i)
5132 	{
5133 	  struct bitpack_d bp;
5134 	  ipa_vr *parm_vr = &(*ts->m_vr)[i];
5135 	  bp = bitpack_create (ob->main_stream);
5136 	  bp_pack_value (&bp, parm_vr->known, 1);
5137 	  streamer_write_bitpack (&bp);
5138 	  if (parm_vr->known)
5139 	    {
5140 	      streamer_write_enum (ob->main_stream, value_rang_type,
5141 				   VR_LAST, parm_vr->type);
5142 	      streamer_write_wide_int (ob, parm_vr->min);
5143 	      streamer_write_wide_int (ob, parm_vr->max);
5144 	    }
5145 	}
5146     }
5147   else
5148     streamer_write_uhwi (ob, 0);
5149 
5150   if (ts && vec_safe_length (ts->bits) > 0)
5151     {
5152       count = ts->bits->length ();
5153       streamer_write_uhwi (ob, count);
5154 
5155       for (unsigned i = 0; i < count; ++i)
5156 	{
5157 	  const ipa_bits *bits_jfunc = (*ts->bits)[i];
5158 	  struct bitpack_d bp = bitpack_create (ob->main_stream);
5159 	  bp_pack_value (&bp, !!bits_jfunc, 1);
5160 	  streamer_write_bitpack (&bp);
5161 	  if (bits_jfunc)
5162 	    {
5163 	      streamer_write_widest_int (ob, bits_jfunc->value);
5164 	      streamer_write_widest_int (ob, bits_jfunc->mask);
5165 	    }
5166 	}
5167     }
5168   else
5169     streamer_write_uhwi (ob, 0);
5170 }
5171 
5172 /* Stream in the aggregate value replacement chain for NODE from IB.  */
5173 
5174 static void
read_ipcp_transformation_info(lto_input_block * ib,cgraph_node * node,data_in * data_in)5175 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5176 			       data_in *data_in)
5177 {
5178   struct ipa_agg_replacement_value *aggvals = NULL;
5179   unsigned int count, i;
5180 
5181   count = streamer_read_uhwi (ib);
5182   for (i = 0; i <count; i++)
5183     {
5184       struct ipa_agg_replacement_value *av;
5185       struct bitpack_d bp;
5186 
5187       av = ggc_alloc<ipa_agg_replacement_value> ();
5188       av->offset = streamer_read_uhwi (ib);
5189       av->index = streamer_read_uhwi (ib);
5190       av->value = stream_read_tree (ib, data_in);
5191       bp = streamer_read_bitpack (ib);
5192       av->by_ref = bp_unpack_value (&bp, 1);
5193       av->next = aggvals;
5194       aggvals = av;
5195     }
5196   ipa_set_node_agg_value_chain (node, aggvals);
5197 
5198   count = streamer_read_uhwi (ib);
5199   if (count > 0)
5200     {
5201       ipcp_transformation_initialize ();
5202       ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5203       vec_safe_grow_cleared (ts->m_vr, count);
5204       for (i = 0; i < count; i++)
5205 	{
5206 	  ipa_vr *parm_vr;
5207 	  parm_vr = &(*ts->m_vr)[i];
5208 	  struct bitpack_d bp;
5209 	  bp = streamer_read_bitpack (ib);
5210 	  parm_vr->known = bp_unpack_value (&bp, 1);
5211 	  if (parm_vr->known)
5212 	    {
5213 	      parm_vr->type = streamer_read_enum (ib, value_range_kind,
5214 						  VR_LAST);
5215 	      parm_vr->min = streamer_read_wide_int (ib);
5216 	      parm_vr->max = streamer_read_wide_int (ib);
5217 	    }
5218 	}
5219     }
5220   count = streamer_read_uhwi (ib);
5221   if (count > 0)
5222     {
5223       ipcp_transformation_initialize ();
5224       ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5225       vec_safe_grow_cleared (ts->bits, count);
5226 
5227       for (i = 0; i < count; i++)
5228 	{
5229 	  struct bitpack_d bp = streamer_read_bitpack (ib);
5230 	  bool known = bp_unpack_value (&bp, 1);
5231 	  if (known)
5232 	    {
5233 	      const widest_int value = streamer_read_widest_int (ib);
5234 	      const widest_int mask = streamer_read_widest_int (ib);
5235 	      ipa_bits *bits
5236 		= ipa_get_ipa_bits_for_value (value, mask);
5237 	      (*ts->bits)[i] = bits;
5238 	    }
5239 	}
5240     }
5241 }
5242 
5243 /* Write all aggregate replacement for nodes in set.  */
5244 
5245 void
ipcp_write_transformation_summaries(void)5246 ipcp_write_transformation_summaries (void)
5247 {
5248   struct cgraph_node *node;
5249   struct output_block *ob;
5250   unsigned int count = 0;
5251   lto_symtab_encoder_iterator lsei;
5252   lto_symtab_encoder_t encoder;
5253 
5254   ob = create_output_block (LTO_section_ipcp_transform);
5255   encoder = ob->decl_state->symtab_node_encoder;
5256   ob->symbol = NULL;
5257   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5258        lsei_next_function_in_partition (&lsei))
5259     {
5260       node = lsei_cgraph_node (lsei);
5261       if (node->has_gimple_body_p ())
5262 	count++;
5263     }
5264 
5265   streamer_write_uhwi (ob, count);
5266 
5267   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5268        lsei_next_function_in_partition (&lsei))
5269     {
5270       node = lsei_cgraph_node (lsei);
5271       if (node->has_gimple_body_p ())
5272 	write_ipcp_transformation_info (ob, node);
5273     }
5274   streamer_write_char_stream (ob->main_stream, 0);
5275   produce_asm (ob, NULL);
5276   destroy_output_block (ob);
5277 }
5278 
5279 /* Read replacements section in file FILE_DATA of length LEN with data
5280    DATA.  */
5281 
5282 static void
read_replacements_section(struct lto_file_decl_data * file_data,const char * data,size_t len)5283 read_replacements_section (struct lto_file_decl_data *file_data,
5284 			   const char *data,
5285 			   size_t len)
5286 {
5287   const struct lto_function_header *header =
5288     (const struct lto_function_header *) data;
5289   const int cfg_offset = sizeof (struct lto_function_header);
5290   const int main_offset = cfg_offset + header->cfg_size;
5291   const int string_offset = main_offset + header->main_size;
5292   class data_in *data_in;
5293   unsigned int i;
5294   unsigned int count;
5295 
5296   lto_input_block ib_main ((const char *) data + main_offset,
5297 			   header->main_size, file_data->mode_table);
5298 
5299   data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5300 				header->string_size, vNULL);
5301   count = streamer_read_uhwi (&ib_main);
5302 
5303   for (i = 0; i < count; i++)
5304     {
5305       unsigned int index;
5306       struct cgraph_node *node;
5307       lto_symtab_encoder_t encoder;
5308 
5309       index = streamer_read_uhwi (&ib_main);
5310       encoder = file_data->symtab_node_encoder;
5311       node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5312 								index));
5313       gcc_assert (node->definition);
5314       read_ipcp_transformation_info (&ib_main, node, data_in);
5315     }
5316   lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5317 			 len);
5318   lto_data_in_delete (data_in);
5319 }
5320 
5321 /* Read IPA-CP aggregate replacements.  */
5322 
5323 void
ipcp_read_transformation_summaries(void)5324 ipcp_read_transformation_summaries (void)
5325 {
5326   struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5327   struct lto_file_decl_data *file_data;
5328   unsigned int j = 0;
5329 
5330   while ((file_data = file_data_vec[j++]))
5331     {
5332       size_t len;
5333       const char *data
5334 	= lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5335 					&len);
5336       if (data)
5337         read_replacements_section (file_data, data, len);
5338     }
5339 }
5340 
5341 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5342    NODE.  */
5343 
5344 static void
adjust_agg_replacement_values(struct cgraph_node * node,struct ipa_agg_replacement_value * aggval)5345 adjust_agg_replacement_values (struct cgraph_node *node,
5346 			       struct ipa_agg_replacement_value *aggval)
5347 {
5348   struct ipa_agg_replacement_value *v;
5349 
5350   if (!node->clone.param_adjustments)
5351     return;
5352 
5353   auto_vec<int, 16> new_indices;
5354   node->clone.param_adjustments->get_updated_indices (&new_indices);
5355   for (v = aggval; v; v = v->next)
5356     {
5357       gcc_checking_assert (v->index >= 0);
5358 
5359       if ((unsigned) v->index < new_indices.length ())
5360 	v->index = new_indices[v->index];
5361       else
5362 	/* This can happen if we know about a constant passed by reference by
5363 	   an argument which is never actually used for anything, let alone
5364 	   loading that constant.  */
5365 	v->index = -1;
5366     }
5367 }
5368 
5369 /* Dominator walker driving the ipcp modification phase.  */
5370 
5371 class ipcp_modif_dom_walker : public dom_walker
5372 {
5373 public:
ipcp_modif_dom_walker(struct ipa_func_body_info * fbi,vec<ipa_param_descriptor,va_gc> * descs,struct ipa_agg_replacement_value * av,bool * sc,bool * cc)5374   ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5375 			 vec<ipa_param_descriptor, va_gc> *descs,
5376 			 struct ipa_agg_replacement_value *av,
5377 			 bool *sc, bool *cc)
5378     : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5379       m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5380 
5381   virtual edge before_dom_children (basic_block);
5382 
5383 private:
5384   struct ipa_func_body_info *m_fbi;
5385   vec<ipa_param_descriptor, va_gc> *m_descriptors;
5386   struct ipa_agg_replacement_value *m_aggval;
5387   bool *m_something_changed, *m_cfg_changed;
5388 };
5389 
5390 edge
before_dom_children(basic_block bb)5391 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5392 {
5393   gimple_stmt_iterator gsi;
5394   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5395     {
5396       struct ipa_agg_replacement_value *v;
5397       gimple *stmt = gsi_stmt (gsi);
5398       tree rhs, val, t;
5399       HOST_WIDE_INT offset;
5400       poly_int64 size;
5401       int index;
5402       bool by_ref, vce;
5403 
5404       if (!gimple_assign_load_p (stmt))
5405 	continue;
5406       rhs = gimple_assign_rhs1 (stmt);
5407       if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5408 	continue;
5409 
5410       vce = false;
5411       t = rhs;
5412       while (handled_component_p (t))
5413 	{
5414 	  /* V_C_E can do things like convert an array of integers to one
5415 	     bigger integer and similar things we do not handle below.  */
5416 	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5417 	    {
5418 	      vce = true;
5419 	      break;
5420 	    }
5421 	  t = TREE_OPERAND (t, 0);
5422 	}
5423       if (vce)
5424 	continue;
5425 
5426       if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5427 				   &offset, &size, &by_ref))
5428 	continue;
5429       for (v = m_aggval; v; v = v->next)
5430 	if (v->index == index
5431 	    && v->offset == offset)
5432 	  break;
5433       if (!v
5434 	  || v->by_ref != by_ref
5435 	  || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
5436 		       size))
5437 	continue;
5438 
5439       gcc_checking_assert (is_gimple_ip_invariant (v->value));
5440       if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5441 	{
5442 	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5443 	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5444 	  else if (TYPE_SIZE (TREE_TYPE (rhs))
5445 		   == TYPE_SIZE (TREE_TYPE (v->value)))
5446 	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5447 	  else
5448 	    {
5449 	      if (dump_file)
5450 		{
5451 		  fprintf (dump_file, "    const ");
5452 		  print_generic_expr (dump_file, v->value);
5453 		  fprintf (dump_file, "  can't be converted to type of ");
5454 		  print_generic_expr (dump_file, rhs);
5455 		  fprintf (dump_file, "\n");
5456 		}
5457 	      continue;
5458 	    }
5459 	}
5460       else
5461 	val = v->value;
5462 
5463       if (dump_file && (dump_flags & TDF_DETAILS))
5464 	{
5465 	  fprintf (dump_file, "Modifying stmt:\n  ");
5466 	  print_gimple_stmt (dump_file, stmt, 0);
5467 	}
5468       gimple_assign_set_rhs_from_tree (&gsi, val);
5469       update_stmt (stmt);
5470 
5471       if (dump_file && (dump_flags & TDF_DETAILS))
5472 	{
5473 	  fprintf (dump_file, "into:\n  ");
5474 	  print_gimple_stmt (dump_file, stmt, 0);
5475 	  fprintf (dump_file, "\n");
5476 	}
5477 
5478       *m_something_changed = true;
5479       if (maybe_clean_eh_stmt (stmt)
5480 	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5481 	*m_cfg_changed = true;
5482     }
5483   return NULL;
5484 }
5485 
5486 /* Return true if we have recorded VALUE and MASK about PARM.
5487    Set VALUE and MASk accordingly.  */
5488 
5489 bool
ipcp_get_parm_bits(tree parm,tree * value,widest_int * mask)5490 ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
5491 {
5492   cgraph_node *cnode = cgraph_node::get (current_function_decl);
5493   ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5494   if (!ts || vec_safe_length (ts->bits) == 0)
5495     return false;
5496 
5497   int i = 0;
5498   for (tree p = DECL_ARGUMENTS (current_function_decl);
5499        p != parm; p = DECL_CHAIN (p))
5500     {
5501       i++;
5502       /* Ignore static chain.  */
5503       if (!p)
5504 	return false;
5505     }
5506 
5507   if (cnode->clone.param_adjustments)
5508     {
5509       i = cnode->clone.param_adjustments->get_original_index (i);
5510       if (i < 0)
5511 	return false;
5512     }
5513 
5514   vec<ipa_bits *, va_gc> &bits = *ts->bits;
5515   if (!bits[i])
5516     return false;
5517   *mask = bits[i]->mask;
5518   *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
5519   return true;
5520 }
5521 
5522 
5523 /* Update bits info of formal parameters as described in
5524    ipcp_transformation.  */
5525 
5526 static void
ipcp_update_bits(struct cgraph_node * node)5527 ipcp_update_bits (struct cgraph_node *node)
5528 {
5529   ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5530 
5531   if (!ts || vec_safe_length (ts->bits) == 0)
5532     return;
5533   vec<ipa_bits *, va_gc> &bits = *ts->bits;
5534   unsigned count = bits.length ();
5535   if (!count)
5536     return;
5537 
5538   auto_vec<int, 16> new_indices;
5539   bool need_remapping = false;
5540   if (node->clone.param_adjustments)
5541     {
5542       node->clone.param_adjustments->get_updated_indices (&new_indices);
5543       need_remapping = true;
5544     }
5545   auto_vec <tree, 16> parm_decls;
5546   push_function_arg_decls (&parm_decls, node->decl);
5547 
5548   for (unsigned i = 0; i < count; ++i)
5549     {
5550       tree parm;
5551       if (need_remapping)
5552 	{
5553 	  if (i >= new_indices.length ())
5554 	    continue;
5555 	  int idx = new_indices[i];
5556 	  if (idx < 0)
5557 	    continue;
5558 	  parm = parm_decls[idx];
5559 	}
5560       else
5561 	parm = parm_decls[i];
5562       gcc_checking_assert (parm);
5563 
5564 
5565       if (!bits[i]
5566 	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5567 	       || POINTER_TYPE_P (TREE_TYPE (parm)))
5568 	  || !is_gimple_reg (parm))
5569 	continue;
5570 
5571       tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5572       if (!ddef)
5573 	continue;
5574 
5575       if (dump_file)
5576 	{
5577 	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
5578 	  print_hex (bits[i]->mask, dump_file);
5579 	  fprintf (dump_file, "\n");
5580 	}
5581 
5582       if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5583 	{
5584 	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5585 	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5586 
5587 	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5588 				  | wide_int::from (bits[i]->value, prec, sgn);
5589 	  set_nonzero_bits (ddef, nonzero_bits);
5590 	}
5591       else
5592 	{
5593 	  unsigned tem = bits[i]->mask.to_uhwi ();
5594 	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
5595 	  unsigned align = tem & -tem;
5596 	  unsigned misalign = bitpos & (align - 1);
5597 
5598 	  if (align > 1)
5599 	    {
5600 	      if (dump_file)
5601 		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5602 
5603 	      unsigned old_align, old_misalign;
5604 	      struct ptr_info_def *pi = get_ptr_info (ddef);
5605 	      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5606 
5607 	      if (old_known
5608 		  && old_align > align)
5609 		{
5610 		  if (dump_file)
5611 		    {
5612 		      fprintf (dump_file, "But alignment was already %u.\n", old_align);
5613 		      if ((old_misalign & (align - 1)) != misalign)
5614 			fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5615 				 old_misalign, misalign);
5616 		    }
5617 		  continue;
5618 		}
5619 
5620 	      if (old_known
5621 		  && ((misalign & (old_align - 1)) != old_misalign)
5622 		  && dump_file)
5623 		fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5624 			 old_misalign, misalign);
5625 
5626 	      set_ptr_info_alignment (pi, align, misalign);
5627 	    }
5628 	}
5629     }
5630 }
5631 
5632 bool
nonzero_p(tree expr_type)5633 ipa_vr::nonzero_p (tree expr_type) const
5634 {
5635   if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5636     return true;
5637 
5638   unsigned prec = TYPE_PRECISION (expr_type);
5639   return (type == VR_RANGE
5640 	  && TYPE_UNSIGNED (expr_type)
5641 	  && wi::eq_p (min, wi::one (prec))
5642 	  && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
5643 }
5644 
5645 /* Update value range of formal parameters as described in
5646    ipcp_transformation.  */
5647 
5648 static void
ipcp_update_vr(struct cgraph_node * node)5649 ipcp_update_vr (struct cgraph_node *node)
5650 {
5651   ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5652   if (!ts || vec_safe_length (ts->m_vr) == 0)
5653     return;
5654   const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5655   unsigned count = vr.length ();
5656   if (!count)
5657     return;
5658 
5659   auto_vec<int, 16> new_indices;
5660   bool need_remapping = false;
5661   if (node->clone.param_adjustments)
5662     {
5663       node->clone.param_adjustments->get_updated_indices (&new_indices);
5664       need_remapping = true;
5665     }
5666   auto_vec <tree, 16> parm_decls;
5667   push_function_arg_decls (&parm_decls, node->decl);
5668 
5669   for (unsigned i = 0; i < count; ++i)
5670     {
5671       tree parm;
5672       int remapped_idx;
5673       if (need_remapping)
5674 	{
5675 	  if (i >= new_indices.length ())
5676 	    continue;
5677 	  remapped_idx = new_indices[i];
5678 	  if (remapped_idx < 0)
5679 	    continue;
5680 	}
5681       else
5682 	remapped_idx = i;
5683 
5684       parm = parm_decls[remapped_idx];
5685 
5686       gcc_checking_assert (parm);
5687       tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5688 
5689       if (!ddef || !is_gimple_reg (parm))
5690 	continue;
5691 
5692       if (vr[i].known
5693 	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5694 	{
5695 	  tree type = TREE_TYPE (ddef);
5696 	  unsigned prec = TYPE_PRECISION (type);
5697 	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5698 	    {
5699 	      if (dump_file)
5700 		{
5701 		  fprintf (dump_file, "Setting value range of param %u "
5702 			   "(now %i) ", i, remapped_idx);
5703 		  fprintf (dump_file, "%s[",
5704 			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5705 		  print_decs (vr[i].min, dump_file);
5706 		  fprintf (dump_file, ", ");
5707 		  print_decs (vr[i].max, dump_file);
5708 		  fprintf (dump_file, "]\n");
5709 		}
5710 	      set_range_info (ddef, vr[i].type,
5711 			      wide_int_storage::from (vr[i].min, prec,
5712 						      TYPE_SIGN (type)),
5713 			      wide_int_storage::from (vr[i].max, prec,
5714 						      TYPE_SIGN (type)));
5715 	    }
5716 	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5717 		   && vr[i].nonzero_p (TREE_TYPE (ddef)))
5718 	    {
5719 	      if (dump_file)
5720 		fprintf (dump_file, "Setting nonnull for %u\n", i);
5721 	      set_ptr_nonnull (ddef);
5722 	    }
5723 	}
5724     }
5725 }
5726 
5727 /* IPCP transformation phase doing propagation of aggregate values.  */
5728 
5729 unsigned int
ipcp_transform_function(struct cgraph_node * node)5730 ipcp_transform_function (struct cgraph_node *node)
5731 {
5732   vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5733   struct ipa_func_body_info fbi;
5734   struct ipa_agg_replacement_value *aggval;
5735   int param_count;
5736   bool cfg_changed = false, something_changed = false;
5737 
5738   gcc_checking_assert (cfun);
5739   gcc_checking_assert (current_function_decl);
5740 
5741   if (dump_file)
5742     fprintf (dump_file, "Modification phase of node %s\n",
5743 	     node->dump_name ());
5744 
5745   ipcp_update_bits (node);
5746   ipcp_update_vr (node);
5747   aggval = ipa_get_agg_replacements_for_node (node);
5748   if (!aggval)
5749       return 0;
5750   param_count = count_formal_params (node->decl);
5751   if (param_count == 0)
5752     return 0;
5753   adjust_agg_replacement_values (node, aggval);
5754   if (dump_file)
5755     ipa_dump_agg_replacement_values (dump_file, aggval);
5756 
5757   fbi.node = node;
5758   fbi.info = NULL;
5759   fbi.bb_infos = vNULL;
5760   fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5761   fbi.param_count = param_count;
5762   fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
5763 
5764   vec_safe_grow_cleared (descriptors, param_count);
5765   ipa_populate_param_decls (node, *descriptors);
5766   calculate_dominance_info (CDI_DOMINATORS);
5767   ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5768 			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5769 
5770   int i;
5771   struct ipa_bb_info *bi;
5772   FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5773     free_ipa_bb_info (bi);
5774   fbi.bb_infos.release ();
5775   free_dominance_info (CDI_DOMINATORS);
5776 
5777   ipcp_transformation *s = ipcp_transformation_sum->get (node);
5778   s->agg_values = NULL;
5779   s->bits = NULL;
5780   s->m_vr = NULL;
5781 
5782   vec_free (descriptors);
5783 
5784   if (!something_changed)
5785     return 0;
5786 
5787   if (cfg_changed)
5788     delete_unreachable_blocks_update_callgraph (node, false);
5789 
5790   return TODO_update_ssa_only_virtuals;
5791 }
5792 
5793 
5794 /* Return true if OTHER describes same agg value.  */
5795 bool
equal_to(const ipa_agg_value & other)5796 ipa_agg_value::equal_to (const ipa_agg_value &other)
5797 {
5798   return offset == other.offset
5799 	 && operand_equal_p (value, other.value, 0);
5800 }
5801 #include "gt-ipa-prop.h"
5802