/* Interprocedural analyses.
   Copyright (C) 2005-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-streamer.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "fold-const.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "calls.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "gimple-pretty-print.h"
#include "params.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "domwalk.h"
#include "builtins.h"

/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Edge summary for IPA-CP edge information.  */
ipa_edge_args_sum_t *ipa_edge_args_sum;

/* Traits for a hash table for reusing already existing ipa_bits.  */

struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
{
  typedef ipa_bits *value_type;
  typedef ipa_bits *compare_type;
  static hashval_t
  hash (const ipa_bits *p)
  {
    hashval_t t = (hashval_t) p->value.to_shwi ();
    return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
  }
  static bool
  equal (const ipa_bits *a, const ipa_bits *b)
    {
      return a->value == b->value && a->mask == b->mask;
    }
  static void
  mark_empty (ipa_bits *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const ipa_bits *p)
    {
      return p == NULL;
    }
  static bool
  is_deleted (const ipa_bits *p)
    {
      return p == reinterpret_cast<const ipa_bits *> (1);
    }
  static void
  mark_deleted (ipa_bits *&p)
    {
      p = reinterpret_cast<ipa_bits *> (1);
    }
};

/* Hash table to avoid repeated allocations of equal ipa_bits.  */
static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;

/* Traits for a hash table for reusing value_ranges used for IPA.  Note that
   the equiv bitmap is not hashed and is expected to be NULL.  */

struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
{
  typedef value_range *value_type;
  typedef value_range *compare_type;
  static hashval_t
  hash (const value_range *p)
    {
      gcc_checking_assert (!p->equiv);
      inchash::hash hstate (p->type);
      hstate.add_ptr (p->min);
      hstate.add_ptr (p->max);
      return hstate.end ();
    }
  static bool
  equal (const value_range *a, const value_range *b)
    {
      return a->type == b->type && a->min == b->min && a->max == b->max;
    }
  static void
  mark_empty (value_range *&p)
    {
      p = NULL;
    }
  static bool
  is_empty (const value_range *p)
    {
      return p == NULL;
    }
  static bool
  is_deleted (const value_range *p)
    {
      return p == reinterpret_cast<const value_range *> (1);
    }
  static void
  mark_deleted (value_range *&p)
    {
      p = reinterpret_cast<value_range *> (1);
    }
};

/* Hash table to avoid repeated allocations of equal value_ranges.  */
static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;

/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions");

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
}
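
/* For illustration (an editorial example, not part of the original sources):
   a function compiled with its own optimization settings, e.g.

     __attribute__ ((optimize ("O0"))) void cold_path (void);

   carries DECL_FUNCTION_SPECIFIC_OPTIMIZATION, and because -O0 implies
   neither optimize nor flag_ipa_cp for that decl, the predicate above
   returns true and IPA-CP analysis skips the function.  */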

/* Return the index of the formal whose tree is PTREE in the function whose
   parameters are described by DESCRIPTORS.  */
186 
187 static int
188 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
189 			    tree ptree)
190 {
191   int i, count;
192 
193   count = vec_safe_length (descriptors);
194   for (i = 0; i < count; i++)
195     if ((*descriptors)[i].decl_or_type == ptree)
196       return i;
197 
198   return -1;
199 }
200 
201 /* Return index of the formal whose tree is PTREE in function which corresponds
202    to INFO.  */
203 
204 int
205 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
206 {
207   return ipa_get_param_decl_index_1 (info->descriptors, ptree);
208 }
209 
210 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
211    NODE.  */
212 
213 static void
214 ipa_populate_param_decls (struct cgraph_node *node,
215 			  vec<ipa_param_descriptor, va_gc> &descriptors)
216 {
217   tree fndecl;
218   tree fnargs;
219   tree parm;
220   int param_num;
221 
222   fndecl = node->decl;
223   gcc_assert (gimple_has_body_p (fndecl));
224   fnargs = DECL_ARGUMENTS (fndecl);
225   param_num = 0;
226   for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
227     {
228       descriptors[param_num].decl_or_type = parm;
229       descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
230 							     true);
231       param_num++;
232     }
233 }
234 
235 /* Return how many formal parameters FNDECL has.  */
236 
237 int
238 count_formal_params (tree fndecl)
239 {
240   tree parm;
241   int count = 0;
242   gcc_assert (gimple_has_body_p (fndecl));
243 
244   for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
245     count++;
246 
247   return count;
248 }
249 
/* Dump the textual representation of the Ith formal parameter of the function
   corresponding to INFO into FILE.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if ((*info->descriptors)[i].decl_or_type)
    {
      fprintf (file, " ");
      print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
    }
}

/* If necessary, allocate vector of parameter descriptors in info of NODE.
   Return true if they were allocated, false if not.  */

static bool
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors && param_count)
    {
      vec_safe_grow_cleared (info->descriptors, param_count);
      return true;
    }
  else
    return false;
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors
      && ipa_alloc_node_params (node, count_formal_params (node->decl)))
    ipa_populate_param_decls (node, *info->descriptors);
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f, jump_func->value.pass_through.operand);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, "         Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value);
                }
              fprintf (f, "\n");
            }
        }

      struct ipa_polymorphic_call_context *ctx
        = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
        {
          fprintf (f, "         Context: ");
          ctx->dump (f);
        }

      if (jump_func->bits)
        {
          fprintf (f, "         value: ");
          print_hex (jump_func->bits->value, f);
          fprintf (f, ", mask: ");
          print_hex (jump_func->bits->mask, f);
          fprintf (f, "\n");
        }
      else
        fprintf (f, "         Unknown bits\n");

      if (jump_func->m_vr)
        {
          fprintf (f, "         VR  ");
          fprintf (f, "%s[",
                   (jump_func->m_vr->type == VR_ANTI_RANGE) ? "~" : "");
          print_decs (wi::to_wide (jump_func->m_vr->min), f);
          fprintf (f, ", ");
          print_decs (wi::to_wide (jump_func->m_vr->max), f);
          fprintf (f, "]\n");
        }
      else
        fprintf (f, "         Unknown VR\n");
    }
}


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, "    callsite  %s -> %s : \n",
               node->dump_name (),
               cs->callee->dump_name ());
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by value");
      else
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC,
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
                 ii->offset);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      if (ii->polymorphic)
        ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a jump function that tells us really nothing about the
   value.  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->bits = NULL;
  jfunc->m_vr = NULL;
}

/* Set DST to be a copy of the jump function SRC (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
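
/* An illustrative sketch (editorial, not from the original sources) of when
   an rdesc is attached: for a call such as

     register_callback (&worker);   // worker is a FUNCTION_DECL

   the constant is an ADDR_EXPR of a FUNCTION_DECL, so the jump function
   above records a reference description with refcount 1, allowing IPA to
   keep track of this reference to worker and to drop it if the described
   use later disappears.  */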

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be a unary pass-through jump function.  */

static void
ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an arithmetic pass-through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
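
/* For illustration (mirroring the larger example further below in this
   file), a statement sequence such as

     D.1845_2 = &this_1(D)->D.1748;
     A::bar (D.1845_2);

   is summarized by an ancestor jump function whose OFFSET is the bit
   position of the field D.1748 within *this and whose FORMAL_ID refers to
   the THIS parameter.  */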

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type associated with the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
  */

static bool
stmt_may_be_vtbl_ptr_store (gimple *stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_ref_base_and_extent to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}
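
/* As an illustrative example (hypothetical GIMPLE), a constructor typically
   contains a statement of the form

     this_1(D)->_vptr.A = &_ZTV1A + 16;

   whose LHS is a COMPONENT_REF of a DECL_VIRTUAL_P field, so the function
   above conservatively reports it as a possible VMT pointer store, whereas
   a store to an ordinary non-pointer scalar field is filtered out (under
   -fstrict-aliasing) by the checks above.  */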

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple *stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARAM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return true if the dynamic type may change
   between the beginning of the function and the point where CALL is invoked.

   Generally functions are not allowed to change the dynamic type of such
   instances, but they may call destructors.  We assume that methods cannot
   destroy the THIS pointer.  Also, as a special case, constructors and
   destructors may change the type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple *call)
{
  /* Pure functions cannot do any changes on the dynamic type;
     that requires writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inlined cdtor is actually working on ARG, but we don't have
     an easy tie on this, so punt on all non-pure cdtors).
     We may also record the types of cdtors and, once we know the type
     of the instance, match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
           || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
          /* THIS pointer of a method - here we want to watch constructors
             and destructors as those definitely may change the dynamic
             type.  */
          || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
              && !DECL_CXX_CONSTRUCTOR_P (function)
              && !DECL_CXX_DESTRUCTOR_P (function)
              && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
        {
          /* Walk the inline stack and watch out for ctors/dtors.  */
          for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
               block = BLOCK_SUPERCONTEXT (block))
            if (inlined_polymorphic_ctor_dtor_block_p (block, false))
              return true;
          return false;
        }
    }
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
                                       gcall *call, struct ipa_jump_func *jfunc,
                                       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
                                   TREE_OPERAND (base, 0),
                                   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
                                                call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
                                                call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked by setting
   the boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct ipa_func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct ipa_param_aa_status *
find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
                           int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
        return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
          && bi->param_aa_statuses[index].valid)
        return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct ipa_param_aa_status *
parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
                          int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
                           && !paa->ref_modified
                           && !paa->pt_modified);
      struct ipa_param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
        *paa = *dom_paa;
      else
        paa->valid = true;
    }

  return paa;
}
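
/* Sketch of the reuse this enables (hypothetical CFG): if bb3 is dominated
   by bb2 and the status of parameter 0 has already been computed and marked
   valid in bb2, then

     paa = parm_bb_aa_status_for_bb (fbi, bb3, 0);

   simply copies the status recorded for bb2 instead of starting another
   walk of virtual definitions from scratch.  */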

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have gathered
   so far but that does not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
                              gimple *stmt, tree parm_load)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  tree base = get_base_address (parm_load);
  gcc_assert (TREE_CODE (base) == PARM_DECL);
  if (TREE_READONLY (base))
    return true;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params, provided that the
   parameter has not been modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct ipa_func_body_info *fbi,
                            vec<ipa_param_descriptor, va_gc> *descriptors,
                            gimple *stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
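
/* Example of a statement this recognizes (illustrative GIMPLE, with "a"
   being an addressable parameter and therefore not a gimple register):

     a.0_2 = a;

   If "a" is the first parameter and no aliasing store modifies it before
   this load, the function returns index 0.  */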

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
                           int index, gimple *stmt, tree ref)
{
  struct ipa_param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
        return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
        return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
                                   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
                              gimple *call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
                                                              gimple_bb (call),
                                                              index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
                                   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading
   data from an aggregate passed as a parameter.

   The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it
   returns false if it cannot prove that the value has not been modified before
   the load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true
   even if it cannot prove the value has not been modified, in that case it
   will store false to *GUARANTEED_UNMODIFIED, otherwise it will store true
   there.

   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

bool
ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
                        vec<ipa_param_descriptor, va_gc> *descriptors,
                        gimple *stmt, tree op, int *index_p,
                        HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
                        bool *by_ref_p, bool *guaranteed_unmodified)
{
  int index;
  HOST_WIDE_INT size;
  bool reverse;
  tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);

  if (!base)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (fbi, index, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          if (guaranteed_unmodified)
            *guaranteed_unmodified = true;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
         void (*<T2e4>) (struct S *) D.1867;
         struct S * p.1;

         <bb 2>:
         p.1_1 = p;
         D.1867_2 = p.1_1->f;
         D.1867_2 ();
         gdp = &p;
      */

      gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0)
    {
      bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
      if (!data_preserved && !guaranteed_unmodified)
        return false;

      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      if (guaranteed_unmodified)
        *guaranteed_unmodified = data_preserved;
      return true;
    }
  return false;
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which is
   also passed by reference).  Such situations are described by an ancestor
   jump function and arise from code such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters, accessed at
   different stages of IPA optimizations.  PARMS_AINFO contains the information
   that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
                                  struct ipa_node_params *info,
                                  struct ipa_jump_func *jfunc,
                                  gcall *call, gimple *stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size;
  tree op1, tc_ssa, base, ssa;
  bool reverse;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (fbi, info->descriptors,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      switch (gimple_assign_rhs_class (stmt))
        {
        case GIMPLE_BINARY_RHS:
          {
            tree op2 = gimple_assign_rhs2 (stmt);
            if (!is_gimple_ip_invariant (op2)
                || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
                     != tcc_comparison)
                    && !useless_type_conversion_p (TREE_TYPE (name),
                                                   TREE_TYPE (op1))))
              return;

            ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                           gimple_assign_rhs_code (stmt));
            break;
          }
        case GIMPLE_SINGLE_RHS:
          {
            bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
                                                       tc_ssa);
            ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
            break;
          }
        case GIMPLE_UNARY_RHS:
          if (is_gimple_assign (stmt)
              && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
              && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
            ipa_set_jf_unary_pass_through (jfunc, index,
                                           gimple_assign_rhs_code (stmt));
        default:;
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
  offset_int mem_offset;
  if (!base
      || TREE_CODE (base) != MEM_REF
      || !mem_ref_offset (base).is_constant (&mem_offset))
    return;
  offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
                         parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size;
  tree expr, parm, obj;
  bool reverse;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);

  offset_int mem_offset;
  if (!expr
      || TREE_CODE (expr) != MEM_REF
      || !mem_ref_offset (expr).is_constant (&mem_offset))
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
                                    struct ipa_node_params *info,
                                    struct ipa_jump_func *jfunc,
                                    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple *assign, *cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
                       parm_ref_data_pass_through_p (fbi, index, call, parm));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
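
/* The shape being matched corresponds to the usual C++ ABI representation
   of a pointer to member function, roughly (illustrative only):

     struct
     {
       void (*__pfn) ();    /- method pointer (may encode virtuality) -/
       ptrdiff_t __delta;   /- adjustment to apply to THIS -/
     };
*/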

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}
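
/* For instance (illustrative GIMPLE), given the copies

     b_2 = a_1(D);
     c_3 = b_2;

   get_ssa_def_if_simple_copy (c_3) follows the chain and returns a_1(D),
   stopping there because default definitions have no defining copy.  */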

/* Simple linked list, describing known contents of an aggregate before a
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents are known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
                                HOST_WIDE_INT lhs_offset,
                                HOST_WIDE_INT lhs_size,
                                bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
        return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
        /* We already know this value is subsequently overwritten with
           something else.  */
        *already_there = true;
      else
        /* Otherwise this is a partial overlap which we cannot
           represent.  */
        return NULL;
    }
  return p;
}
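
/* Example of the bookkeeping (hypothetical bit offsets): with entries
   already describing [0, 32) and [64, 96), asking for LHS_OFFSET 32 and
   LHS_SIZE 32 returns the link between the two entries; asking for 0/32
   again sets *ALREADY_THERE; asking for 16/32 is a partial overlap and
   yields NULL.  */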

/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET, and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
                               int const_count, HOST_WIDE_INT arg_offset,
                               struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
        {
          struct ipa_agg_jf_item item;
          item.offset = list->offset - arg_offset;
          gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
          item.value = unshare_expr_without_location (list->constant);
          jfunc->agg.items->quick_push (item);
        }
      list = list->next;
    }
}

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
                                         tree arg_type,
                                         struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
    return;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (arg_type));
          arg_size = tree_to_uhwi (type_size);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          bool reverse;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
                                                  &arg_size, &reverse);
          if (!arg_base)
            return;
          if (DECL_P (arg_base))
            {
              check_ref = false;
              ao_ref_init (&r, arg_base);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      bool reverse;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
                                              &arg_size, &reverse);
      if (!arg_base)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
     structures describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple *stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size;
      tree lhs, rhs, lhs_base;
      bool reverse;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
          || TREE_CODE (lhs) == BIT_FIELD_REF
          || contains_bitfld_component_ref_p (lhs))
        break;

      lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
                                              &lhs_size, &reverse);
      if (!lhs_base)
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        {
          if (DECL_P (lhs_base))
            continue;
          else
            break;
        }

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
                                          &already_there);
      if (!p)
        break;
      if (already_there)
        continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }
1691   /* The third stage just goes over the list and creates an appropriate
1692      vector of ipa_agg_jf_item structures out of it, of course only if there
1693      are any known constants to begin with.  */
1694 
1695   if (const_count)
1696     {
1697       jfunc->agg.by_ref = by_ref;
1698       build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1699     }
1700 }
1701 
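/* To illustrate the three stages (a hypothetical sketch; caller, callee and
   struct S are placeholders, not taken from any testcase), consider:

     struct S { int a; long b; };

     void
     caller (void)
     {
       struct S s;
       s.a = 1;
       s.b = 2;
       callee (&s);
     }

   The first stage sets up the ao_ref for what &s points to, the backward
   walk of the second stage records the stores to s.b and s.a as constant
   items at their bit offsets, and the third stage turns the list into an
   aggregate jump function with by_ref set, because the argument is a
   pointer.  */
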
1702 /* Return the type of the Ith formal parameter of the callee associated
1703    with call graph edge E.  */
1704 
1705 tree
1706 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1707 {
1708   int n;
1709   tree type = (e->callee
1710 	       ? TREE_TYPE (e->callee->decl)
1711 	       : gimple_call_fntype (e->call_stmt));
1712   tree t = TYPE_ARG_TYPES (type);
1713 
1714   for (n = 0; n < i; n++)
1715     {
1716       if (!t)
1717         break;
1718       t = TREE_CHAIN (t);
1719     }
1720   if (t)
1721     return TREE_VALUE (t);
1722   if (!e->callee)
1723     return NULL;
1724   t = DECL_ARGUMENTS (e->callee->decl);
1725   for (n = 0; n < i; n++)
1726     {
1727       if (!t)
1728 	return NULL;
1729       t = TREE_CHAIN (t);
1730     }
1731   if (t)
1732     return TREE_TYPE (t);
1733   return NULL;
1734 }
1735 
1736 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
1737    allocated structure or a previously existing one shared with other jump
1738    functions and/or transformation summaries.  */
1739 
1740 ipa_bits *
1741 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
1742 {
1743   ipa_bits tmp;
1744   tmp.value = value;
1745   tmp.mask = mask;
1746 
1747   ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
1748   if (*slot)
1749     return *slot;
1750 
1751   ipa_bits *res = ggc_alloc<ipa_bits> ();
1752   res->value = value;
1753   res->mask = mask;
1754   *slot = res;
1755 
1756   return res;
1757 }
1758 
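/* A sketch of the sharing this achieves: for an argument whose lowest bit is
   known to be zero (an even value), a MASK of -2 leaves all bits but the
   lowest unknown and a VALUE of 0 says the known bit is zero.  Repeated
   requests yield the same GC-allocated record because the second lookup
   finds the slot already filled:

     widest_int value = 0, mask = -2;
     ipa_bits *b1 = ipa_get_ipa_bits_for_value (value, mask);
     ipa_bits *b2 = ipa_get_ipa_bits_for_value (value, mask);
     gcc_checking_assert (b1 == b2);  */
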
1759 /* Assign to JF a pointer to an ipa_bits structure with VALUE and MASK.  Use
1760    the hash table to avoid creating multiple identical ipa_bits structures.  */
1761 
1762 static void
1763 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
1764 		    const widest_int &mask)
1765 {
1766   jf->bits = ipa_get_ipa_bits_for_value (value, mask);
1767 }
1768 
1769 /* Return a pointer to a value_range just like *TMP, but either find it in
1770    ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */
1771 
1772 static value_range *
1773 ipa_get_value_range (value_range *tmp)
1774 {
1775   value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
1776   if (*slot)
1777     return *slot;
1778 
1779   value_range *vr = ggc_alloc<value_range> ();
1780   *vr = *tmp;
1781   *slot = vr;
1782 
1783   return vr;
1784 }
1785 
1786 /* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
1787    equiv set.  Use the hash table to avoid creating multiple identical copies
1788    of value_ranges.  */
1789 
1790 static value_range *
1791 ipa_get_value_range (enum value_range_type type, tree min, tree max)
1792 {
1793   value_range tmp;
1794   tmp.type = type;
1795   tmp.min = min;
1796   tmp.max = max;
1797   tmp.equiv = NULL;
1798   return ipa_get_value_range (&tmp);
1799 }
1800 
1801 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
1802    a NULL equiv bitmap.  Use the hash table to avoid creating multiple
1803    identical value_range structures.  */
1804 
1805 static void
1806 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_type type,
1807 		  tree min, tree max)
1808 {
1809   jf->m_vr = ipa_get_value_range (type, min, max);
1810 }
1811 
1812 /* Assign to JF a pointer to a value_range just like TMP, but either fetch a
1813    copy from ipa_vr_hash_table or allocate a new one in GC memory.  */
1814 
1815 static void
1816 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
1817 {
1818   jf->m_vr = ipa_get_value_range (tmp);
1819 }
1820 
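/* For instance (a sketch), recording that a pointer argument is known to be
   nonnull uses the helpers above to obtain the hash-consed anti-range
   ~[0, 0]:

     tree zero = build_int_cst (ptr_type_node, 0);
     ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, zero, zero);

   Any number of jump functions built this way end up pointing to a single
   value_range record in ipa_vr_hash_table.  */
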
1821 /* Compute jump functions for all arguments of callsite CS and insert the
1822    information in the jump_functions array in the ipa_edge_args corresponding
1823    to this callsite.  */
1824 
1825 static void
1826 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1827 				     struct cgraph_edge *cs)
1828 {
1829   struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1830   struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1831   gcall *call = cs->call_stmt;
1832   int n, arg_num = gimple_call_num_args (call);
1833   bool useful_context = false;
1834 
1835   if (arg_num == 0 || args->jump_functions)
1836     return;
1837   vec_safe_grow_cleared (args->jump_functions, arg_num);
1838   if (flag_devirtualize)
1839     vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1840 
1841   if (gimple_call_internal_p (call))
1842     return;
1843   if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1844     return;
1845 
1846   for (n = 0; n < arg_num; n++)
1847     {
1848       struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1849       tree arg = gimple_call_arg (call, n);
1850       tree param_type = ipa_get_callee_param_type (cs, n);
1851       if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1852 	{
1853 	  tree instance;
1854 	  struct ipa_polymorphic_call_context context (cs->caller->decl,
1855 						       arg, cs->call_stmt,
1856 						       &instance);
1857 	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1858 	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
1859 	  if (!context.useless_p ())
1860 	    useful_context = true;
1861 	}
1862 
1863       if (POINTER_TYPE_P (TREE_TYPE (arg)))
1864 	{
1865 	  bool addr_nonzero = false;
1866 	  bool strict_overflow = false;
1867 
1868 	  if (TREE_CODE (arg) == SSA_NAME
1869 	      && param_type
1870 	      && get_ptr_nonnull (arg))
1871 	    addr_nonzero = true;
1872 	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1873 	    addr_nonzero = true;
1874 
1875 	  if (addr_nonzero)
1876 	    {
1877 	      tree z = build_int_cst (TREE_TYPE (arg), 0);
1878 	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
1879 	    }
1880 	  else
1881 	    gcc_assert (!jfunc->m_vr);
1882 	}
1883       else
1884 	{
1885 	  wide_int min, max;
1886 	  value_range_type type;
1887 	  if (TREE_CODE (arg) == SSA_NAME
1888 	      && param_type
1889 	      && (type = get_range_info (arg, &min, &max))
1890 	      && (type == VR_RANGE || type == VR_ANTI_RANGE))
1891 	    {
1892 	      value_range tmpvr, resvr;
1893 
1894 	      tmpvr.type = type;
1895 	      tmpvr.min = wide_int_to_tree (TREE_TYPE (arg), min);
1896 	      tmpvr.max = wide_int_to_tree (TREE_TYPE (arg), max);
1897 	      tmpvr.equiv = NULL;
1898 	      memset (&resvr, 0, sizeof (resvr));
1899 	      extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
1900 					     &tmpvr, TREE_TYPE (arg));
1901 	      if (resvr.type == VR_RANGE || resvr.type == VR_ANTI_RANGE)
1902 		ipa_set_jfunc_vr (jfunc, &resvr);
1903 	      else
1904 		gcc_assert (!jfunc->m_vr);
1905 	    }
1906 	  else
1907 	    gcc_assert (!jfunc->m_vr);
1908 	}
1909 
1910       if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1911 	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1912 	{
1913 	  if (TREE_CODE (arg) == SSA_NAME)
1914 	    ipa_set_jfunc_bits (jfunc, 0,
1915 				widest_int::from (get_nonzero_bits (arg),
1916 						  TYPE_SIGN (TREE_TYPE (arg))));
1917 	  else
1918 	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
1919 	}
1920       else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1921 	{
1922 	  unsigned HOST_WIDE_INT bitpos;
1923 	  unsigned align;
1924 
1925 	  get_pointer_alignment_1 (arg, &align, &bitpos);
1926 	  widest_int mask = wi::bit_and_not
1927 	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
1928 	     align / BITS_PER_UNIT - 1);
1929 	  widest_int value = bitpos / BITS_PER_UNIT;
1930 	  ipa_set_jfunc_bits (jfunc, value, mask);
1931 	}
1932       else
1933 	gcc_assert (!jfunc->bits);
1934 
1935       if (is_gimple_ip_invariant (arg)
1936 	  || (VAR_P (arg)
1937 	      && is_global_var (arg)
1938 	      && TREE_READONLY (arg)))
1939 	ipa_set_jf_constant (jfunc, arg, cs);
1940       else if (!is_gimple_reg_type (TREE_TYPE (arg))
1941 	       && TREE_CODE (arg) == PARM_DECL)
1942 	{
1943 	  int index = ipa_get_param_decl_index (info, arg);
1944 
1945 	  gcc_assert (index >= 0);
1946 	  /* Aggregate passed by value; check for a pass-through, otherwise we
1947 	     will attempt to fill in the aggregate contents later in this
1948 	     loop.  */
1949 	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1950 	    {
1951 	      ipa_set_jf_simple_pass_through (jfunc, index, false);
1952 	      continue;
1953 	    }
1954 	}
1955       else if (TREE_CODE (arg) == SSA_NAME)
1956 	{
1957 	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
1958 	    {
1959 	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1960 	      if (index >= 0)
1961 		{
1962 		  bool agg_p;
1963 		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1964 		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1965 		}
1966 	    }
1967 	  else
1968 	    {
1969 	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
1970 	      if (is_gimple_assign (stmt))
1971 		compute_complex_assign_jump_func (fbi, info, jfunc,
1972 						  call, stmt, arg, param_type);
1973 	      else if (gimple_code (stmt) == GIMPLE_PHI)
1974 		compute_complex_ancestor_jump_func (fbi, info, jfunc,
1975 						    call,
1976 						    as_a <gphi *> (stmt));
1977 	    }
1978 	}
1979 
1980       /* If ARG is a pointer, we cannot use its type to determine the type of
1981 	 the aggregate passed (because type conversions are ignored in gimple).
1982 	 Usually we can safely get the type from the function declaration, but
1983 	 for K&R prototypes or variadic functions we can try our luck with the
1984 	 type of the pointer passed.  TODO: Since we look for actual initialization
1985 	 of the memory object, we could work out the type from the stores we find.  */
1986       if (!param_type)
1987 	param_type = TREE_TYPE (arg);
1988 
1989       if ((jfunc->type != IPA_JF_PASS_THROUGH
1990 	      || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1991 	  && (jfunc->type != IPA_JF_ANCESTOR
1992 	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1993 	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1994 	      || POINTER_TYPE_P (param_type)))
1995 	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1996     }
1997   if (!useful_context)
1998     vec_free (args->polymorphic_call_contexts);
1999 }
2000 
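/* As an example of what the loop above derives (hypothetical code, for
   illustration only; caller and callee are placeholders):

     void
     caller (int x)
     {
       callee (x, 7, x + 1);
     }

   The first argument yields a simple pass-through jump function (the SSA
   name is the default definition of a parameter), the second one a constant
   jump function, and the third one an arithmetic pass-through built by
   compute_complex_assign_jump_func.  */
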
2001 /* Compute jump functions for all edges - both direct and indirect - outgoing
2002    from BB.  */
2003 
2004 static void
2005 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2006 {
2007   struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2008   int i;
2009   struct cgraph_edge *cs;
2010 
2011   FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2012     {
2013       struct cgraph_node *callee = cs->callee;
2014 
2015       if (callee)
2016 	{
2017 	  callee = callee->ultimate_alias_target ();
2018 	  /* We do not need to bother analyzing calls to unknown functions
2019 	     unless they may become known during lto/whopr.  */
2020 	  if (!callee->definition && !flag_lto)
2021 	    continue;
2022 	}
2023       ipa_compute_jump_functions_for_edge (fbi, cs);
2024     }
2025 }
2026 
2027 /* If STMT looks like a statement loading a value from a member pointer formal
2028    parameter, return that parameter and store the offset of the field to
2029    *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
2030    might be clobbered).  If USE_DELTA, then we look for a use of the delta
2031    field rather than the pfn.  */
2032 
2033 static tree
2034 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2035 				    HOST_WIDE_INT *offset_p)
2036 {
2037   tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2038 
2039   if (!gimple_assign_single_p (stmt))
2040     return NULL_TREE;
2041 
2042   rhs = gimple_assign_rhs1 (stmt);
2043   if (TREE_CODE (rhs) == COMPONENT_REF)
2044     {
2045       ref_field = TREE_OPERAND (rhs, 1);
2046       rhs = TREE_OPERAND (rhs, 0);
2047     }
2048   else
2049     ref_field = NULL_TREE;
2050   if (TREE_CODE (rhs) != MEM_REF)
2051     return NULL_TREE;
2052   rec = TREE_OPERAND (rhs, 0);
2053   if (TREE_CODE (rec) != ADDR_EXPR)
2054     return NULL_TREE;
2055   rec = TREE_OPERAND (rec, 0);
2056   if (TREE_CODE (rec) != PARM_DECL
2057       || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2058     return NULL_TREE;
2059   ref_offset = TREE_OPERAND (rhs, 1);
2060 
2061   if (use_delta)
2062     fld = delta_field;
2063   else
2064     fld = ptr_field;
2065   if (offset_p)
2066     *offset_p = int_bit_position (fld);
2067 
2068   if (ref_field)
2069     {
2070       if (integer_nonzerop (ref_offset))
2071 	return NULL_TREE;
2072       return ref_field == fld ? rec : NULL_TREE;
2073     }
2074   else
2075     return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2076       : NULL_TREE;
2077 }
2078 
2079 /* Returns true iff T is an SSA_NAME defined by a statement.  */
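/* A sketch of the record layout matched above (the usual Itanium C++ ABI
   shape that type_like_member_ptr_p checks for):

     struct ptrmemfunc { void (*__pfn) (void); ptrdiff_t __delta; };

   A load of F.__pfn from a PARM_DECL F is matched when USE_DELTA is false,
   a load of F.__delta when USE_DELTA is true.  */
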
2080 
2081 static bool
2082 ipa_is_ssa_with_stmt_def (tree t)
2083 {
2084   if (TREE_CODE (t) == SSA_NAME
2085       && !SSA_NAME_IS_DEFAULT_DEF (t))
2086     return true;
2087   else
2088     return false;
2089 }
2090 
2091 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2092    call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
2093    indirect call graph edge.  */
2094 
2095 static struct cgraph_edge *
2096 ipa_note_param_call (struct cgraph_node *node, int param_index,
2097 		     gcall *stmt)
2098 {
2099   struct cgraph_edge *cs;
2100 
2101   cs = node->get_edge (stmt);
2102   cs->indirect_info->param_index = param_index;
2103   cs->indirect_info->agg_contents = 0;
2104   cs->indirect_info->member_ptr = 0;
2105   cs->indirect_info->guaranteed_unmodified = 0;
2106   return cs;
2107 }
2108 
2109 /* Analyze the CALL, whose called address is TARGET, and examine uses of
2110    formal parameters of the caller FBI->node (described by FBI->info).
2111    Currently it checks whether the call calls a pointer that is a formal
2112    parameter and if so, the parameter is marked with the called flag and an
2113    indirect call graph edge describing the call is created.  This is very
2114    simple for ordinary pointers represented in SSA but not-so-nice when it
2115    comes to member pointers.  The ugly part of this function does nothing
2116    more than trying to match the pattern of such a call.  An example of such
2117    a pattern is the gimple dump below; the call is on the last line:
2119 
2120      <bb 2>:
2121        f$__delta_5 = f.__delta;
2122        f$__pfn_24 = f.__pfn;
2123 
2124    or
2125      <bb 2>:
2126        f$__delta_5 = MEM[(struct  *)&f];
2127        f$__pfn_24 = MEM[(struct  *)&f + 4B];
2128 
2129    and a few lines below:
2130 
2131      <bb 5>:
2132        D.2496_3 = (int) f$__pfn_24;
2133        D.2497_4 = D.2496_3 & 1;
2134        if (D.2497_4 != 0)
2135          goto <bb 3>;
2136        else
2137          goto <bb 4>;
2138 
2139      <bb 6>:
2140        D.2500_7 = (unsigned int) f$__delta_5;
2141        D.2501_8 = &S + D.2500_7;
2142        D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2143        D.2503_10 = *D.2502_9;
2144        D.2504_12 = f$__pfn_24 + -1;
2145        D.2505_13 = (unsigned int) D.2504_12;
2146        D.2506_14 = D.2503_10 + D.2505_13;
2147        D.2507_15 = *D.2506_14;
2148        iftmp.11_16 = (String:: *) D.2507_15;
2149 
2150      <bb 7>:
2151        # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2152        D.2500_19 = (unsigned int) f$__delta_5;
2153        D.2508_20 = &S + D.2500_19;
2154        D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2155 
2156    Such patterns are results of simple calls to a member pointer:
2157 
2158      int doprinting (int (MyString::* f)(int) const)
2159      {
2160        MyString S ("somestring");
2161 
2162        return (S.*f)(4);
2163      }
2164 
2165    The function also looks for called pointers loaded from aggregates passed
2166    by value or by reference.  */
2167 
2168 static void
2169 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2170 				tree target)
2171 {
2172   struct ipa_node_params *info = fbi->info;
2173   HOST_WIDE_INT offset;
2174   bool by_ref;
2175 
2176   if (SSA_NAME_IS_DEFAULT_DEF (target))
2177     {
2178       tree var = SSA_NAME_VAR (target);
2179       int index = ipa_get_param_decl_index (info, var);
2180       if (index >= 0)
2181 	ipa_note_param_call (fbi->node, index, call);
2182       return;
2183     }
2184 
2185   int index;
2186   gimple *def = SSA_NAME_DEF_STMT (target);
2187   bool guaranteed_unmodified;
2188   if (gimple_assign_single_p (def)
2189       && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2190 				 gimple_assign_rhs1 (def), &index, &offset,
2191 				 NULL, &by_ref, &guaranteed_unmodified))
2192     {
2193       struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2194       cs->indirect_info->offset = offset;
2195       cs->indirect_info->agg_contents = 1;
2196       cs->indirect_info->by_ref = by_ref;
2197       cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2198       return;
2199     }
2200 
2201   /* Now we need to try to match the complex pattern of calling a member
2202      pointer. */
2203   if (gimple_code (def) != GIMPLE_PHI
2204       || gimple_phi_num_args (def) != 2
2205       || !POINTER_TYPE_P (TREE_TYPE (target))
2206       || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2207     return;
2208 
2209   /* First, we need to check whether one of these is a load from a member
2210      pointer that is a parameter to this function. */
2211   tree n1 = PHI_ARG_DEF (def, 0);
2212   tree n2 = PHI_ARG_DEF (def, 1);
2213   if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2214     return;
2215   gimple *d1 = SSA_NAME_DEF_STMT (n1);
2216   gimple *d2 = SSA_NAME_DEF_STMT (n2);
2217 
2218   tree rec;
2219   basic_block bb, virt_bb;
2220   basic_block join = gimple_bb (def);
2221   if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2222     {
2223       if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2224 	return;
2225 
2226       bb = EDGE_PRED (join, 0)->src;
2227       virt_bb = gimple_bb (d2);
2228     }
2229   else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2230     {
2231       bb = EDGE_PRED (join, 1)->src;
2232       virt_bb = gimple_bb (d1);
2233     }
2234   else
2235     return;
2236 
2237   /* Second, we need to check that the basic blocks are laid out in the way
2238      corresponding to the pattern. */
2239 
2240   if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2241       || single_pred (virt_bb) != bb
2242       || single_succ (virt_bb) != join)
2243     return;
2244 
2245   /* Third, check that the branching depends on the least significant bit
2246      of the pfn.  */
2247 
2248   gimple *branch = last_stmt (bb);
2249   if (!branch || gimple_code (branch) != GIMPLE_COND)
2250     return;
2251 
2252   if ((gimple_cond_code (branch) != NE_EXPR
2253        && gimple_cond_code (branch) != EQ_EXPR)
2254       || !integer_zerop (gimple_cond_rhs (branch)))
2255     return;
2256 
2257   tree cond = gimple_cond_lhs (branch);
2258   if (!ipa_is_ssa_with_stmt_def (cond))
2259     return;
2260 
2261   def = SSA_NAME_DEF_STMT (cond);
2262   if (!is_gimple_assign (def)
2263       || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2264       || !integer_onep (gimple_assign_rhs2 (def)))
2265     return;
2266 
2267   cond = gimple_assign_rhs1 (def);
2268   if (!ipa_is_ssa_with_stmt_def (cond))
2269     return;
2270 
2271   def = SSA_NAME_DEF_STMT (cond);
2272 
2273   if (is_gimple_assign (def)
2274       && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2275     {
2276       cond = gimple_assign_rhs1 (def);
2277       if (!ipa_is_ssa_with_stmt_def (cond))
2278 	return;
2279       def = SSA_NAME_DEF_STMT (cond);
2280     }
2281 
2282   tree rec2;
2283   rec2 = ipa_get_stmt_member_ptr_load_param (def,
2284 					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
2285 					      == ptrmemfunc_vbit_in_delta),
2286 					     NULL);
2287   if (rec != rec2)
2288     return;
2289 
2290   index = ipa_get_param_decl_index (info, rec);
2291   if (index >= 0
2292       && parm_preserved_before_stmt_p (fbi, index, call, rec))
2293     {
2294       struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2295       cs->indirect_info->offset = offset;
2296       cs->indirect_info->agg_contents = 1;
2297       cs->indirect_info->member_ptr = 1;
2298       cs->indirect_info->guaranteed_unmodified = 1;
2299     }
2300 
2301   return;
2302 }
2303 
2304 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2305    object referenced in the expression is a formal parameter of the caller
2306    FBI->node (described by FBI->info), create a call note for the
2307    statement.  */
2308 
2309 static void
2310 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2311 			       gcall *call, tree target)
2312 {
2313   tree obj = OBJ_TYPE_REF_OBJECT (target);
2314   int index;
2315   HOST_WIDE_INT anc_offset;
2316 
2317   if (!flag_devirtualize)
2318     return;
2319 
2320   if (TREE_CODE (obj) != SSA_NAME)
2321     return;
2322 
2323   struct ipa_node_params *info = fbi->info;
2324   if (SSA_NAME_IS_DEFAULT_DEF (obj))
2325     {
2326       struct ipa_jump_func jfunc;
2327       if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2328 	return;
2329 
2330       anc_offset = 0;
2331       index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2332       gcc_assert (index >= 0);
2333       if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2334 				  call, &jfunc))
2335 	return;
2336     }
2337   else
2338     {
2339       struct ipa_jump_func jfunc;
2340       gimple *stmt = SSA_NAME_DEF_STMT (obj);
2341       tree expr;
2342 
2343       expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2344       if (!expr)
2345 	return;
2346       index = ipa_get_param_decl_index (info,
2347 					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2348       gcc_assert (index >= 0);
2349       if (detect_type_change (obj, expr, obj_type_ref_class (target),
2350 			      call, &jfunc, anc_offset))
2351 	return;
2352     }
2353 
2354   struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2355   struct cgraph_indirect_call_info *ii = cs->indirect_info;
2356   ii->offset = anc_offset;
2357   ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2358   ii->otr_type = obj_type_ref_class (target);
2359   ii->polymorphic = 1;
2360 }
2361 
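/* For instance (an illustrative sketch; A and caller are placeholders), in

     struct A { virtual void foo (); };

     void
     caller (struct A *a)
     {
       a->foo ();
     }

   the OBJ_TYPE_REF object is the default definition SSA name of the
   parameter, so the function above creates an indirect call note carrying
   that parameter's index, a zero ancestor offset and the OTR token of
   foo.  */
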
2362 /* Analyze whether and how the call statement CALL utilizes formal parameters
2363    of the caller FBI->node (described by FBI->info).  */
2365 
2366 static void
2367 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2368 {
2369   tree target = gimple_call_fn (call);
2370 
2371   if (!target
2372       || (TREE_CODE (target) != SSA_NAME
2373           && !virtual_method_call_p (target)))
2374     return;
2375 
2376   struct cgraph_edge *cs = fbi->node->get_edge (call);
2377   /* If we previously turned the call into a direct call, there is
2378      no need to analyze.  */
2379   if (cs && !cs->indirect_unknown_callee)
2380     return;
2381 
2382   if (cs->indirect_info->polymorphic && flag_devirtualize)
2383     {
2384       tree instance;
2385       tree target = gimple_call_fn (call);
2386       ipa_polymorphic_call_context context (current_function_decl,
2387 					    target, call, &instance);
2388 
2389       gcc_checking_assert (cs->indirect_info->otr_type
2390 			   == obj_type_ref_class (target));
2391       gcc_checking_assert (cs->indirect_info->otr_token
2392 			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2393 
2394       cs->indirect_info->vptr_changed
2395 	= !context.get_dynamic_type (instance,
2396 				     OBJ_TYPE_REF_OBJECT (target),
2397 				     obj_type_ref_class (target), call);
2398       cs->indirect_info->context = context;
2399     }
2400 
2401   if (TREE_CODE (target) == SSA_NAME)
2402     ipa_analyze_indirect_call_uses (fbi, call, target);
2403   else if (virtual_method_call_p (target))
2404     ipa_analyze_virtual_call_uses (fbi, call, target);
2405 }
2406 
2408 /* Analyze the statement STMT with respect to formal parameters (described in
2409    FBI->info) of the caller given by FBI->node.  Currently it only checks
2410    whether formal parameters are called.  */
2411 
2412 static void
2413 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2414 {
2415   if (is_gimple_call (stmt))
2416     ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2417 }
2418 
2419 /* Callback of walk_stmt_load_store_addr_ops, used for the visit_load,
2420    visit_store and visit_addr positions.  If OP is a parameter declaration,
2421    mark it as used in the info structure passed in DATA.  */
2422 
2423 static bool
2424 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2425 {
2426   struct ipa_node_params *info = (struct ipa_node_params *) data;
2427 
2428   op = get_base_address (op);
2429   if (op
2430       && TREE_CODE (op) == PARM_DECL)
2431     {
2432       int index = ipa_get_param_decl_index (info, op);
2433       gcc_assert (index >= 0);
2434       ipa_set_param_used (info, index, true);
2435     }
2436 
2437   return false;
2438 }
2439 
2440 /* Scan the statements in BB and inspect the uses of formal parameters.  Store
2441    the findings in various structures of the associated ipa_node_params
2442    structure, such as parameter flags, notes etc.  FBI holds various data about
2443    the function being analyzed.  */
2444 
2445 static void
2446 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2447 {
2448   gimple_stmt_iterator gsi;
2449   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2450     {
2451       gimple *stmt = gsi_stmt (gsi);
2452 
2453       if (is_gimple_debug (stmt))
2454 	continue;
2455 
2456       ipa_analyze_stmt_uses (fbi, stmt);
2457       walk_stmt_load_store_addr_ops (stmt, fbi->info,
2458 				     visit_ref_for_mod_analysis,
2459 				     visit_ref_for_mod_analysis,
2460 				     visit_ref_for_mod_analysis);
2461     }
2462   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2463     walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2464 				   visit_ref_for_mod_analysis,
2465 				   visit_ref_for_mod_analysis,
2466 				   visit_ref_for_mod_analysis);
2467 }
2468 
2469 /* Calculate controlled uses of parameters of NODE.  */
2470 
2471 static void
2472 ipa_analyze_controlled_uses (struct cgraph_node *node)
2473 {
2474   struct ipa_node_params *info = IPA_NODE_REF (node);
2475 
2476   for (int i = 0; i < ipa_get_param_count (info); i++)
2477     {
2478       tree parm = ipa_get_param (info, i);
2479       int controlled_uses = 0;
2480 
2481       /* For SSA regs, see if the parameter is used.  For non-SSA parameters
2482 	 we compute the flag during modification analysis.  */
2483       if (is_gimple_reg (parm))
2484 	{
2485 	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2486 				       parm);
2487 	  if (ddef && !has_zero_uses (ddef))
2488 	    {
2489 	      imm_use_iterator imm_iter;
2490 	      use_operand_p use_p;
2491 
2492 	      ipa_set_param_used (info, i, true);
2493 	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2494 		if (!is_gimple_call (USE_STMT (use_p)))
2495 		  {
2496 		    if (!is_gimple_debug (USE_STMT (use_p)))
2497 		      {
2498 			controlled_uses = IPA_UNDESCRIBED_USE;
2499 			break;
2500 		      }
2501 		  }
2502 		else
2503 		  controlled_uses++;
2504 	    }
2505 	  else
2506 	    controlled_uses = 0;
2507 	}
2508       else
2509 	controlled_uses = IPA_UNDESCRIBED_USE;
2510       ipa_set_controlled_uses (info, i, controlled_uses);
2511     }
2512 }
2513 
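/* As an illustration (hypothetical), in

     void
     caller (void (*fp) (void))
     {
       fp ();
       fp ();
     }

   every use of the default definition of FP is a call statement, so the
   walk above counts two controlled uses instead of bailing out with
   IPA_UNDESCRIBED_USE.  */
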
2514 /* Release the vectors held in BI.  */
2515 
2516 static void
2517 free_ipa_bb_info (struct ipa_bb_info *bi)
2518 {
2519   bi->cg_edges.release ();
2520   bi->param_aa_statuses.release ();
2521 }
2522 
2523 /* Dominator walker driving the analysis.  */
2524 
2525 class analysis_dom_walker : public dom_walker
2526 {
2527 public:
2528   analysis_dom_walker (struct ipa_func_body_info *fbi)
2529     : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2530 
2531   virtual edge before_dom_children (basic_block);
2532 
2533 private:
2534   struct ipa_func_body_info *m_fbi;
2535 };
2536 
2537 edge
2538 analysis_dom_walker::before_dom_children (basic_block bb)
2539 {
2540   ipa_analyze_params_uses_in_bb (m_fbi, bb);
2541   ipa_compute_jump_functions_for_bb (m_fbi, bb);
2542   return NULL;
2543 }
2544 
2545 /* Release body info FBI.  */
2546 
2547 void
2548 ipa_release_body_info (struct ipa_func_body_info *fbi)
2549 {
2550   int i;
2551   struct ipa_bb_info *bi;
2552 
2553   FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2554     free_ipa_bb_info (bi);
2555   fbi->bb_infos.release ();
2556 }
2557 
2558 /* Initialize the array describing properties of formal parameters
2559    of NODE, analyze their uses and compute jump functions associated
2560    with actual arguments of calls from within NODE.  */
2561 
2562 void
2563 ipa_analyze_node (struct cgraph_node *node)
2564 {
2565   struct ipa_func_body_info fbi;
2566   struct ipa_node_params *info;
2567 
2568   ipa_check_create_node_params ();
2569   ipa_check_create_edge_args ();
2570   info = IPA_NODE_REF (node);
2571 
2572   if (info->analysis_done)
2573     return;
2574   info->analysis_done = 1;
2575 
2576   if (ipa_func_spec_opts_forbid_analysis_p (node))
2577     {
2578       for (int i = 0; i < ipa_get_param_count (info); i++)
2579 	{
2580 	  ipa_set_param_used (info, i, true);
2581 	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2582 	}
2583       return;
2584     }
2585 
2586   struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2587   push_cfun (func);
2588   calculate_dominance_info (CDI_DOMINATORS);
2589   ipa_initialize_node_params (node);
2590   ipa_analyze_controlled_uses (node);
2591 
2592   fbi.node = node;
2593   fbi.info = IPA_NODE_REF (node);
2594   fbi.bb_infos = vNULL;
2595   fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2596   fbi.param_count = ipa_get_param_count (info);
2597   fbi.aa_walked = 0;
2598 
2599   for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2600     {
2601       ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2602       bi->cg_edges.safe_push (cs);
2603     }
2604 
2605   for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2606     {
2607       ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2608       bi->cg_edges.safe_push (cs);
2609     }
2610 
2611   analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2612 
2613   ipa_release_body_info (&fbi);
2614   free_dominance_info (CDI_DOMINATORS);
2615   pop_cfun ();
2616 }
2617 
2618 /* Update the jump functions associated with call graph edge E when the call
2619    graph edge CS is being inlined, assuming that E->caller is already (possibly
2620    indirectly) inlined into CS->callee and that E has not been inlined.  */
2621 
2622 static void
2623 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2624 				      struct cgraph_edge *e)
2625 {
2626   struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2627   struct ipa_edge_args *args = IPA_EDGE_REF (e);
2628   int count = ipa_get_cs_argument_count (args);
2629   int i;
2630 
2631   for (i = 0; i < count; i++)
2632     {
2633       struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2634       struct ipa_polymorphic_call_context *dst_ctx
2635 	= ipa_get_ith_polymorhic_call_context (args, i);
2636 
2637       if (dst->type == IPA_JF_ANCESTOR)
2638 	{
2639 	  struct ipa_jump_func *src;
2640 	  int dst_fid = dst->value.ancestor.formal_id;
2641 	  struct ipa_polymorphic_call_context *src_ctx
2642 	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2643 
2644 	  /* A variable number of arguments can cause havoc if we try to access
2645 	     an argument that does not exist on the inlined edge.  So make sure
2646 	     we don't.  */
2647 	  if (dst_fid >= ipa_get_cs_argument_count (top))
2648 	    {
2649 	      ipa_set_jf_unknown (dst);
2650 	      continue;
2651 	    }
2652 
2653 	  src = ipa_get_ith_jump_func (top, dst_fid);
2654 
2655 	  if (src_ctx && !src_ctx->useless_p ())
2656 	    {
2657 	      struct ipa_polymorphic_call_context ctx = *src_ctx;
2658 
2659 	      /* TODO: Make type preserved safe WRT contexts.  */
2660 	      if (!ipa_get_jf_ancestor_type_preserved (dst))
2661 		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2662 	      ctx.offset_by (dst->value.ancestor.offset);
2663 	      if (!ctx.useless_p ())
2664 		{
2665 		  if (!dst_ctx)
2666 		    {
2667 		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
2668 					     count);
2669 		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2670 		    }
2671 
2672 		  dst_ctx->combine_with (ctx);
2673 		}
2674 	    }
2675 
2676 	  if (src->agg.items
2677 	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2678 	    {
2679 	      struct ipa_agg_jf_item *item;
2680 	      int j;
2681 
2682 	      /* Currently we do not produce clobber aggregate jump functions;
2683 		 replace this with merging when we do.  */
2684 	      gcc_assert (!dst->agg.items);
2685 
2686 	      dst->agg.items = vec_safe_copy (src->agg.items);
2687 	      dst->agg.by_ref = src->agg.by_ref;
2688 	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2689 		item->offset -= dst->value.ancestor.offset;
2690 	    }
2691 
2692 	  if (src->type == IPA_JF_PASS_THROUGH
2693 	      && src->value.pass_through.operation == NOP_EXPR)
2694 	    {
2695 	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2696 	      dst->value.ancestor.agg_preserved &=
2697 		src->value.pass_through.agg_preserved;
2698 	    }
2699 	  else if (src->type == IPA_JF_PASS_THROUGH
2700 		   && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2701 	    {
2702 	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2703 	      dst->value.ancestor.agg_preserved = false;
2704 	    }
2705 	  else if (src->type == IPA_JF_ANCESTOR)
2706 	    {
2707 	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2708 	      dst->value.ancestor.offset += src->value.ancestor.offset;
2709 	      dst->value.ancestor.agg_preserved &=
2710 		src->value.ancestor.agg_preserved;
2711 	    }
2712 	  else
2713 	    ipa_set_jf_unknown (dst);
2714 	}
2715       else if (dst->type == IPA_JF_PASS_THROUGH)
2716 	{
2717 	  struct ipa_jump_func *src;
2718 	  /* We must check the range because of calls with a variable number of
2719 	     arguments, and we cannot combine jump functions with operations.  */
2720 	  if (dst->value.pass_through.operation == NOP_EXPR
2721 	      && (dst->value.pass_through.formal_id
2722 		  < ipa_get_cs_argument_count (top)))
2723 	    {
2724 	      int dst_fid = dst->value.pass_through.formal_id;
2725 	      src = ipa_get_ith_jump_func (top, dst_fid);
2726 	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2727 	      struct ipa_polymorphic_call_context *src_ctx
2728 		= ipa_get_ith_polymorhic_call_context (top, dst_fid);
2729 
2730 	      if (src_ctx && !src_ctx->useless_p ())
2731 		{
2732 		  struct ipa_polymorphic_call_context ctx = *src_ctx;
2733 
2734 		  /* TODO: Make type preserved safe WRT contexts.  */
2735 		  if (!ipa_get_jf_pass_through_type_preserved (dst))
2736 		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2737 		  if (!ctx.useless_p ())
2738 		    {
2739 		      if (!dst_ctx)
2740 			{
2741 			  vec_safe_grow_cleared (args->polymorphic_call_contexts,
2742 					         count);
2743 			  dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2744 			}
2745 		      dst_ctx->combine_with (ctx);
2746 		    }
2747 		}
2748 	      switch (src->type)
2749 		{
2750 		case IPA_JF_UNKNOWN:
2751 		  ipa_set_jf_unknown (dst);
2752 		  break;
2753 		case IPA_JF_CONST:
2754 		  ipa_set_jf_cst_copy (dst, src);
2755 		  break;
2756 
2757 		case IPA_JF_PASS_THROUGH:
2758 		  {
2759 		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
2760 		    enum tree_code operation;
2761 		    operation = ipa_get_jf_pass_through_operation (src);
2762 
2763 		    if (operation == NOP_EXPR)
2764 		      {
2765 			bool agg_p;
2766 			agg_p = dst_agg_p
2767 			  && ipa_get_jf_pass_through_agg_preserved (src);
2768 			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2769 		      }
2770 		    else if (TREE_CODE_CLASS (operation) == tcc_unary)
2771 		      ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2772 		    else
2773 		      {
2774 			tree operand = ipa_get_jf_pass_through_operand (src);
2775 			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2776 						       operation);
2777 		      }
2778 		    break;
2779 		  }
2780 		case IPA_JF_ANCESTOR:
2781 		  {
2782 		    bool agg_p;
2783 		    agg_p = dst_agg_p
2784 		      && ipa_get_jf_ancestor_agg_preserved (src);
2785 		    ipa_set_ancestor_jf (dst,
2786 					 ipa_get_jf_ancestor_offset (src),
2787 					 ipa_get_jf_ancestor_formal_id (src),
2788 					 agg_p);
2789 		    break;
2790 		  }
2791 		default:
2792 		  gcc_unreachable ();
2793 		}
2794 
2795 	      if (src->agg.items
2796 		  && (dst_agg_p || !src->agg.by_ref))
2797 		{
2798 		  /* Currently we do not produce clobber aggregate jump
2799 		     functions; replace this with merging when we do.  */
2800 		  gcc_assert (!dst->agg.items);
2801 
2802 		  dst->agg.by_ref = src->agg.by_ref;
2803 		  dst->agg.items = vec_safe_copy (src->agg.items);
2804 		}
2805 	    }
2806 	  else
2807 	    ipa_set_jf_unknown (dst);
2808 	}
2809     }
2810 }
2811 
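/* A hypothetical illustration of the combining above: let A call B passing
   the constant 4, let B merely forward its parameter to C, and let B be
   inlined into A.  The edge B->C carried a simple pass-through jump
   function; it is combined with the constant jump function found on the
   inlined edge (the IPA_JF_CONST case of the switch), so the surviving
   edge A->C ends up with a constant jump function with value 4.  */
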
2812 /* If TARGET is an ADDR_EXPR of a function declaration, make it the
2813    (possibly SPECULATIVE) destination of indirect edge IE and return the
2814    edge.  Otherwise, return NULL.  */
2815 
2816 struct cgraph_edge *
2817 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2818 				bool speculative)
2819 {
2820   struct cgraph_node *callee;
2821   struct ipa_call_summary *es = ipa_call_summaries->get (ie);
2822   bool unreachable = false;
2823 
2824   if (TREE_CODE (target) == ADDR_EXPR)
2825     target = TREE_OPERAND (target, 0);
2826   if (TREE_CODE (target) != FUNCTION_DECL)
2827     {
2828       target = canonicalize_constructor_val (target, NULL);
2829       if (!target || TREE_CODE (target) != FUNCTION_DECL)
2830 	{
2831 	  /* Member pointer call that goes through a VMT lookup.  */
2832 	  if (ie->indirect_info->member_ptr
2833 	      /* Or if the target is not an invariant expression and we do not
2834 		 know whether it will evaluate to a function at runtime.
2835 		 This can happen when folding through &VAR, where &VAR
2836 		 is IP invariant, but VAR itself is not.
2837 
2838 		 TODO: Revisit this when GCC 5 is branched.  It seems that the
2839 		 member_ptr check is not needed and that we may try to fold
2840 		 the expression and see if VAR is readonly.  */
2841 	      || !is_gimple_ip_invariant (target))
2842 	    {
2843 	      if (dump_enabled_p ())
2844 		{
2845 		  location_t loc = gimple_location_safe (ie->call_stmt);
2846 		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2847 				   "discovered direct call to non-invariant %s\n",
2848 				   ie->caller->dump_name ());
2849 		}
2850 	      return NULL;
2851 	    }
2852 
2853 	  if (dump_enabled_p ())
2855 	    {
2856 	      location_t loc = gimple_location_safe (ie->call_stmt);
2857 	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2858 			       "discovered direct call to non-function in %s, "
2859 			       "making it __builtin_unreachable\n",
2860 			       ie->caller->dump_name ());
2861 	    }
2862 
2863 	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2864 	  callee = cgraph_node::get_create (target);
2865 	  unreachable = true;
2866 	}
2867       else
2868 	callee = cgraph_node::get (target);
2869     }
2870   else
2871     callee = cgraph_node::get (target);
2872 
2873   /* Because may-edges are not explicitly represented and the vtable may be
2874      external, we may create the first reference to the object in the unit.  */
2875   if (!callee || callee->global.inlined_to)
2876     {
2877 
2878       /* We had better ensure we can refer to it.  In the case of a static
2879 	 function we are out of luck, since we have already removed its body.
2880 	 In the case of public functions we may or may not introduce the
2881 	 reference.  */
2883 	  || !TREE_PUBLIC (target))
2884 	{
2885 	  if (dump_file)
2886 	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2887 		     "(%s -> %s) but cannot refer to it.  Giving up.\n",
2888 		     ie->caller->dump_name (),
2889 		     ie->callee->dump_name ());
2890 	  return NULL;
2891 	}
2892       callee = cgraph_node::get_create (target);
2893     }
2894 
2895   /* If the edge is already speculated, check whether the target agrees.  */
2896   if (speculative && ie->speculative)
2897     {
2898       struct cgraph_edge *e2;
2899       struct ipa_ref *ref;
2900       ie->speculative_call_info (e2, ie, ref);
2901       if (e2->callee->ultimate_alias_target ()
2902 	  != callee->ultimate_alias_target ())
2903 	{
2904 	  if (dump_file)
2905 	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
2906 		     "target (%s -> %s) but the call is already "
2907 		     "speculated to %s. Giving up.\n",
2908 		     ie->caller->dump_name (), callee->dump_name (),
2909 		     e2->callee->dump_name ());
2910 	}
2911       else
2912 	{
2913 	  if (dump_file)
2914 	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2915 		     "(%s -> %s); this agrees with previous speculation.\n",
2916 		     ie->caller->dump_name (), callee->dump_name ());
2917 	}
2918       return NULL;
2919     }
2920 
2921   if (!dbg_cnt (devirt))
2922     return NULL;
2923 
2924   ipa_check_create_node_params ();
2925 
2926   /* We cannot make edges to inline clones.  It is a bug if someone removed
2927      the cgraph node too early.  */
2928   gcc_assert (!callee->global.inlined_to);
2929 
2930   if (dump_file && !unreachable)
2931     {
2932       fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2933 	       "(%s -> %s), for stmt ",
2934 	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2935 	       speculative ? "speculative" : "known",
2936 	       ie->caller->dump_name (),
2937 	       callee->dump_name ());
2938       if (ie->call_stmt)
2939 	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2940       else
2941 	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2942      }
2943   if (dump_enabled_p ())
2944     {
2945       location_t loc = gimple_location_safe (ie->call_stmt);
2946 
2947       dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2948 		       "converting indirect call in %s to direct call to %s\n",
2949 		       ie->caller->name (), callee->name ());
2950     }
2951   if (!speculative)
2952     {
2953       struct cgraph_edge *orig = ie;
2954       ie = ie->make_direct (callee);
2955       /* If we resolved a speculative edge, the cost is already up to date
2956 	 for the direct call (adjusted by inline_edge_duplication_hook).  */
2957       if (ie == orig)
2958 	{
2959 	  es = ipa_call_summaries->get (ie);
2960 	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2961 				 - eni_size_weights.call_cost);
2962 	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2963 				 - eni_time_weights.call_cost);
2964 	}
2965     }
2966   else
2967     {
2968       if (!callee->can_be_discarded_p ())
2969 	{
2970 	  cgraph_node *alias;
2971 	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2972 	  if (alias)
2973 	    callee = alias;
2974 	}
2975       /* make_speculative will update IE's cost to the direct call cost.  */
2976       ie = ie->make_speculative
2977 	     (callee, ie->count.apply_scale (8, 10));
2978     }
2979 
2980   return ie;
2981 }
2982 
2983 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2984    CONSTRUCTOR and return it.  Return NULL if the search fails for some
2985    reason.  */
2986 
2987 static tree
2988 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2989 {
2990   tree type = TREE_TYPE (constructor);
2991   if (TREE_CODE (type) != ARRAY_TYPE
2992       && TREE_CODE (type) != RECORD_TYPE)
2993     return NULL;
2994 
2995   unsigned ix;
2996   tree index, val;
2997   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2998     {
2999       HOST_WIDE_INT elt_offset;
3000       if (TREE_CODE (type) == ARRAY_TYPE)
3001        {
3002          offset_int off;
3003          tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3004          gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3005 
3006          if (index)
3007            {
3008 	     if (TREE_CODE (index) == RANGE_EXPR)
3009 	       off = wi::to_offset (TREE_OPERAND (index, 0));
3010 	     else
3011 	       off = wi::to_offset (index);
3012              if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3013                {
3014                  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3015                  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3016                  off = wi::sext (off - wi::to_offset (low_bound),
3017                                  TYPE_PRECISION (TREE_TYPE (index)));
3018                }
3019              off *= wi::to_offset (unit_size);
3020 	     /* ???  Handle more than just the first index of a
3021 	        RANGE_EXPR.  */
3022            }
3023          else
3024            off = wi::to_offset (unit_size) * ix;
3025 
3026          off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3027          if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3028            continue;
3029          elt_offset = off.to_shwi ();
3030        }
3031       else if (TREE_CODE (type) == RECORD_TYPE)
3032        {
3033          gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3034          if (DECL_BIT_FIELD (index))
3035            continue;
3036          elt_offset = int_bit_position (index);
3037        }
3038       else
3039        gcc_unreachable ();
3040 
3041       if (elt_offset > req_offset)
3042 	return NULL;
3043 
3044       if (TREE_CODE (val) == CONSTRUCTOR)
3045 	return find_constructor_constant_at_offset (val,
3046 						    req_offset - elt_offset);
3047 
3048       if (elt_offset == req_offset
3049 	  && is_gimple_reg_type (TREE_TYPE (val))
3050 	  && is_gimple_ip_invariant (val))
3051 	return val;
3052     }
3053   return NULL;
3054 }
3055 
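/* For example (a sketch; S, s and callee are placeholders), given

     struct S { int pad; void (*fp) (void); };
     static const struct S s = { 0, callee };

   a lookup in DECL_INITIAL of s with REQ_OFFSET equal to the bit position
   of the fp field compares int_bit_position of each FIELD_DECL with the
   requested offset and returns the ADDR_EXPR of callee, an interprocedural
   invariant.  */
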
3056 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3057    invariant from a static constructor and if so, return it.  Otherwise return
3058    NULL. */
3059 
3060 static tree
3061 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3062 {
3063   if (by_ref)
3064     {
3065       if (TREE_CODE (scalar) != ADDR_EXPR)
3066 	return NULL;
3067       scalar = TREE_OPERAND (scalar, 0);
3068     }
3069 
3070   if (!VAR_P (scalar)
3071       || !is_global_var (scalar)
3072       || !TREE_READONLY (scalar)
3073       || !DECL_INITIAL (scalar)
3074       || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3075     return NULL;
3076 
3077   return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3078 }
3079 
3080 /* Retrieve value from aggregate jump function AGG or static initializer of
3081    SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3082    none.  BY_REF specifies whether the value has to be passed by reference or
3083    by value.  If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3084    to is set to true if the value comes from an initializer of a constant.  */
3085 
3086 tree
3087 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3088 			    HOST_WIDE_INT offset, bool by_ref,
3089 			    bool *from_global_constant)
3090 {
3091   struct ipa_agg_jf_item *item;
3092   int i;
3093 
3094   if (scalar)
3095     {
3096       tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3097       if (res)
3098 	{
3099 	  if (from_global_constant)
3100 	    *from_global_constant = true;
3101 	  return res;
3102 	}
3103     }
3104 
3105   if (!agg
3106       || by_ref != agg->by_ref)
3107     return NULL;
3108 
3109   FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
3110     if (item->offset == offset)
3111       {
3112 	/* Currently we do not have clobber values; return NULL for them once
3113 	   we do.  */
3114 	gcc_checking_assert (is_gimple_ip_invariant (item->value));
3115 	if (from_global_constant)
3116 	  *from_global_constant = false;
3117 	return item->value;
3118       }
3119   return NULL;
3120 }
3121 
3122 /* Remove a reference to SYMBOL from the list of references of a node given by
3123    reference description RDESC.  Return true if the reference has been
3124    successfully found and removed.  */
3125 
3126 static bool
3127 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3128 {
3129   struct ipa_ref *to_del;
3130   struct cgraph_edge *origin;
3131 
3132   origin = rdesc->cs;
3133   if (!origin)
3134     return false;
3135   to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3136 					   origin->lto_stmt_uid);
3137   if (!to_del)
3138     return false;
3139 
3140   to_del->remove_reference ();
3141   if (dump_file)
3142     fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3143 	     origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
3144   return true;
3145 }
3146 
3147 /* If JFUNC has a reference description with refcount different from
3148    IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3149    NULL.  JFUNC must be a constant jump function.  */
3150 
3151 static struct ipa_cst_ref_desc *
3152 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3153 {
3154   struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3155   if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3156     return rdesc;
3157   else
3158     return NULL;
3159 }
3160 
3161 /* If the value of constant jump function JFUNC is an address of a function
3162    declaration, return the associated call graph node.  Otherwise return
3163    NULL.  */
3164 
3165 static cgraph_node *
3166 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3167 {
3168   gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3169   tree cst = ipa_get_jf_constant (jfunc);
3170   if (TREE_CODE (cst) != ADDR_EXPR
3171       || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3172     return NULL;
3173 
3174   return cgraph_node::get (TREE_OPERAND (cst, 0));
3175 }
3176 
3177 
3178 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3179    refcount and if it hits zero, remove reference to SYMBOL from the caller of
3180    the edge specified in the rdesc.  Return false if either the symbol or the
3181    reference could not be found, otherwise return true.  */
3182 
3183 static bool
3184 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3185 {
3186   struct ipa_cst_ref_desc *rdesc;
3187   if (jfunc->type == IPA_JF_CONST
3188       && (rdesc = jfunc_rdesc_usable (jfunc))
3189       && --rdesc->refcount == 0)
3190     {
3191       symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3192       if (!symbol)
3193 	return false;
3194 
3195       return remove_described_reference (symbol, rdesc);
3196     }
3197   return true;
3198 }
3199 
3200 /* Try to find a destination for indirect edge IE that corresponds to a simple
3201    call or a call of a member function pointer and where the destination is a
3202    pointer formal parameter described by jump function JFUNC.  TARGET_TYPE is
3203    the type of the parameter to which the result of JFUNC is passed.  If it can
3204    be determined, return the newly direct edge, otherwise return NULL.
3205    NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */
3206 
3207 static struct cgraph_edge *
3208 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3209 				  struct ipa_jump_func *jfunc, tree target_type,
3210 				  struct ipa_node_params *new_root_info)
3211 {
3212   struct cgraph_edge *cs;
3213   tree target;
3214   bool agg_contents = ie->indirect_info->agg_contents;
3215   tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3216   if (agg_contents)
3217     {
3218       bool from_global_constant;
3219       target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3220 					   ie->indirect_info->offset,
3221 					   ie->indirect_info->by_ref,
3222 					   &from_global_constant);
3223       if (target
3224 	  && !from_global_constant
3225 	  && !ie->indirect_info->guaranteed_unmodified)
3226 	return NULL;
3227     }
3228   else
3229     target = scalar;
3230   if (!target)
3231     return NULL;
3232   cs = ipa_make_edge_direct_to_target (ie, target);
3233 
3234   if (cs && !agg_contents)
3235     {
3236       bool ok;
3237       gcc_checking_assert (cs->callee
3238 			   && (cs != ie
3239 			       || jfunc->type != IPA_JF_CONST
3240 			       || !cgraph_node_for_jfunc (jfunc)
3241 			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
3242       ok = try_decrement_rdesc_refcount (jfunc);
3243       gcc_checking_assert (ok);
3244     }
3245 
3246   return cs;
3247 }
3248 
3249 /* Return the target to be used in cases of impossible devirtualization.  IE
3250    and TARGET (the latter can be NULL) are dumped when dumping is enabled.  */
3251 
3252 tree
3253 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3254 {
3255   if (dump_file)
3256     {
3257       if (target)
3258 	fprintf (dump_file,
3259 		 "Type inconsistent devirtualization: %s->%s\n",
3260 		 ie->caller->dump_name (),
3261 		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3262       else
3263 	fprintf (dump_file,
3264 		 "No devirtualization target in %s\n",
3265 		 ie->caller->dump_name ());
3266     }
3267   tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3268   cgraph_node::get_create (new_target);
3269   return new_target;
3270 }
3271 
3272 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3273    call based on a formal parameter which is described by jump function JFUNC
3274    and if it can be determined, make it direct and return the direct edge.
3275    Otherwise, return NULL.  CTX describes the polymorphic context carried by
3276    the parameter on which the call is based.  */
3277 
3278 static struct cgraph_edge *
3279 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3280 				   struct ipa_jump_func *jfunc,
3281 				   struct ipa_polymorphic_call_context ctx)
3282 {
3283   tree target = NULL;
3284   bool speculative = false;
3285 
3286   if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3287     return NULL;
3288 
3289   gcc_assert (!ie->indirect_info->by_ref);
3290 
3291   /* Try to do lookup via known virtual table pointer value.  */
3292   if (!ie->indirect_info->vptr_changed
3293       || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3294     {
3295       tree vtable;
3296       unsigned HOST_WIDE_INT offset;
3297       tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3298 	: NULL;
3299       tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3300 					   ie->indirect_info->offset,
3301 					   true);
3302       if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3303 	{
3304 	  bool can_refer;
3305 	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3306 						 vtable, offset, &can_refer);
3307 	  if (can_refer)
3308 	    {
3309 	      if (!t
3310 		  || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3311 		      && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3312 		  || !possible_polymorphic_call_target_p
3313 		       (ie, cgraph_node::get (t)))
3314 		{
3315 		  /* Do not speculate builtin_unreachable, it is stupid!  */
3316 		  if (!ie->indirect_info->vptr_changed)
3317 		    target = ipa_impossible_devirt_target (ie, target);
3318 		  else
3319 		    target = NULL;
3320 		}
3321 	      else
3322 		{
3323 		  target = t;
3324 		  speculative = ie->indirect_info->vptr_changed;
3325 		}
3326 	    }
3327 	}
3328     }
3329 
3330   ipa_polymorphic_call_context ie_context (ie);
3331   vec <cgraph_node *>targets;
3332   bool final;
3333 
3334   ctx.offset_by (ie->indirect_info->offset);
3335   if (ie->indirect_info->vptr_changed)
3336     ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3337 				      ie->indirect_info->otr_type);
3338   ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3339   targets = possible_polymorphic_call_targets
3340     (ie->indirect_info->otr_type,
3341      ie->indirect_info->otr_token,
3342      ctx, &final);
3343   if (final && targets.length () <= 1)
3344     {
3345       speculative = false;
3346       if (targets.length () == 1)
3347 	target = targets[0]->decl;
3348       else
3349 	target = ipa_impossible_devirt_target (ie, NULL_TREE);
3350     }
3351   else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3352 	   && !ie->speculative && ie->maybe_hot_p ())
3353     {
3354       cgraph_node *n;
3355       n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3356 					    ie->indirect_info->otr_token,
3357 					    ie->indirect_info->context);
3358       if (n)
3359 	{
3360 	  target = n->decl;
3361 	  speculative = true;
3362 	}
3363     }
3364 
3365   if (target)
3366     {
3367       if (!possible_polymorphic_call_target_p
3368 	  (ie, cgraph_node::get_create (target)))
3369 	{
3370 	  if (speculative)
3371 	    return NULL;
3372 	  target = ipa_impossible_devirt_target (ie, target);
3373 	}
3374       return ipa_make_edge_direct_to_target (ie, target, speculative);
3375     }
3376   else
3377     return NULL;
3378 }
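
/* Editor's sketch (illustrative, not part of the original sources): a
   hypothetical C++ example of the kind of call the function above resolves.

     struct A { virtual int f () { return 1; } };
     static int call (A *a) { return a->f (); }  // indirect call via vtable
     int g () { A x; return call (&x); }

   Once the analysis knows the virtual table pointer value at the call site
   (first branch above) or derives a final set of possible targets from the
   polymorphic context (second branch), the indirect edge is turned into a
   direct call to A::f, speculatively if the vptr may have changed.  */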
3379 
3380 /* Update the param called notes associated with NODE when CS is being inlined,
3381    assuming NODE is (potentially indirectly) inlined into CS->callee.
3382    Moreover, if the callee is discovered to be constant, create a new cgraph
3383    edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
3384    unless NEW_EDGES is NULL.  Return true iff any new edges were created.  */
3385 
3386 static bool
3387 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3388 				      struct cgraph_node *node,
3389 				      vec<cgraph_edge *> *new_edges)
3390 {
3391   struct ipa_edge_args *top;
3392   struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3393   struct ipa_node_params *new_root_info, *inlined_node_info;
3394   bool res = false;
3395 
3396   ipa_check_create_edge_args ();
3397   top = IPA_EDGE_REF (cs);
3398   new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3399 				? cs->caller->global.inlined_to
3400 				: cs->caller);
3401   inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3402 
3403   for (ie = node->indirect_calls; ie; ie = next_ie)
3404     {
3405       struct cgraph_indirect_call_info *ici = ie->indirect_info;
3406       struct ipa_jump_func *jfunc;
3407       int param_index;
3408       cgraph_node *spec_target = NULL;
3409 
3410       next_ie = ie->next_callee;
3411 
3412       if (ici->param_index == -1)
3413 	continue;
3414 
3415       /* We must check range due to calls with variable number of arguments:  */
3416       if (ici->param_index >= ipa_get_cs_argument_count (top))
3417 	{
3418 	  ici->param_index = -1;
3419 	  continue;
3420 	}
3421 
3422       param_index = ici->param_index;
3423       jfunc = ipa_get_ith_jump_func (top, param_index);
3424 
3425       if (ie->speculative)
3426 	{
3427 	  struct cgraph_edge *de;
3428           struct ipa_ref *ref;
3429 	  ie->speculative_call_info (de, ie, ref);
3430 	  spec_target = de->callee;
3431 	}
3432 
3433       if (!opt_for_fn (node->decl, flag_indirect_inlining))
3434 	new_direct_edge = NULL;
3435       else if (ici->polymorphic)
3436 	{
3437           ipa_polymorphic_call_context ctx;
3438 	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3439 	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3440 	}
3441       else
3442 	{
3443 	  tree target_type = ipa_get_type (inlined_node_info, param_index);
3444 	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3445 							      target_type,
3446 							      new_root_info);
3447 	}
3448 
3449       /* If speculation was removed, then we need to do nothing.  */
3450       if (new_direct_edge && new_direct_edge != ie
3451 	  && new_direct_edge->callee == spec_target)
3452 	{
3453 	  new_direct_edge->indirect_inlining_edge = 1;
3454 	  top = IPA_EDGE_REF (cs);
3455 	  res = true;
3456 	  if (!new_direct_edge->speculative)
3457 	    continue;
3458 	}
3459       else if (new_direct_edge)
3460 	{
3461 	  new_direct_edge->indirect_inlining_edge = 1;
3462 	  if (new_direct_edge->call_stmt)
3463 	    new_direct_edge->call_stmt_cannot_inline_p
3464 	      = !gimple_check_call_matching_types (
3465 		  new_direct_edge->call_stmt,
3466 		  new_direct_edge->callee->decl, false);
3467 	  if (new_edges)
3468 	    {
3469 	      new_edges->safe_push (new_direct_edge);
3470 	      res = true;
3471 	    }
3472 	  top = IPA_EDGE_REF (cs);
3473 	  /* If a speculative edge was introduced we still need to update
3474 	     the call info of the indirect edge.  */
3475 	  if (!new_direct_edge->speculative)
3476 	    continue;
3477 	}
3478       if (jfunc->type == IPA_JF_PASS_THROUGH
3479           && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3480 	{
3481 	  if (ici->agg_contents
3482 	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3483 	      && !ici->polymorphic)
3484 	    ici->param_index = -1;
3485 	  else
3486 	    {
3487 	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3488 	      if (ici->polymorphic
3489 		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
3490 		ici->vptr_changed = true;
3491 	    }
3492 	}
3493       else if (jfunc->type == IPA_JF_ANCESTOR)
3494 	{
3495 	  if (ici->agg_contents
3496 	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3497 	      && !ici->polymorphic)
3498 	    ici->param_index = -1;
3499 	  else
3500 	    {
3501 	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3502 	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3503 	      if (ici->polymorphic
3504 		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
3505 		ici->vptr_changed = true;
3506 	    }
3507 	}
3508       else
3509 	/* Either we can find a destination for this edge now or never. */
3510 	ici->param_index = -1;
3511     }
3512 
3513   return res;
3514 }
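
/* Editor's sketch (illustrative, not part of the original sources): a
   hypothetical example of the param_index remapping performed above.

     static void callee (void (*fp) (void)) { fp (); }  // indirect call
     void caller (void (*cb) (void)) { callee (cb); }

   The jump function for callee's argument 0 is a simple NOP_EXPR
   pass-through of caller's formal 0, so when callee is inlined into caller,
   the indirect call's ici->param_index is remapped from callee's formal 0
   to caller's formal 0 via ipa_get_jf_pass_through_formal_id.  If caller is
   in turn called with a constant function pointer, the edge can later be
   made direct.  */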
3515 
3516 /* Recursively traverse subtree of NODE (including node) made of inlined
3517    cgraph_edges when CS has been inlined and invoke
3518    update_indirect_edges_after_inlining on all nodes and
3519    update_jump_functions_after_inlining on all non-inlined edges that lead out
3520    of this subtree.  Newly discovered indirect edges will be added to
3521    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff any new edges were
3522    created.  */
3523 
3524 static bool
3525 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3526 				   struct cgraph_node *node,
3527 				   vec<cgraph_edge *> *new_edges)
3528 {
3529   struct cgraph_edge *e;
3530   bool res;
3531 
3532   res = update_indirect_edges_after_inlining (cs, node, new_edges);
3533 
3534   for (e = node->callees; e; e = e->next_callee)
3535     if (!e->inline_failed)
3536       res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3537     else
3538       update_jump_functions_after_inlining (cs, e);
3539   for (e = node->indirect_calls; e; e = e->next_callee)
3540     update_jump_functions_after_inlining (cs, e);
3541 
3542   return res;
3543 }
3544 
3545 /* Combine two controlled uses counts as done during inlining.  */
3546 
3547 static int
3548 combine_controlled_uses_counters (int c, int d)
3549 {
3550   if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3551     return IPA_UNDESCRIBED_USE;
3552   else
3553     return c + d - 1;
3554 }
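
/* Editor's note (not part of the original sources): the C + D - 1 above can
   be read as follows.  Passing the argument at the inlined call site was
   itself one of the caller's C controlled uses; after inlining, that one
   use is replaced by the callee's D uses of the parameter.  For example,
   C = 2 and D = 3 combine to 2 + 3 - 1 = 4 remaining controlled uses.  */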
3555 
3556 /* Propagate the number of controlled uses from CS->callee to the new root
3557    of the tree of inlined nodes.  */
3558 
3559 static void
3560 propagate_controlled_uses (struct cgraph_edge *cs)
3561 {
3562   struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3563   struct cgraph_node *new_root = cs->caller->global.inlined_to
3564     ? cs->caller->global.inlined_to : cs->caller;
3565   struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3566   struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3567   int count, i;
3568 
3569   count = MIN (ipa_get_cs_argument_count (args),
3570 	       ipa_get_param_count (old_root_info));
3571   for (i = 0; i < count; i++)
3572     {
3573       struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3574       struct ipa_cst_ref_desc *rdesc;
3575 
3576       if (jf->type == IPA_JF_PASS_THROUGH)
3577 	{
3578 	  int src_idx, c, d;
3579 	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
3580 	  c = ipa_get_controlled_uses (new_root_info, src_idx);
3581 	  d = ipa_get_controlled_uses (old_root_info, i);
3582 
3583 	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3584 			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3585 	  c = combine_controlled_uses_counters (c, d);
3586 	  ipa_set_controlled_uses (new_root_info, src_idx, c);
3587 	  if (c == 0 && new_root_info->ipcp_orig_node)
3588 	    {
3589 	      struct cgraph_node *n;
3590 	      struct ipa_ref *ref;
3591 	      tree t = new_root_info->known_csts[src_idx];
3592 
3593 	      if (t && TREE_CODE (t) == ADDR_EXPR
3594 		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3595 		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3596 		  && (ref = new_root->find_reference (n, NULL, 0)))
3597 		{
3598 		  if (dump_file)
3599 		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
3600 			     "reference from %s to %s.\n",
3601 			     new_root->dump_name (),
3602 			     n->dump_name ());
3603 		  ref->remove_reference ();
3604 		}
3605 	    }
3606 	}
3607       else if (jf->type == IPA_JF_CONST
3608 	       && (rdesc = jfunc_rdesc_usable (jf)))
3609 	{
3610 	  int d = ipa_get_controlled_uses (old_root_info, i);
3611 	  int c = rdesc->refcount;
3612 	  rdesc->refcount = combine_controlled_uses_counters (c, d);
3613 	  if (rdesc->refcount == 0)
3614 	    {
3615 	      tree cst = ipa_get_jf_constant (jf);
3616 	      struct cgraph_node *n;
3617 	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3618 				   && TREE_CODE (TREE_OPERAND (cst, 0))
3619 				   == FUNCTION_DECL);
3620 	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
3621 	      if (n)
3622 		{
3623 		  struct cgraph_node *clone;
3624 		  bool ok;
3625 		  ok = remove_described_reference (n, rdesc);
3626 		  gcc_checking_assert (ok);
3627 
3628 		  clone = cs->caller;
3629 		  while (clone->global.inlined_to
3630 			 && clone != rdesc->cs->caller
3631 			 && IPA_NODE_REF (clone)->ipcp_orig_node)
3632 		    {
3633 		      struct ipa_ref *ref;
3634 		      ref = clone->find_reference (n, NULL, 0);
3635 		      if (ref)
3636 			{
3637 			  if (dump_file)
3638 			    fprintf (dump_file, "ipa-prop: Removing "
3639 				     "cloning-created reference "
3640 				     "from %s to %s.\n",
3641 				     clone->dump_name (),
3642 				     n->dump_name ());
3643 			  ref->remove_reference ();
3644 			}
3645 		      clone = clone->callers->caller;
3646 		    }
3647 		}
3648 	    }
3649 	}
3650     }
3651 
3652   for (i = ipa_get_param_count (old_root_info);
3653        i < ipa_get_cs_argument_count (args);
3654        i++)
3655     {
3656       struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3657 
3658       if (jf->type == IPA_JF_CONST)
3659 	{
3660 	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3661 	  if (rdesc)
3662 	    rdesc->refcount = IPA_UNDESCRIBED_USE;
3663 	}
3664       else if (jf->type == IPA_JF_PASS_THROUGH)
3665 	ipa_set_controlled_uses (new_root_info,
3666 				 jf->value.pass_through.formal_id,
3667 				 IPA_UNDESCRIBED_USE);
3668     }
3669 }
3670 
3671 /* Update jump functions and call note functions on inlining the call site CS.
3672    CS is expected to lead to a node already cloned by
3673    cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
3674    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff any new edges were
3675    created.  */
3676 
3677 bool
3678 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3679 				   vec<cgraph_edge *> *new_edges)
3680 {
3681   bool changed;
3682   /* Do nothing if the preparation phase has not been carried out yet
3683      (i.e. during early inlining).  */
3684   if (!ipa_node_params_sum)
3685     return false;
3686   gcc_assert (ipa_edge_args_sum);
3687 
3688   propagate_controlled_uses (cs);
3689   changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3690 
3691   return changed;
3692 }
3693 
3694 /* Ensure that the array of edge argument infos is big enough to accommodate
3695    a structure for all edges and reallocate it if not.  Also, allocate the
3696    associated hash tables if they do not already exist.  */
3697 
3698 void
3699 ipa_check_create_edge_args (void)
3700 {
3701   if (!ipa_edge_args_sum)
3702     ipa_edge_args_sum
3703       = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
3704 	 ipa_edge_args_sum_t (symtab, true));
3705   if (!ipa_bits_hash_table)
3706     ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3707   if (!ipa_vr_hash_table)
3708     ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3709 }
3710 
3711 /* Frees all dynamically allocated structures that the argument info points
3712    to.  */
3713 
3714 void
3715 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3716 {
3717   vec_free (args->jump_functions);
3718   *args = ipa_edge_args ();
3719 }
3720 
3721 /* Free all ipa_edge structures.  */
3722 
3723 void
3724 ipa_free_all_edge_args (void)
3725 {
3726   if (!ipa_edge_args_sum)
3727     return;
3728 
3729   ipa_edge_args_sum->release ();
3730   ipa_edge_args_sum = NULL;
3731 }
3732 
3733 /* Free all ipa_node_params structures.  */
3734 
3735 void
3736 ipa_free_all_node_params (void)
3737 {
3738   ipa_node_params_sum->release ();
3739   ipa_node_params_sum = NULL;
3740 }
3741 
3742 /* Grow ipcp_transformations if necessary.  Also allocate any necessary hash
3743    tables if they do not already exist.  */
3744 
3745 void
3746 ipcp_grow_transformations_if_necessary (void)
3747 {
3748   if (vec_safe_length (ipcp_transformations)
3749       <= (unsigned) symtab->cgraph_max_uid)
3750     vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3751   if (!ipa_bits_hash_table)
3752     ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3753   if (!ipa_vr_hash_table)
3754     ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3755 }
3756 
3757 /* Set the aggregate replacements of NODE to be AGGVALS.  */
3758 
3759 void
3760 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3761 			      struct ipa_agg_replacement_value *aggvals)
3762 {
3763   ipcp_grow_transformations_if_necessary ();
3764   (*ipcp_transformations)[node->uid].agg_values = aggvals;
3765 }
3766 
3767 /* Hook that is called by cgraph.c when an edge is removed.  Adjust reference
3768    count data structures accordingly.  */
3769 
3770 void
3771 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
3772 {
3773   if (args->jump_functions)
3774     {
3775       struct ipa_jump_func *jf;
3776       int i;
3777       FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3778 	{
3779 	  struct ipa_cst_ref_desc *rdesc;
3780 	  try_decrement_rdesc_refcount (jf);
3781 	  if (jf->type == IPA_JF_CONST
3782 	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
3783 	      && rdesc->cs == cs)
3784 	    rdesc->cs = NULL;
3785 	}
3786     }
3787 }
3788 
3789 /* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
3790    reference count data structures accordingly.  */
3791 
3792 void
3793 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3794 				ipa_edge_args *old_args, ipa_edge_args *new_args)
3795 {
3796   unsigned int i;
3797 
3798   new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3799   if (old_args->polymorphic_call_contexts)
3800     new_args->polymorphic_call_contexts
3801       = vec_safe_copy (old_args->polymorphic_call_contexts);
3802 
3803   for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3804     {
3805       struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3806       struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3807 
3808       dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3809 
3810       if (src_jf->type == IPA_JF_CONST)
3811 	{
3812 	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3813 
3814 	  if (!src_rdesc)
3815 	    dst_jf->value.constant.rdesc = NULL;
3816 	  else if (src->caller == dst->caller)
3817 	    {
3818 	      struct ipa_ref *ref;
3819 	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
3820 	      gcc_checking_assert (n);
3821 	      ref = src->caller->find_reference (n, src->call_stmt,
3822 						 src->lto_stmt_uid);
3823 	      gcc_checking_assert (ref);
3824 	      dst->caller->clone_reference (ref, ref->stmt);
3825 
3826 	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3827 	      dst_rdesc->cs = dst;
3828 	      dst_rdesc->refcount = src_rdesc->refcount;
3829 	      dst_rdesc->next_duplicate = NULL;
3830 	      dst_jf->value.constant.rdesc = dst_rdesc;
3831 	    }
3832 	  else if (src_rdesc->cs == src)
3833 	    {
3834 	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3835 	      dst_rdesc->cs = dst;
3836 	      dst_rdesc->refcount = src_rdesc->refcount;
3837 	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3838 	      src_rdesc->next_duplicate = dst_rdesc;
3839 	      dst_jf->value.constant.rdesc = dst_rdesc;
3840 	    }
3841 	  else
3842 	    {
3843 	      struct ipa_cst_ref_desc *dst_rdesc;
3844 	      /* This can happen during inlining, when a JFUNC can refer to a
3845 		 reference taken in a function up in the tree of inline clones.
3846 		 We need to find the duplicate that refers to our tree of
3847 		 inline clones.  */
3848 
3849 	      gcc_assert (dst->caller->global.inlined_to);
3850 	      for (dst_rdesc = src_rdesc->next_duplicate;
3851 		   dst_rdesc;
3852 		   dst_rdesc = dst_rdesc->next_duplicate)
3853 		{
3854 		  struct cgraph_node *top;
3855 		  top = dst_rdesc->cs->caller->global.inlined_to
3856 		    ? dst_rdesc->cs->caller->global.inlined_to
3857 		    : dst_rdesc->cs->caller;
3858 		  if (dst->caller->global.inlined_to == top)
3859 		    break;
3860 		}
3861 	      gcc_assert (dst_rdesc);
3862 	      dst_jf->value.constant.rdesc = dst_rdesc;
3863 	    }
3864 	}
3865       else if (dst_jf->type == IPA_JF_PASS_THROUGH
3866 	       && src->caller == dst->caller)
3867 	{
3868 	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
3869 	    ? dst->caller->global.inlined_to : dst->caller;
3870 	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3871 	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3872 
3873 	  int c = ipa_get_controlled_uses (root_info, idx);
3874 	  if (c != IPA_UNDESCRIBED_USE)
3875 	    {
3876 	      c++;
3877 	      ipa_set_controlled_uses (root_info, idx, c);
3878 	    }
3879 	}
3880     }
3881 }
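
/* Editor's note (not part of the original sources): the next_duplicate
   chain manipulated above links one ipa_cst_ref_desc per tree of inline
   clones, all describing the same constant, roughly

     src_rdesc -> dup for clone tree 1 -> dup for clone tree 2 -> NULL

   The final 'else' branch finds the right descriptor for DST by comparing
   the root of each descriptor's inline-clone tree (global.inlined_to of its
   edge's caller) with the root that DST was inlined into.  */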
3882 
3883 /* Analyze a function that was newly added to the callgraph.  */
3884 
3885 static void
3886 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3887 {
3888   if (node->has_gimple_body_p ())
3889     ipa_analyze_node (node);
3890 }
3891 
3892 /* Hook that is called by summary when a node is duplicated.  */
3893 
3894 void
3895 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
3896 			     ipa_node_params *old_info,
3897 			     ipa_node_params *new_info)
3898 {
3899   ipa_agg_replacement_value *old_av, *new_av;
3900 
3901   new_info->descriptors = vec_safe_copy (old_info->descriptors);
3902   new_info->lattices = NULL;
3903   new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3904   new_info->known_csts = old_info->known_csts.copy ();
3905   new_info->known_contexts = old_info->known_contexts.copy ();
3906 
3907   new_info->analysis_done = old_info->analysis_done;
3908   new_info->node_enqueued = old_info->node_enqueued;
3909   new_info->versionable = old_info->versionable;
3910 
3911   old_av = ipa_get_agg_replacements_for_node (src);
3912   if (old_av)
3913     {
3914       new_av = NULL;
3915       while (old_av)
3916 	{
3917 	  struct ipa_agg_replacement_value *v;
3918 
3919 	  v = ggc_alloc<ipa_agg_replacement_value> ();
3920 	  memcpy (v, old_av, sizeof (*v));
3921 	  v->next = new_av;
3922 	  new_av = v;
3923 	  old_av = old_av->next;
3924 	}
3925       ipa_set_node_agg_value_chain (dst, new_av);
3926     }
3927 
3928   ipcp_transformation_summary *src_trans
3929     = ipcp_get_transformation_summary (src);
3930 
3931   if (src_trans)
3932     {
3933       ipcp_grow_transformations_if_necessary ();
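      /* Editor's note: the growth above may have reallocated the vector of
	 summaries, so the summary pointer for SRC is re-fetched below before
	 it is used again.  */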
3934       src_trans = ipcp_get_transformation_summary (src);
3935       ipcp_transformation_summary *dst_trans
3936 	= ipcp_get_transformation_summary (dst);
3937 
3938       dst_trans->bits = vec_safe_copy (src_trans->bits);
3939 
3940       const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3941       vec<ipa_vr, va_gc> *&dst_vr
3942 	= ipcp_get_transformation_summary (dst)->m_vr;
3943       if (vec_safe_length (src_trans->m_vr) > 0)
3944 	{
3945 	  vec_safe_reserve_exact (dst_vr, src_vr->length ());
3946 	  for (unsigned i = 0; i < src_vr->length (); ++i)
3947 	    dst_vr->quick_push ((*src_vr)[i]);
3948 	}
3949     }
3950 }
3951 
3952 /* Register our cgraph hooks if they are not already there.  */
3953 
3954 void
3955 ipa_register_cgraph_hooks (void)
3956 {
3957   ipa_check_create_node_params ();
3958   ipa_check_create_edge_args ();
3959 
3960   function_insertion_hook_holder =
3961       symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3962 }
3963 
3964 /* Unregister our cgraph hooks.  */
3965 
3966 static void
3967 ipa_unregister_cgraph_hooks (void)
3968 {
3969   symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3970   function_insertion_hook_holder = NULL;
3971 }
3972 
3973 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3974    longer needed after ipa-cp.  */
3975 
3976 void
3977 ipa_free_all_structures_after_ipa_cp (void)
3978 {
3979   if (!optimize && !in_lto_p)
3980     {
3981       ipa_free_all_edge_args ();
3982       ipa_free_all_node_params ();
3983       ipcp_sources_pool.release ();
3984       ipcp_cst_values_pool.release ();
3985       ipcp_poly_ctx_values_pool.release ();
3986       ipcp_agg_lattice_pool.release ();
3987       ipa_unregister_cgraph_hooks ();
3988       ipa_refdesc_pool.release ();
3989     }
3990 }
3991 
3992 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3993    longer needed after indirect inlining.  */
3994 
3995 void
3996 ipa_free_all_structures_after_iinln (void)
3997 {
3998   ipa_free_all_edge_args ();
3999   ipa_free_all_node_params ();
4000   ipa_unregister_cgraph_hooks ();
4001   ipcp_sources_pool.release ();
4002   ipcp_cst_values_pool.release ();
4003   ipcp_poly_ctx_values_pool.release ();
4004   ipcp_agg_lattice_pool.release ();
4005   ipa_refdesc_pool.release ();
4006 }
4007 
4008 /* Print the parameter descriptors (the ipa_tree_map data structure) of
4009    function NODE to F.  */
4010 
4011 void
4012 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4013 {
4014   int i, count;
4015   struct ipa_node_params *info;
4016 
4017   if (!node->definition)
4018     return;
4019   info = IPA_NODE_REF (node);
4020   fprintf (f, "  function  %s parameter descriptors:\n", node->dump_name ());
4021   count = ipa_get_param_count (info);
4022   for (i = 0; i < count; i++)
4023     {
4024       int c;
4025 
4026       fprintf (f, "    ");
4027       ipa_dump_param (f, info, i);
4028       if (ipa_is_param_used (info, i))
4029 	fprintf (f, " used");
4030       c = ipa_get_controlled_uses (info, i);
4031       if (c == IPA_UNDESCRIBED_USE)
4032 	fprintf (f, " undescribed_use");
4033       else
4034 	fprintf (f, "  controlled_uses=%i", c);
4035       fprintf (f, "\n");
4036     }
4037 }
4038 
4039 /* Print ipa_tree_map data structures of all functions in the
4040    callgraph to F.  */
4041 
4042 void
4043 ipa_print_all_params (FILE * f)
4044 {
4045   struct cgraph_node *node;
4046 
4047   fprintf (f, "\nFunction parameters:\n");
4048   FOR_EACH_FUNCTION (node)
4049     ipa_print_node_params (f, node);
4050 }
4051 
4052 /* Dump the AV linked list.  */
4053 
4054 void
4055 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4056 {
4057   bool comma = false;
4058   fprintf (f, "     Aggregate replacements:");
4059   for (; av; av = av->next)
4060     {
4061       fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4062 	       av->index, av->offset);
4063       print_generic_expr (f, av->value);
4064       comma = true;
4065     }
4066   fprintf (f, "\n");
4067 }
4068 
4069 /* Stream out jump function JUMP_FUNC to OB.  */
4070 
4071 static void
4072 ipa_write_jump_function (struct output_block *ob,
4073 			 struct ipa_jump_func *jump_func)
4074 {
4075   struct ipa_agg_jf_item *item;
4076   struct bitpack_d bp;
4077   int i, count;
4078 
4079   streamer_write_uhwi (ob, jump_func->type);
4080   switch (jump_func->type)
4081     {
4082     case IPA_JF_UNKNOWN:
4083       break;
4084     case IPA_JF_CONST:
4085       gcc_assert (
4086 	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4087       stream_write_tree (ob, jump_func->value.constant.value, true);
4088       break;
4089     case IPA_JF_PASS_THROUGH:
4090       streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4091       if (jump_func->value.pass_through.operation == NOP_EXPR)
4092 	{
4093 	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4094 	  bp = bitpack_create (ob->main_stream);
4095 	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4096 	  streamer_write_bitpack (&bp);
4097 	}
4098       else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4099 	       == tcc_unary)
4100 	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4101       else
4102 	{
4103 	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4104 	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4105 	}
4106       break;
4107     case IPA_JF_ANCESTOR:
4108       streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4109       streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4110       bp = bitpack_create (ob->main_stream);
4111       bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4112       streamer_write_bitpack (&bp);
4113       break;
4114     }
4115 
4116   count = vec_safe_length (jump_func->agg.items);
4117   streamer_write_uhwi (ob, count);
4118   if (count)
4119     {
4120       bp = bitpack_create (ob->main_stream);
4121       bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4122       streamer_write_bitpack (&bp);
4123     }
4124 
4125   FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4126     {
4127       streamer_write_uhwi (ob, item->offset);
4128       stream_write_tree (ob, item->value, true);
4129     }
4130 
4131   bp = bitpack_create (ob->main_stream);
4132   bp_pack_value (&bp, !!jump_func->bits, 1);
4133   streamer_write_bitpack (&bp);
4134   if (jump_func->bits)
4135     {
4136       streamer_write_widest_int (ob, jump_func->bits->value);
4137       streamer_write_widest_int (ob, jump_func->bits->mask);
4138     }
4139   bp_pack_value (&bp, !!jump_func->m_vr, 1);
4140   streamer_write_bitpack (&bp);
4141   if (jump_func->m_vr)
4142     {
4143       streamer_write_enum (ob->main_stream, value_range_type,
4144 			   VR_LAST, jump_func->m_vr->type);
4145       stream_write_tree (ob, jump_func->m_vr->min, true);
4146       stream_write_tree (ob, jump_func->m_vr->max, true);
4147     }
4148 }
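
/* Editor's note (not part of the original sources): the byte stream
   produced above is, in rough grammar form,

     jump-func := type:uhwi payload agg bits vr
     payload   := (per the switch above: tree for CONST; operation plus
                   formal_id and operands for PASS_THROUGH; offset,
                   formal_id and a bitpack for ANCESTOR; nothing for UNKNOWN)
     agg       := count:uhwi [by_ref:1bit] { offset:uhwi value:tree }
     bits      := present:1bit [value:widest_int mask:widest_int]
     vr        := present:1bit [type:enum min:tree max:tree]

   ipa_read_jump_function below consumes exactly this layout, so the two
   routines must be kept in sync.  */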
4149 
4150 /* Read in jump function JUMP_FUNC from IB.  */
4151 
4152 static void
4153 ipa_read_jump_function (struct lto_input_block *ib,
4154 			struct ipa_jump_func *jump_func,
4155 			struct cgraph_edge *cs,
4156 			struct data_in *data_in)
4157 {
4158   enum jump_func_type jftype;
4159   enum tree_code operation;
4160   int i, count;
4161 
4162   jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4163   switch (jftype)
4164     {
4165     case IPA_JF_UNKNOWN:
4166       ipa_set_jf_unknown (jump_func);
4167       break;
4168     case IPA_JF_CONST:
4169       ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4170       break;
4171     case IPA_JF_PASS_THROUGH:
4172       operation = (enum tree_code) streamer_read_uhwi (ib);
4173       if (operation == NOP_EXPR)
4174 	{
4175 	  int formal_id = streamer_read_uhwi (ib);
4176 	  struct bitpack_d bp = streamer_read_bitpack (ib);
4177 	  bool agg_preserved = bp_unpack_value (&bp, 1);
4178 	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4179 	}
4180       else if (TREE_CODE_CLASS (operation) == tcc_unary)
4181 	{
4182 	  int formal_id = streamer_read_uhwi (ib);
4183 	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4184 	}
4185       else
4186 	{
4187 	  tree operand = stream_read_tree (ib, data_in);
4188 	  int formal_id = streamer_read_uhwi (ib);
4189 	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4190 					 operation);
4191 	}
4192       break;
4193     case IPA_JF_ANCESTOR:
4194       {
4195 	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4196 	int formal_id = streamer_read_uhwi (ib);
4197 	struct bitpack_d bp = streamer_read_bitpack (ib);
4198 	bool agg_preserved = bp_unpack_value (&bp, 1);
4199 	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4200 	break;
4201       }
4202     }
4203 
4204   count = streamer_read_uhwi (ib);
4205   vec_alloc (jump_func->agg.items, count);
4206   if (count)
4207     {
4208       struct bitpack_d bp = streamer_read_bitpack (ib);
4209       jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4210     }
4211   for (i = 0; i < count; i++)
4212     {
4213       struct ipa_agg_jf_item item;
4214       item.offset = streamer_read_uhwi (ib);
4215       item.value = stream_read_tree (ib, data_in);
4216       jump_func->agg.items->quick_push (item);
4217     }
4218 
4219   struct bitpack_d bp = streamer_read_bitpack (ib);
4220   bool bits_known = bp_unpack_value (&bp, 1);
4221   if (bits_known)
4222     {
4223       widest_int value = streamer_read_widest_int (ib);
4224       widest_int mask = streamer_read_widest_int (ib);
4225       ipa_set_jfunc_bits (jump_func, value, mask);
4226     }
4227   else
4228     jump_func->bits = NULL;
4229 
4230   struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4231   bool vr_known = bp_unpack_value (&vr_bp, 1);
4232   if (vr_known)
4233     {
4234       enum value_range_type type = streamer_read_enum (ib, value_range_type,
4235 						       VR_LAST);
4236       tree min = stream_read_tree (ib, data_in);
4237       tree max = stream_read_tree (ib, data_in);
4238       ipa_set_jfunc_vr (jump_func, type, min, max);
4239     }
4240   else
4241     jump_func->m_vr = NULL;
4242 }
4243 
4244 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4245    relevant to indirect inlining to OB.  */
4246 
4247 static void
4248 ipa_write_indirect_edge_info (struct output_block *ob,
4249 			      struct cgraph_edge *cs)
4250 {
4251   struct cgraph_indirect_call_info *ii = cs->indirect_info;
4252   struct bitpack_d bp;
4253 
4254   streamer_write_hwi (ob, ii->param_index);
4255   bp = bitpack_create (ob->main_stream);
4256   bp_pack_value (&bp, ii->polymorphic, 1);
4257   bp_pack_value (&bp, ii->agg_contents, 1);
4258   bp_pack_value (&bp, ii->member_ptr, 1);
4259   bp_pack_value (&bp, ii->by_ref, 1);
4260   bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4261   bp_pack_value (&bp, ii->vptr_changed, 1);
4262   streamer_write_bitpack (&bp);
4263   if (ii->agg_contents || ii->polymorphic)
4264     streamer_write_hwi (ob, ii->offset);
4265   else
4266     gcc_assert (ii->offset == 0);
4267 
4268   if (ii->polymorphic)
4269     {
4270       streamer_write_hwi (ob, ii->otr_token);
4271       stream_write_tree (ob, ii->otr_type, true);
4272       ii->context.stream_out (ob);
4273     }
4274 }
4275 
4276 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4277    relevant to indirect inlining from IB.  */
4278 
4279 static void
4280 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4281 			     struct data_in *data_in,
4282 			     struct cgraph_edge *cs)
4283 {
4284   struct cgraph_indirect_call_info *ii = cs->indirect_info;
4285   struct bitpack_d bp;
4286 
4287   ii->param_index = (int) streamer_read_hwi (ib);
4288   bp = streamer_read_bitpack (ib);
4289   ii->polymorphic = bp_unpack_value (&bp, 1);
4290   ii->agg_contents = bp_unpack_value (&bp, 1);
4291   ii->member_ptr = bp_unpack_value (&bp, 1);
4292   ii->by_ref = bp_unpack_value (&bp, 1);
4293   ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4294   ii->vptr_changed = bp_unpack_value (&bp, 1);
4295   if (ii->agg_contents || ii->polymorphic)
4296     ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4297   else
4298     ii->offset = 0;
4299   if (ii->polymorphic)
4300     {
4301       ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4302       ii->otr_type = stream_read_tree (ib, data_in);
4303       ii->context.stream_in (ib, data_in);
4304     }
4305 }
4306 
4307 /* Stream out NODE info to OB.  */
4308 
4309 static void
4310 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4311 {
4312   int node_ref;
4313   lto_symtab_encoder_t encoder;
4314   struct ipa_node_params *info = IPA_NODE_REF (node);
4315   int j;
4316   struct cgraph_edge *e;
4317   struct bitpack_d bp;
4318 
4319   encoder = ob->decl_state->symtab_node_encoder;
4320   node_ref = lto_symtab_encoder_encode (encoder, node);
4321   streamer_write_uhwi (ob, node_ref);
4322 
4323   streamer_write_uhwi (ob, ipa_get_param_count (info));
4324   for (j = 0; j < ipa_get_param_count (info); j++)
4325     streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4326   bp = bitpack_create (ob->main_stream);
4327   gcc_assert (info->analysis_done
4328 	      || ipa_get_param_count (info) == 0);
4329   gcc_assert (!info->node_enqueued);
4330   gcc_assert (!info->ipcp_orig_node);
4331   for (j = 0; j < ipa_get_param_count (info); j++)
4332     bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4333   streamer_write_bitpack (&bp);
4334   for (j = 0; j < ipa_get_param_count (info); j++)
4335     {
4336       streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4337       stream_write_tree (ob, ipa_get_type (info, j), true);
4338     }
4339   for (e = node->callees; e; e = e->next_callee)
4340     {
4341       struct ipa_edge_args *args = IPA_EDGE_REF (e);
4342 
4343       streamer_write_uhwi (ob,
4344 			   ipa_get_cs_argument_count (args) * 2
4345 			   + (args->polymorphic_call_contexts != NULL));
4346       for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4347 	{
4348 	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4349 	  if (args->polymorphic_call_contexts != NULL)
4350 	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4351 	}
4352     }
4353   for (e = node->indirect_calls; e; e = e->next_callee)
4354     {
4355       struct ipa_edge_args *args = IPA_EDGE_REF (e);
4356 
4357       streamer_write_uhwi (ob,
4358 			   ipa_get_cs_argument_count (args) * 2
4359 			   + (args->polymorphic_call_contexts != NULL));
4360       for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4361 	{
4362 	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4363 	  if (args->polymorphic_call_contexts != NULL)
4364 	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4365 	}
4366       ipa_write_indirect_edge_info (ob, e);
4367     }
4368 }
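
/* Editor's note (not part of the original sources): the per-edge count
   written above encodes two values at once:

     scount = 2 * argument_count + (polymorphic_call_contexts != NULL)

   e.g. three arguments with contexts stream as 7.  The reader recovers
   contexts_computed = scount & 1 and argument_count = scount / 2, which is
   exactly what ipa_read_node_info below does.  */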
4369 
4370 /* Stream in NODE info from IB.  */
4371 
4372 static void
4373 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4374 		    struct data_in *data_in)
4375 {
4376   struct ipa_node_params *info = IPA_NODE_REF (node);
4377   int k;
4378   struct cgraph_edge *e;
4379   struct bitpack_d bp;
4380 
4381   ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4382 
4383   for (k = 0; k < ipa_get_param_count (info); k++)
4384     (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
4385 
4386   bp = streamer_read_bitpack (ib);
4387   if (ipa_get_param_count (info) != 0)
4388     info->analysis_done = true;
4389   info->node_enqueued = false;
4390   for (k = 0; k < ipa_get_param_count (info); k++)
4391     ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4392   for (k = 0; k < ipa_get_param_count (info); k++)
4393     {
4394       ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4395       (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
4396     }
4397   for (e = node->callees; e; e = e->next_callee)
4398     {
4399       struct ipa_edge_args *args = IPA_EDGE_REF (e);
4400       int count = streamer_read_uhwi (ib);
4401       bool contexts_computed = count & 1;
4402       count /= 2;
4403 
4404       if (!count)
4405 	continue;
4406       vec_safe_grow_cleared (args->jump_functions, count);
4407       if (contexts_computed)
4408 	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4409 
4410       for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4411 	{
4412 	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4413 				  data_in);
4414 	  if (contexts_computed)
4415 	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4416 	}
4417     }
4418   for (e = node->indirect_calls; e; e = e->next_callee)
4419     {
4420       struct ipa_edge_args *args = IPA_EDGE_REF (e);
4421       int count = streamer_read_uhwi (ib);
4422       bool contexts_computed = count & 1;
4423       count /= 2;
4424 
4425       if (count)
4426 	{
4427 	  vec_safe_grow_cleared (args->jump_functions, count);
4428 	  if (contexts_computed)
4429 	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4430           for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4431 	    {
4432 	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4433 				      data_in);
4434 	      if (contexts_computed)
4435 		ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4436 	    }
4437 	}
4438       ipa_read_indirect_edge_info (ib, data_in, e);
4439     }
4440 }
4441 
4442 /* Write jump functions for all functions in the current partition.  */
4443 
4444 void
4445 ipa_prop_write_jump_functions (void)
4446 {
4447   struct cgraph_node *node;
4448   struct output_block *ob;
4449   unsigned int count = 0;
4450   lto_symtab_encoder_iterator lsei;
4451   lto_symtab_encoder_t encoder;
4452 
4453   if (!ipa_node_params_sum || !ipa_edge_args_sum)
4454     return;
4455 
4456   ob = create_output_block (LTO_section_jump_functions);
4457   encoder = ob->decl_state->symtab_node_encoder;
4458   ob->symbol = NULL;
4459   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4460        lsei_next_function_in_partition (&lsei))
4461     {
4462       node = lsei_cgraph_node (lsei);
4463       if (node->has_gimple_body_p ()
4464 	  && IPA_NODE_REF (node) != NULL)
4465 	count++;
4466     }
4467 
4468   streamer_write_uhwi (ob, count);
4469 
4470   /* Process all of the functions.  */
4471   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4472        lsei_next_function_in_partition (&lsei))
4473     {
4474       node = lsei_cgraph_node (lsei);
4475       if (node->has_gimple_body_p ()
4476 	  && IPA_NODE_REF (node) != NULL)
4477         ipa_write_node_info (ob, node);
4478     }
4479   streamer_write_char_stream (ob->main_stream, 0);
4480   produce_asm (ob, NULL);
4481   destroy_output_block (ob);
4482 }
4483 
4484 /* Read section in file FILE_DATA of length LEN with data DATA.  */
4485 
4486 static void
4487 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4488 		       size_t len)
4489 {
4490   const struct lto_function_header *header =
4491     (const struct lto_function_header *) data;
4492   const int cfg_offset = sizeof (struct lto_function_header);
4493   const int main_offset = cfg_offset + header->cfg_size;
4494   const int string_offset = main_offset + header->main_size;
4495   struct data_in *data_in;
4496   unsigned int i;
4497   unsigned int count;
4498 
4499   lto_input_block ib_main ((const char *) data + main_offset,
4500 			   header->main_size, file_data->mode_table);
4501 
4502   data_in =
4503     lto_data_in_create (file_data, (const char *) data + string_offset,
4504 			header->string_size, vNULL);
4505   count = streamer_read_uhwi (&ib_main);
4506 
4507   for (i = 0; i < count; i++)
4508     {
4509       unsigned int index;
4510       struct cgraph_node *node;
4511       lto_symtab_encoder_t encoder;
4512 
4513       index = streamer_read_uhwi (&ib_main);
4514       encoder = file_data->symtab_node_encoder;
4515       node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4516 								index));
4517       gcc_assert (node->definition);
4518       ipa_read_node_info (&ib_main, node, data_in);
4519     }
4520   lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4521 			 len);
4522   lto_data_in_delete (data_in);
4523 }
4524 
4525 /* Read ipcp jump functions.  */
4526 
4527 void
4528 ipa_prop_read_jump_functions (void)
4529 {
4530   struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4531   struct lto_file_decl_data *file_data;
4532   unsigned int j = 0;
4533 
4534   ipa_check_create_node_params ();
4535   ipa_check_create_edge_args ();
4536   ipa_register_cgraph_hooks ();
4537 
4538   while ((file_data = file_data_vec[j++]))
4539     {
4540       size_t len;
4541       const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4542 
4543       if (data)
4544         ipa_prop_read_section (file_data, data, len);
4545     }
4546 }
4547 
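/* Stream out the IPA-CP transformation summary (aggregate replacement
   values, value ranges and known bits) for NODE to OB.  */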
4548 void
4549 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4550 {
4551   int node_ref;
4552   unsigned int count = 0;
4553   lto_symtab_encoder_t encoder;
4554   struct ipa_agg_replacement_value *aggvals, *av;
4555 
4556   aggvals = ipa_get_agg_replacements_for_node (node);
4557   encoder = ob->decl_state->symtab_node_encoder;
4558   node_ref = lto_symtab_encoder_encode (encoder, node);
4559   streamer_write_uhwi (ob, node_ref);
4560 
4561   for (av = aggvals; av; av = av->next)
4562     count++;
4563   streamer_write_uhwi (ob, count);
4564 
4565   for (av = aggvals; av; av = av->next)
4566     {
4567       struct bitpack_d bp;
4568 
4569       streamer_write_uhwi (ob, av->offset);
4570       streamer_write_uhwi (ob, av->index);
4571       stream_write_tree (ob, av->value, true);
4572 
4573       bp = bitpack_create (ob->main_stream);
4574       bp_pack_value (&bp, av->by_ref, 1);
4575       streamer_write_bitpack (&bp);
4576     }
4577 
4578   ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4579   if (ts && vec_safe_length (ts->m_vr) > 0)
4580     {
4581       count = ts->m_vr->length ();
4582       streamer_write_uhwi (ob, count);
4583       for (unsigned i = 0; i < count; ++i)
4584 	{
4585 	  struct bitpack_d bp;
4586 	  ipa_vr *parm_vr = &(*ts->m_vr)[i];
4587 	  bp = bitpack_create (ob->main_stream);
4588 	  bp_pack_value (&bp, parm_vr->known, 1);
4589 	  streamer_write_bitpack (&bp);
4590 	  if (parm_vr->known)
4591 	    {
4592 	      streamer_write_enum (ob->main_stream, value_range_type,
4593 				   VR_LAST, parm_vr->type);
4594 	      streamer_write_wide_int (ob, parm_vr->min);
4595 	      streamer_write_wide_int (ob, parm_vr->max);
4596 	    }
4597 	}
4598     }
4599   else
4600     streamer_write_uhwi (ob, 0);
4601 
4602   if (ts && vec_safe_length (ts->bits) > 0)
4603     {
4604       count = ts->bits->length ();
4605       streamer_write_uhwi (ob, count);
4606 
4607       for (unsigned i = 0; i < count; ++i)
4608 	{
4609 	  const ipa_bits *bits_jfunc = (*ts->bits)[i];
4610 	  struct bitpack_d bp = bitpack_create (ob->main_stream);
4611 	  bp_pack_value (&bp, !!bits_jfunc, 1);
4612 	  streamer_write_bitpack (&bp);
4613 	  if (bits_jfunc)
4614 	    {
4615 	      streamer_write_widest_int (ob, bits_jfunc->value);
4616 	      streamer_write_widest_int (ob, bits_jfunc->mask);
4617 	    }
4618 	}
4619     }
4620   else
4621     streamer_write_uhwi (ob, 0);
4622 }
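
/* Editor's note (not part of the original sources): the record written
   above is, roughly,

     node_ref:uhwi
     agg-count:uhwi  { offset:uhwi index:uhwi value:tree by_ref:1bit }
     vr-count:uhwi   { known:1bit [type:enum min:wide_int max:wide_int] }
     bits-count:uhwi { present:1bit [value:widest_int mask:widest_int] }

   with a zero count standing for "no entries";
   read_ipcp_transformation_info below mirrors this layout.  */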
4623 
4624 /* Stream in the IPA-CP transformation summary for NODE from IB.  */
4625 
4626 static void
4627 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4628 			       data_in *data_in)
4629 {
4630   struct ipa_agg_replacement_value *aggvals = NULL;
4631   unsigned int count, i;
4632 
4633   count = streamer_read_uhwi (ib);
4634   for (i = 0; i < count; i++)
4635     {
4636       struct ipa_agg_replacement_value *av;
4637       struct bitpack_d bp;
4638 
4639       av = ggc_alloc<ipa_agg_replacement_value> ();
4640       av->offset = streamer_read_uhwi (ib);
4641       av->index = streamer_read_uhwi (ib);
4642       av->value = stream_read_tree (ib, data_in);
4643       bp = streamer_read_bitpack (ib);
4644       av->by_ref = bp_unpack_value (&bp, 1);
4645       av->next = aggvals;
4646       aggvals = av;
4647     }
4648   ipa_set_node_agg_value_chain (node, aggvals);
4649 
4650   count = streamer_read_uhwi (ib);
4651   if (count > 0)
4652     {
4653       ipcp_grow_transformations_if_necessary ();
4654 
4655       ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4656       vec_safe_grow_cleared (ts->m_vr, count);
4657       for (i = 0; i < count; i++)
4658 	{
4659 	  ipa_vr *parm_vr;
4660 	  parm_vr = &(*ts->m_vr)[i];
4661 	  struct bitpack_d bp;
4662 	  bp = streamer_read_bitpack (ib);
4663 	  parm_vr->known = bp_unpack_value (&bp, 1);
4664 	  if (parm_vr->known)
4665 	    {
4666 	      parm_vr->type = streamer_read_enum (ib, value_range_type,
4667 						  VR_LAST);
4668 	      parm_vr->min = streamer_read_wide_int (ib);
4669 	      parm_vr->max = streamer_read_wide_int (ib);
4670 	    }
4671 	}
4672     }
4673   count = streamer_read_uhwi (ib);
4674   if (count > 0)
4675     {
4676       ipcp_grow_transformations_if_necessary ();
4677 
4678       ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4679       vec_safe_grow_cleared (ts->bits, count);
4680 
4681       for (i = 0; i < count; i++)
4682 	{
4683 	  struct bitpack_d bp = streamer_read_bitpack (ib);
4684 	  bool known = bp_unpack_value (&bp, 1);
4685 	  if (known)
4686 	    {
4687 	      ipa_bits *bits
4688 		= ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
4689 					      streamer_read_widest_int (ib));
4690 	      (*ts->bits)[i] = bits;
4691 	    }
4692 	}
4693     }
4694 }
4695 
4696 /* Write IPA-CP transformation summaries for all nodes in the set.  */
4697 
4698 void
4699 ipcp_write_transformation_summaries (void)
4700 {
4701   struct cgraph_node *node;
4702   struct output_block *ob;
4703   unsigned int count = 0;
4704   lto_symtab_encoder_iterator lsei;
4705   lto_symtab_encoder_t encoder;
4706 
4707   ob = create_output_block (LTO_section_ipcp_transform);
4708   encoder = ob->decl_state->symtab_node_encoder;
4709   ob->symbol = NULL;
4710   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4711        lsei_next_function_in_partition (&lsei))
4712     {
4713       node = lsei_cgraph_node (lsei);
4714       if (node->has_gimple_body_p ())
4715 	count++;
4716     }
4717 
4718   streamer_write_uhwi (ob, count);
4719 
4720   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4721        lsei_next_function_in_partition (&lsei))
4722     {
4723       node = lsei_cgraph_node (lsei);
4724       if (node->has_gimple_body_p ())
4725 	write_ipcp_transformation_info (ob, node);
4726     }
4727   streamer_write_char_stream (ob->main_stream, 0);
4728   produce_asm (ob, NULL);
4729   destroy_output_block (ob);
4730 }
4731 
4732 /* Read replacements section in file FILE_DATA of length LEN with data
4733    DATA.  */
4734 
4735 static void
4736 read_replacements_section (struct lto_file_decl_data *file_data,
4737 			   const char *data,
4738 			   size_t len)
4739 {
4740   const struct lto_function_header *header =
4741     (const struct lto_function_header *) data;
4742   const int cfg_offset = sizeof (struct lto_function_header);
4743   const int main_offset = cfg_offset + header->cfg_size;
4744   const int string_offset = main_offset + header->main_size;
4745   struct data_in *data_in;
4746   unsigned int i;
4747   unsigned int count;
4748 
4749   lto_input_block ib_main ((const char *) data + main_offset,
4750 			   header->main_size, file_data->mode_table);
4751 
4752   data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4753 				header->string_size, vNULL);
4754   count = streamer_read_uhwi (&ib_main);
4755 
4756   for (i = 0; i < count; i++)
4757     {
4758       unsigned int index;
4759       struct cgraph_node *node;
4760       lto_symtab_encoder_t encoder;
4761 
4762       index = streamer_read_uhwi (&ib_main);
4763       encoder = file_data->symtab_node_encoder;
4764       node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4765 								index));
4766       gcc_assert (node->definition);
4767       read_ipcp_transformation_info (&ib_main, node, data_in);
4768     }
4769   lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4770 			 len);
4771   lto_data_in_delete (data_in);
4772 }
4773 
4774 /* Read IPA-CP transformation summaries.  */
4775 
4776 void
4777 ipcp_read_transformation_summaries (void)
4778 {
4779   struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4780   struct lto_file_decl_data *file_data;
4781   unsigned int j = 0;
4782 
4783   while ((file_data = file_data_vec[j++]))
4784     {
4785       size_t len;
4786       const char *data = lto_get_section_data (file_data,
4787 					       LTO_section_ipcp_transform,
4788 					       NULL, &len);
4789       if (data)
4790         read_replacements_section (file_data, data, len);
4791     }
4792 }
4793 
4794 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4795    NODE.  */
4796 
4797 static void
4798 adjust_agg_replacement_values (struct cgraph_node *node,
4799 			       struct ipa_agg_replacement_value *aggval)
4800 {
4801   struct ipa_agg_replacement_value *v;
4802   int i, c = 0, d = 0, *adj;
4803 
4804   if (!node->clone.combined_args_to_skip)
4805     return;
4806 
4807   for (v = aggval; v; v = v->next)
4808     {
4809       gcc_assert (v->index >= 0);
4810       if (c < v->index)
4811 	c = v->index;
4812     }
4813   c++;
4814 
4815   adj = XALLOCAVEC (int, c);
4816   for (i = 0; i < c; i++)
4817     if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4818       {
4819 	adj[i] = -1;
4820 	d++;
4821       }
4822     else
4823       adj[i] = i - d;
4824 
4825   for (v = aggval; v; v = v->next)
4826     v->index = adj[v->index];
4827 }
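
/* Editor's note (not part of the original sources): a worked example of the
   remapping above.  If the clone skips original argument 1 out of four
   (indices 0..3), the adj array becomes

     adj[0] = 0,  adj[1] = -1,  adj[2] = 1,  adj[3] = 2

   so a replacement recorded for original parameter 2 is re-attached to the
   clone's parameter 1.  Replacements are only expected for surviving
   parameters, so adj[v->index] should never be -1 in the final loop.  */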
4828 
4829 /* Dominator walker driving the ipcp modification phase.  */
4830 
4831 class ipcp_modif_dom_walker : public dom_walker
4832 {
4833 public:
4834   ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
4835 			 vec<ipa_param_descriptor, va_gc> *descs,
4836 			 struct ipa_agg_replacement_value *av,
4837 			 bool *sc, bool *cc)
4838     : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
4839       m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
4840 
4841   virtual edge before_dom_children (basic_block);
4842 
4843 private:
4844   struct ipa_func_body_info *m_fbi;
4845   vec<ipa_param_descriptor, va_gc> *m_descriptors;
4846   struct ipa_agg_replacement_value *m_aggval;
4847   bool *m_something_changed, *m_cfg_changed;
4848 };
4849 
4850 edge
4851 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
4852 {
4853   gimple_stmt_iterator gsi;
4854   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4855     {
4856       struct ipa_agg_replacement_value *v;
4857       gimple *stmt = gsi_stmt (gsi);
4858       tree rhs, val, t;
4859       HOST_WIDE_INT offset, size;
4860       int index;
4861       bool by_ref, vce;
4862 
4863       if (!gimple_assign_load_p (stmt))
4864 	continue;
4865       rhs = gimple_assign_rhs1 (stmt);
4866       if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4867 	continue;
4868 
4869       vce = false;
4870       t = rhs;
4871       while (handled_component_p (t))
4872 	{
4873 	  /* V_C_E can do things like convert an array of integers to one
4874 	     bigger integer and similar things we do not handle below.  */
4875 	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4876 	    {
4877 	      vce = true;
4878 	      break;
4879 	    }
4880 	  t = TREE_OPERAND (t, 0);
4881 	}
4882       if (vce)
4883 	continue;
4884 
4885       if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
4886 				   &offset, &size, &by_ref))
4887 	continue;
4888       for (v = m_aggval; v; v = v->next)
4889 	if (v->index == index
4890 	    && v->offset == offset)
4891 	  break;
4892       if (!v
4893 	  || v->by_ref != by_ref
4894 	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
4895 	continue;
4896 
4897       gcc_checking_assert (is_gimple_ip_invariant (v->value));
4898       if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4899 	{
4900 	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4901 	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4902 	  else if (TYPE_SIZE (TREE_TYPE (rhs))
4903 		   == TYPE_SIZE (TREE_TYPE (v->value)))
4904 	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4905 	  else
4906 	    {
4907 	      if (dump_file)
4908 		{
4909 		  fprintf (dump_file, "    const ");
4910 		  print_generic_expr (dump_file, v->value);
4911 		  fprintf (dump_file, "  can't be converted to type of ");
4912 		  print_generic_expr (dump_file, rhs);
4913 		  fprintf (dump_file, "\n");
4914 		}
4915 	      continue;
4916 	    }
4917 	}
4918       else
4919 	val = v->value;
4920 
4921       if (dump_file && (dump_flags & TDF_DETAILS))
4922 	{
4923 	  fprintf (dump_file, "Modifying stmt:\n  ");
4924 	  print_gimple_stmt (dump_file, stmt, 0);
4925 	}
4926       gimple_assign_set_rhs_from_tree (&gsi, val);
4927       update_stmt (stmt);
4928 
4929       if (dump_file && (dump_flags & TDF_DETAILS))
4930 	{
4931 	  fprintf (dump_file, "into:\n  ");
4932 	  print_gimple_stmt (dump_file, stmt, 0);
4933 	  fprintf (dump_file, "\n");
4934 	}
4935 
4936       *m_something_changed = true;
4937       if (maybe_clean_eh_stmt (stmt)
4938 	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4939 	*m_cfg_changed = true;
4940     }
4941   return NULL;
4942 }
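
/* Editor's sketch (illustrative, not part of the original sources): the
   source-level effect of the walker above.  Given a clone of

     int f (struct s *p) { return p->a + 1; }

   for which IPA-CP recorded that p->a is always 7 at the matching offset,
   the GIMPLE load

     _1 = p_2(D)->a;

   is rewritten into

     _1 = 7;

   after which dead EH edges may be purged, changing the CFG.  */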
4943 
4944 /* Update bits info of formal parameters as described in
4945    ipcp_transformation_summary.  */

static void
ipcp_update_bits (struct cgraph_node *node)
{
  tree parm = DECL_ARGUMENTS (node->decl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);

  if (!ts || vec_safe_length (ts->bits) == 0)
    return;

  vec<ipa_bits *, va_gc> &bits = *ts->bits;
  unsigned count = bits.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
          && bitmap_bit_p (node->clone.combined_args_to_skip, i))
        continue;

      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!bits[i]
          || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
               || POINTER_TYPE_P (TREE_TYPE (parm)))
          || !is_gimple_reg (parm))
        continue;

      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
        continue;

      if (dump_file)
        {
          fprintf (dump_file, "Adjusting mask for param %u to ", i);
          print_hex (bits[i]->mask, dump_file);
          fprintf (dump_file, "\n");
        }

      if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
        {
          unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
          signop sgn = TYPE_SIGN (TREE_TYPE (ddef));

          wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec,
                                                  UNSIGNED)
                                  | wide_int::from (bits[i]->value, prec, sgn);
          set_nonzero_bits (ddef, nonzero_bits);
        }
      else
        {
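          /* The least significant set bit of the mask is the first bit
             whose value is unknown, so it determines the provable
             alignment, and the known value bits below it give the
             misalignment.  */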
          unsigned tem = bits[i]->mask.to_uhwi ();
          unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
          unsigned align = tem & -tem;
          unsigned misalign = bitpos & (align - 1);

          if (align > 1)
            {
              if (dump_file)
                fprintf (dump_file, "Adjusting align: %u, misalign: %u\n",
                         align, misalign);

              unsigned old_align, old_misalign;
              struct ptr_info_def *pi = get_ptr_info (ddef);
              bool old_known = get_ptr_info_alignment (pi, &old_align,
                                                       &old_misalign);

              if (old_known
                  && old_align > align)
                {
                  if (dump_file)
                    {
                      fprintf (dump_file, "But alignment was already %u.\n",
                               old_align);
                      if ((old_misalign & (align - 1)) != misalign)
                        fprintf (dump_file,
                                 "old_misalign (%u) and misalign (%u) "
                                 "mismatch\n", old_misalign, misalign);
                    }
                  continue;
                }

              if (old_known
                  && ((misalign & (old_align - 1)) != old_misalign)
                  && dump_file)
                fprintf (dump_file,
                         "old_misalign (%u) and misalign (%u) mismatch\n",
                         old_misalign, misalign);

              set_ptr_info_alignment (pi, align, misalign);
            }
        }
    }
}

/* Update value range of formal parameters as described in
   ipcp_transformation_summary.  */
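/* For example, when every caller passes a value in [0, 15] for a parameter,
   its default-def SSA name gets that range, and a pointer parameter whose
   propagated range is the anti-range ~[0, 0] is marked non-NULL.  */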

static void
ipcp_update_vr (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
          && bitmap_bit_p (node->clone.combined_args_to_skip, i))
        continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
        continue;

      if (vr[i].known
          && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
        {
          tree type = TREE_TYPE (ddef);
          unsigned prec = TYPE_PRECISION (type);
          if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
            {
              if (dump_file)
                {
                  fprintf (dump_file, "Setting value range of param %u ", i);
                  fprintf (dump_file, "%s[",
                           (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
                  print_decs (vr[i].min, dump_file);
                  fprintf (dump_file, ", ");
                  print_decs (vr[i].max, dump_file);
                  fprintf (dump_file, "]\n");
                }
              set_range_info (ddef, vr[i].type,
                              wide_int_storage::from (vr[i].min, prec,
                                                      TYPE_SIGN (type)),
                              wide_int_storage::from (vr[i].max, prec,
                                                      TYPE_SIGN (type)));
            }
          else if (POINTER_TYPE_P (TREE_TYPE (ddef))
                   && vr[i].type == VR_ANTI_RANGE
                   && wi::eq_p (vr[i].min, 0)
                   && wi::eq_p (vr[i].max, 0))
            {
              if (dump_file)
                fprintf (dump_file, "Setting nonnull for %u\n", i);
              set_ptr_nonnull (ddef);
            }
        }
    }
}

/* IPCP transformation phase: apply the propagated known bits, value range
   and aggregate value information to the body of NODE.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
             node->dump_name ());

  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

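  /* Set up the function body info and the parameter descriptors used by
     the dominator walker to match loads against aggregate replacement
     values.  */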
  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  vec_safe_grow_cleared (descriptors, param_count);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
                         &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
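  /* The transformation data has been applied; clear it so that it cannot
     be applied a second time.  */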
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].bits = NULL;
  (*ipcp_transformations)[node->uid].m_vr = NULL;

  vec_free (descriptors);

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}

#include "gt-ipa-prop.h"