1 /* Interprocedural analyses.
2    Copyright (C) 2005-2018 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "params.h"
51 #include "ipa-utils.h"
52 #include "dbgcnt.h"
53 #include "domwalk.h"
54 #include "builtins.h"
55 
56 /* Function summary where the parameter information is actually stored.  */
57 ipa_node_params_t *ipa_node_params_sum = NULL;
58 /* Vector of IPA-CP transformation data for each clone.  */
59 vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
60 /* Edge summary for IPA-CP edge information.  */
61 ipa_edge_args_sum_t *ipa_edge_args_sum;
62 
63 /* Traits for a hash table for reusing already existing ipa_bits. */
64 
65 struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
66 {
67   typedef ipa_bits *value_type;
68   typedef ipa_bits *compare_type;
69   static hashval_t
70   hash (const ipa_bits *p)
71   {
72     hashval_t t = (hashval_t) p->value.to_shwi ();
73     return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
74   }
75   static bool
76   equal (const ipa_bits *a, const ipa_bits *b)
77     {
78       return a->value == b->value && a->mask == b->mask;
79     }
80   static void
81   mark_empty (ipa_bits *&p)
82     {
83       p = NULL;
84     }
85   static bool
86   is_empty (const ipa_bits *p)
87     {
88       return p == NULL;
89     }
90   static bool
91   is_deleted (const ipa_bits *p)
92     {
93       return p == reinterpret_cast<const ipa_bits *> (1);
94     }
95   static void
96   mark_deleted (ipa_bits *&p)
97     {
98       p = reinterpret_cast<ipa_bits *> (1);
99     }
100 };
101 
102 /* Hash table for avoiding repeated allocations of equal ipa_bits.  */
103 static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
104 
105 /* Traits for a hash table for reusing value_ranges used for IPA.  Note that
106    the equiv bitmap is not hashed and is expected to be NULL.  */
107 
108 struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
109 {
110   typedef value_range *value_type;
111   typedef value_range *compare_type;
112   static hashval_t
113   hash (const value_range *p)
114     {
115       gcc_checking_assert (!p->equiv);
116       inchash::hash hstate (p->type);
117       hstate.add_ptr (p->min);
118       hstate.add_ptr (p->max);
119       return hstate.end ();
120     }
121   static bool
122   equal (const value_range *a, const value_range *b)
123     {
124       return a->type == b->type && a->min == b->min && a->max == b->max;
125     }
126   static void
127   mark_empty (value_range *&p)
128     {
129       p = NULL;
130     }
131   static bool
132   is_empty (const value_range *p)
133     {
134       return p == NULL;
135     }
136   static bool
137   is_deleted (const value_range *p)
138     {
139       return p == reinterpret_cast<const value_range *> (1);
140     }
141   static void
142   mark_deleted (value_range *&p)
143     {
144       p = reinterpret_cast<value_range *> (1);
145     }
146 };
147 
148 /* Hash table for avoiding repeated allocations of equal value_ranges.  */
149 static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
150 
151 /* Holders of ipa cgraph hooks: */
152 static struct cgraph_node_hook_list *function_insertion_hook_holder;
153 
154 /* Description of a reference to an IPA constant.  */
155 struct ipa_cst_ref_desc
156 {
157   /* Edge that corresponds to the statement which took the reference.  */
158   struct cgraph_edge *cs;
159   /* Linked list of duplicates created when call graph edges are cloned.  */
160   struct ipa_cst_ref_desc *next_duplicate;
161   /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
162      is out of control.  */
163   int refcount;
164 };
165 
166 /* Allocation pool for reference descriptions.  */
167 
168 static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
169   ("IPA-PROP ref descriptions");
170 
171 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
172    with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */
173 
174 static bool
175 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
176 {
177   tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
178 
179   if (!fs_opts)
180     return false;
181   return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
182 }
183 
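/* For example, a function declared as

     void __attribute__ ((optimize ("O0"))) f (void);

   has DECL_FUNCTION_SPECIFIC_OPTIMIZATION with optimize being zero, so the
   predicate above answers true for it and IPA-CP analysis skips it.  */
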
184 /* Return index of the formal whose tree is PTREE in the function whose
185    parameters are described by DESCRIPTORS.  */
186 
187 static int
188 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
189 			    tree ptree)
190 {
191   int i, count;
192 
193   count = vec_safe_length (descriptors);
194   for (i = 0; i < count; i++)
195     if ((*descriptors)[i].decl_or_type == ptree)
196       return i;
197 
198   return -1;
199 }
200 
201 /* Return index of the formal whose tree is PTREE in the function which
202    corresponds to INFO.  */
203 
204 int
205 ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
206 {
207   return ipa_get_param_decl_index_1 (info->descriptors, ptree);
208 }
209 
210 /* Populate the decl_or_type fields in parameter DESCRIPTORS that correspond
211    to NODE.  */
212 
213 static void
214 ipa_populate_param_decls (struct cgraph_node *node,
215 			  vec<ipa_param_descriptor, va_gc> &descriptors)
216 {
217   tree fndecl;
218   tree fnargs;
219   tree parm;
220   int param_num;
221 
222   fndecl = node->decl;
223   gcc_assert (gimple_has_body_p (fndecl));
224   fnargs = DECL_ARGUMENTS (fndecl);
225   param_num = 0;
226   for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
227     {
228       descriptors[param_num].decl_or_type = parm;
229       descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
230 							     true);
231       param_num++;
232     }
233 }
234 
235 /* Return how many formal parameters FNDECL has.  */
236 
237 int
238 count_formal_params (tree fndecl)
239 {
240   tree parm;
241   int count = 0;
242   gcc_assert (gimple_has_body_p (fndecl));
243 
244   for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
245     count++;
246 
247   return count;
248 }
249 
250 /* Dump a textual representation (index and, if available, the declaration
251    or type) of the formal parameter I of the function corresponding to INFO
252    into FILE.  */
253 
254 void
255 ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
256 {
257   fprintf (file, "param #%i", i);
258   if ((*info->descriptors)[i].decl_or_type)
259     {
260       fprintf (file, " ");
261       print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
262     }
263 }
264 
265 /* If necessary, allocate vector of parameter descriptors in info of NODE.
266    Return true if they were allocated, false if not.  */
267 
268 static bool
269 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
270 {
271   struct ipa_node_params *info = IPA_NODE_REF (node);
272 
273   if (!info->descriptors && param_count)
274     {
275       vec_safe_grow_cleared (info->descriptors, param_count);
276       return true;
277     }
278   else
279     return false;
280 }
281 
282 /* Initialize the ipa_node_params structure associated with NODE by counting
283    the function parameters, creating the descriptors and populating their
284    param_decls.  */
285 
286 void
287 ipa_initialize_node_params (struct cgraph_node *node)
288 {
289   struct ipa_node_params *info = IPA_NODE_REF (node);
290 
291   if (!info->descriptors
292       && ipa_alloc_node_params (node, count_formal_params (node->decl)))
293     ipa_populate_param_decls (node, *info->descriptors);
294 }
295 
296 /* Print the jump functions associated with call graph edge CS to file F.  */
297 
298 static void
299 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
300 {
301   int i, count;
302 
303   count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
304   for (i = 0; i < count; i++)
305     {
306       struct ipa_jump_func *jump_func;
307       enum jump_func_type type;
308 
309       jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
310       type = jump_func->type;
311 
312       fprintf (f, "       param %d: ", i);
313       if (type == IPA_JF_UNKNOWN)
314 	fprintf (f, "UNKNOWN\n");
315       else if (type == IPA_JF_CONST)
316 	{
317 	  tree val = jump_func->value.constant.value;
318 	  fprintf (f, "CONST: ");
319 	  print_generic_expr (f, val);
320 	  if (TREE_CODE (val) == ADDR_EXPR
321 	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
322 	    {
323 	      fprintf (f, " -> ");
324 	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
325 	    }
326 	  fprintf (f, "\n");
327 	}
328       else if (type == IPA_JF_PASS_THROUGH)
329 	{
330 	  fprintf (f, "PASS THROUGH: ");
331 	  fprintf (f, "%d, op %s",
332 		   jump_func->value.pass_through.formal_id,
333 		   get_tree_code_name(jump_func->value.pass_through.operation));
334 	  if (jump_func->value.pass_through.operation != NOP_EXPR)
335 	    {
336 	      fprintf (f, " ");
337 	      print_generic_expr (f, jump_func->value.pass_through.operand);
338 	    }
339 	  if (jump_func->value.pass_through.agg_preserved)
340 	    fprintf (f, ", agg_preserved");
341 	  fprintf (f, "\n");
342 	}
343       else if (type == IPA_JF_ANCESTOR)
344 	{
345 	  fprintf (f, "ANCESTOR: ");
346 	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
347 		   jump_func->value.ancestor.formal_id,
348 		   jump_func->value.ancestor.offset);
349 	  if (jump_func->value.ancestor.agg_preserved)
350 	    fprintf (f, ", agg_preserved");
351 	  fprintf (f, "\n");
352 	}
353 
354       if (jump_func->agg.items)
355 	{
356 	  struct ipa_agg_jf_item *item;
357 	  int j;
358 
359 	  fprintf (f, "         Aggregate passed by %s:\n",
360 		   jump_func->agg.by_ref ? "reference" : "value");
361 	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
362 	    {
363 	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
364 		       item->offset);
365 	      if (TYPE_P (item->value))
366 		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
367 			 tree_to_uhwi (TYPE_SIZE (item->value)));
368 	      else
369 		{
370 		  fprintf (f, "cst: ");
371 		  print_generic_expr (f, item->value);
372 		}
373 	      fprintf (f, "\n");
374 	    }
375 	}
376 
377       struct ipa_polymorphic_call_context *ctx
378 	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
379       if (ctx && !ctx->useless_p ())
380 	{
381 	  fprintf (f, "         Context: ");
382 	  ctx->dump (f);
383 	}
384 
385       if (jump_func->bits)
386 	{
387 	  fprintf (f, "         value: ");
388 	  print_hex (jump_func->bits->value, f);
389 	  fprintf (f, ", mask: ");
390 	  print_hex (jump_func->bits->mask, f);
391 	  fprintf (f, "\n");
392 	}
393       else
394 	fprintf (f, "         Unknown bits\n");
395 
396       if (jump_func->m_vr)
397 	{
398 	  fprintf (f, "         VR  ");
399 	  fprintf (f, "%s[",
400 		   (jump_func->m_vr->type == VR_ANTI_RANGE) ? "~" : "");
401 	  print_decs (wi::to_wide (jump_func->m_vr->min), f);
402 	  fprintf (f, ", ");
403 	  print_decs (wi::to_wide (jump_func->m_vr->max), f);
404 	  fprintf (f, "]\n");
405 	}
406       else
407 	fprintf (f, "         Unknown VR\n");
408     }
409 }
410 
411 
412 /* Print the jump functions of all arguments on all call graph edges going from
413    NODE to file F.  */
414 
415 void
416 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
417 {
418   struct cgraph_edge *cs;
419 
420   fprintf (f, "  Jump functions of caller  %s:\n", node->dump_name ());
421   for (cs = node->callees; cs; cs = cs->next_callee)
422     {
423       if (!ipa_edge_args_info_available_for_edge_p (cs))
424 	continue;
425 
426       fprintf (f, "    callsite  %s -> %s : \n",
427 	       node->dump_name (),
428 	       cs->callee->dump_name ());
429       ipa_print_node_jump_functions_for_edge (f, cs);
430     }
431 
432   for (cs = node->indirect_calls; cs; cs = cs->next_callee)
433     {
434       struct cgraph_indirect_call_info *ii;
435       if (!ipa_edge_args_info_available_for_edge_p (cs))
436 	continue;
437 
438       ii = cs->indirect_info;
439       if (ii->agg_contents)
440 	fprintf (f, "    indirect %s callsite, calling param %i, "
441 		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
442 		 ii->member_ptr ? "member ptr" : "aggregate",
443 		 ii->param_index, ii->offset,
444 		 ii->by_ref ? "by reference" : "by value");
445       else
446 	fprintf (f, "    indirect %s callsite, calling param %i, "
447 		 "offset " HOST_WIDE_INT_PRINT_DEC,
448 		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
449 		 ii->offset);
450 
451       if (cs->call_stmt)
452 	{
453 	  fprintf (f, ", for stmt ");
454 	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
455 	}
456       else
457 	fprintf (f, "\n");
458       if (ii->polymorphic)
459 	ii->context.dump (f);
460       ipa_print_node_jump_functions_for_edge (f, cs);
461     }
462 }
463 
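/* For illustration, the output of the function above looks roughly like this
   (the numbers in the dump names vary):

     Jump functions of caller  foo/12:
       callsite  foo/12 -> bar/5 :
        param 0: CONST: 4
        param 1: PASS THROUGH: 0, op nop_expr

   each param line being followed by the known bits and value range
   information, or "Unknown bits" and "Unknown VR".  */
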
464 /* Print ipa_jump_func data structures of all nodes in the call graph to F.  */
465 
466 void
467 ipa_print_all_jump_functions (FILE *f)
468 {
469   struct cgraph_node *node;
470 
471   fprintf (f, "\nJump functions:\n");
472   FOR_EACH_FUNCTION (node)
473     {
474       ipa_print_node_jump_functions (f, node);
475     }
476 }
477 
478 /* Set JFUNC to be a know-nothing (IPA_JF_UNKNOWN) jump function.  */
479 
480 static void
481 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
482 {
483   jfunc->type = IPA_JF_UNKNOWN;
484   jfunc->bits = NULL;
485   jfunc->m_vr = NULL;
486 }
487 
488 /* Set DST to be a copy of the jump function SRC (to be used by jump function
489    combination code).  The two will share their rdesc.  */
490 
491 static void
492 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
493 		     struct ipa_jump_func *src)
494 
495 {
496   gcc_checking_assert (src->type == IPA_JF_CONST);
497   dst->type = IPA_JF_CONST;
498   dst->value.constant = src->value.constant;
499 }
500 
501 /* Set JFUNC to be a constant jump function.  */
502 
503 static void
504 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
505 		     struct cgraph_edge *cs)
506 {
507   jfunc->type = IPA_JF_CONST;
508   jfunc->value.constant.value = unshare_expr_without_location (constant);
509 
510   if (TREE_CODE (constant) == ADDR_EXPR
511       && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
512     {
513       struct ipa_cst_ref_desc *rdesc;
514 
515       rdesc = ipa_refdesc_pool.allocate ();
516       rdesc->cs = cs;
517       rdesc->next_duplicate = NULL;
518       rdesc->refcount = 1;
519       jfunc->value.constant.rdesc = rdesc;
520     }
521   else
522     jfunc->value.constant.rdesc = NULL;
523 }
524 
525 /* Set JFUNC to be a simple pass-through jump function.  */
526 static void
527 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
528 				bool agg_preserved)
529 {
530   jfunc->type = IPA_JF_PASS_THROUGH;
531   jfunc->value.pass_through.operand = NULL_TREE;
532   jfunc->value.pass_through.formal_id = formal_id;
533   jfunc->value.pass_through.operation = NOP_EXPR;
534   jfunc->value.pass_through.agg_preserved = agg_preserved;
535 }
536 
537 /* Set JFUNC to be a unary pass-through jump function.  */
538 
539 static void
540 ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
541 			       enum tree_code operation)
542 {
543   jfunc->type = IPA_JF_PASS_THROUGH;
544   jfunc->value.pass_through.operand = NULL_TREE;
545   jfunc->value.pass_through.formal_id = formal_id;
546   jfunc->value.pass_through.operation = operation;
547   jfunc->value.pass_through.agg_preserved = false;
548 }
549 /* Set JFUNC to be an arithmetic pass-through jump function.  */
550 
551 static void
552 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
553 			       tree operand, enum tree_code operation)
554 {
555   jfunc->type = IPA_JF_PASS_THROUGH;
556   jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
557   jfunc->value.pass_through.formal_id = formal_id;
558   jfunc->value.pass_through.operation = operation;
559   jfunc->value.pass_through.agg_preserved = false;
560 }
561 
562 /* Set JFUNC to be an ancestor jump function.  */
563 
564 static void
565 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
566 		     int formal_id, bool agg_preserved)
567 {
568   jfunc->type = IPA_JF_ANCESTOR;
569   jfunc->value.ancestor.formal_id = formal_id;
570   jfunc->value.ancestor.offset = offset;
571   jfunc->value.ancestor.agg_preserved = agg_preserved;
572 }
573 
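/* As a source-level illustration of the setters above, in a call such as

     void caller (int i, struct B *b)
     {
       callee (i, i + 4, &b->base_field);
     }

   argument 0 can be described by a simple pass-through jump function,
   argument 1 by an arithmetic pass-through (PLUS_EXPR with operand 4), and
   argument 2 by an ancestor jump function with the offset of the (made-up)
   field base_field within struct B.  */
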
574 /* Get IPA BB information about the given BB.  FBI is the context of analysis
575    of this function body.  */
576 
577 static struct ipa_bb_info *
578 ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
579 {
580   gcc_checking_assert (fbi);
581   return &fbi->bb_infos[bb->index];
582 }
583 
584 /* Structure to be passed in between detect_type_change and
585    check_stmt_for_type_change.  */
586 
587 struct prop_type_change_info
588 {
589   /* Offset into the object where there is the virtual method pointer we are
590      looking for.  */
591   HOST_WIDE_INT offset;
592   /* The declaration or SSA_NAME pointer of the base that we are checking for
593      type change.  */
594   tree object;
595   /* Set to true if dynamic type change has been detected.  */
596   bool type_maybe_changed;
597 };
598 
599 /* Return true if STMT can modify a virtual method table pointer.
600 
601    This function makes special assumptions about both constructors and
602    destructors which are all the functions that are allowed to alter the VMT
603    pointers.  It assumes that destructors begin with assignment into all VMT
604    pointers and that constructors essentially look in the following way:
605 
606    1) The very first thing they do is call constructors of ancestor
607    sub-objects that have them.
608 
609    2) Then the VMT pointers of this object and all its ancestors are set to
610    new values corresponding to the type of the constructor.
611 
612    3) Only afterwards are other things, such as constructors of member
613    sub-objects and the code written by the user, run.  Only this part may
614    include calling virtual functions, directly or indirectly.
615 
616    There is no way to call a constructor of an ancestor sub-object in any
617    other way.
618 
619    This means that we do not have to care whether constructors get the correct
620    type information because they will always change it (in fact, if we define
621    the type to be given by the VMT pointer, it is undefined).
622 
623    The most important fact to derive from the above is that if, for some
624    statement in section 3, we try to detect whether the dynamic type has
625    changed, we can safely ignore all calls as we examine the function body
626    backwards until we reach statements in section 2 because these calls cannot
627    be ancestor constructors or destructors (if the input is not bogus) and so
628    do not change the dynamic type (this holds true only for automatically
629    allocated objects but at the moment we devirtualize only these).  We then
630    must detect that statements in section 2 change the dynamic type and can try
631    to derive the new type.  That is enough and we can stop, we will never see
632    the calls into constructors of sub-objects in this code.  Therefore we can
633    safely ignore all call statements that we traverse.
634   */
635 
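/* On the source level, the three sections described above roughly correspond
   to:

     struct B : A
     {
       B () : A ()     // 1) constructors of ancestor sub-objects run first
       {               // 2) then the VMT pointers are set to those of B
         do_stuff ();  // 3) only then member ctors and user code may run
       }
     };
   */
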
636 static bool
637 stmt_may_be_vtbl_ptr_store (gimple *stmt)
638 {
639   if (is_gimple_call (stmt))
640     return false;
641   if (gimple_clobber_p (stmt))
642     return false;
643   else if (is_gimple_assign (stmt))
644     {
645       tree lhs = gimple_assign_lhs (stmt);
646 
647       if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
648 	{
649 	  if (flag_strict_aliasing
650 	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
651 	    return false;
652 
653 	  if (TREE_CODE (lhs) == COMPONENT_REF
654 	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
655 	    return false;
656 	  /* In the future we might want to use get_ref_base_and_extent to find
657 	     if there is a field corresponding to the offset and if so, proceed
658 	     almost like if it was a component ref.  */
659 	}
660     }
661   return true;
662 }
663 
664 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
665    to check whether a particular statement may modify the virtual table
666    pointer.  It stores its result into DATA, which points to a
667    prop_type_change_info structure.  */
668 
669 static bool
670 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
671 {
672   gimple *stmt = SSA_NAME_DEF_STMT (vdef);
673   struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
674 
675   if (stmt_may_be_vtbl_ptr_store (stmt))
676     {
677       tci->type_maybe_changed = true;
678       return true;
679     }
680   else
681     return false;
682 }
683 
684 /* See if ARG is a PARM_DECL describing an instance passed by pointer
685    or reference in FUNCTION.  Return true if the dynamic type may change
686    between the beginning of the function and the invocation of CALL.
687 
688    Generally functions are not allowed to change the type of such instances,
689    but they can call destructors.  We assume that methods cannot destroy the
690    THIS pointer.  Also, as a special case, constructors and destructors may
691    change the type of the THIS pointer.  */
692 
693 static bool
694 param_type_may_change_p (tree function, tree arg, gimple *call)
695 {
696   /* Pure and const functions cannot make any changes to the dynamic type;
697      that requires writing to memory.  */
698   if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
699     return false;
700   /* We need to check if we are within an inlined constructor
701      or destructor (ideally we would have a way to check that the
702      inlined cdtor is actually working on ARG, but we don't have
703      an easy tie on this, so punt on all non-pure cdtors.
704      We may also record the types of cdtors and, once we know the type
705      of the instance, match them.
706 
707      Also, code unification optimizations may merge calls from
708      different blocks, making return values unreliable.  So
709      do nothing during late optimization.  */
710   if (DECL_STRUCT_FUNCTION (function)->after_inlining)
711     return true;
712   if (TREE_CODE (arg) == SSA_NAME
713       && SSA_NAME_IS_DEFAULT_DEF (arg)
714       && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
715     {
716       /* Normal (non-THIS) argument.  */
717       if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
718 	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
719 	  /* THIS pointer of a method - here we want to watch constructors
720 	     and destructors as those definitely may change the dynamic
721 	     type.  */
722 	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
723 	      && !DECL_CXX_CONSTRUCTOR_P (function)
724 	      && !DECL_CXX_DESTRUCTOR_P (function)
725 	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
726 	{
727 	  /* Walk the inline stack and watch out for ctors/dtors.  */
728 	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
729 	       block = BLOCK_SUPERCONTEXT (block))
730 	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
731 	      return true;
732 	  return false;
733 	}
734     }
735   return true;
736 }
737 
738 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
739    callsite CALL) by looking for assignments to its virtual table pointer.  If
740    it is, return true and fill in the jump function JFUNC with relevant type
741    information or set it to unknown.  ARG is the object itself (not a pointer
742    to it, unless dereferenced).  BASE is the base of the memory access as
743    returned by get_ref_base_and_extent, as is the offset.
744 
745    This is a helper function for detect_type_change and detect_type_change_ssa
746    that does the heavy work, which is usually unnecessary.  */
747 
748 static bool
749 detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
750 				       gcall *call, struct ipa_jump_func *jfunc,
751 				       HOST_WIDE_INT offset)
752 {
753   struct prop_type_change_info tci;
754   ao_ref ao;
755   bool entry_reached = false;
756 
757   gcc_checking_assert (DECL_P (arg)
758 		       || TREE_CODE (arg) == MEM_REF
759 		       || handled_component_p (arg));
760 
761   comp_type = TYPE_MAIN_VARIANT (comp_type);
762 
763   /* Const calls cannot call virtual methods through VMT and so type changes do
764      not matter.  */
765   if (!flag_devirtualize || !gimple_vuse (call)
766       /* Be sure expected_type is polymorphic.  */
767       || !comp_type
768       || TREE_CODE (comp_type) != RECORD_TYPE
769       || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
770       || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
771     return true;
772 
773   ao_ref_init (&ao, arg);
774   ao.base = base;
775   ao.offset = offset;
776   ao.size = POINTER_SIZE;
777   ao.max_size = ao.size;
778 
779   tci.offset = offset;
780   tci.object = get_base_address (arg);
781   tci.type_maybe_changed = false;
782 
783   walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
784 		      &tci, NULL, &entry_reached);
785   if (!tci.type_maybe_changed)
786     return false;
787 
788   ipa_set_jf_unknown (jfunc);
789   return true;
790 }
791 
792 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
793    If it is, return true and fill in the jump function JFUNC with relevant type
794    information or set it to unknown.  ARG is the object itself (not a pointer
795    to it, unless dereferenced).  BASE is the base of the memory access as
796    returned by get_ref_base_and_extent, as is the offset.  */
797 
798 static bool
799 detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
800 		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
801 {
802   if (!flag_devirtualize)
803     return false;
804 
805   if (TREE_CODE (base) == MEM_REF
806       && !param_type_may_change_p (current_function_decl,
807 				   TREE_OPERAND (base, 0),
808 				   call))
809     return false;
810   return detect_type_change_from_memory_writes (arg, base, comp_type,
811 						call, jfunc, offset);
812 }
813 
814 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
815    SSA name (its dereference will become the base and the offset is assumed to
816    be zero).  */
817 
818 static bool
819 detect_type_change_ssa (tree arg, tree comp_type,
820 			gcall *call, struct ipa_jump_func *jfunc)
821 {
822   gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
823   if (!flag_devirtualize
824       || !POINTER_TYPE_P (TREE_TYPE (arg)))
825     return false;
826 
827   if (!param_type_may_change_p (current_function_decl, arg, call))
828     return false;
829 
830   arg = build2 (MEM_REF, ptr_type_node, arg,
831 		build_int_cst (ptr_type_node, 0));
832 
833   return detect_type_change_from_memory_writes (arg, arg, comp_type,
834 						call, jfunc, 0);
835 }
836 
837 /* Callback of walk_aliased_vdefs.  Records that it has been invoked in the
838    boolean variable pointed to by DATA.  */
839 
840 static bool
841 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
842 		     void *data)
843 {
844   bool *b = (bool *) data;
845   *b = true;
846   return true;
847 }
848 
849 /* Return true if we have already walked so many statements in AA that we
850    should really just start giving up.  */
851 
852 static bool
853 aa_overwalked (struct ipa_func_body_info *fbi)
854 {
855   gcc_checking_assert (fbi);
856   return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
857 }
858 
859 /* Find the nearest valid AA status for the parameter specified by INDEX in a
860    basic block dominating BB.  */
861 
862 static struct ipa_param_aa_status *
863 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
864 			   int index)
865 {
866   while (true)
867     {
868       bb = get_immediate_dominator (CDI_DOMINATORS, bb);
869       if (!bb)
870 	return NULL;
871       struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
872       if (!bi->param_aa_statuses.is_empty ()
873 	  && bi->param_aa_statuses[index].valid)
874 	return &bi->param_aa_statuses[index];
875     }
876 }
877 
878 /* Get AA status structure for the given BB and parameter with INDEX.  Allocate
879    structures and/or initialize the result with a dominating description as
880    necessary.  */
881 
882 static struct ipa_param_aa_status *
883 parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
884 			  int index)
885 {
886   gcc_checking_assert (fbi);
887   struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
888   if (bi->param_aa_statuses.is_empty ())
889     bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
890   struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
891   if (!paa->valid)
892     {
893       gcc_checking_assert (!paa->parm_modified
894 			   && !paa->ref_modified
895 			   && !paa->pt_modified);
896       struct ipa_param_aa_status *dom_paa;
897       dom_paa = find_dominating_aa_status (fbi, bb, index);
898       if (dom_paa)
899 	*paa = *dom_paa;
900       else
901 	paa->valid = true;
902     }
903 
904   return paa;
905 }
906 
907 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
908    a value known not to be modified in this function before reaching the
909    statement STMT.  FBI holds information about the function we have gathered
910    so far but that does not survive the summary building stage.  */
911 
912 static bool
913 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
914 			      gimple *stmt, tree parm_load)
915 {
916   struct ipa_param_aa_status *paa;
917   bool modified = false;
918   ao_ref refd;
919 
920   tree base = get_base_address (parm_load);
921   gcc_assert (TREE_CODE (base) == PARM_DECL);
922   if (TREE_READONLY (base))
923     return true;
924 
925   /* FIXME: FBI can be NULL if we are being called from outside
926      ipa_node_analysis or ipcp_transform_function, which currently happens
927      during inlining analysis.  It would be great to extend fbi's lifetime and
928      always have it.  Currently, we are just not afraid of too much walking in
929      that case.  */
930   if (fbi)
931     {
932       if (aa_overwalked (fbi))
933 	return false;
934       paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
935       if (paa->parm_modified)
936 	return false;
937     }
938   else
939     paa = NULL;
940 
941   gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
942   ao_ref_init (&refd, parm_load);
943   int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
944 				   &modified, NULL);
945   if (fbi)
946     fbi->aa_walked += walked;
947   if (paa && modified)
948     paa->parm_modified = true;
949   return !modified;
950 }
951 
952 /* If STMT is an assignment that loads a value from a parameter declaration,
953    return the index of the parameter in ipa_node_params, provided it has not
954    been modified.  Otherwise return -1.  */
955 
956 static int
957 load_from_unmodified_param (struct ipa_func_body_info *fbi,
958 			    vec<ipa_param_descriptor, va_gc> *descriptors,
959 			    gimple *stmt)
960 {
961   int index;
962   tree op1;
963 
964   if (!gimple_assign_single_p (stmt))
965     return -1;
966 
967   op1 = gimple_assign_rhs1 (stmt);
968   if (TREE_CODE (op1) != PARM_DECL)
969     return -1;
970 
971   index = ipa_get_param_decl_index_1 (descriptors, op1);
972   if (index < 0
973       || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
974     return -1;
975 
976   return index;
977 }
978 
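/* For example, given an addressable parameter 'a', the statement

     a.0_2 = a;

   is such a load, and if nothing can modify 'a' before it, the index of 'a'
   in the parameter descriptors is returned.  */
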
979 /* Return true if memory reference REF (which must be a load through parameter
980    with INDEX) loads data that are known to be unmodified in this function
981    before reaching statement STMT.  */
982 
983 static bool
984 parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
985 			   int index, gimple *stmt, tree ref)
986 {
987   struct ipa_param_aa_status *paa;
988   bool modified = false;
989   ao_ref refd;
990 
991   /* FIXME: FBI can be NULL if we are being called from outside
992      ipa_node_analysis or ipcp_transform_function, which currently happens
993      during inlining analysis.  It would be great to extend fbi's lifetime and
994      always have it.  Currently, we are just not afraid of too much walking in
995      that case.  */
996   if (fbi)
997     {
998       if (aa_overwalked (fbi))
999 	return false;
1000       paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1001       if (paa->ref_modified)
1002 	return false;
1003     }
1004   else
1005     paa = NULL;
1006 
1007   gcc_checking_assert (gimple_vuse (stmt));
1008   ao_ref_init (&refd, ref);
1009   int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1010 				   &modified, NULL);
1011   if (fbi)
1012     fbi->aa_walked += walked;
1013   if (paa && modified)
1014     paa->ref_modified = true;
1015   return !modified;
1016 }
1017 
1018 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1019    is known to be unmodified in this function before reaching call statement
1020    CALL into which it is passed.  FBI describes the function body.  */
1021 
1022 static bool
1023 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
1024 			      gimple *call, tree parm)
1025 {
1026   bool modified = false;
1027   ao_ref refd;
1028 
1029   /* It's unnecessary to calculate anything about memory contents for a const
1030      function because it is not going to use it.  But do not cache the result
1031      either.  Also, no such calculations for non-pointers.  */
1032   if (!gimple_vuse (call)
1033       || !POINTER_TYPE_P (TREE_TYPE (parm))
1034       || aa_overwalked (fbi))
1035     return false;
1036 
1037   struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1038 							      gimple_bb (call),
1039 							      index);
1040   if (paa->pt_modified)
1041     return false;
1042 
1043   ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1044   int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1045 				   &modified, NULL);
1046   fbi->aa_walked += walked;
1047   if (modified)
1048     paa->pt_modified = true;
1049   return !modified;
1050 }
1051 
1052 /* Return true if we can prove that OP is a memory reference loading
1053    data from an aggregate passed as a parameter.
1054 
1055    The function works in two modes.  If GUARANTEED_UNMODIFIED is NULL, it returns
1056    false if it cannot prove that the value has not been modified before the
1057    load in STMT.  If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1058    if it cannot prove the value has not been modified, in that case it will
1059    store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1060 
1061    FBI and DESCRIPTORS describe parameters of the current function (but the
1062    former can be NULL), STMT is the load statement.  If the function returns true,
1063    *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1064    within the aggregate and whether it is a load from a value passed by
1065    reference respectively.  */
1066 
1067 bool
1068 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
1069 			vec<ipa_param_descriptor, va_gc> *descriptors,
1070 			gimple *stmt, tree op, int *index_p,
1071 			HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1072 			bool *by_ref_p, bool *guaranteed_unmodified)
1073 {
1074   int index;
1075   HOST_WIDE_INT size;
1076   bool reverse;
1077   tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
1078 
1079   if (!base)
1080     return false;
1081 
1082   if (DECL_P (base))
1083     {
1084       int index = ipa_get_param_decl_index_1 (descriptors, base);
1085       if (index >= 0
1086 	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1087 	{
1088 	  *index_p = index;
1089 	  *by_ref_p = false;
1090 	  if (size_p)
1091 	    *size_p = size;
1092 	  if (guaranteed_unmodified)
1093 	    *guaranteed_unmodified = true;
1094 	  return true;
1095 	}
1096       return false;
1097     }
1098 
1099   if (TREE_CODE (base) != MEM_REF
1100       || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1101       || !integer_zerop (TREE_OPERAND (base, 1)))
1102     return false;
1103 
1104   if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1105     {
1106       tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1107       index = ipa_get_param_decl_index_1 (descriptors, parm);
1108     }
1109   else
1110     {
1111       /* This branch catches situations where a pointer parameter is not a
1112 	 gimple register, for example:
1113 
1114 	 void hip7(S*) (struct S * p)
1115 	 {
1116 	 void (*<T2e4>) (struct S *) D.1867;
1117 	 struct S * p.1;
1118 
1119 	 <bb 2>:
1120 	 p.1_1 = p;
1121 	 D.1867_2 = p.1_1->f;
1122 	 D.1867_2 ();
1123 	 gdp = &p;
1124       */
1125 
1126       gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1127       index = load_from_unmodified_param (fbi, descriptors, def);
1128     }
1129 
1130   if (index >= 0)
1131     {
1132       bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1133       if (!data_preserved && !guaranteed_unmodified)
1134 	return false;
1135 
1136       *index_p = index;
1137       *by_ref_p = true;
1138       if (size_p)
1139 	*size_p = size;
1140       if (guaranteed_unmodified)
1141 	*guaranteed_unmodified = data_preserved;
1142       return true;
1143     }
1144   return false;
1145 }
1146 
1147 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1148    of an assignment statement STMT, try to determine whether we are actually
1149    handling any of the following cases and construct an appropriate jump
1150    function into JFUNC if so:
1151 
1152    1) The passed value is loaded from a formal parameter which is not a gimple
1153    register (most probably because it is addressable; the value still has to be
1154    scalar) and we can guarantee the value has not changed.  This case can
1155    therefore be described by a simple pass-through jump function.  For example:
1156 
1157       foo (int a)
1158       {
1159         int a.0;
1160 
1161         a.0_2 = a;
1162         bar (a.0_2);
1163 
1164    2) The passed value can be described by a simple arithmetic pass-through
1165    jump function. E.g.
1166 
1167       foo (int a)
1168       {
1169         int D.2064;
1170 
1171         D.2064_4 = a.1(D) + 4;
1172         bar (D.2064_4);
1173 
1174    This case can also occur in combination of the previous one, e.g.:
1175 
1176       foo (int a, int z)
1177       {
1178         int a.0;
1179         int D.2064;
1180 
1181 	a.0_3 = a;
1182 	D.2064_4 = a.0_3 + 4;
1183 	foo (D.2064_4);
1184 
1185    3) The passed value is an address of an object within another one (which
1186    is also passed by reference).  Such situations are described by an ancestor
1187    jump function and describe situations such as:
1188 
1189      B::foo() (struct B * const this)
1190      {
1191        struct A * D.1845;
1192 
1193        D.1845_2 = &this_1(D)->D.1748;
1194        A::bar (D.1845_2);
1195 
1196    INFO is the structure describing individual parameters as they are accessed
1197    in different stages of IPA optimizations.  FBI contains the information
1198    that is only needed for intraprocedural analysis.  */
1199 
1200 static void
1201 compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1202 				  struct ipa_node_params *info,
1203 				  struct ipa_jump_func *jfunc,
1204 				  gcall *call, gimple *stmt, tree name,
1205 				  tree param_type)
1206 {
1207   HOST_WIDE_INT offset, size;
1208   tree op1, tc_ssa, base, ssa;
1209   bool reverse;
1210   int index;
1211 
1212   op1 = gimple_assign_rhs1 (stmt);
1213 
1214   if (TREE_CODE (op1) == SSA_NAME)
1215     {
1216       if (SSA_NAME_IS_DEFAULT_DEF (op1))
1217 	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1218       else
1219 	index = load_from_unmodified_param (fbi, info->descriptors,
1220 					    SSA_NAME_DEF_STMT (op1));
1221       tc_ssa = op1;
1222     }
1223   else
1224     {
1225       index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1226       tc_ssa = gimple_assign_lhs (stmt);
1227     }
1228 
1229   if (index >= 0)
1230     {
1231       switch (gimple_assign_rhs_class (stmt))
1232 	{
1233 	case GIMPLE_BINARY_RHS:
1234 	  {
1235 	    tree op2 = gimple_assign_rhs2 (stmt);
1236 	    if (!is_gimple_ip_invariant (op2)
1237 		|| ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1238 		     != tcc_comparison)
1239 		    && !useless_type_conversion_p (TREE_TYPE (name),
1240 						   TREE_TYPE (op1))))
1241 	      return;
1242 
1243 	    ipa_set_jf_arith_pass_through (jfunc, index, op2,
1244 					   gimple_assign_rhs_code (stmt));
1245 	    break;
1246 	  }
1247 	case GIMPLE_SINGLE_RHS:
1248 	  {
1249 	    bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1250 						       tc_ssa);
1251 	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1252 	    break;
1253 	  }
1254 	case GIMPLE_UNARY_RHS:
1255 	  if (is_gimple_assign (stmt)
1256 	      && gimple_assign_rhs_class (stmt) == GIMPLE_UNARY_RHS
1257 	      && ! CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
1258 	    ipa_set_jf_unary_pass_through (jfunc, index,
1259 					   gimple_assign_rhs_code (stmt));
1260 	default:;
1261 	}
1262       return;
1263     }
1264 
1265   if (TREE_CODE (op1) != ADDR_EXPR)
1266     return;
1267   op1 = TREE_OPERAND (op1, 0);
1268   if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1269     return;
1270   base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
1271   offset_int mem_offset;
1272   if (!base
1273       || TREE_CODE (base) != MEM_REF
1274       || !mem_ref_offset (base).is_constant (&mem_offset))
1275     return;
1276   offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1277   ssa = TREE_OPERAND (base, 0);
1278   if (TREE_CODE (ssa) != SSA_NAME
1279       || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1280       || offset < 0)
1281     return;
1282 
1283   /* Dynamic types are changed in constructors and destructors.  */
1284   index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1285   if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1286     ipa_set_ancestor_jf (jfunc, offset,  index,
1287 			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
1288 }
1289 
1290 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1291    it looks like:
1292 
1293    iftmp.1_3 = &obj_2(D)->D.1762;
1294 
1295    The base of the MEM_REF must be a default definition SSA NAME of a
1296    parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
1297    whole MEM_REF expression is returned and the offset calculated from any
1298    handled components and the MEM_REF itself is stored into *OFFSET.  The whole
1299    RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */
1300 
1301 static tree
1302 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1303 {
1304   HOST_WIDE_INT size;
1305   tree expr, parm, obj;
1306   bool reverse;
1307 
1308   if (!gimple_assign_single_p (assign))
1309     return NULL_TREE;
1310   expr = gimple_assign_rhs1 (assign);
1311 
1312   if (TREE_CODE (expr) != ADDR_EXPR)
1313     return NULL_TREE;
1314   expr = TREE_OPERAND (expr, 0);
1315   obj = expr;
1316   expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
1317 
1318   offset_int mem_offset;
1319   if (!expr
1320       || TREE_CODE (expr) != MEM_REF
1321       || !mem_ref_offset (expr).is_constant (&mem_offset))
1322     return NULL_TREE;
1323   parm = TREE_OPERAND (expr, 0);
1324   if (TREE_CODE (parm) != SSA_NAME
1325       || !SSA_NAME_IS_DEFAULT_DEF (parm)
1326       || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1327     return NULL_TREE;
1328 
1329   *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1330   *obj_p = obj;
1331   return expr;
1332 }
1333 
1334 
1335 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1336    statement PHI, try to find out whether the phi result is in fact a
1337    multiple-inheritance typecast from a descendant into an ancestor of a formal
1338    parameter and thus can be described by an ancestor jump function and if so,
1339    write the appropriate function into JFUNC.
1340 
1341    Essentially we want to match the following pattern:
1342 
1343      if (obj_2(D) != 0B)
1344        goto <bb 3>;
1345      else
1346        goto <bb 4>;
1347 
1348    <bb 3>:
1349      iftmp.1_3 = &obj_2(D)->D.1762;
1350 
1351    <bb 4>:
1352      # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1353      D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1354      return D.1879_6;  */
1355 
1356 static void
1357 compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1358 				    struct ipa_node_params *info,
1359 				    struct ipa_jump_func *jfunc,
1360 				    gcall *call, gphi *phi)
1361 {
1362   HOST_WIDE_INT offset;
1363   gimple *assign, *cond;
1364   basic_block phi_bb, assign_bb, cond_bb;
1365   tree tmp, parm, expr, obj;
1366   int index, i;
1367 
1368   if (gimple_phi_num_args (phi) != 2)
1369     return;
1370 
1371   if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1372     tmp = PHI_ARG_DEF (phi, 0);
1373   else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1374     tmp = PHI_ARG_DEF (phi, 1);
1375   else
1376     return;
1377   if (TREE_CODE (tmp) != SSA_NAME
1378       || SSA_NAME_IS_DEFAULT_DEF (tmp)
1379       || !POINTER_TYPE_P (TREE_TYPE (tmp))
1380       || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1381     return;
1382 
1383   assign = SSA_NAME_DEF_STMT (tmp);
1384   assign_bb = gimple_bb (assign);
1385   if (!single_pred_p (assign_bb))
1386     return;
1387   expr = get_ancestor_addr_info (assign, &obj, &offset);
1388   if (!expr)
1389     return;
1390   parm = TREE_OPERAND (expr, 0);
1391   index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1392   if (index < 0)
1393     return;
1394 
1395   cond_bb = single_pred (assign_bb);
1396   cond = last_stmt (cond_bb);
1397   if (!cond
1398       || gimple_code (cond) != GIMPLE_COND
1399       || gimple_cond_code (cond) != NE_EXPR
1400       || gimple_cond_lhs (cond) != parm
1401       || !integer_zerop (gimple_cond_rhs (cond)))
1402     return;
1403 
1404   phi_bb = gimple_bb (phi);
1405   for (i = 0; i < 2; i++)
1406     {
1407       basic_block pred = EDGE_PRED (phi_bb, i)->src;
1408       if (pred != assign_bb && pred != cond_bb)
1409 	return;
1410     }
1411 
1412   ipa_set_ancestor_jf (jfunc, offset, index,
1413 		       parm_ref_data_pass_through_p (fbi, index, call, parm));
1414 }
1415 
1416 /* Inspect the given TYPE and return true iff it has the same structure (the
1417    same number of fields of the same types) as a C++ member pointer.  If
1418    METHOD_PTR and DELTA are non-NULL, store the trees representing the
1419    corresponding fields there.  */
1420 
1421 static bool
1422 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1423 {
1424   tree fld;
1425 
1426   if (TREE_CODE (type) != RECORD_TYPE)
1427     return false;
1428 
1429   fld = TYPE_FIELDS (type);
1430   if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1431       || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1432       || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1433     return false;
1434 
1435   if (method_ptr)
1436     *method_ptr = fld;
1437 
1438   fld = DECL_CHAIN (fld);
1439   if (!fld || !INTEGRAL_TYPE_P (TREE_TYPE (fld))
1440       || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1441     return false;
1442   if (delta)
1443     *delta = fld;
1444 
1445   if (DECL_CHAIN (fld))
1446     return false;
1447 
1448   return true;
1449 }
1450 
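/* For reference, a C++ pointer to member function is typically lowered into
   a record along the lines of

     struct
     {
       void (A::* pfn) ();   // pointer to METHOD_TYPE
       long delta;           // integral this-pointer adjustment
     };

   which is the shape the predicate above checks for; the field names here
   are only illustrative.  */
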
1451 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1452    return the rhs of its defining statement.  Otherwise return RHS as it
1453    is.  */
1454 
1455 static inline tree
1456 get_ssa_def_if_simple_copy (tree rhs)
1457 {
1458   while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1459     {
1460       gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1461 
1462       if (gimple_assign_single_p (def_stmt))
1463 	rhs = gimple_assign_rhs1 (def_stmt);
1464       else
1465 	break;
1466     }
1467   return rhs;
1468 }
1469 
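/* E.g. with the statements

     b_2 = a_1;
     c_3 = b_2;

   get_ssa_def_if_simple_copy applied to c_3 walks the copy chain and
   returns a_1.  */
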
1470 /* Simple linked list, describing known contents of an aggregate before a
1471    call.  */
1472 
1473 struct ipa_known_agg_contents_list
1474 {
1475   /* Offset and size of the described part of the aggregate.  */
1476   HOST_WIDE_INT offset, size;
1477   /* Known constant value or NULL if the contents are known to be unknown.  */
1478   tree constant;
1479   /* Pointer to the next structure in the list.  */
1480   struct ipa_known_agg_contents_list *next;
1481 };
1482 
1483 /* Find the proper place in the linked list of ipa_known_agg_contents_list
1484    structures in which to put a new one with the given LHS_OFFSET and LHS_SIZE,
1485    unless there is a partial overlap, in which case return NULL, or such
1486    element is already there, in which case set *ALREADY_THERE to true.  */
1487 
1488 static struct ipa_known_agg_contents_list **
1489 get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
1490 				HOST_WIDE_INT lhs_offset,
1491 				HOST_WIDE_INT lhs_size,
1492 				bool *already_there)
1493 {
1494   struct ipa_known_agg_contents_list **p = list;
1495   while (*p && (*p)->offset < lhs_offset)
1496     {
1497       if ((*p)->offset + (*p)->size > lhs_offset)
1498 	return NULL;
1499       p = &(*p)->next;
1500     }
1501 
1502   if (*p && (*p)->offset < lhs_offset + lhs_size)
1503     {
1504       if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1505 	/* We already know this value is subsequently overwritten with
1506 	   something else.  */
1507 	*already_there = true;
1508       else
1509 	/* Otherwise this is a partial overlap which we cannot
1510 	   represent.  */
1511 	return NULL;
1512     }
1513   return p;
1514 }
1515 
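/* For instance, if the list already describes pieces at offsets 0 and 64,
   each 32 bits in size, then a new piece at offset 32 is inserted between
   them, a piece identical to the one at offset 64 sets *ALREADY_THERE, and
   a 32-bit piece at offset 48 makes the function return NULL because it
   only partially overlaps an existing entry.  */
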
1516 /* Build aggregate jump function from LIST, assuming there are exactly
1517    CONST_COUNT constant entries there and that the offset of the passed argument
1518    is ARG_OFFSET and store it into JFUNC.  */
1519 
1520 static void
1521 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1522 			       int const_count, HOST_WIDE_INT arg_offset,
1523 			       struct ipa_jump_func *jfunc)
1524 {
1525   vec_alloc (jfunc->agg.items, const_count);
1526   while (list)
1527     {
1528       if (list->constant)
1529 	{
1530 	  struct ipa_agg_jf_item item;
1531 	  item.offset = list->offset - arg_offset;
1532 	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1533 	  item.value = unshare_expr_without_location (list->constant);
1534 	  jfunc->agg.items->quick_push (item);
1535 	}
1536       list = list->next;
1537     }
1538 }
1539 
1540 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1541    in ARG is filled in with constant values.  ARG can either be an aggregate
1542    expression or a pointer to an aggregate.  ARG_TYPE is the type of the
1543    aggregate.  JFUNC is the jump function into which the constants are
1544    subsequently stored.  */
1545 
1546 static void
1547 determine_locally_known_aggregate_parts (gcall *call, tree arg,
1548 					 tree arg_type,
1549 					 struct ipa_jump_func *jfunc)
1550 {
1551   struct ipa_known_agg_contents_list *list = NULL;
1552   int item_count = 0, const_count = 0;
1553   HOST_WIDE_INT arg_offset, arg_size;
1554   gimple_stmt_iterator gsi;
1555   tree arg_base;
1556   bool check_ref, by_ref;
1557   ao_ref r;
1558 
1559   if (PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS) == 0)
1560     return;
1561 
1562   /* The function operates in three stages.  First, we prepare check_ref, r,
1563      arg_base and arg_offset based on what is actually passed as an actual
1564      argument.  */
1565 
1566   if (POINTER_TYPE_P (arg_type))
1567     {
1568       by_ref = true;
1569       if (TREE_CODE (arg) == SSA_NAME)
1570 	{
1571 	  tree type_size;
1572           if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
1573 	      || !POINTER_TYPE_P (TREE_TYPE (arg)))
1574             return;
1575 	  check_ref = true;
1576 	  arg_base = arg;
1577 	  arg_offset = 0;
1578 	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1579 	  arg_size = tree_to_uhwi (type_size);
1580 	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1581 	}
1582       else if (TREE_CODE (arg) == ADDR_EXPR)
1583 	{
1584 	  bool reverse;
1585 
1586 	  arg = TREE_OPERAND (arg, 0);
1587 	  arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1588 						  &arg_size, &reverse);
1589 	  if (!arg_base)
1590 	    return;
1591 	  if (DECL_P (arg_base))
1592 	    {
1593 	      check_ref = false;
1594 	      ao_ref_init (&r, arg_base);
1595 	    }
1596 	  else
1597 	    return;
1598 	}
1599       else
1600 	return;
1601     }
1602   else
1603     {
1604       bool reverse;
1605 
1606       gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1607 
1608       by_ref = false;
1609       check_ref = false;
1610       arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1611 					      &arg_size, &reverse);
1612       if (!arg_base)
1613 	return;
1614 
1615       ao_ref_init (&r, arg);
1616     }
1617 
1618   /* Second stage walks back the BB, looks at individual statements and as long
1619      as it is confident of how the statements affect contents of the
1620      aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1621      describing it.  */
1622   gsi = gsi_for_stmt (call);
1623   gsi_prev (&gsi);
1624   for (; !gsi_end_p (gsi); gsi_prev (&gsi))
1625     {
1626       struct ipa_known_agg_contents_list *n, **p;
1627       gimple *stmt = gsi_stmt (gsi);
1628       HOST_WIDE_INT lhs_offset, lhs_size;
1629       tree lhs, rhs, lhs_base;
1630       bool reverse;
1631 
1632       if (!stmt_may_clobber_ref_p_1 (stmt, &r))
1633 	continue;
1634       if (!gimple_assign_single_p (stmt))
1635 	break;
1636 
1637       lhs = gimple_assign_lhs (stmt);
1638       rhs = gimple_assign_rhs1 (stmt);
1639       if (!is_gimple_reg_type (TREE_TYPE (rhs))
1640 	  || TREE_CODE (lhs) == BIT_FIELD_REF
1641 	  || contains_bitfld_component_ref_p (lhs))
1642 	break;
1643 
1644       lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset,
1645 					      &lhs_size, &reverse);
1646       if (!lhs_base)
1647 	break;
1648 
1649       if (check_ref)
1650 	{
1651 	  if (TREE_CODE (lhs_base) != MEM_REF
1652 	      || TREE_OPERAND (lhs_base, 0) != arg_base
1653 	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1654 	    break;
1655 	}
1656       else if (lhs_base != arg_base)
1657 	{
1658 	  if (DECL_P (lhs_base))
1659 	    continue;
1660 	  else
1661 	    break;
1662 	}
1663 
1664       bool already_there = false;
1665       p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1666 					  &already_there);
1667       if (!p)
1668 	break;
1669       if (already_there)
1670 	continue;
1671 
1672       rhs = get_ssa_def_if_simple_copy (rhs);
1673       n = XALLOCA (struct ipa_known_agg_contents_list);
1674       n->size = lhs_size;
1675       n->offset = lhs_offset;
1676       if (is_gimple_ip_invariant (rhs))
1677 	{
1678 	  n->constant = rhs;
1679 	  const_count++;
1680 	}
1681       else
1682 	n->constant = NULL_TREE;
1683       n->next = *p;
1684       *p = n;
1685 
1686       item_count++;
1687       if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1688 	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
1689 	break;
1690     }
1691 
1692   /* The third stage just goes over the list and creates an appropriate vector
1693      of ipa_agg_jf_item structures out of it, of course only if there are any
1694      known constants to begin with.  */
1695 
1696   if (const_count)
1697     {
1698       jfunc->agg.by_ref = by_ref;
1699       build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
1700     }
1701 }
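
     /* As an illustrative sketch (the caller below is hypothetical and not
        part of GCC itself), consider an aggregate passed by value whose fields
        are initialized just before the call:

          struct desc { int kind; int size; };
          int consume (struct desc d);

          int
          produce (void)
          {
            struct desc d;
            d.kind = 1;
            d.size = 16;
            return consume (d);
          }

        Walking backwards from the call, the second stage above records both
        stores and the third stage emits two ipa_agg_jf_item entries: constant
        1 at bit offset 0 and constant 16 at bit offset 32 (assuming a 32-bit
        int), letting IPA-CP know the contents of D inside CONSUME.  */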
1702 
1703 /* Return the type of the I-th formal parameter of the callee associated
1704    with call graph edge E.  */
1705 
1706 tree
1707 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1708 {
1709   int n;
1710   tree type = (e->callee
1711 	       ? TREE_TYPE (e->callee->decl)
1712 	       : gimple_call_fntype (e->call_stmt));
1713   tree t = TYPE_ARG_TYPES (type);
1714 
1715   for (n = 0; n < i; n++)
1716     {
1717       if (!t)
1718         break;
1719       t = TREE_CHAIN (t);
1720     }
1721   if (t)
1722     return TREE_VALUE (t);
1723   if (!e->callee)
1724     return NULL;
1725   t = DECL_ARGUMENTS (e->callee->decl);
1726   for (n = 0; n < i; n++)
1727     {
1728       if (!t)
1729 	return NULL;
1730       t = TREE_CHAIN (t);
1731     }
1732   if (t)
1733     return TREE_TYPE (t);
1734   return NULL;
1735 }
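
     /* For example (the declarations are hypothetical and only illustrate the
        two lookups above): given

          void callee (int a, char *b);

        TYPE_ARG_TYPES of the callee's type yields int for I == 0 and char *
        for I == 1.  For an unprototyped K&R-style definition such as

          void callee (a, b) int a; char *b; {}

        TYPE_ARG_TYPES is NULL, and the fallback walk over DECL_ARGUMENTS
        (possible only when E->callee is known) recovers the types from the
        PARM_DECLs instead.  */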
1736 
1737 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
1738    allocated structure or a previously existing one shared with other jump
1739    functions and/or transformation summaries.  */
1740 
1741 ipa_bits *
1742 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
1743 {
1744   ipa_bits tmp;
1745   tmp.value = value;
1746   tmp.mask = mask;
1747 
1748   ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
1749   if (*slot)
1750     return *slot;
1751 
1752   ipa_bits *res = ggc_alloc<ipa_bits> ();
1753   res->value = value;
1754   res->mask = mask;
1755   *slot = res;
1756 
1757   return res;
1758 }
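
     /* A minimal usage sketch (the values are arbitrary): two requests with
        equal members share a single GC-allocated structure,

          ipa_bits *a = ipa_get_ipa_bits_for_value (0, 3);
          ipa_bits *b = ipa_get_ipa_bits_for_value (0, 3);
          gcc_checking_assert (a == b);

        so identical known-bits information is never duplicated across jump
        functions and transformation summaries.  */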
1759 
1760 /* Assign to JF a pointer to an ipa_bits structure with VALUE and MASK.  Use
1761    the hash table to avoid creating multiple identical ipa_bits structures.  */
1762 
1763 static void
1764 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
1765 		    const widest_int &mask)
1766 {
1767   jf->bits = ipa_get_ipa_bits_for_value (value, mask);
1768 }
1769 
1770 /* Return a pointer to a value_range just like *TMP, but either find it in
1771    ipa_vr_hash_table or allocate it in GC memory.  TMP->equiv must be NULL.  */
1772 
1773 static value_range *
1774 ipa_get_value_range (value_range *tmp)
1775 {
1776   value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
1777   if (*slot)
1778     return *slot;
1779 
1780   value_range *vr = ggc_alloc<value_range> ();
1781   *vr = *tmp;
1782   *slot = vr;
1783 
1784   return vr;
1785 }
1786 
1787 /* Return a pointer to a value range consisting of TYPE, MIN, MAX and an empty
1788    equiv set.  Use the hash table to avoid creating multiple identical copies
1789    of value_ranges.  */
1790 
1791 static value_range *
1792 ipa_get_value_range (enum value_range_type type, tree min, tree max)
1793 {
1794   value_range tmp;
1795   tmp.type = type;
1796   tmp.min = min;
1797   tmp.max = max;
1798   tmp.equiv = NULL;
1799   return ipa_get_value_range (&tmp);
1800 }
1801 
1802 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
1803    a NULL equiv bitmap.  Use the hash table to avoid creating multiple
1804    identical value_range structures.  */
1805 
1806 static void
1807 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_type type,
1808 		  tree min, tree max)
1809 {
1810   jf->m_vr = ipa_get_value_range (type, min, max);
1811 }
1812 
1813 /* Assign to JF a pointer to a value_range just like TMP, but either fetch a
1814    copy from ipa_vr_hash_table or allocate a new one in GC memory.  */
1815 
1816 static void
1817 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
1818 {
1819   jf->m_vr = ipa_get_value_range (tmp);
1820 }
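
     /* For instance, when the analysis below proves that a pointer argument is
        non-NULL, it records the anti-range ~[0, 0]; an equivalent hand-written
        request (illustrative only) would be

          tree z = build_int_cst (ptr_type_node, 0);
          ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);

        and any number of such jump functions end up sharing a single
        value_range thanks to ipa_vr_hash_table.  */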
1821 
1822 /* Compute jump function for all arguments of callsite CS and insert the
1823    information in the jump_functions array in the ipa_edge_args corresponding
1824    to this callsite.  */
1825 
1826 static void
1827 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
1828 				     struct cgraph_edge *cs)
1829 {
1830   struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
1831   struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1832   gcall *call = cs->call_stmt;
1833   int n, arg_num = gimple_call_num_args (call);
1834   bool useful_context = false;
1835 
1836   if (arg_num == 0 || args->jump_functions)
1837     return;
1838   vec_safe_grow_cleared (args->jump_functions, arg_num);
1839   if (flag_devirtualize)
1840     vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
1841 
1842   if (gimple_call_internal_p (call))
1843     return;
1844   if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1845     return;
1846 
1847   for (n = 0; n < arg_num; n++)
1848     {
1849       struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1850       tree arg = gimple_call_arg (call, n);
1851       tree param_type = ipa_get_callee_param_type (cs, n);
1852       if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
1853 	{
1854 	  tree instance;
1855 	  struct ipa_polymorphic_call_context context (cs->caller->decl,
1856 						       arg, cs->call_stmt,
1857 						       &instance);
1858 	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
1859 	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
1860 	  if (!context.useless_p ())
1861 	    useful_context = true;
1862 	}
1863 
1864       if (POINTER_TYPE_P (TREE_TYPE (arg)))
1865 	{
1866 	  bool addr_nonzero = false;
1867 	  bool strict_overflow = false;
1868 
1869 	  if (TREE_CODE (arg) == SSA_NAME
1870 	      && param_type
1871 	      && get_ptr_nonnull (arg))
1872 	    addr_nonzero = true;
1873 	  else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
1874 	    addr_nonzero = true;
1875 
1876 	  if (addr_nonzero)
1877 	    {
1878 	      tree z = build_int_cst (TREE_TYPE (arg), 0);
1879 	      ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
1880 	    }
1881 	  else
1882 	    gcc_assert (!jfunc->m_vr);
1883 	}
1884       else
1885 	{
1886 	  wide_int min, max;
1887 	  value_range_type type;
1888 	  if (TREE_CODE (arg) == SSA_NAME
1889 	      && param_type
1890 	      && (type = get_range_info (arg, &min, &max))
1891 	      && (type == VR_RANGE || type == VR_ANTI_RANGE))
1892 	    {
1893 	      value_range tmpvr, resvr;
1894 
1895 	      tmpvr.type = type;
1896 	      tmpvr.min = wide_int_to_tree (TREE_TYPE (arg), min);
1897 	      tmpvr.max = wide_int_to_tree (TREE_TYPE (arg), max);
1898 	      tmpvr.equiv = NULL;
1899 	      memset (&resvr, 0, sizeof (resvr));
1900 	      extract_range_from_unary_expr (&resvr, NOP_EXPR, param_type,
1901 					     &tmpvr, TREE_TYPE (arg));
1902 	      if (resvr.type == VR_RANGE || resvr.type == VR_ANTI_RANGE)
1903 		ipa_set_jfunc_vr (jfunc, &resvr);
1904 	      else
1905 		gcc_assert (!jfunc->m_vr);
1906 	    }
1907 	  else
1908 	    gcc_assert (!jfunc->m_vr);
1909 	}
1910 
1911       if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
1912 	  && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
1913 	{
1914 	  if (TREE_CODE (arg) == SSA_NAME)
1915 	    ipa_set_jfunc_bits (jfunc, 0,
1916 				widest_int::from (get_nonzero_bits (arg),
1917 						  TYPE_SIGN (TREE_TYPE (arg))));
1918 	  else
1919 	    ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
1920 	}
1921       else if (POINTER_TYPE_P (TREE_TYPE (arg)))
1922 	{
1923 	  unsigned HOST_WIDE_INT bitpos;
1924 	  unsigned align;
1925 
1926 	  get_pointer_alignment_1 (arg, &align, &bitpos);
1927 	  widest_int mask = wi::bit_and_not
1928 	    (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
1929 	     align / BITS_PER_UNIT - 1);
1930 	  widest_int value = bitpos / BITS_PER_UNIT;
1931 	  ipa_set_jfunc_bits (jfunc, value, mask);
1932 	}
1933       else
1934 	gcc_assert (!jfunc->bits);
1935 
1936       if (is_gimple_ip_invariant (arg)
1937 	  || (VAR_P (arg)
1938 	      && is_global_var (arg)
1939 	      && TREE_READONLY (arg)))
1940 	ipa_set_jf_constant (jfunc, arg, cs);
1941       else if (!is_gimple_reg_type (TREE_TYPE (arg))
1942 	       && TREE_CODE (arg) == PARM_DECL)
1943 	{
1944 	  int index = ipa_get_param_decl_index (info, arg);
1945 
1946 	  gcc_assert (index >= 0);
1947 	  /* Aggregate passed by value, check for pass-through, otherwise we
1948 	     will attempt to fill in aggregate contents later in this
1949 	     for cycle.  */
1950 	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
1951 	    {
1952 	      ipa_set_jf_simple_pass_through (jfunc, index, false);
1953 	      continue;
1954 	    }
1955 	}
1956       else if (TREE_CODE (arg) == SSA_NAME)
1957 	{
1958 	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
1959 	    {
1960 	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1961 	      if (index >= 0)
1962 		{
1963 		  bool agg_p;
1964 		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
1965 		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1966 		}
1967 	    }
1968 	  else
1969 	    {
1970 	      gimple *stmt = SSA_NAME_DEF_STMT (arg);
1971 	      if (is_gimple_assign (stmt))
1972 		compute_complex_assign_jump_func (fbi, info, jfunc,
1973 						  call, stmt, arg, param_type);
1974 	      else if (gimple_code (stmt) == GIMPLE_PHI)
1975 		compute_complex_ancestor_jump_func (fbi, info, jfunc,
1976 						    call,
1977 						    as_a <gphi *> (stmt));
1978 	    }
1979 	}
1980 
1981       /* If ARG is a pointer, we cannot use its type to determine the type of the
1982 	 aggregate passed (because type conversions are ignored in gimple).  We can
1983 	 usually get the type safely from the function declaration, but for K&R
1984 	 prototypes or variadic functions we can try our luck with the type of the
1985 	 pointer passed.  TODO: Since we look for actual initialization of the
1986 	 memory object, we may be better off deriving the type from the stores we find.  */
1987       if (!param_type)
1988 	param_type = TREE_TYPE (arg);
1989 
1990       if ((jfunc->type != IPA_JF_PASS_THROUGH
1991 	      || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1992 	  && (jfunc->type != IPA_JF_ANCESTOR
1993 	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1994 	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1995 	      || POINTER_TYPE_P (param_type)))
1996 	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
1997     }
1998   if (!useful_context)
1999     vec_free (args->polymorphic_call_contexts);
2000 }
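
     /* To make the cases above concrete, a hypothetical call site such as

          void callee (int x, struct S *p, struct S v);
          ...
          callee (7, q, *q);

        would typically yield an IPA_JF_CONST jump function for the first
        argument, a pass-through (when Q comes straight from a parameter) plus
        pointer-alignment bits for the second, and, for the aggregate third
        argument, possibly a list of known contents discovered by
        determine_locally_known_aggregate_parts.  */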
2001 
2002 /* Compute jump functions for all edges - both direct and indirect - outgoing
2003    from BB.  */
2004 
2005 static void
2006 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2007 {
2008   struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2009   int i;
2010   struct cgraph_edge *cs;
2011 
2012   FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2013     {
2014       struct cgraph_node *callee = cs->callee;
2015 
2016       if (callee)
2017 	{
2018 	  callee->ultimate_alias_target ();
2019 	  /* We do not need to bother analyzing calls to unknown functions
2020 	     unless they may become known during lto/whopr.  */
2021 	  if (!callee->definition && !flag_lto)
2022 	    continue;
2023 	}
2024       ipa_compute_jump_functions_for_edge (fbi, cs);
2025     }
2026 }
2027 
2028 /* If STMT looks like a statement loading a value from a member pointer formal
2029    parameter, return that parameter and store the offset of the field to
2030    *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
2031    might be clobbered).  If USE_DELTA, then we look for a use of the delta
2032    field rather than the pfn.  */
2033 
2034 static tree
2035 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2036 				    HOST_WIDE_INT *offset_p)
2037 {
2038   tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2039 
2040   if (!gimple_assign_single_p (stmt))
2041     return NULL_TREE;
2042 
2043   rhs = gimple_assign_rhs1 (stmt);
2044   if (TREE_CODE (rhs) == COMPONENT_REF)
2045     {
2046       ref_field = TREE_OPERAND (rhs, 1);
2047       rhs = TREE_OPERAND (rhs, 0);
2048     }
2049   else
2050     ref_field = NULL_TREE;
2051   if (TREE_CODE (rhs) != MEM_REF)
2052     return NULL_TREE;
2053   rec = TREE_OPERAND (rhs, 0);
2054   if (TREE_CODE (rec) != ADDR_EXPR)
2055     return NULL_TREE;
2056   rec = TREE_OPERAND (rec, 0);
2057   if (TREE_CODE (rec) != PARM_DECL
2058       || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2059     return NULL_TREE;
2060   ref_offset = TREE_OPERAND (rhs, 1);
2061 
2062   if (use_delta)
2063     fld = delta_field;
2064   else
2065     fld = ptr_field;
2066   if (offset_p)
2067     *offset_p = int_bit_position (fld);
2068 
2069   if (ref_field)
2070     {
2071       if (integer_nonzerop (ref_offset))
2072 	return NULL_TREE;
2073       return ref_field == fld ? rec : NULL_TREE;
2074     }
2075   else
2076     return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2077       : NULL_TREE;
2078 }
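
     /* Illustrative forms matched above, F being a member-pointer PARM_DECL
        (the structure layout and the 4B offset are hypothetical):

          tmp = MEM[(struct s *)&f].__pfn;      <- COMPONENT_REF over a MEM_REF
          tmp = MEM[(struct s *)&f + 4B];       <- bare MEM_REF at a field offset

        In the first form the referenced field must be the requested one and
        the MEM_REF offset must be zero; in the second, the MEM_REF offset must
        equal the byte position of the pfn (or, with USE_DELTA, the delta)
        field.  */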
2079 
2080 /* Returns true iff T is an SSA_NAME defined by a statement.  */
2081 
2082 static bool
2083 ipa_is_ssa_with_stmt_def (tree t)
2084 {
2085   if (TREE_CODE (t) == SSA_NAME
2086       && !SSA_NAME_IS_DEFAULT_DEF (t))
2087     return true;
2088   else
2089     return false;
2090 }
2091 
2092 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2093    call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
2094    indirect call graph edge.  */
2095 
2096 static struct cgraph_edge *
2097 ipa_note_param_call (struct cgraph_node *node, int param_index,
2098 		     gcall *stmt)
2099 {
2100   struct cgraph_edge *cs;
2101 
2102   cs = node->get_edge (stmt);
2103   cs->indirect_info->param_index = param_index;
2104   cs->indirect_info->agg_contents = 0;
2105   cs->indirect_info->member_ptr = 0;
2106   cs->indirect_info->guaranteed_unmodified = 0;
2107   return cs;
2108 }
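
     /* For example, in the hypothetical caller

          void call_it (void (*fp) (void)) { fp (); }

        the indirect edge for FP () gets param_index == 0; the flags are merely
        cleared here and are set by our callers only when a matching load
        pattern justifies them.  */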
2109 
2110 /* Analyze the indirect CALL and examine uses of formal parameters of the
2111    caller FBI->node (described by FBI->info).  Currently it checks whether the
2112    call calls a pointer that is a formal parameter of the caller and, if so,
2113    marks the parameter as called and creates an indirect call graph edge
2114    describing the call.  This is very simple for ordinary pointers represented
2115    in SSA but not-so-nice when it comes to member pointers.  The
2117    ugly part of this function does nothing more than trying to match the
2118    pattern of such a call.  An example of such a pattern is the gimple dump
2119    below; the call is on the last line:
2120 
2121      <bb 2>:
2122        f$__delta_5 = f.__delta;
2123        f$__pfn_24 = f.__pfn;
2124 
2125    or
2126      <bb 2>:
2127        f$__delta_5 = MEM[(struct  *)&f];
2128        f$__pfn_24 = MEM[(struct  *)&f + 4B];
2129 
2130    and a few lines below:
2131 
2132      <bb 5>
2133        D.2496_3 = (int) f$__pfn_24;
2134        D.2497_4 = D.2496_3 & 1;
2135        if (D.2497_4 != 0)
2136          goto <bb 3>;
2137        else
2138          goto <bb 4>;
2139 
2140      <bb 6>:
2141        D.2500_7 = (unsigned int) f$__delta_5;
2142        D.2501_8 = &S + D.2500_7;
2143        D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2144        D.2503_10 = *D.2502_9;
2145        D.2504_12 = f$__pfn_24 + -1;
2146        D.2505_13 = (unsigned int) D.2504_12;
2147        D.2506_14 = D.2503_10 + D.2505_13;
2148        D.2507_15 = *D.2506_14;
2149        iftmp.11_16 = (String:: *) D.2507_15;
2150 
2151      <bb 7>:
2152        # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2153        D.2500_19 = (unsigned int) f$__delta_5;
2154        D.2508_20 = &S + D.2500_19;
2155        D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2156 
2157    Such patterns are results of simple calls to a member pointer:
2158 
2159      int doprinting (int (MyString::* f)(int) const)
2160      {
2161        MyString S ("somestring");
2162 
2163        return (S.*f)(4);
2164      }
2165 
2166    Moreover, the function also looks for called pointers loaded from aggregates
2167    passed by value or reference.  */
2168 
2169 static void
2170 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2171 				tree target)
2172 {
2173   struct ipa_node_params *info = fbi->info;
2174   HOST_WIDE_INT offset;
2175   bool by_ref;
2176 
2177   if (SSA_NAME_IS_DEFAULT_DEF (target))
2178     {
2179       tree var = SSA_NAME_VAR (target);
2180       int index = ipa_get_param_decl_index (info, var);
2181       if (index >= 0)
2182 	ipa_note_param_call (fbi->node, index, call);
2183       return;
2184     }
2185 
2186   int index;
2187   gimple *def = SSA_NAME_DEF_STMT (target);
2188   bool guaranteed_unmodified;
2189   if (gimple_assign_single_p (def)
2190       && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2191 				 gimple_assign_rhs1 (def), &index, &offset,
2192 				 NULL, &by_ref, &guaranteed_unmodified))
2193     {
2194       struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2195       cs->indirect_info->offset = offset;
2196       cs->indirect_info->agg_contents = 1;
2197       cs->indirect_info->by_ref = by_ref;
2198       cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2199       return;
2200     }
2201 
2202   /* Now we need to try to match the complex pattern of calling a member
2203      pointer. */
2204   if (gimple_code (def) != GIMPLE_PHI
2205       || gimple_phi_num_args (def) != 2
2206       || !POINTER_TYPE_P (TREE_TYPE (target))
2207       || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2208     return;
2209 
2210   /* First, we need to check whether one of these is a load from a member
2211      pointer that is a parameter to this function. */
2212   tree n1 = PHI_ARG_DEF (def, 0);
2213   tree n2 = PHI_ARG_DEF (def, 1);
2214   if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2215     return;
2216   gimple *d1 = SSA_NAME_DEF_STMT (n1);
2217   gimple *d2 = SSA_NAME_DEF_STMT (n2);
2218 
2219   tree rec;
2220   basic_block bb, virt_bb;
2221   basic_block join = gimple_bb (def);
2222   if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2223     {
2224       if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2225 	return;
2226 
2227       bb = EDGE_PRED (join, 0)->src;
2228       virt_bb = gimple_bb (d2);
2229     }
2230   else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2231     {
2232       bb = EDGE_PRED (join, 1)->src;
2233       virt_bb = gimple_bb (d1);
2234     }
2235   else
2236     return;
2237 
2238   /* Second, we need to check that the basic blocks are laid out in the way
2239      corresponding to the pattern. */
2240 
2241   if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2242       || single_pred (virt_bb) != bb
2243       || single_succ (virt_bb) != join)
2244     return;
2245 
2246   /* Third, let's see that the branching is done depending on the least
2247      significant bit of the pfn. */
2248 
2249   gimple *branch = last_stmt (bb);
2250   if (!branch || gimple_code (branch) != GIMPLE_COND)
2251     return;
2252 
2253   if ((gimple_cond_code (branch) != NE_EXPR
2254        && gimple_cond_code (branch) != EQ_EXPR)
2255       || !integer_zerop (gimple_cond_rhs (branch)))
2256     return;
2257 
2258   tree cond = gimple_cond_lhs (branch);
2259   if (!ipa_is_ssa_with_stmt_def (cond))
2260     return;
2261 
2262   def = SSA_NAME_DEF_STMT (cond);
2263   if (!is_gimple_assign (def)
2264       || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2265       || !integer_onep (gimple_assign_rhs2 (def)))
2266     return;
2267 
2268   cond = gimple_assign_rhs1 (def);
2269   if (!ipa_is_ssa_with_stmt_def (cond))
2270     return;
2271 
2272   def = SSA_NAME_DEF_STMT (cond);
2273 
2274   if (is_gimple_assign (def)
2275       && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2276     {
2277       cond = gimple_assign_rhs1 (def);
2278       if (!ipa_is_ssa_with_stmt_def (cond))
2279 	return;
2280       def = SSA_NAME_DEF_STMT (cond);
2281     }
2282 
2283   tree rec2;
2284   rec2 = ipa_get_stmt_member_ptr_load_param (def,
2285 					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
2286 					      == ptrmemfunc_vbit_in_delta),
2287 					     NULL);
2288   if (rec != rec2)
2289     return;
2290 
2291   index = ipa_get_param_decl_index (info, rec);
2292   if (index >= 0
2293       && parm_preserved_before_stmt_p (fbi, index, call, rec))
2294     {
2295       struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2296       cs->indirect_info->offset = offset;
2297       cs->indirect_info->agg_contents = 1;
2298       cs->indirect_info->member_ptr = 1;
2299       cs->indirect_info->guaranteed_unmodified = 1;
2300     }
2301 
2302   return;
2303 }
2304 
2305 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2306    object referenced in the expression is a formal parameter of the caller
2307    FBI->node (described by FBI->info), create a call note for the
2308    statement.  */
2309 
2310 static void
2311 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2312 			       gcall *call, tree target)
2313 {
2314   tree obj = OBJ_TYPE_REF_OBJECT (target);
2315   int index;
2316   HOST_WIDE_INT anc_offset;
2317 
2318   if (!flag_devirtualize)
2319     return;
2320 
2321   if (TREE_CODE (obj) != SSA_NAME)
2322     return;
2323 
2324   struct ipa_node_params *info = fbi->info;
2325   if (SSA_NAME_IS_DEFAULT_DEF (obj))
2326     {
2327       struct ipa_jump_func jfunc;
2328       if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2329 	return;
2330 
2331       anc_offset = 0;
2332       index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2333       gcc_assert (index >= 0);
2334       if (detect_type_change_ssa (obj, obj_type_ref_class (target),
2335 				  call, &jfunc))
2336 	return;
2337     }
2338   else
2339     {
2340       struct ipa_jump_func jfunc;
2341       gimple *stmt = SSA_NAME_DEF_STMT (obj);
2342       tree expr;
2343 
2344       expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2345       if (!expr)
2346 	return;
2347       index = ipa_get_param_decl_index (info,
2348 					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2349       gcc_assert (index >= 0);
2350       if (detect_type_change (obj, expr, obj_type_ref_class (target),
2351 			      call, &jfunc, anc_offset))
2352 	return;
2353     }
2354 
2355   struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
2356   struct cgraph_indirect_call_info *ii = cs->indirect_info;
2357   ii->offset = anc_offset;
2358   ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2359   ii->otr_type = obj_type_ref_class (target);
2360   ii->polymorphic = 1;
2361 }
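
     /* An illustrative (hypothetical) source pattern that reaches this path:

          struct A { virtual int foo (); };

          int call_foo (struct A *a)
          {
            return a->foo ();
          }

        Here the OBJ_TYPE_REF object is the default definition of parameter A,
        so ANC_OFFSET is 0, and the indirect edge is annotated with the OTR
        type and token so that IPA-CP can later devirtualize the call when the
        dynamic type of the argument becomes known.  */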
2362 
2363 /* Analyze call statement CALL to determine whether and how it utilizes
2364    formal parameters of the caller (described by FBI->info).  FBI holds
2365    intermediate information about each formal parameter.  */
2366 
2367 static void
2368 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2369 {
2370   tree target = gimple_call_fn (call);
2371 
2372   if (!target
2373       || (TREE_CODE (target) != SSA_NAME
2374           && !virtual_method_call_p (target)))
2375     return;
2376 
2377   struct cgraph_edge *cs = fbi->node->get_edge (call);
2378   /* If we previously turned the call into a direct call, there is
2379      no need to analyze.  */
2380   if (cs && !cs->indirect_unknown_callee)
2381     return;
2382 
2383   if (cs->indirect_info->polymorphic && flag_devirtualize)
2384     {
2385       tree instance;
2386       tree target = gimple_call_fn (call);
2387       ipa_polymorphic_call_context context (current_function_decl,
2388 					    target, call, &instance);
2389 
2390       gcc_checking_assert (cs->indirect_info->otr_type
2391 			   == obj_type_ref_class (target));
2392       gcc_checking_assert (cs->indirect_info->otr_token
2393 			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2394 
2395       cs->indirect_info->vptr_changed
2396 	= !context.get_dynamic_type (instance,
2397 				     OBJ_TYPE_REF_OBJECT (target),
2398 				     obj_type_ref_class (target), call);
2399       cs->indirect_info->context = context;
2400     }
2401 
2402   if (TREE_CODE (target) == SSA_NAME)
2403     ipa_analyze_indirect_call_uses (fbi, call, target);
2404   else if (virtual_method_call_p (target))
2405     ipa_analyze_virtual_call_uses (fbi, call, target);
2406 }
2407 
2408 
2409 /* Analyze the call statement STMT with respect to formal parameters
2410    (described in FBI->info) of the caller given by FBI->node.  Currently it
2411    only checks whether formal parameters are called.  */
2412 
2413 static void
2414 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2415 {
2416   if (is_gimple_call (stmt))
2417     ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2418 }
2419 
2420 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2421    If OP is a parameter declaration, mark it as used in the info structure
2422    passed in DATA.  */
2423 
2424 static bool
2425 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2426 {
2427   struct ipa_node_params *info = (struct ipa_node_params *) data;
2428 
2429   op = get_base_address (op);
2430   if (op
2431       && TREE_CODE (op) == PARM_DECL)
2432     {
2433       int index = ipa_get_param_decl_index (info, op);
2434       gcc_assert (index >= 0);
2435       ipa_set_param_used (info, index, true);
2436     }
2437 
2438   return false;
2439 }
2440 
2441 /* Scan the statements in BB and inspect the uses of formal parameters.  Store
2442    the findings in various structures of the associated ipa_node_params
2443    structure, such as parameter flags, notes etc.  FBI holds various data about
2444    the function being analyzed.  */
2445 
2446 static void
2447 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2448 {
2449   gimple_stmt_iterator gsi;
2450   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2451     {
2452       gimple *stmt = gsi_stmt (gsi);
2453 
2454       if (is_gimple_debug (stmt))
2455 	continue;
2456 
2457       ipa_analyze_stmt_uses (fbi, stmt);
2458       walk_stmt_load_store_addr_ops (stmt, fbi->info,
2459 				     visit_ref_for_mod_analysis,
2460 				     visit_ref_for_mod_analysis,
2461 				     visit_ref_for_mod_analysis);
2462     }
2463   for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2464     walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2465 				   visit_ref_for_mod_analysis,
2466 				   visit_ref_for_mod_analysis,
2467 				   visit_ref_for_mod_analysis);
2468 }
2469 
2470 /* Calculate controlled uses of parameters of NODE.  */
2471 
2472 static void
2473 ipa_analyze_controlled_uses (struct cgraph_node *node)
2474 {
2475   struct ipa_node_params *info = IPA_NODE_REF (node);
2476 
2477   for (int i = 0; i < ipa_get_param_count (info); i++)
2478     {
2479       tree parm = ipa_get_param (info, i);
2480       int controlled_uses = 0;
2481 
2482       /* For SSA regs see if parameter is used.  For non-SSA we compute
2483 	 the flag during modification analysis.  */
2484       if (is_gimple_reg (parm))
2485 	{
2486 	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2487 				       parm);
2488 	  if (ddef && !has_zero_uses (ddef))
2489 	    {
2490 	      imm_use_iterator imm_iter;
2491 	      use_operand_p use_p;
2492 
2493 	      ipa_set_param_used (info, i, true);
2494 	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2495 		if (!is_gimple_call (USE_STMT (use_p)))
2496 		  {
2497 		    if (!is_gimple_debug (USE_STMT (use_p)))
2498 		      {
2499 			controlled_uses = IPA_UNDESCRIBED_USE;
2500 			break;
2501 		      }
2502 		  }
2503 		else
2504 		  controlled_uses++;
2505 	    }
2506 	  else
2507 	    controlled_uses = 0;
2508 	}
2509       else
2510 	controlled_uses = IPA_UNDESCRIBED_USE;
2511       ipa_set_controlled_uses (info, i, controlled_uses);
2512     }
2513 }
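
     /* As a sketch of what "controlled" means here (both functions are
        hypothetical):

          static int wrapped (int *);

          int outer (int *p)
          {
            return wrapped (p);
          }

        The only use of P is as a call argument, so P gets controlled_uses ==
        1, whereas any non-call, non-debug use -- say, storing P into memory --
        would force IPA_UNDESCRIBED_USE instead.  */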
2514 
2515 /* Free stuff in BI.  */
2516 
2517 static void
2518 free_ipa_bb_info (struct ipa_bb_info *bi)
2519 {
2520   bi->cg_edges.release ();
2521   bi->param_aa_statuses.release ();
2522 }
2523 
2524 /* Dominator walker driving the analysis.  */
2525 
2526 class analysis_dom_walker : public dom_walker
2527 {
2528 public:
2529   analysis_dom_walker (struct ipa_func_body_info *fbi)
2530     : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2531 
2532   virtual edge before_dom_children (basic_block);
2533 
2534 private:
2535   struct ipa_func_body_info *m_fbi;
2536 };
2537 
2538 edge
2539 analysis_dom_walker::before_dom_children (basic_block bb)
2540 {
2541   ipa_analyze_params_uses_in_bb (m_fbi, bb);
2542   ipa_compute_jump_functions_for_bb (m_fbi, bb);
2543   return NULL;
2544 }
2545 
2546 /* Release body info FBI.  */
2547 
2548 void
2549 ipa_release_body_info (struct ipa_func_body_info *fbi)
2550 {
2551   int i;
2552   struct ipa_bb_info *bi;
2553 
2554   FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2555     free_ipa_bb_info (bi);
2556   fbi->bb_infos.release ();
2557 }
2558 
2559 /* Initialize the array describing properties of formal parameters
2560    of NODE, analyze their uses and compute jump functions associated
2561    with actual arguments of calls from within NODE.  */
2562 
2563 void
2564 ipa_analyze_node (struct cgraph_node *node)
2565 {
2566   struct ipa_func_body_info fbi;
2567   struct ipa_node_params *info;
2568 
2569   ipa_check_create_node_params ();
2570   ipa_check_create_edge_args ();
2571   info = IPA_NODE_REF (node);
2572 
2573   if (info->analysis_done)
2574     return;
2575   info->analysis_done = 1;
2576 
2577   if (ipa_func_spec_opts_forbid_analysis_p (node))
2578     {
2579       for (int i = 0; i < ipa_get_param_count (info); i++)
2580 	{
2581 	  ipa_set_param_used (info, i, true);
2582 	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2583 	}
2584       return;
2585     }
2586 
2587   struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2588   push_cfun (func);
2589   calculate_dominance_info (CDI_DOMINATORS);
2590   ipa_initialize_node_params (node);
2591   ipa_analyze_controlled_uses (node);
2592 
2593   fbi.node = node;
2594   fbi.info = IPA_NODE_REF (node);
2595   fbi.bb_infos = vNULL;
2596   fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2597   fbi.param_count = ipa_get_param_count (info);
2598   fbi.aa_walked = 0;
2599 
2600   for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2601     {
2602       ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2603       bi->cg_edges.safe_push (cs);
2604     }
2605 
2606   for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2607     {
2608       ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2609       bi->cg_edges.safe_push (cs);
2610     }
2611 
2612   analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2613 
2614   ipa_release_body_info (&fbi);
2615   free_dominance_info (CDI_DOMINATORS);
2616   pop_cfun ();
2617 }
2618 
2619 /* Update the jump functions associated with call graph edge E when the call
2620    graph edge CS is being inlined, assuming that E->caller is already (possibly
2621    indirectly) inlined into CS->callee and that E has not been inlined.  */
2622 
2623 static void
2624 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2625 				      struct cgraph_edge *e)
2626 {
2627   struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2628   struct ipa_edge_args *args = IPA_EDGE_REF (e);
2629   int count = ipa_get_cs_argument_count (args);
2630   int i;
2631 
2632   for (i = 0; i < count; i++)
2633     {
2634       struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2635       struct ipa_polymorphic_call_context *dst_ctx
2636 	= ipa_get_ith_polymorhic_call_context (args, i);
2637 
2638       if (dst->type == IPA_JF_ANCESTOR)
2639 	{
2640 	  struct ipa_jump_func *src;
2641 	  int dst_fid = dst->value.ancestor.formal_id;
2642 	  struct ipa_polymorphic_call_context *src_ctx
2643 	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);
2644 
2645 	  /* Variable number of arguments can cause havoc if we try to access
2646 	     one that does not exist in the inlined edge.  So make sure we
2647 	     don't.  */
2648 	  if (dst_fid >= ipa_get_cs_argument_count (top))
2649 	    {
2650 	      ipa_set_jf_unknown (dst);
2651 	      continue;
2652 	    }
2653 
2654 	  src = ipa_get_ith_jump_func (top, dst_fid);
2655 
2656 	  if (src_ctx && !src_ctx->useless_p ())
2657 	    {
2658 	      struct ipa_polymorphic_call_context ctx = *src_ctx;
2659 
2660 	      /* TODO: Make type preserved safe WRT contexts.  */
2661 	      if (!ipa_get_jf_ancestor_type_preserved (dst))
2662 		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2663 	      ctx.offset_by (dst->value.ancestor.offset);
2664 	      if (!ctx.useless_p ())
2665 		{
2666 		  if (!dst_ctx)
2667 		    {
2668 		      vec_safe_grow_cleared (args->polymorphic_call_contexts,
2669 					     count);
2670 		      dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2671 		    }
2672 
2673 		  dst_ctx->combine_with (ctx);
2674 		}
2675 	    }
2676 
2677 	  if (src->agg.items
2678 	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2679 	    {
2680 	      struct ipa_agg_jf_item *item;
2681 	      int j;
2682 
2683 	      /* Currently we do not produce clobber aggregate jump functions;
2684 		 replace this with merging when we do.  */
2685 	      gcc_assert (!dst->agg.items);
2686 
2687 	      dst->agg.items = vec_safe_copy (src->agg.items);
2688 	      dst->agg.by_ref = src->agg.by_ref;
2689 	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
2690 		item->offset -= dst->value.ancestor.offset;
2691 	    }
2692 
2693 	  if (src->type == IPA_JF_PASS_THROUGH
2694 	      && src->value.pass_through.operation == NOP_EXPR)
2695 	    {
2696 	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2697 	      dst->value.ancestor.agg_preserved &=
2698 		src->value.pass_through.agg_preserved;
2699 	    }
2700 	  else if (src->type == IPA_JF_PASS_THROUGH
2701 		   && TREE_CODE_CLASS (src->value.pass_through.operation) == tcc_unary)
2702 	    {
2703 	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2704 	      dst->value.ancestor.agg_preserved = false;
2705 	    }
2706 	  else if (src->type == IPA_JF_ANCESTOR)
2707 	    {
2708 	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2709 	      dst->value.ancestor.offset += src->value.ancestor.offset;
2710 	      dst->value.ancestor.agg_preserved &=
2711 		src->value.ancestor.agg_preserved;
2712 	    }
2713 	  else
2714 	    ipa_set_jf_unknown (dst);
2715 	}
2716       else if (dst->type == IPA_JF_PASS_THROUGH)
2717 	{
2718 	  struct ipa_jump_func *src;
2719 	  /* We must check the range because of calls with a variable number of
2720 	     arguments, and we cannot combine jump functions with operations.  */
2721 	  if (dst->value.pass_through.operation == NOP_EXPR
2722 	      && (dst->value.pass_through.formal_id
2723 		  < ipa_get_cs_argument_count (top)))
2724 	    {
2725 	      int dst_fid = dst->value.pass_through.formal_id;
2726 	      src = ipa_get_ith_jump_func (top, dst_fid);
2727 	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
2728 	      struct ipa_polymorphic_call_context *src_ctx
2729 		= ipa_get_ith_polymorhic_call_context (top, dst_fid);
2730 
2731 	      if (src_ctx && !src_ctx->useless_p ())
2732 		{
2733 		  struct ipa_polymorphic_call_context ctx = *src_ctx;
2734 
2735 		  /* TODO: Make type preserved safe WRT contexts.  */
2736 		  if (!ipa_get_jf_pass_through_type_preserved (dst))
2737 		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
2738 		  if (!ctx.useless_p ())
2739 		    {
2740 		      if (!dst_ctx)
2741 			{
2742 			  vec_safe_grow_cleared (args->polymorphic_call_contexts,
2743 					         count);
2744 			  dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2745 			}
2746 		      dst_ctx->combine_with (ctx);
2747 		    }
2748 		}
2749 	      switch (src->type)
2750 		{
2751 		case IPA_JF_UNKNOWN:
2752 		  ipa_set_jf_unknown (dst);
2753 		  break;
2754 		case IPA_JF_CONST:
2755 		  ipa_set_jf_cst_copy (dst, src);
2756 		  break;
2757 
2758 		case IPA_JF_PASS_THROUGH:
2759 		  {
2760 		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
2761 		    enum tree_code operation;
2762 		    operation = ipa_get_jf_pass_through_operation (src);
2763 
2764 		    if (operation == NOP_EXPR)
2765 		      {
2766 			bool agg_p;
2767 			agg_p = dst_agg_p
2768 			  && ipa_get_jf_pass_through_agg_preserved (src);
2769 			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
2770 		      }
2771 		    else if (TREE_CODE_CLASS (operation) == tcc_unary)
2772 		      ipa_set_jf_unary_pass_through (dst, formal_id, operation);
2773 		    else
2774 		      {
2775 			tree operand = ipa_get_jf_pass_through_operand (src);
2776 			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2777 						       operation);
2778 		      }
2779 		    break;
2780 		  }
2781 		case IPA_JF_ANCESTOR:
2782 		  {
2783 		    bool agg_p;
2784 		    agg_p = dst_agg_p
2785 		      && ipa_get_jf_ancestor_agg_preserved (src);
2786 		    ipa_set_ancestor_jf (dst,
2787 					 ipa_get_jf_ancestor_offset (src),
2788 					 ipa_get_jf_ancestor_formal_id (src),
2789 					 agg_p);
2790 		    break;
2791 		  }
2792 		default:
2793 		  gcc_unreachable ();
2794 		}
2795 
2796 	      if (src->agg.items
2797 		  && (dst_agg_p || !src->agg.by_ref))
2798 		{
2799 		  /* Currently we do not produce clobber aggregate jump
2800 		     functions; replace this with merging when we do.  */
2801 		  gcc_assert (!dst->agg.items);
2802 
2803 		  dst->agg.by_ref = src->agg.by_ref;
2804 		  dst->agg.items = vec_safe_copy (src->agg.items);
2805 		}
2806 	    }
2807 	  else
2808 	    ipa_set_jf_unknown (dst);
2809 	}
2810     }
2811 }
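
     /* A worked (hypothetical) example of the pass-through composition done
        above: if A calls B passing its own parameter X, and B merely forwards
        its parameter to C,

          a (x)  ->  b (x)  ->  c (x)

        then before inlining the B->C jump function is a simple pass-through of
        B's parameter 0.  When B is inlined into A, TOP describes the A->B
        arguments, so the B->C pass-through is re-expressed as a pass-through
        of A's parameter 0; an ancestor jump function would likewise have its
        formal_id remapped and its offset accumulated.  */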
2812 
2813 /* If TARGET is an addr_expr of a function declaration, make it the
2814    (SPECULATIVE) destination of an indirect edge IE and return the edge.
2815    Otherwise, return NULL.  */
2816 
2817 struct cgraph_edge *
2818 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
2819 				bool speculative)
2820 {
2821   struct cgraph_node *callee;
2822   struct ipa_call_summary *es = ipa_call_summaries->get (ie);
2823   bool unreachable = false;
2824 
2825   if (TREE_CODE (target) == ADDR_EXPR)
2826     target = TREE_OPERAND (target, 0);
2827   if (TREE_CODE (target) != FUNCTION_DECL)
2828     {
2829       target = canonicalize_constructor_val (target, NULL);
2830       if (!target || TREE_CODE (target) != FUNCTION_DECL)
2831 	{
2832 	  /* Member pointer call that goes through a VMT lookup.  */
2833 	  if (ie->indirect_info->member_ptr
2834 	      /* Or if target is not an invariant expression and we do not
2835 		 know if it will evaluate to a function at runtime.
2836 		 This can happen when folding through &VAR, where &VAR
2837 		 is IP invariant, but VAR itself is not.
2838 
2839 		 TODO: Revisit this when GCC 5 is branched.  It seems that
2840 		 member_ptr check is not needed and that we may try to fold
2841 		 the expression and see if VAR is readonly.  */
2842 	      || !is_gimple_ip_invariant (target))
2843 	    {
2844 	      if (dump_enabled_p ())
2845 		{
2846 		  location_t loc = gimple_location_safe (ie->call_stmt);
2847 		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2848 				   "discovered direct call non-invariant %s\n",
2849 				   ie->caller->dump_name ());
2850 		}
2851 	      return NULL;
2852 	    }
2853 
2854 
2855           if (dump_enabled_p ())
2856 	    {
2857 	      location_t loc = gimple_location_safe (ie->call_stmt);
2858 	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2859 			       "discovered direct call to non-function in %s, "
2860 			       "making it __builtin_unreachable\n",
2861 			       ie->caller->dump_name ());
2862 	    }
2863 
2864 	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2865 	  callee = cgraph_node::get_create (target);
2866 	  unreachable = true;
2867 	}
2868       else
2869 	callee = cgraph_node::get (target);
2870     }
2871   else
2872     callee = cgraph_node::get (target);
2873 
2874   /* Because may-edges are not explicitly represented and the vtable may be
2875      external, we may create the first reference to the object in the unit.  */
2876   if (!callee || callee->global.inlined_to)
2877     {
2878 
2879       /* We had better make sure we can refer to it.
2880 	 In the case of static functions we are out of luck, since we have
2881 	 already removed their bodies.  In the case of public functions we may
2882 	 or may not introduce the reference.  */
2883       if (!canonicalize_constructor_val (target, NULL)
2884 	  || !TREE_PUBLIC (target))
2885 	{
2886 	  if (dump_file)
2887 	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2888 		     "(%s -> %s) but cannot refer to it.  Giving up.\n",
2889 		     ie->caller->dump_name (),
2890 		     ie->callee->dump_name ());
2891 	  return NULL;
2892 	}
2893       callee = cgraph_node::get_create (target);
2894     }
2895 
2896   /* If the edge is already speculative, check whether the targets agree.  */
2897   if (speculative && ie->speculative)
2898     {
2899       struct cgraph_edge *e2;
2900       struct ipa_ref *ref;
2901       ie->speculative_call_info (e2, ie, ref);
2902       if (e2->callee->ultimate_alias_target ()
2903 	  != callee->ultimate_alias_target ())
2904 	{
2905 	  if (dump_file)
2906 	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
2907 		     "target (%s -> %s) but the call is already "
2908 		     "speculated to %s. Giving up.\n",
2909 		     ie->caller->dump_name (), callee->dump_name (),
2910 		     e2->callee->dump_name ());
2911 	}
2912       else
2913 	{
2914 	  if (dump_file)
2915 	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
2916 		     "(%s -> %s); this agrees with the previous speculation.\n",
2917 		     ie->caller->dump_name (), callee->dump_name ());
2918 	}
2919       return NULL;
2920     }
2921 
2922   if (!dbg_cnt (devirt))
2923     return NULL;
2924 
2925   ipa_check_create_node_params ();
2926 
2927   /* We cannot make edges to inline clones.  It is a bug that someone removed
2928      the cgraph node too early.  */
2929   gcc_assert (!callee->global.inlined_to);
2930 
2931   if (dump_file && !unreachable)
2932     {
2933       fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
2934 	       "(%s -> %s), for stmt ",
2935 	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
2936 	       speculative ? "speculative" : "known",
2937 	       ie->caller->dump_name (),
2938 	       callee->dump_name ());
2939       if (ie->call_stmt)
2940 	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2941       else
2942 	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
2943      }
2944   if (dump_enabled_p ())
2945     {
2946       location_t loc = gimple_location_safe (ie->call_stmt);
2947 
2948       dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
2949 		       "converting indirect call in %s to direct call to %s\n",
2950 		       ie->caller->name (), callee->name ());
2951     }
2952   if (!speculative)
2953     {
2954       struct cgraph_edge *orig = ie;
2955       ie = ie->make_direct (callee);
2956       /* If we resolved a speculative edge, the cost is already up to date
2957 	 for the direct call (adjusted by inline_edge_duplication_hook).  */
2958       if (ie == orig)
2959 	{
2960 	  es = ipa_call_summaries->get (ie);
2961 	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2962 				 - eni_size_weights.call_cost);
2963 	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2964 				 - eni_time_weights.call_cost);
2965 	}
2966     }
2967   else
2968     {
2969       if (!callee->can_be_discarded_p ())
2970 	{
2971 	  cgraph_node *alias;
2972 	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
2973 	  if (alias)
2974 	    callee = alias;
2975 	}
2976       /* make_speculative will update ie's cost to that of a direct call.  */
2977       ie = ie->make_speculative
2978 	     (callee, ie->count.apply_scale (8, 10));
2979     }
2980 
2981   return ie;
2982 }
2983 
2984 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
2985    CONSTRUCTOR and return it.  Return NULL if the search fails for some
2986    reason.  */
2987 
2988 static tree
2989 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
2990 {
2991   tree type = TREE_TYPE (constructor);
2992   if (TREE_CODE (type) != ARRAY_TYPE
2993       && TREE_CODE (type) != RECORD_TYPE)
2994     return NULL;
2995 
2996   unsigned ix;
2997   tree index, val;
2998   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
2999     {
3000       HOST_WIDE_INT elt_offset;
3001       if (TREE_CODE (type) == ARRAY_TYPE)
3002        {
3003          offset_int off;
3004          tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3005          gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3006 
3007          if (index)
3008            {
3009 	     if (TREE_CODE (index) == RANGE_EXPR)
3010 	       off = wi::to_offset (TREE_OPERAND (index, 0));
3011 	     else
3012 	       off = wi::to_offset (index);
3013              if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3014                {
3015                  tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3016                  gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3017                  off = wi::sext (off - wi::to_offset (low_bound),
3018                                  TYPE_PRECISION (TREE_TYPE (index)));
3019                }
3020              off *= wi::to_offset (unit_size);
3021 	     /* ???  Handle more than just the first index of a
3022 	        RANGE_EXPR.  */
3023            }
3024          else
3025            off = wi::to_offset (unit_size) * ix;
3026 
3027          off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3028          if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3029            continue;
3030          elt_offset = off.to_shwi ();
3031        }
3032       else if (TREE_CODE (type) == RECORD_TYPE)
3033        {
3034          gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3035          if (DECL_BIT_FIELD (index))
3036            continue;
3037          elt_offset = int_bit_position (index);
3038        }
3039       else
3040        gcc_unreachable ();
3041 
3042       if (elt_offset > req_offset)
3043 	return NULL;
3044 
3045       if (TREE_CODE (val) == CONSTRUCTOR)
3046 	return find_constructor_constant_at_offset (val,
3047 						    req_offset - elt_offset);
3048 
3049       if (elt_offset == req_offset
3050 	  && is_gimple_reg_type (TREE_TYPE (val))
3051 	  && is_gimple_ip_invariant (val))
3052 	return val;
3053     }
3054   return NULL;
3055 }
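
     /* E.g. (hypothetical initializer, LP64 target assumed): for

          static int (*const handlers[2]) (void) = { h0, h1 };

        a request with REQ_OFFSET == 64 takes the ARRAY_TYPE branch, computes
        the element offset as index 1 times the 8-byte unit size shifted to
        bits, and returns the address of H1.  */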
3056 
3057 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3058    invariant from a static constructor and if so, return it.  Otherwise return
3059    NULL.  */
3060 
3061 static tree
3062 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3063 {
3064   if (by_ref)
3065     {
3066       if (TREE_CODE (scalar) != ADDR_EXPR)
3067 	return NULL;
3068       scalar = TREE_OPERAND (scalar, 0);
3069     }
3070 
3071   if (!VAR_P (scalar)
3072       || !is_global_var (scalar)
3073       || !TREE_READONLY (scalar)
3074       || !DECL_INITIAL (scalar)
3075       || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3076     return NULL;
3077 
3078   return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3079 }
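
     /* For example (hypothetical globals, LP64 layout assumed): with

          struct cb { int tag; int (*fn) (int); };
          static const struct cb table = { 1, do_work };

        a by-ref query with SCALAR == &table and OFFSET == 64 (the bit position
        of FN after padding) passes the checks above and finds the address of
        DO_WORK in the DECL_INITIAL constructor.  */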
3080 
3081 /* Retrieve value from aggregate jump function AGG or static initializer of
3082    SCALAR (which can be NULL) for the given OFFSET or return NULL if there is
3083    none.  BY_REF specifies whether the value has to be passed by reference or
3084    by value.  If FROM_GLOBAL_CONSTANT is non-NULL, then the boolean it points
3085    to is set to true if the value comes from an initializer of a constant.  */
3086 
3087 tree
3088 ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg, tree scalar,
3089 			    HOST_WIDE_INT offset, bool by_ref,
3090 			    bool *from_global_constant)
3091 {
3092   struct ipa_agg_jf_item *item;
3093   int i;
3094 
3095   if (scalar)
3096     {
3097       tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3098       if (res)
3099 	{
3100 	  if (from_global_constant)
3101 	    *from_global_constant = true;
3102 	  return res;
3103 	}
3104     }
3105 
3106   if (!agg
3107       || by_ref != agg->by_ref)
3108     return NULL;
3109 
3110   FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
3111     if (item->offset == offset)
3112       {
3113 	/* Currently we do not have clobber values; return NULL for them once
3114 	   we do.  */
3115 	gcc_checking_assert (is_gimple_ip_invariant (item->value));
3116 	if (from_global_constant)
3117 	  *from_global_constant = false;
3118 	return item->value;
3119       }
3120   return NULL;
3121 }
3122 
3123 /* Remove a reference to SYMBOL from the list of references of a node given by
3124    reference description RDESC.  Return true if the reference has been
3125    successfully found and removed.  */
3126 
3127 static bool
3128 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3129 {
3130   struct ipa_ref *to_del;
3131   struct cgraph_edge *origin;
3132 
3133   origin = rdesc->cs;
3134   if (!origin)
3135     return false;
3136   to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3137 					   origin->lto_stmt_uid);
3138   if (!to_del)
3139     return false;
3140 
3141   to_del->remove_reference ();
3142   if (dump_file)
3143     fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3144 	     origin->caller->dump_name (), xstrdup_for_dump (symbol->name ()));
3145   return true;
3146 }
3147 
3148 /* If JFUNC has a reference description with refcount different from
3149    IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3150    NULL.  JFUNC must be a constant jump function.  */
3151 
3152 static struct ipa_cst_ref_desc *
3153 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3154 {
3155   struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3156   if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3157     return rdesc;
3158   else
3159     return NULL;
3160 }
3161 
3162 /* If the value of constant jump function JFUNC is an address of a function
3163    declaration, return the associated call graph node.  Otherwise return
3164    NULL.  */
3165 
3166 static cgraph_node *
3167 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3168 {
3169   gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3170   tree cst = ipa_get_jf_constant (jfunc);
3171   if (TREE_CODE (cst) != ADDR_EXPR
3172       || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3173     return NULL;
3174 
3175   return cgraph_node::get (TREE_OPERAND (cst, 0));
3176 }
3177 
3178 
3179 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3180    refcount and if it hits zero, remove reference to SYMBOL from the caller of
3181    the edge specified in the rdesc.  Return false if either the symbol or the
3182    reference could not be found, otherwise return true.  */
3183 
3184 static bool
3185 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3186 {
3187   struct ipa_cst_ref_desc *rdesc;
3188   if (jfunc->type == IPA_JF_CONST
3189       && (rdesc = jfunc_rdesc_usable (jfunc))
3190       && --rdesc->refcount == 0)
3191     {
3192       symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3193       if (!symbol)
3194 	return false;
3195 
3196       return remove_described_reference (symbol, rdesc);
3197     }
3198   return true;
3199 }
3200 
3201 /* Try to find a destination for indirect edge IE that corresponds to a simple
3202    call or a call of a member function pointer and where the destination is a
3203    pointer formal parameter described by jump function JFUNC.  TARGET_TYPE is
3204    the type of the parameter to which the result of JFUNC is passed.  If it can
3205    be determined, return the newly direct edge, otherwise return NULL.
3206    NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */
3207 
3208 static struct cgraph_edge *
3209 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3210 				  struct ipa_jump_func *jfunc, tree target_type,
3211 				  struct ipa_node_params *new_root_info)
3212 {
3213   struct cgraph_edge *cs;
3214   tree target;
3215   bool agg_contents = ie->indirect_info->agg_contents;
3216   tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3217   if (agg_contents)
3218     {
3219       bool from_global_constant;
3220       target = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3221 					   ie->indirect_info->offset,
3222 					   ie->indirect_info->by_ref,
3223 					   &from_global_constant);
3224       if (target
3225 	  && !from_global_constant
3226 	  && !ie->indirect_info->guaranteed_unmodified)
3227 	return NULL;
3228     }
3229   else
3230     target = scalar;
3231   if (!target)
3232     return NULL;
3233   cs = ipa_make_edge_direct_to_target (ie, target);
3234 
3235   if (cs && !agg_contents)
3236     {
3237       bool ok;
3238       gcc_checking_assert (cs->callee
3239 			   && (cs != ie
3240 			       || jfunc->type != IPA_JF_CONST
3241 			       || !cgraph_node_for_jfunc (jfunc)
3242 			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
3243       ok = try_decrement_rdesc_refcount (jfunc);
3244       gcc_checking_assert (ok);
3245     }
3246 
3247   return cs;
3248 }
3249 
3250 /* Return the target to be used in cases of impossible devirtualization.  IE
3251    and target (the latter can be NULL) are dumped when dumping is enabled.  */
3252 
3253 tree
3254 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3255 {
3256   if (dump_file)
3257     {
3258       if (target)
3259 	fprintf (dump_file,
3260 		 "Type inconsistent devirtualization: %s->%s\n",
3261 		 ie->caller->dump_name (),
3262 		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3263       else
3264 	fprintf (dump_file,
3265 		 "No devirtualization target in %s\n",
3266 		 ie->caller->dump_name ());
3267     }
3268   tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3269   cgraph_node::get_create (new_target);
3270   return new_target;
3271 }
3272 
3273 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3274    call based on a formal parameter which is described by jump function JFUNC
3275    and if it can be determined, make it direct and return the direct edge.
3276    Otherwise, return NULL.  CTX describes the polymorphic context that the
3277    parameter the call is based on brings along with it.  */
3278 
3279 static struct cgraph_edge *
3280 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3281 				   struct ipa_jump_func *jfunc,
3282 				   struct ipa_polymorphic_call_context ctx)
3283 {
3284   tree target = NULL;
3285   bool speculative = false;
3286 
3287   if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3288     return NULL;
3289 
3290   gcc_assert (!ie->indirect_info->by_ref);
3291 
3292   /* Try to do lookup via known virtual table pointer value.  */
3293   if (!ie->indirect_info->vptr_changed
3294       || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3295     {
3296       tree vtable;
3297       unsigned HOST_WIDE_INT offset;
3298       tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3299 	: NULL;
3300       tree t = ipa_find_agg_cst_for_param (&jfunc->agg, scalar,
3301 					   ie->indirect_info->offset,
3302 					   true);
3303       if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3304 	{
3305 	  bool can_refer;
3306 	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3307 						 vtable, offset, &can_refer);
3308 	  if (can_refer)
3309 	    {
3310 	      if (!t
3311 		  || (TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
3312 		      && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
3313 		  || !possible_polymorphic_call_target_p
3314 		       (ie, cgraph_node::get (t)))
3315 		{
3316 		  /* Do not speculate builtin_unreachable, it is stupid!  */
3317 		  if (!ie->indirect_info->vptr_changed)
3318 		    target = ipa_impossible_devirt_target (ie, target);
3319 		  else
3320 		    target = NULL;
3321 		}
3322 	      else
3323 		{
3324 		  target = t;
3325 		  speculative = ie->indirect_info->vptr_changed;
3326 		}
3327 	    }
3328 	}
3329     }
3330 
3331   ipa_polymorphic_call_context ie_context (ie);
3332   vec <cgraph_node *> targets;
3333   bool final;
3334 
3335   ctx.offset_by (ie->indirect_info->offset);
3336   if (ie->indirect_info->vptr_changed)
3337     ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3338 				      ie->indirect_info->otr_type);
3339   ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3340   targets = possible_polymorphic_call_targets
3341     (ie->indirect_info->otr_type,
3342      ie->indirect_info->otr_token,
3343      ctx, &final);
3344   if (final && targets.length () <= 1)
3345     {
3346       speculative = false;
3347       if (targets.length () == 1)
3348 	target = targets[0]->decl;
3349       else
3350 	target = ipa_impossible_devirt_target (ie, NULL_TREE);
3351     }
3352   else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3353 	   && !ie->speculative && ie->maybe_hot_p ())
3354     {
3355       cgraph_node *n;
3356       n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3357 					    ie->indirect_info->otr_token,
3358 					    ie->indirect_info->context);
3359       if (n)
3360 	{
3361 	  target = n->decl;
3362 	  speculative = true;
3363 	}
3364     }
3365 
3366   if (target)
3367     {
3368       if (!possible_polymorphic_call_target_p
3369 	  (ie, cgraph_node::get_create (target)))
3370 	{
3371 	  if (speculative)
3372 	    return NULL;
3373 	  target = ipa_impossible_devirt_target (ie, target);
3374 	}
3375       return ipa_make_edge_direct_to_target (ie, target, speculative);
3376     }
3377   else
3378     return NULL;
3379 }
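
/* Illustrative example (editor's addition): given a hypothetical

     struct A { virtual int f () { return 0; } };
     struct B : A { int f () { return 1; } };

     static int call (A *a) { return a->f (); }
     int user () { B b; return call (&b); }

   inlining CALL into USER makes the polymorphic context of the parameter a
   known, complete context of type B; possible_polymorphic_call_targets then
   returns the single final target B::f and the code above turns the indirect
   edge into a direct, non-speculative one.  */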
3380 
3381 /* Update the param called notes associated with NODE when CS is being inlined,
3382    assuming NODE is (potentially indirectly) inlined into CS->callee.
3383    Moreover, if the callee is discovered to be constant, create a new cgraph
3384    edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
3385    unless NEW_EDGES is NULL.  Return true iff new edges were created.  */
3386 
3387 static bool
3388 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3389 				      struct cgraph_node *node,
3390 				      vec<cgraph_edge *> *new_edges)
3391 {
3392   struct ipa_edge_args *top;
3393   struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3394   struct ipa_node_params *new_root_info, *inlined_node_info;
3395   bool res = false;
3396 
3397   ipa_check_create_edge_args ();
3398   top = IPA_EDGE_REF (cs);
3399   new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
3400 				? cs->caller->global.inlined_to
3401 				: cs->caller);
3402   inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3403 
3404   for (ie = node->indirect_calls; ie; ie = next_ie)
3405     {
3406       struct cgraph_indirect_call_info *ici = ie->indirect_info;
3407       struct ipa_jump_func *jfunc;
3408       int param_index;
3409       cgraph_node *spec_target = NULL;
3410 
3411       next_ie = ie->next_callee;
3412 
3413       if (ici->param_index == -1)
3414 	continue;
3415 
3416       /* We must check range due to calls with a variable number of arguments.  */
3417       if (ici->param_index >= ipa_get_cs_argument_count (top))
3418 	{
3419 	  ici->param_index = -1;
3420 	  continue;
3421 	}
3422 
3423       param_index = ici->param_index;
3424       jfunc = ipa_get_ith_jump_func (top, param_index);
3425 
3426       if (ie->speculative)
3427 	{
3428 	  struct cgraph_edge *de;
3429           struct ipa_ref *ref;
3430 	  ie->speculative_call_info (de, ie, ref);
3431 	  spec_target = de->callee;
3432 	}
3433 
3434       if (!opt_for_fn (node->decl, flag_indirect_inlining))
3435 	new_direct_edge = NULL;
3436       else if (ici->polymorphic)
3437 	{
3438           ipa_polymorphic_call_context ctx;
3439 	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3440 	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
3441 	}
3442       else
3443 	{
3444 	  tree target_type = ipa_get_type (inlined_node_info, param_index);
3445 	  new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3446 							      target_type,
3447 							      new_root_info);
3448 	}
3449 
3450       /* If speculation was removed, then we need to do nothing.  */
3451       if (new_direct_edge && new_direct_edge != ie
3452 	  && new_direct_edge->callee == spec_target)
3453 	{
3454 	  new_direct_edge->indirect_inlining_edge = 1;
3455 	  top = IPA_EDGE_REF (cs);
3456 	  res = true;
3457 	  if (!new_direct_edge->speculative)
3458 	    continue;
3459 	}
3460       else if (new_direct_edge)
3461 	{
3462 	  new_direct_edge->indirect_inlining_edge = 1;
3463 	  if (new_direct_edge->call_stmt)
3464 	    new_direct_edge->call_stmt_cannot_inline_p
3465 	      = !gimple_check_call_matching_types (
3466 		  new_direct_edge->call_stmt,
3467 		  new_direct_edge->callee->decl, false);
3468 	  if (new_edges)
3469 	    {
3470 	      new_edges->safe_push (new_direct_edge);
3471 	      res = true;
3472 	    }
3473 	  top = IPA_EDGE_REF (cs);
3474 	  /* If a speculative edge was introduced, we still need to update
3475 	     the call info of the indirect edge.  */
3476 	  if (!new_direct_edge->speculative)
3477 	    continue;
3478 	}
3479       if (jfunc->type == IPA_JF_PASS_THROUGH
3480           && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3481 	{
3482 	  if (ici->agg_contents
3483 	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3484 	      && !ici->polymorphic)
3485 	    ici->param_index = -1;
3486 	  else
3487 	    {
3488 	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3489 	      if (ici->polymorphic
3490 		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
3491 		ici->vptr_changed = true;
3492 	    }
3493 	}
3494       else if (jfunc->type == IPA_JF_ANCESTOR)
3495 	{
3496 	  if (ici->agg_contents
3497 	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3498 	      && !ici->polymorphic)
3499 	    ici->param_index = -1;
3500 	  else
3501 	    {
3502 	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3503 	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3504 	      if (ici->polymorphic
3505 		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
3506 		ici->vptr_changed = true;
3507 	    }
3508 	}
3509       else
3510 	/* Either we can find a destination for this edge now or never. */
3511 	ici->param_index = -1;
3512     }
3513 
3514   return res;
3515 }
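
/* Illustrative example (editor's addition): consider a hypothetical

     static void callee (void (*q) (void)) { q (); }
     void caller (void (*p) (void)) { callee (p); }

   When CALLEE is inlined into CALLER, the indirect call through Q is
   described by a simple pass-through jump function, so the loop above
   rewrites ici->param_index from Q's formal index in CALLEE to P's formal
   index in CALLER; if P later becomes a known constant, the edge can still
   be made direct.  */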
3516 
3517 /* Recursively traverse subtree of NODE (including node) made of inlined
3518    cgraph_edges when CS has been inlined and invoke
3519    update_indirect_edges_after_inlining on all nodes and
3520    update_jump_functions_after_inlining on all non-inlined edges that lead out
3521    of this subtree.  Newly discovered indirect edges will be added to
3522    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
3523    created.  */
3524 
3525 static bool
3526 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3527 				   struct cgraph_node *node,
3528 				   vec<cgraph_edge *> *new_edges)
3529 {
3530   struct cgraph_edge *e;
3531   bool res;
3532 
3533   res = update_indirect_edges_after_inlining (cs, node, new_edges);
3534 
3535   for (e = node->callees; e; e = e->next_callee)
3536     if (!e->inline_failed)
3537       res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3538     else
3539       update_jump_functions_after_inlining (cs, e);
3540   for (e = node->indirect_calls; e; e = e->next_callee)
3541     update_jump_functions_after_inlining (cs, e);
3542 
3543   return res;
3544 }
3545 
3546 /* Combine two controlled uses counts as done during inlining.  */
3547 
3548 static int
3549 combine_controlled_uses_counters (int c, int d)
3550 {
3551   if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3552     return IPA_UNDESCRIBED_USE;
3553   else
3554     return c + d - 1;
3555 }
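
/* Worked example (editor's note): the -1 accounts for the call edge being
   inlined, which stops being a separate use of the value.  If the caller
   knows C == 1 (the value is passed only to the inlined call) and the callee
   reports D == 2 uses of its formal, the combined count is 1 + 2 - 1 == 2.
   An IPA_UNDESCRIBED_USE on either side makes the result undescribed as
   well, since an unknown use cannot be accounted for.  */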
3556 
3557 /* Propagate number of controlled users from CS->callee to the new root of the
3558    tree of inlined nodes.  */
3559 
3560 static void
3561 propagate_controlled_uses (struct cgraph_edge *cs)
3562 {
3563   struct ipa_edge_args *args = IPA_EDGE_REF (cs);
3564   struct cgraph_node *new_root = cs->caller->global.inlined_to
3565     ? cs->caller->global.inlined_to : cs->caller;
3566   struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3567   struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3568   int count, i;
3569 
3570   count = MIN (ipa_get_cs_argument_count (args),
3571 	       ipa_get_param_count (old_root_info));
3572   for (i = 0; i < count; i++)
3573     {
3574       struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3575       struct ipa_cst_ref_desc *rdesc;
3576 
3577       if (jf->type == IPA_JF_PASS_THROUGH)
3578 	{
3579 	  int src_idx, c, d;
3580 	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
3581 	  c = ipa_get_controlled_uses (new_root_info, src_idx);
3582 	  d = ipa_get_controlled_uses (old_root_info, i);
3583 
3584 	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3585 			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3586 	  c = combine_controlled_uses_counters (c, d);
3587 	  ipa_set_controlled_uses (new_root_info, src_idx, c);
3588 	  if (c == 0 && new_root_info->ipcp_orig_node)
3589 	    {
3590 	      struct cgraph_node *n;
3591 	      struct ipa_ref *ref;
3592 	      tree t = new_root_info->known_csts[src_idx];
3593 
3594 	      if (t && TREE_CODE (t) == ADDR_EXPR
3595 		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3596 		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3597 		  && (ref = new_root->find_reference (n, NULL, 0)))
3598 		{
3599 		  if (dump_file)
3600 		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
3601 			     "reference from %s to %s.\n",
3602 			     new_root->dump_name (),
3603 			     n->dump_name ());
3604 		  ref->remove_reference ();
3605 		}
3606 	    }
3607 	}
3608       else if (jf->type == IPA_JF_CONST
3609 	       && (rdesc = jfunc_rdesc_usable (jf)))
3610 	{
3611 	  int d = ipa_get_controlled_uses (old_root_info, i);
3612 	  int c = rdesc->refcount;
3613 	  rdesc->refcount = combine_controlled_uses_counters (c, d);
3614 	  if (rdesc->refcount == 0)
3615 	    {
3616 	      tree cst = ipa_get_jf_constant (jf);
3617 	      struct cgraph_node *n;
3618 	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
3619 				   && TREE_CODE (TREE_OPERAND (cst, 0))
3620 				   == FUNCTION_DECL);
3621 	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
3622 	      if (n)
3623 		{
3624 		  struct cgraph_node *clone;
3625 		  bool ok;
3626 		  ok = remove_described_reference (n, rdesc);
3627 		  gcc_checking_assert (ok);
3628 
3629 		  clone = cs->caller;
3630 		  while (clone->global.inlined_to
3631 			 && clone != rdesc->cs->caller
3632 			 && IPA_NODE_REF (clone)->ipcp_orig_node)
3633 		    {
3634 		      struct ipa_ref *ref;
3635 		      ref = clone->find_reference (n, NULL, 0);
3636 		      if (ref)
3637 			{
3638 			  if (dump_file)
3639 			    fprintf (dump_file, "ipa-prop: Removing "
3640 				     "cloning-created reference "
3641 				     "from %s to %s.\n",
3642 				     clone->dump_name (),
3643 				     n->dump_name ());
3644 			  ref->remove_reference ();
3645 			}
3646 		      clone = clone->callers->caller;
3647 		    }
3648 		}
3649 	    }
3650 	}
3651     }
3652 
3653   for (i = ipa_get_param_count (old_root_info);
3654        i < ipa_get_cs_argument_count (args);
3655        i++)
3656     {
3657       struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3658 
3659       if (jf->type == IPA_JF_CONST)
3660 	{
3661 	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
3662 	  if (rdesc)
3663 	    rdesc->refcount = IPA_UNDESCRIBED_USE;
3664 	}
3665       else if (jf->type == IPA_JF_PASS_THROUGH)
3666 	ipa_set_controlled_uses (new_root_info,
3667 				 jf->value.pass_through.formal_id,
3668 				 IPA_UNDESCRIBED_USE);
3669     }
3670 }
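
/* Illustrative example (editor's addition): in a hypothetical

     static void helper (void);
     static void dispatch (void (*fn) (void)) { fn (); }
     void entry (void) { dispatch (helper); }

   once DISPATCH is inlined into ENTRY and the call through FN is resolved to
   HELPER, the rdesc refcount of the constant jump function describing
   &HELPER can drop to zero; the code above then removes the reference that
   cloning created, which may in turn allow HELPER to become local or be
   removed if nothing else takes its address.  */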
3671 
3672 /* Update jump functions and call note functions on inlining the call site CS.
3673    CS is expected to lead to a node already cloned by
3674    cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
3675    *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
3676    created.  */
3677 
3678 bool
3679 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
3680 				   vec<cgraph_edge *> *new_edges)
3681 {
3682   bool changed;
3683   /* Do nothing if the preparation phase has not been carried out yet
3684      (i.e. during early inlining).  */
3685   if (!ipa_node_params_sum)
3686     return false;
3687   gcc_assert (ipa_edge_args_sum);
3688 
3689   propagate_controlled_uses (cs);
3690   changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3691 
3692   return changed;
3693 }
3694 
3695 /* Ensure that the array of edge argument infos is big enough to accommodate a
3696    structure for all edges and reallocate it if not.  Also, allocate
3697    associated hash tables if they do not already exist.  */
3698 
3699 void
3700 ipa_check_create_edge_args (void)
3701 {
3702   if (!ipa_edge_args_sum)
3703     ipa_edge_args_sum
3704       = (new (ggc_cleared_alloc <ipa_edge_args_sum_t> ())
3705 	 ipa_edge_args_sum_t (symtab, true));
3706   if (!ipa_bits_hash_table)
3707     ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3708   if (!ipa_vr_hash_table)
3709     ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3710 }
3711 
3712 /* Frees all dynamically allocated structures that the argument info points
3713    to.  */
3714 
3715 void
3716 ipa_free_edge_args_substructures (struct ipa_edge_args *args)
3717 {
3718   vec_free (args->jump_functions);
3719   *args = ipa_edge_args ();
3720 }
3721 
3722 /* Free all ipa_edge_args structures.  */
3723 
3724 void
3725 ipa_free_all_edge_args (void)
3726 {
3727   if (!ipa_edge_args_sum)
3728     return;
3729 
3730   ipa_edge_args_sum->release ();
3731   ipa_edge_args_sum = NULL;
3732 }
3733 
3734 /* Free all ipa_node_params structures.  */
3735 
3736 void
3737 ipa_free_all_node_params (void)
3738 {
3739   ipa_node_params_sum->release ();
3740   ipa_node_params_sum = NULL;
3741 }
3742 
3743 /* Grow ipcp_transformations if necessary.  Also allocate any necessary hash
3744    tables if they do not already exist.  */
3745 
3746 void
3747 ipcp_grow_transformations_if_necessary (void)
3748 {
3749   if (vec_safe_length (ipcp_transformations)
3750       <= (unsigned) symtab->cgraph_max_uid)
3751     vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
3752   if (!ipa_bits_hash_table)
3753     ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
3754   if (!ipa_vr_hash_table)
3755     ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
3756 }
3757 
3758 /* Set the aggregate replacements of NODE to be AGGVALS.  */
3759 
3760 void
3761 ipa_set_node_agg_value_chain (struct cgraph_node *node,
3762 			      struct ipa_agg_replacement_value *aggvals)
3763 {
3764   ipcp_grow_transformations_if_necessary ();
3765   (*ipcp_transformations)[node->uid].agg_values = aggvals;
3766 }
3767 
3768 /* Hook that is called by cgraph.c when an edge is removed.  Adjust reference
3769    count data structures accordingly.  */
3770 
3771 void
3772 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
3773 {
3774   if (args->jump_functions)
3775     {
3776       struct ipa_jump_func *jf;
3777       int i;
3778       FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
3779 	{
3780 	  struct ipa_cst_ref_desc *rdesc;
3781 	  try_decrement_rdesc_refcount (jf);
3782 	  if (jf->type == IPA_JF_CONST
3783 	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
3784 	      && rdesc->cs == cs)
3785 	    rdesc->cs = NULL;
3786 	}
3787     }
3788 }
3789 
3790 /* Method invoked when an edge is duplicated.  Copy ipa_edge_args and adjust
3791    reference count data structures accordingly.  */
3792 
3793 void
3794 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
3795 				ipa_edge_args *old_args, ipa_edge_args *new_args)
3796 {
3797   unsigned int i;
3798 
3799   new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
3800   if (old_args->polymorphic_call_contexts)
3801     new_args->polymorphic_call_contexts
3802       = vec_safe_copy (old_args->polymorphic_call_contexts);
3803 
3804   for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
3805     {
3806       struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3807       struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3808 
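      /* After the vec_safe_copy of jump_functions above, dst_jf->agg.items
	 still shares its vector with src_jf's; make a private copy so the
	 two edges can be updated independently (editor's comment).  */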
3809       dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3810 
3811       if (src_jf->type == IPA_JF_CONST)
3812 	{
3813 	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3814 
3815 	  if (!src_rdesc)
3816 	    dst_jf->value.constant.rdesc = NULL;
3817 	  else if (src->caller == dst->caller)
3818 	    {
3819 	      struct ipa_ref *ref;
3820 	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
3821 	      gcc_checking_assert (n);
3822 	      ref = src->caller->find_reference (n, src->call_stmt,
3823 						 src->lto_stmt_uid);
3824 	      gcc_checking_assert (ref);
3825 	      dst->caller->clone_reference (ref, ref->stmt);
3826 
3827 	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3828 	      dst_rdesc->cs = dst;
3829 	      dst_rdesc->refcount = src_rdesc->refcount;
3830 	      dst_rdesc->next_duplicate = NULL;
3831 	      dst_jf->value.constant.rdesc = dst_rdesc;
3832 	    }
3833 	  else if (src_rdesc->cs == src)
3834 	    {
3835 	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
3836 	      dst_rdesc->cs = dst;
3837 	      dst_rdesc->refcount = src_rdesc->refcount;
3838 	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3839 	      src_rdesc->next_duplicate = dst_rdesc;
3840 	      dst_jf->value.constant.rdesc = dst_rdesc;
3841 	    }
3842 	  else
3843 	    {
3844 	      struct ipa_cst_ref_desc *dst_rdesc;
3845 	      /* This can happen during inlining, when a JFUNC can refer to a
3846 		 reference taken in a function up in the tree of inline clones.
3847 		 We need to find the duplicate that refers to our tree of
3848 		 inline clones.  */
3849 
3850 	      gcc_assert (dst->caller->global.inlined_to);
3851 	      for (dst_rdesc = src_rdesc->next_duplicate;
3852 		   dst_rdesc;
3853 		   dst_rdesc = dst_rdesc->next_duplicate)
3854 		{
3855 		  struct cgraph_node *top;
3856 		  top = dst_rdesc->cs->caller->global.inlined_to
3857 		    ? dst_rdesc->cs->caller->global.inlined_to
3858 		    : dst_rdesc->cs->caller;
3859 		  if (dst->caller->global.inlined_to == top)
3860 		    break;
3861 		}
3862 	      gcc_assert (dst_rdesc);
3863 	      dst_jf->value.constant.rdesc = dst_rdesc;
3864 	    }
3865 	}
3866       else if (dst_jf->type == IPA_JF_PASS_THROUGH
3867 	       && src->caller == dst->caller)
3868 	{
3869 	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
3870 	    ? dst->caller->global.inlined_to : dst->caller;
3871 	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
3872 	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
3873 
3874 	  int c = ipa_get_controlled_uses (root_info, idx);
3875 	  if (c != IPA_UNDESCRIBED_USE)
3876 	    {
3877 	      c++;
3878 	      ipa_set_controlled_uses (root_info, idx, c);
3879 	    }
3880 	}
3881     }
3882 }
3883 
3884 /* Analyze a function newly added to the callgraph.  */
3885 
3886 static void
3887 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3888 {
3889   if (node->has_gimple_body_p ())
3890     ipa_analyze_node (node);
3891 }
3892 
3893 /* Hook that is called by summary when a node is duplicated.  */
3894 
3895 void
3896 ipa_node_params_t::duplicate (cgraph_node *src, cgraph_node *dst,
3897 			     ipa_node_params *old_info,
3898 			     ipa_node_params *new_info)
3899 {
3900   ipa_agg_replacement_value *old_av, *new_av;
3901 
3902   new_info->descriptors = vec_safe_copy (old_info->descriptors);
3903   new_info->lattices = NULL;
3904   new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3905   new_info->known_csts = old_info->known_csts.copy ();
3906   new_info->known_contexts = old_info->known_contexts.copy ();
3907 
3908   new_info->analysis_done = old_info->analysis_done;
3909   new_info->node_enqueued = old_info->node_enqueued;
3910   new_info->versionable = old_info->versionable;
3911 
3912   old_av = ipa_get_agg_replacements_for_node (src);
3913   if (old_av)
3914     {
3915       new_av = NULL;
3916       while (old_av)
3917 	{
3918 	  struct ipa_agg_replacement_value *v;
3919 
3920 	  v = ggc_alloc<ipa_agg_replacement_value> ();
3921 	  memcpy (v, old_av, sizeof (*v));
3922 	  v->next = new_av;
3923 	  new_av = v;
3924 	  old_av = old_av->next;
3925 	}
3926       ipa_set_node_agg_value_chain (dst, new_av);
3927     }
3928 
3929   ipcp_transformation_summary *src_trans
3930     = ipcp_get_transformation_summary (src);
3931 
3932   if (src_trans)
3933     {
3934       ipcp_grow_transformations_if_necessary ();
3935       src_trans = ipcp_get_transformation_summary (src);
3936       ipcp_transformation_summary *dst_trans
3937 	= ipcp_get_transformation_summary (dst);
3938 
3939       dst_trans->bits = vec_safe_copy (src_trans->bits);
3940 
3941       const vec<ipa_vr, va_gc> *src_vr = src_trans->m_vr;
3942       vec<ipa_vr, va_gc> *&dst_vr
3943 	= ipcp_get_transformation_summary (dst)->m_vr;
3944       if (vec_safe_length (src_trans->m_vr) > 0)
3945 	{
3946 	  vec_safe_reserve_exact (dst_vr, src_vr->length ());
3947 	  for (unsigned i = 0; i < src_vr->length (); ++i)
3948 	    dst_vr->quick_push ((*src_vr)[i]);
3949 	}
3950     }
3951 }
3952 
3953 /* Register our cgraph hooks if they are not already there.  */
3954 
3955 void
3956 ipa_register_cgraph_hooks (void)
3957 {
3958   ipa_check_create_node_params ();
3959   ipa_check_create_edge_args ();
3960 
3961   function_insertion_hook_holder =
3962       symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
3963 }
3964 
3965 /* Unregister our cgraph hooks.  */
3966 
3967 static void
3968 ipa_unregister_cgraph_hooks (void)
3969 {
3970   symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
3971   function_insertion_hook_holder = NULL;
3972 }
3973 
3974 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3975    longer needed after ipa-cp.  */
3976 
3977 void
3978 ipa_free_all_structures_after_ipa_cp (void)
3979 {
3980   if (!optimize && !in_lto_p)
3981     {
3982       ipa_free_all_edge_args ();
3983       ipa_free_all_node_params ();
3984       ipcp_sources_pool.release ();
3985       ipcp_cst_values_pool.release ();
3986       ipcp_poly_ctx_values_pool.release ();
3987       ipcp_agg_lattice_pool.release ();
3988       ipa_unregister_cgraph_hooks ();
3989       ipa_refdesc_pool.release ();
3990     }
3991 }
3992 
3993 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
3994    longer needed after indirect inlining.  */
3995 
3996 void
3997 ipa_free_all_structures_after_iinln (void)
3998 {
3999   ipa_free_all_edge_args ();
4000   ipa_free_all_node_params ();
4001   ipa_unregister_cgraph_hooks ();
4002   ipcp_sources_pool.release ();
4003   ipcp_cst_values_pool.release ();
4004   ipcp_poly_ctx_values_pool.release ();
4005   ipcp_agg_lattice_pool.release ();
4006   ipa_refdesc_pool.release ();
4007 }
4008 
4009 /* Print ipa_tree_map data structures (parameter descriptors) of function
4010    NODE to F.  */
4011 
4012 void
4013 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4014 {
4015   int i, count;
4016   struct ipa_node_params *info;
4017 
4018   if (!node->definition)
4019     return;
4020   info = IPA_NODE_REF (node);
4021   fprintf (f, "  function  %s parameter descriptors:\n", node->dump_name ());
4022   count = ipa_get_param_count (info);
4023   for (i = 0; i < count; i++)
4024     {
4025       int c;
4026 
4027       fprintf (f, "    ");
4028       ipa_dump_param (f, info, i);
4029       if (ipa_is_param_used (info, i))
4030 	fprintf (f, " used");
4031       c = ipa_get_controlled_uses (info, i);
4032       if (c == IPA_UNDESCRIBED_USE)
4033 	fprintf (f, " undescribed_use");
4034       else
4035 	fprintf (f, " controlled_uses=%i", c);
4036       fprintf (f, "\n");
4037     }
4038 }
4039 
4040 /* Print ipa_tree_map data structures of all functions in the
4041    callgraph to F.  */
4042 
4043 void
4044 ipa_print_all_params (FILE * f)
4045 {
4046   struct cgraph_node *node;
4047 
4048   fprintf (f, "\nFunction parameters:\n");
4049   FOR_EACH_FUNCTION (node)
4050     ipa_print_node_params (f, node);
4051 }
4052 
4053 /* Dump the AV linked list.  */
4054 
4055 void
4056 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4057 {
4058   bool comma = false;
4059   fprintf (f, "     Aggregate replacements:");
4060   for (; av; av = av->next)
4061     {
4062       fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4063 	       av->index, av->offset);
4064       print_generic_expr (f, av->value);
4065       comma = true;
4066     }
4067   fprintf (f, "\n");
4068 }
4069 
4070 /* Stream out jump function JUMP_FUNC to OB.  */
4071 
4072 static void
4073 ipa_write_jump_function (struct output_block *ob,
4074 			 struct ipa_jump_func *jump_func)
4075 {
4076   struct ipa_agg_jf_item *item;
4077   struct bitpack_d bp;
4078   int i, count;
4079 
4080   streamer_write_uhwi (ob, jump_func->type);
4081   switch (jump_func->type)
4082     {
4083     case IPA_JF_UNKNOWN:
4084       break;
4085     case IPA_JF_CONST:
4086       gcc_assert (
4087 	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4088       stream_write_tree (ob, jump_func->value.constant.value, true);
4089       break;
4090     case IPA_JF_PASS_THROUGH:
4091       streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4092       if (jump_func->value.pass_through.operation == NOP_EXPR)
4093 	{
4094 	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4095 	  bp = bitpack_create (ob->main_stream);
4096 	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4097 	  streamer_write_bitpack (&bp);
4098 	}
4099       else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4100 	       == tcc_unary)
4101 	streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4102       else
4103 	{
4104 	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4105 	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4106 	}
4107       break;
4108     case IPA_JF_ANCESTOR:
4109       streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4110       streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4111       bp = bitpack_create (ob->main_stream);
4112       bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4113       streamer_write_bitpack (&bp);
4114       break;
4115     }
4116 
4117   count = vec_safe_length (jump_func->agg.items);
4118   streamer_write_uhwi (ob, count);
4119   if (count)
4120     {
4121       bp = bitpack_create (ob->main_stream);
4122       bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4123       streamer_write_bitpack (&bp);
4124     }
4125 
4126   FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4127     {
4128       streamer_write_uhwi (ob, item->offset);
4129       stream_write_tree (ob, item->value, true);
4130     }
4131 
4132   bp = bitpack_create (ob->main_stream);
4133   bp_pack_value (&bp, !!jump_func->bits, 1);
4134   streamer_write_bitpack (&bp);
4135   if (jump_func->bits)
4136     {
4137       streamer_write_widest_int (ob, jump_func->bits->value);
4138       streamer_write_widest_int (ob, jump_func->bits->mask);
4139     }
4140   bp_pack_value (&bp, !!jump_func->m_vr, 1);
4141   streamer_write_bitpack (&bp);
4142   if (jump_func->m_vr)
4143     {
4144       streamer_write_enum (ob->main_stream, value_range_type,
4145 			   VR_LAST, jump_func->m_vr->type);
4146       stream_write_tree (ob, jump_func->m_vr->min, true);
4147       stream_write_tree (ob, jump_func->m_vr->max, true);
4148     }
4149 }
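
/* Editor's summary of the record streamed above, for orientation:

     uhwi     jump function type
     payload  IPA_JF_CONST: the constant tree;
	      IPA_JF_PASS_THROUGH: uhwi operation, then for NOP_EXPR a
		formal_id and an agg_preserved bit, for other unary
		operations just a formal_id, otherwise the operand tree
		followed by a formal_id;
	      IPA_JF_ANCESTOR: uhwi offset, uhwi formal_id and an
		agg_preserved bit
     uhwi     number of aggregate items, a by_ref bit if nonzero, then
	      per item a uhwi offset and a value tree
     bitpack  bits-known flag, then value and mask as widest ints if set
     bitpack  VR-known flag, then the range type enum and min/max trees
	      if set

   ipa_read_jump_function below consumes exactly this sequence.  */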
4150 
4151 /* Read in jump function JUMP_FUNC from IB.  */
4152 
4153 static void
4154 ipa_read_jump_function (struct lto_input_block *ib,
4155 			struct ipa_jump_func *jump_func,
4156 			struct cgraph_edge *cs,
4157 			struct data_in *data_in)
4158 {
4159   enum jump_func_type jftype;
4160   enum tree_code operation;
4161   int i, count;
4162 
4163   jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4164   switch (jftype)
4165     {
4166     case IPA_JF_UNKNOWN:
4167       ipa_set_jf_unknown (jump_func);
4168       break;
4169     case IPA_JF_CONST:
4170       ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
4171       break;
4172     case IPA_JF_PASS_THROUGH:
4173       operation = (enum tree_code) streamer_read_uhwi (ib);
4174       if (operation == NOP_EXPR)
4175 	{
4176 	  int formal_id = streamer_read_uhwi (ib);
4177 	  struct bitpack_d bp = streamer_read_bitpack (ib);
4178 	  bool agg_preserved = bp_unpack_value (&bp, 1);
4179 	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4180 	}
4181       else if (TREE_CODE_CLASS (operation) == tcc_unary)
4182 	{
4183 	  int formal_id = streamer_read_uhwi (ib);
4184 	  ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4185 	}
4186       else
4187 	{
4188 	  tree operand = stream_read_tree (ib, data_in);
4189 	  int formal_id = streamer_read_uhwi (ib);
4190 	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4191 					 operation);
4192 	}
4193       break;
4194     case IPA_JF_ANCESTOR:
4195       {
4196 	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4197 	int formal_id = streamer_read_uhwi (ib);
4198 	struct bitpack_d bp = streamer_read_bitpack (ib);
4199 	bool agg_preserved = bp_unpack_value (&bp, 1);
4200 	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
4201 	break;
4202       }
4203     }
4204 
4205   count = streamer_read_uhwi (ib);
4206   vec_alloc (jump_func->agg.items, count);
4207   if (count)
4208     {
4209       struct bitpack_d bp = streamer_read_bitpack (ib);
4210       jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4211     }
4212   for (i = 0; i < count; i++)
4213     {
4214       struct ipa_agg_jf_item item;
4215       item.offset = streamer_read_uhwi (ib);
4216       item.value = stream_read_tree (ib, data_in);
4217       jump_func->agg.items->quick_push (item);
4218     }
4219 
4220   struct bitpack_d bp = streamer_read_bitpack (ib);
4221   bool bits_known = bp_unpack_value (&bp, 1);
4222   if (bits_known)
4223     {
4224       widest_int value = streamer_read_widest_int (ib);
4225       widest_int mask = streamer_read_widest_int (ib);
4226       ipa_set_jfunc_bits (jump_func, value, mask);
4227     }
4228   else
4229     jump_func->bits = NULL;
4230 
4231   struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4232   bool vr_known = bp_unpack_value (&vr_bp, 1);
4233   if (vr_known)
4234     {
4235       enum value_range_type type = streamer_read_enum (ib, value_range_type,
4236 						       VR_LAST);
4237       tree min = stream_read_tree (ib, data_in);
4238       tree max = stream_read_tree (ib, data_in);
4239       ipa_set_jfunc_vr (jump_func, type, min, max);
4240     }
4241   else
4242     jump_func->m_vr = NULL;
4243 }
4244 
4245 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4246    relevant to indirect inlining to OB.  */
4247 
4248 static void
4249 ipa_write_indirect_edge_info (struct output_block *ob,
4250 			      struct cgraph_edge *cs)
4251 {
4252   struct cgraph_indirect_call_info *ii = cs->indirect_info;
4253   struct bitpack_d bp;
4254 
4255   streamer_write_hwi (ob, ii->param_index);
4256   bp = bitpack_create (ob->main_stream);
4257   bp_pack_value (&bp, ii->polymorphic, 1);
4258   bp_pack_value (&bp, ii->agg_contents, 1);
4259   bp_pack_value (&bp, ii->member_ptr, 1);
4260   bp_pack_value (&bp, ii->by_ref, 1);
4261   bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4262   bp_pack_value (&bp, ii->vptr_changed, 1);
4263   streamer_write_bitpack (&bp);
4264   if (ii->agg_contents || ii->polymorphic)
4265     streamer_write_hwi (ob, ii->offset);
4266   else
4267     gcc_assert (ii->offset == 0);
4268 
4269   if (ii->polymorphic)
4270     {
4271       streamer_write_hwi (ob, ii->otr_token);
4272       stream_write_tree (ob, ii->otr_type, true);
4273       ii->context.stream_out (ob);
4274     }
4275 }
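
/* Editor's summary, for orientation: the record streamed above is a hwi
   param_index, one bitpack holding the six flags in the order written, a
   hwi offset only when agg_contents or polymorphic is set, and for
   polymorphic calls additionally a hwi otr_token, the otr_type tree and the
   streamed polymorphic context.  ipa_read_indirect_edge_info below mirrors
   this layout.  */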
4276 
4277 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4278    relevant to indirect inlining from IB.  */
4279 
4280 static void
4281 ipa_read_indirect_edge_info (struct lto_input_block *ib,
4282 			     struct data_in *data_in,
4283 			     struct cgraph_edge *cs)
4284 {
4285   struct cgraph_indirect_call_info *ii = cs->indirect_info;
4286   struct bitpack_d bp;
4287 
4288   ii->param_index = (int) streamer_read_hwi (ib);
4289   bp = streamer_read_bitpack (ib);
4290   ii->polymorphic = bp_unpack_value (&bp, 1);
4291   ii->agg_contents = bp_unpack_value (&bp, 1);
4292   ii->member_ptr = bp_unpack_value (&bp, 1);
4293   ii->by_ref = bp_unpack_value (&bp, 1);
4294   ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4295   ii->vptr_changed = bp_unpack_value (&bp, 1);
4296   if (ii->agg_contents || ii->polymorphic)
4297     ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4298   else
4299     ii->offset = 0;
4300   if (ii->polymorphic)
4301     {
4302       ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4303       ii->otr_type = stream_read_tree (ib, data_in);
4304       ii->context.stream_in (ib, data_in);
4305     }
4306 }
4307 
4308 /* Stream out NODE info to OB.  */
4309 
4310 static void
4311 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4312 {
4313   int node_ref;
4314   lto_symtab_encoder_t encoder;
4315   struct ipa_node_params *info = IPA_NODE_REF (node);
4316   int j;
4317   struct cgraph_edge *e;
4318   struct bitpack_d bp;
4319 
4320   encoder = ob->decl_state->symtab_node_encoder;
4321   node_ref = lto_symtab_encoder_encode (encoder, node);
4322   streamer_write_uhwi (ob, node_ref);
4323 
4324   streamer_write_uhwi (ob, ipa_get_param_count (info));
4325   for (j = 0; j < ipa_get_param_count (info); j++)
4326     streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4327   bp = bitpack_create (ob->main_stream);
4328   gcc_assert (info->analysis_done
4329 	      || ipa_get_param_count (info) == 0);
4330   gcc_assert (!info->node_enqueued);
4331   gcc_assert (!info->ipcp_orig_node);
4332   for (j = 0; j < ipa_get_param_count (info); j++)
4333     bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4334   streamer_write_bitpack (&bp);
4335   for (j = 0; j < ipa_get_param_count (info); j++)
4336     {
4337       streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4338       stream_write_tree (ob, ipa_get_type (info, j), true);
4339     }
4340   for (e = node->callees; e; e = e->next_callee)
4341     {
4342       struct ipa_edge_args *args = IPA_EDGE_REF (e);
4343 
4344       streamer_write_uhwi (ob,
4345 			   ipa_get_cs_argument_count (args) * 2
4346 			   + (args->polymorphic_call_contexts != NULL));
4347       for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4348 	{
4349 	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4350 	  if (args->polymorphic_call_contexts != NULL)
4351 	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4352 	}
4353     }
4354   for (e = node->indirect_calls; e; e = e->next_callee)
4355     {
4356       struct ipa_edge_args *args = IPA_EDGE_REF (e);
4357 
4358       streamer_write_uhwi (ob,
4359 			   ipa_get_cs_argument_count (args) * 2
4360   			   + (args->polymorphic_call_contexts != NULL));
4361       for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4362 	{
4363 	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4364 	  if (args->polymorphic_call_contexts != NULL)
4365 	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4366 	}
4367       ipa_write_indirect_edge_info (ob, e);
4368     }
4369 }
4370 
4371 /* Stream in NODE info from IB.  */
4372 
4373 static void
4374 ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4375 		    struct data_in *data_in)
4376 {
4377   struct ipa_node_params *info = IPA_NODE_REF (node);
4378   int k;
4379   struct cgraph_edge *e;
4380   struct bitpack_d bp;
4381 
4382   ipa_alloc_node_params (node, streamer_read_uhwi (ib));
4383 
4384   for (k = 0; k < ipa_get_param_count (info); k++)
4385     (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
4386 
4387   bp = streamer_read_bitpack (ib);
4388   if (ipa_get_param_count (info) != 0)
4389     info->analysis_done = true;
4390   info->node_enqueued = false;
4391   for (k = 0; k < ipa_get_param_count (info); k++)
4392     ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
4393   for (k = 0; k < ipa_get_param_count (info); k++)
4394     {
4395       ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
4396       (*info->descriptors)[k].decl_or_type = stream_read_tree (ib, data_in);
4397     }
4398   for (e = node->callees; e; e = e->next_callee)
4399     {
4400       struct ipa_edge_args *args = IPA_EDGE_REF (e);
4401       int count = streamer_read_uhwi (ib);
4402       bool contexts_computed = count & 1;
4403       count /= 2;
4404 
4405       if (!count)
4406 	continue;
4407       vec_safe_grow_cleared (args->jump_functions, count);
4408       if (contexts_computed)
4409 	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4410 
4411       for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4412 	{
4413 	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4414 				  data_in);
4415 	  if (contexts_computed)
4416 	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4417 	}
4418     }
4419   for (e = node->indirect_calls; e; e = e->next_callee)
4420     {
4421       struct ipa_edge_args *args = IPA_EDGE_REF (e);
4422       int count = streamer_read_uhwi (ib);
4423       bool contexts_computed = count & 1;
4424       count /= 2;
4425 
4426       if (count)
4427 	{
4428 	  vec_safe_grow_cleared (args->jump_functions, count);
4429 	  if (contexts_computed)
4430 	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4431           for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4432 	    {
4433 	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4434 				      data_in);
4435 	      if (contexts_computed)
4436 		ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
4437 	    }
4438 	}
4439       ipa_read_indirect_edge_info (ib, data_in, e);
4440     }
4441 }
4442 
4443 /* Write jump functions for all functions in the current partition.  */
4444 
4445 void
4446 ipa_prop_write_jump_functions (void)
4447 {
4448   struct cgraph_node *node;
4449   struct output_block *ob;
4450   unsigned int count = 0;
4451   lto_symtab_encoder_iterator lsei;
4452   lto_symtab_encoder_t encoder;
4453 
4454   if (!ipa_node_params_sum || !ipa_edge_args_sum)
4455     return;
4456 
4457   ob = create_output_block (LTO_section_jump_functions);
4458   encoder = ob->decl_state->symtab_node_encoder;
4459   ob->symbol = NULL;
4460   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4461        lsei_next_function_in_partition (&lsei))
4462     {
4463       node = lsei_cgraph_node (lsei);
4464       if (node->has_gimple_body_p ()
4465 	  && IPA_NODE_REF (node) != NULL)
4466 	count++;
4467     }
4468 
4469   streamer_write_uhwi (ob, count);
4470 
4471   /* Process all of the functions.  */
4472   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4473        lsei_next_function_in_partition (&lsei))
4474     {
4475       node = lsei_cgraph_node (lsei);
4476       if (node->has_gimple_body_p ()
4477 	  && IPA_NODE_REF (node) != NULL)
4478         ipa_write_node_info (ob, node);
4479     }
4480   streamer_write_char_stream (ob->main_stream, 0);
4481   produce_asm (ob, NULL);
4482   destroy_output_block (ob);
4483 }
4484 
4485 /* Read section in file FILE_DATA of length LEN with data DATA.  */
4486 
4487 static void
4488 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4489 		       size_t len)
4490 {
4491   const struct lto_function_header *header =
4492     (const struct lto_function_header *) data;
4493   const int cfg_offset = sizeof (struct lto_function_header);
4494   const int main_offset = cfg_offset + header->cfg_size;
4495   const int string_offset = main_offset + header->main_size;
4496   struct data_in *data_in;
4497   unsigned int i;
4498   unsigned int count;
4499 
4500   lto_input_block ib_main ((const char *) data + main_offset,
4501 			   header->main_size, file_data->mode_table);
4502 
4503   data_in =
4504     lto_data_in_create (file_data, (const char *) data + string_offset,
4505 			header->string_size, vNULL);
4506   count = streamer_read_uhwi (&ib_main);
4507 
4508   for (i = 0; i < count; i++)
4509     {
4510       unsigned int index;
4511       struct cgraph_node *node;
4512       lto_symtab_encoder_t encoder;
4513 
4514       index = streamer_read_uhwi (&ib_main);
4515       encoder = file_data->symtab_node_encoder;
4516       node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4517 								index));
4518       gcc_assert (node->definition);
4519       ipa_read_node_info (&ib_main, node, data_in);
4520     }
4521   lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4522 			 len);
4523   lto_data_in_delete (data_in);
4524 }
4525 
4526 /* Read ipcp jump functions.  */
4527 
4528 void
4529 ipa_prop_read_jump_functions (void)
4530 {
4531   struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4532   struct lto_file_decl_data *file_data;
4533   unsigned int j = 0;
4534 
4535   ipa_check_create_node_params ();
4536   ipa_check_create_edge_args ();
4537   ipa_register_cgraph_hooks ();
4538 
4539   while ((file_data = file_data_vec[j++]))
4540     {
4541       size_t len;
4542       const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4543 
4544       if (data)
4545         ipa_prop_read_section (file_data, data, len);
4546     }
4547 }
4548 
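/* Stream out the aggregate value replacement chain, known value ranges and
   known-bits information for NODE to OB.  */
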
4549 void
4550 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
4551 {
4552   int node_ref;
4553   unsigned int count = 0;
4554   lto_symtab_encoder_t encoder;
4555   struct ipa_agg_replacement_value *aggvals, *av;
4556 
4557   aggvals = ipa_get_agg_replacements_for_node (node);
4558   encoder = ob->decl_state->symtab_node_encoder;
4559   node_ref = lto_symtab_encoder_encode (encoder, node);
4560   streamer_write_uhwi (ob, node_ref);
4561 
4562   for (av = aggvals; av; av = av->next)
4563     count++;
4564   streamer_write_uhwi (ob, count);
4565 
4566   for (av = aggvals; av; av = av->next)
4567     {
4568       struct bitpack_d bp;
4569 
4570       streamer_write_uhwi (ob, av->offset);
4571       streamer_write_uhwi (ob, av->index);
4572       stream_write_tree (ob, av->value, true);
4573 
4574       bp = bitpack_create (ob->main_stream);
4575       bp_pack_value (&bp, av->by_ref, 1);
4576       streamer_write_bitpack (&bp);
4577     }
4578 
4579   ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4580   if (ts && vec_safe_length (ts->m_vr) > 0)
4581     {
4582       count = ts->m_vr->length ();
4583       streamer_write_uhwi (ob, count);
4584       for (unsigned i = 0; i < count; ++i)
4585 	{
4586 	  struct bitpack_d bp;
4587 	  ipa_vr *parm_vr = &(*ts->m_vr)[i];
4588 	  bp = bitpack_create (ob->main_stream);
4589 	  bp_pack_value (&bp, parm_vr->known, 1);
4590 	  streamer_write_bitpack (&bp);
4591 	  if (parm_vr->known)
4592 	    {
4593 	      streamer_write_enum (ob->main_stream, value_range_type,
4594 				   VR_LAST, parm_vr->type);
4595 	      streamer_write_wide_int (ob, parm_vr->min);
4596 	      streamer_write_wide_int (ob, parm_vr->max);
4597 	    }
4598 	}
4599     }
4600   else
4601     streamer_write_uhwi (ob, 0);
4602 
4603   if (ts && vec_safe_length (ts->bits) > 0)
4604     {
4605       count = ts->bits->length ();
4606       streamer_write_uhwi (ob, count);
4607 
4608       for (unsigned i = 0; i < count; ++i)
4609 	{
4610 	  const ipa_bits *bits_jfunc = (*ts->bits)[i];
4611 	  struct bitpack_d bp = bitpack_create (ob->main_stream);
4612 	  bp_pack_value (&bp, !!bits_jfunc, 1);
4613 	  streamer_write_bitpack (&bp);
4614 	  if (bits_jfunc)
4615 	    {
4616 	      streamer_write_widest_int (ob, bits_jfunc->value);
4617 	      streamer_write_widest_int (ob, bits_jfunc->mask);
4618 	    }
4619 	}
4620     }
4621   else
4622     streamer_write_uhwi (ob, 0);
4623 }
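
/* Editor's summary, for orientation: the transformation record written
   above consists of the node reference, a counted list of aggregate
   replacement values (uhwi offset, uhwi index, value tree and a by_ref bit
   each), a counted list of value ranges (a known bit, then the range type
   enum and wide-int min/max) and a counted list of known-bits entries (a
   presence bit, then value and mask as widest ints); an empty vector is
   streamed as a zero count.  */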
4624 
4625 /* Stream in aggregate replacements, value ranges and bits for NODE from IB.  */
4626 
4627 static void
4628 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
4629 			       data_in *data_in)
4630 {
4631   struct ipa_agg_replacement_value *aggvals = NULL;
4632   unsigned int count, i;
4633 
4634   count = streamer_read_uhwi (ib);
4635   for (i = 0; i < count; i++)
4636     {
4637       struct ipa_agg_replacement_value *av;
4638       struct bitpack_d bp;
4639 
4640       av = ggc_alloc<ipa_agg_replacement_value> ();
4641       av->offset = streamer_read_uhwi (ib);
4642       av->index = streamer_read_uhwi (ib);
4643       av->value = stream_read_tree (ib, data_in);
4644       bp = streamer_read_bitpack (ib);
4645       av->by_ref = bp_unpack_value (&bp, 1);
4646       av->next = aggvals;
4647       aggvals = av;
4648     }
4649   ipa_set_node_agg_value_chain (node, aggvals);
4650 
4651   count = streamer_read_uhwi (ib);
4652   if (count > 0)
4653     {
4654       ipcp_grow_transformations_if_necessary ();
4655 
4656       ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4657       vec_safe_grow_cleared (ts->m_vr, count);
4658       for (i = 0; i < count; i++)
4659 	{
4660 	  ipa_vr *parm_vr;
4661 	  parm_vr = &(*ts->m_vr)[i];
4662 	  struct bitpack_d bp;
4663 	  bp = streamer_read_bitpack (ib);
4664 	  parm_vr->known = bp_unpack_value (&bp, 1);
4665 	  if (parm_vr->known)
4666 	    {
4667 	      parm_vr->type = streamer_read_enum (ib, value_range_type,
4668 						  VR_LAST);
4669 	      parm_vr->min = streamer_read_wide_int (ib);
4670 	      parm_vr->max = streamer_read_wide_int (ib);
4671 	    }
4672 	}
4673     }
4674   count = streamer_read_uhwi (ib);
4675   if (count > 0)
4676     {
4677       ipcp_grow_transformations_if_necessary ();
4678 
4679       ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4680       vec_safe_grow_cleared (ts->bits, count);
4681 
4682       for (i = 0; i < count; i++)
4683 	{
4684 	  struct bitpack_d bp = streamer_read_bitpack (ib);
4685 	  bool known = bp_unpack_value (&bp, 1);
4686 	  if (known)
4687 	    {
4688 	      ipa_bits *bits
4689 		= ipa_get_ipa_bits_for_value (streamer_read_widest_int (ib),
4690 					      streamer_read_widest_int (ib));
4691 	      (*ts->bits)[i] = bits;
4692 	    }
4693 	}
4694     }
4695 }
4696 
4697 /* Write all aggregate replacements for nodes in the current partition.  */
4698 
4699 void
4700 ipcp_write_transformation_summaries (void)
4701 {
4702   struct cgraph_node *node;
4703   struct output_block *ob;
4704   unsigned int count = 0;
4705   lto_symtab_encoder_iterator lsei;
4706   lto_symtab_encoder_t encoder;
4707 
4708   ob = create_output_block (LTO_section_ipcp_transform);
4709   encoder = ob->decl_state->symtab_node_encoder;
4710   ob->symbol = NULL;
4711   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4712        lsei_next_function_in_partition (&lsei))
4713     {
4714       node = lsei_cgraph_node (lsei);
4715       if (node->has_gimple_body_p ())
4716 	count++;
4717     }
4718 
4719   streamer_write_uhwi (ob, count);
4720 
4721   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4722        lsei_next_function_in_partition (&lsei))
4723     {
4724       node = lsei_cgraph_node (lsei);
4725       if (node->has_gimple_body_p ())
4726 	write_ipcp_transformation_info (ob, node);
4727     }
4728   streamer_write_char_stream (ob->main_stream, 0);
4729   produce_asm (ob, NULL);
4730   destroy_output_block (ob);
4731 }
4732 
4733 /* Read replacements section in file FILE_DATA of length LEN with data
4734    DATA.  */
4735 
4736 static void
4737 read_replacements_section (struct lto_file_decl_data *file_data,
4738 			   const char *data,
4739 			   size_t len)
4740 {
4741   const struct lto_function_header *header =
4742     (const struct lto_function_header *) data;
4743   const int cfg_offset = sizeof (struct lto_function_header);
4744   const int main_offset = cfg_offset + header->cfg_size;
4745   const int string_offset = main_offset + header->main_size;
4746   struct data_in *data_in;
4747   unsigned int i;
4748   unsigned int count;
4749 
4750   lto_input_block ib_main ((const char *) data + main_offset,
4751 			   header->main_size, file_data->mode_table);
4752 
4753   data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
4754 				header->string_size, vNULL);
4755   count = streamer_read_uhwi (&ib_main);
4756 
4757   for (i = 0; i < count; i++)
4758     {
4759       unsigned int index;
4760       struct cgraph_node *node;
4761       lto_symtab_encoder_t encoder;
4762 
4763       index = streamer_read_uhwi (&ib_main);
4764       encoder = file_data->symtab_node_encoder;
4765       node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
4766 								index));
4767       gcc_assert (node->definition);
4768       read_ipcp_transformation_info (&ib_main, node, data_in);
4769     }
4770   lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4771 			 len);
4772   lto_data_in_delete (data_in);
4773 }
4774 
4775 /* Read IPA-CP aggregate replacements.  */
4776 
4777 void
4778 ipcp_read_transformation_summaries (void)
4779 {
4780   struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4781   struct lto_file_decl_data *file_data;
4782   unsigned int j = 0;
4783 
4784   while ((file_data = file_data_vec[j++]))
4785     {
4786       size_t len;
4787       const char *data = lto_get_section_data (file_data,
4788 					       LTO_section_ipcp_transform,
4789 					       NULL, &len);
4790       if (data)
4791         read_replacements_section (file_data, data, len);
4792     }
4793 }
4794 
4795 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4796    NODE.  */
4797 
4798 static void
4799 adjust_agg_replacement_values (struct cgraph_node *node,
4800 			       struct ipa_agg_replacement_value *aggval)
4801 {
4802   struct ipa_agg_replacement_value *v;
4803   int i, c = 0, d = 0, *adj;
4804 
4805   if (!node->clone.combined_args_to_skip)
4806     return;
4807 
4808   for (v = aggval; v; v = v->next)
4809     {
4810       gcc_assert (v->index >= 0);
4811       if (c < v->index)
4812 	c = v->index;
4813     }
4814   c++;
4815 
4816   adj = XALLOCAVEC (int, c);
4817   for (i = 0; i < c; i++)
4818     if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4819       {
4820 	adj[i] = -1;
4821 	d++;
4822       }
4823     else
4824       adj[i] = i - d;
4825 
4826   for (v = aggval; v; v = v->next)
4827     v->index = adj[v->index];
4828 }
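
/* Worked example (editor's note): with combined_args_to_skip == {1} and
   replacement values for indices 0, 2 and 3, the loop above computes
   adj == {0, -1, 1, 2}, so the surviving values are renumbered to 0, 1 and
   2 to match the clone's reduced parameter list.  */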
4829 
4830 /* Dominator walker driving the ipcp modification phase.  */
4831 
4832 class ipcp_modif_dom_walker : public dom_walker
4833 {
4834 public:
4835   ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
4836 			 vec<ipa_param_descriptor, va_gc> *descs,
4837 			 struct ipa_agg_replacement_value *av,
4838 			 bool *sc, bool *cc)
4839     : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
4840       m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
4841 
4842   virtual edge before_dom_children (basic_block);
4843 
4844 private:
4845   struct ipa_func_body_info *m_fbi;
4846   vec<ipa_param_descriptor, va_gc> *m_descriptors;
4847   struct ipa_agg_replacement_value *m_aggval;
4848   bool *m_something_changed, *m_cfg_changed;
4849 };
4850 
4851 edge
4852 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
4853 {
4854   gimple_stmt_iterator gsi;
4855   for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4856     {
4857       struct ipa_agg_replacement_value *v;
4858       gimple *stmt = gsi_stmt (gsi);
4859       tree rhs, val, t;
4860       HOST_WIDE_INT offset, size;
4861       int index;
4862       bool by_ref, vce;
4863 
4864       if (!gimple_assign_load_p (stmt))
4865 	continue;
4866       rhs = gimple_assign_rhs1 (stmt);
4867       if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4868 	continue;
4869 
4870       vce = false;
4871       t = rhs;
4872       while (handled_component_p (t))
4873 	{
4874 	  /* V_C_E can do things like convert an array of integers to one
4875 	     bigger integer and similar things we do not handle below.  */
4876 	  if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4877 	    {
4878 	      vce = true;
4879 	      break;
4880 	    }
4881 	  t = TREE_OPERAND (t, 0);
4882 	}
4883       if (vce)
4884 	continue;
4885 
4886       if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
4887 				   &offset, &size, &by_ref))
4888 	continue;
4889       for (v = m_aggval; v; v = v->next)
4890 	if (v->index == index
4891 	    && v->offset == offset)
4892 	  break;
4893       if (!v
4894 	  || v->by_ref != by_ref
4895 	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
4896 	continue;
4897 
4898       gcc_checking_assert (is_gimple_ip_invariant (v->value));
4899       if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4900 	{
4901 	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4902 	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4903 	  else if (TYPE_SIZE (TREE_TYPE (rhs))
4904 		   == TYPE_SIZE (TREE_TYPE (v->value)))
4905 	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4906 	  else
4907 	    {
4908 	      if (dump_file)
4909 		{
4910 		  fprintf (dump_file, "    const ");
4911 		  print_generic_expr (dump_file, v->value);
4912 		  fprintf (dump_file, "  can't be converted to type of ");
4913 		  print_generic_expr (dump_file, rhs);
4914 		  fprintf (dump_file, "\n");
4915 		}
4916 	      continue;
4917 	    }
4918 	}
4919       else
4920 	val = v->value;
4921 
4922       if (dump_file && (dump_flags & TDF_DETAILS))
4923 	{
4924 	  fprintf (dump_file, "Modifying stmt:\n  ");
4925 	  print_gimple_stmt (dump_file, stmt, 0);
4926 	}
4927       gimple_assign_set_rhs_from_tree (&gsi, val);
4928       update_stmt (stmt);
4929 
4930       if (dump_file && (dump_flags & TDF_DETAILS))
4931 	{
4932 	  fprintf (dump_file, "into:\n  ");
4933 	  print_gimple_stmt (dump_file, stmt, 0);
4934 	  fprintf (dump_file, "\n");
4935 	}
4936 
4937       *m_something_changed = true;
4938       if (maybe_clean_eh_stmt (stmt)
4939 	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4940 	*m_cfg_changed = true;
4941     }
4942   return NULL;
4943 }
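
/* Illustrative example (editor's addition): for a clone of a hypothetical

     static int f (struct s *p) { return p->x + 1; }

   specialized for callers that always pass a structure whose X field is 7,
   the aggregate replacement chain contains an entry 0[<offset of x>]=7, so
   the walker above rewrites the load of P->X into the constant 7 (after
   checking that index, offset, size, by_ref and types match), leaving the
   addition for later folding.  */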
4944 
4945 /* Update bits info of formal parameters as described in
4946    ipcp_transformation_summary.  */
4947 
4948 static void
4949 ipcp_update_bits (struct cgraph_node *node)
4950 {
4951   tree parm = DECL_ARGUMENTS (node->decl);
4952   tree next_parm = parm;
4953   ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
4954 
4955   if (!ts || vec_safe_length (ts->bits) == 0)
4956     return;
4957 
4958   vec<ipa_bits *, va_gc> &bits = *ts->bits;
4959   unsigned count = bits.length ();
4960 
4961   for (unsigned i = 0; i < count; ++i, parm = next_parm)
4962     {
4963       if (node->clone.combined_args_to_skip
4964 	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
4965 	continue;
4966 
4967       gcc_checking_assert (parm);
4968       next_parm = DECL_CHAIN (parm);
4969 
4970       if (!bits[i]
4971 	  || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
4972 	       || POINTER_TYPE_P (TREE_TYPE (parm)))
4973 	  || !is_gimple_reg (parm))
4974 	continue;
4975 
4976       tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
4977       if (!ddef)
4978 	continue;
4979 
4980       if (dump_file)
4981 	{
4982 	  fprintf (dump_file, "Adjusting mask for param %u to ", i);
4983 	  print_hex (bits[i]->mask, dump_file);
4984 	  fprintf (dump_file, "\n");
4985 	}
4986 
4987       if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
4988 	{
4989 	  unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
4990 	  signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
4991 
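	  /* In ipa_bits a set mask bit means the bit's value is unknown, so
	     the bits that may be nonzero are exactly mask | value.  For
	     instance (illustrative), mask 0xf0 with value 0x01 yields
	     nonzero bits 0xf1: bits 1-3 are known to be zero.  */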
	  wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
				  | wide_int::from (bits[i]->value, prec, sgn);
	  set_nonzero_bits (ddef, nonzero_bits);
	}
      else
	{
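	  /* For pointers, the lowest set bit of the mask bounds the known
	     alignment: all bits below it are known, so the pointer is
	     aligned to that power of two, with the known low bits of the
	     value as the misalignment.  E.g. (illustrative) a mask of ~7
	     with value 4 gives align 8 and misalign 4.  */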
	  unsigned tem = bits[i]->mask.to_uhwi ();
	  unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
	  unsigned align = tem & -tem;
	  unsigned misalign = bitpos & (align - 1);

	  if (align > 1)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting align: %u, misalign: %u\n",
			 align, misalign);

	      unsigned old_align, old_misalign;
	      struct ptr_info_def *pi = get_ptr_info (ddef);
	      bool old_known = get_ptr_info_alignment (pi, &old_align,
						       &old_misalign);

	      if (old_known
		  && old_align > align)
		{
		  if (dump_file)
		    {
		      fprintf (dump_file, "But alignment was already %u.\n",
			       old_align);
		      if ((old_misalign & (align - 1)) != misalign)
			fprintf (dump_file,
				 "old_misalign (%u) and misalign (%u) mismatch\n",
				 old_misalign, misalign);
		    }
		  continue;
		}

	      if (old_known
		  && ((misalign & (old_align - 1)) != old_misalign)
		  && dump_file)
		fprintf (dump_file,
			 "old_misalign (%u) and misalign (%u) mismatch\n",
			 old_misalign, misalign);

	      set_ptr_info_alignment (pi, align, misalign);
	    }
	}
    }
}

/* Update value range of formal parameters as described in
   ipcp_transformation_summary.  */

static void
ipcp_update_vr (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->m_vr) == 0)
    return;
  const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
  unsigned count = vr.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);

      if (!ddef || !is_gimple_reg (parm))
	continue;

      if (vr[i].known
	  && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
	{
	  tree type = TREE_TYPE (ddef);
	  unsigned prec = TYPE_PRECISION (type);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Setting value range of param %u ", i);
		  fprintf (dump_file, "%s[",
			   (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
		  print_decs (vr[i].min, dump_file);
		  fprintf (dump_file, ", ");
		  print_decs (vr[i].max, dump_file);
		  fprintf (dump_file, "]\n");
		}
	      set_range_info (ddef, vr[i].type,
			      wide_int_storage::from (vr[i].min, prec,
						      TYPE_SIGN (type)),
			      wide_int_storage::from (vr[i].max, prec,
						      TYPE_SIGN (type)));
	    }
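	  /* An anti-range ~[0, 0] on a pointer parameter means it is known
	     not to be NULL.  */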
	  else if (POINTER_TYPE_P (TREE_TYPE (ddef))
		   && vr[i].type == VR_ANTI_RANGE
		   && wi::eq_p (vr[i].min, 0)
		   && wi::eq_p (vr[i].max, 0))
	    {
	      if (dump_file)
		fprintf (dump_file, "Setting nonnull for %u\n", i);
	      set_ptr_nonnull (ddef);
	    }
	}
    }
}

/* IPCP transformation phase: apply the aggregate replacements, known bits
   and value ranges that IPA-CP recorded for NODE to its function body.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
  struct ipa_func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s\n",
	     node->dump_name ());

  ipcp_update_bits (node);
  ipcp_update_vr (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

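  /* Set up the function-body analysis context used by the dominator walk
     below: one info slot per basic block and a counter limiting the
     alias-analysis walks.  */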
  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  vec_safe_grow_cleared (descriptors, param_count);
  ipa_populate_param_decls (node, *descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].bits = NULL;
  (*ipcp_transformations)[node->uid].m_vr = NULL;

  vec_free (descriptors);

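  /* Replaced loads no longer use their virtual operands, so the virtual SSA
     form needs updating; if dead EH edges were purged, the CFG must be
     cleaned up as well.  */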
  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}

#include "gt-ipa-prop.h"