1 /* Callgraph handling code.
2    Copyright (C) 2003-2018 Free Software Foundation, Inc.
3    Contributed by Jan Hubicka
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
/*  This file contains basic routines for manipulating the call graph.

    The call graph is a data structure designed for inter-procedural
    optimization.  It represents a multi-graph where nodes are functions
    (symbols within the symbol table) and edges are call sites. */
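
/* For orientation, an illustrative sketch (user code, not a routine of this
   file): a typical walk over the graph uses the FOR_EACH_DEFINED_FUNCTION
   iterator from cgraph.h together with the callees/next_callee edge lists,
   roughly as follows:

     cgraph_node *node;
     FOR_EACH_DEFINED_FUNCTION (node)
       for (cgraph_edge *e = node->callees; e; e = e->next_callee)
	 fprintf (stderr, "%s -> %s\n",
		  node->dump_name (), e->callee->dump_name ());

   Indirect calls are kept on the separate node->indirect_calls list.  */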
26 
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "backend.h"
31 #include "target.h"
32 #include "rtl.h"
33 #include "tree.h"
34 #include "gimple.h"
35 #include "predict.h"
36 #include "alloc-pool.h"
37 #include "gimple-ssa.h"
38 #include "cgraph.h"
39 #include "lto-streamer.h"
40 #include "fold-const.h"
41 #include "varasm.h"
42 #include "calls.h"
43 #include "print-tree.h"
44 #include "langhooks.h"
45 #include "intl.h"
46 #include "tree-eh.h"
47 #include "gimple-iterator.h"
48 #include "tree-cfg.h"
49 #include "tree-ssa.h"
50 #include "value-prof.h"
51 #include "ipa-utils.h"
52 #include "symbol-summary.h"
53 #include "tree-vrp.h"
54 #include "ipa-prop.h"
55 #include "ipa-fnsummary.h"
56 #include "cfgloop.h"
57 #include "gimple-pretty-print.h"
58 #include "tree-dfa.h"
59 #include "profile.h"
60 #include "params.h"
61 #include "tree-chkp.h"
62 #include "context.h"
63 #include "gimplify.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 
67 /* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this.  */
68 #include "tree-pass.h"
69 
70 /* Queue of cgraph nodes scheduled to be lowered.  */
71 symtab_node *x_cgraph_nodes_queue;
72 #define cgraph_nodes_queue ((cgraph_node *)x_cgraph_nodes_queue)
73 
74 /* Symbol table global context.  */
75 symbol_table *symtab;
76 
77 /* List of hooks triggered on cgraph_edge events.  */
78 struct cgraph_edge_hook_list {
79   cgraph_edge_hook hook;
80   void *data;
81   struct cgraph_edge_hook_list *next;
82 };
83 
84 /* List of hooks triggered on cgraph_node events.  */
85 struct cgraph_node_hook_list {
86   cgraph_node_hook hook;
87   void *data;
88   struct cgraph_node_hook_list *next;
89 };
90 
91 /* List of hooks triggered on events involving two cgraph_edges.  */
92 struct cgraph_2edge_hook_list {
93   cgraph_2edge_hook hook;
94   void *data;
95   struct cgraph_2edge_hook_list *next;
96 };
97 
98 /* List of hooks triggered on events involving two cgraph_nodes.  */
99 struct cgraph_2node_hook_list {
100   cgraph_2node_hook hook;
101   void *data;
102   struct cgraph_2node_hook_list *next;
103 };
104 
105 /* Hash descriptor for cgraph_function_version_info.  */
106 
107 struct function_version_hasher : ggc_ptr_hash<cgraph_function_version_info>
108 {
109   static hashval_t hash (cgraph_function_version_info *);
110   static bool equal (cgraph_function_version_info *,
111 		     cgraph_function_version_info *);
112 };
113 
/* Map a cgraph_node to cgraph_function_version_info using this htab.
   The cgraph_function_version_info has a THIS_NODE field that is the
   corresponding cgraph_node.  */
117 
118 static GTY(()) hash_table<function_version_hasher> *cgraph_fnver_htab = NULL;
119 
120 /* Hash function for cgraph_fnver_htab.  */
121 hashval_t
function_version_hasher::hash (cgraph_function_version_info *ptr)
123 {
124   int uid = ptr->this_node->uid;
125   return (hashval_t)(uid);
126 }
127 
/* Equality function for cgraph_fnver_htab.  */
129 bool
function_version_hasher::equal (cgraph_function_version_info *n1,
131 			       	cgraph_function_version_info *n2)
132 {
133   return n1->this_node->uid == n2->this_node->uid;
134 }
135 
136 /* Mark as GC root all allocated nodes.  */
137 static GTY(()) struct cgraph_function_version_info *
138   version_info_node = NULL;
139 
140 /* Return true if NODE's address can be compared.  */
141 
142 bool
symtab_node::address_can_be_compared_p ()
144 {
145   /* Address of virtual tables and functions is never compared.  */
146   if (DECL_VIRTUAL_P (decl))
147     return false;
148   /* Address of C++ cdtors is never compared.  */
149   if (is_a <cgraph_node *> (this)
150       && (DECL_CXX_CONSTRUCTOR_P (decl)
151 	  || DECL_CXX_DESTRUCTOR_P (decl)))
152     return false;
  /* Addresses of constant pool symbols are never compared.
     flag_merge_constants permits us to assume the same for readonly vars.  */
155   if (is_a <varpool_node *> (this)
156       && (DECL_IN_CONSTANT_POOL (decl)
157 	  || (flag_merge_constants >= 2
158 	      && TREE_READONLY (decl) && !TREE_THIS_VOLATILE (decl))))
159     return false;
160   return true;
161 }
162 
163 /* Get the cgraph_function_version_info node corresponding to node.  */
164 cgraph_function_version_info *
cgraph_node::function_version (void)
166 {
167   cgraph_function_version_info key;
168   key.this_node = this;
169 
170   if (cgraph_fnver_htab == NULL)
171     return NULL;
172 
173   return cgraph_fnver_htab->find (&key);
174 }
175 
176 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
177    corresponding to cgraph_node NODE.  */
178 cgraph_function_version_info *
cgraph_node::insert_new_function_version (void)
180 {
181   version_info_node = NULL;
182   version_info_node = ggc_cleared_alloc<cgraph_function_version_info> ();
183   version_info_node->this_node = this;
184 
185   if (cgraph_fnver_htab == NULL)
186     cgraph_fnver_htab = hash_table<function_version_hasher>::create_ggc (2);
187 
188   *cgraph_fnver_htab->find_slot (version_info_node, INSERT)
189     = version_info_node;
190   return version_info_node;
191 }
192 
193 /* Remove the cgraph_function_version_info node given by DECL_V.  */
194 static void
delete_function_version (cgraph_function_version_info *decl_v)
196 {
197   if (decl_v == NULL)
198     return;
199 
200   if (decl_v->prev != NULL)
201     decl_v->prev->next = decl_v->next;
202 
203   if (decl_v->next != NULL)
204     decl_v->next->prev = decl_v->prev;
205 
206   if (cgraph_fnver_htab != NULL)
207     cgraph_fnver_htab->remove_elt (decl_v);
208 }
209 
210 /* Remove the cgraph_function_version_info and cgraph_node for DECL.  This
211    DECL is a duplicate declaration.  */
212 void
cgraph_node::delete_function_version_by_decl (tree decl)
214 {
215   cgraph_node *decl_node = cgraph_node::get (decl);
216 
217   if (decl_node == NULL)
218     return;
219 
220   delete_function_version (decl_node->function_version ());
221 
222   decl_node->remove ();
223 }
224 
225 /* Record that DECL1 and DECL2 are semantically identical function
226    versions.  */
227 void
cgraph_node::record_function_versions (tree decl1, tree decl2)
229 {
230   cgraph_node *decl1_node = cgraph_node::get_create (decl1);
231   cgraph_node *decl2_node = cgraph_node::get_create (decl2);
232   cgraph_function_version_info *decl1_v = NULL;
233   cgraph_function_version_info *decl2_v = NULL;
234   cgraph_function_version_info *before;
235   cgraph_function_version_info *after;
236 
237   gcc_assert (decl1_node != NULL && decl2_node != NULL);
238   decl1_v = decl1_node->function_version ();
239   decl2_v = decl2_node->function_version ();
240 
241   if (decl1_v != NULL && decl2_v != NULL)
242     return;
243 
244   if (decl1_v == NULL)
245     decl1_v = decl1_node->insert_new_function_version ();
246 
247   if (decl2_v == NULL)
248     decl2_v = decl2_node->insert_new_function_version ();
249 
250   /* Chain decl2_v and decl1_v.  All semantically identical versions
251      will be chained together.  */
252 
253   before = decl1_v;
254   after = decl2_v;
255 
256   while (before->next != NULL)
257     before = before->next;
258 
259   while (after->prev != NULL)
    after = after->prev;
261 
262   before->next = after;
263   after->prev = before;
264 }
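
/* An illustrative example (an assumption about typical front-end usage, not
   code from this file): record_function_versions is typically reached for
   C++ function multiversioning on targets that support it, e.g.

     __attribute__ ((target ("default"))) int foo (void) { return 0; }
     __attribute__ ((target ("avx2")))    int foo (void) { return 1; }

   so that all versions of foo end up chained on one
   cgraph_function_version_info list.  */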
265 
/* Initialize the callgraph dump files.  */
267 
268 void
symbol_table::initialize (void)
270 {
271   if (!dump_file)
272     dump_file = dump_begin (TDI_cgraph, NULL);
273 
274   if (!ipa_clones_dump_file)
275     ipa_clones_dump_file = dump_begin (TDI_clones, NULL);
276 }
277 
278 /* Allocate new callgraph node and insert it into basic data structures.  */
279 
280 cgraph_node *
symbol_table::create_empty (void)
282 {
283   cgraph_node *node = allocate_cgraph_symbol ();
284 
285   node->type = SYMTAB_FUNCTION;
286   node->frequency = NODE_FREQUENCY_NORMAL;
287   node->count_materialization_scale = REG_BR_PROB_BASE;
288   cgraph_count++;
289 
290   return node;
291 }
292 
293 /* Register HOOK to be called with DATA on each removed edge.  */
294 cgraph_edge_hook_list *
symbol_table::add_edge_removal_hook (cgraph_edge_hook hook, void *data)
296 {
297   cgraph_edge_hook_list *entry;
298   cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
299 
300   entry = (cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
301   entry->hook = hook;
302   entry->data = data;
303   entry->next = NULL;
304   while (*ptr)
305     ptr = &(*ptr)->next;
306   *ptr = entry;
307   return entry;
308 }
309 
310 /* Remove ENTRY from the list of hooks called on removing edges.  */
311 void
symbol_table::remove_edge_removal_hook (cgraph_edge_hook_list *entry)
313 {
314   cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
315 
316   while (*ptr != entry)
317     ptr = &(*ptr)->next;
318   *ptr = entry->next;
319   free (entry);
320 }
321 
322 /* Call all edge removal hooks.  */
323 void
symbol_table::call_edge_removal_hooks (cgraph_edge *e)
325 {
326   cgraph_edge_hook_list *entry = m_first_edge_removal_hook;
327   while (entry)
328   {
329     entry->hook (e, entry->data);
330     entry = entry->next;
331   }
332 }
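
/* A minimal usage sketch (hypothetical client code; IPA passes such as
   ipa-prop register their hooks in this way):

     static void
     my_edge_removal_note (cgraph_edge *e, void *data)
     {
       ... react to edge E being removed; DATA was supplied at
	   registration time ...
     }

     cgraph_edge_hook_list *h
       = symtab->add_edge_removal_hook (my_edge_removal_note, NULL);
     ...
     symtab->remove_edge_removal_hook (h);
   */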
333 
334 /* Register HOOK to be called with DATA on each removed node.  */
335 cgraph_node_hook_list *
symbol_table::add_cgraph_removal_hook (cgraph_node_hook hook, void *data)
337 {
338   cgraph_node_hook_list *entry;
339   cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
340 
341   entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
342   entry->hook = hook;
343   entry->data = data;
344   entry->next = NULL;
345   while (*ptr)
346     ptr = &(*ptr)->next;
347   *ptr = entry;
348   return entry;
349 }
350 
351 /* Remove ENTRY from the list of hooks called on removing nodes.  */
352 void
symbol_table::remove_cgraph_removal_hook (cgraph_node_hook_list *entry)
354 {
355   cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
356 
357   while (*ptr != entry)
358     ptr = &(*ptr)->next;
359   *ptr = entry->next;
360   free (entry);
361 }
362 
363 /* Call all node removal hooks.  */
364 void
symbol_table::call_cgraph_removal_hooks (cgraph_node *node)
366 {
367   cgraph_node_hook_list *entry = m_first_cgraph_removal_hook;
368   while (entry)
369   {
370     entry->hook (node, entry->data);
371     entry = entry->next;
372   }
373 }
374 
/* Call all node insertion hooks.  */
376 void
symbol_table::call_cgraph_insertion_hooks (cgraph_node *node)
378 {
379   cgraph_node_hook_list *entry = m_first_cgraph_insertion_hook;
380   while (entry)
381   {
382     entry->hook (node, entry->data);
383     entry = entry->next;
384   }
385 }
386 
387 
388 /* Register HOOK to be called with DATA on each inserted node.  */
389 cgraph_node_hook_list *
symbol_table::add_cgraph_insertion_hook (cgraph_node_hook hook, void *data)
391 {
392   cgraph_node_hook_list *entry;
393   cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
394 
395   entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
396   entry->hook = hook;
397   entry->data = data;
398   entry->next = NULL;
399   while (*ptr)
400     ptr = &(*ptr)->next;
401   *ptr = entry;
402   return entry;
403 }
404 
405 /* Remove ENTRY from the list of hooks called on inserted nodes.  */
406 void
symbol_table::remove_cgraph_insertion_hook (cgraph_node_hook_list *entry)
408 {
409   cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
410 
411   while (*ptr != entry)
412     ptr = &(*ptr)->next;
413   *ptr = entry->next;
414   free (entry);
415 }
416 
417 /* Register HOOK to be called with DATA on each duplicated edge.  */
418 cgraph_2edge_hook_list *
symbol_table::add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
420 {
421   cgraph_2edge_hook_list *entry;
422   cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
423 
424   entry = (cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
425   entry->hook = hook;
426   entry->data = data;
427   entry->next = NULL;
428   while (*ptr)
429     ptr = &(*ptr)->next;
430   *ptr = entry;
431   return entry;
432 }
433 
434 /* Remove ENTRY from the list of hooks called on duplicating edges.  */
435 void
symbol_table::remove_edge_duplication_hook (cgraph_2edge_hook_list *entry)
437 {
438   cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
439 
440   while (*ptr != entry)
441     ptr = &(*ptr)->next;
442   *ptr = entry->next;
443   free (entry);
444 }
445 
446 /* Call all edge duplication hooks.  */
447 void
symbol_table::call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2)
449 {
450   cgraph_2edge_hook_list *entry = m_first_edge_duplicated_hook;
451   while (entry)
452   {
453     entry->hook (cs1, cs2, entry->data);
454     entry = entry->next;
455   }
456 }
457 
458 /* Register HOOK to be called with DATA on each duplicated node.  */
459 cgraph_2node_hook_list *
symbol_table::add_cgraph_duplication_hook (cgraph_2node_hook hook, void *data)
461 {
462   cgraph_2node_hook_list *entry;
463   cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
464 
465   entry = (cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
466   entry->hook = hook;
467   entry->data = data;
468   entry->next = NULL;
469   while (*ptr)
470     ptr = &(*ptr)->next;
471   *ptr = entry;
472   return entry;
473 }
474 
475 /* Remove ENTRY from the list of hooks called on duplicating nodes.  */
476 void
symbol_table::remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry)
478 {
479   cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
480 
481   while (*ptr != entry)
482     ptr = &(*ptr)->next;
483   *ptr = entry->next;
484   free (entry);
485 }
486 
487 /* Call all node duplication hooks.  */
488 void
symbol_table::call_cgraph_duplication_hooks (cgraph_node *node,
490 					     cgraph_node *node2)
491 {
492   cgraph_2node_hook_list *entry = m_first_cgraph_duplicated_hook;
493   while (entry)
494   {
495     entry->hook (node, node2, entry->data);
496     entry = entry->next;
497   }
498 }
499 
500 /* Return cgraph node assigned to DECL.  Create new one when needed.  */
501 
502 cgraph_node *
cgraph_node::create (tree decl)
504 {
505   cgraph_node *node = symtab->create_empty ();
506   gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
507 
508   node->decl = decl;
509 
510   node->count = profile_count::uninitialized ();
511 
512   if ((flag_openacc || flag_openmp)
513       && lookup_attribute ("omp declare target", DECL_ATTRIBUTES (decl)))
514     {
515       node->offloadable = 1;
516       if (ENABLE_OFFLOADING)
517 	g->have_offload = true;
518     }
519 
520   if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
521     node->ifunc_resolver = true;
522 
523   node->register_symbol ();
524 
525   if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
526     {
527       node->origin = cgraph_node::get_create (DECL_CONTEXT (decl));
528       node->next_nested = node->origin->nested;
529       node->origin->nested = node;
530     }
531   return node;
532 }
533 
534 /* Try to find a call graph node for declaration DECL and if it does not exist
535    or if it corresponds to an inline clone, create a new one.  */
536 
537 cgraph_node *
cgraph_node::get_create (tree decl)
539 {
540   cgraph_node *first_clone = cgraph_node::get (decl);
541 
542   if (first_clone && !first_clone->global.inlined_to)
543     return first_clone;
544 
545   cgraph_node *node = cgraph_node::create (decl);
546   if (first_clone)
547     {
548       first_clone->clone_of = node;
549       node->clones = first_clone;
550       symtab->symtab_prevail_in_asm_name_hash (node);
551       node->decl->decl_with_vis.symtab_node = node;
552       if (dump_file)
553 	fprintf (dump_file, "Introduced new external node "
554 		 "(%s) and turned into root of the clone tree.\n",
555 		 node->dump_name ());
556     }
557   else if (dump_file)
558     fprintf (dump_file, "Introduced new external node "
559 	     "(%s).\n", node->dump_name ());
560   return node;
561 }
562 
/* Mark ALIAS as an alias to TARGET, which is either a FUNCTION_DECL or an
   assembler name (IDENTIFIER_NODE).  Return the cgraph node for the alias.  */
565 
566 cgraph_node *
cgraph_node::create_alias (tree alias, tree target)
568 {
569   cgraph_node *alias_node;
570 
571   gcc_assert (TREE_CODE (target) == FUNCTION_DECL
572 	      || TREE_CODE (target) == IDENTIFIER_NODE);
573   gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
574   alias_node = cgraph_node::get_create (alias);
575   gcc_assert (!alias_node->definition);
576   alias_node->alias_target = target;
577   alias_node->definition = true;
578   alias_node->alias = true;
579   if (lookup_attribute ("weakref", DECL_ATTRIBUTES (alias)) != NULL)
580     alias_node->transparent_alias = alias_node->weakref = true;
581   if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (alias)))
582     alias_node->ifunc_resolver = true;
583   return alias_node;
584 }
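
/* For illustration (an assumption about typical front-end usage, not code
   from this file): a translation unit containing

     void target_fn (void) { }
     void alias_fn (void) __attribute__ ((alias ("target_fn")));

   eventually reaches create_alias with ALIAS being alias_fn's FUNCTION_DECL
   and TARGET identifying target_fn.  */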
585 
586 /* Attempt to mark ALIAS as an alias to DECL.  Return alias node if successful
587    and NULL otherwise.
588    Same body aliases are output whenever the body of DECL is output,
589    and cgraph_node::get (ALIAS) transparently returns
590    cgraph_node::get (DECL).  */
591 
592 cgraph_node *
cgraph_node::create_same_body_alias (tree alias, tree decl)
594 {
595   cgraph_node *n;
596 
597   /* If aliases aren't supported by the assembler, fail.  */
598   if (!TARGET_SUPPORTS_ALIASES)
599     return NULL;
600 
601   /* Langhooks can create same body aliases of symbols not defined.
602      Those are useless. Drop them on the floor.  */
603   if (symtab->global_info_ready)
604     return NULL;
605 
606   n = cgraph_node::create_alias (alias, decl);
607   n->cpp_implicit_alias = true;
608   if (symtab->cpp_implicit_aliases_done)
609     n->resolve_alias (cgraph_node::get (decl));
610   return n;
611 }
612 
/* Add a thunk alias into the callgraph.  The alias declaration is ALIAS and
   it aliases DECL with adjustments made to the first parameter.
   See comments in struct cgraph_thunk_info for details on the parameters.  */
616 
617 cgraph_node *
cgraph_node::create_thunk (tree alias, tree, bool this_adjusting,
619 			   HOST_WIDE_INT fixed_offset,
620 			   HOST_WIDE_INT virtual_value,
621 			   tree virtual_offset,
622 			   tree real_alias)
623 {
624   cgraph_node *node;
625 
626   node = cgraph_node::get (alias);
627   if (node)
628     node->reset ();
629   else
630     node = cgraph_node::create (alias);
631 
  /* Make sure that VIRTUAL_OFFSET is in sync with VIRTUAL_VALUE.  */
633   gcc_checking_assert (virtual_offset
634 		       ? virtual_value == wi::to_wide (virtual_offset)
635 		       : virtual_value == 0);
636 
637   node->thunk.fixed_offset = fixed_offset;
638   node->thunk.virtual_value = virtual_value;
639   node->thunk.alias = real_alias;
640   node->thunk.this_adjusting = this_adjusting;
641   node->thunk.virtual_offset_p = virtual_offset != NULL;
642   node->thunk.thunk_p = true;
643   node->definition = true;
644 
645   return node;
646 }
647 
648 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
649    Return NULL if there's no such node.  */
650 
651 cgraph_node *
cgraph_node::get_for_asmname (tree asmname)
653 {
654   /* We do not want to look at inline clones.  */
655   for (symtab_node *node = symtab_node::get_for_asmname (asmname);
656        node;
657        node = node->next_sharing_asm_name)
658     {
659       cgraph_node *cn = dyn_cast <cgraph_node *> (node);
660       if (cn && !cn->global.inlined_to)
661 	return cn;
662     }
663   return NULL;
664 }
665 
666 /* Returns a hash value for X (which really is a cgraph_edge).  */
667 
668 hashval_t
cgraph_edge_hasher::hash (cgraph_edge *e)
670 {
671   /* This is a really poor hash function, but it is what htab_hash_pointer
672      uses.  */
673   return (hashval_t) ((intptr_t)e->call_stmt >> 3);
674 }
675 
/* Returns a hash value for the call statement CALL_STMT.  */
677 
678 hashval_t
cgraph_edge_hasher::hash (gimple *call_stmt)
680 {
681   /* This is a really poor hash function, but it is what htab_hash_pointer
682      uses.  */
683   return (hashval_t) ((intptr_t)call_stmt >> 3);
684 }
685 
686 /* Return nonzero if the call_stmt of cgraph_edge X is stmt *Y.  */
687 
688 inline bool
cgraph_edge_hasher::equal (cgraph_edge *x, gimple *y)
690 {
691   return x->call_stmt == y;
692 }
693 
/* Update the entry for call graph edge E in the call site hash of its
   caller.  */
695 
696 static inline void
cgraph_update_edge_in_call_site_hash (cgraph_edge *e)
698 {
699   gimple *call = e->call_stmt;
700   *e->caller->call_site_hash->find_slot_with_hash
701       (call, cgraph_edge_hasher::hash (call), INSERT) = e;
702 }
703 
704 /* Add call graph edge E to call site hash of its caller.  */
705 
706 static inline void
cgraph_add_edge_to_call_site_hash (cgraph_edge *e)
708 {
709   /* There are two speculative edges for every statement (one direct,
710      one indirect); always hash the direct one.  */
711   if (e->speculative && e->indirect_unknown_callee)
712     return;
713   cgraph_edge **slot = e->caller->call_site_hash->find_slot_with_hash
714       (e->call_stmt, cgraph_edge_hasher::hash (e->call_stmt), INSERT);
715   if (*slot)
716     {
717       gcc_assert (((cgraph_edge *)*slot)->speculative);
718       if (e->callee)
719 	*slot = e;
720       return;
721     }
722   gcc_assert (!*slot || e->speculative);
723   *slot = e;
724 }
725 
726 /* Return the callgraph edge representing the GIMPLE_CALL statement
727    CALL_STMT.  */
728 
729 cgraph_edge *
cgraph_node::get_edge (gimple *call_stmt)
731 {
732   cgraph_edge *e, *e2;
733   int n = 0;
734 
735   if (call_site_hash)
736     return call_site_hash->find_with_hash
737 	(call_stmt, cgraph_edge_hasher::hash (call_stmt));
738 
  /* This loop may turn out to be a performance problem.  In that case,
     adding hash tables to call nodes with very many edges is probably the
     best solution.  It is not a good idea to add a pointer into the
     CALL_EXPR itself, because we want to allow multiple cgraph nodes
     representing different clones of the same body before the body is
     actually cloned.  */
744   for (e = callees; e; e = e->next_callee)
745     {
746       if (e->call_stmt == call_stmt)
747 	break;
748       n++;
749     }
750 
751   if (!e)
752     for (e = indirect_calls; e; e = e->next_callee)
753       {
754 	if (e->call_stmt == call_stmt)
755 	  break;
756 	n++;
757       }
758 
759   if (n > 100)
760     {
761       call_site_hash = hash_table<cgraph_edge_hasher>::create_ggc (120);
762       for (e2 = callees; e2; e2 = e2->next_callee)
763 	cgraph_add_edge_to_call_site_hash (e2);
764       for (e2 = indirect_calls; e2; e2 = e2->next_callee)
765 	cgraph_add_edge_to_call_site_hash (e2);
766     }
767 
768   return e;
769 }
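
/* Note (descriptive comment added for clarity): the call site hash above is
   only created lazily, once a node has more than 100 outgoing edges; smaller
   nodes keep using the linear walk over the callees and indirect_calls
   lists.  */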
770 
771 
/* Change the call_stmt field of the edge to NEW_STMT.
   If UPDATE_SPECULATIVE and E is any component of a speculative
   edge, then update all components.  */
775 
776 void
cgraph_edge::set_call_stmt (gcall *new_stmt, bool update_speculative)
778 {
779   tree decl;
780 
  /* Speculative edges have three components; update all of them
     when asked to.  */
783   if (update_speculative && speculative)
784     {
785       cgraph_edge *direct, *indirect;
786       ipa_ref *ref;
787 
788       speculative_call_info (direct, indirect, ref);
789       direct->set_call_stmt (new_stmt, false);
790       indirect->set_call_stmt (new_stmt, false);
791       ref->stmt = new_stmt;
792       return;
793     }
794 
795   /* Only direct speculative edges go to call_site_hash.  */
796   if (caller->call_site_hash
797       && (!speculative || !indirect_unknown_callee))
798     {
799       caller->call_site_hash->remove_elt_with_hash
800 	(call_stmt, cgraph_edge_hasher::hash (call_stmt));
801     }
802 
803   cgraph_edge *e = this;
804 
805   call_stmt = new_stmt;
806   if (indirect_unknown_callee
807       && (decl = gimple_call_fndecl (new_stmt)))
808     {
809       /* Constant propagation (and possibly also inlining?) can turn an
810 	 indirect call into a direct one.  */
811       cgraph_node *new_callee = cgraph_node::get (decl);
812 
813       gcc_checking_assert (new_callee);
814       e = make_direct (new_callee);
815     }
816 
817   push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
818   e->can_throw_external = stmt_can_throw_external (new_stmt);
819   pop_cfun ();
820   if (e->caller->call_site_hash)
821     cgraph_add_edge_to_call_site_hash (e);
822 }
823 
824 /* Allocate a cgraph_edge structure and fill it with data according to the
825    parameters of which only CALLEE can be NULL (when creating an indirect call
826    edge).  */
827 
828 cgraph_edge *
symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
830 			   gcall *call_stmt, profile_count count,
831 			   bool indir_unknown_callee)
832 {
833   cgraph_edge *edge;
834 
835   /* LTO does not actually have access to the call_stmt since these
836      have not been loaded yet.  */
837   if (call_stmt)
838     {
839       /* This is a rather expensive check possibly triggering
840 	 construction of call stmt hashtable.  */
841       cgraph_edge *e;
842       gcc_checking_assert (!(e = caller->get_edge (call_stmt))
843 			   || e->speculative);
844 
845       gcc_assert (is_gimple_call (call_stmt));
846     }
847 
848   if (free_edges)
849     {
850       edge = free_edges;
851       free_edges = NEXT_FREE_EDGE (edge);
852     }
853   else
854     {
855       edge = ggc_alloc<cgraph_edge> ();
856       edge->uid = edges_max_uid++;
857     }
858 
859   edges_count++;
860 
861   edge->aux = NULL;
862   edge->caller = caller;
863   edge->callee = callee;
864   edge->prev_caller = NULL;
865   edge->next_caller = NULL;
866   edge->prev_callee = NULL;
867   edge->next_callee = NULL;
868   edge->lto_stmt_uid = 0;
869 
870   edge->count = count;
871 
872   edge->call_stmt = call_stmt;
873   push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
874   edge->can_throw_external
875     = call_stmt ? stmt_can_throw_external (call_stmt) : false;
876   pop_cfun ();
877   if (call_stmt
878       && callee && callee->decl
879       && !gimple_check_call_matching_types (call_stmt, callee->decl,
880 					    false))
881     {
882       edge->inline_failed = CIF_MISMATCHED_ARGUMENTS;
883       edge->call_stmt_cannot_inline_p = true;
884     }
885   else
886     {
887       edge->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
888       edge->call_stmt_cannot_inline_p = false;
889     }
890 
891   edge->indirect_info = NULL;
892   edge->indirect_inlining_edge = 0;
893   edge->speculative = false;
894   edge->indirect_unknown_callee = indir_unknown_callee;
895   if (opt_for_fn (edge->caller->decl, flag_devirtualize)
896       && call_stmt && DECL_STRUCT_FUNCTION (caller->decl))
897     edge->in_polymorphic_cdtor
898       = decl_maybe_in_construction_p (NULL, NULL, call_stmt,
899 				      caller->decl);
900   else
901     edge->in_polymorphic_cdtor = caller->thunk.thunk_p;
902   if (call_stmt && caller->call_site_hash)
903     cgraph_add_edge_to_call_site_hash (edge);
904 
905   return edge;
906 }
907 
908 /* Create edge from a given function to CALLEE in the cgraph.  */
909 
910 cgraph_edge *
cgraph_node::create_edge (cgraph_node *callee,
912 			  gcall *call_stmt, profile_count count)
913 {
914   cgraph_edge *edge = symtab->create_edge (this, callee, call_stmt, count,
915 					   false);
916 
917   initialize_inline_failed (edge);
918 
919   edge->next_caller = callee->callers;
920   if (callee->callers)
921     callee->callers->prev_caller = edge;
922   edge->next_callee = callees;
923   if (callees)
924     callees->prev_callee = edge;
925   callees = edge;
926   callee->callers = edge;
927 
928   return edge;
929 }
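
/* A usage sketch (hypothetical, mirroring how the call graph builder creates
   direct edges; the local variable names are illustrative only):

     gcall *stmt = ...;		      a GIMPLE_CALL found in CALLER's body
     tree fndecl = gimple_call_fndecl (stmt);
     caller->create_edge (cgraph_node::get_create (fndecl), stmt,
			  gimple_bb (stmt)->count);
*/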
930 
931 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
932 
933 cgraph_indirect_call_info *
cgraph_allocate_init_indirect_info (void)
935 {
936   cgraph_indirect_call_info *ii;
937 
938   ii = ggc_cleared_alloc<cgraph_indirect_call_info> ();
939   ii->param_index = -1;
940   return ii;
941 }
942 
943 /* Create an indirect edge with a yet-undetermined callee where the call
944    statement destination is a formal parameter of the caller with index
945    PARAM_INDEX. */
946 
947 cgraph_edge *
cgraph_node::create_indirect_edge (gcall *call_stmt, int ecf_flags,
949 				   profile_count count,
950 				   bool compute_indirect_info)
951 {
952   cgraph_edge *edge = symtab->create_edge (this, NULL, call_stmt,
953 							    count, true);
954   tree target;
955 
956   initialize_inline_failed (edge);
957 
958   edge->indirect_info = cgraph_allocate_init_indirect_info ();
959   edge->indirect_info->ecf_flags = ecf_flags;
960   edge->indirect_info->vptr_changed = true;
961 
962   /* Record polymorphic call info.  */
963   if (compute_indirect_info
964       && call_stmt
965       && (target = gimple_call_fn (call_stmt))
966       && virtual_method_call_p (target))
967     {
968       ipa_polymorphic_call_context context (decl, target, call_stmt);
969 
970       /* Only record types can have virtual calls.  */
971       edge->indirect_info->polymorphic = true;
972       edge->indirect_info->param_index = -1;
973       edge->indirect_info->otr_token
974 	 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
975       edge->indirect_info->otr_type = obj_type_ref_class (target);
976       gcc_assert (TREE_CODE (edge->indirect_info->otr_type) == RECORD_TYPE);
977       edge->indirect_info->context = context;
978     }
979 
980   edge->next_callee = indirect_calls;
981   if (indirect_calls)
982     indirect_calls->prev_callee = edge;
983   indirect_calls = edge;
984 
985   return edge;
986 }
987 
988 /* Remove the edge from the list of the callees of the caller.  */
989 
990 void
cgraph_edge::remove_caller (void)
992 {
993   if (prev_callee)
994     prev_callee->next_callee = next_callee;
995   if (next_callee)
996     next_callee->prev_callee = prev_callee;
997   if (!prev_callee)
998     {
999       if (indirect_unknown_callee)
1000 	caller->indirect_calls = next_callee;
1001       else
1002 	caller->callees = next_callee;
1003     }
1004   if (caller->call_site_hash)
1005     caller->call_site_hash->remove_elt_with_hash
1006 	(call_stmt, cgraph_edge_hasher::hash (call_stmt));
1007 }
1008 
1009 /* Put the edge onto the free list.  */
1010 
1011 void
symbol_table::free_edge (cgraph_edge *e)
1013 {
1014   int uid = e->uid;
1015 
1016   if (e->indirect_info)
1017     ggc_free (e->indirect_info);
1018 
1019   /* Clear out the edge so we do not dangle pointers.  */
1020   memset (e, 0, sizeof (*e));
1021   e->uid = uid;
1022   NEXT_FREE_EDGE (e) = free_edges;
1023   free_edges = e;
1024   edges_count--;
1025 }
1026 
1027 /* Remove the edge in the cgraph.  */
1028 
1029 void
cgraph_edge::remove (void)
1031 {
1032   /* Call all edge removal hooks.  */
1033   symtab->call_edge_removal_hooks (this);
1034 
1035   if (!indirect_unknown_callee)
1036     /* Remove from callers list of the callee.  */
1037     remove_callee ();
1038 
1039   /* Remove from callees list of the callers.  */
1040   remove_caller ();
1041 
1042   /* Put the edge onto the free list.  */
1043   symtab->free_edge (this);
1044 }
1045 
/* Turn the edge into a speculative call to N2.  Update
   the profile so the direct call is taken DIRECT_COUNT times.

   At clone materialization time, the indirect call E will
   be expanded as:

   if (call_dest == N2)
     n2 ();
   else
     call call_dest

   At this time the function just creates the direct call,
   the reference representing the if conditional and attaches
   them all to the original indirect call statement.

   Return the direct edge created.  */
1063 
1064 cgraph_edge *
cgraph_edge::make_speculative (cgraph_node *n2, profile_count direct_count)
1066 {
1067   cgraph_node *n = caller;
1068   ipa_ref *ref = NULL;
1069   cgraph_edge *e2;
1070 
1071   if (dump_file)
1072     fprintf (dump_file, "Indirect call -> speculative call %s => %s\n",
1073 	     n->dump_name (), n2->dump_name ());
1074   speculative = true;
1075   e2 = n->create_edge (n2, call_stmt, direct_count);
1076   initialize_inline_failed (e2);
1077   e2->speculative = true;
1078   if (TREE_NOTHROW (n2->decl))
1079     e2->can_throw_external = false;
1080   else
1081     e2->can_throw_external = can_throw_external;
1082   e2->lto_stmt_uid = lto_stmt_uid;
1083   e2->in_polymorphic_cdtor = in_polymorphic_cdtor;
1084   count -= e2->count;
1085   symtab->call_edge_duplication_hooks (this, e2);
1086   ref = n->create_reference (n2, IPA_REF_ADDR, call_stmt);
1087   ref->lto_stmt_uid = lto_stmt_uid;
1088   ref->speculative = speculative;
1089   n2->mark_address_taken ();
1090   return e2;
1091 }
1092 
/* A speculative call consists of three components:
   1) an indirect edge representing the original call
   2) a direct edge representing the new call
   3) an ADDR_EXPR reference representing the speculative check.
   All three components are attached to a single statement (the indirect
   call) and if one of them exists, all of them must exist.

   Given a speculative call edge, return all three components.  */
1102 
1103 void
cgraph_edge::speculative_call_info (cgraph_edge *&direct,
1105 				    cgraph_edge *&indirect,
1106 				    ipa_ref *&reference)
1107 {
1108   ipa_ref *ref;
1109   int i;
1110   cgraph_edge *e2;
1111   cgraph_edge *e = this;
1112 
1113   if (!e->indirect_unknown_callee)
1114     for (e2 = e->caller->indirect_calls;
1115 	 e2->call_stmt != e->call_stmt || e2->lto_stmt_uid != e->lto_stmt_uid;
1116 	 e2 = e2->next_callee)
1117       ;
1118   else
1119     {
1120       e2 = e;
1121       /* We can take advantage of the call stmt hash.  */
1122       if (e2->call_stmt)
1123 	{
1124 	  e = e->caller->get_edge (e2->call_stmt);
1125 	  gcc_assert (e->speculative && !e->indirect_unknown_callee);
1126 	}
1127       else
1128 	for (e = e->caller->callees;
1129 	     e2->call_stmt != e->call_stmt
1130 	     || e2->lto_stmt_uid != e->lto_stmt_uid;
1131 	     e = e->next_callee)
1132 	  ;
1133     }
1134   gcc_assert (e->speculative && e2->speculative);
1135   direct = e;
1136   indirect = e2;
1137 
1138   reference = NULL;
1139   for (i = 0; e->caller->iterate_reference (i, ref); i++)
1140     if (ref->speculative
1141 	&& ((ref->stmt && ref->stmt == e->call_stmt)
1142 	    || (!ref->stmt && ref->lto_stmt_uid == e->lto_stmt_uid)))
1143       {
1144 	reference = ref;
1145 	break;
1146       }
1147 
  /* A speculative edge always consists of all three components: the direct
     edge, the indirect edge and the reference.  */
1150 
1151   gcc_assert (e && e2 && ref);
1152 }
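
/* A typical use of the above (a sketch mirroring the callers within this
   file):

     cgraph_edge *direct, *indirect;
     ipa_ref *ref;
     edge->speculative_call_info (direct, indirect, ref);

   after which DIRECT, INDIRECT and REF are the three components described
   in the comment above speculative_call_info.  */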
1153 
/* The speculative call edge turned out to be a direct call to CALLEE_DECL.
   Remove the speculative call sequence and return the edge representing the
   call.  It is up to the caller to redirect the call as appropriate.  */
1157 
1158 cgraph_edge *
cgraph_edge::resolve_speculation (tree callee_decl)
1160 {
1161   cgraph_edge *edge = this;
1162   cgraph_edge *e2;
1163   ipa_ref *ref;
1164 
1165   gcc_assert (edge->speculative);
1166   edge->speculative_call_info (e2, edge, ref);
1167   if (!callee_decl
1168       || !ref->referred->semantically_equivalent_p
1169 	   (symtab_node::get (callee_decl)))
1170     {
1171       if (dump_file)
1172 	{
1173 	  if (callee_decl)
1174 	    {
1175 	      fprintf (dump_file, "Speculative indirect call %s => %s has "
1176 		       "turned out to have contradicting known target ",
1177 		       edge->caller->dump_name (),
1178 		       e2->callee->dump_name ());
1179 	      print_generic_expr (dump_file, callee_decl);
1180 	      fprintf (dump_file, "\n");
1181 	    }
1182 	  else
1183 	    {
1184 	      fprintf (dump_file, "Removing speculative call %s => %s\n",
1185 		       edge->caller->dump_name (),
1186 		       e2->callee->dump_name ());
1187 	    }
1188 	}
1189     }
1190   else
1191     {
1192       cgraph_edge *tmp = edge;
1193       if (dump_file)
1194         fprintf (dump_file, "Speculative call turned into direct call.\n");
1195       edge = e2;
1196       e2 = tmp;
1197       /* FIXME:  If EDGE is inlined, we should scale up the frequencies and counts
1198          in the functions inlined through it.  */
1199     }
1200   edge->count += e2->count;
1201   edge->speculative = false;
1202   e2->speculative = false;
1203   ref->remove_reference ();
1204   if (e2->indirect_unknown_callee || e2->inline_failed)
1205     e2->remove ();
1206   else
1207     e2->callee->remove_symbol_and_inline_clones ();
1208   if (edge->caller->call_site_hash)
1209     cgraph_update_edge_in_call_site_hash (edge);
1210   return edge;
1211 }
1212 
/* Make an indirect edge with an unknown callee an ordinary edge leading to
   CALLEE.  */
1216 
1217 cgraph_edge *
cgraph_edge::make_direct (cgraph_node *callee)
1219 {
1220   cgraph_edge *edge = this;
1221   gcc_assert (indirect_unknown_callee);
1222 
1223   /* If we are redirecting speculative call, make it non-speculative.  */
1224   if (indirect_unknown_callee && speculative)
1225     {
1226       edge = edge->resolve_speculation (callee->decl);
1227 
      /* On successful speculation just return the pre-existing direct edge.  */
1229       if (!indirect_unknown_callee)
1230         return edge;
1231     }
1232 
1233   indirect_unknown_callee = 0;
1234   ggc_free (indirect_info);
1235   indirect_info = NULL;
1236 
1237   /* Get the edge out of the indirect edge list. */
1238   if (prev_callee)
1239     prev_callee->next_callee = next_callee;
1240   if (next_callee)
1241     next_callee->prev_callee = prev_callee;
1242   if (!prev_callee)
1243     caller->indirect_calls = next_callee;
1244 
  /* Put it into the normal callee list.  */
1246   prev_callee = NULL;
1247   next_callee = caller->callees;
1248   if (caller->callees)
1249     caller->callees->prev_callee = edge;
1250   caller->callees = edge;
1251 
1252   /* Insert to callers list of the new callee.  */
1253   edge->set_callee (callee);
1254 
1255   if (call_stmt
1256       && !gimple_check_call_matching_types (call_stmt, callee->decl, false))
1257     {
1258       call_stmt_cannot_inline_p = true;
1259       inline_failed = CIF_MISMATCHED_ARGUMENTS;
1260     }
1261 
1262   /* We need to re-determine the inlining status of the edge.  */
1263   initialize_inline_failed (edge);
1264   return edge;
1265 }
1266 
1267 /* If necessary, change the function declaration in the call statement
1268    associated with E so that it corresponds to the edge callee.  */
1269 
1270 gimple *
cgraph_edge::redirect_call_stmt_to_callee (void)
1272 {
1273   cgraph_edge *e = this;
1274 
1275   tree decl = gimple_call_fndecl (e->call_stmt);
1276   gcall *new_stmt;
1277   gimple_stmt_iterator gsi;
1278   bool skip_bounds = false;
1279 
1280   if (e->speculative)
1281     {
1282       cgraph_edge *e2;
1283       gcall *new_stmt;
1284       ipa_ref *ref;
1285 
1286       e->speculative_call_info (e, e2, ref);
      /* If there already is a direct call (i.e. as a result of the inliner's
	 substitution), forget about speculating.  */
1289       if (decl)
1290 	e = e->resolve_speculation (decl);
1291       /* If types do not match, speculation was likely wrong.
1292          The direct edge was possibly redirected to the clone with a different
1293 	 signature.  We did not update the call statement yet, so compare it
1294 	 with the reference that still points to the proper type.  */
1295       else if (!gimple_check_call_matching_types (e->call_stmt,
1296 						  ref->referred->decl,
1297 						  true))
1298 	{
1299 	  if (dump_file)
1300 	    fprintf (dump_file, "Not expanding speculative call of %s -> %s\n"
1301 		     "Type mismatch.\n",
1302 		     e->caller->dump_name (),
1303 		     e->callee->dump_name ());
1304 	  e = e->resolve_speculation ();
1305 	  /* We are producing the final function body and will throw away the
1306 	     callgraph edges really soon.  Reset the counts/frequencies to
1307 	     keep verifier happy in the case of roundoff errors.  */
1308 	  e->count = gimple_bb (e->call_stmt)->count;
1309 	}
1310       /* Expand speculation into GIMPLE code.  */
1311       else
1312 	{
1313 	  if (dump_file)
1314 	    {
1315 	      fprintf (dump_file,
1316 		       "Expanding speculative call of %s -> %s count: ",
1317 		       e->caller->dump_name (),
1318 		       e->callee->dump_name ());
1319 	      e->count.dump (dump_file);
1320 	      fprintf (dump_file, "\n");
1321 	    }
1322 	  gcc_assert (e2->speculative);
1323 	  push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
1324 
1325 	  profile_probability prob = e->count.probability_in (e->count
1326 							      + e2->count);
1327 	  if (!prob.initialized_p ())
1328 	    prob = profile_probability::even ();
1329 	  new_stmt = gimple_ic (e->call_stmt,
1330 				dyn_cast<cgraph_node *> (ref->referred),
1331 				prob);
1332 	  e->speculative = false;
1333 	  e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt,
1334 						     false);
1335 	  e->count = gimple_bb (e->call_stmt)->count;
1336 
1337 	  /* Fix edges for BUILT_IN_CHKP_BNDRET calls attached to the
1338 	     processed call stmt.  */
1339 	  if (gimple_call_with_bounds_p (new_stmt)
1340 	      && gimple_call_lhs (new_stmt)
1341 	      && chkp_retbnd_call_by_val (gimple_call_lhs (e2->call_stmt)))
1342 	    {
1343 	      tree dresult = gimple_call_lhs (new_stmt);
1344 	      tree iresult = gimple_call_lhs (e2->call_stmt);
1345 	      gcall *dbndret = chkp_retbnd_call_by_val (dresult);
1346 	      gcall *ibndret = chkp_retbnd_call_by_val (iresult);
1347 	      struct cgraph_edge *iedge
1348 		= e2->caller->cgraph_node::get_edge (ibndret);
1349 
1350 	      if (dbndret)
1351 		iedge->caller->create_edge (iedge->callee, dbndret, e->count);
1352 	    }
1353 
1354 	  e2->speculative = false;
1355 	  e2->count = gimple_bb (e2->call_stmt)->count;
1356 	  ref->speculative = false;
1357 	  ref->stmt = NULL;
	  /* Direct and indirect edges are not both in the call site hash;
	     make sure it gets updated.  */
1360 	  if (e->caller->call_site_hash)
1361 	    cgraph_update_edge_in_call_site_hash (e2);
1362 	  pop_cfun ();
1363 	  /* Continue redirecting E to proper target.  */
1364 	}
1365     }
1366 
1367   /* We might propagate instrumented function pointer into
1368      not instrumented function and vice versa.  In such a
1369      case we need to either fix function declaration or
1370      remove bounds from call statement.  */
1371   if (flag_check_pointer_bounds && e->callee)
1372     skip_bounds = chkp_redirect_edge (e);
1373 
1374   if (e->indirect_unknown_callee
1375       || (decl == e->callee->decl
1376 	  && !skip_bounds))
1377     return e->call_stmt;
1378 
1379   if (flag_checking && decl)
1380     {
1381       cgraph_node *node = cgraph_node::get (decl);
1382       gcc_assert (!node || !node->clone.combined_args_to_skip);
1383     }
1384 
1385   if (symtab->dump_file)
1386     {
1387       fprintf (symtab->dump_file, "updating call of %s -> %s: ",
1388 	       e->caller->dump_name (), e->callee->dump_name ());
1389       print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1390       if (e->callee->clone.combined_args_to_skip)
1391 	{
1392 	  fprintf (symtab->dump_file, " combined args to skip: ");
1393 	  dump_bitmap (symtab->dump_file,
1394 		       e->callee->clone.combined_args_to_skip);
1395 	}
1396     }
1397 
1398   if (e->callee->clone.combined_args_to_skip
1399       || skip_bounds)
1400     {
1401       int lp_nr;
1402 
1403       new_stmt = e->call_stmt;
1404       if (e->callee->clone.combined_args_to_skip)
1405 	new_stmt
1406 	  = gimple_call_copy_skip_args (new_stmt,
1407 					e->callee->clone.combined_args_to_skip);
1408       if (skip_bounds)
1409 	new_stmt = chkp_copy_call_skip_bounds (new_stmt);
1410 
1411       tree old_fntype = gimple_call_fntype (e->call_stmt);
1412       gimple_call_set_fndecl (new_stmt, e->callee->decl);
1413       cgraph_node *origin = e->callee;
1414       while (origin->clone_of)
1415 	origin = origin->clone_of;
1416 
1417       if ((origin->former_clone_of
1418 	   && old_fntype == TREE_TYPE (origin->former_clone_of))
1419 	  || old_fntype == TREE_TYPE (origin->decl))
1420 	gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl));
1421       else
1422 	{
1423 	  bitmap skip = e->callee->clone.combined_args_to_skip;
1424 	  tree t = cgraph_build_function_type_skip_args (old_fntype, skip,
1425 							 false);
1426 	  gimple_call_set_fntype (new_stmt, t);
1427 	}
1428 
1429       if (gimple_vdef (new_stmt)
1430 	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1431 	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1432 
1433       gsi = gsi_for_stmt (e->call_stmt);
1434 
1435       /* For optimized away parameters, add on the caller side
1436 	 before the call
1437 	 DEBUG D#X => parm_Y(D)
1438 	 stmts and associate D#X with parm in decl_debug_args_lookup
1439 	 vector to say for debug info that if parameter parm had been passed,
1440 	 it would have value parm_Y(D).  */
1441       if (e->callee->clone.combined_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
1442 	{
1443 	  vec<tree, va_gc> **debug_args
1444 	    = decl_debug_args_lookup (e->callee->decl);
1445 	  tree old_decl = gimple_call_fndecl (e->call_stmt);
1446 	  if (debug_args && old_decl)
1447 	    {
1448 	      tree parm;
1449 	      unsigned i = 0, num;
1450 	      unsigned len = vec_safe_length (*debug_args);
1451 	      unsigned nargs = gimple_call_num_args (e->call_stmt);
1452 	      for (parm = DECL_ARGUMENTS (old_decl), num = 0;
1453 		   parm && num < nargs;
1454 		   parm = DECL_CHAIN (parm), num++)
1455 		if (bitmap_bit_p (e->callee->clone.combined_args_to_skip, num)
1456 		    && is_gimple_reg (parm))
1457 		  {
1458 		    unsigned last = i;
1459 
1460 		    while (i < len && (**debug_args)[i] != DECL_ORIGIN (parm))
1461 		      i += 2;
1462 		    if (i >= len)
1463 		      {
1464 			i = 0;
1465 			while (i < last
1466 			       && (**debug_args)[i] != DECL_ORIGIN (parm))
1467 			  i += 2;
1468 			if (i >= last)
1469 			  continue;
1470 		      }
1471 		    tree ddecl = (**debug_args)[i + 1];
1472 		    tree arg = gimple_call_arg (e->call_stmt, num);
1473 		    if (!useless_type_conversion_p (TREE_TYPE (ddecl),
1474 						    TREE_TYPE (arg)))
1475 		      {
1476 			tree rhs1;
1477 			if (!fold_convertible_p (TREE_TYPE (ddecl), arg))
1478 			  continue;
1479 			if (TREE_CODE (arg) == SSA_NAME
1480 			    && gimple_assign_cast_p (SSA_NAME_DEF_STMT (arg))
1481 			    && (rhs1
1482 				= gimple_assign_rhs1 (SSA_NAME_DEF_STMT (arg)))
1483 			    && useless_type_conversion_p (TREE_TYPE (ddecl),
1484 							  TREE_TYPE (rhs1)))
1485 			  arg = rhs1;
1486 			else
1487 			  arg = fold_convert (TREE_TYPE (ddecl), arg);
1488 		      }
1489 
1490 		    gimple *def_temp
1491 		      = gimple_build_debug_bind (ddecl, unshare_expr (arg),
1492 						 e->call_stmt);
1493 		    gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
1494 		  }
1495 	    }
1496 	}
1497 
1498       gsi_replace (&gsi, new_stmt, false);
1499       /* We need to defer cleaning EH info on the new statement to
1500          fixup-cfg.  We may not have dominator information at this point
1501 	 and thus would end up with unreachable blocks and have no way
1502 	 to communicate that we need to run CFG cleanup then.  */
1503       lp_nr = lookup_stmt_eh_lp (e->call_stmt);
1504       if (lp_nr != 0)
1505 	{
1506 	  remove_stmt_from_eh_lp (e->call_stmt);
1507 	  add_stmt_to_eh_lp (new_stmt, lp_nr);
1508 	}
1509     }
1510   else
1511     {
1512       new_stmt = e->call_stmt;
1513       gimple_call_set_fndecl (new_stmt, e->callee->decl);
1514       update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1515     }
1516 
1517   /* If changing the call to __cxa_pure_virtual or similar noreturn function,
1518      adjust gimple_call_fntype too.  */
1519   if (gimple_call_noreturn_p (new_stmt)
1520       && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (e->callee->decl)))
1521       && TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl))
1522       && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl)))
1523 	  == void_type_node))
1524     gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl));
1525 
1526   /* If the call becomes noreturn, remove the LHS if possible.  */
1527   tree lhs = gimple_call_lhs (new_stmt);
1528   if (lhs
1529       && gimple_call_noreturn_p (new_stmt)
1530       && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (new_stmt)))
1531 	  || should_remove_lhs_p (lhs)))
1532     {
1533       if (TREE_CODE (lhs) == SSA_NAME)
1534 	{
1535 	  tree var = create_tmp_reg_fn (DECL_STRUCT_FUNCTION (e->caller->decl),
1536 					TREE_TYPE (lhs), NULL);
1537 	  var = get_or_create_ssa_default_def
1538 		  (DECL_STRUCT_FUNCTION (e->caller->decl), var);
1539 	  gimple *set_stmt = gimple_build_assign (lhs, var);
1540           gsi = gsi_for_stmt (new_stmt);
1541 	  gsi_insert_before_without_update (&gsi, set_stmt, GSI_SAME_STMT);
1542 	  update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), set_stmt);
1543 	}
1544       gimple_call_set_lhs (new_stmt, NULL_TREE);
1545       update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1546     }
1547 
1548   /* If new callee has no static chain, remove it.  */
1549   if (gimple_call_chain (new_stmt) && !DECL_STATIC_CHAIN (e->callee->decl))
1550     {
1551       gimple_call_set_chain (new_stmt, NULL);
1552       update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1553     }
1554 
1555   maybe_remove_unused_call_args (DECL_STRUCT_FUNCTION (e->caller->decl),
1556 				 new_stmt);
1557 
1558   e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt, false);
1559 
1560   if (symtab->dump_file)
1561     {
1562       fprintf (symtab->dump_file, "  updated to:");
1563       print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1564     }
1565   return new_stmt;
1566 }
1567 
1568 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1569    OLD_STMT changed into NEW_STMT.  OLD_CALL is gimple_call_fndecl
   of OLD_STMT if it was previously a call statement.
1571    If NEW_STMT is NULL, the call has been dropped without any
1572    replacement.  */
1573 
1574 static void
cgraph_update_edges_for_call_stmt_node (cgraph_node *node,
1576 					gimple *old_stmt, tree old_call,
1577 					gimple *new_stmt)
1578 {
1579   tree new_call = (new_stmt && is_gimple_call (new_stmt))
1580 		  ? gimple_call_fndecl (new_stmt) : 0;
1581 
  /* If we are seeing indirect calls, there is nothing to update.  */
1583   if (!new_call && !old_call)
1584     return;
  /* See if we turned an indirect call into a direct call or folded a call
     to one builtin into a different builtin.  */
1587   if (old_call != new_call)
1588     {
1589       cgraph_edge *e = node->get_edge (old_stmt);
1590       cgraph_edge *ne = NULL;
1591       profile_count count;
1592 
1593       if (e)
1594 	{
1595 	  /* Keep calls marked as dead dead.  */
1596 	  if (new_stmt && is_gimple_call (new_stmt) && e->callee
1597 	      && DECL_BUILT_IN_CLASS (e->callee->decl) == BUILT_IN_NORMAL
1598 	      && DECL_FUNCTION_CODE (e->callee->decl) == BUILT_IN_UNREACHABLE)
1599 	    {
1600               node->get_edge (old_stmt)->set_call_stmt
1601 		 (as_a <gcall *> (new_stmt));
1602 	      return;
1603 	    }
	  /* See if the edge is already there and has the correct callee.  It
	     might be so because indirect inlining has already updated
	     it.  We also might have cloned and redirected the edge.  */
1607 	  if (new_call && e->callee)
1608 	    {
1609 	      cgraph_node *callee = e->callee;
1610 	      while (callee)
1611 		{
1612 		  if (callee->decl == new_call
1613 		      || callee->former_clone_of == new_call)
1614 		    {
1615 		      e->set_call_stmt (as_a <gcall *> (new_stmt));
1616 		      return;
1617 		    }
1618 		  callee = callee->clone_of;
1619 		}
1620 	    }
1621 
	  /* Otherwise remove the edge and create a new one; we can't simply
	     redirect it since the function has changed, so the inline plan and
	     other information attached to the edge is invalid.  */
1625 	  count = e->count;
1626  	  if (e->indirect_unknown_callee || e->inline_failed)
1627 	    e->remove ();
1628 	  else
1629 	    e->callee->remove_symbol_and_inline_clones ();
1630 	}
1631       else if (new_call)
1632 	{
	  /* We are seeing a new direct call; compute profile info based on the BB.  */
1634 	  basic_block bb = gimple_bb (new_stmt);
1635 	  count = bb->count;
1636 	}
1637 
1638       if (new_call)
1639 	{
1640 	  ne = node->create_edge (cgraph_node::get_create (new_call),
1641 				  as_a <gcall *> (new_stmt), count);
1642 	  gcc_assert (ne->inline_failed);
1643 	}
1644     }
1645   /* We only updated the call stmt; update the pointer in the cgraph edge.  */
1646   else if (old_stmt != new_stmt)
1647     node->get_edge (old_stmt)->set_call_stmt (as_a <gcall *> (new_stmt));
1648 }
1649 
1650 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1651    OLD_STMT changed into NEW_STMT.  OLD_DECL is gimple_call_fndecl
1652    of OLD_STMT before it was updated (updating can happen in place).  */
1653 
1654 void
1655 cgraph_update_edges_for_call_stmt (gimple *old_stmt, tree old_decl,
1656 				   gimple *new_stmt)
1657 {
1658   cgraph_node *orig = cgraph_node::get (cfun->decl);
1659   cgraph_node *node;
1660 
1661   gcc_checking_assert (orig);
1662   cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1663   if (orig->clones)
1664     for (node = orig->clones; node != orig;)
1665       {
1666         cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1667 	if (node->clones)
1668 	  node = node->clones;
1669 	else if (node->next_sibling_clone)
1670 	  node = node->next_sibling_clone;
1671 	else
1672 	  {
1673 	    while (node != orig && !node->next_sibling_clone)
1674 	      node = node->clone_of;
1675 	    if (node != orig)
1676 	      node = node->next_sibling_clone;
1677 	  }
1678       }
1679 }
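
/* Usage sketch (illustrative only, not a call site that exists in this file):
   a pass that folds a call statement in place keeps the callgraph in sync by
   reporting the change right after replacing the statement.  OLD_CALL,
   NEW_STMT and GSI stand for the pass's own locals:

     gcall *old_call = ...;
     tree old_decl = gimple_call_fndecl (old_call);
     gimple *new_stmt = ...;
     gsi_replace (&gsi, new_stmt, false);
     cgraph_update_edges_for_call_stmt (old_call, old_decl, new_stmt);

   Passing NULL as NEW_STMT reports that the call was dropped without any
   replacement.  */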
1680 
1681 
1682 /* Remove all callees from the node.  */
1683 
1684 void
1685 cgraph_node::remove_callees (void)
1686 {
1687   cgraph_edge *e, *f;
1688 
1689   /* It is sufficient to remove the edges from the lists of callers of
1690      the callees.  The callee list of the node can be zapped with one
1691      assignment.  */
1692   for (e = callees; e; e = f)
1693     {
1694       f = e->next_callee;
1695       symtab->call_edge_removal_hooks (e);
1696       if (!e->indirect_unknown_callee)
1697 	e->remove_callee ();
1698       symtab->free_edge (e);
1699     }
1700   for (e = indirect_calls; e; e = f)
1701     {
1702       f = e->next_callee;
1703       symtab->call_edge_removal_hooks (e);
1704       if (!e->indirect_unknown_callee)
1705 	e->remove_callee ();
1706       symtab->free_edge (e);
1707     }
1708   indirect_calls = NULL;
1709   callees = NULL;
1710   if (call_site_hash)
1711     {
1712       call_site_hash->empty ();
1713       call_site_hash = NULL;
1714     }
1715 }
1716 
1717 /* Remove all callers from the node.  */
1718 
1719 void
1720 cgraph_node::remove_callers (void)
1721 {
1722   cgraph_edge *e, *f;
1723 
1724   /* It is sufficient to remove the edges from the lists of callees of
1725      the callers.  The caller list of the node can be zapped with one
1726      assignment.  */
1727   for (e = callers; e; e = f)
1728     {
1729       f = e->next_caller;
1730       symtab->call_edge_removal_hooks (e);
1731       e->remove_caller ();
1732       symtab->free_edge (e);
1733     }
1734   callers = NULL;
1735 }
1736 
1737 /* Helper function for cgraph_release_function_body and free_lang_data.
1738    It releases body from function DECL without having to inspect its
1739    possibly non-existent symtab node.  */
1740 
1741 void
1742 release_function_body (tree decl)
1743 {
1744   function *fn = DECL_STRUCT_FUNCTION (decl);
1745   if (fn)
1746     {
1747       if (fn->cfg
1748 	  && loops_for_fn (fn))
1749 	{
1750 	  fn->curr_properties &= ~PROP_loops;
1751 	  loop_optimizer_finalize (fn);
1752 	}
1753       if (fn->gimple_df)
1754 	{
1755 	  delete_tree_ssa (fn);
1756 	  fn->eh = NULL;
1757 	}
1758       if (fn->cfg)
1759 	{
1760 	  gcc_assert (!dom_info_available_p (fn, CDI_DOMINATORS));
1761 	  gcc_assert (!dom_info_available_p (fn, CDI_POST_DOMINATORS));
1762 	  delete_tree_cfg_annotations (fn);
1763 	  clear_edges (fn);
1764 	  fn->cfg = NULL;
1765 	}
1766       if (fn->value_histograms)
1767 	free_histograms (fn);
1768       gimple_set_body (decl, NULL);
1769       /* The struct function hangs off a lot of data that would leak if we
1770          didn't remove all pointers to it.  */
1771       ggc_free (fn);
1772       DECL_STRUCT_FUNCTION (decl) = NULL;
1773     }
1774   DECL_SAVED_TREE (decl) = NULL;
1775 }
1776 
1777 /* Release memory used to represent the body of the function.
1778    Use this only for functions that are released before being translated to
1779    target code (i.e. RTL).  Functions that are compiled to RTL and beyond
1780    are free'd in final.c via free_after_compilation().
1781    KEEP_ARGUMENTS is useful only if you want to rebuild the body as a thunk.  */
1782 
1783 void
1784 cgraph_node::release_body (bool keep_arguments)
1785 {
1786   ipa_transforms_to_apply.release ();
1787   if (!used_as_abstract_origin && symtab->state != PARSING)
1788     {
1789       DECL_RESULT (decl) = NULL;
1790 
1791       if (!keep_arguments)
1792 	DECL_ARGUMENTS (decl) = NULL;
1793     }
1794   /* If the node is abstract and needed, then do not clear
1795      DECL_INITIAL of its associated function declaration because it's
1796      needed to emit debug info later.  */
1797   if (!used_as_abstract_origin && DECL_INITIAL (decl))
1798     DECL_INITIAL (decl) = error_mark_node;
1799   release_function_body (decl);
1800   if (lto_file_data)
1801     {
1802       lto_free_function_in_decl_state_for_node (this);
1803       lto_file_data = NULL;
1804     }
1805 }
1806 
1807 /* Remove function from symbol table.  */
1808 
1809 void
1810 cgraph_node::remove (void)
1811 {
1812   cgraph_node *n;
1813   int uid = this->uid;
1814 
1815   if (symtab->ipa_clones_dump_file && symtab->cloned_nodes.contains (this))
1816     fprintf (symtab->ipa_clones_dump_file,
1817 	     "Callgraph removal;%s;%d;%s;%d;%d\n", asm_name (), order,
1818 	     DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl),
1819 	     DECL_SOURCE_COLUMN (decl));
1820 
1821   symtab->call_cgraph_removal_hooks (this);
1822   remove_callers ();
1823   remove_callees ();
1824   ipa_transforms_to_apply.release ();
1825   delete_function_version (function_version ());
1826 
1827   /* Incremental inlining accesses removed nodes stored in the postorder
1828      list.  */
1829   force_output = false;
1830   forced_by_abi = false;
1831   for (n = nested; n; n = n->next_nested)
1832     n->origin = NULL;
1833   nested = NULL;
1834   if (origin)
1835     {
1836       cgraph_node **node2 = &origin->nested;
1837 
1838       while (*node2 != this)
1839 	node2 = &(*node2)->next_nested;
1840       *node2 = next_nested;
1841     }
1842   unregister ();
1843   if (prev_sibling_clone)
1844     prev_sibling_clone->next_sibling_clone = next_sibling_clone;
1845   else if (clone_of)
1846     clone_of->clones = next_sibling_clone;
1847   if (next_sibling_clone)
1848     next_sibling_clone->prev_sibling_clone = prev_sibling_clone;
1849   if (clones)
1850     {
1851       cgraph_node *n, *next;
1852 
1853       if (clone_of)
1854         {
1855 	  for (n = clones; n->next_sibling_clone; n = n->next_sibling_clone)
1856 	    n->clone_of = clone_of;
1857 	  n->clone_of = clone_of;
1858 	  n->next_sibling_clone = clone_of->clones;
1859 	  if (clone_of->clones)
1860 	    clone_of->clones->prev_sibling_clone = n;
1861 	  clone_of->clones = clones;
1862 	}
1863       else
1864         {
1865 	  /* We are removing node with clones.  This makes clones inconsistent,
1866 	     but assume they will be removed subsequently and just keep clone
1867 	     tree intact.  This can happen in unreachable function removal since
1868 	     we remove unreachable functions in random order, not by bottom-up
1869 	     walk of clone trees.  */
1870 	  for (n = clones; n; n = next)
1871 	    {
1872 	       next = n->next_sibling_clone;
1873 	       n->next_sibling_clone = NULL;
1874 	       n->prev_sibling_clone = NULL;
1875 	       n->clone_of = NULL;
1876 	    }
1877 	}
1878     }
1879 
1880   /* While all the clones are removed after being processed, the function
1881      itself is kept in the cgraph even after it is compiled.  Check whether
1882      we are done with this body and reclaim it proactively if this is the
1883      case.  */
1884   if (symtab->state != LTO_STREAMING)
1885     {
1886       n = cgraph_node::get (decl);
1887       if (!n
1888 	  || (!n->clones && !n->clone_of && !n->global.inlined_to
1889 	      && ((symtab->global_info_ready || in_lto_p)
1890 		  && (TREE_ASM_WRITTEN (n->decl)
1891 		      || DECL_EXTERNAL (n->decl)
1892 		      || !n->analyzed
1893 		      || (!flag_wpa && n->in_other_partition)))))
1894 	release_body ();
1895     }
1896   else
1897     {
1898       lto_free_function_in_decl_state_for_node (this);
1899       lto_file_data = NULL;
1900     }
1901 
1902   decl = NULL;
1903   if (call_site_hash)
1904     {
1905       call_site_hash->empty ();
1906       call_site_hash = NULL;
1907     }
1908 
1909   if (instrumented_version)
1910     {
1911       instrumented_version->instrumented_version = NULL;
1912       instrumented_version = NULL;
1913     }
1914 
1915   symtab->release_symbol (this, uid);
1916 }
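
/* Usage sketch (illustrative only): a pass that has proved a function
   unreachable could drop it from the symbol table roughly like this;
   FNDECL stands for the function's FUNCTION_DECL:

     cgraph_node *n = cgraph_node::get (fndecl);
     if (n && !n->callers && n->can_remove_if_no_direct_calls_p ())
       n->remove ();

   In practice unreachable-node removal is centralized in
   symbol_table::remove_unreachable_nodes.  */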
1917 
1918 /* Likewise indicate that a node has its address taken.  */
1919 
1920 void
1921 cgraph_node::mark_address_taken (void)
1922 {
1923   /* Indirect inlining can figure out that all uses of the address are
1924      inlined.  */
1925   if (global.inlined_to)
1926     {
1927       gcc_assert (cfun->after_inlining);
1928       gcc_assert (callers->indirect_inlining_edge);
1929       return;
1930     }
1931   /* FIXME: address_taken flag is used both as a shortcut for testing whether
1932      IPA_REF_ADDR reference exists (and thus it should be set on node
1933      representing alias we take address of) and as a test whether address
1934      of the object was taken (and thus it should be set on node alias is
1935      referring to).  We should remove the first use and then remove the
1936      following set.  */
1937   address_taken = 1;
1938   cgraph_node *node = ultimate_alias_target ();
1939   node->address_taken = 1;
1940 }
1941 
1942 /* Return local info for the compiled function.  */
1943 
1944 cgraph_local_info *
1945 cgraph_node::local_info (tree decl)
1946 {
1947   gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1948   cgraph_node *node = get (decl);
1949   if (!node)
1950     return NULL;
1951   return &node->ultimate_alias_target ()->local;
1952 }
1953 
1954 /* Return RTL info for the compiled function.  */
1955 
1956 cgraph_rtl_info *
1957 cgraph_node::rtl_info (tree decl)
1958 {
1959   gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1960   cgraph_node *node = get (decl);
1961   if (!node)
1962     return NULL;
1963   enum availability avail;
1964   node = node->ultimate_alias_target (&avail);
1965   if (decl != current_function_decl
1966       && (avail < AVAIL_AVAILABLE
1967 	  || (node->decl != current_function_decl
1968 	      && !TREE_ASM_WRITTEN (node->decl))))
1969     return NULL;
1970   /* Allocate if it doesn't exist.  */
1971   if (node->rtl == NULL)
1972     node->rtl = ggc_cleared_alloc<cgraph_rtl_info> ();
1973   return node->rtl;
1974 }
1975 
1976 /* Return a string describing the failure REASON.  */
1977 
1978 const char*
1979 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1980 {
1981 #undef DEFCIFCODE
1982 #define DEFCIFCODE(code, type, string)	string,
1983 
1984   static const char *cif_string_table[CIF_N_REASONS] = {
1985 #include "cif-code.def"
1986   };
1987 
1988   /* Signedness of an enum type is implementation defined, so cast it
1989      to unsigned before testing. */
1990   gcc_assert ((unsigned) reason < CIF_N_REASONS);
1991   return cif_string_table[reason];
1992 }
1993 
1994 /* Return a type describing the failure REASON.  */
1995 
1996 cgraph_inline_failed_type_t
1997 cgraph_inline_failed_type (cgraph_inline_failed_t reason)
1998 {
1999 #undef DEFCIFCODE
2000 #define DEFCIFCODE(code, type, string)	type,
2001 
2002   static cgraph_inline_failed_type_t cif_type_table[CIF_N_REASONS] = {
2003 #include "cif-code.def"
2004   };
2005 
2006   /* Signedness of an enum type is implementation defined, so cast it
2007      to unsigned before testing. */
2008   gcc_assert ((unsigned) reason < CIF_N_REASONS);
2009   return cif_type_table[reason];
2010 }
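
/* Usage sketch (illustrative only): inliner diagnostics typically combine the
   two accessors above, e.g. to report a hard failure attached to edge E:

     if (e->inline_failed
	 && cgraph_inline_failed_type (e->inline_failed) == CIF_FINAL_ERROR)
       error_at (gimple_location (e->call_stmt), "call cannot be inlined: %s",
		 cgraph_inline_failed_string (e->inline_failed));

   CIF_FINAL_ERROR is one of the failure types generated from cif-code.def.  */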
2011 
2012 /* Names used to print out the availability enum.  */
2013 const char * const cgraph_availability_names[] =
2014   {"unset", "not_available", "overwritable", "available", "local"};
2015 
2016 /* Output flags of edge to a file F.  */
2017 
2018 void
2019 cgraph_edge::dump_edge_flags (FILE *f)
2020 {
2021   if (speculative)
2022     fprintf (f, "(speculative) ");
2023   if (!inline_failed)
2024     fprintf (f, "(inlined) ");
2025   if (call_stmt_cannot_inline_p)
2026     fprintf (f, "(call_stmt_cannot_inline_p) ");
2027   if (indirect_inlining_edge)
2028     fprintf (f, "(indirect_inlining) ");
2029   if (count.initialized_p ())
2030     {
2031       fprintf (f, "(");
2032       count.dump (f);
2033       fprintf (f, ",");
2034       fprintf (f, "%.2f per call) ", sreal_frequency ().to_double ());
2035     }
2036   if (can_throw_external)
2037     fprintf (f, "(can throw external) ");
2038 }
2039 
2040 /* Dump call graph node to file F.  */
2041 
2042 void
2043 cgraph_node::dump (FILE *f)
2044 {
2045   cgraph_edge *edge;
2046 
2047   dump_base (f);
2048 
2049   if (global.inlined_to)
2050     fprintf (f, "  Function %s is inline copy in %s\n",
2051 	     dump_name (),
2052 	     global.inlined_to->dump_name ());
2053   if (clone_of)
2054     fprintf (f, "  Clone of %s\n", clone_of->dump_asm_name ());
2055   if (symtab->function_flags_ready)
2056     fprintf (f, "  Availability: %s\n",
2057 	     cgraph_availability_names [get_availability ()]);
2058 
2059   if (profile_id)
2060     fprintf (f, "  Profile id: %i\n",
2061 	     profile_id);
2062   fprintf (f, "  First run: %i\n", tp_first_run);
2063   cgraph_function_version_info *vi = function_version ();
2064   if (vi != NULL)
2065     {
2066       fprintf (f, "  Version info: ");
2067       if (vi->prev != NULL)
2068 	{
2069 	  fprintf (f, "prev: ");
2070 	  fprintf (f, "%s ", vi->prev->this_node->dump_asm_name ());
2071 	}
2072       if (vi->next != NULL)
2073 	{
2074 	  fprintf (f, "next: ");
2075 	  fprintf (f, "%s ", vi->next->this_node->dump_asm_name ());
2076 	}
2077       if (vi->dispatcher_resolver != NULL_TREE)
2078 	fprintf (f, "dispatcher: %s",
2079 		 lang_hooks.decl_printable_name (vi->dispatcher_resolver, 2));
2080 
2081       fprintf (f, "\n");
2082     }
2083   fprintf (f, "  Function flags:");
2084   if (count.initialized_p ())
2085     {
2086       fprintf (f, " count: ");
2087       count.dump (f);
2088     }
2089   if (origin)
2090     fprintf (f, " nested in: %s", origin->asm_name ());
2091   if (gimple_has_body_p (decl))
2092     fprintf (f, " body");
2093   if (process)
2094     fprintf (f, " process");
2095   if (local.local)
2096     fprintf (f, " local");
2097   if (local.redefined_extern_inline)
2098     fprintf (f, " redefined_extern_inline");
2099   if (only_called_at_startup)
2100     fprintf (f, " only_called_at_startup");
2101   if (only_called_at_exit)
2102     fprintf (f, " only_called_at_exit");
2103   if (tm_clone)
2104     fprintf (f, " tm_clone");
2105   if (calls_comdat_local)
2106     fprintf (f, " calls_comdat_local");
2107   if (icf_merged)
2108     fprintf (f, " icf_merged");
2109   if (merged_comdat)
2110     fprintf (f, " merged_comdat");
2111   if (split_part)
2112     fprintf (f, " split_part");
2113   if (indirect_call_target)
2114     fprintf (f, " indirect_call_target");
2115   if (nonfreeing_fn)
2116     fprintf (f, " nonfreeing_fn");
2117   if (DECL_STATIC_CONSTRUCTOR (decl))
2118     fprintf (f," static_constructor (priority:%i)", get_init_priority ());
2119   if (DECL_STATIC_DESTRUCTOR (decl))
2120     fprintf (f," static_destructor (priority:%i)", get_fini_priority ());
2121   if (frequency == NODE_FREQUENCY_HOT)
2122     fprintf (f, " hot");
2123   if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2124     fprintf (f, " unlikely_executed");
2125   if (frequency == NODE_FREQUENCY_EXECUTED_ONCE)
2126     fprintf (f, " executed_once");
2127   if (only_called_at_startup)
2128     fprintf (f, " only_called_at_startup");
2129   if (only_called_at_exit)
2130     fprintf (f, " only_called_at_exit");
2131   if (opt_for_fn (decl, optimize_size))
2132     fprintf (f, " optimize_size");
2133   if (parallelized_function)
2134     fprintf (f, " parallelized_function");
2135 
2136   fprintf (f, "\n");
2137 
2138   if (thunk.thunk_p)
2139     {
2140       fprintf (f, "  Thunk");
2141       if (thunk.alias)
2142         fprintf (f, "  of %s (asm: %s)",
2143 		 lang_hooks.decl_printable_name (thunk.alias, 2),
2144 		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2145       fprintf (f, " fixed offset %i virtual value %i has "
2146 	       "virtual offset %i)\n",
2147 	       (int)thunk.fixed_offset,
2148 	       (int)thunk.virtual_value,
2149 	       (int)thunk.virtual_offset_p);
2150     }
2151   if (alias && thunk.alias
2152       && DECL_P (thunk.alias))
2153     {
2154       fprintf (f, "  Alias of %s",
2155 	       lang_hooks.decl_printable_name (thunk.alias, 2));
2156       if (DECL_ASSEMBLER_NAME_SET_P (thunk.alias))
2157         fprintf (f, " (asm: %s)",
2158 		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2159       fprintf (f, "\n");
2160     }
2161 
2162   fprintf (f, "  Called by: ");
2163 
2164   profile_count sum = profile_count::zero ();
2165   for (edge = callers; edge; edge = edge->next_caller)
2166     {
2167       fprintf (f, "%s ", edge->caller->dump_name ());
2168       edge->dump_edge_flags (f);
2169       if (edge->count.initialized_p ())
2170 	sum += edge->count.ipa ();
2171     }
2172 
2173   fprintf (f, "\n  Calls: ");
2174   for (edge = callees; edge; edge = edge->next_callee)
2175     {
2176       fprintf (f, "%s ", edge->callee->dump_name ());
2177       edge->dump_edge_flags (f);
2178     }
2179   fprintf (f, "\n");
2180 
2181   if (count.ipa ().initialized_p ())
2182     {
2183       bool ok = true;
2184       bool min = false;
2185       ipa_ref *ref;
2186 
2187       FOR_EACH_ALIAS (this, ref)
2188 	if (dyn_cast <cgraph_node *> (ref->referring)->count.initialized_p ())
2189 	  sum += dyn_cast <cgraph_node *> (ref->referring)->count.ipa ();
2190 
2191       if (global.inlined_to
2192 	  || (symtab->state < EXPANSION
2193 	      && ultimate_alias_target () == this && only_called_directly_p ()))
2194 	ok = !count.ipa ().differs_from_p (sum);
2195       else if (count.ipa () > profile_count::from_gcov_type (100)
2196 	       && count.ipa () < sum.apply_scale (99, 100))
2197 	ok = false, min = true;
2198       if (!ok)
2199 	{
2200 	  fprintf (f, "   Invalid sum of caller counts ");
2201 	  sum.dump (f);
2202 	  if (min)
2203 	    fprintf (f, ", should be at most ");
2204 	  else
2205 	    fprintf (f, ", should be ");
2206 	  count.ipa ().dump (f);
2207 	  fprintf (f, "\n");
2208 	}
2209     }
2210 
2211   for (edge = indirect_calls; edge; edge = edge->next_callee)
2212     {
2213       if (edge->indirect_info->polymorphic)
2214 	{
2215           fprintf (f, "   Polymorphic indirect call of type ");
2216 	  print_generic_expr (f, edge->indirect_info->otr_type, TDF_SLIM);
2217 	  fprintf (f, " token:%i", (int) edge->indirect_info->otr_token);
2218 	}
2219       else
2220         fprintf (f, "   Indirect call");
2221       edge->dump_edge_flags (f);
2222       if (edge->indirect_info->param_index != -1)
2223 	{
2224 	  fprintf (f, " of param:%i", edge->indirect_info->param_index);
2225 	  if (edge->indirect_info->agg_contents)
2226 	   fprintf (f, " loaded from %s %s at offset %i",
2227 		    edge->indirect_info->member_ptr ? "member ptr" : "aggregate",
2228 		    edge->indirect_info->by_ref ? "passed by reference":"",
2229 		    (int)edge->indirect_info->offset);
2230 	  if (edge->indirect_info->vptr_changed)
2231 	    fprintf (f, " (vptr maybe changed)");
2232 	}
2233       fprintf (f, "\n");
2234       if (edge->indirect_info->polymorphic)
2235 	edge->indirect_info->context.dump (f);
2236     }
2237 
2238   if (instrumentation_clone)
2239     fprintf (f, "  Is instrumented version.\n");
2240   else if (instrumented_version)
2241     fprintf (f, "  Has instrumented version.\n");
2242 }
2243 
2244 /* Dump the call graph node to stderr.  */
2245 
2246 DEBUG_FUNCTION void
2247 cgraph_node::debug (void)
2248 {
2249   dump (stderr);
2250 }
2251 
2252 /* Dump the callgraph to file F.  */
2253 
2254 void
2255 cgraph_node::dump_cgraph (FILE *f)
2256 {
2257   cgraph_node *node;
2258 
2259   fprintf (f, "callgraph:\n\n");
2260   FOR_EACH_FUNCTION (node)
2261     node->dump (f);
2262 }
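
/* Usage sketch (illustrative only): from a debugger, or from a pass with an
   open dump file, a single node or the whole graph can be printed:

     node->debug ();			      // one node to stderr
     cgraph_node::dump_cgraph (dump_file);    // every function to the dump file
*/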
2263 
2264 /* Return true when the DECL can possibly be inlined.  */
2265 
2266 bool
2267 cgraph_function_possibly_inlined_p (tree decl)
2268 {
2269   if (!symtab->global_info_ready)
2270     return !DECL_UNINLINABLE (decl);
2271   return DECL_POSSIBLY_INLINED (decl);
2272 }
2273 
2274 /* The cgraph_node is no longer a nested function; update the cgraph accordingly.  */
2275 void
2276 cgraph_node::unnest (void)
2277 {
2278   gcc_assert (origin);
2279   cgraph_node **node2 = &origin->nested;
2280 
2281   while (*node2 != this)
2282     node2 = &(*node2)->next_nested;
2283   *node2 = next_nested;
2284   origin = NULL;
2285 }
2286 
2287 /* Return function availability.  See cgraph.h for description of individual
2288    return values.  */
2289 enum availability
2290 cgraph_node::get_availability (symtab_node *ref)
2291 {
2292   if (ref)
2293     {
2294       cgraph_node *cref = dyn_cast <cgraph_node *> (ref);
2295       if (cref)
2296 	ref = cref->global.inlined_to;
2297     }
2298   enum availability avail;
2299   if (!analyzed)
2300     avail = AVAIL_NOT_AVAILABLE;
2301   else if (local.local)
2302     avail = AVAIL_LOCAL;
2303   else if (global.inlined_to)
2304     avail = AVAIL_AVAILABLE;
2305   else if (transparent_alias)
2306     ultimate_alias_target (&avail, ref);
2307   else if (ifunc_resolver
2308 	   || lookup_attribute ("noipa", DECL_ATTRIBUTES (decl)))
2309     avail = AVAIL_INTERPOSABLE;
2310   else if (!externally_visible)
2311     avail = AVAIL_AVAILABLE;
2312   /* If this is a reference from symbol itself and there are no aliases, we
2313      may be sure that the symbol was not interposed by something else because
2314      the symbol itself would be unreachable otherwise.
2315 
2316      Also comdat groups are always resolved in groups.  */
2317   else if ((this == ref && !has_aliases_p ())
2318            || (ref && get_comdat_group ()
2319                && get_comdat_group () == ref->get_comdat_group ()))
2320     avail = AVAIL_AVAILABLE;
2321   /* Inline functions are safe to be analyzed even if their symbol can
2322      be overwritten at runtime.  It is not meaningful to enforce any sane
2323      behavior on replacing an inline function with a different body.  */
2324   else if (DECL_DECLARED_INLINE_P (decl))
2325     avail = AVAIL_AVAILABLE;
2326 
2327   /* If the function can be overwritten, return OVERWRITABLE.  Take
2328      care of at least one notable extension - the COMDAT functions
2329      used to share template instantiations in C++ (this is symmetric
2330      to the code in cp_cannot_inline_tree_fn, which should probably be
2331      shared, with the inlinability hooks eliminated entirely).  */
2332 
2333   else if (decl_replaceable_p (decl) && !DECL_EXTERNAL (decl))
2334     avail = AVAIL_INTERPOSABLE;
2335   else avail = AVAIL_AVAILABLE;
2336 
2337   return avail;
2338 }
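
/* Usage sketch (illustrative only): IPA analyses typically resolve aliases
   and check availability before trusting a body.  ANALYZE_BODY is a
   hypothetical helper of the caller, not something defined here:

     enum availability avail;
     cgraph_node *target = node->ultimate_alias_target (&avail);
     if (avail >= AVAIL_AVAILABLE)
       analyze_body (target);	// the body cannot change behind our back
*/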
2339 
2340 /* Worker for cgraph_node_can_be_local_p.  */
2341 static bool
2342 cgraph_node_cannot_be_local_p_1 (cgraph_node *node, void *)
2343 {
2344   return !(!node->force_output
2345 	   && ((DECL_COMDAT (node->decl)
2346 		&& !node->forced_by_abi
2347 		&& !node->used_from_object_file_p ()
2348 		&& !node->same_comdat_group)
2349 	       || !node->externally_visible));
2350 }
2351 
2352 /* Return true if the cgraph_node can be made local for an API change.
2353    Extern inline functions and C++ COMDAT functions can be made local
2354    at the expense of possible code size growth if the function is used in
2355    multiple compilation units.  */
2356 bool
2357 cgraph_node::can_be_local_p (void)
2358 {
2359   return (!address_taken
2360 	  && !call_for_symbol_thunks_and_aliases (cgraph_node_cannot_be_local_p_1,
2361 						NULL, true));
2362 }
2363 
2364 /* Call callback on cgraph_node, thunks and aliases associated to cgraph_node.
2365    When INCLUDE_OVERWRITABLE is false, overwritable symbols are
2366    skipped.  When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
2367    skipped.  */
2368 bool
2369 cgraph_node::call_for_symbol_thunks_and_aliases (bool (*callback)
2370 						   (cgraph_node *, void *),
2371 						 void *data,
2372 						 bool include_overwritable,
2373 						 bool exclude_virtual_thunks)
2374 {
2375   cgraph_edge *e;
2376   ipa_ref *ref;
2377   enum availability avail = AVAIL_AVAILABLE;
2378 
2379   if (include_overwritable
2380       || (avail = get_availability ()) > AVAIL_INTERPOSABLE)
2381     {
2382       if (callback (this, data))
2383         return true;
2384     }
2385   FOR_EACH_ALIAS (this, ref)
2386     {
2387       cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2388       if (include_overwritable
2389 	  || alias->get_availability () > AVAIL_INTERPOSABLE)
2390 	if (alias->call_for_symbol_thunks_and_aliases (callback, data,
2391 						     include_overwritable,
2392 						     exclude_virtual_thunks))
2393 	  return true;
2394     }
2395   if (avail <= AVAIL_INTERPOSABLE)
2396     return false;
2397   for (e = callers; e; e = e->next_caller)
2398     if (e->caller->thunk.thunk_p
2399 	&& (include_overwritable
2400 	    || e->caller->get_availability () > AVAIL_INTERPOSABLE)
2401 	&& !(exclude_virtual_thunks
2402 	     && e->caller->thunk.virtual_offset_p))
2403       if (e->caller->call_for_symbol_thunks_and_aliases (callback, data,
2404 						       include_overwritable,
2405 						       exclude_virtual_thunks))
2406 	return true;
2407 
2408   return false;
2409 }
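
/* Usage sketch (illustrative only): a worker returns true to stop the walk
   early, in the same way nonremovable_p is used further below.
   ADDRESS_TAKEN_P is a hypothetical worker of the caller:

     static bool
     address_taken_p (cgraph_node *node, void *)
     {
       return node->address_taken;
     }

     bool any_address_taken
       = node->call_for_symbol_thunks_and_aliases (address_taken_p, NULL, true);
*/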
2410 
2411 /* Worker to bring NODE local.  */
2412 
2413 bool
2414 cgraph_node::make_local (cgraph_node *node, void *)
2415 {
2416   gcc_checking_assert (node->can_be_local_p ());
2417   if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2418     {
2419       node->make_decl_local ();
2420       node->set_section (NULL);
2421       node->set_comdat_group (NULL);
2422       node->externally_visible = false;
2423       node->forced_by_abi = false;
2424       node->local.local = true;
2425       node->set_section (NULL);
2426       node->unique_name = ((node->resolution == LDPR_PREVAILING_DEF_IRONLY
2427 			   || node->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
2428 			   && !flag_incremental_link);
2429       node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2430       gcc_assert (node->get_availability () == AVAIL_LOCAL);
2431     }
2432   return false;
2433 }
2434 
2435 /* Bring cgraph node local.  */
2436 
2437 void
2438 cgraph_node::make_local (void)
2439 {
2440   call_for_symbol_thunks_and_aliases (cgraph_node::make_local, NULL, true);
2441 }
2442 
2443 /* Worker to set nothrow flag.  */
2444 
2445 static void
2446 set_nothrow_flag_1 (cgraph_node *node, bool nothrow, bool non_call,
2447 		    bool *changed)
2448 {
2449   cgraph_edge *e;
2450 
2451   if (nothrow && !TREE_NOTHROW (node->decl))
2452     {
2453       /* With non-call exceptions we can't say for sure that the other
2454 	 function's body was not optimized in a way that may still throw.  */
2455       if (!non_call || node->binds_to_current_def_p ())
2456 	{
2457 	  TREE_NOTHROW (node->decl) = true;
2458 	  *changed = true;
2459 	  for (e = node->callers; e; e = e->next_caller)
2460 	    e->can_throw_external = false;
2461 	}
2462     }
2463   else if (!nothrow && TREE_NOTHROW (node->decl))
2464     {
2465       TREE_NOTHROW (node->decl) = false;
2466       *changed = true;
2467     }
2468   ipa_ref *ref;
2469   FOR_EACH_ALIAS (node, ref)
2470     {
2471       cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2472       if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2473 	set_nothrow_flag_1 (alias, nothrow, non_call, changed);
2474     }
2475   for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2476     if (e->caller->thunk.thunk_p
2477 	&& (!nothrow || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2478       set_nothrow_flag_1 (e->caller, nothrow, non_call, changed);
2479 }
2480 
2481 /* Set TREE_NOTHROW on cgraph_node's decl and on the aliases of the node,
2482    if any, to NOTHROW.  Return true if any change was done.  */
2483 
2484 bool
2485 cgraph_node::set_nothrow_flag (bool nothrow)
2486 {
2487   bool changed = false;
2488   bool non_call = opt_for_fn (decl, flag_non_call_exceptions);
2489 
2490   if (!nothrow || get_availability () > AVAIL_INTERPOSABLE)
2491     set_nothrow_flag_1 (this, nothrow, non_call, &changed);
2492   else
2493     {
2494       ipa_ref *ref;
2495 
2496       FOR_EACH_ALIAS (this, ref)
2497 	{
2498 	  cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2499 	  if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2500 	    set_nothrow_flag_1 (alias, nothrow, non_call, &changed);
2501 	}
2502     }
2503   return changed;
2504 }
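
/* Usage sketch (illustrative only): an IPA pass that proved a function does
   not throw records whether the flag actually changed, so dependent cleanups
   can be retriggered.  CHANGED is the caller's own bookkeeping variable:

     if (node->set_nothrow_flag (true))
       changed = true;	// callers' EH edges may now be cleaned up
*/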
2505 
2506 /* Worker to set malloc flag.  */
2507 static void
2508 set_malloc_flag_1 (cgraph_node *node, bool malloc_p, bool *changed)
2509 {
2510   if (malloc_p && !DECL_IS_MALLOC (node->decl))
2511     {
2512       DECL_IS_MALLOC (node->decl) = true;
2513       *changed = true;
2514     }
2515 
2516   ipa_ref *ref;
2517   FOR_EACH_ALIAS (node, ref)
2518     {
2519       cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2520       if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2521 	set_malloc_flag_1 (alias, malloc_p, changed);
2522     }
2523 
2524   for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2525     if (e->caller->thunk.thunk_p
2526 	&& (!malloc_p || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2527       set_malloc_flag_1 (e->caller, malloc_p, changed);
2528 }
2529 
2530 /* Set DECL_IS_MALLOC on NODE's decl and on NODE's aliases if any.  */
2531 
2532 bool
2533 cgraph_node::set_malloc_flag (bool malloc_p)
2534 {
2535   bool changed = false;
2536 
2537   if (!malloc_p || get_availability () > AVAIL_INTERPOSABLE)
2538     set_malloc_flag_1 (this, malloc_p, &changed);
2539   else
2540     {
2541       ipa_ref *ref;
2542 
2543       FOR_EACH_ALIAS (this, ref)
2544 	{
2545 	  cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2546 	  if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2547 	    set_malloc_flag_1 (alias, malloc_p, &changed);
2548 	}
2549     }
2550   return changed;
2551 }
2552 
2553 /* Worker to set_const_flag.  */
2554 
2555 static void
2556 set_const_flag_1 (cgraph_node *node, bool set_const, bool looping,
2557 		  bool *changed)
2558 {
2559   /* Static constructors and destructors without a side effect can be
2560      optimized out.  */
2561   if (set_const && !looping)
2562     {
2563       if (DECL_STATIC_CONSTRUCTOR (node->decl))
2564 	{
2565 	  DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2566 	  *changed = true;
2567 	}
2568       if (DECL_STATIC_DESTRUCTOR (node->decl))
2569 	{
2570 	  DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2571 	  *changed = true;
2572 	}
2573     }
2574   if (!set_const)
2575     {
2576       if (TREE_READONLY (node->decl))
2577 	{
2578           TREE_READONLY (node->decl) = 0;
2579           DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2580 	  *changed = true;
2581 	}
2582     }
2583   else
2584     {
2585       /* Consider function:
2586 
2587 	 bool a(int *p)
2588 	 {
2589 	   return *p==*p;
2590 	 }
2591 
2592 	 During early optimization we will turn this into:
2593 
2594 	 bool a(int *p)
2595 	 {
2596 	   return true;
2597 	 }
2598 
2599 	 Now this function will be detected as CONST; however, when interposed
2600 	 it may end up being just pure.  We must always assume the worst
2601 	 scenario here.  */
2602       if (TREE_READONLY (node->decl))
2603 	{
2604 	  if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2605 	    {
2606               DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2607 	      *changed = true;
2608 	    }
2609 	}
2610       else if (node->binds_to_current_def_p ())
2611 	{
2612 	  TREE_READONLY (node->decl) = true;
2613           DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2614 	  DECL_PURE_P (node->decl) = false;
2615 	  *changed = true;
2616 	}
2617       else
2618 	{
2619 	  if (dump_file && (dump_flags & TDF_DETAILS))
2620 	    fprintf (dump_file, "Dropping state to PURE because function does "
2621 		     "not bind to current def.\n");
2622 	  if (!DECL_PURE_P (node->decl))
2623 	    {
2624 	      DECL_PURE_P (node->decl) = true;
2625               DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2626 	      *changed = true;
2627 	    }
2628 	  else if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2629 	    {
2630               DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2631 	      *changed = true;
2632 	    }
2633 	}
2634     }
2635 
2636   ipa_ref *ref;
2637   FOR_EACH_ALIAS (node, ref)
2638     {
2639       cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2640       if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2641 	set_const_flag_1 (alias, set_const, looping, changed);
2642     }
2643   for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2644     if (e->caller->thunk.thunk_p
2645 	&& (!set_const || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2646       {
2647 	/* Virtual thunks access virtual offset in the vtable, so they can
2648 	   only be pure, never const.  */
2649         if (set_const
2650 	    && (e->caller->thunk.virtual_offset_p
2651 	        || !node->binds_to_current_def_p (e->caller)))
2652 	  *changed |= e->caller->set_pure_flag (true, looping);
2653 	else
2654 	  set_const_flag_1 (e->caller, set_const, looping, changed);
2655       }
2656 }
2657 
2658 /* If SET_CONST is true, mark the function, its aliases and thunks as ECF_CONST.
2659    If SET_CONST is false, clear the flag.
2660 
2661    When setting the flag, be careful about possible interposition: do not
2662    set the flag for functions that can be interposed, and only set the pure
2663    flag for functions that may bind to a different definition.
2664 
2665    Return true if any change was done.  */
2666 
2667 bool
2668 cgraph_node::set_const_flag (bool set_const, bool looping)
2669 {
2670   bool changed = false;
2671   if (!set_const || get_availability () > AVAIL_INTERPOSABLE)
2672     set_const_flag_1 (this, set_const, looping, &changed);
2673   else
2674     {
2675       ipa_ref *ref;
2676 
2677       FOR_EACH_ALIAS (this, ref)
2678 	{
2679 	  cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2680 	  if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2681 	    set_const_flag_1 (alias, set_const, looping, &changed);
2682 	}
2683     }
2684   return changed;
2685 }
2686 
2687 /* Info used by set_pure_flag_1.  */
2688 
2689 struct set_pure_flag_info
2690 {
2691   bool pure;
2692   bool looping;
2693   bool changed;
2694 };
2695 
2696 /* Worker to set_pure_flag.  */
2697 
2698 static bool
2699 set_pure_flag_1 (cgraph_node *node, void *data)
2700 {
2701   struct set_pure_flag_info *info = (struct set_pure_flag_info *)data;
2702   /* Static constructors and destructors without a side effect can be
2703      optimized out.  */
2704   if (info->pure && !info->looping)
2705     {
2706       if (DECL_STATIC_CONSTRUCTOR (node->decl))
2707 	{
2708 	  DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2709 	  info->changed = true;
2710 	}
2711       if (DECL_STATIC_DESTRUCTOR (node->decl))
2712 	{
2713 	  DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2714 	  info->changed = true;
2715 	}
2716     }
2717   if (info->pure)
2718     {
2719       if (!DECL_PURE_P (node->decl) && !TREE_READONLY (node->decl))
2720 	{
2721           DECL_PURE_P (node->decl) = true;
2722           DECL_LOOPING_CONST_OR_PURE_P (node->decl) = info->looping;
2723 	  info->changed = true;
2724 	}
2725       else if (DECL_LOOPING_CONST_OR_PURE_P (node->decl)
2726 	       && !info->looping)
2727 	{
2728           DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2729 	  info->changed = true;
2730 	}
2731     }
2732   else
2733     {
2734       if (DECL_PURE_P (node->decl))
2735 	{
2736           DECL_PURE_P (node->decl) = false;
2737           DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2738 	  info->changed = true;
2739 	}
2740     }
2741   return false;
2742 }
2743 
2744 /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
2745    if any to PURE.
2746 
2747    When setting the flag, be careful about possible interposition.
2748    Return true if any change was done. */
2749 
2750 bool
2751 cgraph_node::set_pure_flag (bool pure, bool looping)
2752 {
2753   struct set_pure_flag_info info = {pure, looping, false};
2754   if (!pure)
2755     looping = false;
2756   call_for_symbol_thunks_and_aliases (set_pure_flag_1, &info, !pure, true);
2757   return info.changed;
2758 }
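
/* Usage sketch (illustrative only): propagation in the style of
   ipa-pure-const picks the strongest flag that interposition permits.
   STATE, IPA_CONST and IPA_PURE stand for the caller's own lattice values,
   not names defined in this file:

     if (state == IPA_CONST)
       changed |= node->set_const_flag (true, looping);
     else if (state == IPA_PURE)
       changed |= node->set_pure_flag (true, looping);
*/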
2759 
2760 /* Return true when the cgraph_node cannot return or throw and thus
2761    it is safe to ignore its side effects for IPA analysis.  */
2762 
2763 bool
2764 cgraph_node::cannot_return_p (void)
2765 {
2766   int flags = flags_from_decl_or_type (decl);
2767   if (!opt_for_fn (decl, flag_exceptions))
2768     return (flags & ECF_NORETURN) != 0;
2769   else
2770     return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2771 	     == (ECF_NORETURN | ECF_NOTHROW));
2772 }
2773 
2774 /* Return true when a call of the edge cannot lead to a return from the
2775    caller and thus it is safe to ignore its side effects for IPA analysis
2776    when computing side effects of the caller.
2777    FIXME: We could actually mark all edges that have no reaching
2778    path to the exit block or throw to get better results.  */
2779 bool
2780 cgraph_edge::cannot_lead_to_return_p (void)
2781 {
2782   if (caller->cannot_return_p ())
2783     return true;
2784   if (indirect_unknown_callee)
2785     {
2786       int flags = indirect_info->ecf_flags;
2787       if (!opt_for_fn (caller->decl, flag_exceptions))
2788 	return (flags & ECF_NORETURN) != 0;
2789       else
2790 	return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2791 		 == (ECF_NORETURN | ECF_NOTHROW));
2792     }
2793   else
2794     return callee->cannot_return_p ();
2795 }
2796 
2797 /* Return true if the call can be hot.  */
2798 
2799 bool
2800 cgraph_edge::maybe_hot_p (void)
2801 {
2802   if (!maybe_hot_count_p (NULL, count.ipa ()))
2803     return false;
2804   if (caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
2805       || (callee
2806 	  && callee->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
2807     return false;
2808   if (caller->frequency > NODE_FREQUENCY_UNLIKELY_EXECUTED
2809       && (callee
2810 	  && callee->frequency <= NODE_FREQUENCY_EXECUTED_ONCE))
2811     return false;
2812   if (opt_for_fn (caller->decl, optimize_size))
2813     return false;
2814   if (caller->frequency == NODE_FREQUENCY_HOT)
2815     return true;
2816   /* If the profile is not known yet, be conservative.
2817      FIXME: this predicate is used by the early inliner and can do better there.  */
2818   if (symtab->state < IPA_SSA)
2819     return true;
2820   if (caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE
2821       && sreal_frequency () * 2 < 3)
2822     return false;
2823   if (PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) == 0
2824       || sreal_frequency () * PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) <= 1)
2825     return false;
2826   return true;
2827 }
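
/* Usage sketch (illustrative only): size/speed heuristics consult this
   predicate per call site.  BADNESS and HOT_BONUS are hypothetical names
   from an inliner's own cost model, not defined here:

     if (edge->maybe_hot_p ())
       badness -= hot_bonus;	// prefer spending code size on hot calls
*/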
2828 
2829 /* Worker for cgraph_can_remove_if_no_direct_calls_p.  */
2830 
2831 static bool
2832 nonremovable_p (cgraph_node *node, void *)
2833 {
2834   return !node->can_remove_if_no_direct_calls_and_refs_p ();
2835 }
2836 
2837 /* Return true if whole comdat group can be removed if there are no direct
2838    calls to THIS.  */
2839 
2840 bool
2841 cgraph_node::can_remove_if_no_direct_calls_p (bool will_inline)
2842 {
2843   struct ipa_ref *ref;
2844 
2845   /* For local symbols or non-comdat group it is the same as
2846      can_remove_if_no_direct_calls_p.  */
2847   if (!externally_visible || !same_comdat_group)
2848     {
2849       if (DECL_EXTERNAL (decl))
2850 	return true;
2851       if (address_taken)
2852 	return false;
2853       return !call_for_symbol_and_aliases (nonremovable_p, NULL, true);
2854     }
2855 
2856   if (will_inline && address_taken)
2857     return false;
2858 
2859   /* Otherwise check if we can remove the symbol itself and then verify
2860      that the only uses of the comdat group are direct calls to THIS
2861      or its aliases.  */
2862   if (!can_remove_if_no_direct_calls_and_refs_p ())
2863     return false;
2864 
2865   /* Check that all refs come from within the comdat group.  */
2866   for (int i = 0; iterate_referring (i, ref); i++)
2867     if (ref->referring->get_comdat_group () != get_comdat_group ())
2868       return false;
2869 
2870   struct cgraph_node *target = ultimate_alias_target ();
2871   for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2872        next != this; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2873     {
2874       if (!externally_visible)
2875 	continue;
2876       if (!next->alias
2877 	  && !next->can_remove_if_no_direct_calls_and_refs_p ())
2878 	return false;
2879 
2880       /* If we see different symbol than THIS, be sure to check calls.  */
2881       if (next->ultimate_alias_target () != target)
2882 	for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2883 	  if (e->caller->get_comdat_group () != get_comdat_group ()
2884 	      || will_inline)
2885 	    return false;
2886 
2887       /* If function is not being inlined, we care only about
2888 	 references outside of the comdat group.  */
2889       if (!will_inline)
2890         for (int i = 0; next->iterate_referring (i, ref); i++)
2891 	  if (ref->referring->get_comdat_group () != get_comdat_group ())
2892 	    return false;
2893     }
2894   return true;
2895 }
2896 
2897 /* Return true when the function cgraph_node can be expected to be removed
2898    from the program when direct calls in this compilation unit are removed.
2899 
2900    As a special case, COMDAT functions are
2901    cgraph_can_remove_if_no_direct_calls_p while they are not
2902    cgraph_only_called_directly_p (it is possible they are called from another
2903    unit).
2904 
2905    This function behaves as cgraph_only_called_directly_p because eliminating
2906    all uses of a COMDAT function does not necessarily make it disappear from
2907    the program unless we are compiling the whole program or we do LTO.  In
2908    that case we know we win since dynamic linking will not really discard the
2909    linkonce section.  */
2910 
2911 bool
2912 cgraph_node::will_be_removed_from_program_if_no_direct_calls_p
2913 	 (bool will_inline)
2914 {
2915   gcc_assert (!global.inlined_to);
2916   if (DECL_EXTERNAL (decl))
2917     return true;
2918 
2919   if (!in_lto_p && !flag_whole_program)
2920     {
2921       /* If the symbol is in comdat group, we need to verify that whole comdat
2922 	 group becomes unreachable.  Technically we could skip references from
2923 	 within the group, too.  */
2924       if (!only_called_directly_p ())
2925 	return false;
2926       if (same_comdat_group && externally_visible)
2927 	{
2928 	  struct cgraph_node *target = ultimate_alias_target ();
2929 
2930 	  if (will_inline && address_taken)
2931 	    return true;
2932 	  for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2933 	       next != this;
2934 	       next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2935 	    {
2936 	      if (!externally_visible)
2937 		continue;
2938 	      if (!next->alias
2939 		  && !next->only_called_directly_p ())
2940 		return false;
2941 
2942 	      /* If we see different symbol than THIS,
2943 		 be sure to check calls.  */
2944 	      if (next->ultimate_alias_target () != target)
2945 		for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2946 		  if (e->caller->get_comdat_group () != get_comdat_group ()
2947 		      || will_inline)
2948 		    return false;
2949 	    }
2950 	}
2951       return true;
2952     }
2953   else
2954     return can_remove_if_no_direct_calls_p (will_inline);
2955 }
2956 
2957 
2958 /* Worker for cgraph_only_called_directly_p.  */
2959 
2960 static bool
2961 cgraph_not_only_called_directly_p_1 (cgraph_node *node, void *)
2962 {
2963   return !node->only_called_directly_or_aliased_p ();
2964 }
2965 
2966 /* Return true when the function cgraph_node and all its aliases are only
2967    called directly,
2968    i.e. it is not externally visible, its address was not taken and
2969    it is not used in any other non-standard way.  */
2970 
2971 bool
2972 cgraph_node::only_called_directly_p (void)
2973 {
2974   gcc_assert (ultimate_alias_target () == this);
2975   return !call_for_symbol_and_aliases (cgraph_not_only_called_directly_p_1,
2976 				       NULL, true);
2977 }
2978 
2979 
2980 /* Collect all callers of NODE.  Worker for collect_callers_of_node.  */
2981 
2982 static bool
2983 collect_callers_of_node_1 (cgraph_node *node, void *data)
2984 {
2985   vec<cgraph_edge *> *redirect_callers = (vec<cgraph_edge *> *)data;
2986   cgraph_edge *cs;
2987   enum availability avail;
2988   node->ultimate_alias_target (&avail);
2989 
2990   if (avail > AVAIL_INTERPOSABLE)
2991     for (cs = node->callers; cs != NULL; cs = cs->next_caller)
2992       if (!cs->indirect_inlining_edge
2993 	  && !cs->caller->thunk.thunk_p)
2994         redirect_callers->safe_push (cs);
2995   return false;
2996 }
2997 
2998 /* Collect all callers of cgraph_node and its aliases that are known to lead to
2999    cgraph_node (i.e. are not overwritable).  */
3000 
3001 vec<cgraph_edge *>
3002 cgraph_node::collect_callers (void)
3003 {
3004   vec<cgraph_edge *> redirect_callers = vNULL;
3005   call_for_symbol_thunks_and_aliases (collect_callers_of_node_1,
3006 				    &redirect_callers, false);
3007   return redirect_callers;
3008 }
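
/* Usage sketch (illustrative only): cloning machinery usually gathers the
   redirectable call sites first and then retargets them to the new clone.
   NEW_NODE stands for the clone created by the caller; redirect_callee is
   declared in cgraph.h:

     vec<cgraph_edge *> callers = node->collect_callers ();
     for (unsigned i = 0; i < callers.length (); i++)
       callers[i]->redirect_callee (new_node);
     callers.release ();
*/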
3009 
3010 
3011 /* Return TRUE if NODE2 is a clone of NODE or is equivalent to it.  Return
3012    optimistically true if this cannot be determined.  */
3013 
3014 static bool
3015 clone_of_p (cgraph_node *node, cgraph_node *node2)
3016 {
3017   node = node->ultimate_alias_target ();
3018   node2 = node2->ultimate_alias_target ();
3019 
3020   if (node2->clone_of == node
3021       || node2->former_clone_of == node->decl)
3022     return true;
3023 
3024   if (!node->thunk.thunk_p && !node->former_thunk_p ())
3025     {
3026       while (node2 && node->decl != node2->decl)
3027 	node2 = node2->clone_of;
3028       return node2 != NULL;
3029     }
3030 
3031   /* There are no virtual clones of thunks, so check former_clone_of or
3032      whether we might have skipped thunks because these adjustments are no
3033      longer necessary.  */
3034   while (node->thunk.thunk_p || node->former_thunk_p ())
3035     {
3036       if (!node->thunk.this_adjusting)
3037 	return false;
3038       /* In case of instrumented expanded thunks, which can have multiple calls
3039 	 in them, we do not know how to continue and just have to be
3040 	 optimistic.  */
3041       if (node->callees->next_callee)
3042 	return true;
3043       node = node->callees->callee->ultimate_alias_target ();
3044 
3045       if (!node2->clone.args_to_skip
3046 	  || !bitmap_bit_p (node2->clone.args_to_skip, 0))
3047 	return false;
3048       if (node2->former_clone_of == node->decl)
3049 	return true;
3050 
3051       cgraph_node *n2 = node2;
3052       while (n2 && node->decl != n2->decl)
3053 	n2 = n2->clone_of;
3054       if (n2)
3055 	return true;
3056     }
3057 
3058   return false;
3059 }
3060 
3061 /* Verify edge count and frequency.  */
3062 
3063 bool
3064 cgraph_edge::verify_count ()
3065 {
3066   bool error_found = false;
3067   if (!count.verify ())
3068     {
3069       error ("caller edge count invalid");
3070       error_found = true;
3071     }
3072   return error_found;
3073 }
3074 
3075 /* Switch to THIS_CFUN if needed and print STMT to stderr.  */
3076 static void
3077 cgraph_debug_gimple_stmt (function *this_cfun, gimple *stmt)
3078 {
3079   bool fndecl_was_null = false;
3080   /* debug_gimple_stmt needs correct cfun */
3081   if (cfun != this_cfun)
3082     set_cfun (this_cfun);
3083   /* ...and an actual current_function_decl */
3084   if (!current_function_decl)
3085     {
3086       current_function_decl = this_cfun->decl;
3087       fndecl_was_null = true;
3088     }
3089   debug_gimple_stmt (stmt);
3090   if (fndecl_was_null)
3091     current_function_decl = NULL;
3092 }
3093 
3094 /* Verify that call graph edge corresponds to DECL from the associated
3095    statement.  Return true if the verification should fail.  */
3096 
3097 bool
3098 cgraph_edge::verify_corresponds_to_fndecl (tree decl)
3099 {
3100   cgraph_node *node;
3101 
3102   if (!decl || callee->global.inlined_to)
3103     return false;
3104   if (symtab->state == LTO_STREAMING)
3105     return false;
3106   node = cgraph_node::get (decl);
3107 
3108   /* We do not know if a node from a different partition is an alias or what it
3109      aliases and therefore cannot do the former_clone_of check reliably.  When
3110      body_removed is set, we have lost all information about what it was an
3111      alias or thunk of and also cannot proceed.  */
3112   if (!node
3113       || node->body_removed
3114       || node->in_other_partition
3115       || callee->icf_merged
3116       || callee->in_other_partition)
3117     return false;
3118 
3119   node = node->ultimate_alias_target ();
3120 
3121   /* Optimizers can redirect unreachable calls or calls triggering undefined
3122      behavior to builtin_unreachable.  */
3123   if (DECL_BUILT_IN_CLASS (callee->decl) == BUILT_IN_NORMAL
3124       && DECL_FUNCTION_CODE (callee->decl) == BUILT_IN_UNREACHABLE)
3125     return false;
3126 
3127   if (callee->former_clone_of != node->decl
3128       && (node != callee->ultimate_alias_target ())
3129       && !clone_of_p (node, callee))
3130     return true;
3131   else
3132     return false;
3133 }
3134 
3135 /* Verify the consistency of the given cgraph node.  */
3136 DEBUG_FUNCTION void
3137 cgraph_node::verify_node (void)
3138 {
3139   cgraph_edge *e;
3140   function *this_cfun = DECL_STRUCT_FUNCTION (decl);
3141   basic_block this_block;
3142   gimple_stmt_iterator gsi;
3143   bool error_found = false;
3144 
3145   if (seen_error ())
3146     return;
3147 
3148   timevar_push (TV_CGRAPH_VERIFY);
3149   error_found |= verify_base ();
3150   for (e = callees; e; e = e->next_callee)
3151     if (e->aux)
3152       {
3153 	error ("aux field set for edge %s->%s",
3154 	       identifier_to_locale (e->caller->name ()),
3155 	       identifier_to_locale (e->callee->name ()));
3156 	error_found = true;
3157       }
3158   if (!count.verify ())
3159     {
3160       error ("cgraph count invalid");
3161       error_found = true;
3162     }
3163   if (global.inlined_to && same_comdat_group)
3164     {
3165       error ("inline clone in same comdat group list");
3166       error_found = true;
3167     }
3168   if (!definition && !in_other_partition && local.local)
3169     {
3170       error ("local symbols must be defined");
3171       error_found = true;
3172     }
3173   if (global.inlined_to && externally_visible)
3174     {
3175       error ("externally visible inline clone");
3176       error_found = true;
3177     }
3178   if (global.inlined_to && address_taken)
3179     {
3180       error ("inline clone with address taken");
3181       error_found = true;
3182     }
3183   if (global.inlined_to && force_output)
3184     {
3185       error ("inline clone is forced to output");
3186       error_found = true;
3187     }
3188   for (e = indirect_calls; e; e = e->next_callee)
3189     {
3190       if (e->aux)
3191 	{
3192 	  error ("aux field set for indirect edge from %s",
3193 		 identifier_to_locale (e->caller->name ()));
3194 	  error_found = true;
3195 	}
3196       if (!e->indirect_unknown_callee
3197 	  || !e->indirect_info)
3198 	{
3199 	  error ("An indirect edge from %s is not marked as indirect or has no "
3200 		 "associated indirect_info, the corresponding statement is: ",
3201 		 identifier_to_locale (e->caller->name ()));
3202 	  cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3203 	  error_found = true;
3204 	}
3205     }
3206   bool check_comdat = comdat_local_p ();
3207   for (e = callers; e; e = e->next_caller)
3208     {
3209       if (e->verify_count ())
3210 	error_found = true;
3211       if (check_comdat
3212 	  && !in_same_comdat_group_p (e->caller))
3213 	{
3214 	  error ("comdat-local function called by %s outside its comdat",
3215 		 identifier_to_locale (e->caller->name ()));
3216 	  error_found = true;
3217 	}
3218       if (!e->inline_failed)
3219 	{
3220 	  if (global.inlined_to
3221 	      != (e->caller->global.inlined_to
3222 		  ? e->caller->global.inlined_to : e->caller))
3223 	    {
3224 	      error ("inlined_to pointer is wrong");
3225 	      error_found = true;
3226 	    }
3227 	  if (callers->next_caller)
3228 	    {
3229 	      error ("multiple inline callers");
3230 	      error_found = true;
3231 	    }
3232 	}
3233       else
3234 	if (global.inlined_to)
3235 	  {
3236 	    error ("inlined_to pointer set for noninline callers");
3237 	    error_found = true;
3238 	  }
3239     }
3240   for (e = callees; e; e = e->next_callee)
3241     {
3242       if (e->verify_count ())
3243 	error_found = true;
3244       if (gimple_has_body_p (e->caller->decl)
3245 	  && !e->caller->global.inlined_to
3246 	  && !e->speculative
3247 	  /* Optimized out calls are redirected to __builtin_unreachable.  */
3248 	  && (e->count.nonzero_p ()
3249 	      || ! e->callee->decl
3250 	      || DECL_BUILT_IN_CLASS (e->callee->decl) != BUILT_IN_NORMAL
3251 	      || DECL_FUNCTION_CODE (e->callee->decl) != BUILT_IN_UNREACHABLE)
3252 	  && count
3253 	      == ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (decl))->count
3254 	  && (!e->count.ipa_p ()
3255 	      && e->count.differs_from_p (gimple_bb (e->call_stmt)->count)))
3256 	{
3257 	  error ("caller edge count does not match BB count");
3258 	  fprintf (stderr, "edge count: ");
3259 	  e->count.dump (stderr);
3260 	  fprintf (stderr, "\n bb count: ");
3261 	  gimple_bb (e->call_stmt)->count.dump (stderr);
3262 	  fprintf (stderr, "\n");
3263 	  error_found = true;
3264 	}
3265     }
3266   for (e = indirect_calls; e; e = e->next_callee)
3267     {
3268       if (e->verify_count ())
3269 	error_found = true;
3270       if (gimple_has_body_p (e->caller->decl)
3271 	  && !e->caller->global.inlined_to
3272 	  && !e->speculative
3273 	  && e->count.ipa_p ()
3274 	  && count
3275 	      == ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (decl))->count
3276 	  && (!e->count.ipa_p ()
3277 	      && e->count.differs_from_p (gimple_bb (e->call_stmt)->count)))
3278 	{
3279 	  error ("indirect call count does not match BB count");
3280 	  fprintf (stderr, "edge count: ");
3281 	  e->count.dump (stderr);
3282 	  fprintf (stderr, "\n bb count: ");
3283 	  gimple_bb (e->call_stmt)->count.dump (stderr);
3284 	  fprintf (stderr, "\n");
3285 	  error_found = true;
3286 	}
3287     }
3288   if (!callers && global.inlined_to)
3289     {
3290       error ("inlined_to pointer is set but no predecessors found");
3291       error_found = true;
3292     }
3293   if (global.inlined_to == this)
3294     {
3295       error ("inlined_to pointer refers to itself");
3296       error_found = true;
3297     }
3298 
3299   if (clone_of)
3300     {
3301       cgraph_node *n;
3302       for (n = clone_of->clones; n; n = n->next_sibling_clone)
3303 	if (n == this)
3304 	  break;
3305       if (!n)
3306 	{
3307 	  error ("cgraph_node has wrong clone_of");
3308 	  error_found = true;
3309 	}
3310     }
3311   if (clones)
3312     {
3313       cgraph_node *n;
3314       for (n = clones; n; n = n->next_sibling_clone)
3315 	if (n->clone_of != this)
3316 	  break;
3317       if (n)
3318 	{
3319 	  error ("cgraph_node has wrong clone list");
3320 	  error_found = true;
3321 	}
3322     }
3323   if ((prev_sibling_clone || next_sibling_clone) && !clone_of)
3324     {
3325        error ("cgraph_node is in clone list but it is not a clone");
3326        error_found = true;
3327     }
3328   if (!prev_sibling_clone && clone_of && clone_of->clones != this)
3329     {
3330       error ("cgraph_node has wrong prev_clone pointer");
3331       error_found = true;
3332     }
3333   if (prev_sibling_clone && prev_sibling_clone->next_sibling_clone != this)
3334     {
3335       error ("double linked list of clones corrupted");
3336       error_found = true;
3337     }
3338 
3339   if (analyzed && alias)
3340     {
3341       bool ref_found = false;
3342       int i;
3343       ipa_ref *ref = NULL;
3344 
3345       if (callees)
3346 	{
3347 	  error ("Alias has call edges");
3348           error_found = true;
3349 	}
3350       for (i = 0; iterate_reference (i, ref); i++)
3351 	if (ref->use == IPA_REF_CHKP)
3352 	  ;
3353 	else if (ref->use != IPA_REF_ALIAS)
3354 	  {
3355 	    error ("Alias has non-alias reference");
3356 	    error_found = true;
3357 	  }
3358 	else if (ref_found)
3359 	  {
3360 	    error ("Alias has more than one alias reference");
3361 	    error_found = true;
3362 	  }
3363 	else
3364 	  ref_found = true;
3365       if (!ref_found)
3366 	{
3367 	  error ("Analyzed alias has no reference");
3368 	  error_found = true;
3369 	}
3370     }
3371 
3372   /* Check instrumented version reference.  */
3373   if (instrumented_version
3374       && instrumented_version->instrumented_version != this)
3375     {
3376       error ("Instrumentation clone does not reference original node");
3377       error_found = true;
3378     }
3379 
3380   /* Cannot have orig_decl for non-instrumented nodes.  */
3381   if (!instrumentation_clone && orig_decl)
3382     {
3383       error ("Non-instrumented node has non-NULL original declaration");
3384       error_found = true;
3385     }
3386 
3387   /* If the original non-instrumented node still exists, then we may check
3388      that its original declaration is set properly.  */
3389   if (instrumented_version
3390       && orig_decl
3391       && orig_decl != instrumented_version->decl)
3392     {
3393       error ("Instrumented node has wrong original declaration");
3394       error_found = true;
3395     }
3396 
3397   /* Check all nodes have chkp reference to their instrumented versions.  */
3398   if (analyzed
3399       && instrumented_version
3400       && !instrumentation_clone)
3401     {
3402       bool ref_found = false;
3403       int i;
3404       struct ipa_ref *ref;
3405 
3406       for (i = 0; iterate_reference (i, ref); i++)
3407 	if (ref->use == IPA_REF_CHKP)
3408 	  {
3409 	    if (ref_found)
3410 	      {
3411 		error ("Node has more than one chkp reference");
3412 		error_found = true;
3413 	      }
3414 	    if (ref->referred != instrumented_version)
3415 	      {
3416 		error ("Wrong node is referenced with chkp reference");
3417 		error_found = true;
3418 	      }
3419 	    ref_found = true;
3420 	  }
3421 
3422       if (!ref_found)
3423 	{
3424 	  error ("Analyzed node has no reference to instrumented version");
3425 	  error_found = true;
3426 	}
3427     }
3428 
3429   if (instrumentation_clone
3430       && DECL_BUILT_IN_CLASS (decl) == NOT_BUILT_IN)
3431     {
3432       tree name = DECL_ASSEMBLER_NAME (decl);
3433       tree orig_name = DECL_ASSEMBLER_NAME (orig_decl);
3434 
3435       if (!IDENTIFIER_TRANSPARENT_ALIAS (name)
3436 	  || TREE_CHAIN (name) != orig_name)
3437 	{
3438 	  error ("Alias chain for instrumented node is broken");
3439 	  error_found = true;
3440 	}
3441     }
3442 
3443   if (analyzed && thunk.thunk_p)
3444     {
3445       if (!callees)
3446 	{
3447 	  error ("No edge out of thunk node");
3448           error_found = true;
3449 	}
3450       else if (callees->next_callee)
3451 	{
3452 	  error ("More than one edge out of thunk node");
3453           error_found = true;
3454 	}
3455       if (gimple_has_body_p (decl) && !global.inlined_to)
3456         {
3457 	  error ("Thunk is not supposed to have body");
3458           error_found = true;
3459         }
3460       if (thunk.add_pointer_bounds_args
3461 	  && !instrumented_version->semantically_equivalent_p (callees->callee))
3462 	{
3463 	  error ("Instrumentation thunk has wrong edge callee");
3464           error_found = true;
3465 	}
3466     }
3467   else if (analyzed && gimple_has_body_p (decl)
3468 	   && !TREE_ASM_WRITTEN (decl)
3469 	   && (!DECL_EXTERNAL (decl) || global.inlined_to)
3470 	   && !flag_wpa)
3471     {
3472       if (this_cfun->cfg)
3473 	{
3474 	  hash_set<gimple *> stmts;
3475 	  int i;
3476 	  ipa_ref *ref = NULL;
3477 
3478 	  /* Reach the trees by walking over the CFG, and note the
3479 	     enclosing basic-blocks in the call edges.  */
3480 	  FOR_EACH_BB_FN (this_block, this_cfun)
3481 	    {
3482 	      for (gsi = gsi_start_phis (this_block);
3483 		   !gsi_end_p (gsi); gsi_next (&gsi))
3484 		stmts.add (gsi_stmt (gsi));
3485 	      for (gsi = gsi_start_bb (this_block);
3486 		   !gsi_end_p (gsi);
3487 		   gsi_next (&gsi))
3488 		{
3489 		  gimple *stmt = gsi_stmt (gsi);
3490 		  stmts.add (stmt);
3491 		  if (is_gimple_call (stmt))
3492 		    {
3493 		      cgraph_edge *e = get_edge (stmt);
3494 		      tree decl = gimple_call_fndecl (stmt);
3495 		      if (e)
3496 			{
3497 			  if (e->aux)
3498 			    {
3499 			      error ("shared call_stmt:");
3500 			      cgraph_debug_gimple_stmt (this_cfun, stmt);
3501 			      error_found = true;
3502 			    }
3503 			  if (!e->indirect_unknown_callee)
3504 			    {
3505 			      if (e->verify_corresponds_to_fndecl (decl))
3506 				{
3507 				  error ("edge points to wrong declaration:");
3508 				  debug_tree (e->callee->decl);
3509 				  fprintf (stderr," Instead of:");
3510 				  debug_tree (decl);
3511 				  error_found = true;
3512 				}
3513 			    }
3514 			  else if (decl)
3515 			    {
3516 			      error ("an indirect edge with unknown callee "
3517 				     "corresponding to a call_stmt with "
3518 				     "a known declaration:");
3519 			      error_found = true;
3520 			      cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3521 			    }
3522 			  e->aux = (void *)1;
3523 			}
3524 		      else if (decl)
3525 			{
3526 			  error ("missing callgraph edge for call stmt:");
3527 			  cgraph_debug_gimple_stmt (this_cfun, stmt);
3528 			  error_found = true;
3529 			}
3530 		    }
3531 		}
3532 	      }
3533 	    for (i = 0; iterate_reference (i, ref); i++)
3534 	      if (ref->stmt && !stmts.contains (ref->stmt))
3535 		{
3536 		  error ("reference to dead statement");
3537 		  cgraph_debug_gimple_stmt (this_cfun, ref->stmt);
3538 		  error_found = true;
3539 		}
3540 	}
3541       else
3542 	/* No CFG available?!  */
3543 	gcc_unreachable ();
3544 
3545       for (e = callees; e; e = e->next_callee)
3546 	{
3547 	  if (!e->aux)
3548 	    {
3549 	      error ("edge %s->%s has no corresponding call_stmt",
3550 		     identifier_to_locale (e->caller->name ()),
3551 		     identifier_to_locale (e->callee->name ()));
3552 	      cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3553 	      error_found = true;
3554 	    }
3555 	  e->aux = 0;
3556 	}
3557       for (e = indirect_calls; e; e = e->next_callee)
3558 	{
3559 	  if (!e->aux && !e->speculative)
3560 	    {
3561 	      error ("an indirect edge from %s has no corresponding call_stmt",
3562 		     identifier_to_locale (e->caller->name ()));
3563 	      cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3564 	      error_found = true;
3565 	    }
3566 	  e->aux = 0;
3567 	}
3568     }
3569   if (error_found)
3570     {
3571       dump (stderr);
3572       internal_error ("verify_cgraph_node failed");
3573     }
3574   timevar_pop (TV_CGRAPH_VERIFY);
3575 }
3576 
3577 /* Verify whole cgraph structure.  */
3578 DEBUG_FUNCTION void
3579 cgraph_node::verify_cgraph_nodes (void)
3580 {
3581   cgraph_node *node;
3582 
3583   if (seen_error ())
3584     return;
3585 
3586   FOR_EACH_FUNCTION (node)
3587     node->verify ();
3588 }
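
/* Illustrative sketch (not part of the original file): a typical way a
   caller might request whole-callgraph verification after rewriting the
   symbol table.  The wrapper name is hypothetical; only the guarded call
   to cgraph_node::verify_cgraph_nodes is the real API.  */
#if 0
static void
example_checked_rebuild (void)
{
  /* Verification is expensive, so it is normally requested only when
     internal checking is enabled.  */
  if (flag_checking)
    cgraph_node::verify_cgraph_nodes ();
}
#endif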
3589 
3590 /* Walk the alias chain to return the function that cgraph_node is an alias
3591    of.  Walk through thunks, too.
3592    When AVAILABILITY is non-NULL, get minimal availability in the chain.
3593    When REF is non-NULL, assume that reference happens in symbol REF
3594    when determining the availability.  */
3595 
3596 cgraph_node *
3597 cgraph_node::function_symbol (enum availability *availability,
3598 			      struct symtab_node *ref)
3599 {
3600   cgraph_node *node = ultimate_alias_target (availability, ref);
3601 
3602   while (node->thunk.thunk_p)
3603     {
3604       ref = node;
3605       node = node->callees->callee;
3606       if (availability)
3607 	{
3608 	  enum availability a;
3609 	  a = node->get_availability (ref);
3610 	  if (a < *availability)
3611 	    *availability = a;
3612 	}
3613       node = node->ultimate_alias_target (availability, ref);
3614     }
3615   return node;
3616 }
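
/* Illustrative sketch (not part of the original file): resolving the
   function a symbol ultimately refers to, through aliases and thunks,
   while tracking availability.  The helper name is hypothetical.  */
#if 0
static cgraph_node *
example_resolve_callee (cgraph_node *node)
{
  enum availability avail = AVAIL_UNSET;
  cgraph_node *target = node->function_symbol (&avail);

  /* Only trust the body when the definition cannot be interposed.  */
  if (avail >= AVAIL_AVAILABLE)
    return target;
  return NULL;
}
#endif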
3617 
3618 /* Walk the alias chain to return the function that cgraph_node is an alias
3619    of.  Walk through non-virtual thunks, too.  Thus we return either a
3620    function or a virtual thunk node.
3621    When AVAILABILITY is non-NULL, get minimal availability in the chain.
3622    When REF is non-NULL, assume that reference happens in symbol REF
3623    when determining the availability.  */
3624 
3625 cgraph_node *
3626 cgraph_node::function_or_virtual_thunk_symbol
3627 				(enum availability *availability,
3628 				 struct symtab_node *ref)
3629 {
3630   cgraph_node *node = ultimate_alias_target (availability, ref);
3631 
3632   while (node->thunk.thunk_p && !node->thunk.virtual_offset_p)
3633     {
3634       ref = node;
3635       node = node->callees->callee;
3636       if (availability)
3637 	{
3638 	  enum availability a;
3639 	  a = node->get_availability (ref);
3640 	  if (a < *availability)
3641 	    *availability = a;
3642 	}
3643       node = node->ultimate_alias_target (availability, ref);
3644     }
3645   return node;
3646 }
3647 
3648 /* When doing LTO, read cgraph_node's body from disk if it is not already
3649    present.  */
3650 
3651 bool
3652 cgraph_node::get_untransformed_body (void)
3653 {
3654   lto_file_decl_data *file_data;
3655   const char *data, *name;
3656   size_t len;
3657   tree decl = this->decl;
3658 
3659   /* Check if the body is already there.  Either we have a gimple body, or
3660      the function is a thunk, in which case DECL_ARGUMENTS is set.  */
3661   if (DECL_ARGUMENTS (decl) || gimple_has_body_p (decl))
3662     return false;
3663 
3664   gcc_assert (in_lto_p && !DECL_RESULT (decl));
3665 
3666   timevar_push (TV_IPA_LTO_GIMPLE_IN);
3667 
3668   file_data = lto_file_data;
3669   name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3670 
3671   /* We may have renamed the declaration, e.g., a static function.  */
3672   name = lto_get_decl_name_mapping (file_data, name);
3673   struct lto_in_decl_state *decl_state
3674 	 = lto_get_function_in_decl_state (file_data, decl);
3675 
3676   data = lto_get_section_data (file_data, LTO_section_function_body,
3677 			       name, &len, decl_state->compressed);
3678   if (!data)
3679     fatal_error (input_location, "%s: section %s is missing",
3680 		 file_data->file_name,
3681 		 name);
3682 
3683   gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
3684 
3685   lto_input_function_body (file_data, this, data);
3686   lto_stats.num_function_bodies++;
3687   lto_free_section_data (file_data, LTO_section_function_body, name,
3688 			 data, len, decl_state->compressed);
3689   lto_free_function_in_decl_state_for_node (this);
3690   /* Keep lto file data so ipa-inline-analysis knows about cross module
3691      inlining.  */
3692 
3693   timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3694 
3695   return true;
3696 }
3697 
3698 /* Prepare function body.  When doing LTO, read cgraph_node's body from disk
3699    if it is not already present.  When some IPA transformations are scheduled,
3700    apply them.  */
3701 
3702 bool
3703 cgraph_node::get_body (void)
3704 {
3705   bool updated;
3706 
3707   updated = get_untransformed_body ();
3708 
3709   /* Getting a transformed body makes no sense for inline clones;
3710      we should never use this on real clones because they are materialized
3711      early.
3712      TODO: Materializing clones here will likely lead to a smaller LTRANS
3713      footprint.  */
3714   gcc_assert (!global.inlined_to && !clone_of);
3715   if (ipa_transforms_to_apply.exists ())
3716     {
3717       opt_pass *saved_current_pass = current_pass;
3718       FILE *saved_dump_file = dump_file;
3719       const char *saved_dump_file_name = dump_file_name;
3720       dump_flags_t saved_dump_flags = dump_flags;
3721       dump_file_name = NULL;
3722       dump_file = NULL;
3723 
3724       push_cfun (DECL_STRUCT_FUNCTION (decl));
3725       execute_all_ipa_transforms ();
3726       cgraph_edge::rebuild_edges ();
3727       free_dominance_info (CDI_DOMINATORS);
3728       free_dominance_info (CDI_POST_DOMINATORS);
3729       pop_cfun ();
3730       updated = true;
3731 
3732       current_pass = saved_current_pass;
3733       dump_file = saved_dump_file;
3734       dump_file_name = saved_dump_file_name;
3735       dump_flags = saved_dump_flags;
3736     }
3737   return updated;
3738 }
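
/* Illustrative sketch (not part of the original file): the usual pattern
   an IPA pass follows to obtain and then inspect a function body, letting
   get_body stream it in under LTO and apply any pending IPA transforms.
   The function name is hypothetical.  */
#if 0
static void
example_process_function (cgraph_node *node)
{
  if (!node->has_gimple_body_p ())
    return;

  /* Read the body if needed and apply scheduled IPA transformations.  */
  node->get_body ();
  push_cfun (DECL_STRUCT_FUNCTION (node->decl));
  /* ... walk or modify the gimple body of NODE here ...  */
  pop_cfun ();
}
#endif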
3739 
3740 /* Return the DECL_STRUCT_FUNCTION of the function.  */
3741 
3742 struct function *
3743 cgraph_node::get_fun (void)
3744 {
3745   cgraph_node *node = this;
3746   struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
3747 
3748   while (!fun && node->clone_of)
3749     {
3750       node = node->clone_of;
3751       fun = DECL_STRUCT_FUNCTION (node->decl);
3752     }
3753 
3754   return fun;
3755 }
3756 
3757 /* Verify that the types of the call arguments match those of the function
3758    declaration.  If we cannot verify this or there is a mismatch,
3759    return false.  */
3760 
3761 static bool
3762 gimple_check_call_args (gimple *stmt, tree fndecl, bool args_count_match)
3763 {
3764   tree parms, p;
3765   unsigned int i, nargs;
3766 
3767   /* Calls to internal functions always match their signature.  */
3768   if (gimple_call_internal_p (stmt))
3769     return true;
3770 
3771   nargs = gimple_call_num_args (stmt);
3772 
3773   /* Get argument types for verification.  */
3774   if (fndecl)
3775     parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3776   else
3777     parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
3778 
3779   /* Verify that the types of the arguments match those of the function
3780      declaration.  If we cannot verify this or there is a mismatch,
3781      return false.  */
3782   if (fndecl && DECL_ARGUMENTS (fndecl))
3783     {
3784       for (i = 0, p = DECL_ARGUMENTS (fndecl);
3785 	   i < nargs;
3786 	   i++, p = DECL_CHAIN (p))
3787 	{
3788 	  tree arg;
3789 	  /* We cannot distinguish a varargs function from the case of excess
3790 	     parameters; still, deferring the inlining decision to the callee
3791 	     is possible.  */
3792 	  if (!p)
3793 	    break;
3794 	  arg = gimple_call_arg (stmt, i);
3795 	  if (p == error_mark_node
3796 	      || DECL_ARG_TYPE (p) == error_mark_node
3797 	      || arg == error_mark_node
3798 	      || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
3799 		  && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
3800             return false;
3801 	}
3802       if (args_count_match && p)
3803 	return false;
3804     }
3805   else if (parms)
3806     {
3807       for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
3808 	{
3809 	  tree arg;
3810 	  /* If this is a varargs function defer inlining decision
3811 	     to callee.  */
3812 	  if (!p)
3813 	    break;
3814 	  arg = gimple_call_arg (stmt, i);
3815 	  if (TREE_VALUE (p) == error_mark_node
3816 	      || arg == error_mark_node
3817 	      || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
3818 	      || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
3819 		  && !fold_convertible_p (TREE_VALUE (p), arg)))
3820             return false;
3821 	}
3822     }
3823   else
3824     {
3825       if (nargs != 0)
3826         return false;
3827     }
3828   return true;
3829 }
3830 
3831 /* Verify that the types of the arguments and the lhs of CALL_STMT match
3832    those of the function declaration CALLEE.  If ARGS_COUNT_MATCH is
3833    true, the argument count needs to be the same.
3834    If we cannot verify this or there is a mismatch, return false.  */
3835 
3836 bool
3837 gimple_check_call_matching_types (gimple *call_stmt, tree callee,
3838 				  bool args_count_match)
3839 {
3840   tree lhs;
3841 
3842   if ((DECL_RESULT (callee)
3843        && !DECL_BY_REFERENCE (DECL_RESULT (callee))
3844        && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
3845        && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
3846                                       TREE_TYPE (lhs))
3847        && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
3848       || !gimple_check_call_args (call_stmt, callee, args_count_match))
3849     return false;
3850   return true;
3851 }
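
/* Illustrative sketch (not part of the original file): before redirecting
   a call graph edge to a different declaration, a pass can use
   gimple_check_call_matching_types to make sure the call statement's
   argument and return types are compatible with the new callee.  The
   helper name is hypothetical.  */
#if 0
static bool
example_safe_to_redirect_p (cgraph_edge *e, tree new_callee_decl)
{
  /* Do not require an exact argument count; excess arguments are left
     for the callee to deal with.  */
  return gimple_check_call_matching_types (e->call_stmt, new_callee_decl,
					   false);
}
#endif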
3852 
3853 /* Reset all state within cgraph.c so that we can rerun the compiler
3854    within the same process.  For use by toplev::finalize.  */
3855 
3856 void
3857 cgraph_c_finalize (void)
3858 {
3859   symtab = NULL;
3860 
3861   x_cgraph_nodes_queue = NULL;
3862 
3863   cgraph_fnver_htab = NULL;
3864   version_info_node = NULL;
3865 }
3866 
3867 /* A worker for call_for_symbol_and_aliases.  */
3868 
3869 bool
3870 cgraph_node::call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
3871 							      void *),
3872 					    void *data,
3873 					    bool include_overwritable)
3874 {
3875   ipa_ref *ref;
3876   FOR_EACH_ALIAS (this, ref)
3877     {
3878       cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
3879       if (include_overwritable
3880 	  || alias->get_availability () > AVAIL_INTERPOSABLE)
3881 	if (alias->call_for_symbol_and_aliases (callback, data,
3882 						include_overwritable))
3883 	  return true;
3884     }
3885   return false;
3886 }
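
/* Illustrative sketch (not part of the original file): a callback of the
   shape expected by call_for_symbol_and_aliases.  It counts the node and
   every alias reached through it; all names are hypothetical.  */
#if 0
static bool
example_count_symbol (cgraph_node *node, void *data)
{
  int *counter = (int *) data;
  (*counter)++;
  return false;	/* Returning false continues the walk.  */
}

/* Usage:
     int n = 0;
     node->call_for_symbol_and_aliases (example_count_symbol, &n, true);  */
#endif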
3887 
3888 /* Return true if NODE has a caller that is a thunk.  */
3889 
3890 bool
3891 cgraph_node::has_thunk_p (cgraph_node *node, void *)
3892 {
3893   for (cgraph_edge *e = node->callers; e; e = e->next_caller)
3894     if (e->caller->thunk.thunk_p)
3895       return true;
3896   return false;
3897 }
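
/* Illustrative sketch (not part of the original file): has_thunk_p is
   shaped as a callback, so asking whether a node or any of its aliases is
   called by a thunk looks like this.  The wrapper name is hypothetical.  */
#if 0
static bool
example_called_by_thunk_p (cgraph_node *node)
{
  return node->call_for_symbol_and_aliases (cgraph_node::has_thunk_p,
					    NULL, true);
}
#endif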
3898 
3899 /* Expected frequency of executions within the function.  */
3900 
3901 sreal
3902 cgraph_edge::sreal_frequency ()
3903 {
3904   return count.to_sreal_scale (caller->global.inlined_to
3905 			       ? caller->global.inlined_to->count
3906 			       : caller->count);
3907 }
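
/* Illustrative sketch (not part of the original file): using
   sreal_frequency in a heuristic.  The edge is considered frequent when
   it is expected to execute at least once per invocation of its
   (possibly inlined-to) caller; the threshold is purely illustrative.  */
#if 0
static bool
example_edge_frequent_p (cgraph_edge *e)
{
  return e->sreal_frequency ().to_double () >= 1.0;
}
#endif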
3908 
3909 #include "gt-cgraph.h"
3910