1 /* Callgraph handling code.
2    Copyright (C) 2003-2018 Free Software Foundation, Inc.
3    Contributed by Jan Hubicka
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #ifndef GCC_CGRAPH_H
22 #define GCC_CGRAPH_H
23 
24 #include "profile-count.h"
25 #include "ipa-ref.h"
26 #include "plugin-api.h"
27 
28 class ipa_opt_pass_d;
29 typedef ipa_opt_pass_d *ipa_opt_pass;
30 
31 /* Symbol table consists of functions and variables.
32    TODO: add labels and CONST_DECLs.  */
33 enum symtab_type
34 {
35   SYMTAB_SYMBOL,
36   SYMTAB_FUNCTION,
37   SYMTAB_VARIABLE
38 };
39 
40 /* Section names are stored as reference counted strings in GGC safe hashtable
41    (to make them survive through PCH).  */
42 
43 struct GTY((for_user)) section_hash_entry
44 {
45   int ref_count;
46   char *name;  /* As long as this data structure stays in GGC, we cannot put
47 		  the string at the tail of the structure, or GGC dies in a
48 		  horrible way.  */
49 };
50 
51 struct section_name_hasher : ggc_ptr_hash<section_hash_entry>
52 {
53   typedef const char *compare_type;
54 
55   static hashval_t hash (section_hash_entry *);
56   static bool equal (section_hash_entry *, const char *);
57 };
58 
59 enum availability
60 {
61   /* Not yet set by cgraph_function_body_availability.  */
62   AVAIL_UNSET,
63   /* Function body/variable initializer is unknown.  */
64   AVAIL_NOT_AVAILABLE,
65   /* Function body/variable initializer is known but might be replaced
66      by a different one from other compilation unit and thus needs to
67      be dealt with care.  Like AVAIL_NOT_AVAILABLE it can have
68      arbitrary side effects on escaping variables and functions, while
69      like AVAILABLE it might access static variables.  */
70   AVAIL_INTERPOSABLE,
71   /* Function body/variable initializer is known and will be used in final
72      program.  */
73   AVAIL_AVAILABLE,
74   /* Function body/variable initializer is known and all its uses are
75      explicitly visible within the current unit (i.e. its address is never
76      taken and it is not exported to other units).  Currently used only for functions.  */
77   AVAIL_LOCAL
78 };
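
/* Illustrative sketch (not part of this interface): an IPA analysis would
   typically only trust a body that cannot be interposed.  Assuming CALLEE is
   some cgraph_node * and analyze_body/record_unknown_side_effects are
   hypothetical helpers, one might write

     enum availability avail;
     cgraph_node *target = callee->ultimate_alias_target (&avail);
     if (avail >= AVAIL_AVAILABLE)
       analyze_body (target);            // body will be used in final program
     else
       record_unknown_side_effects ();   // body unknown or interposable

   The comparison relies on the ordering of the enumerators above.  */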
79 
80 /* Classification of symbols WRT partitioning.  */
81 enum symbol_partitioning_class
82 {
83    /* External declarations are ignored by partitioning algorithms and they are
84       added into the boundary later via compute_ltrans_boundary.  */
85    SYMBOL_EXTERNAL,
86    /* Partitioned symbols are put into one of the partitions.  */
87    SYMBOL_PARTITION,
88    /* Duplicated symbols (such as comdat or constant pool references) are
89       copied into every node needing them via add_symbol_to_partition.  */
90    SYMBOL_DUPLICATE
91 };
92 
93 /* Base of all entries in the symbol table.
94    The symtab_node is inherited by cgraph and varpool nodes.  */
95 class GTY((desc ("%h.type"), tag ("SYMTAB_SYMBOL"),
96 	   chain_next ("%h.next"), chain_prev ("%h.previous")))
97   symtab_node
98 {
99 public:
100   /* Return name.  */
101   const char *name () const;
102 
103   /* Return dump name.  */
104   const char *dump_name () const;
105 
106   /* Return asm name.  */
107   const char *asm_name () const;
108 
109   /* Return dump name with assembler name.  */
110   const char *dump_asm_name () const;
111 
112   /* Add node into symbol table.  This function is not used directly, but via
113      cgraph/varpool node creation routines.  */
114   void register_symbol (void);
115 
116   /* Remove symbol from symbol table.  */
117   void remove (void);
118 
119   /* Dump symtab node to F.  */
120   void dump (FILE *f);
121 
122   /* Dump symtab node to stderr.  */
123   void DEBUG_FUNCTION debug (void);
124 
125   /* Verify consistency of node.  */
126   void DEBUG_FUNCTION verify (void);
127 
128   /* Return ipa reference from this symtab_node to
129      REFERRED_NODE or REFERRED_VARPOOL_NODE.  USE_TYPE specifies the type
130      of the use.  */
131   ipa_ref *create_reference (symtab_node *referred_node,
132 			     enum ipa_ref_use use_type);
133 
134   /* Return ipa reference from this symtab_node to
135      REFERRED_NODE or REFERRED_VARPOOL_NODE.  USE_TYPE specifies the type
136      of the use and STMT the statement (if it exists).  */
137   ipa_ref *create_reference (symtab_node *referred_node,
138 			     enum ipa_ref_use use_type, gimple *stmt);
139 
140   /* If VAL is a reference to a function or a variable, add a reference from
141      this symtab_node to the corresponding symbol table node.  Return the new
142      reference or NULL if none was created.  */
143   ipa_ref *maybe_create_reference (tree val, gimple *stmt);
144 
145   /* Clone all references from symtab NODE to this symtab_node.  */
146   void clone_references (symtab_node *node);
147 
148   /* Clone all referring from symtab NODE to this symtab_node, i.e. make
149      everything that refers to NODE also refer to this node.  */
150   void clone_referring (symtab_node *node);
153 
154   /* Clone reference REF to this symtab_node and set its stmt to STMT.  */
155   ipa_ref *clone_reference (ipa_ref *ref, gimple *stmt);
156 
157   /* Find the structure describing a reference to REFERRED_NODE
158      and associated with statement STMT or LTO stmt uid LTO_STMT_UID.  */
159   ipa_ref *find_reference (symtab_node *referred_node, gimple *stmt,
160 			   unsigned int lto_stmt_uid);
161 
162   /* Remove all references that are associated with statement STMT.  */
163   void remove_stmt_references (gimple *stmt);
164 
165   /* Remove all stmt references in non-speculative references.
166      Those are not maintained during inlining & cloning.
167      The exceptions are speculative references that are updated along
168      with the callgraph edges associated with them.  */
169   void clear_stmts_in_references (void);
170 
171   /* Remove all references in ref list.  */
172   void remove_all_references (void);
173 
174   /* Remove all referring items in ref list.  */
175   void remove_all_referring (void);
176 
177   /* Dump references in ref list to FILE.  */
178   void dump_references (FILE *file);
179 
180   /* Dump referring in list to FILE.  */
181   void dump_referring (FILE *);
182 
183   /* Get number of references for this node.  */
184   inline unsigned num_references (void)
185   {
186     return ref_list.references ? ref_list.references->length () : 0;
187   }
188 
189   /* Return the I-th reference in the list, also setting REF to it;
190      return NULL past the end of the list.  */
190   ipa_ref *iterate_reference (unsigned i, ipa_ref *&ref);
191 
192   /* Return the I-th referring item in the list, also setting REF.  */
193   ipa_ref *iterate_referring (unsigned i, ipa_ref *&ref);
194 
195   /* Return the I-th referring alias item in the list, also setting REF.  */
196   ipa_ref *iterate_direct_aliases (unsigned i, ipa_ref *&ref);
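
  /* Illustrative sketch (not part of this interface): the usual way IPA code
     walks all references of a node, assuming NODE is a valid symtab_node *
     and note_address_use is a hypothetical helper:

       ipa_ref *ref;
       for (unsigned i = 0; node->iterate_reference (i, ref); i++)
	 if (ref->use == IPA_REF_ADDR)
	   note_address_use (ref->referred);  */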
197 
198   /* Return true if symtab node and TARGET represent
199      semantically equivalent symbols.  */
200   bool semantically_equivalent_p (symtab_node *target);
201 
202   /* Classify symbol symtab node for partitioning.  */
203   enum symbol_partitioning_class get_partitioning_class (void);
204 
205   /* Return comdat group.  */
206   tree get_comdat_group ()
207     {
208       return x_comdat_group;
209     }
210 
211   /* Return comdat group as identifier_node.  */
212   tree get_comdat_group_id ()
213     {
214       if (x_comdat_group && TREE_CODE (x_comdat_group) != IDENTIFIER_NODE)
215 	x_comdat_group = DECL_ASSEMBLER_NAME (x_comdat_group);
216       return x_comdat_group;
217     }
218 
219   /* Set comdat group.  */
220   void set_comdat_group (tree group)
221     {
222       gcc_checking_assert (!group || TREE_CODE (group) == IDENTIFIER_NODE
223 			   || DECL_P (group));
224       x_comdat_group = group;
225     }
226 
227   /* Return section as string.  */
228   const char * get_section ()
229     {
230       if (!x_section)
231 	return NULL;
232       return x_section->name;
233     }
234 
235   /* Remove node from same comdat group.   */
236   void remove_from_same_comdat_group (void);
237 
238   /* Add this symtab_node to the same comdat group that OLD is in.  */
239   void add_to_same_comdat_group (symtab_node *old_node);
240 
241   /* Dissolve the same_comdat_group list in which NODE resides.  */
242   void dissolve_same_comdat_group_list (void);
243 
244   /* Return true when symtab_node is known to be used from other (non-LTO)
245      object file. Known only when doing LTO via linker plugin.  */
246   bool used_from_object_file_p (void);
247 
248   /* Walk the alias chain to return the symbol NODE is alias of.
249      If NODE is not an alias, return NODE.
250      When AVAILABILITY is non-NULL, get minimal availability in the chain.
251      When REF is non-NULL, assume that reference happens in symbol REF
252      when determining the availability.  */
253   symtab_node *ultimate_alias_target (enum availability *avail = NULL,
254 				      struct symtab_node *ref = NULL);
255 
256   /* Return next reachable static symbol with initializer after NODE.  */
257   inline symtab_node *next_defined_symbol (void);
258 
259   /* Add reference recording that symtab node is alias of TARGET.
260      If TRANSPARENT is true, make the alias a transparent alias.
261      The function can fail in the case of aliasing cycles; in this case
262      it returns false.  */
263   bool resolve_alias (symtab_node *target, bool transparent = false);
264 
265   /* The C++ FE sometimes changes linkage flags after producing same
266      body aliases.  */
267   void fixup_same_cpp_alias_visibility (symtab_node *target);
268 
269   /* Call callback on symtab node and aliases associated to this node.
270      When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
271      skipped.  */
272   bool call_for_symbol_and_aliases (bool (*callback) (symtab_node *, void *),
273 				    void *data,
274 				    bool include_overwrite);
275 
276   /* If the node cannot be interposed by static or dynamic linker to point to
277      a different definition, return this symbol.  Otherwise look for an alias
278      with such a property and if none exists, introduce a new one.  */
279   symtab_node *noninterposable_alias (void);
280 
281   /* Return node that alias is aliasing.  */
282   inline symtab_node *get_alias_target (void);
283 
284   /* Set section for symbol and its aliases.  */
285   void set_section (const char *section);
286 
287   /* Set section, do not recurse into aliases.
288      When one wants to change section of symbol and its aliases,
289      use set_section.  */
290   void set_section_for_node (const char *section);
291 
292   /* Set initialization priority to PRIORITY.  */
293   void set_init_priority (priority_type priority);
294 
295   /* Return the initialization priority.  */
296   priority_type get_init_priority ();
297 
298   /* Return availability of NODE when referenced from REF.  */
299   enum availability get_availability (symtab_node *ref = NULL);
300 
301   /* Return true if NODE binds to current definition in final executable
302      when referenced from REF.  If REF is NULL return conservative value
303      for any reference.  */
304   bool binds_to_current_def_p (symtab_node *ref = NULL);
305 
306   /* Make DECL local.  */
307   void make_decl_local (void);
308 
309   /* Copy visibility from N.  */
310   void copy_visibility_from (symtab_node *n);
311 
312   /* Return the desired alignment of the definition.  This is NOT the alignment
313      useful to access THIS, because THIS may be interposable and DECL_ALIGN
314      should be used instead.  It must however be guaranteed when outputting
315      the definition of THIS.  */
316   unsigned int definition_alignment ();
317 
318   /* Return true if alignment can be increased.  */
319   bool can_increase_alignment_p ();
320 
321   /* Increase alignment of symbol to ALIGN.  */
322   void increase_alignment (unsigned int align);
323 
324   /* Return true if list contains an alias.  */
325   bool has_aliases_p (void);
326 
327   /* Return true when the symbol is a real symbol, i.e. it is not an inline
328      clone or an abstract function kept for debug info purposes only.  */
329   bool real_symbol_p (void);
330 
331   /* Return true when the symbol needs to be output to the LTO symbol table.  */
332   bool output_to_lto_symbol_table_p (void);
333 
334   /* Determine if the symbol declaration is needed.  That is, visible to
335      something either outside this translation unit or to something magic
336      in the system configury.  This function is used just during symbol creation.  */
337   bool needed_p (void);
338 
339   /* Return true if this symbol is a function from the C frontend specified
340      directly in RTL form (with "__RTL").  */
341   bool native_rtl_p () const;
342 
343   /* Return true when there are references to the node.  */
344   bool referred_to_p (bool include_self = true);
345 
346   /* Return true if the symbol can be discarded by the linker from the binary.
347      Assume that the symbol is used (so there is no need to take into account
348      garbage collecting linkers).
349 
350      This can happen for comdats, commons and weaks when they are prevailed
351      by another definition at static linking time.  */
352   inline bool
353   can_be_discarded_p (void)
354   {
355     return (DECL_EXTERNAL (decl)
356 	    || ((get_comdat_group ()
357 		 || DECL_COMMON (decl)
358 		 || (DECL_SECTION_NAME (decl) && DECL_WEAK (decl)))
359 		&& ((resolution != LDPR_PREVAILING_DEF
360 		     && resolution != LDPR_PREVAILING_DEF_IRONLY_EXP)
361 		    || flag_incremental_link)
362 		&& resolution != LDPR_PREVAILING_DEF_IRONLY));
363   }
364 
365   /* Return true if NODE is local to a particular COMDAT group, and must not
366      be named from outside the COMDAT.  This is used for C++ decloned
367      constructors.  */
368   inline bool comdat_local_p (void)
369   {
370     return (same_comdat_group && !TREE_PUBLIC (decl));
371   }
372 
373   /* Return true if ONE and TWO are part of the same COMDAT group.  */
374   inline bool in_same_comdat_group_p (symtab_node *target);
375 
376   /* Return true if symbol is known to be nonzero.  */
377   bool nonzero_address ();
378 
379   /* Return 0 if the symbol is known to have a different address than S2,
380      return 1 if the symbol is known to have the same address as S2,
381      and return 2 otherwise.
382 
383      If MEMORY_ACCESSED is true, assume that the memory pointed to by both
384      THIS and S2 is going to be accessed.  This eliminates the situations when
385      either THIS or S2 is NULL and is useful for comparing bases when deciding
386      about memory aliasing.  */
387   int equal_address_to (symtab_node *s2, bool memory_accessed = false);
388 
389   /* Return true if symbol's address may possibly be compared to other
390      symbol's address.  */
391   bool address_matters_p ();
392 
393   /* Return true if NODE's address can be compared.  This uses properties
394      of NODE only and does not look if the address is actually taken in
395      interesting way.  For that use ADDRESS_MATTERS_P instead.  */
396   bool address_can_be_compared_p (void);
397 
398   /* Return symbol table node associated with DECL, if any,
399      and NULL otherwise.  */
400   static inline symtab_node *get (const_tree decl)
401   {
402     /* Check that we are called for sane type of object - functions
403        and static or external variables.  */
404     gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL
405 			 || (TREE_CODE (decl) == VAR_DECL
406 			     && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
407 				 || in_lto_p)));
408     /* Check that the mapping is sane - perhaps this check can go away,
409        but at the moment frontends tend to corrupt the mapping by calling
410        memcpy/memset on the tree nodes.  */
411     gcc_checking_assert (!decl->decl_with_vis.symtab_node
412 			 || decl->decl_with_vis.symtab_node->decl == decl);
413     return decl->decl_with_vis.symtab_node;
414   }
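
  /* Illustrative sketch (not part of this interface), assuming DECL is a
     FUNCTION_DECL or VAR_DECL already known to the frontend:

       if (symtab_node *snode = symtab_node::get (decl))
	 fprintf (stderr, "symbol: %s\n", snode->dump_name ());
       // otherwise DECL has no symbol table entry (yet).  */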
415 
416   /* Try to find a symtab node for declaration DECL and if it does not
417      exist or if it corresponds to an inline clone, create a new one.  */
418   static inline symtab_node * get_create (tree node);
419 
420   /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
421      Return NULL if there's no such node.  */
422   static symtab_node *get_for_asmname (const_tree asmname);
423 
424   /* Verify symbol table for internal consistency.  */
425   static DEBUG_FUNCTION void verify_symtab_nodes (void);
426 
427   /* Perform internal consistency checks, if they are enabled.  */
428   static inline void checking_verify_symtab_nodes (void);
429 
430   /* Type of the symbol.  */
431   ENUM_BITFIELD (symtab_type) type : 8;
432 
433   /* The symbols resolution.  */
434   ENUM_BITFIELD (ld_plugin_symbol_resolution) resolution : 8;
435 
436   /*** Flags representing the symbol type.  ***/
437 
438   /* True when symbol corresponds to a definition in current unit.
439      Set via finalize_function or finalize_decl.  */
440   unsigned definition : 1;
441   /* True when symbol is an alias.
442      Set by assemble_alias.  */
443   unsigned alias : 1;
444   /* When true the alias is translated into its target symbol either by GCC
445      or assembler (it also may just be a duplicate declaration of the same
446      linker name).
447 
448      Currently transparent aliases come in three different flavors
449        - aliases having the same assembler name as their target (aka duplicated
450 	 declarations). In this case the assembler names compare via
451 	 assembler_names_equal_p and weakref is false
452        - aliases that are renamed at the time they are output to the final
453 	 file by varasm.c.  For those DECL_ASSEMBLER_NAME has
454 	 IDENTIFIER_TRANSPARENT_ALIAS set and thus also their assembler
455 	 name must be unique.
456 	 Weakrefs belong to this category when we target an assembler without
457 	 the .weakref directive.
458        - weakrefs that are renamed by the assembler via the .weakref directive.
459 	 In this case the alias may or may not be a definition (depending on whether
460 	 the target declaration was seen by the compiler), and weakref is set.
461 	 Unless we are before renaming statics, assembler names are different.
462 
463      Given that we now support duplicate declarations, the second option is
464      redundant and will be removed.  */
465   unsigned transparent_alias : 1;
466   /* True when alias is a weakref.  */
467   unsigned weakref : 1;
468   /* The C++ frontend produces same body aliases and extra name aliases for
469      virtual functions and vtables that are obviously equivalent.
470      Those aliases are a bit special, especially because the C++ frontend
471      visibility code is so ugly it cannot get them right the first time
472      and their visibility needs to be copied from their "masters" at
473      the end of parsing.  */
474   unsigned cpp_implicit_alias : 1;
475   /* Set once the definition was analyzed.  The list of references and
476      other properties are built during analysis.  */
477   unsigned analyzed : 1;
478   /* Set for write-only variables.  */
479   unsigned writeonly : 1;
480   /* Visibility of symbol was used for further optimization; do not
481      permit further changes.  */
482   unsigned refuse_visibility_changes : 1;
483 
484   /*** Visibility and linkage flags.  ***/
485 
486   /* Set when function is visible by other units.  */
487   unsigned externally_visible : 1;
488   /* Don't reorder to other symbols having this set.  */
489   unsigned no_reorder : 1;
490   /* The symbol will be assumed to be used in an invisible way (like
491      by a toplevel asm statement).  */
492   unsigned force_output : 1;
493   /* Like FORCE_OUTPUT, but in this case it is the ABI requiring the symbol to
494      be exported.  Unlike FORCE_OUTPUT this flag gets cleared for symbols
495      promoted to static and it does not inhibit optimization.  */
496   unsigned forced_by_abi : 1;
497   /* True when the name is known to be unique and thus it does not need mangling.  */
498   unsigned unique_name : 1;
499   /* Specify whether the section was set by user or by
500      compiler via -ffunction-sections.  */
501   unsigned implicit_section : 1;
502   /* True when body and other characteristics have been removed by
503      symtab_remove_unreachable_nodes. */
504   unsigned body_removed : 1;
505 
506   /*** WHOPR Partitioning flags.
507        These flags are used at ltrans stage when only part of the callgraph is
508        available. ***/
509 
510   /* Set when variable is used from other LTRANS partition.  */
511   unsigned used_from_other_partition : 1;
512   /* Set when function is available in the other LTRANS partition.
513      During WPA output it is used to mark nodes that are present in
514      multiple partitions.  */
515   unsigned in_other_partition : 1;
516 
517 
518 
519   /*** other flags.  ***/
520 
521   /* Set when symbol has its address taken.  */
522   unsigned address_taken : 1;
523   /* Set when init priority is set.  */
524   unsigned in_init_priority_hash : 1;
525 
526   /* Set when symbol needs to be streamed into LTO bytecode for LTO, or in case
527      of offloading, for separate compilation for a different target.  */
528   unsigned need_lto_streaming : 1;
529 
530   /* Set when symbol can be streamed into bytecode for offloading.  */
531   unsigned offloadable : 1;
532 
533 
534   /* Ordering of all symtab entries.  */
535   int order;
536 
537   /* Declaration representing the symbol.  */
538   tree decl;
539 
540   /* Linked list of symbol table entries starting with symtab_nodes.  */
541   symtab_node *next;
542   symtab_node *previous;
543 
544   /* Linked list of symbols with the same asm name.  There may be multiple
545      entries for a single symbol name during LTO, because symbols are renamed
546      only after partitioning.
547 
548      Because inline clones are kept in the assembler name hash, they also produce
549      duplicate entries.
550 
551      There are also several long standing bugs where frontends and builtin
552      code produce duplicated decls.  */
553   symtab_node *next_sharing_asm_name;
554   symtab_node *previous_sharing_asm_name;
555 
556   /* Circular list of nodes in the same comdat group if non-NULL.  */
557   symtab_node *same_comdat_group;
558 
559   /* Vectors of referring and referenced entities.  */
560   ipa_ref_list ref_list;
561 
562   /* Alias target.  May be either a DECL pointer or an ASSEMBLER_NAME pointer
563      depending on what was known to the frontend at creation time.
564      Once the alias is resolved, this pointer becomes NULL.  */
565   tree alias_target;
566 
567   /* File stream where this node is being written to.  */
568   struct lto_file_decl_data * lto_file_data;
569 
570   PTR GTY ((skip)) aux;
571 
572   /* Comdat group the symbol is in.  Can be private if GGC allowed that.  */
573   tree x_comdat_group;
574 
575   /* Section name. Again can be private, if allowed.  */
576   section_hash_entry *x_section;
577 
578 protected:
579   /* Dump base fields of symtab nodes to F.  Not to be used directly.  */
580   void dump_base (FILE *);
581 
582   /* Verify common part of symtab node.  */
583   bool DEBUG_FUNCTION verify_base (void);
584 
585   /* Remove node from symbol table.  This function is not used directly, but via
586      cgraph/varpool node removal routines.  */
587   void unregister (void);
588 
589   /* Return the initialization and finalization priority information for
590      DECL.  If there is no previous priority information, a freshly
591      allocated structure is returned.  */
592   struct symbol_priority_map *priority_info (void);
593 
594   /* Worker for call_for_symbol_and_aliases_1.  */
595   bool call_for_symbol_and_aliases_1 (bool (*callback) (symtab_node *, void *),
596 				      void *data,
597 				      bool include_overwrite);
598 private:
599   /* Worker for set_section.  */
600   static bool set_section (symtab_node *n, void *s);
601 
602   /* Worker for symtab_resolve_alias.  */
603   static bool set_implicit_section (symtab_node *n, void *);
604 
605   /* Worker searching noninterposable alias.  */
606   static bool noninterposable_alias (symtab_node *node, void *data);
607 
608   /* Worker for ultimate_alias_target.  */
609   symtab_node *ultimate_alias_target_1 (enum availability *avail = NULL,
610 					symtab_node *ref = NULL);
611 
612   /* Get dump name with normal or assembly name.  */
613   const char *get_dump_name (bool asm_name_p) const;
614 };
615 
616 inline void
617 symtab_node::checking_verify_symtab_nodes (void)
618 {
619   if (flag_checking)
620     symtab_node::verify_symtab_nodes ();
621 }
622 
623 /* Walk all aliases for NODE.  */
624 #define FOR_EACH_ALIAS(node, alias) \
625   for (unsigned x_i = 0; node->iterate_direct_aliases (x_i, alias); x_i++)
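
/* Illustrative sketch (not part of this interface), assuming NODE is a valid
   symtab_node * and handle_alias is a hypothetical helper; the alias symbol
   itself is the referring side of each IPA_REF_ALIAS reference:

     ipa_ref *ref;
     FOR_EACH_ALIAS (node, ref)
       handle_alias (ref->referring);  */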
626 
627 /* This is the information that is put into the cgraph local structure
628    to recover a function.  */
629 struct lto_file_decl_data;
630 
631 extern const char * const cgraph_availability_names[];
632 extern const char * const ld_plugin_symbol_resolution_names[];
633 extern const char * const tls_model_names[];
634 
635 /* Sub-structure of cgraph_node.  Holds information about thunk, used only for
636    same body aliases.
637 
638    Thunks are basically wrappers around methods which are introduced in case
639    of multiple inheritance in order to adjust the value of the "this" pointer
640    or of the returned value.
641 
642    In the case of this-adjusting thunks, each back-end can override the
643    can_output_mi_thunk/output_mi_thunk target hooks to generate a minimal thunk
644    (with a tail call for instance) directly as assembly.  For the default hook
645    or for the case where the can_output_mi_thunk hooks return false, the thunk
646    is gimplified and lowered using the regular machinery.  */
647 
648 struct GTY(()) cgraph_thunk_info {
649   /* Offset used to adjust "this".  */
650   HOST_WIDE_INT fixed_offset;
651 
652   /* Offset in the virtual table to get the offset to adjust "this".  Valid iff
653      VIRTUAL_OFFSET_P is true.  */
654   HOST_WIDE_INT virtual_value;
655 
656   /* Thunk target, i.e. the method that this thunk wraps.  Depending on the
657      TARGET_USE_LOCAL_THUNK_ALIAS_P macro, this may have to be a new alias.  */
658   tree alias;
659 
660   /* Nonzero for a "this" adjusting thunk and zero for a result adjusting
661      thunk.  */
662   bool this_adjusting;
663 
664   /* If true, this thunk is what we call a virtual thunk.  In this case:
665      * for this-adjusting thunks, after the FIXED_OFFSET based adjustment is
666        done, add to the result the offset found in the vtable at:
667 	 vptr + VIRTUAL_VALUE
668      * for result-adjusting thunks, the FIXED_OFFSET adjustment is done after
669        the virtual one.  */
670   bool virtual_offset_p;
671 
672   /* ??? True for special kind of thunks, seems related to instrumentation.  */
673   bool add_pointer_bounds_args;
674 
675   /* Set to true when the alias node (the cgraph_node to which this struct belongs)
676      is a thunk.  Access to any other fields is invalid if this is false.  */
677   bool thunk_p;
678 };
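
/* Illustrative sketch (pseudo code, not the actual lowering done by
   expand_thunk): following the field descriptions above, a this-adjusting
   thunk conceptually performs

     char *p = (char *) this_ptr + fixed_offset;
     if (virtual_offset_p)
       p += *(ptrdiff_t *) (*(char **) p + virtual_value);
     // ... and then tail-calls the wrapped method with P as "this".  */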
679 
680 /* Information about the function collected locally.
681    Available after function is analyzed.  */
682 
683 struct GTY(()) cgraph_local_info {
684   /* Set when function is visible in current compilation unit only and
685      its address is never taken.  */
686   unsigned local : 1;
687 
688   /* False when there is something that makes versioning impossible.  */
689   unsigned versionable : 1;
690 
691   /* False when function calling convention and signature cannot be changed.
692      This is the case when __builtin_apply_args is used.  */
693   unsigned can_change_signature : 1;
694 
695   /* True when the function has been originally extern inline, but it is
696      redefined now.  */
697   unsigned redefined_extern_inline : 1;
698 
699   /* True if the function may enter serial irrevocable mode.  */
700   unsigned tm_may_enter_irr : 1;
701 };
702 
703 /* Information about the function that needs to be computed globally
704    once compilation is finished.  Available only with -funit-at-a-time.  */
705 
706 struct GTY(()) cgraph_global_info {
707   /* For inline clones this points to the function they will be
708      inlined into.  */
709   cgraph_node *inlined_to;
710 };
711 
712 /* Represent which DECL tree (or reference to such tree)
713    will be replaced by another tree while versioning.  */
714 struct GTY(()) ipa_replace_map
715 {
716   /* The tree that will be replaced.  */
717   tree old_tree;
718   /* The new (replacing) tree.  */
719   tree new_tree;
720   /* Parameter number to replace, when old_tree is NULL.  */
721   int parm_num;
722   /* True when a substitution should be done, false otherwise.  */
723   bool replace_p;
724   /* True when we replace a reference to old_tree.  */
725   bool ref_p;
726 };
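
/* Illustrative sketch (not part of this interface), loosely modelled on how
   IPA-CP builds its clones; the concrete values are hypothetical.  Request
   that parameter number 1 be replaced by the constant 0 in a virtual clone:

     vec<ipa_replace_map *, va_gc> *tree_map = NULL;
     ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
     map->old_tree = NULL_TREE;
     map->parm_num = 1;
     map->new_tree = build_int_cst (integer_type_node, 0);
     map->replace_p = true;
     map->ref_p = false;
     vec_safe_push (tree_map, map);  */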
727 
728 struct GTY(()) cgraph_clone_info
729 {
730   vec<ipa_replace_map *, va_gc> *tree_map;
731   bitmap args_to_skip;
732   bitmap combined_args_to_skip;
733 };
734 
735 enum cgraph_simd_clone_arg_type
736 {
737   SIMD_CLONE_ARG_TYPE_VECTOR,
738   SIMD_CLONE_ARG_TYPE_UNIFORM,
739   /* These are only for integer/pointer arguments passed by value.  */
740   SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP,
741   SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP,
742   /* These 6 are only for reference type arguments or arguments passed
743      by reference.  */
744   SIMD_CLONE_ARG_TYPE_LINEAR_REF_CONSTANT_STEP,
745   SIMD_CLONE_ARG_TYPE_LINEAR_REF_VARIABLE_STEP,
746   SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_CONSTANT_STEP,
747   SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_VARIABLE_STEP,
748   SIMD_CLONE_ARG_TYPE_LINEAR_VAL_CONSTANT_STEP,
749   SIMD_CLONE_ARG_TYPE_LINEAR_VAL_VARIABLE_STEP,
750   SIMD_CLONE_ARG_TYPE_MASK
751 };
752 
753 /* Function arguments in the original function of a SIMD clone.
754    Supplementary data for `struct simd_clone'.  */
755 
756 struct GTY(()) cgraph_simd_clone_arg {
757   /* Original function argument as it originally existed in
758      DECL_ARGUMENTS.  */
759   tree orig_arg;
760 
761   /* orig_arg's type (or for extern functions the type from
762      TYPE_ARG_TYPES).  */
763   tree orig_type;
764 
765   /* If argument is a vector, this holds the vector version of
766      orig_arg that after adjusting the argument types will live in
767      DECL_ARGUMENTS.  Otherwise, this is NULL.
768 
769      This basically holds:
770        vector(simdlen) __typeof__(orig_arg) new_arg.  */
771   tree vector_arg;
772 
773   /* vector_arg's type (or for extern functions the new vector type).  */
774   tree vector_type;
775 
776   /* If argument is a vector, this holds the array where the simd
777      argument is held while executing the simd clone function.  This
778      is a local variable in the cloned function.  Its content is
779      copied from vector_arg upon entry to the clone.
780 
781      This basically holds:
782        __typeof__(orig_arg) simd_array[simdlen].  */
783   tree simd_array;
784 
785   /* A SIMD clone's argument can be either linear (constant or
786      variable), uniform, or vector.  */
787   enum cgraph_simd_clone_arg_type arg_type;
788 
789   /* For arg_type SIMD_CLONE_ARG_TYPE_LINEAR_*CONSTANT_STEP this is
790      the constant linear step, if arg_type is
791      SIMD_CLONE_ARG_TYPE_LINEAR_*VARIABLE_STEP, this is index of
792      the uniform argument holding the step, otherwise 0.  */
793   HOST_WIDE_INT linear_step;
794 
795   /* Variable alignment if available, otherwise 0.  */
796   unsigned int alignment;
797 };
798 
799 /* Specific data for a SIMD function clone.  */
800 
801 struct GTY(()) cgraph_simd_clone {
802   /* Number of words in the SIMD lane associated with this clone.  */
803   unsigned int simdlen;
804 
805   /* Number of annotated function arguments in `args'.  This is
806      usually the number of named arguments in FNDECL.  */
807   unsigned int nargs;
808 
809   /* Max hardware vector size in bits for integral vectors.  */
810   unsigned int vecsize_int;
811 
812   /* Max hardware vector size in bits for floating point vectors.  */
813   unsigned int vecsize_float;
814 
815   /* Machine mode of the mask argument(s), if they are to be passed
816      as bitmasks in integer argument(s).  VOIDmode if masks are passed
817      as vectors of characteristic type.  */
818   machine_mode mask_mode;
819 
820   /* The mangling character for a given vector size.  This is used
821      to determine the ISA mangling bit as specified in the Intel
822      Vector ABI.  */
823   unsigned char vecsize_mangle;
824 
825   /* True if this is the masked, in-branch version of the clone,
826      otherwise false.  */
827   unsigned int inbranch : 1;
828 
829   /* Doubly linked list of SIMD clones.  */
830   cgraph_node *prev_clone, *next_clone;
831 
832   /* Original cgraph node the SIMD clones were created for.  */
833   cgraph_node *origin;
834 
835   /* Annotated function arguments for the original function.  */
836   cgraph_simd_clone_arg GTY((length ("%h.nargs"))) args[1];
837 };
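
/* Illustrative sketch (not part of this interface), assuming NODE is a
   cgraph_node * that carries SIMD clone information:

     if (cgraph_simd_clone *sc = node->simdclone)
       for (unsigned i = 0; i < sc->nargs; i++)
	 if (sc->args[i].arg_type == SIMD_CLONE_ARG_TYPE_UNIFORM)
	   ;  // the argument is broadcast to all lanes, no per-lane copy

   The simdclone field itself is declared in cgraph_node below.  */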
838 
839 /* Function Multiversioning info.  */
840 struct GTY((for_user)) cgraph_function_version_info {
841   /* The cgraph_node for which the function version info is stored.  */
842   cgraph_node *this_node;
843   /* Chains all the semantically identical function versions.  The
844      first function in this chain is the version_info node of the
845      default function.  */
846   cgraph_function_version_info *prev;
847   /* If this version node corresponds to a dispatcher for function
848      versions, this points to the version info node of the default
849      function, the first node in the chain.  */
850   cgraph_function_version_info *next;
851   /* If this node corresponds to a function version, this points
852      to the dispatcher function decl, which is the function that must
853      be called to execute the right function version at run-time.
854 
855      If this cgraph node is a dispatcher (if dispatcher_function is
856      true, in the cgraph_node struct) for function versions, this
857      points to resolver function, which holds the function body of the
858      dispatcher. The dispatcher decl is an alias to the resolver
859      function decl.  */
860   tree dispatcher_resolver;
861 };
862 
863 #define DEFCIFCODE(code, type, string)	CIF_ ## code,
864 /* Reasons for inlining failures.  */
865 
866 enum cgraph_inline_failed_t {
867 #include "cif-code.def"
868   CIF_N_REASONS
869 };
870 
871 enum cgraph_inline_failed_type_t
872 {
873   CIF_FINAL_NORMAL = 0,
874   CIF_FINAL_ERROR
875 };
876 
877 struct cgraph_edge;
878 
879 struct cgraph_edge_hasher : ggc_ptr_hash<cgraph_edge>
880 {
881   typedef gimple *compare_type;
882 
883   static hashval_t hash (cgraph_edge *);
884   static hashval_t hash (gimple *);
885   static bool equal (cgraph_edge *, gimple *);
886 };
887 
888 /* The cgraph data structure.
889    Each function decl has an assigned cgraph_node listing its callees and callers.  */
890 
891 struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node {
892 public:
893   /* Remove the node from cgraph and all inline clones inlined into it.
894      However, skip removal of FORBIDDEN_NODE and return true if it needs to be
895      removed.  This allows the function to be called from an outer loop walking
896      the clone tree.  */
897   bool remove_symbol_and_inline_clones (cgraph_node *forbidden_node = NULL);
898 
899   /* Record all references from cgraph_node that are taken
900      in statement STMT.  */
901   void record_stmt_references (gimple *stmt);
902 
903   /* Like cgraph_set_call_stmt but walk the clone tree and update all
904      clones sharing the same function body.
905      When UPDATE_SPECULATIVE is true, all three components of a
906      speculative edge get updated.  Otherwise we update only the direct
907      call.  */
908   void set_call_stmt_including_clones (gimple *old_stmt, gcall *new_stmt,
909 				       bool update_speculative = true);
910 
911   /* Walk the alias chain to return the function cgraph_node is alias of.
912      Walk through thunk, too.
913      When AVAILABILITY is non-NULL, get minimal availability in the chain.
914      When REF is non-NULL, assume that reference happens in symbol REF
915      when determining the availability.  */
916   cgraph_node *function_symbol (enum availability *avail = NULL,
917 				struct symtab_node *ref = NULL);
918 
919   /* Walk the alias chain to return the function cgraph_node is alias of.
920      Walk through non virtual thunks, too.  Thus we return either a function
921      or a virtual thunk node.
922      When AVAILABILITY is non-NULL, get minimal availability in the chain.
923      When REF is non-NULL, assume that reference happens in symbol REF
924      when determining the availability.  */
925   cgraph_node *function_or_virtual_thunk_symbol
926 				(enum availability *avail = NULL,
927 				 struct symtab_node *ref = NULL);
928 
929   /* Create a node representing a clone of N executed COUNT times.  Decrease
930      the execution counts from the original node too.
931      The new clone will have decl set to DECL that may or may not be the same
932      as the decl of N.
933 
934      When UPDATE_ORIGINAL is true, the counts are subtracted from the original
935      function's profile to reflect the fact that part of the execution is
936      handled by the node.
937      When CALL_DUPLICATION_HOOK is true, the ipa passes are notified about
938      the new clone.  Otherwise the caller is responsible for doing so later.
939 
940      If the new node is being inlined into another one, NEW_INLINED_TO should be
941      the outline function the new one is (even indirectly) inlined to.
942      All hooks will see this in the node's global.inlined_to, when invoked.
943      It can be NULL if the node is not inlined.  SUFFIX is a string that is
944      appended to the original name.  */
945   cgraph_node *create_clone (tree decl, profile_count count,
946 			     bool update_original,
947 			     vec<cgraph_edge *> redirect_callers,
948 			     bool call_duplication_hook,
949 			     cgraph_node *new_inlined_to,
950 			     bitmap args_to_skip, const char *suffix = NULL);
951 
952   /* Create callgraph node clone with new declaration.  The actual body will
953      be copied later at compilation stage.  */
954   cgraph_node *create_virtual_clone (vec<cgraph_edge *> redirect_callers,
955 				     vec<ipa_replace_map *, va_gc> *tree_map,
956 				     bitmap args_to_skip, const char * suffix);
957 
958   /* The cgraph node is being removed from the symbol table; see if its entry
959      can be replaced by another inline clone.  */
960   cgraph_node *find_replacement (void);
961 
962   /* Create a new cgraph node which is the new version of the
963      callgraph node.  REDIRECT_CALLERS holds the caller
964      edges which should be redirected to point to
965      NEW_VERSION.  All the callee edges of the node
966      are cloned to the new version node.  Return the new
967      version node.
968 
969      If non-NULL, BBS_TO_COPY determines which basic blocks
970      are copied, to prevent duplication of calls that are dead
971      in the clone.
972 
973      SUFFIX is a string that is appended to the original name.  */
974 
975   cgraph_node *create_version_clone (tree new_decl,
976 				    vec<cgraph_edge *> redirect_callers,
977 				    bitmap bbs_to_copy,
978 				    const char *suffix = NULL);
979 
980   /* Perform function versioning.
981      Function versioning includes copying of the tree and
982      a callgraph update (creating a new cgraph node and updating
983      its callees and callers).
984 
985      REDIRECT_CALLERS varray includes the edges to be redirected
986      to the new version.
987 
988      TREE_MAP is a mapping of tree nodes we want to replace with
989      new ones (according to results of prior analysis).
990 
991      If non-NULL, ARGS_TO_SKIP determines which function parameters to remove
992      from the new version.
993      If SKIP_RETURN is true, the new version will return void.
994      If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
995      If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.
996 
997      Return the new version's cgraph node.  */
998   cgraph_node *create_version_clone_with_body
999     (vec<cgraph_edge *> redirect_callers,
1000      vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
1001      bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
1002      const char *clone_name);
1003 
1004   /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
1005      corresponding to cgraph_node.  */
1006   cgraph_function_version_info *insert_new_function_version (void);
1007 
1008   /* Get the cgraph_function_version_info node corresponding to node.  */
1009   cgraph_function_version_info *function_version (void);
1010 
1011   /* Discover all functions and variables that are trivially needed, analyze
1012      them as well as all functions and variables referred to by them.  */
1013   void analyze (void);
1014 
1015   /* Add a thunk alias into the callgraph.  The alias declaration is ALIAS and it
1016      aliases DECL with adjustments made to the first parameter.
1017      See comments in struct cgraph_thunk_info for details on the parameters.  */
1018   cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
1019 			      HOST_WIDE_INT fixed_offset,
1020 			      HOST_WIDE_INT virtual_value,
1021 			      tree virtual_offset,
1022 			      tree real_alias);
1023 
1024 
1025   /* Return node that alias is aliasing.  */
1026   inline cgraph_node *get_alias_target (void);
1027 
1028   /* Given a function symbol, walk the alias chain to return the function the
1029      node is an alias of.  Do not walk through thunks.
1030      When AVAILABILITY is non-NULL, get minimal availability in the chain.
1031      When REF is non-NULL, assume that reference happens in symbol REF
1032      when determining the availability.  */
1033 
1034   cgraph_node *ultimate_alias_target (availability *availability = NULL,
1035 				      symtab_node *ref = NULL);
1036 
1037   /* Expand the thunk to gimple if possible.
1038      When FORCE_GIMPLE_THUNK is true, a gimple thunk is created and
1039      no assembler is produced.
1040      When OUTPUT_ASM_THUNKS is true, also produce assembler for
1041      thunks that are not lowered.  */
1042   bool expand_thunk (bool output_asm_thunks, bool force_gimple_thunk);
1043 
1044   /*  Call expand_thunk on all callers that are thunks and analyze those
1045       nodes that were expanded.  */
1046   void expand_all_artificial_thunks ();
1047 
1048   /* Assemble thunks and aliases associated to node.  */
1049   void assemble_thunks_and_aliases (void);
1050 
1051   /* Expand function specified by node.  */
1052   void expand (void);
1053   /* As a GCC extension we allow redefinition of the function.  The
1054   /* As an GCC extension we allow redefinition of the function.  The
1055      semantics when both copies of bodies differ is not well defined.
1056      We replace the old body with new body so in unit at a time mode
1057      we always use new body, while in normal mode we may end up with
1058      old body inlined into some functions and new body expanded and
1059      inlined in others.  */
1060   void reset (void);
1061 
1062   /* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
1063      kind of wrapper method.  */
1064   void create_wrapper (cgraph_node *target);
1065 
1066   /* Verify cgraph nodes of the cgraph node.  */
1067   void DEBUG_FUNCTION verify_node (void);
1068 
1069   /* Remove function from symbol table.  */
1070   void remove (void);
1071 
1072   /* Dump call graph node to file F.  */
1073   void dump (FILE *f);
1074 
1075   /* Dump call graph node to stderr.  */
1076   void DEBUG_FUNCTION debug (void);
1077 
1078   /* When doing LTO, read cgraph_node's body from disk if it is not already
1079      present.  */
1080   bool get_untransformed_body (void);
1081 
1082   /* Prepare function body.  When doing LTO, read cgraph_node's body from disk
1083      if it is not already present.  When some IPA transformations are scheduled,
1084      apply them.  */
1085   bool get_body (void);
1086 
1087   /* Release memory used to represent body of function.
1088      Use this only for functions that are released before being translated to
1089      target code (i.e. RTL).  Functions that are compiled to RTL and beyond
1090      are freed in final.c via free_after_compilation ().  */
1091   void release_body (bool keep_arguments = false);
1092 
1093   /* Return the DECL_STRUCT_FUNCTION of the function.  */
1094   struct function *get_fun (void);
1095 
1096   /* The cgraph_node is no longer a nested function; update cgraph accordingly.  */
1097   void unnest (void);
1098 
1099   /* Bring cgraph node local.  */
1100   void make_local (void);
1101 
1102   /* Likewise indicate that a node has its address taken.  */
1103   void mark_address_taken (void);
1104 
1105   /* Set finalization priority to PRIORITY.  */
1106   void set_fini_priority (priority_type priority);
1107 
1108   /* Return the finalization priority.  */
1109   priority_type get_fini_priority (void);
1110 
1111   /* Create edge from a given function to CALLEE in the cgraph.  */
1112   cgraph_edge *create_edge (cgraph_node *callee,
1113 			    gcall *call_stmt, profile_count count);
1114 
1115   /* Create an indirect edge with a yet-undetermined callee where the call
1116      statement destination is a formal parameter of the caller with index
1117      PARAM_INDEX. */
1118   cgraph_edge *create_indirect_edge (gcall *call_stmt, int ecf_flags,
1119 				     profile_count count,
1120 				     bool compute_indirect_info = true);
1121 
1122   /* Like cgraph_create_edge, walk the clone tree and update all clones sharing
1123      the same function body.  If clones already have an edge for OLD_STMT, only
1124      update the edge the same way cgraph_set_call_stmt_including_clones does.  */
1125   void create_edge_including_clones (cgraph_node *callee,
1126 				     gimple *old_stmt, gcall *stmt,
1127 				     profile_count count,
1128 				     cgraph_inline_failed_t reason);
1129 
1130   /* Return the callgraph edge representing the GIMPLE_CALL statement
1131      CALL_STMT.  */
1132   cgraph_edge *get_edge (gimple *call_stmt);
1133 
1134   /* Collect all callers of cgraph_node and its aliases that are known to lead
1135      to NODE (i.e. are not overwritable) and that are not thunks.  */
1136   vec<cgraph_edge *> collect_callers (void);
1137 
1138   /* Remove all callers from the node.  */
1139   void remove_callers (void);
1140 
1141   /* Remove all callees from the node.  */
1142   void remove_callees (void);
1143 
1144   /* Return function availability.  See cgraph.h for description of individual
1145      return values.  */
1146   enum availability get_availability (symtab_node *ref = NULL);
1147 
1148   /* Set TREE_NOTHROW on cgraph_node's decl and on aliases of the node
1149      if any to NOTHROW.  */
1150   bool set_nothrow_flag (bool nothrow);
1151 
1152   /* Set DECL_IS_MALLOC on cgraph_node's decl and on aliases of the node
1153      if any.  */
1154   bool set_malloc_flag (bool malloc_p);
1155 
1156   /* If SET_CONST is true, mark the function, aliases and thunks as ECF_CONST.
1157      If SET_CONST is false, clear the flag.
1158 
1159      When setting the flag be careful about possible interposition and
1160      do not set the flag for functions that can be interposed; set the pure
1161      flag for functions that can bind to another definition.
1162 
1163      Return true if any change was done.  */
1164 
1165   bool set_const_flag (bool set_const, bool looping);
1166 
1167   /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
1168      if any to PURE.
1169 
1170      When setting the flag, be careful about possible interposition.
1171      Return true if any change was done. */
1172 
1173   bool set_pure_flag (bool pure, bool looping);
1174 
1175   /* Call callback on function and aliases associated to the function.
1176      When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1177      skipped. */
1178 
1179   bool call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
1180 						      void *),
1181 				    void *data, bool include_overwritable);
1182 
1183   /* Call callback on cgraph_node, thunks and aliases associated to NODE.
1184      When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1185      skipped.  When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
1186      skipped.  */
1187   bool call_for_symbol_thunks_and_aliases (bool (*callback) (cgraph_node *node,
1188 							     void *data),
1189 					   void *data,
1190 					   bool include_overwritable,
1191 					   bool exclude_virtual_thunks = false);
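
  /* Illustrative sketch (not part of this interface) of the callback style
     used by the two walkers above; check_one is a hypothetical callback that
     stops the walk by returning true:

       static bool
       check_one (cgraph_node *alias, void *data ATTRIBUTE_UNUSED)
       {
	 return !TREE_NOTHROW (alias->decl);
       }

       bool found_throwing
	 = node->call_for_symbol_thunks_and_aliases (check_one, NULL, true);  */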
1192 
1193   /* Likewise indicate that a node is needed, i.e. reachable via some
1194      external means.  */
1195   inline void mark_force_output (void);
1196 
1197   /* Return true when function can be marked local.  */
1198   bool local_p (void);
1199 
1200   /* Return true if cgraph_node can be made local for API change.
1201      Extern inline functions and C++ COMDAT functions can be made local
1202      at the expense of possible code size growth if function is used in multiple
1203      compilation units.  */
1204   bool can_be_local_p (void);
1205 
1206   /* Return true when the cgraph_node cannot return or throw and thus
1207      it is safe to ignore its side effects for IPA analysis.  */
1208   bool cannot_return_p (void);
1209 
1210   /* Return true when the function cgraph_node and all its aliases are only
1211      called directly,
1212      i.e. it is not externally visible, its address was not taken and
1213      it is not used in any other non-standard way.  */
1214   bool only_called_directly_p (void);
1215 
1216   /* Return true when the function is only called directly or it has an
1217      alias, i.e. it is not externally visible, its address was not taken and
1218      it is not used in any other non-standard way.  */
1219   inline bool only_called_directly_or_aliased_p (void);
1220 
1221   /* Return true when the function cgraph_node can be expected to be removed
1222      from the program when direct calls in this compilation unit are removed.
1223 
1224      As a special case COMDAT functions are
1225      cgraph_can_remove_if_no_direct_calls_p while they are not
1226      cgraph_only_called_directly_p (it is possible they are called from another
1227      unit).
1228 
1229      This function behaves as cgraph_only_called_directly_p because eliminating
1230      all uses of a COMDAT function does not necessarily make it disappear from
1231      the program unless we are compiling the whole program or we do LTO.  In that
1232      case we know we win since dynamic linking will not really discard the
1233      linkonce section.
1234 
1235      If WILL_INLINE is true, assume that function will be inlined into all the
1236      direct calls.  */
1237   bool will_be_removed_from_program_if_no_direct_calls_p
1238 	 (bool will_inline = false);
1239 
1240   /* Return true when function can be removed from callgraph
1241      if all direct calls and references are eliminated.  The function does
1242      not take into account comdat groups.  */
1243   bool can_remove_if_no_direct_calls_and_refs_p (void);
1244 
1245   /* Return true when function cgraph_node and its aliases can be removed from
1246      callgraph if all direct calls are eliminated.
1247      If WILL_INLINE is true, assume that function will be inlined into all the
1248      direct calls.  */
1249   bool can_remove_if_no_direct_calls_p (bool will_inline = false);
1250 
1251   /* Return true when the callgraph node is a function with a Gimple body defined
1252      in the current unit.  Functions can also be defined externally or they
1253      can be thunks with no Gimple representation.
1254 
1255      Note that at WPA stage, the function body may not be present in memory.  */
1256   inline bool has_gimple_body_p (void);
1257 
1258   /* Return true if function should be optimized for size.  */
1259   bool optimize_for_size_p (void);
1260 
1261   /* Dump the callgraph to file F.  */
1262   static void dump_cgraph (FILE *f);
1263 
1264   /* Dump the call graph to stderr.  */
1265   static inline
1266   void debug_cgraph (void)
1267   {
1268     dump_cgraph (stderr);
1269   }
1270 
1271   /* Record that DECL1 and DECL2 are semantically identical function
1272      versions.  */
1273   static void record_function_versions (tree decl1, tree decl2);
1274 
1275   /* Remove the cgraph_function_version_info and cgraph_node for DECL.  This
1276      DECL is a duplicate declaration.  */
1277   static void delete_function_version_by_decl (tree decl);
1278 
1279   /* Add the function FNDECL to the call graph.
1280      Unlike finalize_function, this function is intended to be used
1281      by the middle end and allows insertion of a new function at an arbitrary
1282      point of compilation.  The function can be either in high, low or SSA form
1283      GIMPLE.
1284 
1285      The function is assumed to be reachable and have address taken (so no
1286      API breaking optimizations are performed on it).
1287 
1288      The main work done by this function is to enqueue the function for later
1289      processing to avoid the need for the passes to be re-entrant.  */
1290   static void add_new_function (tree fndecl, bool lowered);
1291 
1292   /* Return callgraph node for given symbol and check it is a function. */
1293   static inline cgraph_node *get (const_tree decl)
1294   {
1295     gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL);
1296     return dyn_cast <cgraph_node *> (symtab_node::get (decl));
1297   }
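
  /* Illustrative sketch (not part of this interface), assuming FNDECL is a
     FUNCTION_DECL:

       if (cgraph_node *cnode = cgraph_node::get (fndecl))
	 cnode->dump (stderr);
       else
	 cnode = cgraph_node::get_create (fndecl);  // lazily create the node  */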
1298 
1299   /* DECL has been parsed.  Take it, queue it, compile it at the whim of the
1300      logic in effect.  If NO_COLLECT is true, then our caller cannot stand to
1301      have the garbage collector run at the moment.  We would need to either
1302      create a new GC context, or just not compile right now.  */
1303   static void finalize_function (tree, bool);
1304 
1305   /* Return cgraph node assigned to DECL.  Create new one when needed.  */
1306   static cgraph_node * create (tree decl);
1307 
1308   /* Try to find a call graph node for declaration DECL and if it does not
1309      exist or if it corresponds to an inline clone, create a new one.  */
1310   static cgraph_node * get_create (tree);
1311 
1312   /* Return local info for the compiled function.  */
1313   static cgraph_local_info *local_info (tree decl);
1314 
1315   /* Return RTL info for the compiled function.  */
1316   static struct cgraph_rtl_info *rtl_info (tree);
1317 
1318   /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
1319      Return NULL if there's no such node.  */
1320   static cgraph_node *get_for_asmname (tree asmname);
1321 
1322   /* Attempt to mark ALIAS as an alias to DECL.  Return alias node if
1323      successful and NULL otherwise.
1324      Same body aliases are output whenever the body of DECL is output,
1325      and cgraph_node::get (ALIAS) transparently
1326      returns cgraph_node::get (DECL).  */
1327   static cgraph_node * create_same_body_alias (tree alias, tree decl);
1328 
1329   /* Verify whole cgraph structure.  */
1330   static void DEBUG_FUNCTION verify_cgraph_nodes (void);
1331 
1332   /* Verify cgraph, if consistency checking is enabled.  */
1333   static inline void checking_verify_cgraph_nodes (void);
1334 
1335   /* Worker to bring NODE local.  */
1336   static bool make_local (cgraph_node *node, void *);
1337 
1338   /* Mark ALIAS as an alias to DECL.  DECL_NODE is the cgraph node representing
1339      the function the body is associated
1340      with (not necessarily cgraph_node (DECL)).  */
1341   static cgraph_node *create_alias (tree alias, tree target);
1342 
1343   /* Return true if NODE has a thunk.  */
1344   static bool has_thunk_p (cgraph_node *node, void *);
1345 
1346   cgraph_edge *callees;
1347   cgraph_edge *callers;
1348   /* List of edges representing indirect calls with a yet undetermined
1349      callee.  */
1350   cgraph_edge *indirect_calls;
1351   /* For nested functions points to function the node is nested in.  */
1352   cgraph_node *origin;
1353   /* Points to first nested function, if any.  */
1354   cgraph_node *nested;
1355   /* Pointer to the next function with same origin, if any.  */
1356   cgraph_node *next_nested;
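  /* Illustration (not normative): ORIGIN, NESTED and NEXT_NESTED form a
     simple intrusive list, so the functions nested in NODE are walked as

       for (cgraph_node *n = node->nested; n; n = n->next_nested)
         ...

     and each such N has N->origin == NODE.  */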
1357   /* Pointer to the next clone.  */
1358   cgraph_node *next_sibling_clone;
1359   cgraph_node *prev_sibling_clone;
1360   cgraph_node *clones;
1361   cgraph_node *clone_of;
1362   /* If instrumentation_clone is 1 then instrumented_version points
1363      to the original function used to make instrumented version.
1364      Otherwise points to instrumented version of the function.  */
1365   cgraph_node *instrumented_version;
1366   /* If instrumentation_clone is 1 then orig_decl is the original
1367      function declaration.  */
1368   tree orig_decl;
1369   /* For functions with many call sites this holds a map from call expression
1370      to the edge, to speed up the cgraph_edge function.  */
1371   hash_table<cgraph_edge_hasher> *GTY(()) call_site_hash;
1372   /* Declaration this node used to be a clone of.  */
1373   tree former_clone_of;
1374 
1375   /* If this is a SIMD clone, this points to the SIMD specific
1376      information for it.  */
1377   cgraph_simd_clone *simdclone;
1378   /* If this function has SIMD clones, this points to the first clone.  */
1379   cgraph_node *simd_clones;
1380 
1381   /* Interprocedural passes scheduled to have their transform functions
1382      applied next time we execute local pass on them.  We maintain it
1383      per-function in order to allow IPA passes to introduce new functions.  */
1384   vec<ipa_opt_pass> GTY((skip)) ipa_transforms_to_apply;
1385 
1386   cgraph_local_info local;
1387   cgraph_global_info global;
1388   struct cgraph_rtl_info *rtl;
1389   cgraph_clone_info clone;
1390   cgraph_thunk_info thunk;
1391 
1392   /* Expected number of executions: calculated in profile.c.  */
1393   profile_count count;
1394   /* How to scale counts at materialization time; used to merge
1395      LTO units with different number of profile runs.  */
1396   int count_materialization_scale;
1397   /* Unique id of the node.  */
1398   int uid;
1399   /* Summary unique id of the node.  */
1400   int summary_uid;
1401   /* ID assigned by the profiling.  */
1402   unsigned int profile_id;
1403   /* Time profiler: first run of function.  */
1404   int tp_first_run;
1405 
1406   /* Set when decl is an abstract function pointed to by the
1407      ABSTRACT_DECL_ORIGIN of a reachable function.  */
1408   unsigned used_as_abstract_origin : 1;
1409   /* Set once the function is lowered (i.e. its CFG is built).  */
1410   unsigned lowered : 1;
1411   /* Set once the function has been instantiated and its callee
1412      lists created.  */
1413   unsigned process : 1;
1414   /* How commonly executed the node is.  Initialized during branch
1415      probabilities pass.  */
1416   ENUM_BITFIELD (node_frequency) frequency : 2;
1417   /* True when function can only be called at startup (from static ctor).  */
1418   unsigned only_called_at_startup : 1;
1419   /* True when function can only be called at exit (from static dtor).  */
1420   unsigned only_called_at_exit : 1;
1421   /* True when function is the transactional clone of a function which
1422      is called only from inside transactions.  */
1423   /* ?? We should be able to remove this.  We have enough bits in
1424      cgraph to calculate it.  */
1425   unsigned tm_clone : 1;
1426   /* True if this decl is a dispatcher for function versions.  */
1427   unsigned dispatcher_function : 1;
1428   /* True if this decl calls a COMDAT-local function.  This is set up in
1429      compute_fn_summary and inline_call.  */
1430   unsigned calls_comdat_local : 1;
1431   /* True if node has been created by merge operation in IPA-ICF.  */
1432   unsigned icf_merged: 1;
1433   /* True when function is clone created for Pointer Bounds Checker
1434      instrumentation.  */
1435   unsigned instrumentation_clone : 1;
1436   /* True if call to node can't result in a call to free, munmap or
1437      other operation that could make previously non-trapping memory
1438      accesses trapping.  */
1439   unsigned nonfreeing_fn : 1;
1440   /* True if multiple COMDAT bodies were merged by lto-symtab.  */
1441   unsigned merged_comdat : 1;
1442   /* True if function was created to be executed in parallel.  */
1443   unsigned parallelized_function : 1;
1444   /* True if function is part split out by ipa-split.  */
1445   unsigned split_part : 1;
1446   /* True if the function appears as possible target of indirect call.  */
1447   unsigned indirect_call_target : 1;
1448 
1449 private:
1450   /* Worker for call_for_symbol_and_aliases.  */
1451   bool call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
1452 						        void *),
1453 				      void *data, bool include_overwritable);
1454 };
1455 
1456 /* A cgraph node set is a collection of cgraph nodes.  A cgraph node
1457    can appear in multiple sets.  */
1458 struct cgraph_node_set_def
1459 {
1460   hash_map<cgraph_node *, size_t> *map;
1461   vec<cgraph_node *> nodes;
1462 };
1463 
1464 typedef cgraph_node_set_def *cgraph_node_set;
1465 typedef struct varpool_node_set_def *varpool_node_set;
1466 
1467 class varpool_node;
1468 
1469 /* A varpool node set is a collection of varpool nodes.  A varpool node
1470    can appear in multiple sets.  */
1471 struct varpool_node_set_def
1472 {
1473   hash_map<varpool_node *, size_t> * map;
1474   vec<varpool_node *> nodes;
1475 };
1476 
1477 /* Iterator structure for cgraph node sets.  */
1478 struct cgraph_node_set_iterator
1479 {
1480   cgraph_node_set set;
1481   unsigned index;
1482 };
1483 
1484 /* Iterator structure for varpool node sets.  */
1485 struct varpool_node_set_iterator
1486 {
1487   varpool_node_set set;
1488   unsigned index;
1489 };
1490 
1491 /* Context of a polymorphic call.  It represents information about the type of
1492    instance that may reach the call.  This is used by ipa-devirt walkers of the
1493    type inheritance graph.  */
1494 
1495 class GTY(()) ipa_polymorphic_call_context {
1496 public:
1497   /* The called object appears in an object of type OUTER_TYPE
1498      at offset OFFSET.  When information is not 100% reliable, we
1499      use SPECULATIVE_OUTER_TYPE and SPECULATIVE_OFFSET. */
1500   HOST_WIDE_INT offset;
1501   HOST_WIDE_INT speculative_offset;
1502   tree outer_type;
1503   tree speculative_outer_type;
1504   /* True if outer object may be in construction or destruction.  */
1505   unsigned maybe_in_construction : 1;
1506   /* True if outer object may be of derived type.  */
1507   unsigned maybe_derived_type : 1;
1508   /* True if speculative outer object may be of derived type.  We always
1509      speculate that construction does not happen.  */
1510   unsigned speculative_maybe_derived_type : 1;
1511   /* True if the context is invalid and all calls should be redirected
1512      to BUILTIN_UNREACHABLE.  */
1513   unsigned invalid : 1;
1514   /* True if the outer type is dynamic.  */
1515   unsigned dynamic : 1;
1516 
1517   /* Build empty "I know nothing" context.  */
1518   ipa_polymorphic_call_context ();
1519   /* Build polymorphic call context for indirect call E.  */
1520   ipa_polymorphic_call_context (cgraph_edge *e);
1521   /* Build polymorphic call context for IP invariant CST.
1522      If specified, OTR_TYPE specify the type of polymorphic call
1523      that takes CST+OFFSET as a parameter.  */
1524   ipa_polymorphic_call_context (tree cst, tree otr_type = NULL,
1525 				HOST_WIDE_INT offset = 0);
1526   /* Build context for pointer REF contained in FNDECL at statement STMT.
1527      If INSTANCE is non-NULL, return a pointer to the object described by
1528      the context.  */
1529   ipa_polymorphic_call_context (tree fndecl, tree ref, gimple *stmt,
1530 				tree *instance = NULL);
1531 
1532   /* Look for vtable stores or constructor calls to work out dynamic type
1533      of memory location.  */
1534   bool get_dynamic_type (tree, tree, tree, gimple *);
1535 
1536   /* Make context non-speculative.  */
1537   void clear_speculation ();
1538 
1539   /* Produce context specifying all derived types of OTR_TYPE.  If OTR_TYPE is
1540      NULL, the context is set to dummy "I know nothing" setting.  */
1541   void clear_outer_type (tree otr_type = NULL);
1542 
1543   /* Walk container types and modify context to point to actual class
1544      containing OTR_TYPE (if non-NULL) as base class.
1545      Return true if resulting context is valid.
1546 
1547      When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
1548      valid only via allocation of new polymorphic type inside by means
1549      of placement new.
1550 
1551      When CONSIDER_BASES is false, only look for actual fields, not base types
1552      of TYPE.  */
1553   bool restrict_to_inner_class (tree otr_type,
1554 				bool consider_placement_new = true,
1555 				bool consider_bases = true);
1556 
1557   /* Adjust all offsets in contexts by given number of bits.  */
1558   void offset_by (HOST_WIDE_INT);
1559   /* Use when we can not track dynamic type change.  This speculatively assumes
1560      that the type change is not happening.  */
1561   void possible_dynamic_type_change (bool, tree otr_type = NULL);
1562   /* Assume that both THIS and the given context are valid and strengthen THIS
1563      if possible.  Return true if any strengthening was made.
1564      If the actual type the context is being used in is known, OTR_TYPE should be
1565      set accordingly.  This improves the quality of the combined result.  */
1566   bool combine_with (ipa_polymorphic_call_context, tree otr_type = NULL);
1567   bool meet_with (ipa_polymorphic_call_context, tree otr_type = NULL);
1568 
1569   /* Return TRUE if context is fully useless.  */
1570   bool useless_p () const;
1571   /* Return TRUE if this context conveys the same information as X.  */
1572   bool equal_to (const ipa_polymorphic_call_context &x) const;
1573 
1574   /* Dump human readable context to F.  If NEWLINE is true, it will be
1575      terminated by a newline.  */
1576   void dump (FILE *f, bool newline = true) const;
1577   void DEBUG_FUNCTION debug () const;
1578 
1579   /* LTO streaming.  */
1580   void stream_out (struct output_block *) const;
1581   void stream_in (struct lto_input_block *, struct data_in *data_in);
1582 
1583 private:
1584   bool combine_speculation_with (tree, HOST_WIDE_INT, bool, tree);
1585   bool meet_speculation_with (tree, HOST_WIDE_INT, bool, tree);
1586   void set_by_decl (tree, HOST_WIDE_INT);
1587   bool set_by_invariant (tree, tree, HOST_WIDE_INT);
1588   bool speculation_consistent_p (tree, HOST_WIDE_INT, bool, tree) const;
1589   void make_speculative (tree otr_type = NULL);
1590 };
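/* Usage sketch (illustrative only): given an indirect call edge E whose
   indirect_info has the POLYMORPHIC flag set, a pass may build and refine
   a context like

     ipa_polymorphic_call_context ctx (e);
     ctx.restrict_to_inner_class (e->indirect_info->otr_type);
     if (!ctx.useless_p ())
       ctx.dump (dump_file);

   OTR_TYPE here is the one recorded in cgraph_indirect_call_info below;
   whether such refinement pays off is up to the individual pass.  */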
1591 
1592 /* Structure containing additional information about an indirect call.  */
1593 
1594 struct GTY(()) cgraph_indirect_call_info
1595 {
1596   /* When agg_contents is set, the offset where the call pointer is located
1597      within the aggregate.  */
1598   HOST_WIDE_INT offset;
1599   /* Context of the polymorphic call; use only when POLYMORPHIC flag is set.  */
1600   ipa_polymorphic_call_context context;
1601   /* OBJ_TYPE_REF_TOKEN of a polymorphic call (if polymorphic is set).  */
1602   HOST_WIDE_INT otr_token;
1603   /* Type of the object from OBJ_TYPE_REF_OBJECT. */
1604   tree otr_type;
1605   /* Index of the parameter that is called.  */
1606   int param_index;
1607   /* ECF flags determined from the caller.  */
1608   int ecf_flags;
1609   /* Profile_id of the common target obtained from the profile.  */
1610   int common_target_id;
1611   /* Probability that call will land in function with COMMON_TARGET_ID.  */
1612   int common_target_probability;
1613 
1614   /* Set when the call is a virtual call with the parameter being the
1615      associated object pointer rather than a simple direct call.  */
1616   unsigned polymorphic : 1;
1617   /* Set when the call is a call of a pointer loaded from contents of an
1618      aggregate at offset.  */
1619   unsigned agg_contents : 1;
1620   /* Set when this is a call through a member pointer.  */
1621   unsigned member_ptr : 1;
1622   /* When the agg_contents bit is set, this one determines whether the
1623      destination is loaded from a parameter passed by reference. */
1624   unsigned by_ref : 1;
1625   /* When the agg_contents bit is set, this one determines whether we can
1626      deduce from the function body that the loaded value from the reference is
1627      never modified between the invocation of the function and the load
1628      point.  */
1629   unsigned guaranteed_unmodified : 1;
1630   /* For polymorphic calls this specifies whether the virtual table pointer
1631      may have changed in between function entry and the call.  */
1632   unsigned vptr_changed : 1;
1633 };
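/* Illustration (not normative): for a source-level call such as

     void call_it (struct ops *o, int x) { o->hook (x); }

   the resulting indirect edge typically has AGG_CONTENTS set, OFFSET equal
   to the position of the HOOK field within struct ops, BY_REF set because
   the aggregate is reached through the pointer parameter O, and PARAM_INDEX
   identifying O.  The exact values are derived from the IL by the IPA
   analysis and may differ from this sketch.  */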
1634 
1635 struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"),
1636 	    for_user)) cgraph_edge {
1637   friend class cgraph_node;
1638 
1639   /* Remove the edge in the cgraph.  */
1640   void remove (void);
1641 
1642   /* Change field call_stmt of edge to NEW_STMT.
1643      If UPDATE_SPECULATIVE and E is any component of speculative
1644      edge, then update all components.  */
1645   void set_call_stmt (gcall *new_stmt, bool update_speculative = true);
1646 
1647   /* Redirect callee of the edge to N.  The function does not update underlying
1648      call expression.  */
1649   void redirect_callee (cgraph_node *n);
1650 
1651   /* If the edge does not lead to a thunk, simply redirect it to N.  Otherwise
1652      create one or more equivalent thunks for N and redirect E to the first in
1653      the chain.  Note that it is then necessary to call
1654      n->expand_all_artificial_thunks once all callers are redirected.  */
1655   void redirect_callee_duplicating_thunks (cgraph_node *n);
1656 
1657   /* Make an indirect edge with an unknown callee an ordinary edge leading to
1658      CALLEE.  */
1661   cgraph_edge *make_direct (cgraph_node *callee);
1662 
1663   /* Turn the edge into a speculative call to N2.  Update the profile so
1664      the direct call is taken DIRECT_COUNT times.  */
1666   cgraph_edge *make_speculative (cgraph_node *n2, profile_count direct_count);
1667 
1668   /* Given a speculative call edge, return all three components.  */
1669   void speculative_call_info (cgraph_edge *&direct, cgraph_edge *&indirect,
1670 			      ipa_ref *&reference);
1671 
1672   /* Speculative call edge turned out to be a direct call to CALLEE_DECL.
1673      Remove the speculative call sequence and return the edge representing the call.
1674      It is up to the caller to redirect the call as appropriate.  */
1675   cgraph_edge *resolve_speculation (tree callee_decl = NULL);
1676 
1677   /* If necessary, change the function declaration in the call statement
1678      associated with the edge so that it corresponds to the edge callee.  */
1679   gimple *redirect_call_stmt_to_callee (void);
1680 
1681   /* Create a clone of this edge in callgraph node N, associated with call
1682      statement CALL_STMT.  */
1683   cgraph_edge * clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
1684 		       profile_count num, profile_count den,
1685 		       bool update_original);
1686 
1687   /* Verify edge count and frequency.  */
1688   bool verify_count ();
1689 
1690   /* Return true when call of edge can not lead to return from caller
1691      and thus it is safe to ignore its side effects for IPA analysis
1692      when computing side effects of the caller.  */
1693   bool cannot_lead_to_return_p (void);
1694 
1695   /* Return true when the edge represents a direct recursion.  */
1696   bool recursive_p (void);
1697 
1698   /* Return true if the call can be hot.  */
1699   bool maybe_hot_p (void);
1700 
1701   /* Rebuild cgraph edges for current function node.  This needs to be run after
1702      passes that don't update the cgraph.  */
1703   static unsigned int rebuild_edges (void);
1704 
1705   /* Rebuild cgraph references for current function node.  This needs to be run
1706      after passes that don't update the cgraph.  */
1707   static void rebuild_references (void);
1708 
1709   /* Expected number of executions: calculated in profile.c.  */
1710   profile_count count;
1711   cgraph_node *caller;
1712   cgraph_node *callee;
1713   cgraph_edge *prev_caller;
1714   cgraph_edge *next_caller;
1715   cgraph_edge *prev_callee;
1716   cgraph_edge *next_callee;
1717   gcall *call_stmt;
1718   /* Additional information about an indirect call.  Not cleared when an edge
1719      becomes direct.  */
1720   cgraph_indirect_call_info *indirect_info;
1721   PTR GTY ((skip (""))) aux;
1722   /* When equal to CIF_OK, inline this call.  Otherwise, points to the
1723      explanation why function was not inlined.  */
1724   enum cgraph_inline_failed_t inline_failed;
1725   /* The stmt_uid of call_stmt.  This is used by LTO to recover the call_stmt
1726      when the function is serialized in.  */
1727   unsigned int lto_stmt_uid;
1728   /* Unique id of the edge.  */
1729   int uid;
1730   /* Whether this edge was made direct by indirect inlining.  */
1731   unsigned int indirect_inlining_edge : 1;
1732   /* Whether this edge describes an indirect call with an undetermined
1733      callee.  */
1734   unsigned int indirect_unknown_callee : 1;
1736   /* True if the corresponding CALL stmt cannot be inlined.  */
1737   unsigned int call_stmt_cannot_inline_p : 1;
1738   /* Can this call throw externally?  */
1739   unsigned int can_throw_external : 1;
1740   /* Edges with the SPECULATIVE flag represent indirect calls that were
1741      speculatively turned into direct calls (e.g. by profile feedback).
1742      The final code sequence will have form:
1743 
1744      if (call_target == expected_fn)
1745        expected_fn ();
1746      else
1747        call_target ();
1748 
1749      Every speculative call is represented by three components attached
1750      to the same call statement:
1751      1) a direct call (to expected_fn)
1752      2) an indirect call (to call_target)
1753      3) an IPA_REF_ADDR reference to expected_fn.
1754 
1755      Optimizers may later redirect the direct call to a clone, so 1) and 3)
1756      do not necessarily need to agree on the destination.  */
1757   unsigned int speculative : 1;
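  /* Usage sketch (illustrative only): passes inspecting a speculative edge E
     usually recover all three components first,

       cgraph_edge *direct, *indirect;
       ipa_ref *ref;
       e->speculative_call_info (direct, indirect, ref);

     and then either keep the speculation (possibly redirecting DIRECT) or
     drop it via e->resolve_speculation ().  */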
1758   /* Set to true when caller is a constructor or destructor of polymorphic
1759      type.  */
1760   unsigned in_polymorphic_cdtor : 1;
1761 
1762   /* Return true if call must bind to current definition.  */
1763   bool binds_to_current_def_p ();
1764 
1765   /* Expected frequency of executions within the function.
1766      When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
1767      per function call.  The range is 0 to CGRAPH_FREQ_MAX.  */
1768   int frequency ();
1769 
1770   /* Expected frequency of executions within the function.  */
1771   sreal sreal_frequency ();
1772 private:
1773   /* Remove the edge from the list of the callers of the callee.  */
1774   void remove_caller (void);
1775 
1776   /* Remove the edge from the list of the callees of the caller.  */
1777   void remove_callee (void);
1778 
1779   /* Set callee N of call graph edge and add it to the corresponding set of
1780      callers. */
1781   void set_callee (cgraph_node *n);
1782 
1783   /* Output flags of edge to a file F.  */
1784   void dump_edge_flags (FILE *f);
1785 
1786   /* Verify that call graph edge corresponds to DECL from the associated
1787      statement.  Return true if the verification should fail.  */
1788   bool verify_corresponds_to_fndecl (tree decl);
1789 };
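/* Usage sketch (illustrative only): when an optimizer decides that edge E
   should call node N instead of its current callee, the typical flow is

     e->redirect_callee (n);
     e->redirect_call_stmt_to_callee ();

   The first call updates only the callgraph; the second rewrites the
   underlying call statement.  When thunks may be involved,
   redirect_callee_duplicating_thunks can be used instead of
   redirect_callee.  */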
1790 
1791 #define CGRAPH_FREQ_BASE 1000
1792 #define CGRAPH_FREQ_MAX 100000
1793 
1794 /* The varpool data structure.
1795    Each static variable decl has assigned varpool_node.  */
1796 
1797 class GTY((tag ("SYMTAB_VARIABLE"))) varpool_node : public symtab_node {
1798 public:
1799   /* Dump given varpool node to F.  */
1800   void dump (FILE *f);
1801 
1802   /* Dump given varpool node to stderr.  */
1803   void DEBUG_FUNCTION debug (void);
1804 
1805   /* Remove variable from symbol table.  */
1806   void remove (void);
1807 
1808   /* Remove node initializer when it is no longer needed.  */
1809   void remove_initializer (void);
1810 
1811   void analyze (void);
1812 
1813   /* Return variable availability.  */
1814   availability get_availability (symtab_node *ref = NULL);
1815 
1816   /* When doing LTO, read variable's constructor from disk if
1817      it is not already present.  */
1818   tree get_constructor (void);
1819 
1820   /* Return true if variable has constructor that can be used for folding.  */
1821   bool ctor_useable_for_folding_p (void);
1822 
1823   /* For the given variable pool node, walk the alias chain to return the variable
1824      the node is an alias of.  Do not walk through thunks.
1825      When AVAILABILITY is non-NULL, get minimal availability in the chain.
1826      When REF is non-NULL, assume that reference happens in symbol REF
1827      when determining the availability.  */
1828   inline varpool_node *ultimate_alias_target
1829     (availability *availability = NULL, symtab_node *ref = NULL);
1830 
1831   /* Return node that alias is aliasing.  */
1832   inline varpool_node *get_alias_target (void);
1833 
1834   /* Output one variable, if necessary.  Return whether we output it.  */
1835   bool assemble_decl (void);
1836 
1837   /* For variables in named sections make sure get_variable_section
1838      is called before we switch to those sections.  Then section
1839      conflicts between read-only sections and read-only sections requiring
1840      relocations can be resolved.  */
1841   void finalize_named_section_flags (void);
1842 
1843   /* Call CALLBACK on the varpool symbol and the aliases associated with it.
1844      When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
1845      skipped. */
1846   bool call_for_symbol_and_aliases (bool (*callback) (varpool_node *, void *),
1847 				    void *data,
1848 				    bool include_overwritable);
1849 
1850   /* Return true when variable should be considered externally visible.  */
1851   bool externally_visible_p (void);
1852 
1853   /* Return true when all references to the variable must be visible
1854      in ipa_ref_list,
1855      i.e. the variable is not externally visible and is not used in some magic
1856      way (asm statement or such).
1857      The magic uses are all summarized in the force_output flag.  */
1858   inline bool all_refs_explicit_p ();
1859 
1860   /* Return true when the variable can be removed from the variable pool
1861      if all references to it are eliminated.  */
1862   inline bool can_remove_if_no_refs_p (void);
1863 
1864   /* Add the variable DECL to the varpool.
1865      Unlike finalize_decl, this function is intended to be used
1866      by the middle end and allows insertion of new variables at an arbitrary
1867      point of compilation.  */
1868   static void add (tree decl);
1869 
1870   /* Return varpool node for given symbol and check it is a variable.  */
1871   static inline varpool_node *get (const_tree decl);
1872 
1873   /* Mark DECL as finalized.  By finalizing the declaration, the frontend instructs
1874      the middle end to output the variable to the asm file, if it is needed or
1875      externally visible.  */
1876   static void finalize_decl (tree decl);
1877 
1878   /* Attempt to mark ALIAS as an alias to DECL.  Return TRUE if successful.
1879      Extra name aliases are output whenever DECL is output.  */
1880   static varpool_node * create_extra_name_alias (tree alias, tree decl);
1881 
1882   /* Attempt to mark ALIAS as an alias to DECL.  Return TRUE if successful.
1883      Extra name aliases are output whenever DECL is output.  */
1884   static varpool_node * create_alias (tree, tree);
1885 
1886   /* Dump the variable pool to F.  */
1887   static void dump_varpool (FILE *f);
1888 
1889   /* Dump the variable pool to stderr.  */
1890   static void DEBUG_FUNCTION debug_varpool (void);
1891 
1892   /* Allocate new varpool node and insert it into basic data structures.  */
1893   static varpool_node *create_empty (void);
1894 
1895   /* Return varpool node assigned to DECL.  Create new one when needed.  */
1896   static varpool_node *get_create (tree decl);
1897 
1898   /* Given an assembler name, lookup node.  */
1899   static varpool_node *get_for_asmname (tree asmname);
1900 
1901   /* Set when variable is scheduled to be assembled.  */
1902   unsigned output : 1;
1903 
1904   /* Set when variable has statically initialized pointer
1905      or is a static bounds variable and needs initialization.  */
1906   unsigned need_bounds_init : 1;
1907 
1908   /* Set if the variable is dynamically initialized, except for
1909      function local statics.   */
1910   unsigned dynamically_initialized : 1;
1911 
1912   ENUM_BITFIELD(tls_model) tls_model : 3;
1913 
1914   /* Set if the variable is known to be used by single function only.
1915      This is computed by the ipa_single_use pass and used by late optimizations
1916      in places where optimization would be valid for local static variable
1917      if we did not do any inter-procedural code movement.  */
1918   unsigned used_by_single_function : 1;
1919 
1920 private:
1921   /* Assemble thunks and aliases associated to varpool node.  */
1922   void assemble_aliases (void);
1923 
1924   /* Worker for call_for_node_and_aliases.  */
1925   bool call_for_symbol_and_aliases_1 (bool (*callback) (varpool_node *, void *),
1926 				      void *data,
1927 				      bool include_overwritable);
1928 };
1929 
1930 /* Every top level asm statement is put into an asm_node.  */
1931 
1932 struct GTY(()) asm_node {
1933 
1934 
1935   /* Next asm node.  */
1936   asm_node *next;
1937   /* String for this asm node.  */
1938   tree asm_str;
1939   /* Ordering of all cgraph nodes.  */
1940   int order;
1941 };
1942 
1943 /* Report whether or not THIS symtab node is a function, aka cgraph_node.  */
1944 
1945 template <>
1946 template <>
1947 inline bool
1948 is_a_helper <cgraph_node *>::test (symtab_node *p)
1949 {
1950   return p && p->type == SYMTAB_FUNCTION;
1951 }
1952 
1953 /* Report whether or not THIS symtab node is a variable, aka varpool_node.  */
1954 
1955 template <>
1956 template <>
1957 inline bool
1958 is_a_helper <varpool_node *>::test (symtab_node *p)
1959 {
1960   return p && p->type == SYMTAB_VARIABLE;
1961 }
1962 
1963 /* Macros to access the next item in the list of free cgraph nodes and
1964    edges. */
1965 #define NEXT_FREE_NODE(NODE) dyn_cast<cgraph_node *> ((NODE)->next)
1966 #define SET_NEXT_FREE_NODE(NODE,NODE2) ((NODE))->next = NODE2
1967 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
1968 
1969 typedef void (*cgraph_edge_hook)(cgraph_edge *, void *);
1970 typedef void (*cgraph_node_hook)(cgraph_node *, void *);
1971 typedef void (*varpool_node_hook)(varpool_node *, void *);
1972 typedef void (*cgraph_2edge_hook)(cgraph_edge *, cgraph_edge *, void *);
1973 typedef void (*cgraph_2node_hook)(cgraph_node *, cgraph_node *, void *);
1974 
1975 struct cgraph_edge_hook_list;
1976 struct cgraph_node_hook_list;
1977 struct varpool_node_hook_list;
1978 struct cgraph_2edge_hook_list;
1979 struct cgraph_2node_hook_list;
1980 
1981 /* Map from a symbol to initialization/finalization priorities.  */
1982 struct GTY(()) symbol_priority_map {
1983   priority_type init;
1984   priority_type fini;
1985 };
1986 
1987 enum symtab_state
1988 {
1989   /* Frontend is parsing and finalizing functions.  */
1990   PARSING,
1991   /* Callgraph is being constructed.  It is safe to add new functions.  */
1992   CONSTRUCTION,
1993   /* Callgraph is being streamed-in at LTO time.  */
1994   LTO_STREAMING,
1995   /* Callgraph is built and early IPA passes are being run.  */
1996   IPA,
1997   /* Callgraph is built and all functions are transformed to SSA form.  */
1998   IPA_SSA,
1999   /* All inline decisions are done; it is now possible to remove extern inline
2000      functions and virtual call targets.  */
2001   IPA_SSA_AFTER_INLINING,
2002   /* Functions are now ordered and being passed to RTL expanders.  */
2003   EXPANSION,
2004   /* All cgraph expansion is done.  */
2005   FINISHED
2006 };
2007 
2008 struct asmname_hasher : ggc_ptr_hash <symtab_node>
2009 {
2010   typedef const_tree compare_type;
2011 
2012   static hashval_t hash (symtab_node *n);
2013   static bool equal (symtab_node *n, const_tree t);
2014 };
2015 
2016 class GTY((tag ("SYMTAB"))) symbol_table
2017 {
2018 public:
2019   friend class symtab_node;
2020   friend class cgraph_node;
2021   friend class cgraph_edge;
2022 
2023   symbol_table (): cgraph_max_summary_uid (1)
2024   {
2025   }
2026 
2027   /* Initialize callgraph dump file.  */
2028   void initialize (void);
2029 
2030   /* Register a top-level asm statement ASM_STR.  */
2031   inline asm_node *finalize_toplevel_asm (tree asm_str);
2032 
2033   /* Analyze the whole compilation unit once it is parsed completely.  */
2034   void finalize_compilation_unit (void);
2035 
2036   /* The C++ frontend produces same-body aliases all over the place, even before
2037      PCH gets streamed out.  It relies on us linking the aliases with their
2038      functions in order to do the fixups, but ipa-ref is not PCH safe.  Consequently
2039      we first produce aliases without links, but once the C++ FE is sure it won't
2040      stream PCH we build the links via this function.  */
2041   void process_same_body_aliases (void);
2042 
2043   /* Perform simple optimizations based on callgraph.  */
2044   void compile (void);
2045 
2046   /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
2047      functions into the callgraph in a way that makes them look like ordinary
2048      reachable functions inserted into the callgraph at construction time.  */
2049   void process_new_functions (void);
2050 
2051   /* Once all functions from the compilation unit are in memory, produce all
2052      clones and update all calls.  We might also do this on demand if we don't
2053      want to bring all functions to memory prior to compilation, but the current
2054      WHOPR implementation does that and it is a bit easier to keep everything
2055      right in this order.  */
2056   void materialize_all_clones (void);
2057 
2058   /* Register a symbol NODE.  */
2059   inline void register_symbol (symtab_node *node);
2060 
2061   inline void
2062   clear_asm_symbols (void)
2063   {
2064     asmnodes = NULL;
2065     asm_last_node = NULL;
2066   }
2067 
2068   /* Perform reachability analysis and reclaim all unreachable nodes.  */
2069   bool remove_unreachable_nodes (FILE *file);
2070 
2071   /* Optimization of function bodies might've rendered some variables
2072      unnecessary, so we want to avoid compiling them.  Re-do
2073      reachability starting from variables that are either externally visible
2074      or were referred to from the asm output routines.  */
2075   void remove_unreferenced_decls (void);
2076 
2077   /* Unregister a symbol NODE.  */
2078   inline void unregister (symtab_node *node);
2079 
2080   /* Allocate new callgraph node and insert it into basic data structures.  */
2081   cgraph_node *create_empty (void);
2082 
2083   /* Release a callgraph NODE with UID and put it into the list
2084      of free nodes.  */
2085   void release_symbol (cgraph_node *node, int uid);
2086 
2087   /* Output all variables enqueued to be assembled.  */
2088   bool output_variables (void);
2089 
2090   /* Weakrefs may be associated with external decls and thus not output
2091      at expansion time.  Emit all necessary aliases.  */
2092   void output_weakrefs (void);
2093 
2094   /* Return the first symbol in the symbol table.  */
2095   inline symtab_node *first_symbol (void);
2096 
2097   /* Return first assembler symbol.  */
2098   inline asm_node *
2099   first_asm_symbol (void)
2100   {
2101     return asmnodes;
2102   }
2103 
2104   /* Return first static symbol with definition.  */
2105   inline symtab_node *first_defined_symbol (void);
2106 
2107   /* Return first variable.  */
2108   inline varpool_node *first_variable (void);
2109 
2110   /* Return next variable after NODE.  */
2111   inline varpool_node *next_variable (varpool_node *node);
2112 
2113   /* Return first static variable with initializer.  */
2114   inline varpool_node *first_static_initializer (void);
2115 
2116   /* Return next static variable with initializer after NODE.  */
2117   inline varpool_node *next_static_initializer (varpool_node *node);
2118 
2119   /* Return first static variable with definition.  */
2120   inline varpool_node *first_defined_variable (void);
2121 
2122   /* Return next static variable with definition after NODE.  */
2123   inline varpool_node *next_defined_variable (varpool_node *node);
2124 
2125   /* Return first function with body defined.  */
2126   inline cgraph_node *first_defined_function (void);
2127 
2128   /* Return next function with body defined after NODE.  */
2129   inline cgraph_node *next_defined_function (cgraph_node *node);
2130 
2131   /* Return first function.  */
2132   inline cgraph_node *first_function (void);
2133 
2134   /* Return next function.  */
2135   inline cgraph_node *next_function (cgraph_node *node);
2136 
2137   /* Return first function with Gimple body.  */
2138   cgraph_node *first_function_with_gimple_body (void);
2139 
2140   /* Return next function with Gimple body after NODE.  */
2141   inline cgraph_node *next_function_with_gimple_body (cgraph_node *node);
2142 
2143   /* Register HOOK to be called with DATA on each removed edge.  */
2144   cgraph_edge_hook_list *add_edge_removal_hook (cgraph_edge_hook hook,
2145 						void *data);
2146 
2147   /* Remove ENTRY from the list of hooks called on removing edges.  */
2148   void remove_edge_removal_hook (cgraph_edge_hook_list *entry);
2149 
2150   /* Register HOOK to be called with DATA on each removed node.  */
2151   cgraph_node_hook_list *add_cgraph_removal_hook (cgraph_node_hook hook,
2152 						  void *data);
2153 
2154   /* Remove ENTRY from the list of hooks called on removing nodes.  */
2155   void remove_cgraph_removal_hook (cgraph_node_hook_list *entry);
2156 
2157   /* Register HOOK to be called with DATA on each removed node.  */
2158   varpool_node_hook_list *add_varpool_removal_hook (varpool_node_hook hook,
2159 						    void *data);
2160 
2161   /* Remove ENTRY from the list of hooks called on removing nodes.  */
2162   void remove_varpool_removal_hook (varpool_node_hook_list *entry);
2163 
2164   /* Register HOOK to be called with DATA on each inserted node.  */
2165   cgraph_node_hook_list *add_cgraph_insertion_hook (cgraph_node_hook hook,
2166 						    void *data);
2167 
2168   /* Remove ENTRY from the list of hooks called on inserted nodes.  */
2169   void remove_cgraph_insertion_hook (cgraph_node_hook_list *entry);
2170 
2171   /* Register HOOK to be called with DATA on each inserted node.  */
2172   varpool_node_hook_list *add_varpool_insertion_hook (varpool_node_hook hook,
2173 						      void *data);
2174 
2175   /* Remove ENTRY from the list of hooks called on inserted nodes.  */
2176   void remove_varpool_insertion_hook (varpool_node_hook_list *entry);
2177 
2178   /* Register HOOK to be called with DATA on each duplicated edge.  */
2179   cgraph_2edge_hook_list *add_edge_duplication_hook (cgraph_2edge_hook hook,
2180 						     void *data);
2181   /* Remove ENTRY from the list of hooks called on duplicating edges.  */
2182   void remove_edge_duplication_hook (cgraph_2edge_hook_list *entry);
2183 
2184   /* Register HOOK to be called with DATA on each duplicated node.  */
2185   cgraph_2node_hook_list *add_cgraph_duplication_hook (cgraph_2node_hook hook,
2186 						       void *data);
2187 
2188   /* Remove ENTRY from the list of hooks called on duplicating nodes.  */
2189   void remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry);
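  /* Usage sketch (illustrative only; MY_NODE_REMOVAL and MY_DATA are
     hypothetical names): an IPA pass keeping per-node state typically
     registers a removal hook so stale entries can be dropped, and
     unregisters it when the state is freed:

       cgraph_node_hook_list *h
         = symtab->add_cgraph_removal_hook (my_node_removal, my_data);
       ...
       symtab->remove_cgraph_removal_hook (h);

     MY_NODE_REMOVAL must match the cgraph_node_hook signature,
     void (*) (cgraph_node *, void *).  */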
2190 
2191   /* Call all edge removal hooks.  */
2192   void call_edge_removal_hooks (cgraph_edge *e);
2193 
2194   /* Call all node insertion hooks.  */
2195   void call_cgraph_insertion_hooks (cgraph_node *node);
2196 
2197   /* Call all node removal hooks.  */
2198   void call_cgraph_removal_hooks (cgraph_node *node);
2199 
2200   /* Call all node duplication hooks.  */
2201   void call_cgraph_duplication_hooks (cgraph_node *node, cgraph_node *node2);
2202 
2203   /* Call all edge duplication hooks.  */
2204   void call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2);
2205 
2206   /* Call all node removal hooks.  */
2207   void call_varpool_removal_hooks (varpool_node *node);
2208 
2209   /* Call all node insertion hooks.  */
2210   void call_varpool_insertion_hooks (varpool_node *node);
2211 
2212   /* Arrange node to be first in its entry of assembler_name_hash.  */
2213   void symtab_prevail_in_asm_name_hash (symtab_node *node);
2214 
2215   /* Initialize the asm name hash, unless it is already initialized.  */
2216   void symtab_initialize_asm_name_hash (void);
2217 
2218   /* Set the DECL_ASSEMBLER_NAME and update symtab hashtables.  */
2219   void change_decl_assembler_name (tree decl, tree name);
2220 
2221   /* Dump symbol table to F.  */
2222   void dump (FILE *f);
2223 
2224   /* Dump symbol table to stderr.  */
2225   void DEBUG_FUNCTION debug (void);
2226 
2227   /* Return true if assembler names NAME1 and NAME2 lead to the same symbol
2228      name.  */
2229   static bool assembler_names_equal_p (const char *name1, const char *name2);
2230 
2231   int cgraph_count;
2232   int cgraph_max_uid;
2233   int cgraph_max_summary_uid;
2234 
2235   int edges_count;
2236   int edges_max_uid;
2237 
2238   symtab_node* GTY(()) nodes;
2239   asm_node* GTY(()) asmnodes;
2240   asm_node* GTY(()) asm_last_node;
2241   cgraph_node* GTY(()) free_nodes;
2242 
2243   /* Head of a linked list of unused (freed) call graph edges.
2244      Do not GTY((delete)) this list so that UIDs get reliably recycled.  */
2245   cgraph_edge * GTY(()) free_edges;
2246 
2247   /* The order index of the next symtab node to be created.  This is
2248      used so that we can sort the cgraph nodes in order by when we saw
2249      them, to support -fno-toplevel-reorder.  */
2250   int order;
2251 
2252   /* Set when whole unit has been analyzed so we can access global info.  */
2253   bool global_info_ready;
2254   /* What state callgraph is in right now.  */
2255   enum symtab_state state;
2256   /* Set when the cgraph is fully built and the basic flags are computed.  */
2257   bool function_flags_ready;
2258 
2259   bool cpp_implicit_aliases_done;
2260 
2261   /* Hash table used to hold sections.  */
2262   hash_table<section_name_hasher> *GTY(()) section_hash;
2263 
2264   /* Hash table used to convert assembler names into nodes.  */
2265   hash_table<asmname_hasher> *assembler_name_hash;
2266 
2267   /* Hash table used to hold init priorities.  */
2268   hash_map<symtab_node *, symbol_priority_map> *init_priority_hash;
2269 
2270   FILE* GTY ((skip)) dump_file;
2271 
2272   /* Return symbol used to separate symbol name from suffix.  */
2273   static char symbol_suffix_separator ();
2274 
2275   FILE* GTY ((skip)) ipa_clones_dump_file;
2276 
2277   hash_set <const cgraph_node *> GTY ((skip)) cloned_nodes;
2278 
2279 private:
2280   /* Allocate new callgraph node.  */
2281   inline cgraph_node * allocate_cgraph_symbol (void);
2282 
2283   /* Allocate a cgraph_edge structure and fill it with data according to the
2284      parameters of which only CALLEE can be NULL (when creating an indirect call
2285      edge).  */
2286   cgraph_edge *create_edge (cgraph_node *caller, cgraph_node *callee,
2287 			    gcall *call_stmt, profile_count count,
2288 			    bool indir_unknown_callee);
2289 
2290   /* Put the edge onto the free list.  */
2291   void free_edge (cgraph_edge *e);
2292 
2293   /* Insert NODE to assembler name hash.  */
2294   void insert_to_assembler_name_hash (symtab_node *node, bool with_clones);
2295 
2296   /* Remove NODE from assembler name hash.  */
2297   void unlink_from_assembler_name_hash (symtab_node *node, bool with_clones);
2298 
2299   /* Hash asmnames ignoring the user specified marks.  */
2300   static hashval_t decl_assembler_name_hash (const_tree asmname);
2301 
2302   /* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL.  */
2303   static bool decl_assembler_name_equal (tree decl, const_tree asmname);
2304 
2305   friend struct asmname_hasher;
2306 
2307   /* List of hooks triggered when an edge is removed.  */
2308   cgraph_edge_hook_list * GTY((skip)) m_first_edge_removal_hook;
2309   /* List of hooks triggered when a cgraph node is removed.  */
2310   cgraph_node_hook_list * GTY((skip)) m_first_cgraph_removal_hook;
2311   /* List of hooks triggered when an edge is duplicated.  */
2312   cgraph_2edge_hook_list * GTY((skip)) m_first_edge_duplicated_hook;
2313   /* List of hooks triggered when a node is duplicated.  */
2314   cgraph_2node_hook_list * GTY((skip)) m_first_cgraph_duplicated_hook;
2315   /* List of hooks triggered when a function is inserted.  */
2316   cgraph_node_hook_list * GTY((skip)) m_first_cgraph_insertion_hook;
2317   /* List of hooks triggered when a variable is inserted.  */
2318   varpool_node_hook_list * GTY((skip)) m_first_varpool_insertion_hook;
2319   /* List of hooks triggered when a node is removed.  */
2320   varpool_node_hook_list * GTY((skip)) m_first_varpool_removal_hook;
2321 };
2322 
2323 extern GTY(()) symbol_table *symtab;
2324 
2325 extern vec<cgraph_node *> cgraph_new_nodes;
2326 
2327 inline hashval_t
2328 asmname_hasher::hash (symtab_node *n)
2329 {
2330   return symbol_table::decl_assembler_name_hash
2331     (DECL_ASSEMBLER_NAME (n->decl));
2332 }
2333 
2334 inline bool
2335 asmname_hasher::equal (symtab_node *n, const_tree t)
2336 {
2337   return symbol_table::decl_assembler_name_equal (n->decl, t);
2338 }
2339 
2340 /* In cgraph.c  */
2341 void cgraph_c_finalize (void);
2342 void release_function_body (tree);
2343 cgraph_indirect_call_info *cgraph_allocate_init_indirect_info (void);
2344 
2345 void cgraph_update_edges_for_call_stmt (gimple *, tree, gimple *);
2346 bool cgraph_function_possibly_inlined_p (tree);
2347 
2348 const char* cgraph_inline_failed_string (cgraph_inline_failed_t);
2349 cgraph_inline_failed_type_t cgraph_inline_failed_type (cgraph_inline_failed_t);
2350 
2351 extern bool gimple_check_call_matching_types (gimple *, tree, bool);
2352 
2353 /* In cgraphunit.c  */
2354 void cgraphunit_c_finalize (void);
2355 
2356 /* Initialize data structures so DECL is a function in lowered gimple form.
2357     IN_SSA is true if the gimple is in SSA.  */
2358 basic_block init_lowered_empty_function (tree, bool, profile_count);
2359 
2360 tree thunk_adjust (gimple_stmt_iterator *, tree, bool, HOST_WIDE_INT, tree);
2361 /* In cgraphclones.c  */
2362 
2363 tree clone_function_name_1 (const char *, const char *);
2364 tree clone_function_name (tree decl, const char *);
2365 
2366 void tree_function_versioning (tree, tree, vec<ipa_replace_map *, va_gc> *,
2367 			       bool, bitmap, bool, bitmap, basic_block);
2368 
2369 void dump_callgraph_transformation (const cgraph_node *original,
2370 				    const cgraph_node *clone,
2371 				    const char *suffix);
2372 tree cgraph_build_function_type_skip_args (tree orig_type, bitmap args_to_skip,
2373 					   bool skip_return);
2374 
2375 /* In cgraphbuild.c  */
2376 int compute_call_stmt_bb_frequency (tree, basic_block bb);
2377 void record_references_in_initializer (tree, bool);
2378 
2379 /* In ipa.c  */
2380 void cgraph_build_static_cdtor (char which, tree body, int priority);
2381 bool ipa_discover_readonly_nonaddressable_vars (void);
2382 
2383 /* In varpool.c  */
2384 tree ctor_for_folding (tree);
2385 
2386 /* In tree-chkp.c  */
2387 extern bool chkp_function_instrumented_p (tree fndecl);
2388 
2389 /* In ipa-inline-analysis.c  */
2390 void initialize_inline_failed (struct cgraph_edge *);
2391 bool speculation_useful_p (struct cgraph_edge *e, bool anticipate_inlining);
2392 
2393 /* Return true when the symbol is a real symbol, i.e. it is not an inline clone
2394    or an abstract function kept for debug info purposes only.  */
2395 inline bool
2396 symtab_node::real_symbol_p (void)
2397 {
2398   cgraph_node *cnode;
2399 
2400   if (DECL_ABSTRACT_P (decl))
2401     return false;
2402   if (transparent_alias && definition)
2403     return false;
2404   if (!is_a <cgraph_node *> (this))
2405     return true;
2406   cnode = dyn_cast <cgraph_node *> (this);
2407   if (cnode->global.inlined_to)
2408     return false;
2409   return true;
2410 }
2411 
2412 /* Return true if DECL should have an entry in the symbol table if used.
2413    Those are functions and static & external variables.  */
2414 
2415 static inline bool
2416 decl_in_symtab_p (const_tree decl)
2417 {
2418   return (TREE_CODE (decl) == FUNCTION_DECL
2419           || (TREE_CODE (decl) == VAR_DECL
2420 	      && (TREE_STATIC (decl) || DECL_EXTERNAL (decl))));
2421 }
2422 
2423 inline bool
2424 symtab_node::in_same_comdat_group_p (symtab_node *target)
2425 {
2426   symtab_node *source = this;
2427 
2428   if (cgraph_node *cn = dyn_cast <cgraph_node *> (source))
2429     {
2430       if (cn->global.inlined_to)
2431 	source = cn->global.inlined_to;
2432     }
2433   if (cgraph_node *cn = dyn_cast <cgraph_node *> (target))
2434     {
2435       if (cn->global.inlined_to)
2436 	target = cn->global.inlined_to;
2437     }
2438 
2439   return source->get_comdat_group () == target->get_comdat_group ();
2440 }
2441 
2442 /* Return node that alias is aliasing.  */
2443 
2444 inline symtab_node *
2445 symtab_node::get_alias_target (void)
2446 {
2447   ipa_ref *ref = NULL;
2448   iterate_reference (0, ref);
2449   if (ref->use == IPA_REF_CHKP)
2450     iterate_reference (1, ref);
2451   gcc_checking_assert (ref->use == IPA_REF_ALIAS);
2452   return ref->referred;
2453 }
2454 
2455 /* Return the next symbol with a definition after this node.  */
2456 
2457 inline symtab_node *
2458 symtab_node::next_defined_symbol (void)
2459 {
2460   symtab_node *node1 = next;
2461 
2462   for (; node1; node1 = node1->next)
2463     if (node1->definition)
2464       return node1;
2465 
2466   return NULL;
2467 }
2468 
2469 /* Return the I-th reference in the list and also store it in REF.  */
2470 
2471 inline ipa_ref *
2472 symtab_node::iterate_reference (unsigned i, ipa_ref *&ref)
2473 {
2474   vec_safe_iterate (ref_list.references, i, &ref);
2475 
2476   return ref;
2477 }
2478 
2479 /* Return the I-th referring item in the list and also store it in REF.  */
2480 
2481 inline ipa_ref *
2482 symtab_node::iterate_referring (unsigned i, ipa_ref *&ref)
2483 {
2484   ref_list.referring.iterate (i, &ref);
2485 
2486   return ref;
2487 }
2488 
2489 /* Return the I-th referring alias in the list and also store it in REF.  */
2490 
2491 inline ipa_ref *
2492 symtab_node::iterate_direct_aliases (unsigned i, ipa_ref *&ref)
2493 {
2494   ref_list.referring.iterate (i, &ref);
2495 
2496   if (ref && ref->use != IPA_REF_ALIAS)
2497     return NULL;
2498 
2499   return ref;
2500 }
2501 
2502 /* Return true if list contains an alias.  */
2503 
2504 inline bool
2505 symtab_node::has_aliases_p (void)
2506 {
2507   ipa_ref *ref = NULL;
2508 
2509   return (iterate_direct_aliases (0, ref) != NULL);
2510 }
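/* Usage sketch (illustrative only; PROCESS is a hypothetical helper): direct
   aliases of NODE are commonly walked with an index-based loop over
   iterate_direct_aliases:

     ipa_ref *ref;
     for (unsigned i = 0; node->iterate_direct_aliases (i, ref); i++)
       process (ref->referring);

   REF->referring is the alias node itself.  */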
2511 
2512 /* Return true when RESOLUTION indicates that the linker will use
2513    the symbol from non-LTO object files.  */
2514 
2515 inline bool
2516 resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
2517 {
2518   return (resolution == LDPR_PREVAILING_DEF
2519 	  || resolution == LDPR_PREEMPTED_REG
2520 	  || resolution == LDPR_RESOLVED_EXEC
2521 	  || resolution == LDPR_RESOLVED_DYN);
2522 }
2523 
2524 /* Return true when symtab_node is known to be used from other (non-LTO)
2525    object file. Known only when doing LTO via linker plugin.  */
2526 
2527 inline bool
2528 symtab_node::used_from_object_file_p (void)
2529 {
2530   if (!TREE_PUBLIC (decl) || DECL_EXTERNAL (decl))
2531     return false;
2532   if (resolution_used_from_other_file_p (resolution))
2533     return true;
2534   return false;
2535 }
2536 
2537 /* Return varpool node for given symbol and check it is a variable.  */
2538 
2539 inline varpool_node *
2540 varpool_node::get (const_tree decl)
2541 {
2542   gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
2543   return dyn_cast<varpool_node *> (symtab_node::get (decl));
2544 }
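/* Usage sketch (illustrative only; DECL is assumed to be a VAR_DECL already
   entered into the symbol table):

     varpool_node *vnode = varpool_node::get (decl);
     if (vnode && vnode->ctor_useable_for_folding_p ())
       {
         tree init = vnode->get_constructor ();
         ...
       }

   get () returns NULL when no node exists; varpool_node::get_create
   allocates one on demand.  */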
2545 
2546 /* Register a symbol NODE.  */
2547 
2548 inline void
2549 symbol_table::register_symbol (symtab_node *node)
2550 {
2551   node->next = nodes;
2552   node->previous = NULL;
2553 
2554   if (nodes)
2555     nodes->previous = node;
2556   nodes = node;
2557 
2558   node->order = order++;
2559 }
2560 
2561 /* Register a top-level asm statement ASM_STR.  */
2562 
2563 asm_node *
2564 symbol_table::finalize_toplevel_asm (tree asm_str)
2565 {
2566   asm_node *node;
2567 
2568   node = ggc_cleared_alloc<asm_node> ();
2569   node->asm_str = asm_str;
2570   node->order = order++;
2571   node->next = NULL;
2572 
2573   if (asmnodes == NULL)
2574     asmnodes = node;
2575   else
2576     asm_last_node->next = node;
2577 
2578   asm_last_node = node;
2579   return node;
2580 }
2581 
2582 /* Unregister a symbol NODE.  */
2583 inline void
2584 symbol_table::unregister (symtab_node *node)
2585 {
2586   if (node->previous)
2587     node->previous->next = node->next;
2588   else
2589     nodes = node->next;
2590 
2591   if (node->next)
2592     node->next->previous = node->previous;
2593 
2594   node->next = NULL;
2595   node->previous = NULL;
2596 }
2597 
2598 /* Release a callgraph NODE with UID and put it into the list of free nodes.  */
2599 
2600 inline void
2601 symbol_table::release_symbol (cgraph_node *node, int uid)
2602 {
2603   cgraph_count--;
2604 
2605   /* Clear out the node to NULL all pointers and add the node to the free
2606      list.  */
2607   memset (node, 0, sizeof (*node));
2608   node->type = SYMTAB_FUNCTION;
2609   node->uid = uid;
2610   SET_NEXT_FREE_NODE (node, free_nodes);
2611   free_nodes = node;
2612 }
2613 
2614 /* Allocate new callgraph node.  */
2615 
2616 inline cgraph_node *
2617 symbol_table::allocate_cgraph_symbol (void)
2618 {
2619   cgraph_node *node;
2620 
2621   if (free_nodes)
2622     {
2623       node = free_nodes;
2624       free_nodes = NEXT_FREE_NODE (node);
2625     }
2626   else
2627     {
2628       node = ggc_cleared_alloc<cgraph_node> ();
2629       node->uid = cgraph_max_uid++;
2630     }
2631 
2632   node->summary_uid = cgraph_max_summary_uid++;
2633   return node;
2634 }
2635 
2636 
2637 /* Return the first symbol in the symbol table.  */
2638 inline symtab_node *
2639 symbol_table::first_symbol (void)
2640 {
2641   return nodes;
2642 }
2643 
2644 /* Walk all symbols.  */
2645 #define FOR_EACH_SYMBOL(node) \
2646    for ((node) = symtab->first_symbol (); (node); (node) = (node)->next)
2647 
2648 /* Return first static symbol with definition.  */
2649 inline symtab_node *
2650 symbol_table::first_defined_symbol (void)
2651 {
2652   symtab_node *node;
2653 
2654   for (node = nodes; node; node = node->next)
2655     if (node->definition)
2656       return node;
2657 
2658   return NULL;
2659 }
2660 
2661 /* Walk all symbols with definitions in current unit.  */
2662 #define FOR_EACH_DEFINED_SYMBOL(node) \
2663    for ((node) = symtab->first_defined_symbol (); (node); \
2664 	(node) = node->next_defined_symbol ())
2665 
2666 /* Return first variable.  */
2667 inline varpool_node *
2668 symbol_table::first_variable (void)
2669 {
2670   symtab_node *node;
2671   for (node = nodes; node; node = node->next)
2672     if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
2673       return vnode;
2674   return NULL;
2675 }
2676 
2677 /* Return next variable after NODE.  */
2678 inline varpool_node *
2679 symbol_table::next_variable (varpool_node *node)
2680 {
2681   symtab_node *node1 = node->next;
2682   for (; node1; node1 = node1->next)
2683     if (varpool_node *vnode1 = dyn_cast <varpool_node *> (node1))
2684       return vnode1;
2685   return NULL;
2686 }
2687 /* Walk all variables.  */
2688 #define FOR_EACH_VARIABLE(node) \
2689    for ((node) = symtab->first_variable (); \
2690         (node); \
2691 	(node) = symtab->next_variable ((node)))
2692 
2693 /* Return first static variable with initializer.  */
2694 inline varpool_node *
2695 symbol_table::first_static_initializer (void)
2696 {
2697   symtab_node *node;
2698   for (node = nodes; node; node = node->next)
2699     {
2700       varpool_node *vnode = dyn_cast <varpool_node *> (node);
2701       if (vnode && DECL_INITIAL (node->decl))
2702 	return vnode;
2703     }
2704   return NULL;
2705 }
2706 
2707 /* Return next static variable with initializer after NODE.  */
2708 inline varpool_node *
2709 symbol_table::next_static_initializer (varpool_node *node)
2710 {
2711   symtab_node *node1 = node->next;
2712   for (; node1; node1 = node1->next)
2713     {
2714       varpool_node *vnode1 = dyn_cast <varpool_node *> (node1);
2715       if (vnode1 && DECL_INITIAL (node1->decl))
2716 	return vnode1;
2717     }
2718   return NULL;
2719 }
2720 
2721 /* Walk all static variables with initializer set.  */
2722 #define FOR_EACH_STATIC_INITIALIZER(node) \
2723    for ((node) = symtab->first_static_initializer (); (node); \
2724 	(node) = symtab->next_static_initializer (node))
2725 
2726 /* Return first static variable with definition.  */
2727 inline varpool_node *
2728 symbol_table::first_defined_variable (void)
2729 {
2730   symtab_node *node;
2731   for (node = nodes; node; node = node->next)
2732     {
2733       varpool_node *vnode = dyn_cast <varpool_node *> (node);
2734       if (vnode && vnode->definition)
2735 	return vnode;
2736     }
2737   return NULL;
2738 }
2739 
2740 /* Return next static variable with definition after NODE.  */
2741 inline varpool_node *
2742 symbol_table::next_defined_variable (varpool_node *node)
2743 {
2744   symtab_node *node1 = node->next;
2745   for (; node1; node1 = node1->next)
2746     {
2747       varpool_node *vnode1 = dyn_cast <varpool_node *> (node1);
2748       if (vnode1 && vnode1->definition)
2749 	return vnode1;
2750     }
2751   return NULL;
2752 }
2753 /* Walk all variables with definitions in current unit.  */
2754 #define FOR_EACH_DEFINED_VARIABLE(node) \
2755    for ((node) = symtab->first_defined_variable (); (node); \
2756 	(node) = symtab->next_defined_variable (node))
2757 
2758 /* Return first function with body defined.  */
2759 inline cgraph_node *
2760 symbol_table::first_defined_function (void)
2761 {
2762   symtab_node *node;
2763   for (node = nodes; node; node = node->next)
2764     {
2765       cgraph_node *cn = dyn_cast <cgraph_node *> (node);
2766       if (cn && cn->definition)
2767 	return cn;
2768     }
2769   return NULL;
2770 }
2771 
2772 /* Return next function with body defined after NODE.  */
2773 inline cgraph_node *
2774 symbol_table::next_defined_function (cgraph_node *node)
2775 {
2776   symtab_node *node1 = node->next;
2777   for (; node1; node1 = node1->next)
2778     {
2779       cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
2780       if (cn1 && cn1->definition)
2781 	return cn1;
2782     }
2783   return NULL;
2784 }
2785 
2786 /* Walk all functions with body defined.  */
2787 #define FOR_EACH_DEFINED_FUNCTION(node) \
2788    for ((node) = symtab->first_defined_function (); (node); \
2789 	(node) = symtab->next_defined_function ((node)))
2790 
2791 /* Return first function.  */
2792 inline cgraph_node *
2793 symbol_table::first_function (void)
2794 {
2795   symtab_node *node;
2796   for (node = nodes; node; node = node->next)
2797     if (cgraph_node *cn = dyn_cast <cgraph_node *> (node))
2798       return cn;
2799   return NULL;
2800 }
2801 
2802 /* Return next function.  */
2803 inline cgraph_node *
2804 symbol_table::next_function (cgraph_node *node)
2805 {
2806   symtab_node *node1 = node->next;
2807   for (; node1; node1 = node1->next)
2808     if (cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1))
2809       return cn1;
2810   return NULL;
2811 }
2812 
2813 /* Return first function with Gimple body.  */
2814 inline cgraph_node *
2815 symbol_table::first_function_with_gimple_body (void)
2816 {
2817   symtab_node *node;
2818   for (node = nodes; node; node = node->next)
2819     {
2820       cgraph_node *cn = dyn_cast <cgraph_node *> (node);
2821       if (cn && cn->has_gimple_body_p ())
2822 	return cn;
2823     }
2824   return NULL;
2825 }
2826 
2827 /* Return the next function with a Gimple body defined after NODE.  */
2828 inline cgraph_node *
2829 symbol_table::next_function_with_gimple_body (cgraph_node *node)
2830 {
2831   symtab_node *node1 = node->next;
2832   for (; node1; node1 = node1->next)
2833     {
2834       cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1);
2835       if (cn1 && cn1->has_gimple_body_p ())
2836 	return cn1;
2837     }
2838   return NULL;
2839 }
2840 
2841 /* Walk all functions.  */
2842 #define FOR_EACH_FUNCTION(node) \
2843    for ((node) = symtab->first_function (); (node); \
2844 	(node) = symtab->next_function ((node)))
2845 
2846 /* Return true when the callgraph node is a function with a Gimple body defined
2847    in the current unit.  Functions can also be defined externally, or they
2848    can be thunks with no Gimple representation.
2849 
2850    Note that at WPA stage, the function body may not be present in memory.  */
2851 
2852 inline bool
2853 cgraph_node::has_gimple_body_p (void)
2854 {
2855   return definition && !thunk.thunk_p && !alias;
2856 }
2857 
2858 /* Walk all functions with body defined.  */
2859 #define FOR_EACH_FUNCTION_WITH_GIMPLE_BODY(node) \
2860    for ((node) = symtab->first_function_with_gimple_body (); (node); \
2861 	(node) = symtab->next_function_with_gimple_body (node))
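
/* Usage sketch (editorial example, not part of the original header):
   FOR_EACH_FUNCTION visits every cgraph node, including external
   declarations, thunks and aliases, while the _WITH_GIMPLE_BODY variant
   visits only nodes for which has_gimple_body_p () holds:

       cgraph_node *node;
       int n_all = 0, n_bodies = 0;
       FOR_EACH_FUNCTION (node)
         n_all++;
       FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
         n_bodies++;
       gcc_checking_assert (n_bodies <= n_all);  */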
2862 
2863 /* Uniquize all constants that appear in memory.
2864    Each constant output to memory thus far is recorded
2865    in `const_desc_table'.  */
2866 
2867 struct GTY((for_user)) constant_descriptor_tree {
2868   /* A MEM for the constant.  */
2869   rtx rtl;
2870 
2871   /* The value of the constant.  */
2872   tree value;
2873 
2874   /* Hash of value.  Computing the hash from value each time
2875      hashfn is called can't work properly, as that means recursive
2876      use of the hash table during hash table expansion.  */
2877   hashval_t hash;
2878 };
2879 
2880 /* Return true when function is only called directly or it has alias.
2881    i.e. it is not externally visible, address was not taken and
2882    it is not used in any other non-standard way.  */
2883 
2884 inline bool
2885 cgraph_node::only_called_directly_or_aliased_p (void)
2886 {
2887   gcc_assert (!global.inlined_to);
2888   return (!force_output && !address_taken
2889 	  && !used_from_other_partition
2890 	  && !DECL_VIRTUAL_P (decl)
2891 	  && !DECL_STATIC_CONSTRUCTOR (decl)
2892 	  && !DECL_STATIC_DESTRUCTOR (decl)
2893 	  && !used_from_object_file_p ()
2894 	  && !externally_visible);
2895 }
2896 
2897 /* Return true when the function can be removed from the callgraph
2898    if all direct calls and references to it are eliminated.  */
2899 
2900 inline bool
2901 cgraph_node::can_remove_if_no_direct_calls_and_refs_p (void)
2902 {
2903   gcc_checking_assert (!global.inlined_to);
2904   /* Instrumentation clones should not be removed before
2905      instrumentation happens.  New callers may appear after
2906      instrumentation.  */
2907   if (instrumentation_clone
2908       && !chkp_function_instrumented_p (decl))
2909     return false;
2910   /* Extern inlines can always go; we will use the external definition.  */
2911   if (DECL_EXTERNAL (decl))
2912     return true;
2913   /* When the function is needed, we cannot remove it.  */
2914   if (force_output || used_from_other_partition)
2915     return false;
2916   if (DECL_STATIC_CONSTRUCTOR (decl)
2917       || DECL_STATIC_DESTRUCTOR (decl))
2918     return false;
2919   /* Only COMDAT functions can be removed if externally visible.  */
2920   if (externally_visible
2921       && (!DECL_COMDAT (decl)
2922 	  || forced_by_abi
2923 	  || used_from_object_file_p ()))
2924     return false;
2925   return true;
2926 }
2927 
2928 /* Verify cgraph, if consistency checking is enabled.  */
2929 
2930 inline void
2931 cgraph_node::checking_verify_cgraph_nodes (void)
2932 {
2933   if (flag_checking)
2934     cgraph_node::verify_cgraph_nodes ();
2935 }
2936 
2937 /* Return true when the variable can be removed from the variable pool
2938    if all references to it are eliminated.  */
2939 
2940 inline bool
2941 varpool_node::can_remove_if_no_refs_p (void)
2942 {
2943   if (DECL_EXTERNAL (decl))
2944     return true;
2945   return (!force_output && !used_from_other_partition
2946 	  && ((DECL_COMDAT (decl)
2947 	       && !forced_by_abi
2948 	       && !used_from_object_file_p ())
2949 	      || !externally_visible
2950 	      || DECL_HAS_VALUE_EXPR_P (decl)));
2951 }
2952 
2953 /* Return true when all references to the variable must be visible in
2954    ipa_ref_list, i.e. when the variable is neither externally visible nor
2955    used in some magic way (asm statement or such).
2956    The magic uses are all summarized in the force_output flag.  */
2957 
2958 inline bool
2959 varpool_node::all_refs_explicit_p ()
2960 {
2961   return (definition
2962 	  && !externally_visible
2963 	  && !used_from_other_partition
2964 	  && !force_output);
2965 }
2966 
2967 struct tree_descriptor_hasher : ggc_ptr_hash<constant_descriptor_tree>
2968 {
2969   static hashval_t hash (constant_descriptor_tree *);
2970   static bool equal (constant_descriptor_tree *, constant_descriptor_tree *);
2971 };
2972 
2973 /* Constant pool accessor function.  */
2974 hash_table<tree_descriptor_hasher> *constant_pool_htab (void);
2975 
2976 /* Return the node that the alias is aliasing.  */
2977 
2978 inline cgraph_node *
2979 cgraph_node::get_alias_target (void)
2980 {
2981   return dyn_cast <cgraph_node *> (symtab_node::get_alias_target ());
2982 }
2983 
2984 /* Return node that alias is aliasing.  */
2985 
2986 inline varpool_node *
2987 varpool_node::get_alias_target (void)
2988 {
2989   return dyn_cast <varpool_node *> (symtab_node::get_alias_target ());
2990 }
2991 
2992 /* Walk the alias chain to return the symbol NODE is an alias of.
2993    If NODE is not an alias, return NODE.
2994    When AVAILABILITY is non-NULL, get minimal availability in the chain.
2995    When REF is non-NULL, assume that reference happens in symbol REF
2996    when determining the availability.  */
2997 
2998 inline symtab_node *
2999 symtab_node::ultimate_alias_target (enum availability *availability,
3000 				    symtab_node *ref)
3001 {
3002   if (!alias)
3003     {
3004       if (availability)
3005 	*availability = get_availability (ref);
3006       return this;
3007     }
3008 
3009   return ultimate_alias_target_1 (availability, ref);
3010 }
3011 
3012 /* Given a function symbol, walk the alias chain to return the function it
3013    is an alias of.  Do not walk through thunks.
3014    When AVAILABILITY is non-NULL, get minimal availability in the chain.
3015    When REF is non-NULL, assume that reference happens in symbol REF
3016    when determining the availability.  */
3017 
3018 inline cgraph_node *
3019 cgraph_node::ultimate_alias_target (enum availability *availability,
3020 				    symtab_node *ref)
3021 {
3022   cgraph_node *n = dyn_cast <cgraph_node *>
3023     (symtab_node::ultimate_alias_target (availability, ref));
3024   if (!n && availability)
3025     *availability = AVAIL_NOT_AVAILABLE;
3026   return n;
3027 }
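
/* Usage sketch (editorial example, not part of the original header): resolve
   an alias to its target and only rely on the target's body when it is known
   to be used in the final program; `n' stands for an arbitrary cgraph node:

       enum availability avail;
       cgraph_node *target = n->ultimate_alias_target (&avail);
       bool body_usable = target && avail >= AVAIL_AVAILABLE;  */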
3028 
3029 /* For a given variable pool node, walk the alias chain to return the
3030    variable it is an alias of.
3031    When AVAILABILITY is non-NULL, get minimal availability in the chain.
3032    When REF is non-NULL, assume that reference happens in symbol REF
3033    when determining the availability.  */
3034 
3035 inline varpool_node *
3036 varpool_node::ultimate_alias_target (availability *availability,
3037 				     symtab_node *ref)
3038 {
3039   varpool_node *n = dyn_cast <varpool_node *>
3040     (symtab_node::ultimate_alias_target (availability, ref));
3041 
3042   if (!n && availability)
3043     *availability = AVAIL_NOT_AVAILABLE;
3044   return n;
3045 }
3046 
3047 /* Set N as the callee of the call graph edge and add the edge to N's list
3048    of callers.  */
3049 
3050 inline void
3051 cgraph_edge::set_callee (cgraph_node *n)
3052 {
3053   prev_caller = NULL;
3054   if (n->callers)
3055     n->callers->prev_caller = this;
3056   next_caller = n->callers;
3057   n->callers = this;
3058   callee = n;
3059 }
3060 
3061 /* Redirect the callee of the edge to N.  The function does not update the
3062    underlying call expression.  */
3063 
3064 inline void
3065 cgraph_edge::redirect_callee (cgraph_node *n)
3066 {
3067   /* Remove from callers list of the current callee.  */
3068   remove_callee ();
3069 
3070   /* Insert to callers list of the new callee.  */
3071   set_callee (n);
3072 }
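
/* Usage sketch (editorial example, not part of the original header): because
   redirect_callee only rewires the callgraph edge, passes that also carry the
   IL in memory typically pair it with a call-statement fixup; `e' and
   `new_callee' are placeholder names:

       e->redirect_callee (new_callee);
       if (e->call_stmt)
         e->redirect_call_stmt_to_callee ();  */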
3073 
3074 /* Return true when the edge represents a direct recursion.  */
3075 
3076 inline bool
3077 cgraph_edge::recursive_p (void)
3078 {
3079   cgraph_node *c = callee->ultimate_alias_target ();
3080   if (caller->global.inlined_to)
3081     return caller->global.inlined_to->decl == c->decl;
3082   else
3083     return caller->decl == c->decl;
3084 }
3085 
3086 /* Remove the edge from the list of the callers of the callee.  */
3087 
3088 inline void
3089 cgraph_edge::remove_callee (void)
3090 {
3091   gcc_assert (!indirect_unknown_callee);
3092   if (prev_caller)
3093     prev_caller->next_caller = next_caller;
3094   if (next_caller)
3095     next_caller->prev_caller = prev_caller;
3096   if (!prev_caller)
3097     callee->callers = next_caller;
3098 }
3099 
3100 /* Return true if call must bind to current definition.  */
3101 
3102 inline bool
3103 cgraph_edge::binds_to_current_def_p ()
3104 {
3105   if (callee)
3106     return callee->binds_to_current_def_p (caller);
3107   else
3108     return false;
3109 }
3110 
3111 /* Expected frequency of executions within the function.
3112    When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
3113    per function call.  The range is 0 to CGRAPH_FREQ_MAX.  */
3114 
3115 inline int
3116 cgraph_edge::frequency ()
3117 {
3118   return count.to_cgraph_frequency (caller->global.inlined_to
3119 				    ? caller->global.inlined_to->count
3120 				    : caller->count);
3121 }
3122 
3123 
3124 /* Return true if the TM_CLONE bit is set for a given FNDECL.  */
3125 static inline bool
3126 decl_is_tm_clone (const_tree fndecl)
3127 {
3128   cgraph_node *n = cgraph_node::get (fndecl);
3129   if (n)
3130     return n->tm_clone;
3131   return false;
3132 }
3133 
3134 /* Indicate that the node is needed, i.e. reachable via some
3135    external means.  */
3136 
3137 inline void
3138 cgraph_node::mark_force_output (void)
3139 {
3140   force_output = 1;
3141   gcc_checking_assert (!global.inlined_to);
3142 }
3143 
3144 /* Return true if function should be optimized for size.  */
3145 
3146 inline bool
3147 cgraph_node::optimize_for_size_p (void)
3148 {
3149   if (opt_for_fn (decl, optimize_size))
3150     return true;
3151   if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
3152     return true;
3153   else
3154     return false;
3155 }
3156 
3157 /* Return symtab_node for NODE or create one if it is not present
3158    in symtab.  */
3159 
3160 inline symtab_node *
3161 symtab_node::get_create (tree node)
3162 {
3163   if (TREE_CODE (node) == VAR_DECL)
3164     return varpool_node::get_create (node);
3165   else
3166     return cgraph_node::get_create (node);
3167 }
3168 
3169 /* Return availability of NODE when referenced from REF.  */
3170 
3171 inline enum availability
3172 symtab_node::get_availability (symtab_node *ref)
3173 {
3174   if (is_a <cgraph_node *> (this))
3175     return dyn_cast <cgraph_node *> (this)->get_availability (ref);
3176   else
3177     return dyn_cast <varpool_node *> (this)->get_availability (ref);
3178 }
3179 
3180 /* Call calback on symtab node and aliases associated to this node.
3181    When INCLUDE_OVERWRITABLE is false, overwritable symbols are skipped. */
3182 
3183 inline bool
3184 symtab_node::call_for_symbol_and_aliases (bool (*callback) (symtab_node *,
3185 							    void *),
3186 					  void *data,
3187 					  bool include_overwritable)
3188 {
3189   if (include_overwritable
3190       || get_availability () > AVAIL_INTERPOSABLE)
3191     {
3192       if (callback (this, data))
3193         return true;
3194     }
3195   if (has_aliases_p ())
3196     return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
3197   return false;
3198 }
3199 
3200 /* Call callback on function and aliases associated to the function.
3201    When INCLUDE_OVERWRITABLE is false, overwritable symbols are
3202    skipped.  */
3203 
3204 inline bool
3205 cgraph_node::call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
3206 							    void *),
3207 					  void *data,
3208 					  bool include_overwritable)
3209 {
3210   if (include_overwritable
3211       || get_availability () > AVAIL_INTERPOSABLE)
3212     {
3213       if (callback (this, data))
3214         return true;
3215     }
3216   if (has_aliases_p ())
3217     return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
3218   return false;
3219 }
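
/* Usage sketch (editorial example, not part of the original header): the
   callback receives each visited node together with the opaque DATA pointer
   and stops the walk by returning true; `note_node' and `some_node' are
   hypothetical names:

       static bool
       note_node (cgraph_node *node, void *data)
       {
         int *counter = (int *) data;
         if (node->definition)
           (*counter)++;
         return false;
       }

       int count = 0;
       some_node->call_for_symbol_and_aliases (note_node, &count, true);  */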
3220 
3221 /* Call calback on varpool symbol and aliases associated to varpool symbol.
3222    When INCLUDE_OVERWRITABLE is false, overwritable symbols are
3223    skipped. */
3224 
3225 inline bool
3226 varpool_node::call_for_symbol_and_aliases (bool (*callback) (varpool_node *,
3227 							     void *),
3228 					   void *data,
3229 					   bool include_overwritable)
3230 {
3231   if (include_overwritable
3232       || get_availability () > AVAIL_INTERPOSABLE)
3233     {
3234       if (callback (this, data))
3235         return true;
3236     }
3237   if (has_aliases_p ())
3238     return call_for_symbol_and_aliases_1 (callback, data, include_overwritable);
3239   return false;
3240 }
3241 
3242 /* Return true if refernece may be used in address compare.  */
3243 
3244 inline bool
3245 ipa_ref::address_matters_p ()
3246 {
3247   if (use != IPA_REF_ADDR)
3248     return false;
3249   /* Addresses taken from virtual tables are never compared.  */
3250   if (is_a <varpool_node *> (referring)
3251       && DECL_VIRTUAL_P (referring->decl))
3252     return false;
3253   return referred->address_can_be_compared_p ();
3254 }
3255 
3256 /* Build polymorphic call context for indirect call E.  */
3257 
3258 inline
3259 ipa_polymorphic_call_context::ipa_polymorphic_call_context (cgraph_edge *e)
3260 {
3261   gcc_checking_assert (e->indirect_info->polymorphic);
3262   *this = e->indirect_info->context;
3263 }
3264 
3265 /* Build empty "I know nothing" context.  */
3266 
3267 inline
3268 ipa_polymorphic_call_context::ipa_polymorphic_call_context ()
3269 {
3270   clear_speculation ();
3271   clear_outer_type ();
3272   invalid = false;
3273 }
3274 
3275 /* Make context non-speculative.  */
3276 
3277 inline void
3278 ipa_polymorphic_call_context::clear_speculation ()
3279 {
3280   speculative_outer_type = NULL;
3281   speculative_offset = 0;
3282   speculative_maybe_derived_type = false;
3283 }
3284 
3285 /* Produce context specifying all derrived types of OTR_TYPE.  If OTR_TYPE is
3286    NULL, the context is set to dummy "I know nothing" setting.  */
3287 
3288 inline void
3289 ipa_polymorphic_call_context::clear_outer_type (tree otr_type)
3290 {
3291   outer_type = otr_type ? TYPE_MAIN_VARIANT (otr_type) : NULL;
3292   offset = 0;
3293   maybe_derived_type = true;
3294   maybe_in_construction = true;
3295   dynamic = true;
3296 }
3297 
3298 /* Adjust all offsets in contexts by OFF bits.  */
3299 
3300 inline void
3301 ipa_polymorphic_call_context::offset_by (HOST_WIDE_INT off)
3302 {
3303   if (outer_type)
3304     offset += off;
3305   if (speculative_outer_type)
3306     speculative_offset += off;
3307 }
3308 
3309 /* Return TRUE if context is fully useless.  */
3310 
3311 inline bool
3312 ipa_polymorphic_call_context::useless_p () const
3313 {
3314   return (!outer_type && !speculative_outer_type);
3315 }
3316 
3317 /* Return true if NODE is local.  Instrumentation clones are counted as local
3318    only when original function is local.  */
3319 
3320 static inline bool
3321 cgraph_local_p (cgraph_node *node)
3322 {
3323   if (!node->instrumentation_clone || !node->instrumented_version)
3324     return node->local.local;
3325 
3326   return node->local.local && node->instrumented_version->local.local;
3327 }
3328 
3329 /* When using fprintf (or similar), problems can arise with
3330    transient generated strings.  Many string-generation APIs
3331    only support one result being alive at once (e.g. by
3332    returning a pointer to a statically-allocated buffer).
3333 
3334    If there is more than one generated string within one
3335    fprintf call: the first string gets evicted or overwritten
3336    by the second, before fprintf is fully evaluated.
3337    See e.g. PR/53136.
3338 
3339    This function provides a workaround for this, by providing
3340    a simple way to create copies of these transient strings,
3341    without the need to have explicit cleanup:
3342 
3343        fprintf (dumpfile, "string 1: %s string 2:%s\n",
3344                 xstrdup_for_dump (EXPR_1),
3345                 xstrdup_for_dump (EXPR_2));
3346 
3347    This is actually a simple wrapper around ggc_strdup, but
3348    the name documents the intent.  We require that no GC can occur
3349    within the fprintf call.  */
3350 
3351 static inline const char *
3352 xstrdup_for_dump (const char *transient_str)
3353 {
3354   return ggc_strdup (transient_str);
3355 }
3356 
3357 #endif  /* GCC_CGRAPH_H  */
3358