1 /* Callgraph handling code. 2 Copyright (C) 2003-2018 Free Software Foundation, Inc. 3 Contributed by Jan Hubicka 4 5 This file is part of GCC. 6 7 GCC is free software; you can redistribute it and/or modify it under 8 the terms of the GNU General Public License as published by the Free 9 Software Foundation; either version 3, or (at your option) any later 10 version. 11 12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY 13 WARRANTY; without even the implied warranty of MERCHANTABILITY or 14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 15 for more details. 16 17 You should have received a copy of the GNU General Public License 18 along with GCC; see the file COPYING3. If not see 19 <http://www.gnu.org/licenses/>. */ 20 21 #ifndef GCC_CGRAPH_H 22 #define GCC_CGRAPH_H 23 24 #include "profile-count.h" 25 #include "ipa-ref.h" 26 #include "plugin-api.h" 27 28 class ipa_opt_pass_d; 29 typedef ipa_opt_pass_d *ipa_opt_pass; 30 31 /* Symbol table consists of functions and variables. 32 TODO: add labels and CONST_DECLs. */ 33 enum symtab_type 34 { 35 SYMTAB_SYMBOL, 36 SYMTAB_FUNCTION, 37 SYMTAB_VARIABLE 38 }; 39 40 /* Section names are stored as reference counted strings in GGC safe hashtable 41 (to make them survive through PCH). */ 42 43 struct GTY((for_user)) section_hash_entry 44 { 45 int ref_count; 46 char *name; /* As long as this datastructure stays in GGC, we can not put 47 string at the tail of structure of GGC dies in horrible 48 way */ 49 }; 50 51 struct section_name_hasher : ggc_ptr_hash<section_hash_entry> 52 { 53 typedef const char *compare_type; 54 55 static hashval_t hash (section_hash_entry *); 56 static bool equal (section_hash_entry *, const char *); 57 }; 58 59 enum availability 60 { 61 /* Not yet set by cgraph_function_body_availability. */ 62 AVAIL_UNSET, 63 /* Function body/variable initializer is unknown. 
*/ 64 AVAIL_NOT_AVAILABLE, 65 /* Function body/variable initializer is known but might be replaced 66 by a different one from other compilation unit and thus needs to 67 be dealt with a care. Like AVAIL_NOT_AVAILABLE it can have 68 arbitrary side effects on escaping variables and functions, while 69 like AVAILABLE it might access static variables. */ 70 AVAIL_INTERPOSABLE, 71 /* Function body/variable initializer is known and will be used in final 72 program. */ 73 AVAIL_AVAILABLE, 74 /* Function body/variable initializer is known and all it's uses are 75 explicitly visible within current unit (ie it's address is never taken and 76 it is not exported to other units). Currently used only for functions. */ 77 AVAIL_LOCAL 78 }; 79 80 /* Classification of symbols WRT partitioning. */ 81 enum symbol_partitioning_class 82 { 83 /* External declarations are ignored by partitioning algorithms and they are 84 added into the boundary later via compute_ltrans_boundary. */ 85 SYMBOL_EXTERNAL, 86 /* Partitioned symbols are pur into one of partitions. */ 87 SYMBOL_PARTITION, 88 /* Duplicated symbols (such as comdat or constant pool references) are 89 copied into every node needing them via add_symbol_to_partition. */ 90 SYMBOL_DUPLICATE 91 }; 92 93 /* Base of all entries in the symbol table. 94 The symtab_node is inherited by cgraph and varpol nodes. */ 95 class GTY((desc ("%h.type"), tag ("SYMTAB_SYMBOL"), 96 chain_next ("%h.next"), chain_prev ("%h.previous"))) 97 symtab_node 98 { 99 public: 100 /* Return name. */ 101 const char *name () const; 102 103 /* Return dump name. */ 104 const char *dump_name () const; 105 106 /* Return asm name. */ 107 const char *asm_name () const; 108 109 /* Return dump name with assembler name. */ 110 const char *dump_asm_name () const; 111 112 /* Add node into symbol table. This function is not used directly, but via 113 cgraph/varpool node creation routines. */ 114 void register_symbol (void); 115 116 /* Remove symbol from symbol table. 
*/ 117 void remove (void); 118 119 /* Dump symtab node to F. */ 120 void dump (FILE *f); 121 122 /* Dump symtab node to stderr. */ 123 void DEBUG_FUNCTION debug (void); 124 125 /* Verify consistency of node. */ 126 void DEBUG_FUNCTION verify (void); 127 128 /* Return ipa reference from this symtab_node to 129 REFERED_NODE or REFERED_VARPOOL_NODE. USE_TYPE specify type 130 of the use and STMT the statement (if it exists). */ 131 ipa_ref *create_reference (symtab_node *referred_node, 132 enum ipa_ref_use use_type); 133 134 /* Return ipa reference from this symtab_node to 135 REFERED_NODE or REFERED_VARPOOL_NODE. USE_TYPE specify type 136 of the use and STMT the statement (if it exists). */ 137 ipa_ref *create_reference (symtab_node *referred_node, 138 enum ipa_ref_use use_type, gimple *stmt); 139 140 /* If VAL is a reference to a function or a variable, add a reference from 141 this symtab_node to the corresponding symbol table node. Return the new 142 reference or NULL if none was created. */ 143 ipa_ref *maybe_create_reference (tree val, gimple *stmt); 144 145 /* Clone all references from symtab NODE to this symtab_node. */ 146 void clone_references (symtab_node *node); 147 148 /* Remove all stmt references in non-speculative references. 149 Those are not maintained during inlining & clonning. 150 The exception are speculative references that are updated along 151 with callgraph edges associated with them. */ 152 void clone_referring (symtab_node *node); 153 154 /* Clone reference REF to this symtab_node and set its stmt to STMT. */ 155 ipa_ref *clone_reference (ipa_ref *ref, gimple *stmt); 156 157 /* Find the structure describing a reference to REFERRED_NODE 158 and associated with statement STMT. */ 159 ipa_ref *find_reference (symtab_node *referred_node, gimple *stmt, 160 unsigned int lto_stmt_uid); 161 162 /* Remove all references that are associated with statement STMT. 
*/ 163 void remove_stmt_references (gimple *stmt); 164 165 /* Remove all stmt references in non-speculative references. 166 Those are not maintained during inlining & clonning. 167 The exception are speculative references that are updated along 168 with callgraph edges associated with them. */ 169 void clear_stmts_in_references (void); 170 171 /* Remove all references in ref list. */ 172 void remove_all_references (void); 173 174 /* Remove all referring items in ref list. */ 175 void remove_all_referring (void); 176 177 /* Dump references in ref list to FILE. */ 178 void dump_references (FILE *file); 179 180 /* Dump referring in list to FILE. */ 181 void dump_referring (FILE *); 182 183 /* Get number of references for this node. */ 184 inline unsigned num_references (void) 185 { 186 return ref_list.references ? ref_list.references->length () : 0; 187 } 188 189 /* Iterates I-th reference in the list, REF is also set. */ 190 ipa_ref *iterate_reference (unsigned i, ipa_ref *&ref); 191 192 /* Iterates I-th referring item in the list, REF is also set. */ 193 ipa_ref *iterate_referring (unsigned i, ipa_ref *&ref); 194 195 /* Iterates I-th referring alias item in the list, REF is also set. */ 196 ipa_ref *iterate_direct_aliases (unsigned i, ipa_ref *&ref); 197 198 /* Return true if symtab node and TARGET represents 199 semantically equivalent symbols. */ 200 bool semantically_equivalent_p (symtab_node *target); 201 202 /* Classify symbol symtab node for partitioning. */ 203 enum symbol_partitioning_class get_partitioning_class (void); 204 205 /* Return comdat group. */ 206 tree get_comdat_group () 207 { 208 return x_comdat_group; 209 } 210 211 /* Return comdat group as identifier_node. */ 212 tree get_comdat_group_id () 213 { 214 if (x_comdat_group && TREE_CODE (x_comdat_group) != IDENTIFIER_NODE) 215 x_comdat_group = DECL_ASSEMBLER_NAME (x_comdat_group); 216 return x_comdat_group; 217 } 218 219 /* Set comdat group. 
*/ 220 void set_comdat_group (tree group) 221 { 222 gcc_checking_assert (!group || TREE_CODE (group) == IDENTIFIER_NODE 223 || DECL_P (group)); 224 x_comdat_group = group; 225 } 226 227 /* Return section as string. */ 228 const char * get_section () 229 { 230 if (!x_section) 231 return NULL; 232 return x_section->name; 233 } 234 235 /* Remove node from same comdat group. */ 236 void remove_from_same_comdat_group (void); 237 238 /* Add this symtab_node to the same comdat group that OLD is in. */ 239 void add_to_same_comdat_group (symtab_node *old_node); 240 241 /* Dissolve the same_comdat_group list in which NODE resides. */ 242 void dissolve_same_comdat_group_list (void); 243 244 /* Return true when symtab_node is known to be used from other (non-LTO) 245 object file. Known only when doing LTO via linker plugin. */ 246 bool used_from_object_file_p (void); 247 248 /* Walk the alias chain to return the symbol NODE is alias of. 249 If NODE is not an alias, return NODE. 250 When AVAILABILITY is non-NULL, get minimal availability in the chain. 251 When REF is non-NULL, assume that reference happens in symbol REF 252 when determining the availability. */ 253 symtab_node *ultimate_alias_target (enum availability *avail = NULL, 254 struct symtab_node *ref = NULL); 255 256 /* Return next reachable static symbol with initializer after NODE. */ 257 inline symtab_node *next_defined_symbol (void); 258 259 /* Add reference recording that symtab node is alias of TARGET. 260 If TRANSPARENT is true make the alias to be transparent alias. 261 The function can fail in the case of aliasing cycles; in this case 262 it returns false. */ 263 bool resolve_alias (symtab_node *target, bool transparent = false); 264 265 /* C++ FE sometimes change linkage flags after producing same 266 body aliases. */ 267 void fixup_same_cpp_alias_visibility (symtab_node *target); 268 269 /* Call callback on symtab node and aliases associated to this node. 
270 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are 271 skipped. */ 272 bool call_for_symbol_and_aliases (bool (*callback) (symtab_node *, void *), 273 void *data, 274 bool include_overwrite); 275 276 /* If node can not be interposable by static or dynamic linker to point to 277 different definition, return this symbol. Otherwise look for alias with 278 such property and if none exists, introduce new one. */ 279 symtab_node *noninterposable_alias (void); 280 281 /* Return node that alias is aliasing. */ 282 inline symtab_node *get_alias_target (void); 283 284 /* Set section for symbol and its aliases. */ 285 void set_section (const char *section); 286 287 /* Set section, do not recurse into aliases. 288 When one wants to change section of symbol and its aliases, 289 use set_section. */ 290 void set_section_for_node (const char *section); 291 292 /* Set initialization priority to PRIORITY. */ 293 void set_init_priority (priority_type priority); 294 295 /* Return the initialization priority. */ 296 priority_type get_init_priority (); 297 298 /* Return availability of NODE when referenced from REF. */ 299 enum availability get_availability (symtab_node *ref = NULL); 300 301 /* Return true if NODE binds to current definition in final executable 302 when referenced from REF. If REF is NULL return conservative value 303 for any reference. */ 304 bool binds_to_current_def_p (symtab_node *ref = NULL); 305 306 /* Make DECL local. */ 307 void make_decl_local (void); 308 309 /* Copy visibility from N. */ 310 void copy_visibility_from (symtab_node *n); 311 312 /* Return desired alignment of the definition. This is NOT alignment useful 313 to access THIS, because THIS may be interposable and DECL_ALIGN should 314 be used instead. It however must be guaranteed when output definition 315 of THIS. */ 316 unsigned int definition_alignment (); 317 318 /* Return true if alignment can be increased. 
*/ 319 bool can_increase_alignment_p (); 320 321 /* Increase alignment of symbol to ALIGN. */ 322 void increase_alignment (unsigned int align); 323 324 /* Return true if list contains an alias. */ 325 bool has_aliases_p (void); 326 327 /* Return true when the symbol is real symbol, i.e. it is not inline clone 328 or abstract function kept for debug info purposes only. */ 329 bool real_symbol_p (void); 330 331 /* Return true when the symbol needs to be output to the LTO symbol table. */ 332 bool output_to_lto_symbol_table_p (void); 333 334 /* Determine if symbol declaration is needed. That is, visible to something 335 either outside this translation unit, something magic in the system 336 configury. This function is used just during symbol creation. */ 337 bool needed_p (void); 338 339 /* Return true if this symbol is a function from the C frontend specified 340 directly in RTL form (with "__RTL"). */ 341 bool native_rtl_p () const; 342 343 /* Return true when there are references to the node. */ 344 bool referred_to_p (bool include_self = true); 345 346 /* Return true if symbol can be discarded by linker from the binary. 347 Assume that symbol is used (so there is no need to take into account 348 garbage collecting linkers) 349 350 This can happen for comdats, commons and weaks when they are previaled 351 by other definition at static linking time. */ 352 inline bool 353 can_be_discarded_p (void) 354 { 355 return (DECL_EXTERNAL (decl) 356 || ((get_comdat_group () 357 || DECL_COMMON (decl) 358 || (DECL_SECTION_NAME (decl) && DECL_WEAK (decl))) 359 && ((resolution != LDPR_PREVAILING_DEF 360 && resolution != LDPR_PREVAILING_DEF_IRONLY_EXP) 361 || flag_incremental_link) 362 && resolution != LDPR_PREVAILING_DEF_IRONLY)); 363 } 364 365 /* Return true if NODE is local to a particular COMDAT group, and must not 366 be named from outside the COMDAT. This is used for C++ decloned 367 constructors. 
*/ 368 inline bool comdat_local_p (void) 369 { 370 return (same_comdat_group && !TREE_PUBLIC (decl)); 371 } 372 373 /* Return true if ONE and TWO are part of the same COMDAT group. */ 374 inline bool in_same_comdat_group_p (symtab_node *target); 375 376 /* Return true if symbol is known to be nonzero. */ 377 bool nonzero_address (); 378 379 /* Return 0 if symbol is known to have different address than S2, 380 Return 1 if symbol is known to have same address as S2, 381 return 2 otherwise. 382 383 If MEMORY_ACCESSED is true, assume that both memory pointer to THIS 384 and S2 is going to be accessed. This eliminates the situations when 385 either THIS or S2 is NULL and is seful for comparing bases when deciding 386 about memory aliasing. */ 387 int equal_address_to (symtab_node *s2, bool memory_accessed = false); 388 389 /* Return true if symbol's address may possibly be compared to other 390 symbol's address. */ 391 bool address_matters_p (); 392 393 /* Return true if NODE's address can be compared. This use properties 394 of NODE only and does not look if the address is actually taken in 395 interesting way. For that use ADDRESS_MATTERS_P instead. */ 396 bool address_can_be_compared_p (void); 397 398 /* Return symbol table node associated with DECL, if any, 399 and NULL otherwise. */ 400 static inline symtab_node *get (const_tree decl) 401 { 402 /* Check that we are called for sane type of object - functions 403 and static or external variables. */ 404 gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL 405 || (TREE_CODE (decl) == VAR_DECL 406 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl) 407 || in_lto_p))); 408 /* Check that the mapping is sane - perhaps this check can go away, 409 but at the moment frontends tends to corrupt the mapping by calling 410 memcpy/memset on the tree nodes. 
*/ 411 gcc_checking_assert (!decl->decl_with_vis.symtab_node 412 || decl->decl_with_vis.symtab_node->decl == decl); 413 return decl->decl_with_vis.symtab_node; 414 } 415 416 /* Try to find a symtab node for declaration DECL and if it does not 417 exist or if it corresponds to an inline clone, create a new one. */ 418 static inline symtab_node * get_create (tree node); 419 420 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME. 421 Return NULL if there's no such node. */ 422 static symtab_node *get_for_asmname (const_tree asmname); 423 424 /* Verify symbol table for internal consistency. */ 425 static DEBUG_FUNCTION void verify_symtab_nodes (void); 426 427 /* Perform internal consistency checks, if they are enabled. */ 428 static inline void checking_verify_symtab_nodes (void); 429 430 /* Type of the symbol. */ 431 ENUM_BITFIELD (symtab_type) type : 8; 432 433 /* The symbols resolution. */ 434 ENUM_BITFIELD (ld_plugin_symbol_resolution) resolution : 8; 435 436 /*** Flags representing the symbol type. ***/ 437 438 /* True when symbol corresponds to a definition in current unit. 439 set via finalize_function or finalize_decl */ 440 unsigned definition : 1; 441 /* True when symbol is an alias. 442 Set by ssemble_alias. */ 443 unsigned alias : 1; 444 /* When true the alias is translated into its target symbol either by GCC 445 or assembler (it also may just be a duplicate declaration of the same 446 linker name). 447 448 Currently transparent aliases come in three different flavors 449 - aliases having the same assembler name as their target (aka duplicated 450 declarations). In this case the assembler names compare via 451 assembler_names_equal_p and weakref is false 452 - aliases that are renamed at a time being output to final file 453 by varasm.c. For those DECL_ASSEMBLER_NAME have 454 IDENTIFIER_TRANSPARENT_ALIAS set and thus also their assembler 455 name must be unique. 
456 Weakrefs belong to this cateogry when we target assembler without 457 .weakref directive. 458 - weakrefs that are renamed by assembler via .weakref directive. 459 In this case the alias may or may not be definition (depending if 460 target declaration was seen by the compiler), weakref is set. 461 Unless we are before renaming statics, assembler names are different. 462 463 Given that we now support duplicate declarations, the second option is 464 redundant and will be removed. */ 465 unsigned transparent_alias : 1; 466 /* True when alias is a weakref. */ 467 unsigned weakref : 1; 468 /* C++ frontend produce same body aliases and extra name aliases for 469 virtual functions and vtables that are obviously equivalent. 470 Those aliases are bit special, especially because C++ frontend 471 visibility code is so ugly it can not get them right at first time 472 and their visibility needs to be copied from their "masters" at 473 the end of parsing. */ 474 unsigned cpp_implicit_alias : 1; 475 /* Set once the definition was analyzed. The list of references and 476 other properties are built during analysis. */ 477 unsigned analyzed : 1; 478 /* Set for write-only variables. */ 479 unsigned writeonly : 1; 480 /* Visibility of symbol was used for further optimization; do not 481 permit further changes. */ 482 unsigned refuse_visibility_changes : 1; 483 484 /*** Visibility and linkage flags. ***/ 485 486 /* Set when function is visible by other units. */ 487 unsigned externally_visible : 1; 488 /* Don't reorder to other symbols having this set. */ 489 unsigned no_reorder : 1; 490 /* The symbol will be assumed to be used in an invisible way (like 491 by an toplevel asm statement). */ 492 unsigned force_output : 1; 493 /* Like FORCE_OUTPUT, but in the case it is ABI requiring the symbol to be 494 exported. Unlike FORCE_OUTPUT this flag gets cleared to symbols promoted 495 to static and it does not inhibit optimization. 
*/ 496 unsigned forced_by_abi : 1; 497 /* True when the name is known to be unique and thus it does not need mangling. */ 498 unsigned unique_name : 1; 499 /* Specify whether the section was set by user or by 500 compiler via -ffunction-sections. */ 501 unsigned implicit_section : 1; 502 /* True when body and other characteristics have been removed by 503 symtab_remove_unreachable_nodes. */ 504 unsigned body_removed : 1; 505 506 /*** WHOPR Partitioning flags. 507 These flags are used at ltrans stage when only part of the callgraph is 508 available. ***/ 509 510 /* Set when variable is used from other LTRANS partition. */ 511 unsigned used_from_other_partition : 1; 512 /* Set when function is available in the other LTRANS partition. 513 During WPA output it is used to mark nodes that are present in 514 multiple partitions. */ 515 unsigned in_other_partition : 1; 516 517 518 519 /*** other flags. ***/ 520 521 /* Set when symbol has address taken. */ 522 unsigned address_taken : 1; 523 /* Set when init priority is set. */ 524 unsigned in_init_priority_hash : 1; 525 526 /* Set when symbol needs to be streamed into LTO bytecode for LTO, or in case 527 of offloading, for separate compilation for a different target. */ 528 unsigned need_lto_streaming : 1; 529 530 /* Set when symbol can be streamed into bytecode for offloading. */ 531 unsigned offloadable : 1; 532 533 /* Set when symbol is an IFUNC resolver. */ 534 unsigned ifunc_resolver : 1; 535 536 537 /* Ordering of all symtab entries. */ 538 int order; 539 540 /* Declaration representing the symbol. */ 541 tree decl; 542 543 /* Linked list of symbol table entries starting with symtab_nodes. */ 544 symtab_node *next; 545 symtab_node *previous; 546 547 /* Linked list of symbols with the same asm name. There may be multiple 548 entries for single symbol name during LTO, because symbols are renamed 549 only after partitioning. 
550 551 Because inline clones are kept in the assembler name has, they also produce 552 duplicate entries. 553 554 There are also several long standing bugs where frontends and builtin 555 code produce duplicated decls. */ 556 symtab_node *next_sharing_asm_name; 557 symtab_node *previous_sharing_asm_name; 558 559 /* Circular list of nodes in the same comdat group if non-NULL. */ 560 symtab_node *same_comdat_group; 561 562 /* Vectors of referring and referenced entities. */ 563 ipa_ref_list ref_list; 564 565 /* Alias target. May be either DECL pointer or ASSEMBLER_NAME pointer 566 depending to what was known to frontend on the creation time. 567 Once alias is resolved, this pointer become NULL. */ 568 tree alias_target; 569 570 /* File stream where this node is being written to. */ 571 struct lto_file_decl_data * lto_file_data; 572 573 PTR GTY ((skip)) aux; 574 575 /* Comdat group the symbol is in. Can be private if GGC allowed that. */ 576 tree x_comdat_group; 577 578 /* Section name. Again can be private, if allowed. */ 579 section_hash_entry *x_section; 580 581 protected: 582 /* Dump base fields of symtab nodes to F. Not to be used directly. */ 583 void dump_base (FILE *); 584 585 /* Verify common part of symtab node. */ 586 bool DEBUG_FUNCTION verify_base (void); 587 588 /* Remove node from symbol table. This function is not used directly, but via 589 cgraph/varpool node removal routines. */ 590 void unregister (void); 591 592 /* Return the initialization and finalization priority information for 593 DECL. If there is no previous priority information, a freshly 594 allocated structure is returned. */ 595 struct symbol_priority_map *priority_info (void); 596 597 /* Worker for call_for_symbol_and_aliases_1. */ 598 bool call_for_symbol_and_aliases_1 (bool (*callback) (symtab_node *, void *), 599 void *data, 600 bool include_overwrite); 601 private: 602 /* Worker for set_section. 
*/ 603 static bool set_section (symtab_node *n, void *s); 604 605 /* Worker for symtab_resolve_alias. */ 606 static bool set_implicit_section (symtab_node *n, void *); 607 608 /* Worker searching noninterposable alias. */ 609 static bool noninterposable_alias (symtab_node *node, void *data); 610 611 /* Worker for ultimate_alias_target. */ 612 symtab_node *ultimate_alias_target_1 (enum availability *avail = NULL, 613 symtab_node *ref = NULL); 614 615 /* Get dump name with normal or assembly name. */ 616 const char *get_dump_name (bool asm_name_p) const; 617 }; 618 619 inline void 620 symtab_node::checking_verify_symtab_nodes (void) 621 { 622 if (flag_checking) 623 symtab_node::verify_symtab_nodes (); 624 } 625 626 /* Walk all aliases for NODE. */ 627 #define FOR_EACH_ALIAS(node, alias) \ 628 for (unsigned x_i = 0; node->iterate_direct_aliases (x_i, alias); x_i++) 629 630 /* This is the information that is put into the cgraph local structure 631 to recover a function. */ 632 struct lto_file_decl_data; 633 634 extern const char * const cgraph_availability_names[]; 635 extern const char * const ld_plugin_symbol_resolution_names[]; 636 extern const char * const tls_model_names[]; 637 638 /* Sub-structure of cgraph_node. Holds information about thunk, used only for 639 same body aliases. 640 641 Thunks are basically wrappers around methods which are introduced in case 642 of multiple inheritance in order to adjust the value of the "this" pointer 643 or of the returned value. 644 645 In the case of this-adjusting thunks, each back-end can override the 646 can_output_mi_thunk/output_mi_thunk target hooks to generate a minimal thunk 647 (with a tail call for instance) directly as assembly. For the default hook 648 or for the case where the can_output_mi_thunk hooks return false, the thunk 649 is gimplified and lowered using the regular machinery. */ 650 651 struct GTY(()) cgraph_thunk_info { 652 /* Offset used to adjust "this". 
*/ 653 HOST_WIDE_INT fixed_offset; 654 655 /* Offset in the virtual table to get the offset to adjust "this". Valid iff 656 VIRTUAL_OFFSET_P is true. */ 657 HOST_WIDE_INT virtual_value; 658 659 /* Thunk target, i.e. the method that this thunk wraps. Depending on the 660 TARGET_USE_LOCAL_THUNK_ALIAS_P macro, this may have to be a new alias. */ 661 tree alias; 662 663 /* Nonzero for a "this" adjusting thunk and zero for a result adjusting 664 thunk. */ 665 bool this_adjusting; 666 667 /* If true, this thunk is what we call a virtual thunk. In this case: 668 * for this-adjusting thunks, after the FIXED_OFFSET based adjustment is 669 done, add to the result the offset found in the vtable at: 670 vptr + VIRTUAL_VALUE 671 * for result-adjusting thunks, the FIXED_OFFSET adjustment is done after 672 the virtual one. */ 673 bool virtual_offset_p; 674 675 /* ??? True for special kind of thunks, seems related to instrumentation. */ 676 bool add_pointer_bounds_args; 677 678 /* Set to true when alias node (the cgraph_node to which this struct belong) 679 is a thunk. Access to any other fields is invalid if this is false. */ 680 bool thunk_p; 681 }; 682 683 /* Information about the function collected locally. 684 Available after function is analyzed. */ 685 686 struct GTY(()) cgraph_local_info { 687 /* Set when function is visible in current compilation unit only and 688 its address is never taken. */ 689 unsigned local : 1; 690 691 /* False when there is something makes versioning impossible. */ 692 unsigned versionable : 1; 693 694 /* False when function calling convention and signature can not be changed. 695 This is the case when __builtin_apply_args is used. */ 696 unsigned can_change_signature : 1; 697 698 /* True when the function has been originally extern inline, but it is 699 redefined now. */ 700 unsigned redefined_extern_inline : 1; 701 702 /* True if the function may enter serial irrevocable mode. 
*/ 703 unsigned tm_may_enter_irr : 1; 704 }; 705 706 /* Information about the function that needs to be computed globally 707 once compilation is finished. Available only with -funit-at-a-time. */ 708 709 struct GTY(()) cgraph_global_info { 710 /* For inline clones this points to the function they will be 711 inlined into. */ 712 cgraph_node *inlined_to; 713 }; 714 715 /* Represent which DECL tree (or reference to such tree) 716 will be replaced by another tree while versioning. */ 717 struct GTY(()) ipa_replace_map 718 { 719 /* The tree that will be replaced. */ 720 tree old_tree; 721 /* The new (replacing) tree. */ 722 tree new_tree; 723 /* Parameter number to replace, when old_tree is NULL. */ 724 int parm_num; 725 /* True when a substitution should be done, false otherwise. */ 726 bool replace_p; 727 /* True when we replace a reference to old_tree. */ 728 bool ref_p; 729 }; 730 731 struct GTY(()) cgraph_clone_info 732 { 733 vec<ipa_replace_map *, va_gc> *tree_map; 734 bitmap args_to_skip; 735 bitmap combined_args_to_skip; 736 }; 737 738 enum cgraph_simd_clone_arg_type 739 { 740 SIMD_CLONE_ARG_TYPE_VECTOR, 741 SIMD_CLONE_ARG_TYPE_UNIFORM, 742 /* These are only for integer/pointer arguments passed by value. */ 743 SIMD_CLONE_ARG_TYPE_LINEAR_CONSTANT_STEP, 744 SIMD_CLONE_ARG_TYPE_LINEAR_VARIABLE_STEP, 745 /* These 6 are only for reference type arguments or arguments passed 746 by reference. */ 747 SIMD_CLONE_ARG_TYPE_LINEAR_REF_CONSTANT_STEP, 748 SIMD_CLONE_ARG_TYPE_LINEAR_REF_VARIABLE_STEP, 749 SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_CONSTANT_STEP, 750 SIMD_CLONE_ARG_TYPE_LINEAR_UVAL_VARIABLE_STEP, 751 SIMD_CLONE_ARG_TYPE_LINEAR_VAL_CONSTANT_STEP, 752 SIMD_CLONE_ARG_TYPE_LINEAR_VAL_VARIABLE_STEP, 753 SIMD_CLONE_ARG_TYPE_MASK 754 }; 755 756 /* Function arguments in the original function of a SIMD clone. 757 Supplementary data for `struct simd_clone'. 
*/ 758 759 struct GTY(()) cgraph_simd_clone_arg { 760 /* Original function argument as it originally existed in 761 DECL_ARGUMENTS. */ 762 tree orig_arg; 763 764 /* orig_arg's function (or for extern functions type from 765 TYPE_ARG_TYPES). */ 766 tree orig_type; 767 768 /* If argument is a vector, this holds the vector version of 769 orig_arg that after adjusting the argument types will live in 770 DECL_ARGUMENTS. Otherwise, this is NULL. 771 772 This basically holds: 773 vector(simdlen) __typeof__(orig_arg) new_arg. */ 774 tree vector_arg; 775 776 /* vector_arg's type (or for extern functions new vector type. */ 777 tree vector_type; 778 779 /* If argument is a vector, this holds the array where the simd 780 argument is held while executing the simd clone function. This 781 is a local variable in the cloned function. Its content is 782 copied from vector_arg upon entry to the clone. 783 784 This basically holds: 785 __typeof__(orig_arg) simd_array[simdlen]. */ 786 tree simd_array; 787 788 /* A SIMD clone's argument can be either linear (constant or 789 variable), uniform, or vector. */ 790 enum cgraph_simd_clone_arg_type arg_type; 791 792 /* For arg_type SIMD_CLONE_ARG_TYPE_LINEAR_*CONSTANT_STEP this is 793 the constant linear step, if arg_type is 794 SIMD_CLONE_ARG_TYPE_LINEAR_*VARIABLE_STEP, this is index of 795 the uniform argument holding the step, otherwise 0. */ 796 HOST_WIDE_INT linear_step; 797 798 /* Variable alignment if available, otherwise 0. */ 799 unsigned int alignment; 800 }; 801 802 /* Specific data for a SIMD function clone. */ 803 804 struct GTY(()) cgraph_simd_clone { 805 /* Number of words in the SIMD lane associated with this clone. */ 806 unsigned int simdlen; 807 808 /* Number of annotated function arguments in `args'. This is 809 usually the number of named arguments in FNDECL. */ 810 unsigned int nargs; 811 812 /* Max hardware vector size in bits for integral vectors. 
*/ 813 unsigned int vecsize_int; 814 815 /* Max hardware vector size in bits for floating point vectors. */ 816 unsigned int vecsize_float; 817 818 /* Machine mode of the mask argument(s), if they are to be passed 819 as bitmasks in integer argument(s). VOIDmode if masks are passed 820 as vectors of characteristic type. */ 821 machine_mode mask_mode; 822 823 /* The mangling character for a given vector size. This is used 824 to determine the ISA mangling bit as specified in the Intel 825 Vector ABI. */ 826 unsigned char vecsize_mangle; 827 828 /* True if this is the masked, in-branch version of the clone, 829 otherwise false. */ 830 unsigned int inbranch : 1; 831 832 /* Doubly linked list of SIMD clones. */ 833 cgraph_node *prev_clone, *next_clone; 834 835 /* Original cgraph node the SIMD clones were created for. */ 836 cgraph_node *origin; 837 838 /* Annotated function arguments for the original function. */ 839 cgraph_simd_clone_arg GTY((length ("%h.nargs"))) args[1]; 840 }; 841 842 /* Function Multiversioning info. */ 843 struct GTY((for_user)) cgraph_function_version_info { 844 /* The cgraph_node for which the function version info is stored. */ 845 cgraph_node *this_node; 846 /* Chains all the semantically identical function versions. The 847 first function in this chain is the version_info node of the 848 default function. */ 849 cgraph_function_version_info *prev; 850 /* If this version node corresponds to a dispatcher for function 851 versions, this points to the version info node of the default 852 function, the first node in the chain. */ 853 cgraph_function_version_info *next; 854 /* If this node corresponds to a function version, this points 855 to the dispatcher function decl, which is the function that must 856 be called to execute the right function version at run-time. 
     If this cgraph node is a dispatcher (if dispatcher_function is
     true, in the cgraph_node struct) for function versions, this
     points to resolver function, which holds the function body of the
     dispatcher.  The dispatcher decl is an alias to the resolver
     function decl.  */
  tree dispatcher_resolver;
};

#define DEFCIFCODE(code, type, string)	CIF_ ## code,
/* Reasons for inlining failures.  */

enum cgraph_inline_failed_t {
#include "cif-code.def"
  CIF_N_REASONS
};

enum cgraph_inline_failed_type_t
{
  CIF_FINAL_NORMAL = 0,
  CIF_FINAL_ERROR
};

struct cgraph_edge;

/* Hasher used to map GIMPLE call statements to their cgraph edges
   (see cgraph_node::call_site_hash below).  */
struct cgraph_edge_hasher : ggc_ptr_hash<cgraph_edge>
{
  typedef gimple *compare_type;

  static hashval_t hash (cgraph_edge *);
  static hashval_t hash (gimple *);
  static bool equal (cgraph_edge *, gimple *);
};

/* The cgraph data structure.
   Each function decl has assigned cgraph_node listing callees and callers.  */

struct GTY((tag ("SYMTAB_FUNCTION"))) cgraph_node : public symtab_node {
public:
  /* Remove the node from cgraph and all inline clones inlined into it.
     Skip however removal of FORBIDDEN_NODE and return true if it needs to be
     removed.  This allows to call the function from outer loop walking clone
     tree.  */
  bool remove_symbol_and_inline_clones (cgraph_node *forbidden_node = NULL);

  /* Record all references from cgraph_node that are taken
     in statement STMT.  */
  void record_stmt_references (gimple *stmt);

  /* Like cgraph_set_call_stmt but walk the clone tree and update all
     clones sharing the same function body.
     When UPDATE_SPECULATIVE is true, all three components of a
     speculative edge get updated.  Otherwise we update only the direct
     call.  */
  void set_call_stmt_including_clones (gimple *old_stmt, gcall *new_stmt,
				       bool update_speculative = true);

  /* Walk the alias chain to return the function cgraph_node is alias of.
     Walk through thunks, too.
     When AVAIL is non-NULL, get minimal availability in the chain.
     When REF is non-NULL, assume that reference happens in symbol REF
     when determining the availability.  */
  cgraph_node *function_symbol (enum availability *avail = NULL,
				struct symtab_node *ref = NULL);

  /* Walk the alias chain to return the function cgraph_node is alias of.
     Walk through non virtual thunks, too.  Thus we return either a function
     or a virtual thunk node.
     When AVAIL is non-NULL, get minimal availability in the chain.
     When REF is non-NULL, assume that reference happens in symbol REF
     when determining the availability.  */
  cgraph_node *function_or_virtual_thunk_symbol
				(enum availability *avail = NULL,
				 struct symtab_node *ref = NULL);

  /* Create node representing clone of N executed COUNT times.  Decrease
     the execution counts from original node too.
     The new clone will have decl set to DECL that may or may not be the same
     as decl of N.

     When UPDATE_ORIGINAL is true, the counts are subtracted from the original
     function's profile to reflect the fact that part of execution is handled
     by node.
     When CALL_DUPLICATION_HOOK is true, the ipa passes are notified about
     the new clone.  Otherwise the caller is responsible for doing so later.

     If the new node is being inlined into another one, NEW_INLINED_TO should be
     the outline function the new one is (even indirectly) inlined to.
     All hooks will see this in node's global.inlined_to, when invoked.
     Can be NULL if the node is not inlined.  SUFFIX is string that is appended
     to the original name.  */
  cgraph_node *create_clone (tree decl, profile_count count,
			     bool update_original,
			     vec<cgraph_edge *> redirect_callers,
			     bool call_duplication_hook,
			     cgraph_node *new_inlined_to,
			     bitmap args_to_skip, const char *suffix = NULL);

  /* Create callgraph node clone with new declaration.  The actual body will
     be copied later at compilation stage.  */
  cgraph_node *create_virtual_clone (vec<cgraph_edge *> redirect_callers,
				     vec<ipa_replace_map *, va_gc> *tree_map,
				     bitmap args_to_skip, const char * suffix);

  /* cgraph node being removed from symbol table; see if its entry can be
     replaced by other inline clone.  */
  cgraph_node *find_replacement (void);

  /* Create a new cgraph node which is the new version of
     callgraph node.  REDIRECT_CALLERS holds the callers
     edges which should be redirected to point to
     NEW_VERSION.  ALL the callees edges of the node
     are cloned to the new version node.  Return the new
     version node.

     If non-NULL, BBS_TO_COPY determines which basic blocks
     are copied, to prevent duplication of calls that are dead
     in the clone.

     SUFFIX is string that is appended to the original name.  */
  cgraph_node *create_version_clone (tree new_decl,
				     vec<cgraph_edge *> redirect_callers,
				     bitmap bbs_to_copy,
				     const char *suffix = NULL);

  /* Perform function versioning.
     Function versioning includes copying of the tree and
     a callgraph update (creating a new cgraph node and updating
     its callees and callers).

     REDIRECT_CALLERS varray includes the edges to be redirected
     to the new version.

     TREE_MAP is a mapping of tree nodes we want to replace with
     new ones (according to results of prior analysis).

     If non-NULL, ARGS_TO_SKIP determines function parameters to remove
     from new version.
     If SKIP_RETURN is true, the new version will return void.
     If non-NULL, BBS_TO_COPY determines which basic blocks to copy.
     If non-NULL, NEW_ENTRY_BLOCK determines the new entry BB of the clone.

     If TARGET_ATTRIBUTES is non-null, when creating a new declaration,
     add the attributes to DECL_ATTRIBUTES.  And call valid_attribute_p
     that will promote value of the attribute DECL_FUNCTION_SPECIFIC_TARGET
     of the declaration.

     Return the new version's cgraph node.  */
  cgraph_node *create_version_clone_with_body
    (vec<cgraph_edge *> redirect_callers,
     vec<ipa_replace_map *, va_gc> *tree_map, bitmap args_to_skip,
     bool skip_return, bitmap bbs_to_copy, basic_block new_entry_block,
     const char *clone_name, tree target_attributes = NULL_TREE);

  /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
     corresponding to cgraph_node.  */
  cgraph_function_version_info *insert_new_function_version (void);

  /* Get the cgraph_function_version_info node corresponding to node.  */
  cgraph_function_version_info *function_version (void);

  /* Discover all functions and variables that are trivially needed, analyze
     them as well as all functions and variables referred by them.  */
  void analyze (void);

  /* Add thunk alias into callgraph.  The alias declaration is ALIAS and it
     aliases DECL with an adjustments made into the first parameter.
     See comments in struct cgraph_thunk_info for detail on the parameters.  */
  cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
			      HOST_WIDE_INT fixed_offset,
			      HOST_WIDE_INT virtual_value,
			      tree virtual_offset,
			      tree real_alias);


  /* Return node that alias is aliasing.  */
  inline cgraph_node *get_alias_target (void);

  /* Given function symbol, walk the alias chain to return the function node
     is alias of.  Do not walk through thunks.
     When AVAILABILITY is non-NULL, get minimal availability in the chain.
     When REF is non-NULL, assume that reference happens in symbol REF
     when determining the availability.  */

  cgraph_node *ultimate_alias_target (availability *availability = NULL,
				      symtab_node *ref = NULL);

  /* Expand thunk NODE to gimple if possible.
     When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
     no assembler is produced.
     When OUTPUT_ASM_THUNKS is true, also produce assembler for
     thunks that are not lowered.  */
  bool expand_thunk (bool output_asm_thunks, bool force_gimple_thunk);

  /* Call expand_thunk on all callers that are thunks and analyze those
     nodes that were expanded.  */
  void expand_all_artificial_thunks ();

  /* Assemble thunks and aliases associated to node.  */
  void assemble_thunks_and_aliases (void);

  /* Expand function specified by node.  */
  void expand (void);

  /* As a GCC extension we allow redefinition of the function.  The
     semantics when both copies of bodies differ is not well defined.
     We replace the old body with new body so in unit at a time mode
     we always use new body, while in normal mode we may end up with
     old body inlined into some functions and new body expanded and
     inlined in others.  */
  void reset (void);

  /* Creates a wrapper from cgraph_node to TARGET node.  Thunk is used for this
     kind of wrapper method.  */
  void create_wrapper (cgraph_node *target);

  /* Verify cgraph nodes of the cgraph node.  */
  void DEBUG_FUNCTION verify_node (void);

  /* Remove function from symbol table.  */
  void remove (void);

  /* Dump call graph node to file F.  */
  void dump (FILE *f);

  /* Dump call graph node to stderr.  */
  void DEBUG_FUNCTION debug (void);

  /* When doing LTO, read cgraph_node's body from disk if it is not already
     present.  */
  bool get_untransformed_body (void);

  /* Prepare function body.  When doing LTO, read cgraph_node's body from disk
     if it is not already present.  When some IPA transformations are scheduled,
     apply them.  */
  bool get_body (void);

  /* Release memory used to represent body of function.
     Use this only for functions that are released before being translated to
     target code (i.e. RTL).  Functions that are compiled to RTL and beyond
     are free'd in final.c via free_after_compilation().  */
  void release_body (bool keep_arguments = false);

  /* Return the DECL_STRUCT_FUNCTION of the function.  */
  struct function *get_fun (void);

  /* cgraph_node is no longer nested function; update cgraph accordingly.  */
  void unnest (void);

  /* Bring cgraph node local.  */
  void make_local (void);

  /* Likewise indicate that a node is having address taken.  */
  void mark_address_taken (void);

  /* Set finalization priority to PRIORITY.  */
  void set_fini_priority (priority_type priority);

  /* Return the finalization priority.  */
  priority_type get_fini_priority (void);

  /* Create edge from a given function to CALLEE in the cgraph.  */
  cgraph_edge *create_edge (cgraph_node *callee,
			    gcall *call_stmt, profile_count count);

  /* Create an indirect edge with a yet-undetermined callee where the call
     statement destination is a formal parameter of the caller with index
     PARAM_INDEX.  */
  cgraph_edge *create_indirect_edge (gcall *call_stmt, int ecf_flags,
				     profile_count count,
				     bool compute_indirect_info = true);

  /* Like cgraph_create_edge walk the clone tree and update all clones sharing
     same function body.  If clones already have edge for OLD_STMT, only
     update the edge same way as cgraph_set_call_stmt_including_clones does.  */
  void create_edge_including_clones (cgraph_node *callee,
				     gimple *old_stmt, gcall *stmt,
				     profile_count count,
				     cgraph_inline_failed_t reason);

  /* Return the callgraph edge representing the GIMPLE_CALL statement
     CALL_STMT.  */
  cgraph_edge *get_edge (gimple *call_stmt);

  /* Collect all callers of cgraph_node and its aliases that are known to lead
     to NODE (i.e. are not overwritable) and that are not thunks.  */
  vec<cgraph_edge *> collect_callers (void);

  /* Remove all callers from the node.  */
  void remove_callers (void);

  /* Remove all callees from the node.  */
  void remove_callees (void);

  /* Return function availability.  See cgraph.h for description of individual
     return values.  */
  enum availability get_availability (symtab_node *ref = NULL);

  /* Set TREE_NOTHROW on cgraph_node's decl and on aliases of the node
     if any to NOTHROW.  */
  bool set_nothrow_flag (bool nothrow);

  /* Set DECL_IS_MALLOC on cgraph_node's decl and on aliases of the node
     if any.  */
  bool set_malloc_flag (bool malloc_p);

  /* If SET_CONST is true, mark function, aliases and thunks to be ECF_CONST.
     If SET_CONST is false, clear the flag.

     When setting the flag be careful about possible interposition and
     do not set the flag for functions that can be interposed; set the pure
     flag for functions that can bind to other definition.

     Return true if any change was done.  */

  bool set_const_flag (bool set_const, bool looping);

  /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
     if any to PURE.

     When setting the flag, be careful about possible interposition.
     Return true if any change was done.  */

  bool set_pure_flag (bool pure, bool looping);

  /* Call callback on function and aliases associated to the function.
     When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
     skipped.  */

  bool call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
						      void *),
				    void *data, bool include_overwritable);

  /* Call callback on cgraph_node, thunks and aliases associated to NODE.
     When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
     skipped.  When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
     skipped.  */
  bool call_for_symbol_thunks_and_aliases (bool (*callback) (cgraph_node *node,
							     void *data),
					   void *data,
					   bool include_overwritable,
					   bool exclude_virtual_thunks = false);

  /* Likewise indicate that a node is needed, i.e. reachable via some
     external means.  */
  inline void mark_force_output (void);

  /* Return true when function can be marked local.  */
  bool local_p (void);

  /* Return true if cgraph_node can be made local for API change.
     Extern inline functions and C++ COMDAT functions can be made local
     at the expense of possible code size growth if function is used in multiple
     compilation units.  */
  bool can_be_local_p (void);

  /* Return true when cgraph_node cannot return or throw and thus
     it is safe to ignore its side effects for IPA analysis.  */
  bool cannot_return_p (void);

  /* Return true when function cgraph_node and all its aliases are only called
     directly.
     i.e. it is not externally visible, address was not taken and
     it is not used in any other non-standard way.  */
  bool only_called_directly_p (void);

  /* Return true when function is only called directly or it has alias.
     i.e. it is not externally visible, address was not taken and
     it is not used in any other non-standard way.  */
  inline bool only_called_directly_or_aliased_p (void);

  /* Return true when function cgraph_node can be expected to be removed
     from program when direct calls in this compilation unit are removed.

     As a special case COMDAT functions are
     cgraph_can_remove_if_no_direct_calls_p while they are not
     cgraph_only_called_directly_p (it is possible they are called from other
     unit).

     This function behaves as cgraph_only_called_directly_p because eliminating
     all uses of COMDAT function does not make it necessarily disappear from
     the program unless we are compiling whole program or we do LTO.  In this
     case we know we win since dynamic linking will not really discard the
     linkonce section.

     If WILL_INLINE is true, assume that function will be inlined into all the
     direct calls.  */
  bool will_be_removed_from_program_if_no_direct_calls_p
	 (bool will_inline = false);

  /* Return true when function can be removed from callgraph
     if all direct calls and references are eliminated.  The function does
     not take into account comdat groups.  */
  bool can_remove_if_no_direct_calls_and_refs_p (void);

  /* Return true when function cgraph_node and its aliases can be removed from
     callgraph if all direct calls are eliminated.
     If WILL_INLINE is true, assume that function will be inlined into all the
     direct calls.  */
  bool can_remove_if_no_direct_calls_p (bool will_inline = false);

  /* Return true when callgraph node is a function with Gimple body defined
     in current unit.  Functions can also be defined externally or they
     can be thunks with no Gimple representation.

     Note that at WPA stage, the function body may not be present in memory.  */
  inline bool has_gimple_body_p (void);

  /* Return true if this node represents a former, i.e. an expanded, thunk.  */
  inline bool former_thunk_p (void);

  /* Return true if function should be optimized for size.  */
  bool optimize_for_size_p (void);

  /* Dump the callgraph to file F.  */
  static void dump_cgraph (FILE *f);

  /* Dump the call graph to stderr.  */
  static inline
  void debug_cgraph (void)
  {
    dump_cgraph (stderr);
  }

  /* Record that DECL1 and DECL2 are semantically identical function
     versions.  */
  static void record_function_versions (tree decl1, tree decl2);

  /* Remove the cgraph_function_version_info and cgraph_node for DECL.  This
     DECL is a duplicate declaration.  */
  static void delete_function_version_by_decl (tree decl);

  /* Add the function FNDECL to the call graph.
     Unlike finalize_function, this function is intended to be used
     by middle end and allows insertion of new function at arbitrary point
     of compilation.  The function can be either in high, low or SSA form
     GIMPLE.

     The function is assumed to be reachable and have address taken (so no
     API breaking optimizations are performed on it).

     Main work done by this function is to enqueue the function for later
     processing to avoid the need for passes to be re-entrant.  */
  static void add_new_function (tree fndecl, bool lowered);

  /* Return callgraph node for given symbol and check it is a function.  */
  static inline cgraph_node *get (const_tree decl)
  {
    gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL);
    return dyn_cast <cgraph_node *> (symtab_node::get (decl));
  }

  /* DECL has been parsed.  Take it, queue it, compile it at the whim of the
     logic in effect.  If NO_COLLECT is true, then our caller cannot stand to
     have the garbage collector run at the moment.  We would need to either
     create a new GC context, or just not compile right now.  */
  static void finalize_function (tree, bool);

  /* Return cgraph node assigned to DECL.  Create new one when needed.  */
  static cgraph_node * create (tree decl);

  /* Try to find a call graph node for declaration DECL and if it does not
     exist or if it corresponds to an inline clone, create a new one.  */
  static cgraph_node * get_create (tree);

  /* Return local info for the compiled function.  */
  static cgraph_local_info *local_info (tree decl);

  /* Return RTL info for the compiled function.  */
  static struct cgraph_rtl_info *rtl_info (tree);

  /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
     Return NULL if there's no such node.  */
  static cgraph_node *get_for_asmname (tree asmname);

  /* Attempt to mark ALIAS as an alias to DECL.  Return alias node if
     successful and NULL otherwise.
     Same body aliases are output whenever the body of DECL is output,
     and cgraph_node::get (ALIAS) transparently
     returns cgraph_node::get (DECL).  */
  static cgraph_node * create_same_body_alias (tree alias, tree decl);

  /* Verify whole cgraph structure.  */
  static void DEBUG_FUNCTION verify_cgraph_nodes (void);

  /* Verify cgraph, if consistency checking is enabled.  */
  static inline void checking_verify_cgraph_nodes (void);

  /* Worker to bring NODE local.  */
  static bool make_local (cgraph_node *node, void *);

  /* Mark ALIAS as an alias to DECL.  DECL_NODE is cgraph node representing
     the function body is associated
     with (not necessarily cgraph_node (DECL)).  */
  static cgraph_node *create_alias (tree alias, tree target);

  /* Return true if NODE has thunk.  */
  static bool has_thunk_p (cgraph_node *node, void *);

  /* List of direct call edges out of / into this function.  */
  cgraph_edge *callees;
  cgraph_edge *callers;
  /* List of edges representing indirect calls with a yet undetermined
     callee.  */
  cgraph_edge *indirect_calls;
  /* For nested functions points to function the node is nested in.  */
  cgraph_node *origin;
  /* Points to first nested function, if any.  */
  cgraph_node *nested;
  /* Pointer to the next function with same origin, if any.  */
  cgraph_node *next_nested;
  /* Pointer to the next clone.  */
  cgraph_node *next_sibling_clone;
  cgraph_node *prev_sibling_clone;
  cgraph_node *clones;
  cgraph_node *clone_of;
  /* If instrumentation_clone is 1 then instrumented_version points
     to the original function used to make instrumented version.
     Otherwise points to instrumented version of the function.  */
  cgraph_node *instrumented_version;
  /* If instrumentation_clone is 1 then orig_decl is the original
     function declaration.  */
  tree orig_decl;
  /* For functions with many call sites it holds map from call expression
     to the edge to speed up cgraph_edge function.  */
  hash_table<cgraph_edge_hasher> *GTY(()) call_site_hash;
  /* Declaration node used to be clone of.  */
  tree former_clone_of;

  /* If this is a SIMD clone, this points to the SIMD specific
     information for it.  */
  cgraph_simd_clone *simdclone;
  /* If this function has SIMD clones, this points to the first clone.  */
  cgraph_node *simd_clones;

  /* Interprocedural passes scheduled to have their transform functions
     applied next time we execute local pass on them.  We maintain it
     per-function in order to allow IPA passes to introduce new functions.  */
  vec<ipa_opt_pass> GTY((skip)) ipa_transforms_to_apply;

  cgraph_local_info local;
  cgraph_global_info global;
  struct cgraph_rtl_info *rtl;
  cgraph_clone_info clone;
  cgraph_thunk_info thunk;

  /* Expected number of executions: calculated in profile.c.  */
  profile_count count;
  /* How to scale counts at materialization time; used to merge
     LTO units with different number of profile runs.  */
  int count_materialization_scale;
  /* Unique id of the node.  */
  int uid;
  /* Summary unique id of the node.  */
  int summary_uid;
  /* ID assigned by the profiling.  */
  unsigned int profile_id;
  /* Time profiler: first run of function.  */
  int tp_first_run;

  /* Set when decl is an abstract function pointed to by the
     ABSTRACT_DECL_ORIGIN of a reachable function.  */
  unsigned used_as_abstract_origin : 1;
  /* Set once the function is lowered (i.e. its CFG is built).  */
  unsigned lowered : 1;
  /* Set once the function has been instantiated and its callee
     lists created.  */
  unsigned process : 1;
  /* How commonly executed the node is.  Initialized during branch
     probabilities pass.  */
  ENUM_BITFIELD (node_frequency) frequency : 2;
  /* True when function can only be called at startup (from static ctor).  */
  unsigned only_called_at_startup : 1;
  /* True when function can only be called at exit (from static dtor).  */
  unsigned only_called_at_exit : 1;
  /* True when function is the transactional clone of a function which
     is called only from inside transactions.  */
  /* ?? We should be able to remove this.  We have enough bits in
     cgraph to calculate it.  */
  unsigned tm_clone : 1;
  /* True if this decl is a dispatcher for function versions.  */
  unsigned dispatcher_function : 1;
  /* True if this decl calls a COMDAT-local function.  This is set up in
     compute_fn_summary and inline_call.  */
  unsigned calls_comdat_local : 1;
  /* True if node has been created by merge operation in IPA-ICF.  */
  unsigned icf_merged: 1;
  /* True when function is clone created for Pointer Bounds Checker
     instrumentation.  */
  unsigned instrumentation_clone : 1;
  /* True if call to node can't result in a call to free, munmap or
     other operation that could make previously non-trapping memory
     accesses trapping.  */
  unsigned nonfreeing_fn : 1;
  /* True if there was multiple COMDAT bodies merged by lto-symtab.  */
  unsigned merged_comdat : 1;
  /* True if function was created to be executed in parallel.  */
  unsigned parallelized_function : 1;
  /* True if function is part split out by ipa-split.  */
  unsigned split_part : 1;
  /* True if the function appears as possible target of indirect call.  */
  unsigned indirect_call_target : 1;

private:
  /* Worker for call_for_symbol_and_aliases.  */
  bool call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
							void *),
				      void *data, bool include_overwritable);
};

/* A cgraph node set is a collection of cgraph nodes.  A cgraph node
   can appear in multiple sets.  */
struct cgraph_node_set_def
{
  hash_map<cgraph_node *, size_t> *map;
  vec<cgraph_node *> nodes;
};

typedef cgraph_node_set_def *cgraph_node_set;
typedef struct varpool_node_set_def *varpool_node_set;

class varpool_node;

/* A varpool node set is a collection of varpool nodes.  A varpool node
   can appear in multiple sets.  */
struct varpool_node_set_def
{
  hash_map<varpool_node *, size_t> * map;
  vec<varpool_node *> nodes;
};

/* Iterator structure for cgraph node sets.  */
struct cgraph_node_set_iterator
{
  cgraph_node_set set;
  unsigned index;
};

/* Iterator structure for varpool node sets.  */
struct varpool_node_set_iterator
{
  varpool_node_set set;
  unsigned index;
};

/* Context of polymorphic call.  It represents information about the type of
   instance that may reach the call.
   This is used by ipa-devirt walkers of the
   type inheritance graph.  */

class GTY(()) ipa_polymorphic_call_context {
public:
  /* The called object appears in an object of type OUTER_TYPE
     at offset OFFSET.  When information is not 100% reliable, we
     use SPECULATIVE_OUTER_TYPE and SPECULATIVE_OFFSET.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT speculative_offset;
  tree outer_type;
  tree speculative_outer_type;
  /* True if outer object may be in construction or destruction.  */
  unsigned maybe_in_construction : 1;
  /* True if outer object may be of derived type.  */
  unsigned maybe_derived_type : 1;
  /* True if speculative outer object may be of derived type.  We always
     speculate that construction does not happen.  */
  unsigned speculative_maybe_derived_type : 1;
  /* True if the context is invalid and all calls should be redirected
     to BUILTIN_UNREACHABLE.  */
  unsigned invalid : 1;
  /* True if the outer type is dynamic.  */
  unsigned dynamic : 1;

  /* Build empty "I know nothing" context.  */
  ipa_polymorphic_call_context ();
  /* Build polymorphic call context for indirect call E.  */
  ipa_polymorphic_call_context (cgraph_edge *e);
  /* Build polymorphic call context for IP invariant CST.
     If specified, OTR_TYPE specifies the type of polymorphic call
     that takes CST+OFFSET as a parameter.  */
  ipa_polymorphic_call_context (tree cst, tree otr_type = NULL,
				HOST_WIDE_INT offset = 0);
  /* Build context for pointer REF contained in FNDECL at statement STMT.
     If INSTANCE is non-NULL, return pointer to the object described by
     the context.  */
  ipa_polymorphic_call_context (tree fndecl, tree ref, gimple *stmt,
				tree *instance = NULL);

  /* Look for vtable stores or constructor calls to work out dynamic type
     of memory location.  */
  bool get_dynamic_type (tree, tree, tree, gimple *);

  /* Make context non-speculative.  */
  void clear_speculation ();

  /* Produce context specifying all derived types of OTR_TYPE.  If OTR_TYPE is
     NULL, the context is set to dummy "I know nothing" setting.  */
  void clear_outer_type (tree otr_type = NULL);

  /* Walk container types and modify context to point to actual class
     containing OTR_TYPE (if non-NULL) as base class.
     Return true if resulting context is valid.

     When CONSIDER_PLACEMENT_NEW is false, reject contexts that may be made
     valid only via allocation of new polymorphic type inside by means
     of placement new.

     When CONSIDER_BASES is false, only look for actual fields, not base types
     of TYPE.  */
  bool restrict_to_inner_class (tree otr_type,
				bool consider_placement_new = true,
				bool consider_bases = true);

  /* Adjust all offsets in contexts by given number of bits.  */
  void offset_by (HOST_WIDE_INT);
  /* Use when we cannot track dynamic type change.  This speculatively
     assumes type change is not happening.  */
  void possible_dynamic_type_change (bool, tree otr_type = NULL);
  /* Assume that both THIS and a given context is valid and strengthen THIS
     if possible.  Return true if any strengthening was made.
     If actual type the context is being used in is known, OTR_TYPE should be
     set accordingly.  This improves quality of combined result.  */
  bool combine_with (ipa_polymorphic_call_context, tree otr_type = NULL);
  bool meet_with (ipa_polymorphic_call_context, tree otr_type = NULL);

  /* Return TRUE if context is fully useless.  */
  bool useless_p () const;
  /* Return TRUE if this context conveys the same information as X.  */
  bool equal_to (const ipa_polymorphic_call_context &x) const;

  /* Dump human readable context to F.  If NEWLINE is true, it will be
     terminated by a newline.  */
  void dump (FILE *f, bool newline = true) const;
  void DEBUG_FUNCTION debug () const;

  /* LTO streaming.  */
  void stream_out (struct output_block *) const;
  void stream_in (struct lto_input_block *, struct data_in *data_in);

private:
  bool combine_speculation_with (tree, HOST_WIDE_INT, bool, tree);
  bool meet_speculation_with (tree, HOST_WIDE_INT, bool, tree);
  void set_by_decl (tree, HOST_WIDE_INT);
  bool set_by_invariant (tree, tree, HOST_WIDE_INT);
  bool speculation_consistent_p (tree, HOST_WIDE_INT, bool, tree) const;
  void make_speculative (tree otr_type = NULL);
};

/* Structure containing additional information about an indirect call.  */

struct GTY(()) cgraph_indirect_call_info
{
  /* When agg_contents is set, an offset where the call pointer is located
     within the aggregate.  */
  HOST_WIDE_INT offset;
  /* Context of the polymorphic call; use only when POLYMORPHIC flag is set.  */
  ipa_polymorphic_call_context context;
  /* OBJ_TYPE_REF_TOKEN of a polymorphic call (if polymorphic is set).  */
  HOST_WIDE_INT otr_token;
  /* Type of the object from OBJ_TYPE_REF_OBJECT.  */
  tree otr_type;
  /* Index of the parameter that is called.  */
  int param_index;
  /* ECF flags determined from the caller.  */
  int ecf_flags;
  /* Profile_id of common target obtained from profile.  */
  int common_target_id;
  /* Probability that call will land in function with COMMON_TARGET_ID.  */
  int common_target_probability;

  /* Set when the call is a virtual call with the parameter being the
     associated object pointer rather than a simple direct call.  */
  unsigned polymorphic : 1;
  /* Set when the call is a call of a pointer loaded from contents of an
     aggregate at offset.  */
  unsigned agg_contents : 1;
  /* Set when this is a call through a member pointer.
  */
  unsigned member_ptr : 1;
  /* When the agg_contents bit is set, this one determines whether the
     destination is loaded from a parameter passed by reference.  */
  unsigned by_ref : 1;
  /* When the agg_contents bit is set, this one determines whether we can
     deduce from the function body that the loaded value from the reference is
     never modified between the invocation of the function and the load
     point.  */
  unsigned guaranteed_unmodified : 1;
  /* For polymorphic calls this specify whether the virtual table pointer
     may have changed in between function entry and the call.  */
  unsigned vptr_changed : 1;
};

/* A call-graph edge: one caller/callee pair, anchored at a call statement.  */

struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"),
	    for_user)) cgraph_edge {
  friend class cgraph_node;

  /* Remove the edge in the cgraph.  */
  void remove (void);

  /* Change field call_stmt of edge to NEW_STMT.
     If UPDATE_SPECULATIVE and E is any component of speculative
     edge, then update all components.  */
  void set_call_stmt (gcall *new_stmt, bool update_speculative = true);

  /* Redirect callee of the edge to N.  The function does not update underlying
     call expression.  */
  void redirect_callee (cgraph_node *n);

  /* If the edge does not lead to a thunk, simply redirect it to N.  Otherwise
     create one or more equivalent thunks for N and redirect E to the first in
     the chain.  Note that it is then necessary to call
     n->expand_all_artificial_thunks once all callers are redirected.  */
  void redirect_callee_duplicating_thunks (cgraph_node *n);

  /* Make an indirect edge with an unknown callee an ordinary edge leading to
     CALLEE.  DELTA is an integer constant that is to be added to the this
     pointer (first parameter) to compensate for skipping
     a thunk adjustment.  */
  cgraph_edge *make_direct (cgraph_node *callee);

  /* Turn edge into speculative call calling N2.
     Update
     the profile so the direct call is taken DIRECT_COUNT
     times.  */
  cgraph_edge *make_speculative (cgraph_node *n2, profile_count direct_count);

  /* Given speculative call edge, return all three components.  */
  void speculative_call_info (cgraph_edge *&direct, cgraph_edge *&indirect,
			      ipa_ref *&reference);

  /* Speculative call edge turned out to be direct call to CALLEE_DECL.
     Remove the speculative call sequence and return edge representing the call.
     It is up to caller to redirect the call as appropriate.  */
  cgraph_edge *resolve_speculation (tree callee_decl = NULL);

  /* If necessary, change the function declaration in the call statement
     associated with the edge so that it corresponds to the edge callee.  */
  gimple *redirect_call_stmt_to_callee (void);

  /* Create clone of edge in the node N represented
     by CALL_EXPR the callgraph.  */
  cgraph_edge * clone (cgraph_node *n, gcall *call_stmt, unsigned stmt_uid,
		       profile_count num, profile_count den,
		       bool update_original);

  /* Verify edge count and frequency.  */
  bool verify_count ();

  /* Return true when call of edge can not lead to return from caller
     and thus it is safe to ignore its side effects for IPA analysis
     when computing side effects of the caller.  */
  bool cannot_lead_to_return_p (void);

  /* Return true when the edge represents a direct recursion.  */
  bool recursive_p (void);

  /* Return true if the call can be hot.  */
  bool maybe_hot_p (void);

  /* Rebuild cgraph edges for current function node.  This needs to be run after
     passes that don't update the cgraph.  */
  static unsigned int rebuild_edges (void);

  /* Rebuild cgraph references for current function node.  This needs to be run
     after passes that don't update the cgraph.
  */
  static void rebuild_references (void);

  /* Expected number of executions: calculated in profile.c.  */
  profile_count count;
  cgraph_node *caller;
  cgraph_node *callee;
  cgraph_edge *prev_caller;
  cgraph_edge *next_caller;
  cgraph_edge *prev_callee;
  cgraph_edge *next_callee;
  gcall *call_stmt;
  /* Additional information about an indirect call.  Not cleared when an edge
     becomes direct.  */
  cgraph_indirect_call_info *indirect_info;
  PTR GTY ((skip (""))) aux;
  /* When equal to CIF_OK, inline this call.  Otherwise, points to the
     explanation why function was not inlined.  */
  enum cgraph_inline_failed_t inline_failed;
  /* The stmt_uid of call_stmt.  This is used by LTO to recover the call_stmt
     when the function is serialized in.  */
  unsigned int lto_stmt_uid;
  /* Unique id of the edge.  */
  int uid;
  /* Whether this edge was made direct by indirect inlining.  */
  unsigned int indirect_inlining_edge : 1;
  /* Whether this edge describes an indirect call with an undetermined
     callee.  */
  unsigned int indirect_unknown_callee : 1;
  /* True if the corresponding CALL stmt cannot be inlined.  */
  unsigned int call_stmt_cannot_inline_p : 1;
  /* Can this call throw externally?  */
  unsigned int can_throw_external : 1;
  /* Edges with SPECULATIVE flag represents indirect calls that was
     speculatively turned into direct (i.e. by profile feedback).
     The final code sequence will have form:

     if (call_target == expected_fn)
       expected_fn ();
     else
       call_target ();

     Every speculative call is represented by three components attached
     to a same call statement:
     1) a direct call (to expected_fn)
     2) an indirect call (to call_target)
     3) an IPA_REF_ADDR reference to expected_fn.

     Optimizers may later redirect direct call to clone, so 1) and 3)
     do not need to necessarily agree with destination.  */
  unsigned int speculative : 1;
  /* Set to true when caller is a constructor or destructor of polymorphic
     type.  */
  unsigned in_polymorphic_cdtor : 1;

  /* Return true if call must bind to current definition.  */
  bool binds_to_current_def_p ();

  /* Expected frequency of executions within the function.
     When set to CGRAPH_FREQ_BASE, the edge is expected to be called once
     per function call.  The range is 0 to CGRAPH_FREQ_MAX.  */
  int frequency ();

  /* Expected frequency of executions within the function.  */
  sreal sreal_frequency ();
private:
  /* Remove the edge from the list of the callers of the callee.  */
  void remove_caller (void);

  /* Remove the edge from the list of the callees of the caller.  */
  void remove_callee (void);

  /* Set callee N of call graph edge and add it to the corresponding set of
     callers.  */
  void set_callee (cgraph_node *n);

  /* Output flags of edge to a file F.  */
  void dump_edge_flags (FILE *f);

  /* Verify that call graph edge corresponds to DECL from the associated
     statement.  Return true if the verification should fail.  */
  bool verify_corresponds_to_fndecl (tree decl);
};

#define CGRAPH_FREQ_BASE 1000
#define CGRAPH_FREQ_MAX 100000

/* The varpool data structure.
   Each static variable decl has assigned varpool_node.  */

class GTY((tag ("SYMTAB_VARIABLE"))) varpool_node : public symtab_node {
public:
  /* Dump given varpool node to F.  */
  void dump (FILE *f);

  /* Dump given varpool node to stderr.  */
  void DEBUG_FUNCTION debug (void);

  /* Remove variable from symbol table.  */
  void remove (void);

  /* Remove node initializer when it is no longer needed.
  */
  void remove_initializer (void);

  /* Analyze the variable.  */
  void analyze (void);

  /* Return variable availability.  */
  availability get_availability (symtab_node *ref = NULL);

  /* When doing LTO, read variable's constructor from disk if
     it is not already present.  */
  tree get_constructor (void);

  /* Return true if variable has constructor that can be used for folding.  */
  bool ctor_useable_for_folding_p (void);

  /* For given variable pool node, walk the alias chain to return the function
     the variable is alias of.  Do not walk through thunks.
     When AVAILABILITY is non-NULL, get minimal availability in the chain.
     When REF is non-NULL, assume that reference happens in symbol REF
     when determining the availability.  */
  inline varpool_node *ultimate_alias_target
    (availability *availability = NULL, symtab_node *ref = NULL);

  /* Return node that alias is aliasing.  */
  inline varpool_node *get_alias_target (void);

  /* Output one variable, if necessary.  Return whether we output it.  */
  bool assemble_decl (void);

  /* For variables in named sections make sure get_variable_section
     is called before we switch to those sections.  Then section
     conflicts between read-only and read-only requiring relocations
     sections can be resolved.  */
  void finalize_named_section_flags (void);

  /* Call callback on varpool symbol and aliases associated to varpool symbol.
     When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
     skipped.  */
  bool call_for_symbol_and_aliases (bool (*callback) (varpool_node *, void *),
				    void *data,
				    bool include_overwritable);

  /* Return true when variable should be considered externally visible.  */
  bool externally_visible_p (void);

  /* Return true when all references to variable must be visible
     in ipa_ref_list.
     i.e.
 if the variable is not externally visible or not used in some magic
     way (asm statement or such).
     The magic uses are all summarized in force_output flag.  */
  inline bool all_refs_explicit_p ();

  /* Return true when variable can be removed from variable pool
     if all direct calls are eliminated.  */
  inline bool can_remove_if_no_refs_p (void);

  /* Add the variable DECL to the varpool.
     Unlike finalize_decl function is intended to be used
     by middle end and allows insertion of new variable at arbitrary point
     of compilation.  */
  static void add (tree decl);

  /* Return varpool node for given symbol and check it is a variable.  */
  static inline varpool_node *get (const_tree decl);

  /* Mark DECL as finalized.  By finalizing the declaration, frontend instruct
     the middle end to output the variable to asm file, if needed or externally
     visible.  */
  static void finalize_decl (tree decl);

  /* Attempt to mark ALIAS as an alias to DECL.  Return TRUE if successful.
     Extra name aliases are output whenever DECL is output.  */
  static varpool_node * create_extra_name_alias (tree alias, tree decl);

  /* Attempt to mark ALIAS as an alias to DECL.  Return TRUE if successful.  */
  static varpool_node * create_alias (tree, tree);

  /* Dump the variable pool to F.  */
  static void dump_varpool (FILE *f);

  /* Dump the variable pool to stderr.  */
  static void DEBUG_FUNCTION debug_varpool (void);

  /* Allocate new callgraph node and insert it into basic data structures.  */
  static varpool_node *create_empty (void);

  /* Return varpool node assigned to DECL.  Create new one when needed.  */
  static varpool_node *get_create (tree decl);

  /* Given an assembler name, lookup node.
  */
  static varpool_node *get_for_asmname (tree asmname);

  /* Set when variable is scheduled to be assembled.  */
  unsigned output : 1;

  /* Set when variable has statically initialized pointer
     or is a static bounds variable and needs initialization.  */
  unsigned need_bounds_init : 1;

  /* Set if the variable is dynamically initialized, except for
     function local statics.  */
  unsigned dynamically_initialized : 1;

  ENUM_BITFIELD(tls_model) tls_model : 3;

  /* Set if the variable is known to be used by single function only.
     This is computed by the ipa_single_use pass and used by late optimizations
     in places where optimization would be valid for local static variable
     if we did not do any inter-procedural code movement.  */
  unsigned used_by_single_function : 1;

private:
  /* Assemble thunks and aliases associated to varpool node.  */
  void assemble_aliases (void);

  /* Worker for call_for_node_and_aliases.  */
  bool call_for_symbol_and_aliases_1 (bool (*callback) (varpool_node *, void *),
				      void *data,
				      bool include_overwritable);
};

/* Every top level asm statement is put into an asm_node.  */

struct GTY(()) asm_node {
  /* Next asm node.  */
  asm_node *next;
  /* String for this asm node.  */
  tree asm_str;
  /* Ordering of all cgraph nodes.  */
  int order;
};

/* Report whether or not THIS symtab node is a function, aka cgraph_node.  */

template <>
template <>
inline bool
is_a_helper <cgraph_node *>::test (symtab_node *p)
{
  return p && p->type == SYMTAB_FUNCTION;
}

/* Report whether or not THIS symtab node is a variable, aka varpool_node.
  */

template <>
template <>
inline bool
is_a_helper <varpool_node *>::test (symtab_node *p)
{
  return p && p->type == SYMTAB_VARIABLE;
}

/* Macros to access the next item in the list of free cgraph nodes and
   edges.  */
#define NEXT_FREE_NODE(NODE) dyn_cast<cgraph_node *> ((NODE)->next)
#define SET_NEXT_FREE_NODE(NODE,NODE2) ((NODE))->next = NODE2
#define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller

typedef void (*cgraph_edge_hook)(cgraph_edge *, void *);
typedef void (*cgraph_node_hook)(cgraph_node *, void *);
typedef void (*varpool_node_hook)(varpool_node *, void *);
typedef void (*cgraph_2edge_hook)(cgraph_edge *, cgraph_edge *, void *);
typedef void (*cgraph_2node_hook)(cgraph_node *, cgraph_node *, void *);

struct cgraph_edge_hook_list;
struct cgraph_node_hook_list;
struct varpool_node_hook_list;
struct cgraph_2edge_hook_list;
struct cgraph_2node_hook_list;

/* Map from a symbol to initialization/finalization priorities.  */
struct GTY(()) symbol_priority_map {
  priority_type init;
  priority_type fini;
};

enum symtab_state
{
  /* Frontend is parsing and finalizing functions.  */
  PARSING,
  /* Callgraph is being constructed.  It is safe to add new functions.  */
  CONSTRUCTION,
  /* Callgraph is being streamed-in at LTO time.  */
  LTO_STREAMING,
  /* Callgraph is built and early IPA passes are being run.  */
  IPA,
  /* Callgraph is built and all functions are transformed to SSA form.  */
  IPA_SSA,
  /* All inline decisions are done; it is now possible to remove extern inline
     functions and virtual call targets.  */
  IPA_SSA_AFTER_INLINING,
  /* Functions are now ordered and being passed to RTL expanders.  */
  EXPANSION,
  /* All cgraph expansion is done.
  */
  FINISHED
};

struct asmname_hasher : ggc_ptr_hash <symtab_node>
{
  typedef const_tree compare_type;

  static hashval_t hash (symtab_node *n);
  static bool equal (symtab_node *n, const_tree t);
};

/* The symbol table: all functions, variables and top-level asms known to
   the compilation unit.  */

class GTY((tag ("SYMTAB"))) symbol_table
{
public:
  friend class symtab_node;
  friend class cgraph_node;
  friend class cgraph_edge;

  symbol_table (): cgraph_max_summary_uid (1)
  {
  }

  /* Initialize callgraph dump file.  */
  void initialize (void);

  /* Register a top-level asm statement ASM_STR.  */
  inline asm_node *finalize_toplevel_asm (tree asm_str);

  /* Analyze the whole compilation unit once it is parsed completely.  */
  void finalize_compilation_unit (void);

  /* C++ frontend produce same body aliases all over the place, even before PCH
     gets streamed out.  It relies on us linking the aliases with their function
     in order to do the fixups, but ipa-ref is not PCH safe.  Consequently we
     first produce aliases without links, but once C++ FE is sure he won't
     stream PCH we build the links via this function.  */
  void process_same_body_aliases (void);

  /* Perform simple optimizations based on callgraph.  */
  void compile (void);

  /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
     functions into callgraph in a way so they look like ordinary reachable
     functions inserted into callgraph already at construction time.  */
  void process_new_functions (void);

  /* Once all functions from compilation unit are in memory, produce all clones
     and update all calls.  We might also do this on demand if we don't want to
     bring all functions to memory prior compilation, but current WHOPR
     implementation does that and it is bit easier to keep everything right
     in this order.  */
  void materialize_all_clones (void);

  /* Register a symbol NODE.
  */
  inline void register_symbol (symtab_node *node);

  /* Forget all registered top-level asm statements.  */
  inline void
  clear_asm_symbols (void)
  {
    asmnodes = NULL;
    asm_last_node = NULL;
  }

  /* Perform reachability analysis and reclaim all unreachable nodes.  */
  bool remove_unreachable_nodes (FILE *file);

  /* Optimization of function bodies might've rendered some variables as
     unnecessary so we want to avoid these from being compiled.  Re-do
     reachability starting from variables that are either externally visible
     or was referred from the asm output routines.  */
  void remove_unreferenced_decls (void);

  /* Unregister a symbol NODE.  */
  inline void unregister (symtab_node *node);

  /* Allocate new callgraph node and insert it into basic data structures.  */
  cgraph_node *create_empty (void);

  /* Release a callgraph NODE with UID and put in to the list
     of free nodes.  */
  void release_symbol (cgraph_node *node, int uid);

  /* Output all variables enqueued to be assembled.  */
  bool output_variables (void);

  /* Weakrefs may be associated to external decls and thus not output
     at expansion time.  Emit all necessary aliases.  */
  void output_weakrefs (void);

  /* Return first symbol.  */
  inline symtab_node *first_symbol (void);

  /* Return first assembler symbol.  */
  inline asm_node *
  first_asm_symbol (void)
  {
    return asmnodes;
  }

  /* Return first static symbol with definition.  */
  inline symtab_node *first_defined_symbol (void);

  /* Return first variable.  */
  inline varpool_node *first_variable (void);

  /* Return next variable after NODE.  */
  inline varpool_node *next_variable (varpool_node *node);

  /* Return first static variable with initializer.
  */
  inline varpool_node *first_static_initializer (void);

  /* Return next static variable with initializer after NODE.  */
  inline varpool_node *next_static_initializer (varpool_node *node);

  /* Return first static variable with definition.  */
  inline varpool_node *first_defined_variable (void);

  /* Return next static variable with definition after NODE.  */
  inline varpool_node *next_defined_variable (varpool_node *node);

  /* Return first function with body defined.  */
  inline cgraph_node *first_defined_function (void);

  /* Return next function with body defined after NODE.  */
  inline cgraph_node *next_defined_function (cgraph_node *node);

  /* Return first function.  */
  inline cgraph_node *first_function (void);

  /* Return next function.  */
  inline cgraph_node *next_function (cgraph_node *node);

  /* Return first function with gimple body.  */
  cgraph_node *first_function_with_gimple_body (void);

  /* Return next function with gimple body after NODE.  */
  inline cgraph_node *next_function_with_gimple_body (cgraph_node *node);

  /* Register HOOK to be called with DATA on each removed edge.  */
  cgraph_edge_hook_list *add_edge_removal_hook (cgraph_edge_hook hook,
						void *data);

  /* Remove ENTRY from the list of hooks called on removing edges.  */
  void remove_edge_removal_hook (cgraph_edge_hook_list *entry);

  /* Register HOOK to be called with DATA on each removed node.  */
  cgraph_node_hook_list *add_cgraph_removal_hook (cgraph_node_hook hook,
						  void *data);

  /* Remove ENTRY from the list of hooks called on removing nodes.  */
  void remove_cgraph_removal_hook (cgraph_node_hook_list *entry);

  /* Register HOOK to be called with DATA on each removed node.
  */
  varpool_node_hook_list *add_varpool_removal_hook (varpool_node_hook hook,
						    void *data);

  /* Remove ENTRY from the list of hooks called on removing nodes.  */
  void remove_varpool_removal_hook (varpool_node_hook_list *entry);

  /* Register HOOK to be called with DATA on each inserted node.  */
  cgraph_node_hook_list *add_cgraph_insertion_hook (cgraph_node_hook hook,
						    void *data);

  /* Remove ENTRY from the list of hooks called on inserted nodes.  */
  void remove_cgraph_insertion_hook (cgraph_node_hook_list *entry);

  /* Register HOOK to be called with DATA on each inserted node.  */
  varpool_node_hook_list *add_varpool_insertion_hook (varpool_node_hook hook,
						      void *data);

  /* Remove ENTRY from the list of hooks called on inserted nodes.  */
  void remove_varpool_insertion_hook (varpool_node_hook_list *entry);

  /* Register HOOK to be called with DATA on each duplicated edge.  */
  cgraph_2edge_hook_list *add_edge_duplication_hook (cgraph_2edge_hook hook,
						     void *data);

  /* Remove ENTRY from the list of hooks called on duplicating edges.  */
  void remove_edge_duplication_hook (cgraph_2edge_hook_list *entry);

  /* Register HOOK to be called with DATA on each duplicated node.  */
  cgraph_2node_hook_list *add_cgraph_duplication_hook (cgraph_2node_hook hook,
						       void *data);

  /* Remove ENTRY from the list of hooks called on duplicating nodes.  */
  void remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry);

  /* Call all edge removal hooks.  */
  void call_edge_removal_hooks (cgraph_edge *e);

  /* Call all node insertion hooks.  */
  void call_cgraph_insertion_hooks (cgraph_node *node);

  /* Call all node removal hooks.  */
  void call_cgraph_removal_hooks (cgraph_node *node);

  /* Call all node duplication hooks.
  */
  void call_cgraph_duplication_hooks (cgraph_node *node, cgraph_node *node2);

  /* Call all edge duplication hooks.  */
  void call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2);

  /* Call all node removal hooks.  */
  void call_varpool_removal_hooks (varpool_node *node);

  /* Call all node insertion hooks.  */
  void call_varpool_insertion_hooks (varpool_node *node);

  /* Arrange node to be first in its entry of assembler_name_hash.  */
  void symtab_prevail_in_asm_name_hash (symtab_node *node);

  /* Initialize asm name hash unless it is already initialized.  */
  void symtab_initialize_asm_name_hash (void);

  /* Set the DECL_ASSEMBLER_NAME and update symtab hashtables.  */
  void change_decl_assembler_name (tree decl, tree name);

  /* Dump symbol table to F.  */
  void dump (FILE *f);

  /* Dump symbol table to stderr.  */
  void DEBUG_FUNCTION debug (void);

  /* Return true if assembler names NAME1 and NAME2 leads to the same symbol
     name.  */
  static bool assembler_names_equal_p (const char *name1, const char *name2);

  int cgraph_count;
  int cgraph_max_uid;
  int cgraph_max_summary_uid;

  int edges_count;
  int edges_max_uid;

  symtab_node* GTY(()) nodes;
  asm_node* GTY(()) asmnodes;
  asm_node* GTY(()) asm_last_node;
  cgraph_node* GTY(()) free_nodes;

  /* Head of a linked list of unused (freed) call graph edges.
     Do not GTY((delete)) this list so UIDs gets reliably recycled.  */
  cgraph_edge * GTY(()) free_edges;

  /* The order index of the next symtab node to be created.  This is
     used so that we can sort the cgraph nodes in order by when we saw
     them, to support -fno-toplevel-reorder.  */
  int order;

  /* Set when whole unit has been analyzed so we can access global info.  */
  bool global_info_ready;
  /* What state callgraph is in right now.
  */
  enum symtab_state state;
  /* Set when the cgraph is fully build and the basic flags are computed.  */
  bool function_flags_ready;

  bool cpp_implicit_aliases_done;

  /* Hash table used to hold sections.  */
  hash_table<section_name_hasher> *GTY(()) section_hash;

  /* Hash table used to convert assembler names into nodes.  */
  hash_table<asmname_hasher> *assembler_name_hash;

  /* Hash table used to hold init priorities.  */
  hash_map<symtab_node *, symbol_priority_map> *init_priority_hash;

  FILE* GTY ((skip)) dump_file;

  /* Return symbol used to separate symbol name from suffix.  */
  static char symbol_suffix_separator ();

  FILE* GTY ((skip)) ipa_clones_dump_file;

  hash_set <const cgraph_node *> GTY ((skip)) cloned_nodes;

private:
  /* Allocate new callgraph node.  */
  inline cgraph_node * allocate_cgraph_symbol (void);

  /* Allocate a cgraph_edge structure and fill it with data according to the
     parameters of which only CALLEE can be NULL (when creating an indirect call
     edge).  */
  cgraph_edge *create_edge (cgraph_node *caller, cgraph_node *callee,
			    gcall *call_stmt, profile_count count,
			    bool indir_unknown_callee);

  /* Put the edge onto the free list.  */
  void free_edge (cgraph_edge *e);

  /* Insert NODE to assembler name hash.  */
  void insert_to_assembler_name_hash (symtab_node *node, bool with_clones);

  /* Remove NODE from assembler name hash.  */
  void unlink_from_assembler_name_hash (symtab_node *node, bool with_clones);

  /* Hash asmnames ignoring the user specified marks.  */
  static hashval_t decl_assembler_name_hash (const_tree asmname);

  /* Compare ASMNAME with the DECL_ASSEMBLER_NAME of DECL.
  */
  static bool decl_assembler_name_equal (tree decl, const_tree asmname);

  friend struct asmname_hasher;

  /* List of hooks triggered when an edge is removed.  */
  cgraph_edge_hook_list * GTY((skip)) m_first_edge_removal_hook;
  /* List of hooks triggered when a cgraph node is removed.  */
  cgraph_node_hook_list * GTY((skip)) m_first_cgraph_removal_hook;
  /* List of hooks triggered when an edge is duplicated.  */
  cgraph_2edge_hook_list * GTY((skip)) m_first_edge_duplicated_hook;
  /* List of hooks triggered when a node is duplicated.  */
  cgraph_2node_hook_list * GTY((skip)) m_first_cgraph_duplicated_hook;
  /* List of hooks triggered when a function is inserted.  */
  cgraph_node_hook_list * GTY((skip)) m_first_cgraph_insertion_hook;
  /* List of hooks triggered when a variable is inserted.  */
  varpool_node_hook_list * GTY((skip)) m_first_varpool_insertion_hook;
  /* List of hooks triggered when a node is removed.  */
  varpool_node_hook_list * GTY((skip)) m_first_varpool_removal_hook;
};

extern GTY(()) symbol_table *symtab;

extern vec<cgraph_node *> cgraph_new_nodes;

/* Hash an assembler-name symbol by its DECL_ASSEMBLER_NAME.  */

inline hashval_t
asmname_hasher::hash (symtab_node *n)
{
  return symbol_table::decl_assembler_name_hash
	   (DECL_ASSEMBLER_NAME (n->decl));
}

/* Compare symbol N's assembler name with name T.  */

inline bool
asmname_hasher::equal (symtab_node *n, const_tree t)
{
  return symbol_table::decl_assembler_name_equal (n->decl, t);
}

/* In cgraph.c  */
void cgraph_c_finalize (void);
void release_function_body (tree);
cgraph_indirect_call_info *cgraph_allocate_init_indirect_info (void);

void cgraph_update_edges_for_call_stmt (gimple *, tree, gimple *);
bool cgraph_function_possibly_inlined_p (tree);

const char* cgraph_inline_failed_string (cgraph_inline_failed_t);
cgraph_inline_failed_type_t cgraph_inline_failed_type (cgraph_inline_failed_t);

extern
bool gimple_check_call_matching_types (gimple *, tree, bool); 2363 2364 /* In cgraphunit.c */ 2365 void cgraphunit_c_finalize (void); 2366 2367 /* Initialize datastructures so DECL is a function in lowered gimple form. 2368 IN_SSA is true if the gimple is in SSA. */ 2369 basic_block init_lowered_empty_function (tree, bool, profile_count); 2370 2371 tree thunk_adjust (gimple_stmt_iterator *, tree, bool, HOST_WIDE_INT, tree); 2372 /* In cgraphclones.c */ 2373 2374 tree clone_function_name_1 (const char *, const char *); 2375 tree clone_function_name (tree decl, const char *); 2376 2377 void tree_function_versioning (tree, tree, vec<ipa_replace_map *, va_gc> *, 2378 bool, bitmap, bool, bitmap, basic_block); 2379 2380 void dump_callgraph_transformation (const cgraph_node *original, 2381 const cgraph_node *clone, 2382 const char *suffix); 2383 tree cgraph_build_function_type_skip_args (tree orig_type, bitmap args_to_skip, 2384 bool skip_return); 2385 2386 /* In cgraphbuild.c */ 2387 int compute_call_stmt_bb_frequency (tree, basic_block bb); 2388 void record_references_in_initializer (tree, bool); 2389 2390 /* In ipa.c */ 2391 void cgraph_build_static_cdtor (char which, tree body, int priority); 2392 bool ipa_discover_readonly_nonaddressable_vars (void); 2393 2394 /* In varpool.c */ 2395 tree ctor_for_folding (tree); 2396 2397 /* In tree-chkp.c */ 2398 extern bool chkp_function_instrumented_p (tree fndecl); 2399 2400 /* In ipa-inline-analysis.c */ 2401 void initialize_inline_failed (struct cgraph_edge *); 2402 bool speculation_useful_p (struct cgraph_edge *e, bool anticipate_inlining); 2403 2404 /* Return true when the symbol is real symbol, i.e. it is not inline clone 2405 or abstract function kept for debug info purposes only. 
*/ 2406 inline bool 2407 symtab_node::real_symbol_p (void) 2408 { 2409 cgraph_node *cnode; 2410 2411 if (DECL_ABSTRACT_P (decl)) 2412 return false; 2413 if (transparent_alias && definition) 2414 return false; 2415 if (!is_a <cgraph_node *> (this)) 2416 return true; 2417 cnode = dyn_cast <cgraph_node *> (this); 2418 if (cnode->global.inlined_to) 2419 return false; 2420 return true; 2421 } 2422 2423 /* Return true if DECL should have entry in symbol table if used. 2424 Those are functions and static & external veriables*/ 2425 2426 static inline bool 2427 decl_in_symtab_p (const_tree decl) 2428 { 2429 return (TREE_CODE (decl) == FUNCTION_DECL 2430 || (TREE_CODE (decl) == VAR_DECL 2431 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))); 2432 } 2433 2434 inline bool 2435 symtab_node::in_same_comdat_group_p (symtab_node *target) 2436 { 2437 symtab_node *source = this; 2438 2439 if (cgraph_node *cn = dyn_cast <cgraph_node *> (target)) 2440 { 2441 if (cn->global.inlined_to) 2442 source = cn->global.inlined_to; 2443 } 2444 if (cgraph_node *cn = dyn_cast <cgraph_node *> (target)) 2445 { 2446 if (cn->global.inlined_to) 2447 target = cn->global.inlined_to; 2448 } 2449 2450 return source->get_comdat_group () == target->get_comdat_group (); 2451 } 2452 2453 /* Return node that alias is aliasing. */ 2454 2455 inline symtab_node * 2456 symtab_node::get_alias_target (void) 2457 { 2458 ipa_ref *ref = NULL; 2459 iterate_reference (0, ref); 2460 if (ref->use == IPA_REF_CHKP) 2461 iterate_reference (1, ref); 2462 gcc_checking_assert (ref->use == IPA_REF_ALIAS); 2463 return ref->referred; 2464 } 2465 2466 /* Return next reachable static symbol with initializer after the node. */ 2467 2468 inline symtab_node * 2469 symtab_node::next_defined_symbol (void) 2470 { 2471 symtab_node *node1 = next; 2472 2473 for (; node1; node1 = node1->next) 2474 if (node1->definition) 2475 return node1; 2476 2477 return NULL; 2478 } 2479 2480 /* Iterates I-th reference in the list, REF is also set. 
*/ 2481 2482 inline ipa_ref * 2483 symtab_node::iterate_reference (unsigned i, ipa_ref *&ref) 2484 { 2485 vec_safe_iterate (ref_list.references, i, &ref); 2486 2487 return ref; 2488 } 2489 2490 /* Iterates I-th referring item in the list, REF is also set. */ 2491 2492 inline ipa_ref * 2493 symtab_node::iterate_referring (unsigned i, ipa_ref *&ref) 2494 { 2495 ref_list.referring.iterate (i, &ref); 2496 2497 return ref; 2498 } 2499 2500 /* Iterates I-th referring alias item in the list, REF is also set. */ 2501 2502 inline ipa_ref * 2503 symtab_node::iterate_direct_aliases (unsigned i, ipa_ref *&ref) 2504 { 2505 ref_list.referring.iterate (i, &ref); 2506 2507 if (ref && ref->use != IPA_REF_ALIAS) 2508 return NULL; 2509 2510 return ref; 2511 } 2512 2513 /* Return true if list contains an alias. */ 2514 2515 inline bool 2516 symtab_node::has_aliases_p (void) 2517 { 2518 ipa_ref *ref = NULL; 2519 2520 return (iterate_direct_aliases (0, ref) != NULL); 2521 } 2522 2523 /* Return true when RESOLUTION indicate that linker will use 2524 the symbol from non-LTO object files. */ 2525 2526 inline bool 2527 resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution) 2528 { 2529 return (resolution == LDPR_PREVAILING_DEF 2530 || resolution == LDPR_PREEMPTED_REG 2531 || resolution == LDPR_RESOLVED_EXEC 2532 || resolution == LDPR_RESOLVED_DYN); 2533 } 2534 2535 /* Return true when symtab_node is known to be used from other (non-LTO) 2536 object file. Known only when doing LTO via linker plugin. */ 2537 2538 inline bool 2539 symtab_node::used_from_object_file_p (void) 2540 { 2541 if (!TREE_PUBLIC (decl) || DECL_EXTERNAL (decl)) 2542 return false; 2543 if (resolution_used_from_other_file_p (resolution)) 2544 return true; 2545 return false; 2546 } 2547 2548 /* Return varpool node for given symbol and check it is a function. 
*/ 2549 2550 inline varpool_node * 2551 varpool_node::get (const_tree decl) 2552 { 2553 gcc_checking_assert (TREE_CODE (decl) == VAR_DECL); 2554 return dyn_cast<varpool_node *> (symtab_node::get (decl)); 2555 } 2556 2557 /* Register a symbol NODE. */ 2558 2559 inline void 2560 symbol_table::register_symbol (symtab_node *node) 2561 { 2562 node->next = nodes; 2563 node->previous = NULL; 2564 2565 if (nodes) 2566 nodes->previous = node; 2567 nodes = node; 2568 2569 node->order = order++; 2570 } 2571 2572 /* Register a top-level asm statement ASM_STR. */ 2573 2574 asm_node * 2575 symbol_table::finalize_toplevel_asm (tree asm_str) 2576 { 2577 asm_node *node; 2578 2579 node = ggc_cleared_alloc<asm_node> (); 2580 node->asm_str = asm_str; 2581 node->order = order++; 2582 node->next = NULL; 2583 2584 if (asmnodes == NULL) 2585 asmnodes = node; 2586 else 2587 asm_last_node->next = node; 2588 2589 asm_last_node = node; 2590 return node; 2591 } 2592 2593 /* Unregister a symbol NODE. */ 2594 inline void 2595 symbol_table::unregister (symtab_node *node) 2596 { 2597 if (node->previous) 2598 node->previous->next = node->next; 2599 else 2600 nodes = node->next; 2601 2602 if (node->next) 2603 node->next->previous = node->previous; 2604 2605 node->next = NULL; 2606 node->previous = NULL; 2607 } 2608 2609 /* Release a callgraph NODE with UID and put in to the list of free nodes. */ 2610 2611 inline void 2612 symbol_table::release_symbol (cgraph_node *node, int uid) 2613 { 2614 cgraph_count--; 2615 2616 /* Clear out the node to NULL all pointers and add the node to the free 2617 list. */ 2618 memset (node, 0, sizeof (*node)); 2619 node->type = SYMTAB_FUNCTION; 2620 node->uid = uid; 2621 SET_NEXT_FREE_NODE (node, free_nodes); 2622 free_nodes = node; 2623 } 2624 2625 /* Allocate new callgraph node. 
*/ 2626 2627 inline cgraph_node * 2628 symbol_table::allocate_cgraph_symbol (void) 2629 { 2630 cgraph_node *node; 2631 2632 if (free_nodes) 2633 { 2634 node = free_nodes; 2635 free_nodes = NEXT_FREE_NODE (node); 2636 } 2637 else 2638 { 2639 node = ggc_cleared_alloc<cgraph_node> (); 2640 node->uid = cgraph_max_uid++; 2641 } 2642 2643 node->summary_uid = cgraph_max_summary_uid++; 2644 return node; 2645 } 2646 2647 2648 /* Return first static symbol with definition. */ 2649 inline symtab_node * 2650 symbol_table::first_symbol (void) 2651 { 2652 return nodes; 2653 } 2654 2655 /* Walk all symbols. */ 2656 #define FOR_EACH_SYMBOL(node) \ 2657 for ((node) = symtab->first_symbol (); (node); (node) = (node)->next) 2658 2659 /* Return first static symbol with definition. */ 2660 inline symtab_node * 2661 symbol_table::first_defined_symbol (void) 2662 { 2663 symtab_node *node; 2664 2665 for (node = nodes; node; node = node->next) 2666 if (node->definition) 2667 return node; 2668 2669 return NULL; 2670 } 2671 2672 /* Walk all symbols with definitions in current unit. */ 2673 #define FOR_EACH_DEFINED_SYMBOL(node) \ 2674 for ((node) = symtab->first_defined_symbol (); (node); \ 2675 (node) = node->next_defined_symbol ()) 2676 2677 /* Return first variable. */ 2678 inline varpool_node * 2679 symbol_table::first_variable (void) 2680 { 2681 symtab_node *node; 2682 for (node = nodes; node; node = node->next) 2683 if (varpool_node *vnode = dyn_cast <varpool_node *> (node)) 2684 return vnode; 2685 return NULL; 2686 } 2687 2688 /* Return next variable after NODE. */ 2689 inline varpool_node * 2690 symbol_table::next_variable (varpool_node *node) 2691 { 2692 symtab_node *node1 = node->next; 2693 for (; node1; node1 = node1->next) 2694 if (varpool_node *vnode1 = dyn_cast <varpool_node *> (node1)) 2695 return vnode1; 2696 return NULL; 2697 } 2698 /* Walk all variables. 
*/ 2699 #define FOR_EACH_VARIABLE(node) \ 2700 for ((node) = symtab->first_variable (); \ 2701 (node); \ 2702 (node) = symtab->next_variable ((node))) 2703 2704 /* Return first static variable with initializer. */ 2705 inline varpool_node * 2706 symbol_table::first_static_initializer (void) 2707 { 2708 symtab_node *node; 2709 for (node = nodes; node; node = node->next) 2710 { 2711 varpool_node *vnode = dyn_cast <varpool_node *> (node); 2712 if (vnode && DECL_INITIAL (node->decl)) 2713 return vnode; 2714 } 2715 return NULL; 2716 } 2717 2718 /* Return next static variable with initializer after NODE. */ 2719 inline varpool_node * 2720 symbol_table::next_static_initializer (varpool_node *node) 2721 { 2722 symtab_node *node1 = node->next; 2723 for (; node1; node1 = node1->next) 2724 { 2725 varpool_node *vnode1 = dyn_cast <varpool_node *> (node1); 2726 if (vnode1 && DECL_INITIAL (node1->decl)) 2727 return vnode1; 2728 } 2729 return NULL; 2730 } 2731 2732 /* Walk all static variables with initializer set. */ 2733 #define FOR_EACH_STATIC_INITIALIZER(node) \ 2734 for ((node) = symtab->first_static_initializer (); (node); \ 2735 (node) = symtab->next_static_initializer (node)) 2736 2737 /* Return first static variable with definition. */ 2738 inline varpool_node * 2739 symbol_table::first_defined_variable (void) 2740 { 2741 symtab_node *node; 2742 for (node = nodes; node; node = node->next) 2743 { 2744 varpool_node *vnode = dyn_cast <varpool_node *> (node); 2745 if (vnode && vnode->definition) 2746 return vnode; 2747 } 2748 return NULL; 2749 } 2750 2751 /* Return next static variable with definition after NODE. 
*/ 2752 inline varpool_node * 2753 symbol_table::next_defined_variable (varpool_node *node) 2754 { 2755 symtab_node *node1 = node->next; 2756 for (; node1; node1 = node1->next) 2757 { 2758 varpool_node *vnode1 = dyn_cast <varpool_node *> (node1); 2759 if (vnode1 && vnode1->definition) 2760 return vnode1; 2761 } 2762 return NULL; 2763 } 2764 /* Walk all variables with definitions in current unit. */ 2765 #define FOR_EACH_DEFINED_VARIABLE(node) \ 2766 for ((node) = symtab->first_defined_variable (); (node); \ 2767 (node) = symtab->next_defined_variable (node)) 2768 2769 /* Return first function with body defined. */ 2770 inline cgraph_node * 2771 symbol_table::first_defined_function (void) 2772 { 2773 symtab_node *node; 2774 for (node = nodes; node; node = node->next) 2775 { 2776 cgraph_node *cn = dyn_cast <cgraph_node *> (node); 2777 if (cn && cn->definition) 2778 return cn; 2779 } 2780 return NULL; 2781 } 2782 2783 /* Return next function with body defined after NODE. */ 2784 inline cgraph_node * 2785 symbol_table::next_defined_function (cgraph_node *node) 2786 { 2787 symtab_node *node1 = node->next; 2788 for (; node1; node1 = node1->next) 2789 { 2790 cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1); 2791 if (cn1 && cn1->definition) 2792 return cn1; 2793 } 2794 return NULL; 2795 } 2796 2797 /* Walk all functions with body defined. */ 2798 #define FOR_EACH_DEFINED_FUNCTION(node) \ 2799 for ((node) = symtab->first_defined_function (); (node); \ 2800 (node) = symtab->next_defined_function ((node))) 2801 2802 /* Return first function. */ 2803 inline cgraph_node * 2804 symbol_table::first_function (void) 2805 { 2806 symtab_node *node; 2807 for (node = nodes; node; node = node->next) 2808 if (cgraph_node *cn = dyn_cast <cgraph_node *> (node)) 2809 return cn; 2810 return NULL; 2811 } 2812 2813 /* Return next function. 
*/ 2814 inline cgraph_node * 2815 symbol_table::next_function (cgraph_node *node) 2816 { 2817 symtab_node *node1 = node->next; 2818 for (; node1; node1 = node1->next) 2819 if (cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1)) 2820 return cn1; 2821 return NULL; 2822 } 2823 2824 /* Return first function with body defined. */ 2825 inline cgraph_node * 2826 symbol_table::first_function_with_gimple_body (void) 2827 { 2828 symtab_node *node; 2829 for (node = nodes; node; node = node->next) 2830 { 2831 cgraph_node *cn = dyn_cast <cgraph_node *> (node); 2832 if (cn && cn->has_gimple_body_p ()) 2833 return cn; 2834 } 2835 return NULL; 2836 } 2837 2838 /* Return next reachable static variable with initializer after NODE. */ 2839 inline cgraph_node * 2840 symbol_table::next_function_with_gimple_body (cgraph_node *node) 2841 { 2842 symtab_node *node1 = node->next; 2843 for (; node1; node1 = node1->next) 2844 { 2845 cgraph_node *cn1 = dyn_cast <cgraph_node *> (node1); 2846 if (cn1 && cn1->has_gimple_body_p ()) 2847 return cn1; 2848 } 2849 return NULL; 2850 } 2851 2852 /* Walk all functions. */ 2853 #define FOR_EACH_FUNCTION(node) \ 2854 for ((node) = symtab->first_function (); (node); \ 2855 (node) = symtab->next_function ((node))) 2856 2857 /* Return true when callgraph node is a function with Gimple body defined 2858 in current unit. Functions can also be define externally or they 2859 can be thunks with no Gimple representation. 2860 2861 Note that at WPA stage, the function body may not be present in memory. */ 2862 2863 inline bool 2864 cgraph_node::has_gimple_body_p (void) 2865 { 2866 return definition && !thunk.thunk_p && !alias; 2867 } 2868 2869 /* Return true if this node represents a former, i.e. an expanded, thunk. */ 2870 2871 inline bool 2872 cgraph_node::former_thunk_p (void) 2873 { 2874 return (!thunk.thunk_p 2875 && (thunk.fixed_offset 2876 || thunk.virtual_offset_p)); 2877 } 2878 2879 /* Walk all functions with body defined. 
*/ 2880 #define FOR_EACH_FUNCTION_WITH_GIMPLE_BODY(node) \ 2881 for ((node) = symtab->first_function_with_gimple_body (); (node); \ 2882 (node) = symtab->next_function_with_gimple_body (node)) 2883 2884 /* Uniquize all constants that appear in memory. 2885 Each constant in memory thus far output is recorded 2886 in `const_desc_table'. */ 2887 2888 struct GTY((for_user)) constant_descriptor_tree { 2889 /* A MEM for the constant. */ 2890 rtx rtl; 2891 2892 /* The value of the constant. */ 2893 tree value; 2894 2895 /* Hash of value. Computing the hash from value each time 2896 hashfn is called can't work properly, as that means recursive 2897 use of the hash table during hash table expansion. */ 2898 hashval_t hash; 2899 }; 2900 2901 /* Return true when function is only called directly or it has alias. 2902 i.e. it is not externally visible, address was not taken and 2903 it is not used in any other non-standard way. */ 2904 2905 inline bool 2906 cgraph_node::only_called_directly_or_aliased_p (void) 2907 { 2908 gcc_assert (!global.inlined_to); 2909 return (!force_output && !address_taken 2910 && !ifunc_resolver 2911 && !used_from_other_partition 2912 && !DECL_VIRTUAL_P (decl) 2913 && !DECL_STATIC_CONSTRUCTOR (decl) 2914 && !DECL_STATIC_DESTRUCTOR (decl) 2915 && !used_from_object_file_p () 2916 && !externally_visible); 2917 } 2918 2919 /* Return true when function can be removed from callgraph 2920 if all direct calls are eliminated. */ 2921 2922 inline bool 2923 cgraph_node::can_remove_if_no_direct_calls_and_refs_p (void) 2924 { 2925 gcc_checking_assert (!global.inlined_to); 2926 /* Instrumentation clones should not be removed before 2927 instrumentation happens. New callers may appear after 2928 instrumentation. */ 2929 if (instrumentation_clone 2930 && !chkp_function_instrumented_p (decl)) 2931 return false; 2932 /* Extern inlines can always go, we will use the external definition. 
*/ 2933 if (DECL_EXTERNAL (decl)) 2934 return true; 2935 /* When function is needed, we can not remove it. */ 2936 if (force_output || used_from_other_partition) 2937 return false; 2938 if (DECL_STATIC_CONSTRUCTOR (decl) 2939 || DECL_STATIC_DESTRUCTOR (decl)) 2940 return false; 2941 /* Only COMDAT functions can be removed if externally visible. */ 2942 if (externally_visible 2943 && (!DECL_COMDAT (decl) 2944 || forced_by_abi 2945 || used_from_object_file_p ())) 2946 return false; 2947 return true; 2948 } 2949 2950 /* Verify cgraph, if consistency checking is enabled. */ 2951 2952 inline void 2953 cgraph_node::checking_verify_cgraph_nodes (void) 2954 { 2955 if (flag_checking) 2956 cgraph_node::verify_cgraph_nodes (); 2957 } 2958 2959 /* Return true when variable can be removed from variable pool 2960 if all direct calls are eliminated. */ 2961 2962 inline bool 2963 varpool_node::can_remove_if_no_refs_p (void) 2964 { 2965 if (DECL_EXTERNAL (decl)) 2966 return true; 2967 return (!force_output && !used_from_other_partition 2968 && ((DECL_COMDAT (decl) 2969 && !forced_by_abi 2970 && !used_from_object_file_p ()) 2971 || !externally_visible 2972 || DECL_HAS_VALUE_EXPR_P (decl))); 2973 } 2974 2975 /* Return true when all references to variable must be visible in ipa_ref_list. 2976 i.e. if the variable is not externally visible or not used in some magic 2977 way (asm statement or such). 2978 The magic uses are all summarized in force_output flag. */ 2979 2980 inline bool 2981 varpool_node::all_refs_explicit_p () 2982 { 2983 return (definition 2984 && !externally_visible 2985 && !used_from_other_partition 2986 && !force_output); 2987 } 2988 2989 struct tree_descriptor_hasher : ggc_ptr_hash<constant_descriptor_tree> 2990 { 2991 static hashval_t hash (constant_descriptor_tree *); 2992 static bool equal (constant_descriptor_tree *, constant_descriptor_tree *); 2993 }; 2994 2995 /* Constant pool accessor function. 
*/ 2996 hash_table<tree_descriptor_hasher> *constant_pool_htab (void); 2997 2998 /* Return node that alias is aliasing. */ 2999 3000 inline cgraph_node * 3001 cgraph_node::get_alias_target (void) 3002 { 3003 return dyn_cast <cgraph_node *> (symtab_node::get_alias_target ()); 3004 } 3005 3006 /* Return node that alias is aliasing. */ 3007 3008 inline varpool_node * 3009 varpool_node::get_alias_target (void) 3010 { 3011 return dyn_cast <varpool_node *> (symtab_node::get_alias_target ()); 3012 } 3013 3014 /* Walk the alias chain to return the symbol NODE is alias of. 3015 If NODE is not an alias, return NODE. 3016 When AVAILABILITY is non-NULL, get minimal availability in the chain. 3017 When REF is non-NULL, assume that reference happens in symbol REF 3018 when determining the availability. */ 3019 3020 inline symtab_node * 3021 symtab_node::ultimate_alias_target (enum availability *availability, 3022 symtab_node *ref) 3023 { 3024 if (!alias) 3025 { 3026 if (availability) 3027 *availability = get_availability (ref); 3028 return this; 3029 } 3030 3031 return ultimate_alias_target_1 (availability, ref); 3032 } 3033 3034 /* Given function symbol, walk the alias chain to return the function node 3035 is alias of. Do not walk through thunks. 3036 When AVAILABILITY is non-NULL, get minimal availability in the chain. 3037 When REF is non-NULL, assume that reference happens in symbol REF 3038 when determining the availability. */ 3039 3040 inline cgraph_node * 3041 cgraph_node::ultimate_alias_target (enum availability *availability, 3042 symtab_node *ref) 3043 { 3044 cgraph_node *n = dyn_cast <cgraph_node *> 3045 (symtab_node::ultimate_alias_target (availability, ref)); 3046 if (!n && availability) 3047 *availability = AVAIL_NOT_AVAILABLE; 3048 return n; 3049 } 3050 3051 /* For given variable pool node, walk the alias chain to return the function 3052 the variable is alias of. Do not walk through thunks. 
3053 When AVAILABILITY is non-NULL, get minimal availability in the chain. 3054 When REF is non-NULL, assume that reference happens in symbol REF 3055 when determining the availability. */ 3056 3057 inline varpool_node * 3058 varpool_node::ultimate_alias_target (availability *availability, 3059 symtab_node *ref) 3060 { 3061 varpool_node *n = dyn_cast <varpool_node *> 3062 (symtab_node::ultimate_alias_target (availability, ref)); 3063 3064 if (!n && availability) 3065 *availability = AVAIL_NOT_AVAILABLE; 3066 return n; 3067 } 3068 3069 /* Set callee N of call graph edge and add it to the corresponding set of 3070 callers. */ 3071 3072 inline void 3073 cgraph_edge::set_callee (cgraph_node *n) 3074 { 3075 prev_caller = NULL; 3076 if (n->callers) 3077 n->callers->prev_caller = this; 3078 next_caller = n->callers; 3079 n->callers = this; 3080 callee = n; 3081 } 3082 3083 /* Redirect callee of the edge to N. The function does not update underlying 3084 call expression. */ 3085 3086 inline void 3087 cgraph_edge::redirect_callee (cgraph_node *n) 3088 { 3089 /* Remove from callers list of the current callee. */ 3090 remove_callee (); 3091 3092 /* Insert to callers list of the new callee. */ 3093 set_callee (n); 3094 } 3095 3096 /* Return true when the edge represents a direct recursion. */ 3097 3098 inline bool 3099 cgraph_edge::recursive_p (void) 3100 { 3101 cgraph_node *c = callee->ultimate_alias_target (); 3102 if (caller->global.inlined_to) 3103 return caller->global.inlined_to->decl == c->decl; 3104 else 3105 return caller->decl == c->decl; 3106 } 3107 3108 /* Remove the edge from the list of the callers of the callee. 
*/ 3109 3110 inline void 3111 cgraph_edge::remove_callee (void) 3112 { 3113 gcc_assert (!indirect_unknown_callee); 3114 if (prev_caller) 3115 prev_caller->next_caller = next_caller; 3116 if (next_caller) 3117 next_caller->prev_caller = prev_caller; 3118 if (!prev_caller) 3119 callee->callers = next_caller; 3120 } 3121 3122 /* Return true if call must bind to current definition. */ 3123 3124 inline bool 3125 cgraph_edge::binds_to_current_def_p () 3126 { 3127 if (callee) 3128 return callee->binds_to_current_def_p (caller); 3129 else 3130 return false; 3131 } 3132 3133 /* Expected frequency of executions within the function. 3134 When set to CGRAPH_FREQ_BASE, the edge is expected to be called once 3135 per function call. The range is 0 to CGRAPH_FREQ_MAX. */ 3136 3137 inline int 3138 cgraph_edge::frequency () 3139 { 3140 return count.to_cgraph_frequency (caller->global.inlined_to 3141 ? caller->global.inlined_to->count 3142 : caller->count); 3143 } 3144 3145 3146 /* Return true if the TM_CLONE bit is set for a given FNDECL. */ 3147 static inline bool 3148 decl_is_tm_clone (const_tree fndecl) 3149 { 3150 cgraph_node *n = cgraph_node::get (fndecl); 3151 if (n) 3152 return n->tm_clone; 3153 return false; 3154 } 3155 3156 /* Likewise indicate that a node is needed, i.e. reachable via some 3157 external means. */ 3158 3159 inline void 3160 cgraph_node::mark_force_output (void) 3161 { 3162 force_output = 1; 3163 gcc_checking_assert (!global.inlined_to); 3164 } 3165 3166 /* Return true if function should be optimized for size. */ 3167 3168 inline bool 3169 cgraph_node::optimize_for_size_p (void) 3170 { 3171 if (opt_for_fn (decl, optimize_size)) 3172 return true; 3173 if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED) 3174 return true; 3175 else 3176 return false; 3177 } 3178 3179 /* Return symtab_node for NODE or create one if it is not present 3180 in symtab. 
*/ 3181 3182 inline symtab_node * 3183 symtab_node::get_create (tree node) 3184 { 3185 if (TREE_CODE (node) == VAR_DECL) 3186 return varpool_node::get_create (node); 3187 else 3188 return cgraph_node::get_create (node); 3189 } 3190 3191 /* Return availability of NODE when referenced from REF. */ 3192 3193 inline enum availability 3194 symtab_node::get_availability (symtab_node *ref) 3195 { 3196 if (is_a <cgraph_node *> (this)) 3197 return dyn_cast <cgraph_node *> (this)->get_availability (ref); 3198 else 3199 return dyn_cast <varpool_node *> (this)->get_availability (ref); 3200 } 3201 3202 /* Call calback on symtab node and aliases associated to this node. 3203 When INCLUDE_OVERWRITABLE is false, overwritable symbols are skipped. */ 3204 3205 inline bool 3206 symtab_node::call_for_symbol_and_aliases (bool (*callback) (symtab_node *, 3207 void *), 3208 void *data, 3209 bool include_overwritable) 3210 { 3211 if (include_overwritable 3212 || get_availability () > AVAIL_INTERPOSABLE) 3213 { 3214 if (callback (this, data)) 3215 return true; 3216 } 3217 if (has_aliases_p ()) 3218 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable); 3219 return false; 3220 } 3221 3222 /* Call callback on function and aliases associated to the function. 3223 When INCLUDE_OVERWRITABLE is false, overwritable symbols are 3224 skipped. */ 3225 3226 inline bool 3227 cgraph_node::call_for_symbol_and_aliases (bool (*callback) (cgraph_node *, 3228 void *), 3229 void *data, 3230 bool include_overwritable) 3231 { 3232 if (include_overwritable 3233 || get_availability () > AVAIL_INTERPOSABLE) 3234 { 3235 if (callback (this, data)) 3236 return true; 3237 } 3238 if (has_aliases_p ()) 3239 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable); 3240 return false; 3241 } 3242 3243 /* Call calback on varpool symbol and aliases associated to varpool symbol. 3244 When INCLUDE_OVERWRITABLE is false, overwritable symbols are 3245 skipped. 
*/ 3246 3247 inline bool 3248 varpool_node::call_for_symbol_and_aliases (bool (*callback) (varpool_node *, 3249 void *), 3250 void *data, 3251 bool include_overwritable) 3252 { 3253 if (include_overwritable 3254 || get_availability () > AVAIL_INTERPOSABLE) 3255 { 3256 if (callback (this, data)) 3257 return true; 3258 } 3259 if (has_aliases_p ()) 3260 return call_for_symbol_and_aliases_1 (callback, data, include_overwritable); 3261 return false; 3262 } 3263 3264 /* Return true if refernece may be used in address compare. */ 3265 3266 inline bool 3267 ipa_ref::address_matters_p () 3268 { 3269 if (use != IPA_REF_ADDR) 3270 return false; 3271 /* Addresses taken from virtual tables are never compared. */ 3272 if (is_a <varpool_node *> (referring) 3273 && DECL_VIRTUAL_P (referring->decl)) 3274 return false; 3275 return referred->address_can_be_compared_p (); 3276 } 3277 3278 /* Build polymorphic call context for indirect call E. */ 3279 3280 inline 3281 ipa_polymorphic_call_context::ipa_polymorphic_call_context (cgraph_edge *e) 3282 { 3283 gcc_checking_assert (e->indirect_info->polymorphic); 3284 *this = e->indirect_info->context; 3285 } 3286 3287 /* Build empty "I know nothing" context. */ 3288 3289 inline 3290 ipa_polymorphic_call_context::ipa_polymorphic_call_context () 3291 { 3292 clear_speculation (); 3293 clear_outer_type (); 3294 invalid = false; 3295 } 3296 3297 /* Make context non-speculative. */ 3298 3299 inline void 3300 ipa_polymorphic_call_context::clear_speculation () 3301 { 3302 speculative_outer_type = NULL; 3303 speculative_offset = 0; 3304 speculative_maybe_derived_type = false; 3305 } 3306 3307 /* Produce context specifying all derrived types of OTR_TYPE. If OTR_TYPE is 3308 NULL, the context is set to dummy "I know nothing" setting. */ 3309 3310 inline void 3311 ipa_polymorphic_call_context::clear_outer_type (tree otr_type) 3312 { 3313 outer_type = otr_type ? 
TYPE_MAIN_VARIANT (otr_type) : NULL; 3314 offset = 0; 3315 maybe_derived_type = true; 3316 maybe_in_construction = true; 3317 dynamic = true; 3318 } 3319 3320 /* Adjust all offsets in contexts by OFF bits. */ 3321 3322 inline void 3323 ipa_polymorphic_call_context::offset_by (HOST_WIDE_INT off) 3324 { 3325 if (outer_type) 3326 offset += off; 3327 if (speculative_outer_type) 3328 speculative_offset += off; 3329 } 3330 3331 /* Return TRUE if context is fully useless. */ 3332 3333 inline bool 3334 ipa_polymorphic_call_context::useless_p () const 3335 { 3336 return (!outer_type && !speculative_outer_type); 3337 } 3338 3339 /* Return true if NODE is local. Instrumentation clones are counted as local 3340 only when original function is local. */ 3341 3342 static inline bool 3343 cgraph_local_p (cgraph_node *node) 3344 { 3345 if (!node->instrumentation_clone || !node->instrumented_version) 3346 return node->local.local; 3347 3348 return node->local.local && node->instrumented_version->local.local; 3349 } 3350 3351 /* When using fprintf (or similar), problems can arise with 3352 transient generated strings. Many string-generation APIs 3353 only support one result being alive at once (e.g. by 3354 returning a pointer to a statically-allocated buffer). 3355 3356 If there is more than one generated string within one 3357 fprintf call: the first string gets evicted or overwritten 3358 by the second, before fprintf is fully evaluated. 3359 See e.g. PR/53136. 3360 3361 This function provides a workaround for this, by providing 3362 a simple way to create copies of these transient strings, 3363 without the need to have explicit cleanup: 3364 3365 fprintf (dumpfile, "string 1: %s string 2:%s\n", 3366 xstrdup_for_dump (EXPR_1), 3367 xstrdup_for_dump (EXPR_2)); 3368 3369 This is actually a simple wrapper around ggc_strdup, but 3370 the name documents the intent. We require that no GC can occur 3371 within the fprintf call. 
*/

/* Copy TRANSIENT_STR into GC-managed storage (ggc_strdup) so the copy
   outlives the transient buffer it came from; see the comment above.  */
static inline const char *
xstrdup_for_dump (const char *transient_str)
{
  return ggc_strdup (transient_str);
}

#endif  /* GCC_CGRAPH_H  */