/* Basic IPA optimizations and utilities.
   Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "cgraph.h"
#include "tree-pass.h"
#include "timevar.h"
#include "gimple.h"
#include "ggc.h"
#include "flags.h"
#include "pointer-set.h"
#include "target.h"
#include "tree-iterator.h"
#include "ipa-utils.h"

/* Look for all functions inlined to NODE and update their inlined_to pointers
   to INLINED_TO.  Recurses through the callee edges; only callees that are
   themselves inline clones (global.inlined_to non-NULL) are rewritten.  */

static void
update_inlined_to_pointer (struct cgraph_node *node, struct cgraph_node *inlined_to)
{
  struct cgraph_edge *e;
  for (e = node->callees; e; e = e->next_callee)
    if (e->callee->global.inlined_to)
      {
	e->callee->global.inlined_to = inlined_to;
	update_inlined_to_pointer (e->callee, inlined_to);
      }
}

/* Add cgraph NODE to queue starting at FIRST.

   The queue is linked via AUX pointers and terminated by pointer to 1.
   We enqueue nodes at two occasions: when we find them reachable or when we find
   their bodies needed for further cloning.  In the second case we mark them
   by pointer to 2 after processing so they are re-queued when they become
   reachable.
*/

static void
enqueue_cgraph_node (struct cgraph_node *node, struct cgraph_node **first)
{
  /* Node is still in queue; do nothing.  */
  if (node->aux && node->aux != (void *) 2)
    return;
  /* Node was already processed as unreachable, re-enqueue
     only if it became reachable now.  */
  if (node->aux == (void *)2 && !node->reachable)
    return;
  node->aux = *first;
  *first = node;
}

/* Add varpool NODE to queue starting at FIRST.  Unlike cgraph nodes,
   varpool nodes are enqueued at most once, so no marker values are used.  */

static void
enqueue_varpool_node (struct varpool_node *node, struct varpool_node **first)
{
  node->aux = *first;
  *first = node;
}

/* Process all references in LIST, marking the referred cgraph nodes reachable
   and the referred varpool nodes needed, and enqueueing them to the worklists
   FIRST and FIRST_VARPOOL respectively.  BEFORE_INLINING_P is true when this
   runs before inlining decisions were made; in that case bodies of external
   functions are still considered reachable since they may yet be inlined.  */

static void
process_references (struct ipa_ref_list *list,
		    struct cgraph_node **first,
		    struct varpool_node **first_varpool,
		    bool before_inlining_p)
{
  int i;
  struct ipa_ref *ref;
  for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
    {
      if (ref->refered_type == IPA_REF_CGRAPH)
	{
	  struct cgraph_node *node = ipa_ref_node (ref);
	  if (!node->reachable
	      && node->analyzed
	      && (!DECL_EXTERNAL (node->decl)
		  || before_inlining_p))
	    node->reachable = true;
	  /* Enqueue even when already reachable: bodies needed for further
	     cloning are tracked via the marker protocol in
	     enqueue_cgraph_node.  */
	  enqueue_cgraph_node (node, first);
	}
      else
	{
	  struct varpool_node *node = ipa_ref_varpool_node (ref);
	  if (!node->needed)
	    {
	      varpool_mark_needed_node (node);
	      enqueue_varpool_node (node, first_varpool);
	    }
	}
    }
}


/* Return true when NODE can not be local.  Worker for cgraph_local_node_p.  */

static bool
cgraph_non_local_node_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  /* FIXME: Aliases can be local, but i386 gets thunks wrong then.  */
  return !(cgraph_only_called_directly_or_aliased_p (node)
	   && !ipa_ref_has_aliases_p (&node->ref_list)
	   && node->analyzed
	   && !DECL_EXTERNAL (node->decl)
	   && !node->local.externally_visible
	   && !node->reachable_from_other_partition
	   && !node->in_other_partition);
}

/* Return true when function can be marked local.  */

static bool
cgraph_local_node_p (struct cgraph_node *node)
{
  struct cgraph_node *n = cgraph_function_or_thunk_node (node, NULL);

  /* FIXME: thunks can be considered local, but we need to prevent i386
     from attempting to change calling convention of them.  */
  if (n->thunk.thunk_p)
    return false;
  return !cgraph_for_node_and_aliases (n,
				       cgraph_non_local_node_p_1, NULL, true);

}

/* Return true when NODE has an ADDR reference (i.e. its address is taken).  */

static bool
has_addr_references_p (struct cgraph_node *node,
		       void *data ATTRIBUTE_UNUSED)
{
  int i;
  struct ipa_ref *ref;

  for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
    if (ref->use == IPA_REF_ADDR)
      return true;
  return false;
}

/* Perform reachability analysis and reclaim all unreachable nodes.
   If BEFORE_INLINING_P is true this function is called before inlining
   decisions have been made.  If BEFORE_INLINING_P is false this function also
   removes unneeded bodies of extern inline functions.
*/

bool
cgraph_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
{
  struct cgraph_node *first = (struct cgraph_node *) (void *) 1;
  struct varpool_node *first_varpool = (struct varpool_node *) (void *) 1;
  struct cgraph_node *node, *next;
  struct varpool_node *vnode, *vnext;
  bool changed = false;

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif
  if (file)
    fprintf (file, "\nReclaiming functions:");
#ifdef ENABLE_CHECKING
  /* AUX fields double as worklist links below; they must start out clear.  */
  for (node = cgraph_nodes; node; node = node->next)
    gcc_assert (!node->aux);
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    gcc_assert (!vnode->aux);
#endif
  varpool_reset_queue ();
  /* Mark functions whose bodies are obviously needed.
     This is mostly when they can be referenced externally.  Inline clones
     are special since their declarations are shared with master clone and thus
     cgraph_can_remove_if_no_direct_calls_and_refs_p should not be called on them.  */
  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed && !node->global.inlined_to
	&& (!cgraph_can_remove_if_no_direct_calls_and_refs_p (node)
	    /* Keep around virtual functions for possible devirtualization.  */
	    || (before_inlining_p && DECL_VIRTUAL_P (node->decl))))
      {
	gcc_assert (!node->global.inlined_to);
	enqueue_cgraph_node (node, &first);
	node->reachable = true;
      }
    else
      {
	gcc_assert (!node->aux);
	node->reachable = false;
      }

  /* Mark variables that are obviously needed.  */
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      vnode->next_needed = NULL;
      vnode->prev_needed = NULL;
      if ((vnode->analyzed || vnode->force_output)
	  && !varpool_can_remove_if_no_refs (vnode))
	{
	  /* Clear first so varpool_mark_needed_node re-links the node into
	     the needed queue.  */
	  vnode->needed = false;
	  varpool_mark_needed_node (vnode);
	  enqueue_varpool_node (vnode, &first_varpool);
	}
      else
	vnode->needed = false;
    }

  /* Perform reachability analysis.  As a special case do not consider
     extern inline functions not inlined as live because we won't output
     them at all.

     We maintain two worklists, one for cgraph nodes and other for varpool
     nodes; we are finished once both are empty.  */

  while (first != (struct cgraph_node *) (void *) 1
	 || first_varpool != (struct varpool_node *) (void *) 1)
    {
      if (first != (struct cgraph_node *) (void *) 1)
	{
	  struct cgraph_edge *e;
	  node = first;
	  first = (struct cgraph_node *) first->aux;
	  /* Pointer to 2 marks a processed-but-unreachable node; see
	     enqueue_cgraph_node.  */
	  if (!node->reachable)
	    node->aux = (void *)2;

	  /* If we found this node reachable, first mark on the callees
	     reachable too, unless they are direct calls to extern inline functions
	     we decided to not inline.  */
	  if (node->reachable)
	    {
	      for (e = node->callees; e; e = e->next_callee)
		{
		  if (!e->callee->reachable
		      && node->analyzed
		      && (!e->inline_failed
			  || !DECL_EXTERNAL (e->callee->decl)
			  || before_inlining_p))
		    e->callee->reachable = true;
		  enqueue_cgraph_node (e->callee, &first);
		}
	      process_references (&node->ref_list, &first, &first_varpool, before_inlining_p);
	    }

	  /* If any function in a comdat group is reachable, force
	     all other functions in the same comdat group to be
	     also reachable.  */
	  if (node->same_comdat_group
	      && node->reachable
	      && !node->global.inlined_to)
	    {
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->reachable)
		  {
		    next->reachable = true;
		    enqueue_cgraph_node (next, &first);
		  }
	    }

	  /* We can freely remove inline clones even if they are cloned, however if
	     function is clone of real clone, we must keep it around in order to
	     make materialize_clones produce function body with the changes
	     applied.  */
	  while (node->clone_of && !node->clone_of->aux
		 && !gimple_has_body_p (node->decl))
	    {
	      bool noninline = node->clone_of->decl != node->decl;
	      node = node->clone_of;
	      if (noninline && !node->reachable && !node->aux)
		{
		  enqueue_cgraph_node (node, &first);
		  break;
		}
	    }
	}
      if (first_varpool != (struct varpool_node *) (void *) 1)
	{
	  vnode = first_varpool;
	  first_varpool = (struct varpool_node *)first_varpool->aux;
	  vnode->aux = NULL;
	  process_references (&vnode->ref_list, &first, &first_varpool, before_inlining_p);
	  /* If any variable in a comdat group is reachable, force
	     all other variables in the same comdat group to be
	     also reachable.  */
	  if (vnode->same_comdat_group)
	    {
	      struct varpool_node *next;
	      for (next = vnode->same_comdat_group;
		   next != vnode;
		   next = next->same_comdat_group)
		if (!next->needed)
		  {
		    varpool_mark_needed_node (next);
		    enqueue_varpool_node (next, &first_varpool);
		  }
	    }
	}
    }

  /* Remove unreachable nodes.

     Completely unreachable functions can be fully removed from the callgraph.
     Extern inline functions that we decided to not inline need to become unanalyzed nodes of
     callgraph (so we still have edges to them).  We remove function body then.

     Also we need to take care of functions that are unreachable but we need to keep them around
     for later cloning.  In this case we also turn them into unanalyzed nodes, but
     keep the body around.  */
  for (node = cgraph_nodes; node; node = next)
    {
      next = node->next;
      /* Pointer-to-2 AUX: body was only needed for cloning and the node is
	 not itself reachable; drop callees and references.  */
      if (node->aux && !node->reachable)
	{
	  cgraph_node_remove_callees (node);
	  ipa_remove_all_references (&node->ref_list);
	  node->analyzed = false;
	}
      if (!node->aux)
	{
	  struct cgraph_edge *e;
	  bool found = false;
	  int i;
	  struct ipa_ref *ref;

	  node->global.inlined_to = NULL;
	  if (file)
	    fprintf (file, " %s", cgraph_node_name (node));
	  /* See if there is reachable caller.  */
	  for (e = node->callers; e && !found; e = e->next_caller)
	    if (e->caller->reachable)
	      found = true;
	  for (i = 0; (ipa_ref_list_refering_iterate (&node->ref_list, i, ref)
		       && !found); i++)
	    if (ref->refering_type == IPA_REF_CGRAPH
		&& ipa_ref_refering_node (ref)->reachable)
	      found = true;
	    else if (ref->refering_type == IPA_REF_VARPOOL
		     && ipa_ref_refering_varpool_node (ref)->needed)
	      found = true;

	  /* If so, we need to keep node in the callgraph.  */
	  if (found)
	    {
	      if (node->analyzed)
		{
		  struct cgraph_node *clone;

		  /* If there are still clones, we must keep body around.
		     Otherwise we can just remove the body but keep the clone.  */
		  for (clone = node->clones; clone;
		       clone = clone->next_sibling_clone)
		    if (clone->aux)
		      break;
		  if (!clone)
		    {
		      cgraph_release_function_body (node);
		      /* Unlink the node from the clone tree.  */
		      if (node->prev_sibling_clone)
			node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
		      else if (node->clone_of)
			node->clone_of->clones = node->next_sibling_clone;
		      if (node->next_sibling_clone)
			node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
		      if (node->clone_of)
			node->former_clone_of = node->clone_of->decl;
		      node->clone_of = NULL;
		      node->next_sibling_clone = NULL;
		      node->prev_sibling_clone = NULL;
		    }
		  else
		    gcc_assert (!clone->in_other_partition);
		  node->analyzed = false;
		  changed = true;
		  cgraph_node_remove_callees (node);
		  ipa_remove_all_references (&node->ref_list);
		}
	    }
	  else
	    {
	      cgraph_remove_node (node);
	      changed = true;
	    }
	}
    }
  for (node = cgraph_nodes; node; node = node->next)
    {
      /* Inline clones might be kept around so their materializing allows further
	 cloning.  If the function the clone is inlined into is removed, we need
	 to turn it into normal node.  */
      if (node->global.inlined_to
	  && !node->callers)
	{
	  gcc_assert (node->clones);
	  node->global.inlined_to = NULL;
	  update_inlined_to_pointer (node, node);
	}
      node->aux = NULL;
    }

  if (file)
    fprintf (file, "\n");

  /* We must release unused extern inlines or sanity checking will fail.  Rest of transformations
     are undesirable at -O0 since we do not want to remove anything.  */
  if (!optimize)
    return changed;

  if (file)
    fprintf (file, "Reclaiming variables:");
  for (vnode = varpool_nodes; vnode; vnode = vnext)
    {
      vnext = vnode->next;
      if (!vnode->needed)
	{
	  if (file)
	    fprintf (file, " %s", varpool_node_name (vnode));
	  varpool_remove_node (vnode);
	  changed = true;
	}
    }

  /* Now update address_taken flags and try to promote functions to be local.  */

  if (file)
    fprintf (file, "\nClearing address taken flags:");
  for (node = cgraph_nodes; node; node = node->next)
    if (node->address_taken
	&& !node->reachable_from_other_partition)
      {
	if (!cgraph_for_node_and_aliases (node, has_addr_references_p, NULL, true))
	  {
	    if (file)
	      fprintf (file, " %s", cgraph_node_name (node));
	    node->address_taken = false;
	    changed = true;
	    if (cgraph_local_node_p (node))
	      {
		node->local.local = true;
		if (file)
		  fprintf (file, " (local)");
	      }
	  }
      }
  if (file)
    fprintf (file, "\n");

#ifdef ENABLE_CHECKING
  verify_cgraph ();
#endif

  /* Reclaim alias pairs for functions that have disappeared from the
     call graph.  */
  remove_unreachable_alias_pairs ();

  return changed;
}

/* Discover variables that have no longer address taken or that are read only
   and update their flags.

   FIXME: This can not be done in between gimplify and omp_expand since
   readonly flag plays role on what is shared and what is not.  Currently we do
   this transformation as part of whole program visibility and re-do at
   ipa-reference pass (to take into account cloning), but it would
   make sense to do it before early optimizations.
*/ 483 484 void 485 ipa_discover_readonly_nonaddressable_vars (void) 486 { 487 struct varpool_node *vnode; 488 if (dump_file) 489 fprintf (dump_file, "Clearing variable flags:"); 490 for (vnode = varpool_nodes; vnode; vnode = vnode->next) 491 if (vnode->finalized && varpool_all_refs_explicit_p (vnode) 492 && (TREE_ADDRESSABLE (vnode->decl) || !TREE_READONLY (vnode->decl))) 493 { 494 bool written = false; 495 bool address_taken = false; 496 int i; 497 struct ipa_ref *ref; 498 for (i = 0; ipa_ref_list_refering_iterate (&vnode->ref_list, i, ref) 499 && (!written || !address_taken); i++) 500 switch (ref->use) 501 { 502 case IPA_REF_ADDR: 503 address_taken = true; 504 break; 505 case IPA_REF_LOAD: 506 break; 507 case IPA_REF_STORE: 508 written = true; 509 break; 510 } 511 if (TREE_ADDRESSABLE (vnode->decl) && !address_taken) 512 { 513 if (dump_file) 514 fprintf (dump_file, " %s (addressable)", varpool_node_name (vnode)); 515 TREE_ADDRESSABLE (vnode->decl) = 0; 516 } 517 if (!TREE_READONLY (vnode->decl) && !address_taken && !written 518 /* Making variable in explicit section readonly can cause section 519 type conflict. 520 See e.g. gcc.c-torture/compile/pr23237.c */ 521 && DECL_SECTION_NAME (vnode->decl) == NULL) 522 { 523 if (dump_file) 524 fprintf (dump_file, " %s (read-only)", varpool_node_name (vnode)); 525 TREE_READONLY (vnode->decl) = 1; 526 } 527 } 528 if (dump_file) 529 fprintf (dump_file, "\n"); 530 } 531 532 /* Return true when there is a reference to node and it is not vtable. 
*/ 533 static bool 534 cgraph_address_taken_from_non_vtable_p (struct cgraph_node *node) 535 { 536 int i; 537 struct ipa_ref *ref; 538 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++) 539 if (ref->use == IPA_REF_ADDR) 540 { 541 struct varpool_node *node; 542 if (ref->refering_type == IPA_REF_CGRAPH) 543 return true; 544 node = ipa_ref_refering_varpool_node (ref); 545 if (!DECL_VIRTUAL_P (node->decl)) 546 return true; 547 } 548 return false; 549 } 550 551 /* COMDAT functions must be shared only if they have address taken, 552 otherwise we can produce our own private implementation with 553 -fwhole-program. 554 Return true when turning COMDAT functoin static can not lead to wrong 555 code when the resulting object links with a library defining same COMDAT. 556 557 Virtual functions do have their addresses taken from the vtables, 558 but in C++ there is no way to compare their addresses for equality. */ 559 560 bool 561 cgraph_comdat_can_be_unshared_p (struct cgraph_node *node) 562 { 563 if ((cgraph_address_taken_from_non_vtable_p (node) 564 && !DECL_VIRTUAL_P (node->decl)) 565 || !node->analyzed) 566 return false; 567 if (node->same_comdat_group) 568 { 569 struct cgraph_node *next; 570 571 /* If more than one function is in the same COMDAT group, it must 572 be shared even if just one function in the comdat group has 573 address taken. */ 574 for (next = node->same_comdat_group; 575 next != node; next = next->same_comdat_group) 576 if (cgraph_address_taken_from_non_vtable_p (next) 577 && !DECL_VIRTUAL_P (next->decl)) 578 return false; 579 } 580 return true; 581 } 582 583 /* Return true when function NODE should be considered externally visible. 
*/

static bool
cgraph_externally_visible_p (struct cgraph_node *node,
			     bool whole_program, bool aliased)
{
  if (!node->local.finalized)
    return false;
  if (!DECL_COMDAT (node->decl)
      && (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl)))
    return false;

  /* Do not even try to be smart about aliased nodes.  Until we properly
     represent everything by same body alias, these are just evil.  */
  if (aliased)
    return true;

  /* Do not try to localize built-in functions yet.  One of problems is that we
     end up mangling their asm for WHOPR that makes it impossible to call them
     using the implicit built-in declarations anymore.  Similarly this enables
     us to remove them as unreachable before actual calls may appear during
     expansion or folding.  */
  if (DECL_BUILT_IN (node->decl))
    return true;

  /* If linker counts on us, we must preserve the function.  */
  if (cgraph_used_from_object_file_p (node))
    return true;
  if (DECL_PRESERVE_P (node->decl))
    return true;
  if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (node->decl)))
    return true;
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
      && lookup_attribute ("dllexport", DECL_ATTRIBUTES (node->decl)))
    return true;
  if (node->resolution == LDPR_PREVAILING_DEF_IRONLY)
    return false;
  /* When doing LTO or whole program, we can make COMDAT functions static.
     This improves code quality and we know we will duplicate them at most twice
     (in the case that we are not using plugin and link with object file
      implementing same COMDAT)  */
  if ((in_lto_p || whole_program)
      && DECL_COMDAT (node->decl)
      && cgraph_comdat_can_be_unshared_p (node))
    return false;

  /* When doing link time optimizations, hidden symbols become local.  */
  if (in_lto_p
      && (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
	  || DECL_VISIBILITY (node->decl) == VISIBILITY_INTERNAL)
      /* Be sure that node is defined in IR file, not in other object
	 file.  In that case we don't set used_from_other_object_file.  */
      && node->analyzed)
    ;
  else if (!whole_program)
    return true;

  if (MAIN_NAME_P (DECL_NAME (node->decl)))
    return true;

  return false;
}

/* Return true when variable VNODE should be considered externally visible.  */

bool
varpool_externally_visible_p (struct varpool_node *vnode, bool aliased)
{
  if (!DECL_COMDAT (vnode->decl) && !TREE_PUBLIC (vnode->decl))
    return false;

  /* Do not even try to be smart about aliased nodes.  Until we properly
     represent everything by same body alias, these are just evil.  */
  if (aliased)
    return true;

  /* If linker counts on us, we must preserve the function.  */
  if (varpool_used_from_object_file_p (vnode))
    return true;

  if (DECL_HARD_REGISTER (vnode->decl))
    return true;
  if (DECL_PRESERVE_P (vnode->decl))
    return true;
  if (lookup_attribute ("externally_visible",
			DECL_ATTRIBUTES (vnode->decl)))
    return true;
  if (TARGET_DLLIMPORT_DECL_ATTRIBUTES
      && lookup_attribute ("dllexport",
			   DECL_ATTRIBUTES (vnode->decl)))
    return true;

  /* See if we have linker information about symbol not being used or
     if we need to make guess based on the declaration.

     Even if the linker claims the symbol is unused, never bring internal
     symbols that are declared by user as used or externally visible.
     This is needed for i.e. references from asm statements.  */
  if (varpool_used_from_object_file_p (vnode))
    return true;
  if (vnode->resolution == LDPR_PREVAILING_DEF_IRONLY)
    return false;

  /* As a special case, the COMDAT virtual tables can be unshared.
     In LTO mode turn vtables into static variables.  The variable is readonly,
     so this does not enable more optimization, but referring static var
     is faster for dynamic linking.  Also this matches the logic hiding
     vtables from LTO symbol tables.  */
  if ((in_lto_p || flag_whole_program)
      && !vnode->force_output
      && DECL_COMDAT (vnode->decl) && DECL_VIRTUAL_P (vnode->decl))
    return false;

  /* When doing link time optimizations, hidden symbols become local.  */
  if (in_lto_p
      && (DECL_VISIBILITY (vnode->decl) == VISIBILITY_HIDDEN
	  || DECL_VISIBILITY (vnode->decl) == VISIBILITY_INTERNAL)
      /* Be sure that node is defined in IR file, not in other object
	 file.  In that case we don't set used_from_other_object_file.  */
      && vnode->finalized)
    ;
  else if (!flag_whole_program)
    return true;

  /* Do not attempt to privatize COMDATS by default.
     This would break linking with C++ libraries sharing
     inline definitions.

     FIXME: We can do so for readonly vars with no address taken and
     possibly also for vtables since no direct pointer comparison is done.
     It might be interesting to do so to reduce linking overhead.  */
  if (DECL_COMDAT (vnode->decl) || DECL_WEAK (vnode->decl))
    return true;
  return false;
}

/* Dissolve the same_comdat_group list in which NODE resides.
   Clears the same_comdat_group pointer of every member of the circular
   list, NODE included.  */

static void
dissolve_same_comdat_group_list (struct cgraph_node *node)
{
  struct cgraph_node *n = node, *next;
  do
    {
      next = n->same_comdat_group;
      n->same_comdat_group = NULL;
      n = next;
    }
  while (n != node);
}

/* Mark visibility of all functions.

   A local function is one whose calls can occur only in the current
   compilation unit and all its calls are explicit, so we can change
   its calling convention.  We simply mark all static functions whose
   address is not taken as local.
   We also change the TREE_PUBLIC flag of all declarations that are public
   in language point of view but we want to overwrite this default
   via visibilities for the backend point of view.  */

static unsigned int
function_and_variable_visibility (bool whole_program)
{
  struct cgraph_node *node;
  struct varpool_node *vnode;
  struct pointer_set_t *aliased_nodes = pointer_set_create ();
  struct pointer_set_t *aliased_vnodes = pointer_set_create ();
  unsigned i;
  alias_pair *p;

  /* Discover aliased nodes.  */
  FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
    {
      if (dump_file)
	fprintf (dump_file, "Alias %s->%s",
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
		 IDENTIFIER_POINTER (p->target));

      if ((node = cgraph_node_for_asm (p->target)) != NULL
	  && !DECL_EXTERNAL (node->decl))
	{
	  if (!node->analyzed)
	    continue;
	  cgraph_mark_needed_node (node);
	  gcc_assert (node->needed);
	  pointer_set_insert (aliased_nodes, node);
	  if (dump_file)
	    fprintf (dump_file, " node %s/%i",
		     cgraph_node_name (node), node->uid);
	}
      else if ((vnode = varpool_node_for_asm (p->target)) != NULL
	       && !DECL_EXTERNAL (vnode->decl))
	{
	  varpool_mark_needed_node (vnode);
	  gcc_assert (vnode->needed);
	  pointer_set_insert (aliased_vnodes, vnode);
	  if (dump_file)
	    fprintf (dump_file, " varpool node %s",
		     varpool_node_name (vnode));
	}
      if (dump_file)
	fprintf (dump_file, "\n");
    }

  for (node = cgraph_nodes; node; node = node->next)
    {
      int flags = flags_from_decl_or_type (node->decl);

      /* Optimize away PURE and CONST constructors and destructors.  */
      if (optimize
	  && (flags & (ECF_CONST | ECF_PURE))
	  && !(flags & ECF_LOOPING_CONST_OR_PURE))
	{
	  DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
	  DECL_STATIC_DESTRUCTOR (node->decl) = 0;
	}

      /* Frontends and alias code mark nodes as needed before parsing is
	 finished.  We may end up marking as needed external nodes, where
	 this flag is meaningless; strip it.  */
      if (node->needed
	  && (DECL_EXTERNAL (node->decl) || !node->analyzed))
	node->needed = 0;

      /* C++ FE on lack of COMDAT support create local COMDAT functions
	 (that ought to be shared but can not due to object format
	  limitations).  It is necessary to keep the flag to make rest of
	  C++ FE happy.  Clear the flag here to avoid confusion in
	  middle-end.  */
      if (DECL_COMDAT (node->decl) && !TREE_PUBLIC (node->decl))
	DECL_COMDAT (node->decl) = 0;
      /* For external decls stop tracking same_comdat_group, it doesn't matter
	 what comdat group they are in when they won't be emitted in this TU,
	 and simplifies later passes.  */
      if (node->same_comdat_group && DECL_EXTERNAL (node->decl))
	{
#ifdef ENABLE_CHECKING
	  struct cgraph_node *n;

	  for (n = node->same_comdat_group;
	       n != node;
	       n = n->same_comdat_group)
	    /* If at least one of same comdat group functions is external,
	       all of them have to be, otherwise it is a front-end bug.  */
	    gcc_assert (DECL_EXTERNAL (n->decl));
#endif
	  dissolve_same_comdat_group_list (node);
	}
      gcc_assert ((!DECL_WEAK (node->decl) && !DECL_COMDAT (node->decl))
		  || TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl));
      if (cgraph_externally_visible_p (node, whole_program,
				       pointer_set_contains (aliased_nodes,
							     node)))
	{
	  gcc_assert (!node->global.inlined_to);
	  node->local.externally_visible = true;
	}
      else
	node->local.externally_visible = false;
      if (!node->local.externally_visible && node->analyzed
	  && !DECL_EXTERNAL (node->decl))
	{
	  gcc_assert (whole_program || in_lto_p || !TREE_PUBLIC (node->decl));
	  cgraph_make_decl_local (node->decl);
	  node->resolution = LDPR_PREVAILING_DEF_IRONLY;
	  if (node->same_comdat_group)
	    /* cgraph_externally_visible_p has already checked all other nodes
	       in the group and they will all be made local.  We need to
	       dissolve the group at once so that the predicate does not
	       segfault though.  */
	    dissolve_same_comdat_group_list (node);
	}

      if (node->thunk.thunk_p
	  && TREE_PUBLIC (node->decl))
	{
	  struct cgraph_node *decl_node = node;

	  decl_node = cgraph_function_node (decl_node->callees->callee, NULL);

	  /* Thunks have the same visibility as function they are attached to.
	     Make sure the C++ front end set this up properly.  */
	  if (DECL_ONE_ONLY (decl_node->decl))
	    {
	      gcc_checking_assert (DECL_COMDAT (node->decl)
				   == DECL_COMDAT (decl_node->decl));
	      gcc_checking_assert (DECL_COMDAT_GROUP (node->decl)
				   == DECL_COMDAT_GROUP (decl_node->decl));
	      gcc_checking_assert (node->same_comdat_group);
	    }
	  if (DECL_EXTERNAL (decl_node->decl))
	    DECL_EXTERNAL (node->decl) = 1;
	}
    }
  for (node = cgraph_nodes; node; node = node->next)
    node->local.local = cgraph_local_node_p (node);
  for (vnode = varpool_nodes; vnode; vnode = vnode->next)
    {
      /* weak flag makes no sense on local variables.  */
      gcc_assert (!DECL_WEAK (vnode->decl)
		  || TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl));
      /* In several cases declarations can not be common:

	 - when declaration has initializer
	 - when it is in weak
	 - when it has specific section
	 - when it resides in non-generic address space.
	 - if declaration is local, it will get into .local common section
	   so common flag is not needed.  Frontends still produce these in
	   certain cases, such as for:

	     static int a __attribute__ ((common))

	 Canonicalize things here and clear the redundant flag.  */
      if (DECL_COMMON (vnode->decl)
	  && (!(TREE_PUBLIC (vnode->decl) || DECL_EXTERNAL (vnode->decl))
	      || (DECL_INITIAL (vnode->decl)
		  && DECL_INITIAL (vnode->decl) != error_mark_node)
	      || DECL_WEAK (vnode->decl)
	      || DECL_SECTION_NAME (vnode->decl) != NULL
	      || ! (ADDR_SPACE_GENERIC_P
		    (TYPE_ADDR_SPACE (TREE_TYPE (vnode->decl))))))
	DECL_COMMON (vnode->decl) = 0;
    }
  for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
    {
      if (!vnode->finalized)
	continue;
      if (vnode->needed
	  && varpool_externally_visible_p
	      (vnode,
	       pointer_set_contains (aliased_vnodes, vnode)))
	vnode->externally_visible = true;
      else
	vnode->externally_visible = false;
      if (!vnode->externally_visible)
	{
	  gcc_assert (in_lto_p || whole_program || !TREE_PUBLIC (vnode->decl));
	  cgraph_make_decl_local (vnode->decl);
	  vnode->resolution = LDPR_PREVAILING_DEF_IRONLY;
	}
      gcc_assert (TREE_STATIC (vnode->decl));
    }
  pointer_set_destroy (aliased_nodes);
  pointer_set_destroy (aliased_vnodes);

  if (dump_file)
    {
      fprintf (dump_file, "\nMarking local functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.local)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible functions:");
      for (node = cgraph_nodes; node; node = node->next)
	if (node->local.externally_visible)
	  fprintf (dump_file, " %s", cgraph_node_name (node));
      fprintf (dump_file, "\n\n");
      fprintf (dump_file, "\nMarking externally visible variables:");
      for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed)
	if (vnode->externally_visible)
	  fprintf (dump_file, " %s", varpool_node_name (vnode));
      fprintf (dump_file, "\n\n");
    }
  cgraph_function_flags_ready = true;
  return 0;
}

/* Local function pass handling visibilities.  This happens before LTO streaming
   so in particular -fwhole-program should be ignored at this level.
*/ 954 955 static unsigned int 956 local_function_and_variable_visibility (void) 957 { 958 return function_and_variable_visibility (flag_whole_program && !flag_lto); 959 } 960 961 struct simple_ipa_opt_pass pass_ipa_function_and_variable_visibility = 962 { 963 { 964 SIMPLE_IPA_PASS, 965 "visibility", /* name */ 966 NULL, /* gate */ 967 local_function_and_variable_visibility,/* execute */ 968 NULL, /* sub */ 969 NULL, /* next */ 970 0, /* static_pass_number */ 971 TV_CGRAPHOPT, /* tv_id */ 972 0, /* properties_required */ 973 0, /* properties_provided */ 974 0, /* properties_destroyed */ 975 0, /* todo_flags_start */ 976 TODO_remove_functions | TODO_dump_cgraph 977 | TODO_ggc_collect /* todo_flags_finish */ 978 } 979 }; 980 981 /* Do not re-run on ltrans stage. */ 982 983 static bool 984 gate_whole_program_function_and_variable_visibility (void) 985 { 986 return !flag_ltrans; 987 } 988 989 /* Bring functionss local at LTO time whith -fwhole-program. */ 990 991 static unsigned int 992 whole_program_function_and_variable_visibility (void) 993 { 994 struct cgraph_node *node; 995 struct varpool_node *vnode; 996 997 function_and_variable_visibility (flag_whole_program); 998 999 for (node = cgraph_nodes; node; node = node->next) 1000 if ((node->local.externally_visible && !DECL_COMDAT (node->decl)) 1001 && node->local.finalized) 1002 cgraph_mark_needed_node (node); 1003 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed) 1004 if (vnode->externally_visible && !DECL_COMDAT (vnode->decl)) 1005 varpool_mark_needed_node (vnode); 1006 if (dump_file) 1007 { 1008 fprintf (dump_file, "\nNeeded variables:"); 1009 for (vnode = varpool_nodes_queue; vnode; vnode = vnode->next_needed) 1010 if (vnode->needed) 1011 fprintf (dump_file, " %s", varpool_node_name (vnode)); 1012 fprintf (dump_file, "\n\n"); 1013 } 1014 if (optimize) 1015 ipa_discover_readonly_nonaddressable_vars (); 1016 return 0; 1017 } 1018 1019 struct ipa_opt_pass_d pass_ipa_whole_program_visibility = 1020 
{
 {
  IPA_PASS,
  "whole-program",			/* name */
  gate_whole_program_function_and_variable_visibility,/* gate */
  whole_program_function_and_variable_visibility,/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_remove_functions | TODO_dump_cgraph
  | TODO_ggc_collect			/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL,					/* variable_transform */
};


/* Simple ipa profile pass propagating frequencies across the callgraph.

   Walk the callgraph in reverse postorder, letting
   cgraph_propagate_frequency recompute the frequency of each local
   function from its callers, and iterate to a fixed point.  Node AUX
   pointers are used as a "needs revisiting" flag and are cleared before
   the pass finishes.  */

static unsigned int
ipa_profile (void)
{
  struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
  struct cgraph_edge *e;
  int order_pos;
  bool something_changed = false;
  int i;

  order_pos = ipa_reverse_postorder (order);
  /* First sweep: propagate once; when a node's frequency changed, flag
     its local callees for revisiting.  */
  for (i = order_pos - 1; i >= 0; i--)
    {
      if (order[i]->local.local && cgraph_propagate_frequency (order[i]))
	{
	  for (e = order[i]->callees; e; e = e->next_callee)
	    if (e->callee->local.local && !e->callee->aux)
	      {
		something_changed = true;
		e->callee->aux = (void *)1;
	      }
	}
      order[i]->aux = NULL;
    }

  /* Iterate until no frequencies change, revisiting only flagged
     nodes.  */
  while (something_changed)
    {
      something_changed = false;
      for (i = order_pos - 1; i >= 0; i--)
	{
	  if (order[i]->aux && cgraph_propagate_frequency (order[i]))
	    {
	      for (e = order[i]->callees; e; e = e->next_callee)
		if (e->callee->local.local && !e->callee->aux)
		  {
		    something_changed = true;
		    e->callee->aux = (void *)1;
		  }
	    }
	  order[i]->aux =
NULL; 1090 } 1091 } 1092 free (order); 1093 return 0; 1094 } 1095 1096 static bool 1097 gate_ipa_profile (void) 1098 { 1099 return flag_ipa_profile; 1100 } 1101 1102 struct ipa_opt_pass_d pass_ipa_profile = 1103 { 1104 { 1105 IPA_PASS, 1106 "profile_estimate", /* name */ 1107 gate_ipa_profile, /* gate */ 1108 ipa_profile, /* execute */ 1109 NULL, /* sub */ 1110 NULL, /* next */ 1111 0, /* static_pass_number */ 1112 TV_IPA_PROFILE, /* tv_id */ 1113 0, /* properties_required */ 1114 0, /* properties_provided */ 1115 0, /* properties_destroyed */ 1116 0, /* todo_flags_start */ 1117 0 /* todo_flags_finish */ 1118 }, 1119 NULL, /* generate_summary */ 1120 NULL, /* write_summary */ 1121 NULL, /* read_summary */ 1122 NULL, /* write_optimization_summary */ 1123 NULL, /* read_optimization_summary */ 1124 NULL, /* stmt_fixup */ 1125 0, /* TODOs */ 1126 NULL, /* function_transform */ 1127 NULL /* variable_transform */ 1128 }; 1129 1130 /* Generate and emit a static constructor or destructor. WHICH must 1131 be one of 'I' (for a constructor) or 'D' (for a destructor). BODY 1132 is a STATEMENT_LIST containing GENERIC statements. PRIORITY is the 1133 initialization priority for this constructor or destructor. 1134 1135 FINAL specify whether the externally visible name for collect2 should 1136 be produced. */ 1137 1138 static void 1139 cgraph_build_static_cdtor_1 (char which, tree body, int priority, bool final) 1140 { 1141 static int counter = 0; 1142 char which_buf[16]; 1143 tree decl, name, resdecl; 1144 1145 /* The priority is encoded in the constructor or destructor name. 1146 collect2 will sort the names and arrange that they are called at 1147 program startup. */ 1148 if (final) 1149 sprintf (which_buf, "%c_%.5d_%d", which, priority, counter++); 1150 else 1151 /* Proudce sane name but one not recognizable by collect2, just for the 1152 case we fail to inline the function. 
 */
  sprintf (which_buf, "sub_%c_%.5d_%d", which, priority, counter++);
  name = get_file_function_name (which_buf);

  /* Build a void fn(void) FUNCTION_DECL with the generated name.  */
  decl = build_decl (input_location, FUNCTION_DECL, name,
		     build_function_type_list (void_type_node, NULL_TREE));
  current_function_decl = decl;

  resdecl = build_decl (input_location,
			RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (resdecl) = 1;
  DECL_RESULT (decl) = resdecl;
  DECL_CONTEXT (resdecl) = decl;

  allocate_struct_function (decl, false);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (decl) = 1;
  DECL_SAVED_TREE (decl) = body;
  /* Without target .ctors/.dtors support the final function must stay
     visible so collect2 can find it by name.  */
  if (!targetm.have_ctors_dtors && final)
    {
      TREE_PUBLIC (decl) = 1;
      DECL_PRESERVE_P (decl) = 1;
    }
  DECL_UNINLINABLE (decl) = 1;

  DECL_INITIAL (decl) = make_node (BLOCK);
  TREE_USED (DECL_INITIAL (decl)) = 1;

  DECL_SOURCE_LOCATION (decl) = input_location;
  cfun->function_end_locus = input_location;

  /* Record the cdtor kind and its priority on the decl.  */
  switch (which)
    {
    case 'I':
      DECL_STATIC_CONSTRUCTOR (decl) = 1;
      decl_init_priority_insert (decl, priority);
      break;
    case 'D':
      DECL_STATIC_DESTRUCTOR (decl) = 1;
      decl_fini_priority_insert (decl, priority);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_function_tree (decl);

  cgraph_add_new_function (decl, false);

  set_cfun (NULL);
  current_function_decl = NULL;
}

/* Generate and emit a static constructor or destructor.  WHICH must
   be one of 'I' (for a constructor) or 'D' (for a destructor).  BODY
   is a STATEMENT_LIST containing GENERIC statements.  PRIORITY is the
   initialization priority for this constructor or destructor.
 */

void
cgraph_build_static_cdtor (char which, tree body, int priority)
{
  cgraph_build_static_cdtor_1 (which, body, priority, false);
}

/* A vector of FUNCTION_DECLs declared as static constructors.  */
static VEC(tree, heap) *static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors.  */
static VEC(tree, heap) *static_dtors;

/* When target does not have ctors and dtors, we call all constructor
   and destructor by special initialization/destruction function
   recognized by collect2.

   When we are going to build this function, collect all constructors and
   destructors and turn them into normal functions.  */

static void
record_cdtor_fn (struct cgraph_node *node)
{
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    VEC_safe_push (tree, heap, static_ctors, node->decl);
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    VEC_safe_push (tree, heap, static_dtors, node->decl);
  /* The calls to these functions are emitted from a single wrapper, so
     always allow inlining them into it.  */
  node = cgraph_get_node (node->decl);
  DECL_DISREGARD_INLINE_LIMITS (node->decl) = 1;
}

/* Define global constructor/destructor functions for the vector CDTORS,
   which is sorted by initialization priority.  If CTOR_P is true, these
   are constructors; otherwise, they are destructors.  */

static void
build_cdtor (bool ctor_p, VEC (tree, heap) *cdtors)
{
  size_t i,j;
  size_t len = VEC_length (tree, cdtors);

  /* Process CDTORS in batches sharing the same priority; I marks the
     start of the current batch, J one past its end.  */
  i = 0;
  while (i < len)
    {
      tree body;
      tree fn;
      priority_type priority;

      priority = 0;
      body = NULL_TREE;
      j = i;
      /* Advance J over the run of entries with the batch's priority.  */
      do
	{
	  priority_type p;
	  fn = VEC_index (tree, cdtors, j);
	  p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
	  if (j == i)
	    priority = p;
	  else if (p != priority)
	    break;
	  j++;
	}
      while (j < len);

      /* When there is only one cdtor and target supports them, do nothing.
 */
      if (j == i + 1
	  && targetm.have_ctors_dtors)
	{
	  i++;
	  continue;
	}
      /* Find the next batch of constructors/destructors with the same
	 initialization priority.  */
      for (;i < j; i++)
	{
	  tree call;
	  fn = VEC_index (tree, cdtors, i);
	  call = build_call_expr (fn, 0);
	  /* The decl stops being a cdtor itself; it is now called from
	     the merged wrapper instead.  */
	  if (ctor_p)
	    DECL_STATIC_CONSTRUCTOR (fn) = 0;
	  else
	    DECL_STATIC_DESTRUCTOR (fn) = 0;
	  /* We do not want to optimize away pure/const calls here.
	     When optimizing, these should be already removed, when not
	     optimizing, we want user to be able to breakpoint in them.  */
	  TREE_SIDE_EFFECTS (call) = 1;
	  append_to_statement_list (call, &body);
	}
      gcc_assert (body != NULL_TREE);
      /* Generate a function to call all the function of like
	 priority.  */
      cgraph_build_static_cdtor_1 (ctor_p ? 'I' : 'D', body, priority, true);
    }
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static constructors.  DECL_INIT_PRIORITY is
   used to determine the sort order.  */

static int
compare_ctor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_INIT_PRIORITY (f1);
  priority2 = DECL_INIT_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  Constructors are executed in backward
       order to make LTO initialize libraries first.  */
    return DECL_UID (f2) - DECL_UID (f1);
}

/* Comparison function for qsort.  P1 and P2 are actually of type
   "tree *" and point to static destructors.  DECL_FINI_PRIORITY is
   used to determine the sort order.
 */

static int
compare_dtor (const void *p1, const void *p2)
{
  tree f1;
  tree f2;
  int priority1;
  int priority2;

  f1 = *(const tree *)p1;
  f2 = *(const tree *)p2;
  priority1 = DECL_FINI_PRIORITY (f1);
  priority2 = DECL_FINI_PRIORITY (f2);

  if (priority1 < priority2)
    return -1;
  else if (priority1 > priority2)
    return 1;
  else
    /* Ensure a stable sort.  */
    return DECL_UID (f1) - DECL_UID (f2);
}

/* Generate functions to call static constructors and destructors
   for targets that do not support .ctors/.dtors sections.  These
   functions have magic names which are detected by collect2.  */

static void
build_cdtor_fns (void)
{
  if (!VEC_empty (tree, static_ctors))
    {
      /* With target cdtor support this should only happen in LTO mode,
	 where merging is still worthwhile.  */
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      VEC_qsort (tree, static_ctors, compare_ctor);
      build_cdtor (/*ctor_p=*/true, static_ctors);
    }

  if (!VEC_empty (tree, static_dtors))
    {
      gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
      VEC_qsort (tree, static_dtors, compare_dtor);
      build_cdtor (/*ctor_p=*/false, static_dtors);
    }
}

/* Look for constructors and destructors and produce function calling them.
   This is needed for targets not supporting ctors or dtors, but we perform the
   transformation also at linktime to merge possibly numerous
   constructors/destructors into single function to improve code locality and
   reduce size.
 */

static unsigned int
ipa_cdtor_merge (void)
{
  struct cgraph_node *node;
  /* Collect every analyzed static constructor/destructor, then emit the
     merged wrapper functions and release the scratch vectors.  */
  for (node = cgraph_nodes; node; node = node->next)
    if (node->analyzed
	&& (DECL_STATIC_CONSTRUCTOR (node->decl)
	    || DECL_STATIC_DESTRUCTOR (node->decl)))
       record_cdtor_fn (node);
  build_cdtor_fns ();
  VEC_free (tree, heap, static_ctors);
  VEC_free (tree, heap, static_dtors);
  return 0;
}

/* Perform the pass when we have no ctors/dtors support
   or at LTO time to merge multiple constructors into single
   function.  */

static bool
gate_ipa_cdtor_merge (void)
{
  return !targetm.have_ctors_dtors || (optimize && in_lto_p);
}

struct ipa_opt_pass_d pass_ipa_cdtor_merge =
{
 {
  IPA_PASS,
  "cdtor",				/* name */
  gate_ipa_cdtor_merge,			/* gate */
  ipa_cdtor_merge,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_CGRAPHOPT,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 },
 NULL,					/* generate_summary */
 NULL,					/* write_summary */
 NULL,					/* read_summary */
 NULL,					/* write_optimization_summary */
 NULL,					/* read_optimization_summary */
 NULL,					/* stmt_fixup */
 0,					/* TODOs */
 NULL,					/* function_transform */
 NULL					/* variable_transform */
};