/* LTO partitioning logic routines.
   Copyright (C) 2009-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "basic-block.h"
#include "tree.h"
#include "gimple.h"
#include "alloc-pool.h"
#include "stringpool.h"
#include "cgraph.h"
#include "lto-streamer.h"
#include "params.h"
#include "symbol-summary.h"
#include "tree-vrp.h"
#include "ipa-prop.h"
#include "ipa-fnsummary.h"
#include "lto-partition.h"

/* All LTRANS partitions created so far; each element is owned by this
   vector and released by free_ltrans_partitions.  */
vec<ltrans_partition> ltrans_partitions;

static void add_symbol_to_partition (ltrans_partition part, symtab_node *node);


/* Create new empty partition with name NAME and register it in
   LTRANS_PARTITIONS.  NAME is not copied; the caller must keep the
   string live for the lifetime of the partition.  */

static ltrans_partition
new_partition (const char *name)
{
  ltrans_partition part = XCNEW (struct ltrans_partition_def);
  part->encoder = lto_symtab_encoder_new (false);
  part->name = name;
  part->insns = 0;
  part->symbols = 0;
  ltrans_partitions.safe_push (part);
  return part;
}

/* Free memory used by ltrans datastructures.
 */

void
free_ltrans_partitions (void)
{
  unsigned int idx;
  ltrans_partition part;
  for (idx = 0; ltrans_partitions.iterate (idx, &part); idx++)
    {
      if (part->initializers_visited)
	delete part->initializers_visited;
      /* Symtab encoder is freed after streaming.  */
      free (part);
    }
  ltrans_partitions.release ();
}

/* Return true if symbol is already in some partition.  */

static inline bool
symbol_partitioned_p (symtab_node *node)
{
  /* AUX holds the number of partitions the symbol has been added to;
     nonzero means "placed somewhere".  */
  return node->aux;
}

/* Add references of NODE into the partition PART.  */
static void
add_references_to_partition (ltrans_partition part, symtab_node *node)
{
  int i;
  struct ipa_ref *ref = NULL;

  /* Add all duplicated references to the partition.  */
  for (i = 0; node->iterate_reference (i, ref); i++)
    if (ref->referred->get_partitioning_class () == SYMBOL_DUPLICATE)
      add_symbol_to_partition (part, ref->referred);
    /* References to a readonly variable may be constant folded into its value.
       Recursively look into the initializers of the constant variable and add
       references, too.  */
    else if (is_a <varpool_node *> (ref->referred)
	     && (dyn_cast <varpool_node *> (ref->referred)
		 ->ctor_useable_for_folding_p ()
		 || POINTER_BOUNDS_P (ref->referred->decl))
	     && !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
      {
	/* INITIALIZERS_VISITED guards against walking the same initializer
	   twice (and against infinite recursion on circular references).  */
	if (!part->initializers_visited)
	  part->initializers_visited = new hash_set<symtab_node *>;
	if (!part->initializers_visited->add (ref->referred))
	  add_references_to_partition (part, ref->referred);
      }
}

/* Helper function for add_symbol_to_partition doing the actual dirty work
   of adding NODE to PART.  Return true if NODE is (or already was) added;
   false when a non-duplicated symbol already lives in another partition.  */

static bool
add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
{
  enum symbol_partitioning_class c = node->get_partitioning_class ();
  struct ipa_ref *ref;
  symtab_node *node1;

  /* If NODE is already there, we have nothing to do.  */
  if (lto_symtab_encoder_in_partition_p (part->encoder, node))
    return true;

  /* Non-duplicated aliases or thunks of a duplicated symbol need to be output
     just once.

     Be lax about comdats; they may or may not be duplicated and we may
     end up in need to duplicate keyed comdat because it has unkeyed alias.  */
  if (c == SYMBOL_PARTITION && !DECL_COMDAT (node->decl)
      && symbol_partitioned_p (node))
    return false;

  /* Be sure that we never try to duplicate partitioned symbol
     or add external symbol.  */
  gcc_assert (c != SYMBOL_EXTERNAL
	      && (c == SYMBOL_DUPLICATE || !symbol_partitioned_p (node)));

  part->symbols++;

  lto_set_symtab_encoder_in_partition (part->encoder, node);

  if (symbol_partitioned_p (node))
    {
      node->in_other_partition = 1;
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Symbol node %s now used in multiple partitions\n",
		 node->name ());
    }
  /* Count one more partition containing NODE (see symbol_partitioned_p).  */
  node->aux = (void *)((size_t)node->aux + 1);

  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      struct cgraph_edge *e;
      if (!node->alias)
	part->insns += ipa_fn_summaries->get (cnode)->self_size;

      /* Add all inline clones and callees that are duplicated.  */
      for (e = cnode->callees; e; e = e->next_callee)
	if (!e->inline_failed)
	  add_symbol_to_partition_1 (part, e->callee);
	else if (e->callee->get_partitioning_class () == SYMBOL_DUPLICATE)
	  add_symbol_to_partition (part, e->callee);

      /* Add all thunks associated with the function.  */
      for (e = cnode->callers; e; e = e->next_caller)
	if (e->caller->thunk.thunk_p && !e->caller->global.inlined_to)
	  add_symbol_to_partition_1 (part, e->caller);

      /* Instrumented version is actually the same function.
	 Therefore put it into the same partition.  */
      if (cnode->instrumented_version)
	add_symbol_to_partition_1 (part, cnode->instrumented_version);
    }

  add_references_to_partition (part, node);

  /* Add all aliases associated with the symbol.  */

  FOR_EACH_ALIAS (node, ref)
    if (!ref->referring->transparent_alias)
      add_symbol_to_partition_1 (part, ref->referring);
    else
      {
	struct ipa_ref *ref2;
	/* We do not need to add transparent aliases if they are not used.
	   However we must add aliases of transparent aliases if they exist.  */
	FOR_EACH_ALIAS (ref->referring, ref2)
	  {
	    /* Nested transparent aliases are not permitted.  */
	    gcc_checking_assert (!ref2->referring->transparent_alias);
	    add_symbol_to_partition_1 (part, ref2->referring);
	  }
      }

  /* Ensure that members of SAME_COMDAT_GROUP are always added as a group.
     NOTE(review): the condition below tests NODE->alias, not NODE1->alias —
     confirm this is intended before changing it.  */
  if (node->same_comdat_group)
    for (node1 = node->same_comdat_group;
	 node1 != node; node1 = node1->same_comdat_group)
      if (!node->alias)
	{
	  bool added = add_symbol_to_partition_1 (part, node1);
	  gcc_assert (added);
	}
  return true;
}

/* If symbol NODE is really part of other symbol's definition (i.e. it is
   internal label, thunk, alias or so), return the outer symbol.
   When add_symbol_to_partition_1 is called on the outer symbol it must
   eventually add NODE, too.  */
static symtab_node *
contained_in_symbol (symtab_node *node)
{
  /* There is no need to consider transparent aliases to be part of the
     definition: they are only useful inside the partition they are output
     and thus we will always see an explicit reference to it.  */
  if (node->transparent_alias)
    return node;
  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      cnode = cnode->function_symbol ();
      if (cnode->global.inlined_to)
	cnode = cnode->global.inlined_to;
      return cnode;
    }
  else if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
    return vnode->ultimate_alias_target ();
  return node;
}

/* Add symbol NODE to partition PART.  When definition of NODE is part
   of other symbol definition, add the other symbol, too.  */

static void
add_symbol_to_partition (ltrans_partition part, symtab_node *node)
{
  symtab_node *node1;

  /* Verify that we do not try to duplicate something that can not be.  */
  gcc_checking_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
		       || !symbol_partitioned_p (node));

  /* Walk out to the outermost enclosing symbol; adding it adds NODE too.  */
  while ((node1 = contained_in_symbol (node)) != node)
    node = node1;

  /* If we have duplicated symbol contained in something we can not duplicate,
     we are very badly screwed.  The other way is possible, so we do not
     assert this in add_symbol_to_partition_1.

     Be lax about comdats; they may or may not be duplicated and we may
     end up in need to duplicate keyed comdat because it has unkeyed alias.  */

  gcc_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
	      || DECL_COMDAT (node->decl)
	      || !symbol_partitioned_p (node));

  add_symbol_to_partition_1 (part, node);
}

/* Undo all additions until the number of symbols encoded in PARTITION
   drops back to N_NODES.  */

static void
undo_partition (ltrans_partition partition, unsigned int n_nodes)
{
  while (lto_symtab_encoder_size (partition->encoder) > (int)n_nodes)
    {
      symtab_node *node = lto_symtab_encoder_deref (partition->encoder,
						    n_nodes);
      partition->symbols--;
      cgraph_node *cnode;

      /* After UNDO we no longer know what was visited.  */
      if (partition->initializers_visited)
	delete partition->initializers_visited;
      partition->initializers_visited = NULL;

      if (!node->alias && (cnode = dyn_cast <cgraph_node *> (node)))
	partition->insns -= ipa_fn_summaries->get (cnode)->self_size;
      lto_symtab_encoder_delete_node (partition->encoder, node);
      /* Drop the per-partition reference count kept in AUX.  */
      node->aux = (void *)((size_t)node->aux - 1);
    }
}

/* Group cgraph nodes by input files.  This is used mainly for testing
   right now.  */

void
lto_1_to_1_map (void)
{
  symtab_node *node;
  struct lto_file_decl_data *file_data;
  hash_map<lto_file_decl_data *, ltrans_partition> pmap;
  ltrans_partition partition;
  int npartitions = 0;

  FOR_EACH_SYMBOL (node)
    {
      if (node->get_partitioning_class () != SYMBOL_PARTITION
	  || symbol_partitioned_p (node))
	continue;

      file_data = node->lto_file_data;

      if (file_data)
	{
	  /* One partition per input file; reuse it if already created.  */
	  ltrans_partition *slot = &pmap.get_or_insert (file_data);
	  if (*slot)
	    partition = *slot;
	  else
	    {
	      partition = new_partition (file_data->file_name);
	      *slot = partition;
	      npartitions++;
	    }
	}
      else if (!file_data && ltrans_partitions.length ())
	/* Symbols with no originating file go to the first partition.  */
	partition = ltrans_partitions[0];
      else
	{
	  partition = new_partition ("");
	  pmap.put (NULL, partition);
	  npartitions++;
	}

      add_symbol_to_partition (partition, node);
    }

  /* If the cgraph is empty, create one cgraph node set so that there is still
     an output file for any variables that need to be exported in a DSO.  */
  if (!npartitions)
    new_partition ("empty");

}

/* Maximal partitioning.  Put every new symbol into new partition if possible.
*/ 338 339 void 340 lto_max_map (void) 341 { 342 symtab_node *node; 343 ltrans_partition partition; 344 int npartitions = 0; 345 346 FOR_EACH_SYMBOL (node) 347 { 348 if (node->get_partitioning_class () != SYMBOL_PARTITION 349 || symbol_partitioned_p (node)) 350 continue; 351 partition = new_partition (node->asm_name ()); 352 add_symbol_to_partition (partition, node); 353 npartitions++; 354 } 355 if (!npartitions) 356 new_partition ("empty"); 357 } 358 359 /* Helper function for qsort; sort nodes by order. noreorder functions must have 360 been removed earlier. */ 361 static int 362 node_cmp (const void *pa, const void *pb) 363 { 364 const struct cgraph_node *a = *(const struct cgraph_node * const *) pa; 365 const struct cgraph_node *b = *(const struct cgraph_node * const *) pb; 366 367 /* Profile reorder flag enables function reordering based on first execution 368 of a function. All functions with profile are placed in ascending 369 order at the beginning. */ 370 371 if (flag_profile_reorder_functions) 372 { 373 /* Functions with time profile are sorted in ascending order. */ 374 if (a->tp_first_run && b->tp_first_run) 375 return a->tp_first_run != b->tp_first_run 376 ? a->tp_first_run - b->tp_first_run 377 : a->order - b->order; 378 379 /* Functions with time profile are sorted before the functions 380 that do not have the profile. */ 381 if (a->tp_first_run || b->tp_first_run) 382 return b->tp_first_run - a->tp_first_run; 383 } 384 385 return b->order - a->order; 386 } 387 388 /* Helper function for qsort; sort nodes by order. */ 389 static int 390 varpool_node_cmp (const void *pa, const void *pb) 391 { 392 const symtab_node *a = *static_cast<const symtab_node * const *> (pa); 393 const symtab_node *b = *static_cast<const symtab_node * const *> (pb); 394 return b->order - a->order; 395 } 396 397 /* Add all symtab nodes from NEXT_NODE to PARTITION in order. 
*/ 398 399 static void 400 add_sorted_nodes (vec<symtab_node *> &next_nodes, ltrans_partition partition) 401 { 402 unsigned i; 403 symtab_node *node; 404 405 next_nodes.qsort (varpool_node_cmp); 406 FOR_EACH_VEC_ELT (next_nodes, i, node) 407 if (!symbol_partitioned_p (node)) 408 add_symbol_to_partition (partition, node); 409 } 410 411 412 /* Group cgraph nodes into equally-sized partitions. 413 414 The partitioning algorithm is simple: nodes are taken in predefined order. 415 The order corresponds to the order we want functions to have in the final 416 output. In the future this will be given by function reordering pass, but 417 at the moment we use the topological order, which is a good approximation. 418 419 The goal is to partition this linear order into intervals (partitions) so 420 that all the partitions have approximately the same size and the number of 421 callgraph or IPA reference edges crossing boundaries is minimal. 422 423 This is a lot faster (O(n) in size of callgraph) than algorithms doing 424 priority-based graph clustering that are generally O(n^2) and, since 425 WHOPR is designed to make things go well across partitions, it leads 426 to good results. 427 428 We compute the expected size of a partition as: 429 430 max (total_size / lto_partitions, min_partition_size) 431 432 We use dynamic expected size of partition so small programs are partitioned 433 into enough partitions to allow use of multiple CPUs, while large programs 434 are not partitioned too much. Creating too many partitions significantly 435 increases the streaming overhead. 436 437 In the future, we would like to bound the maximal size of partitions so as 438 to prevent the LTRANS stage from consuming too much memory. At the moment, 439 however, the WPA stage is the most memory intensive for large benchmarks, 440 since too many types and declarations are read into memory. 441 442 The function implements a simple greedy algorithm. 
   Nodes are being added
   to the current partition until after 3/4 of the expected partition size is
   reached.  Past this threshold, we keep track of boundary size (number of
   edges going to other partitions) and continue adding functions until after
   the current partition has grown to twice the expected partition size.  Then
   the process is undone to the point where the minimal ratio of boundary size
   and in-partition calls was reached.  */

void
lto_balanced_map (int n_lto_partitions, int max_partition_size)
{
  int n_nodes = 0;
  int n_varpool_nodes = 0, varpool_pos = 0, best_varpool_pos = 0;
  struct cgraph_node **order = XNEWVEC (cgraph_node *, symtab->cgraph_max_uid);
  auto_vec<cgraph_node *> noreorder;
  auto_vec<varpool_node *> varpool_order;
  int i;
  struct cgraph_node *node;
  int original_total_size, total_size = 0, best_total_size = 0;
  int partition_size;
  ltrans_partition partition;
  int last_visited_node = 0;
  varpool_node *vnode;
  /* COST is the number of edges crossing the current partition boundary;
     INTERNAL counts edges fully inside it.  BEST_* snapshot the state at
     the point with the best INTERNAL/COST ratio seen so far, so we can
     unwind to it when the partition grows too large.  */
  int cost = 0, internal = 0;
  int best_n_nodes = 0, best_i = 0, best_cost = INT_MAX, best_internal = 0;
  int npartitions;
  int current_order = -1;
  int noreorder_pos = 0;

  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);

  /* Split functions into the reorderable working set (ORDER) and the
     no_reorder set, accumulating total size of all function bodies.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->get_partitioning_class () == SYMBOL_PARTITION)
      {
	if (node->no_reorder)
	  noreorder.safe_push (node);
	else
	  order[n_nodes++] = node;
	if (!node->alias)
	  total_size += ipa_fn_summaries->get (node)->size;
      }

  original_total_size = total_size;

  /* Streaming works best when the source units do not cross partition
     boundaries much.  This is because importing function from a source
     unit tends to import a lot of global trees defined there.  We should
     get better about minimizing the function boundary, but until that
     things work smoother if we order in source order.  */
  qsort (order, n_nodes, sizeof (struct cgraph_node *), node_cmp);
  noreorder.qsort (node_cmp);

  if (symtab->dump_file)
    {
      for (i = 0; i < n_nodes; i++)
	fprintf (symtab->dump_file, "Balanced map symbol order:%s:%u\n",
		 order[i]->name (), order[i]->tp_first_run);
      for (i = 0; i < (int)noreorder.length (); i++)
	fprintf (symtab->dump_file, "Balanced map symbol no_reorder:%s:%u\n",
		 noreorder[i]->name (), noreorder[i]->tp_first_run);
    }

  /* Collect all variables that should not be reordered.  */
  FOR_EACH_VARIABLE (vnode)
    if (vnode->get_partitioning_class () == SYMBOL_PARTITION
	&& vnode->no_reorder)
      varpool_order.safe_push (vnode);
  n_varpool_nodes = varpool_order.length ();
  varpool_order.qsort (varpool_node_cmp);

  /* Compute partition size and create the first partition.  */
  if (PARAM_VALUE (MIN_PARTITION_SIZE) > max_partition_size)
    fatal_error (input_location, "min partition size cannot be greater than max partition size");

  partition_size = total_size / n_lto_partitions;
  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
  npartitions = 1;
  partition = new_partition ("");
  if (symtab->dump_file)
    fprintf (symtab->dump_file, "Total unit size: %i, partition size: %i\n",
	     total_size, partition_size);

  auto_vec<symtab_node *> next_nodes;

  for (i = 0; i < n_nodes; i++)
    {
      if (symbol_partitioned_p (order[i]))
	continue;

      current_order = order[i]->order;

      /* Output noreorder and varpool in program order first.  */
      next_nodes.truncate (0);
      while (varpool_pos < n_varpool_nodes
	     && varpool_order[varpool_pos]->order < current_order)
	next_nodes.safe_push (varpool_order[varpool_pos++]);
      while (noreorder_pos < (int)noreorder.length ()
	     && noreorder[noreorder_pos]->order < current_order)
	{
	  if (!noreorder[noreorder_pos]->alias)
	    total_size -= ipa_fn_summaries->get (noreorder[noreorder_pos])->size;
	  next_nodes.safe_push (noreorder[noreorder_pos++]);
	}
      add_sorted_nodes (next_nodes, partition);

      if (!symbol_partitioned_p (order[i]))
	add_symbol_to_partition (partition, order[i]);
      if (!order[i]->alias)
	total_size -= ipa_fn_summaries->get (order[i])->size;


      /* Once we added a new node to the partition, we also want to add
	 all referenced variables unless they were already added into some
	 earlier partition.
	 add_symbol_to_partition adds possibly multiple nodes and
	 variables that are needed to satisfy needs of ORDER[i].
	 We remember last visited cgraph and varpool node from last iteration
	 of outer loop that allows us to process every new addition.

	 At the same time we compute size of the boundary into COST.  Every
	 callgraph or IPA reference edge leaving the partition contributes into
	 COST.  Every edge inside partition was earlier computed as one leaving
	 it and thus we need to subtract it from COST.  */
      while (last_visited_node < lto_symtab_encoder_size (partition->encoder))
	{
	  symtab_node *refs_node;
	  int j;
	  struct ipa_ref *ref = NULL;
	  symtab_node *snode = lto_symtab_encoder_deref (partition->encoder,
							 last_visited_node);

	  if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	    {
	      struct cgraph_edge *edge;

	      refs_node = node;

	      last_visited_node++;

	      gcc_assert (node->definition || node->weakref);

	      /* Compute boundary cost of callgraph edges.  */
	      for (edge = node->callees; edge; edge = edge->next_callee)
		if (edge->callee->definition)
		  {
		    int edge_cost = edge->frequency ();
		    int index;

		    if (!edge_cost)
		      edge_cost = 1;
		    gcc_assert (edge_cost > 0);
		    index = lto_symtab_encoder_lookup (partition->encoder,
						       edge->callee);
		    /* Callee already visited in this partition: the edge
		       was earlier counted as leaving; move it to internal.  */
		    if (index != LCC_NOT_FOUND
			&& index < last_visited_node - 1)
		      cost -= edge_cost, internal += edge_cost;
		    else
		      cost += edge_cost;
		  }
	      for (edge = node->callers; edge; edge = edge->next_caller)
		{
		  int edge_cost = edge->frequency ();
		  int index;

		  gcc_assert (edge->caller->definition);
		  if (!edge_cost)
		    edge_cost = 1;
		  gcc_assert (edge_cost > 0);
		  index = lto_symtab_encoder_lookup (partition->encoder,
						     edge->caller);
		  if (index != LCC_NOT_FOUND
		      && index < last_visited_node - 1)
		    cost -= edge_cost;
		  else
		    cost += edge_cost;
		}
	    }
	  else
	    {
	      refs_node = snode;
	      last_visited_node++;
	    }

	  /* Compute boundary cost of IPA REF edges and at the same time look into
	     variables referenced from current partition and try to add them.  */
	  for (j = 0; refs_node->iterate_reference (j, ref); j++)
	    if (is_a <varpool_node *> (ref->referred))
	      {
		int index;

		vnode = dyn_cast <varpool_node *> (ref->referred);
		if (!vnode->definition)
		  continue;
		if (!symbol_partitioned_p (vnode)
		    && !vnode->no_reorder
		    && vnode->get_partitioning_class () == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = dyn_cast <cgraph_node *> (ref->referred);
		if (!node->definition)
		  continue;
		index = lto_symtab_encoder_lookup (partition->encoder,
						   node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	  for (j = 0; refs_node->iterate_referring (j, ref); j++)
	    if (is_a <varpool_node *> (ref->referring))
	      {
		int index;

		vnode = dyn_cast <varpool_node *> (ref->referring);
		gcc_assert (vnode->definition);
		/* It is better to couple variables with their users,
		   because it allows them to be removed.  Coupling
		   with objects they refer to only helps to reduce
		   number of symbols promoted to hidden.  */
		if (!symbol_partitioned_p (vnode)
		    && !vnode->no_reorder
		    && !vnode->can_remove_if_no_refs_p ()
		    && vnode->get_partitioning_class () == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = dyn_cast <cgraph_node *> (ref->referring);
		gcc_assert (node->definition);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--;
		else
		  cost++;
	      }
	}

      /* If the partition is large enough, start looking for smallest boundary cost.  */
      if (partition->insns < partition_size * 3 / 4
	  || best_cost == INT_MAX
	  || ((!cost
	       || (best_internal * (HOST_WIDE_INT) cost
		   > (internal * (HOST_WIDE_INT)best_cost)))
	      && partition->insns < partition_size * 5 / 4))
	{
	  best_cost = cost;
	  best_internal = internal;
	  best_i = i;
	  best_n_nodes = lto_symtab_encoder_size (partition->encoder);
	  best_total_size = total_size;
	  best_varpool_pos = varpool_pos;
	}
      if (symtab->dump_file)
	fprintf (symtab->dump_file, "Step %i: added %s/%i, size %i, cost %i/%i "
		 "best %i/%i, step %i\n", i,
		 order[i]->name (), order[i]->order,
		 partition->insns, cost, internal,
		 best_cost, best_internal, best_i);
      /* Partition is too large, unwind into step when best cost was reached and
	 start new partition.  */
      if (partition->insns > 2 * partition_size
	  || partition->insns > max_partition_size)
	{
	  if (best_i != i)
	    {
	      if (symtab->dump_file)
		fprintf (symtab->dump_file, "Unwinding %i insertions to step %i\n",
			 i - best_i, best_i);
	      undo_partition (partition, best_n_nodes);
	      varpool_pos = best_varpool_pos;
	    }
	  i = best_i;
	  /* When we are finished, avoid creating empty partition.  */
	  while (i < n_nodes - 1 && symbol_partitioned_p (order[i + 1]))
	    i++;
	  if (i == n_nodes - 1)
	    break;
	  partition = new_partition ("");
	  last_visited_node = 0;
	  total_size = best_total_size;
	  cost = 0;

	  if (symtab->dump_file)
	    fprintf (symtab->dump_file, "New partition\n");
	  best_n_nodes = 0;
	  best_cost = INT_MAX;

	  /* Since the size of partitions is just approximate, update the size after
	     we finished current one.  */
	  if (npartitions < n_lto_partitions)
	    partition_size = total_size / (n_lto_partitions - npartitions);
	  else
	    /* Watch for overflow.  */
	    partition_size = INT_MAX / 16;

	  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
	    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
	  npartitions ++;
	}
    }

  next_nodes.truncate (0);

  /* Variables that are not reachable from the code go into last partition.  */
  FOR_EACH_VARIABLE (vnode)
    if (vnode->get_partitioning_class () == SYMBOL_PARTITION
	&& !symbol_partitioned_p (vnode))
      next_nodes.safe_push (vnode);

  /* Output remaining ordered symbols.  */
  while (varpool_pos < n_varpool_nodes)
    next_nodes.safe_push (varpool_order[varpool_pos++]);
  while (noreorder_pos < (int)noreorder.length ())
    next_nodes.safe_push (noreorder[noreorder_pos++]);
  add_sorted_nodes (next_nodes, partition);

  free (order);

  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "\nPartition sizes:\n");
      unsigned partitions = ltrans_partitions.length ();

      for (unsigned i = 0; i < partitions ; i++)
	{
	  ltrans_partition p = ltrans_partitions[i];
	  fprintf (symtab->dump_file, "partition %d contains %d (%2.2f%%)"
		   " symbols and %d (%2.2f%%) insns\n", i, p->symbols,
		   100.0 * p->symbols / n_nodes, p->insns,
		   100.0 * p->insns / original_total_size);
	}

      fprintf (symtab->dump_file, "\n");
    }
}

/* Return true if we must not change the name of the NODE.  The name as
   extracted from the corresponding decl should be passed in NAME.  */

static bool
must_not_rename (symtab_node *node, const char *name)
{
  /* Our renaming machinery does not handle more than one change of assembler
     name.  We should not need more than one anyway.  */
  if (node->lto_file_data
      && lto_get_decl_name_mapping (node->lto_file_data, name) != name)
    {
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Not privatizing symbol name: %s. It privatized already.\n",
		 name);
      return true;
    }
  /* Avoid mangling of already mangled clones.
     ??? should have a flag whether a symbol has a 'private' name already,
     since we produce some symbols like that i.e. for global constructors
     that are not really clones.  */
  if (node->unique_name)
    {
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Not privatizing symbol name: %s. Has unique name.\n",
		 name);
      return true;
    }
  return false;
}

/* If we are an offload compiler, we may have to rewrite symbols to be
   valid on this target.
   Return either PTR or a modified version of it.  */

static const char *
maybe_rewrite_identifier (const char *ptr)
{
#if defined ACCEL_COMPILER && (defined NO_DOT_IN_LABEL || defined NO_DOLLAR_IN_LABEL)
#ifndef NO_DOT_IN_LABEL
  char valid = '.';
  const char reject[] = "$";
#elif !defined NO_DOLLAR_IN_LABEL
  char valid = '$';
  const char reject[] = ".";
#else
  char valid = '_';
  const char reject[] = ".$";
#endif

  char *copy = NULL;
  const char *match = ptr;
  for (;;)
    {
      size_t off = strcspn (match, reject);
      if (match[off] == '\0')
	break;
      /* Copy lazily: PTR is duplicated only when a rejected character
	 is actually found.  */
      if (copy == NULL)
	{
	  copy = xstrdup (ptr);
	  match = copy;
	}
      copy[off] = valid;
    }
  return match;
#else
  return ptr;
#endif
}

/* Ensure that the symbol in NODE is valid for the target, and if not,
   rewrite it.  */

static void
validize_symbol_for_target (symtab_node *node)
{
  tree decl = node->decl;
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

  if (must_not_rename (node, name))
    return;

  const char *name2 = maybe_rewrite_identifier (name);
  if (name2 != name)
    {
      symtab->change_decl_assembler_name (decl, get_identifier (name2));
      /* Record old->new mapping so later streaming can resolve the
	 original name.  */
      if (node->lto_file_data)
	lto_record_renamed_decl (node->lto_file_data, name,
				 IDENTIFIER_POINTER
				 (DECL_ASSEMBLER_NAME (decl)));
    }
}

/* Helper for privatize_symbol_name.  Mangle NODE symbol name
   represented by DECL.  Return false when renaming is not allowed
   (see must_not_rename), true on success.  */

static bool
privatize_symbol_name_1 (symtab_node *node, tree decl)
{
  const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));

  if (must_not_rename (node, name))
    return false;

  name = maybe_rewrite_identifier (name);
  symtab->change_decl_assembler_name (decl,
				      clone_function_name_1 (name,
							     "lto_priv"));

  if (node->lto_file_data)
    lto_record_renamed_decl (node->lto_file_data, name,
			     IDENTIFIER_POINTER
			     (DECL_ASSEMBLER_NAME (decl)));

  if (symtab->dump_file)
    fprintf (symtab->dump_file,
	     "Privatizing symbol name: %s -> %s\n",
	     name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));

  return true;
}

/* Mangle NODE symbol name into a local name.
   This is necessary to do
   1) if two or more static vars of same assembler name
      are merged into single ltrans unit.
   2) if previously static var was promoted hidden to avoid possible conflict
      with symbols defined out of the LTO world.  */

static bool
privatize_symbol_name (symtab_node *node)
{
  if (!privatize_symbol_name_1 (node, node->decl))
    return false;

  /* We could change name which is a target of transparent alias
     chain of instrumented function name.  Fix alias chain if so.  */
  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      tree iname = NULL_TREE;
      if (cnode->instrumentation_clone)
	{
	  /* If we want to privatize instrumentation clone
	     then we also need to privatize original function.  */
	  if (cnode->instrumented_version)
	    privatize_symbol_name (cnode->instrumented_version);
	  else
	    privatize_symbol_name_1 (cnode, cnode->orig_decl);
	  iname = DECL_ASSEMBLER_NAME (cnode->decl);
	  TREE_CHAIN (iname) = DECL_ASSEMBLER_NAME (cnode->orig_decl);
	}
      else if (cnode->instrumented_version
	       && cnode->instrumented_version->orig_decl == cnode->decl)
	{
	  iname = DECL_ASSEMBLER_NAME (cnode->instrumented_version->decl);
	  TREE_CHAIN (iname) = DECL_ASSEMBLER_NAME (cnode->decl);
	}
    }

  return true;
}

/* Promote symbol NODE to be public (with hidden visibility), renaming it
   first to avoid clashes with non-LTO symbols.  */

static void
promote_symbol (symtab_node *node)
{
  /* We already promoted ... */
  if (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
      && DECL_VISIBILITY_SPECIFIED (node->decl)
      && TREE_PUBLIC (node->decl))
    {
      validize_symbol_for_target (node);
      return;
    }

  gcc_checking_assert (!TREE_PUBLIC (node->decl)
		       && !DECL_EXTERNAL (node->decl));
  /* Be sure that newly public symbol does not conflict with anything already
     defined by the non-LTO part.  */
  privatize_symbol_name (node);
  TREE_PUBLIC (node->decl) = 1;
  DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
  DECL_VISIBILITY_SPECIFIED (node->decl) = true;
  if (symtab->dump_file)
    fprintf (symtab->dump_file,
	     "Promoting as hidden: %s (%s)\n", node->name (),
	     IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));

  /* Promoting a symbol also promotes all transparent aliases with exception
     of weakref where the visibility flags are always wrong and set to
     !PUBLIC.  */
  ipa_ref *ref;
  for (unsigned i = 0; node->iterate_direct_aliases (i, ref); i++)
    {
      struct symtab_node *alias = ref->referring;
      if (alias->transparent_alias && !alias->weakref)
	{
	  TREE_PUBLIC (alias->decl) = 1;
	  DECL_VISIBILITY (alias->decl) = VISIBILITY_HIDDEN;
	  DECL_VISIBILITY_SPECIFIED (alias->decl) = true;
	  /* NOTE(review): this dump prints NODE's assembler name, not the
	     ALIAS being promoted — confirm whether alias->decl was meant.  */
	  if (symtab->dump_file)
	    fprintf (symtab->dump_file,
		     "Promoting alias as hidden: %s\n",
		     IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
	}
      gcc_assert (!alias->weakref || TREE_PUBLIC (alias->decl));
    }
}

/* Return true if NODE needs named section even if it won't land in
   the partition symbol table.

   FIXME: we should really not use named sections for inline clones
   and master clones.  */

static bool
may_need_named_section_p (lto_symtab_encoder_t encoder, symtab_node *node)
{
  struct cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
  if (!cnode)
    return false;
  if (node->real_symbol_p ())
    return false;
  return (!encoder
	  || (lto_symtab_encoder_lookup (encoder, node) != LCC_NOT_FOUND
	      && lto_symtab_encoder_encode_body_p (encoder,
						   cnode)));
}

/* If NODE represents a static variable.  See if there are other variables
   of the same name in partition ENCODER (or in whole compilation unit if
   ENCODER is NULL) and if so, mangle the statics.  Always mangle all
   conflicting statics, so we reduce chances of silently miscompiling
   asm statements referring to them by symbol name.  */

static void
rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
{
  tree decl = node->decl;
  symtab_node *s;
  tree name = DECL_ASSEMBLER_NAME (decl);

  /* See if this is static symbol.  */
  if (((node->externally_visible && !node->weakref)
      /* FIXME: externally_visible is somewhat illogically not set for
	 external symbols (i.e.
those not defined). Remove this test 1051 once this is fixed. */ 1052 || DECL_EXTERNAL (node->decl) 1053 || !node->real_symbol_p ()) 1054 && !may_need_named_section_p (encoder, node)) 1055 return; 1056 1057 /* Now walk symbols sharing the same name and see if there are any conflicts. 1058 (all types of symbols counts here, since we can not have static of the 1059 same name as external or public symbol.) */ 1060 for (s = symtab_node::get_for_asmname (name); 1061 s; s = s->next_sharing_asm_name) 1062 if ((s->real_symbol_p () || may_need_named_section_p (encoder, s)) 1063 && s->decl != node->decl 1064 && (!encoder 1065 || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND)) 1066 break; 1067 1068 /* OK, no confict, so we have nothing to do. */ 1069 if (!s) 1070 return; 1071 1072 if (symtab->dump_file) 1073 fprintf (symtab->dump_file, 1074 "Renaming statics with asm name: %s\n", node->name ()); 1075 1076 /* Assign every symbol in the set that shares the same ASM name an unique 1077 mangled name. */ 1078 for (s = symtab_node::get_for_asmname (name); s;) 1079 if ((!s->externally_visible || s->weakref) 1080 /* Transparent aliases having same name as target are renamed at a 1081 time their target gets new name. Transparent aliases that use 1082 separate assembler name require the name to be unique. */ 1083 && (!s->transparent_alias || !s->definition || s->weakref 1084 || !symbol_table::assembler_names_equal_p 1085 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (s->decl)), 1086 IDENTIFIER_POINTER 1087 (DECL_ASSEMBLER_NAME (s->get_alias_target()->decl)))) 1088 && ((s->real_symbol_p () 1089 && !DECL_EXTERNAL (s->decl) 1090 && !TREE_PUBLIC (s->decl)) 1091 || may_need_named_section_p (encoder, s)) 1092 && (!encoder 1093 || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND)) 1094 { 1095 if (privatize_symbol_name (s)) 1096 /* Re-start from beginning since we do not know how many 1097 symbols changed a name. 
*/ 1098 s = symtab_node::get_for_asmname (name); 1099 else s = s->next_sharing_asm_name; 1100 } 1101 else s = s->next_sharing_asm_name; 1102 } 1103 1104 /* Find out all static decls that need to be promoted to global because 1105 of cross file sharing. This function must be run in the WPA mode after 1106 all inlinees are added. */ 1107 1108 void 1109 lto_promote_cross_file_statics (void) 1110 { 1111 unsigned i, n_sets; 1112 1113 gcc_assert (flag_wpa); 1114 1115 lto_stream_offload_p = false; 1116 select_what_to_stream (); 1117 1118 /* First compute boundaries. */ 1119 n_sets = ltrans_partitions.length (); 1120 for (i = 0; i < n_sets; i++) 1121 { 1122 ltrans_partition part 1123 = ltrans_partitions[i]; 1124 part->encoder = compute_ltrans_boundary (part->encoder); 1125 } 1126 1127 /* Look at boundaries and promote symbols as needed. */ 1128 for (i = 0; i < n_sets; i++) 1129 { 1130 lto_symtab_encoder_iterator lsei; 1131 lto_symtab_encoder_t encoder = ltrans_partitions[i]->encoder; 1132 1133 for (lsei = lsei_start (encoder); !lsei_end_p (lsei); 1134 lsei_next (&lsei)) 1135 { 1136 symtab_node *node = lsei_node (lsei); 1137 1138 /* If symbol is static, rename it if its assembler name 1139 clashes with anything else in this unit. */ 1140 rename_statics (encoder, node); 1141 1142 /* No need to promote if symbol already is externally visible ... */ 1143 if (node->externally_visible 1144 /* ... or if it is part of current partition ... */ 1145 || lto_symtab_encoder_in_partition_p (encoder, node) 1146 /* ... or if we do not partition it. This mean that it will 1147 appear in every partition referencing it. */ 1148 || node->get_partitioning_class () != SYMBOL_PARTITION) 1149 { 1150 validize_symbol_for_target (node); 1151 continue; 1152 } 1153 1154 promote_symbol (node); 1155 } 1156 } 1157 } 1158 1159 /* Rename statics in the whole unit in the case that 1160 we do -flto-partition=none. 
*/ 1161 1162 void 1163 lto_promote_statics_nonwpa (void) 1164 { 1165 symtab_node *node; 1166 FOR_EACH_SYMBOL (node) 1167 { 1168 rename_statics (NULL, node); 1169 validize_symbol_for_target (node); 1170 } 1171 } 1172