1 /* LTO partitioning logic routines.
2 Copyright (C) 2009-2018 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "target.h"
24 #include "function.h"
25 #include "basic-block.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "alloc-pool.h"
29 #include "stringpool.h"
30 #include "cgraph.h"
31 #include "lto-streamer.h"
32 #include "params.h"
33 #include "symbol-summary.h"
34 #include "tree-vrp.h"
35 #include "ipa-prop.h"
36 #include "ipa-fnsummary.h"
37 #include "lto-partition.h"
38 #include "sreal.h"
39
40 vec<ltrans_partition> ltrans_partitions;
41
42 static void add_symbol_to_partition (ltrans_partition part, symtab_node *node);
43
44
45 /* Helper for qsort; compare partitions and return one with smaller order. */
46
47 static int
cmp_partitions_order(const void * a,const void * b)48 cmp_partitions_order (const void *a, const void *b)
49 {
50 const struct ltrans_partition_def *pa
51 = *(struct ltrans_partition_def *const *)a;
52 const struct ltrans_partition_def *pb
53 = *(struct ltrans_partition_def *const *)b;
54 int ordera = -1, orderb = -1;
55
56 if (lto_symtab_encoder_size (pa->encoder))
57 ordera = lto_symtab_encoder_deref (pa->encoder, 0)->order;
58 if (lto_symtab_encoder_size (pb->encoder))
59 orderb = lto_symtab_encoder_deref (pb->encoder, 0)->order;
60 return orderb - ordera;
61 }
62
63 /* Create new partition with name NAME. */
64
65 static ltrans_partition
new_partition(const char * name)66 new_partition (const char *name)
67 {
68 ltrans_partition part = XCNEW (struct ltrans_partition_def);
69 part->encoder = lto_symtab_encoder_new (false);
70 part->name = name;
71 part->insns = 0;
72 part->symbols = 0;
73 ltrans_partitions.safe_push (part);
74 return part;
75 }
76
77 /* Free memory used by ltrans datastructures. */
78
79 void
free_ltrans_partitions(void)80 free_ltrans_partitions (void)
81 {
82 unsigned int idx;
83 ltrans_partition part;
84 for (idx = 0; ltrans_partitions.iterate (idx, &part); idx++)
85 {
86 if (part->initializers_visited)
87 delete part->initializers_visited;
88 /* Symtab encoder is freed after streaming. */
89 free (part);
90 }
91 ltrans_partitions.release ();
92 }
93
94 /* Return true if symbol is already in some partition. */
95
96 static inline bool
symbol_partitioned_p(symtab_node * node)97 symbol_partitioned_p (symtab_node *node)
98 {
99 return node->aux;
100 }
101
102 /* Add references into the partition. */
/* Add everything NODE references into partition PART.

   Duplicated referred symbols are added outright.  For references to
   readonly variables whose initializer can be constant folded, recurse
   into the initializer's own references so folding remains possible
   inside the partition; INITIALIZERS_VISITED guards against revisiting
   the same variable.  */
static void
add_references_to_partition (ltrans_partition part, symtab_node *node)
{
  int i;
  struct ipa_ref *ref = NULL;

  /* Add all duplicated references to the partition.  */
  for (i = 0; node->iterate_reference (i, ref); i++)
    if (ref->referred->get_partitioning_class () == SYMBOL_DUPLICATE)
      add_symbol_to_partition (part, ref->referred);
    /* References to a readonly variable may be constant folded into its value.
       Recursively look into the initializers of the constant variable and add
       references, too.  */
    else if (is_a <varpool_node *> (ref->referred)
	     && (dyn_cast <varpool_node *> (ref->referred)
		 ->ctor_useable_for_folding_p ()
		 || POINTER_BOUNDS_P (ref->referred->decl))
	     && !lto_symtab_encoder_in_partition_p (part->encoder, ref->referred))
      {
	/* Lazily allocate the visited set; add returns true when the
	   variable was already present, so we recurse only once per
	   variable.  */
	if (!part->initializers_visited)
	  part->initializers_visited = new hash_set<symtab_node *>;
	if (!part->initializers_visited->add (ref->referred))
	  add_references_to_partition (part, ref->referred);
      }
}
128
129 /* Helper function for add_symbol_to_partition doing the actual dirty work
130 of adding NODE to PART. */
131
/* Helper function for add_symbol_to_partition doing the actual dirty work
   of adding NODE to PART.  Returns true when NODE (and its dependent
   symbols: inline clones, duplicated callees, thunks, aliases and comdat
   group members) ended up in PART; false when a non-duplicable,
   non-comdat symbol was already emitted elsewhere.  */

static bool
add_symbol_to_partition_1 (ltrans_partition part, symtab_node *node)
{
  enum symbol_partitioning_class c = node->get_partitioning_class ();
  struct ipa_ref *ref;
  symtab_node *node1;

  /* If NODE is already there, we have nothing to do.  */
  if (lto_symtab_encoder_in_partition_p (part->encoder, node))
    return true;

  /* non-duplicated aliases or thunks of a duplicated symbol needs to be output
     just once.

     Be lax about comdats; they may or may not be duplicated and we may
     end up in need to duplicate keyed comdat because it has unkeyed alias.  */
  if (c == SYMBOL_PARTITION && !DECL_COMDAT (node->decl)
      && symbol_partitioned_p (node))
    return false;

  /* Be sure that we never try to duplicate partitioned symbol
     or add external symbol.  */
  gcc_assert (c != SYMBOL_EXTERNAL
	      && (c == SYMBOL_DUPLICATE || !symbol_partitioned_p (node)));

  part->symbols++;

  lto_set_symtab_encoder_in_partition (part->encoder, node);

  if (symbol_partitioned_p (node))
    {
      node->in_other_partition = 1;
      if (symtab->dump_file)
	fprintf (symtab->dump_file,
		 "Symbol node %s now used in multiple partitions\n",
		 node->name ());
    }
  /* aux counts how many partitions contain NODE.  */
  node->aux = (void *)((size_t)node->aux + 1);

  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      struct cgraph_edge *e;
      /* Only real (non-alias) partitioned bodies contribute size.  */
      if (!node->alias && c == SYMBOL_PARTITION)
	part->insns += ipa_fn_summaries->get (cnode)->size;

      /* Add all inline clones and callees that are duplicated.  */
      for (e = cnode->callees; e; e = e->next_callee)
	if (!e->inline_failed)
	  add_symbol_to_partition_1 (part, e->callee);
	else if (e->callee->get_partitioning_class () == SYMBOL_DUPLICATE)
	  add_symbol_to_partition (part, e->callee);

      /* Add all thunks associated with the function.  */
      for (e = cnode->callers; e; e = e->next_caller)
	if (e->caller->thunk.thunk_p && !e->caller->global.inlined_to)
	  add_symbol_to_partition_1 (part, e->caller);

      /* Instrumented version is actually the same function.
	 Therefore put it into the same partition.  */
      if (cnode->instrumented_version)
	add_symbol_to_partition_1 (part, cnode->instrumented_version);
    }

  add_references_to_partition (part, node);

  /* Add all aliases associated with the symbol.  */

  FOR_EACH_ALIAS (node, ref)
    if (!ref->referring->transparent_alias)
      add_symbol_to_partition_1 (part, ref->referring);
    else
      {
	struct ipa_ref *ref2;
	/* We do not need to add transparent aliases if they are not used.
	   However we must add aliases of transparent aliases if they exist.  */
	FOR_EACH_ALIAS (ref->referring, ref2)
	  {
	    /* Nested transparent aliases are not permitted.  */
	    gcc_checking_assert (!ref2->referring->transparent_alias);
	    add_symbol_to_partition_1 (part, ref2->referring);
	  }
      }

  /* Ensure that SAME_COMDAT_GROUP lists all always added in a group.  */
  if (node->same_comdat_group)
    for (node1 = node->same_comdat_group;
	 node1 != node; node1 = node1->same_comdat_group)
      if (!node->alias)
	{
	  bool added = add_symbol_to_partition_1 (part, node1);
	  gcc_assert (added);
	}
  return true;
}
226
227 /* If symbol NODE is really part of other symbol's definition (i.e. it is
228 internal label, thunk, alias or so), return the outer symbol.
229 When add_symbol_to_partition_1 is called on the outer symbol it must
230 eventually add NODE, too. */
231 static symtab_node *
contained_in_symbol(symtab_node * node)232 contained_in_symbol (symtab_node *node)
233 {
234 /* There is no need to consider transparent aliases to be part of the
235 definition: they are only useful insite the partition they are output
236 and thus we will always see an explicit reference to it. */
237 if (node->transparent_alias)
238 return node;
239 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
240 {
241 cnode = cnode->function_symbol ();
242 if (cnode->global.inlined_to)
243 cnode = cnode->global.inlined_to;
244 return cnode;
245 }
246 else if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
247 return vnode->ultimate_alias_target ();
248 return node;
249 }
250
251 /* Add symbol NODE to partition. When definition of NODE is part
252 of other symbol definition, add the other symbol, too. */
253
254 static void
add_symbol_to_partition(ltrans_partition part,symtab_node * node)255 add_symbol_to_partition (ltrans_partition part, symtab_node *node)
256 {
257 symtab_node *node1;
258
259 /* Verify that we do not try to duplicate something that can not be. */
260 gcc_checking_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
261 || !symbol_partitioned_p (node));
262
263 while ((node1 = contained_in_symbol (node)) != node)
264 node = node1;
265
266 /* If we have duplicated symbol contained in something we can not duplicate,
267 we are very badly screwed. The other way is possible, so we do not
268 assert this in add_symbol_to_partition_1.
269
270 Be lax about comdats; they may or may not be duplicated and we may
271 end up in need to duplicate keyed comdat because it has unkeyed alias. */
272
273 gcc_assert (node->get_partitioning_class () == SYMBOL_DUPLICATE
274 || DECL_COMDAT (node->decl)
275 || !symbol_partitioned_p (node));
276
277 add_symbol_to_partition_1 (part, node);
278 }
279
280 /* Undo all additions until number of cgraph nodes in PARITION is N_CGRAPH_NODES
281 and number of varpool nodes is N_VARPOOL_NODES. */
282
/* Undo additions to PARTITION until it contains only its first N_NODES
   symtab nodes, reverting the symbol counters, instruction-size
   accounting and the per-node aux membership counts.  */

static void
undo_partition (ltrans_partition partition, unsigned int n_nodes)
{
  while (lto_symtab_encoder_size (partition->encoder) > (int)n_nodes)
    {
      /* Always remove the node at index N_NODES; the encoder shrinks by
	 one on each iteration.  */
      symtab_node *node = lto_symtab_encoder_deref (partition->encoder,
						    n_nodes);
      partition->symbols--;
      cgraph_node *cnode;

      /* After UNDO we no longer know what was visited.  */
      if (partition->initializers_visited)
	delete partition->initializers_visited;
      partition->initializers_visited = NULL;

      /* Subtract the size of real (non-alias) function bodies that were
	 accounted when added.  */
      if (!node->alias && (cnode = dyn_cast <cgraph_node *> (node))
	  && node->get_partitioning_class () == SYMBOL_PARTITION)
	partition->insns -= ipa_fn_summaries->get (cnode)->size;
      lto_symtab_encoder_delete_node (partition->encoder, node);
      node->aux = (void *)((size_t)node->aux - 1);
    }
}
305
306 /* Group cgrah nodes by input files. This is used mainly for testing
307 right now. */
308
309 void
lto_1_to_1_map(void)310 lto_1_to_1_map (void)
311 {
312 symtab_node *node;
313 struct lto_file_decl_data *file_data;
314 hash_map<lto_file_decl_data *, ltrans_partition> pmap;
315 ltrans_partition partition;
316 int npartitions = 0;
317
318 FOR_EACH_SYMBOL (node)
319 {
320 if (node->get_partitioning_class () != SYMBOL_PARTITION
321 || symbol_partitioned_p (node))
322 continue;
323
324 file_data = node->lto_file_data;
325
326 if (file_data)
327 {
328 ltrans_partition *slot = &pmap.get_or_insert (file_data);
329 if (*slot)
330 partition = *slot;
331 else
332 {
333 partition = new_partition (file_data->file_name);
334 *slot = partition;
335 npartitions++;
336 }
337 }
338 else if (!file_data && ltrans_partitions.length ())
339 partition = ltrans_partitions[0];
340 else
341 {
342 partition = new_partition ("");
343 pmap.put (NULL, partition);
344 npartitions++;
345 }
346
347 add_symbol_to_partition (partition, node);
348 }
349
350 /* If the cgraph is empty, create one cgraph node set so that there is still
351 an output file for any variables that need to be exported in a DSO. */
352 if (!npartitions)
353 new_partition ("empty");
354
355 /* Order partitions by order of symbols because they are linked into binary
356 that way. */
357 ltrans_partitions.qsort (cmp_partitions_order);
358 }
359
360 /* Maximal partitioning. Put every new symbol into new partition if possible. */
361
362 void
lto_max_map(void)363 lto_max_map (void)
364 {
365 symtab_node *node;
366 ltrans_partition partition;
367 int npartitions = 0;
368
369 FOR_EACH_SYMBOL (node)
370 {
371 if (node->get_partitioning_class () != SYMBOL_PARTITION
372 || symbol_partitioned_p (node))
373 continue;
374 partition = new_partition (node->asm_name ());
375 add_symbol_to_partition (partition, node);
376 npartitions++;
377 }
378 if (!npartitions)
379 new_partition ("empty");
380 }
381
382 /* Helper function for qsort; sort nodes by order. noreorder functions must have
383 been removed earlier. */
384 static int
node_cmp(const void * pa,const void * pb)385 node_cmp (const void *pa, const void *pb)
386 {
387 const struct cgraph_node *a = *(const struct cgraph_node * const *) pa;
388 const struct cgraph_node *b = *(const struct cgraph_node * const *) pb;
389
390 /* Profile reorder flag enables function reordering based on first execution
391 of a function. All functions with profile are placed in ascending
392 order at the beginning. */
393
394 if (flag_profile_reorder_functions)
395 {
396 /* Functions with time profile are sorted in ascending order. */
397 if (a->tp_first_run && b->tp_first_run)
398 return a->tp_first_run != b->tp_first_run
399 ? a->tp_first_run - b->tp_first_run
400 : a->order - b->order;
401
402 /* Functions with time profile are sorted before the functions
403 that do not have the profile. */
404 if (a->tp_first_run || b->tp_first_run)
405 return b->tp_first_run - a->tp_first_run;
406 }
407
408 return b->order - a->order;
409 }
410
411 /* Helper function for qsort; sort nodes by order. */
412 static int
varpool_node_cmp(const void * pa,const void * pb)413 varpool_node_cmp (const void *pa, const void *pb)
414 {
415 const symtab_node *a = *static_cast<const symtab_node * const *> (pa);
416 const symtab_node *b = *static_cast<const symtab_node * const *> (pb);
417 return b->order - a->order;
418 }
419
420 /* Add all symtab nodes from NEXT_NODE to PARTITION in order. */
421
422 static void
add_sorted_nodes(vec<symtab_node * > & next_nodes,ltrans_partition partition)423 add_sorted_nodes (vec<symtab_node *> &next_nodes, ltrans_partition partition)
424 {
425 unsigned i;
426 symtab_node *node;
427
428 next_nodes.qsort (varpool_node_cmp);
429 FOR_EACH_VEC_ELT (next_nodes, i, node)
430 if (!symbol_partitioned_p (node))
431 add_symbol_to_partition (partition, node);
432 }
433
434 /* Return true if we should account reference from N1 to N2 in cost
435 of partition boundary. */
436
/* Return true if we should account reference from N1 to N2 in cost
   of partition boundary.  */

bool
account_reference_p (symtab_node *n1, symtab_node *n2)
{
  /* NOTE(review): this assignment looks like a no-op upcast (cnode is
     already N1); presumably kept for symmetry/future use — confirm
     against upstream history before changing.  */
  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (n1))
    n1 = cnode;
  /* Do not account references from aliases - they are never split across
     partitions.  */
  if (n1->alias)
    return false;
  /* Do not account recursion - the code below will handle it incorrectly
     otherwise.  Do not account references to external symbols: they will
     never become local.  Finally do not account references to duplicated
     symbols: they will be always local.  */
  if (n1 == n2
      || !n2->definition
      || n2->get_partitioning_class () != SYMBOL_PARTITION)
    return false;
  /* If referring node is external symbol do not account it to boundary
     cost.  Those are added into units only to enable possible constant
     folding and devirtualization.

     Here we do not know if it will ever be added to some partition
     (this is decided by compute_ltrans_boundary) and second it is not
     that likely that constant folding will actually use the reference.  */
  if (contained_in_symbol (n1)
      ->get_partitioning_class () == SYMBOL_EXTERNAL)
    return false;
  return true;
}
466
467
468 /* Group cgraph nodes into equally-sized partitions.
469
470 The partitioning algorithm is simple: nodes are taken in predefined order.
471 The order corresponds to the order we want functions to have in the final
472 output. In the future this will be given by function reordering pass, but
473 at the moment we use the topological order, which is a good approximation.
474
475 The goal is to partition this linear order into intervals (partitions) so
476 that all the partitions have approximately the same size and the number of
477 callgraph or IPA reference edges crossing boundaries is minimal.
478
479 This is a lot faster (O(n) in size of callgraph) than algorithms doing
480 priority-based graph clustering that are generally O(n^2) and, since
481 WHOPR is designed to make things go well across partitions, it leads
482 to good results.
483
484 We compute the expected size of a partition as:
485
486 max (total_size / lto_partitions, min_partition_size)
487
488 We use dynamic expected size of partition so small programs are partitioned
489 into enough partitions to allow use of multiple CPUs, while large programs
490 are not partitioned too much. Creating too many partitions significantly
491 increases the streaming overhead.
492
493 In the future, we would like to bound the maximal size of partitions so as
494 to prevent the LTRANS stage from consuming too much memory. At the moment,
495 however, the WPA stage is the most memory intensive for large benchmarks,
496 since too many types and declarations are read into memory.
497
498 The function implements a simple greedy algorithm. Nodes are being added
499 to the current partition until after 3/4 of the expected partition size is
500 reached. Past this threshold, we keep track of boundary size (number of
501 edges going to other partitions) and continue adding functions until after
502 the current partition has grown to twice the expected partition size. Then
503 the process is undone to the point where the minimal ratio of boundary size
504 and in-partition calls was reached. */
505
void
lto_balanced_map (int n_lto_partitions, int max_partition_size)
{
  int n_varpool_nodes = 0, varpool_pos = 0, best_varpool_pos = 0;
  auto_vec <cgraph_node *> order (symtab->cgraph_count);
  auto_vec<cgraph_node *> noreorder;
  auto_vec<varpool_node *> varpool_order;
  struct cgraph_node *node;
  int64_t original_total_size, total_size = 0;
  int64_t partition_size;
  ltrans_partition partition;
  int last_visited_node = 0;
  varpool_node *vnode;
  /* COST is the number of edges crossing the current partition boundary;
     INTERNAL counts edges fully inside it.  */
  int64_t cost = 0, internal = 0;
  unsigned int best_n_nodes = 0, best_i = 0;
  int64_t best_cost = -1, best_internal = 0, best_size = 0;
  int npartitions;
  int current_order = -1;
  int noreorder_pos = 0;

  /* aux must start clear; it is used as a membership counter.  */
  FOR_EACH_VARIABLE (vnode)
    gcc_assert (!vnode->aux);

  /* Split functions into the reorderable ORDER vector and NOREORDER,
     accumulating the total body size of real (non-alias) functions.  */
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->get_partitioning_class () == SYMBOL_PARTITION)
      {
	if (node->no_reorder)
	  noreorder.safe_push (node);
	else
	  order.safe_push (node);
	if (!node->alias)
	  total_size += ipa_fn_summaries->get (node)->size;
      }

  original_total_size = total_size;

  /* Streaming works best when the source units do not cross partition
     boundaries much.  This is because importing function from a source
     unit tends to import a lot of global trees defined there.  We should
     get better about minimizing the function boundary, but until that
     things works smoother if we order in source order.  */
  order.qsort (node_cmp);
  noreorder.qsort (node_cmp);

  if (symtab->dump_file)
    {
      for (unsigned i = 0; i < order.length (); i++)
	fprintf (symtab->dump_file, "Balanced map symbol order:%s:%u\n",
		 order[i]->name (), order[i]->tp_first_run);
      for (unsigned i = 0; i < noreorder.length (); i++)
	fprintf (symtab->dump_file, "Balanced map symbol no_reorder:%s:%u\n",
		 noreorder[i]->name (), noreorder[i]->tp_first_run);
    }

  /* Collect all variables that should not be reordered.  */
  FOR_EACH_VARIABLE (vnode)
    if (vnode->get_partitioning_class () == SYMBOL_PARTITION
	&& vnode->no_reorder)
      varpool_order.safe_push (vnode);
  n_varpool_nodes = varpool_order.length ();
  varpool_order.qsort (varpool_node_cmp);

  /* Compute partition size and create the first partition.  */
  if (PARAM_VALUE (MIN_PARTITION_SIZE) > max_partition_size)
    fatal_error (input_location, "min partition size cannot be greater "
		 "than max partition size");

  partition_size = total_size / n_lto_partitions;
  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
  npartitions = 1;
  partition = new_partition ("");
  if (symtab->dump_file)
    fprintf (symtab->dump_file, "Total unit size: %" PRId64
	     ", partition size: %" PRId64 "\n",
	     total_size, partition_size);

  auto_vec<symtab_node *> next_nodes;

  for (unsigned i = 0; i < order.length (); i++)
    {
      if (symbol_partitioned_p (order[i]))
	continue;

      current_order = order[i]->order;

      /* Output noreorder and varpool in program order first.  */
      next_nodes.truncate (0);
      while (varpool_pos < n_varpool_nodes
	     && varpool_order[varpool_pos]->order < current_order)
	next_nodes.safe_push (varpool_order[varpool_pos++]);
      while (noreorder_pos < (int)noreorder.length ()
	     && noreorder[noreorder_pos]->order < current_order)
	next_nodes.safe_push (noreorder[noreorder_pos++]);
      add_sorted_nodes (next_nodes, partition);

      /* add_sorted_nodes may already have pulled ORDER[i] in.  */
      if (!symbol_partitioned_p (order[i]))
        add_symbol_to_partition (partition, order[i]);


      /* Once we added a new node to the partition, we also want to add
         all referenced variables unless they were already added into some
         earlier partition.
	 add_symbol_to_partition adds possibly multiple nodes and
	 variables that are needed to satisfy needs of ORDER[i].
         We remember last visited cgraph and varpool node from last iteration
         of outer loop that allows us to process every new addition.

	 At the same time we compute size of the boundary into COST.  Every
	 callgraph or IPA reference edge leaving the partition contributes into
	 COST.  Every edge inside partition was earlier computed as one leaving
	 it and thus we need to subtract it from COST.  */
      while (last_visited_node < lto_symtab_encoder_size (partition->encoder))
	{
	  int j;
	  struct ipa_ref *ref = NULL;
	  symtab_node *snode = lto_symtab_encoder_deref (partition->encoder,
							last_visited_node);

	  if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	    {
	      struct cgraph_edge *edge;


	      last_visited_node++;

	      gcc_assert (node->definition || node->weakref);

	      /* Compute boundary cost of callgraph edges.  */
	      for (edge = node->callees; edge; edge = edge->next_callee)
		/* Inline edges will always end up local.  */
		if (edge->inline_failed
		    && account_reference_p (node, edge->callee))
		  {
		    int edge_cost = edge->frequency ();
		    int index;

		    if (!edge_cost)
		      edge_cost = 1;
		    gcc_assert (edge_cost > 0);
		    index = lto_symtab_encoder_lookup (partition->encoder,
						       edge->callee);
		    if (index != LCC_NOT_FOUND
		        && index < last_visited_node - 1)
		      cost -= edge_cost, internal += edge_cost;
		    else
		      cost += edge_cost;
		  }
	      for (edge = node->callers; edge; edge = edge->next_caller)
		if (edge->inline_failed
		    && account_reference_p (edge->caller, node))
		  {
		    int edge_cost = edge->frequency ();
		    int index;

		    gcc_assert (edge->caller->definition);
		    if (!edge_cost)
		      edge_cost = 1;
		    gcc_assert (edge_cost > 0);
		    index = lto_symtab_encoder_lookup (partition->encoder,
						       edge->caller);
		    if (index != LCC_NOT_FOUND
			&& index < last_visited_node - 1)
		      cost -= edge_cost, internal += edge_cost;
		    else
		      cost += edge_cost;
		  }
	    }
	  else
	    last_visited_node++;

	  /* Compute boundary cost of IPA REF edges and at the same time look into
	     variables referenced from current partition and try to add them.  */
	  for (j = 0; snode->iterate_reference (j, ref); j++)
	    if (!account_reference_p (snode, ref->referred))
	      ;
	    else if (is_a <varpool_node *> (ref->referred))
	      {
		int index;

		vnode = dyn_cast <varpool_node *> (ref->referred);
		if (!symbol_partitioned_p (vnode)
		    && !vnode->no_reorder
		    && vnode->get_partitioning_class () == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = dyn_cast <cgraph_node *> (ref->referred);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	  for (j = 0; snode->iterate_referring (j, ref); j++)
	    if (!account_reference_p (ref->referring, snode))
	      ;
	    else if (is_a <varpool_node *> (ref->referring))
	      {
		int index;

		vnode = dyn_cast <varpool_node *> (ref->referring);
		gcc_assert (vnode->definition);
		/* It is better to couple variables with their users,
		   because it allows them to be removed.  Coupling
		   with objects they refer to only helps to reduce
		   number of symbols promoted to hidden.  */
		if (!symbol_partitioned_p (vnode)
		    && !vnode->no_reorder
		    && !vnode->can_remove_if_no_refs_p ()
		    && vnode->get_partitioning_class () == SYMBOL_PARTITION)
		  add_symbol_to_partition (partition, vnode);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   vnode);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	    else
	      {
		int index;

		node = dyn_cast <cgraph_node *> (ref->referring);
		gcc_assert (node->definition);
		index = lto_symtab_encoder_lookup (partition->encoder,
						   node);
		if (index != LCC_NOT_FOUND
		    && index < last_visited_node - 1)
		  cost--, internal++;
		else
		  cost++;
	      }
	}

      gcc_assert (cost >= 0 && internal >= 0);

      /* If the partition is large enough, start looking for smallest boundary cost.
         If partition still seems too small (less than 7/8 of target weight) accept
	 any cost.  If partition has right size, optimize for highest internal/cost.
	 Later we stop building partition if its size is 9/8 of the target weight.  */
      if (partition->insns < partition_size * 7 / 8
	  || best_cost == -1
	  || (!cost
	      || ((sreal)best_internal * (sreal) cost
		  < ((sreal) internal * (sreal)best_cost))))
	{
	  best_cost = cost;
	  best_internal = internal;
	  best_size = partition->insns;
	  best_i = i;
	  best_n_nodes = lto_symtab_encoder_size (partition->encoder);
	  best_varpool_pos = varpool_pos;
	}
      if (symtab->dump_file)
	fprintf (symtab->dump_file, "Step %i: added %s/%i, size %i, "
		 "cost %" PRId64 "/%" PRId64 " "
		 "best %" PRId64 "/%" PRId64", step %i\n", i,
		 order[i]->name (), order[i]->order,
		 partition->insns, cost, internal,
		 best_cost, best_internal, best_i);
      /* Partition is too large, unwind into step when best cost was reached and
	 start new partition.  */
      if (partition->insns > 9 * partition_size / 8
	  || partition->insns > max_partition_size)
	{
	  if (best_i != i)
	    {
	      if (symtab->dump_file)
		fprintf (symtab->dump_file, "Unwinding %i insertions to step %i\n",
			 i - best_i, best_i);
	      undo_partition (partition, best_n_nodes);
	      varpool_pos = best_varpool_pos;
	    }
	  gcc_assert (best_size == partition->insns);
	  i = best_i;
	  if (symtab->dump_file)
	    fprintf (symtab->dump_file,
		     "Partition insns: %i (want %" PRId64 ")\n",
		     partition->insns, partition_size);
	  /* When we are finished, avoid creating empty partition.  */
	  while (i < order.length () - 1 && symbol_partitioned_p (order[i + 1]))
	    i++;
	  if (i == order.length () - 1)
	    break;
	  total_size -= partition->insns;
	  partition = new_partition ("");
	  last_visited_node = 0;
	  cost = 0;

	  if (symtab->dump_file)
	    fprintf (symtab->dump_file, "New partition\n");
	  best_n_nodes = 0;
	  best_cost = -1;

	  /* Since the size of partitions is just approximate, update the size after
	     we finished current one.  */
	  if (npartitions < n_lto_partitions)
	    partition_size = total_size / (n_lto_partitions - npartitions);
	  else
	    /* Watch for overflow.  */
	    partition_size = INT_MAX / 16;

	  if (symtab->dump_file)
	    fprintf (symtab->dump_file,
		     "Total size: %" PRId64 " partition_size: %" PRId64 "\n",
		     total_size, partition_size);
	  if (partition_size < PARAM_VALUE (MIN_PARTITION_SIZE))
	    partition_size = PARAM_VALUE (MIN_PARTITION_SIZE);
	  npartitions ++;
	}
    }

  next_nodes.truncate (0);

  /* Variables that are not reachable from the code go into last partition.  */
  FOR_EACH_VARIABLE (vnode)
    if (vnode->get_partitioning_class () == SYMBOL_PARTITION
	&& !symbol_partitioned_p (vnode))
      next_nodes.safe_push (vnode);

  /* Output remaining ordered symbols.  */
  while (varpool_pos < n_varpool_nodes)
    next_nodes.safe_push (varpool_order[varpool_pos++]);
  while (noreorder_pos < (int)noreorder.length ())
    next_nodes.safe_push (noreorder[noreorder_pos++]);
  /* For one partition the cost of boundary should be 0 unless we added final
     symbols here (these are not accounted) or we have accounting bug.  */
  gcc_assert (next_nodes.length () || npartitions != 1 || !best_cost || best_cost == -1);
  add_sorted_nodes (next_nodes, partition);

  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "\nPartition sizes:\n");
      unsigned partitions = ltrans_partitions.length ();

      for (unsigned i = 0; i < partitions ; i++)
	{
	  ltrans_partition p = ltrans_partitions[i];
	  fprintf (symtab->dump_file, "partition %d contains %d (%2.2f%%)"
		   " symbols and %d (%2.2f%%) insns\n", i, p->symbols,
		   100.0 * p->symbols / order.length (), p->insns,
		   100.0 * p->insns / original_total_size);
	}

      fprintf (symtab->dump_file, "\n");
    }
}
867
868 /* Return true if we must not change the name of the NODE. The name as
869 extracted from the corresponding decl should be passed in NAME. */
870
871 static bool
must_not_rename(symtab_node * node,const char * name)872 must_not_rename (symtab_node *node, const char *name)
873 {
874 /* Our renaming machinery do not handle more than one change of assembler name.
875 We should not need more than one anyway. */
876 if (node->lto_file_data
877 && lto_get_decl_name_mapping (node->lto_file_data, name) != name)
878 {
879 if (symtab->dump_file)
880 fprintf (symtab->dump_file,
881 "Not privatizing symbol name: %s. It privatized already.\n",
882 name);
883 return true;
884 }
885 /* Avoid mangling of already mangled clones.
886 ??? should have a flag whether a symbol has a 'private' name already,
887 since we produce some symbols like that i.e. for global constructors
888 that are not really clones. */
889 if (node->unique_name)
890 {
891 if (symtab->dump_file)
892 fprintf (symtab->dump_file,
893 "Not privatizing symbol name: %s. Has unique name.\n",
894 name);
895 return true;
896 }
897 return false;
898 }
899
900 /* If we are an offload compiler, we may have to rewrite symbols to be
901 valid on this target. Return either PTR or a modified version of it. */
902
/* If we are an offload compiler, we may have to rewrite symbols to be
   valid on this target.  Return either PTR or a modified version of it.
   On hosts allowing both '.' and '$' in labels this is an identity
   function; otherwise each rejected character is replaced in a lazily
   made copy of PTR.  */

static const char *
maybe_rewrite_identifier (const char *ptr)
{
#if defined ACCEL_COMPILER && (defined NO_DOT_IN_LABEL || defined NO_DOLLAR_IN_LABEL)
#ifndef NO_DOT_IN_LABEL
  char valid = '.';
  const char reject[] = "$";
#elif !defined NO_DOLLAR_IN_LABEL
  char valid = '$';
  const char reject[] = ".";
#else
  char valid = '_';
  const char reject[] = ".$";
#endif

  char *copy = NULL;
  const char *match = ptr;
  for (;;)
    {
      /* Find the next rejected character; stop at the terminator.  */
      size_t off = strcspn (match, reject);
      if (match[off] == '\0')
	break;
      /* Copy lazily: only allocate once a rewrite is actually needed.  */
      if (copy == NULL)
	{
	  copy = xstrdup (ptr);
	  match = copy;
	}
      copy[off] = valid;
    }
  return match;
#else
  return ptr;
#endif
}
937
938 /* Ensure that the symbol in NODE is valid for the target, and if not,
939 rewrite it. */
940
941 static void
validize_symbol_for_target(symtab_node * node)942 validize_symbol_for_target (symtab_node *node)
943 {
944 tree decl = node->decl;
945 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
946
947 if (must_not_rename (node, name))
948 return;
949
950 const char *name2 = maybe_rewrite_identifier (name);
951 if (name2 != name)
952 {
953 symtab->change_decl_assembler_name (decl, get_identifier (name2));
954 if (node->lto_file_data)
955 lto_record_renamed_decl (node->lto_file_data, name,
956 IDENTIFIER_POINTER
957 (DECL_ASSEMBLER_NAME (decl)));
958 }
959 }
960
961 /* Helper for privatize_symbol_name. Mangle NODE symbol name
962 represented by DECL. */
963
964 static bool
privatize_symbol_name_1(symtab_node * node,tree decl)965 privatize_symbol_name_1 (symtab_node *node, tree decl)
966 {
967 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
968
969 if (must_not_rename (node, name))
970 return false;
971
972 name = maybe_rewrite_identifier (name);
973 symtab->change_decl_assembler_name (decl,
974 clone_function_name_1 (name,
975 "lto_priv"));
976
977 if (node->lto_file_data)
978 lto_record_renamed_decl (node->lto_file_data, name,
979 IDENTIFIER_POINTER
980 (DECL_ASSEMBLER_NAME (decl)));
981
982 if (symtab->dump_file)
983 fprintf (symtab->dump_file,
984 "Privatizing symbol name: %s -> %s\n",
985 name, IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
986
987 return true;
988 }
989
/* Mangle NODE symbol name into a local name.
   This is necessary to do
   1) if two or more static vars of same assembler name
      are merged into single ltrans unit.
   2) if previously static var was promoted hidden to avoid possible conflict
      with symbols defined out of the LTO world.
   Return true if the symbol was actually renamed.  */

static bool
privatize_symbol_name (symtab_node *node)
{
  if (!privatize_symbol_name_1 (node, node->decl))
    return false;

  /* We could change name which is a target of transparent alias
     chain of instrumented function name.  Fix alias chain if so.  */
  if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
    {
      tree iname = NULL_TREE;
      if (cnode->instrumentation_clone)
	{
	  /* If we want to privatize instrumentation clone
	     then we also need to privatize original function.
	     Recurse via privatize_symbol_name when the original still has
	     a symtab node; otherwise rename its bare decl directly.  */
	  if (cnode->instrumented_version)
	    privatize_symbol_name (cnode->instrumented_version);
	  else
	    privatize_symbol_name_1 (cnode, cnode->orig_decl);
	  /* Re-link the transparent alias: chain the clone's new assembler
	     name identifier to the original decl's assembler name.  */
	  iname = DECL_ASSEMBLER_NAME (cnode->decl);
	  TREE_CHAIN (iname) = DECL_ASSEMBLER_NAME (cnode->orig_decl);
	}
      else if (cnode->instrumented_version
	       && cnode->instrumented_version->orig_decl == cnode->decl)
	{
	  /* NODE is the original of an instrumentation clone; point the
	     clone's assembler-name chain at NODE's new name.  */
	  iname = DECL_ASSEMBLER_NAME (cnode->instrumented_version->decl);
	  TREE_CHAIN (iname) = DECL_ASSEMBLER_NAME (cnode->decl);
	}
    }

  return true;
}
1029
1030 /* Promote variable VNODE to be static. */
1031
1032 static void
promote_symbol(symtab_node * node)1033 promote_symbol (symtab_node *node)
1034 {
1035 /* We already promoted ... */
1036 if (DECL_VISIBILITY (node->decl) == VISIBILITY_HIDDEN
1037 && DECL_VISIBILITY_SPECIFIED (node->decl)
1038 && TREE_PUBLIC (node->decl))
1039 {
1040 validize_symbol_for_target (node);
1041 return;
1042 }
1043
1044 gcc_checking_assert (!TREE_PUBLIC (node->decl)
1045 && !DECL_EXTERNAL (node->decl));
1046 /* Be sure that newly public symbol does not conflict with anything already
1047 defined by the non-LTO part. */
1048 privatize_symbol_name (node);
1049 TREE_PUBLIC (node->decl) = 1;
1050 DECL_VISIBILITY (node->decl) = VISIBILITY_HIDDEN;
1051 DECL_VISIBILITY_SPECIFIED (node->decl) = true;
1052 if (symtab->dump_file)
1053 fprintf (symtab->dump_file,
1054 "Promoting as hidden: %s (%s)\n", node->name (),
1055 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
1056
1057 /* Promoting a symbol also promotes all transparent aliases with exception
1058 of weakref where the visibility flags are always wrong and set to
1059 !PUBLIC. */
1060 ipa_ref *ref;
1061 for (unsigned i = 0; node->iterate_direct_aliases (i, ref); i++)
1062 {
1063 struct symtab_node *alias = ref->referring;
1064 if (alias->transparent_alias && !alias->weakref)
1065 {
1066 TREE_PUBLIC (alias->decl) = 1;
1067 DECL_VISIBILITY (alias->decl) = VISIBILITY_HIDDEN;
1068 DECL_VISIBILITY_SPECIFIED (alias->decl) = true;
1069 if (symtab->dump_file)
1070 fprintf (symtab->dump_file,
1071 "Promoting alias as hidden: %s\n",
1072 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
1073 }
1074 gcc_assert (!alias->weakref || TREE_PUBLIC (alias->decl));
1075 }
1076 }
1077
1078 /* Return true if NODE needs named section even if it won't land in
1079 the partition symbol table.
1080
1081 FIXME: we should really not use named sections for inline clones
1082 and master clones. */
1083
1084 static bool
may_need_named_section_p(lto_symtab_encoder_t encoder,symtab_node * node)1085 may_need_named_section_p (lto_symtab_encoder_t encoder, symtab_node *node)
1086 {
1087 struct cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1088 if (!cnode)
1089 return false;
1090 if (node->real_symbol_p ())
1091 return false;
1092 return (!encoder
1093 || (lto_symtab_encoder_lookup (encoder, node) != LCC_NOT_FOUND
1094 && lto_symtab_encoder_encode_body_p (encoder,
1095 cnode)));
1096 }
1097
/* If NODE represents a static variable.  See if there are other variables
   of the same name in partition ENCODER (or in whole compilation unit if
   ENCODER is NULL) and if so, mangle the statics.  Always mangle all
   conflicting statics, so we reduce changes of silently miscompiling
   asm statements referring to them by symbol name.  */

static void
rename_statics (lto_symtab_encoder_t encoder, symtab_node *node)
{
  tree decl = node->decl;
  symtab_node *s;
  tree name = DECL_ASSEMBLER_NAME (decl);

  /* See if this is static symbol.  */
  if (((node->externally_visible && !node->weakref)
      /* FIXME: externally_visible is somewhat illogically not set for
	 external symbols (i.e. those not defined).  Remove this test
	 once this is fixed.  */
        || DECL_EXTERNAL (node->decl)
        || !node->real_symbol_p ())
       && !may_need_named_section_p (encoder, node))
    return;

  /* Now walk symbols sharing the same name and see if there are any conflicts.
     (all types of symbols counts here, since we can not have static of the
     same name as external or public symbol.)  */
  for (s = symtab_node::get_for_asmname (name);
       s; s = s->next_sharing_asm_name)
    if ((s->real_symbol_p () || may_need_named_section_p (encoder, s))
	&& s->decl != node->decl
	&& (!encoder
	    || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      break;

  /* OK, no confict, so we have nothing to do.  */
  if (!s)
    return;

  if (symtab->dump_file)
    fprintf (symtab->dump_file,
	    "Renaming statics with asm name: %s\n", node->name ());

  /* Assign every symbol in the set that shares the same ASM name an unique
     mangled name.  */
  for (s = symtab_node::get_for_asmname (name); s;)
    /* Only rename local symbols (or weakrefs, whose visibility flags are
       not meaningful) ...  */
    if ((!s->externally_visible || s->weakref)
	/* Transparent aliases having same name as target are renamed at a
	   time their target gets new name.  Transparent aliases that use
	   separate assembler name require the name to be unique.  */
	&& (!s->transparent_alias || !s->definition || s->weakref
	    || !symbol_table::assembler_names_equal_p
		 (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (s->decl)),
		  IDENTIFIER_POINTER
		    (DECL_ASSEMBLER_NAME (s->get_alias_target()->decl))))
	/* ... that are real local definitions, or clones needing a named
	   section ...  */
	&& ((s->real_symbol_p ()
             && !DECL_EXTERNAL (s->decl)
	     && !TREE_PUBLIC (s->decl))
 	    || may_need_named_section_p (encoder, s))
	/* ... and that are present in the partition (if any).  */
	&& (!encoder
	    || lto_symtab_encoder_lookup (encoder, s) != LCC_NOT_FOUND))
      {
        if (privatize_symbol_name (s))
	  /* Re-start from beginning since we do not know how many
	     symbols changed a name.  */
	  s = symtab_node::get_for_asmname (name);
        else s = s->next_sharing_asm_name;
      }
    else s = s->next_sharing_asm_name;
}
1167
1168 /* Find out all static decls that need to be promoted to global because
1169 of cross file sharing. This function must be run in the WPA mode after
1170 all inlinees are added. */
1171
1172 void
lto_promote_cross_file_statics(void)1173 lto_promote_cross_file_statics (void)
1174 {
1175 unsigned i, n_sets;
1176
1177 gcc_assert (flag_wpa);
1178
1179 lto_stream_offload_p = false;
1180 select_what_to_stream ();
1181
1182 /* First compute boundaries. */
1183 n_sets = ltrans_partitions.length ();
1184 for (i = 0; i < n_sets; i++)
1185 {
1186 ltrans_partition part
1187 = ltrans_partitions[i];
1188 part->encoder = compute_ltrans_boundary (part->encoder);
1189 }
1190
1191 /* Look at boundaries and promote symbols as needed. */
1192 for (i = 0; i < n_sets; i++)
1193 {
1194 lto_symtab_encoder_iterator lsei;
1195 lto_symtab_encoder_t encoder = ltrans_partitions[i]->encoder;
1196
1197 for (lsei = lsei_start (encoder); !lsei_end_p (lsei);
1198 lsei_next (&lsei))
1199 {
1200 symtab_node *node = lsei_node (lsei);
1201
1202 /* If symbol is static, rename it if its assembler name
1203 clashes with anything else in this unit. */
1204 rename_statics (encoder, node);
1205
1206 /* No need to promote if symbol already is externally visible ... */
1207 if (node->externally_visible
1208 /* ... or if it is part of current partition ... */
1209 || lto_symtab_encoder_in_partition_p (encoder, node)
1210 /* ... or if we do not partition it. This mean that it will
1211 appear in every partition referencing it. */
1212 || node->get_partitioning_class () != SYMBOL_PARTITION)
1213 {
1214 validize_symbol_for_target (node);
1215 continue;
1216 }
1217
1218 promote_symbol (node);
1219 }
1220 }
1221 }
1222
1223 /* Rename statics in the whole unit in the case that
1224 we do -flto-partition=none. */
1225
1226 void
lto_promote_statics_nonwpa(void)1227 lto_promote_statics_nonwpa (void)
1228 {
1229 symtab_node *node;
1230 FOR_EACH_SYMBOL (node)
1231 {
1232 rename_statics (NULL, node);
1233 validize_symbol_for_target (node);
1234 }
1235 }
1236