1 /* Write and read the cgraph to the memory mapped representation of a
2 .o file.
3
4 Copyright (C) 2009-2020 Free Software Foundation, Inc.
5 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "predict.h"
31 #include "stringpool.h"
32 #include "tree-streamer.h"
33 #include "cgraph.h"
34 #include "tree-pass.h"
35 #include "profile.h"
36 #include "context.h"
37 #include "pass_manager.h"
38 #include "ipa-utils.h"
39 #include "omp-offload.h"
40 #include "stringpool.h"
41 #include "attribs.h"
42
/* True once the toplevel asm nodes have been streamed out.  */
bool asm_nodes_output = false;

/* Forward declarations for the optimization-summary streamers defined
   later in this file.  */
static void output_cgraph_opt_summary (void);
static void input_cgraph_opt_summary (vec<symtab_node *> nodes);

/* Number of LDPR values known to GCC.  */
#define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
51
/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
enum LTO_symtab_tags
{
  /* Must leave 0 for the stopper that terminates the record stream.  */

  /* Cgraph node without body available.  */
  LTO_symtab_unavail_node = 1,
  /* Cgraph node with function body.  */
  LTO_symtab_analyzed_node,
  /* Cgraph edges.  */
  LTO_symtab_edge,
  LTO_symtab_indirect_edge,
  /* Varpool node record.  */
  LTO_symtab_variable,
  /* Sentinel; must stay last so streamer_write_enum can range-check.  */
  LTO_symtab_last_tag
};
68
69 /* Create a new symtab encoder.
70 if FOR_INPUT, the encoder allocate only datastructures needed
71 to read the symtab. */
72
73 lto_symtab_encoder_t
lto_symtab_encoder_new(bool for_input)74 lto_symtab_encoder_new (bool for_input)
75 {
76 lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
77
78 if (!for_input)
79 encoder->map = new hash_map<symtab_node *, size_t>;
80 encoder->nodes.create (0);
81 return encoder;
82 }
83
84
85 /* Delete ENCODER and its components. */
86
87 void
lto_symtab_encoder_delete(lto_symtab_encoder_t encoder)88 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
89 {
90 encoder->nodes.release ();
91 if (encoder->map)
92 delete encoder->map;
93 free (encoder);
94 }
95
96
/* Return the existing reference number of NODE in the symtab encoder in
   output block OB.  Assign a new reference if this is the first time
   NODE is encoded.  */

int
lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
			   symtab_node *node)
{
  int ref;

  /* Input encoders (see lto_symtab_encoder_new) carry no map; nodes are
     simply appended in order and never looked up again.  */
  if (!encoder->map)
    {
      lto_encoder_entry entry = {node, false, false, false};

      ref = encoder->nodes.length ();
      encoder->nodes.safe_push (entry);
      return ref;
    }

  /* The map stores index + 1 so that value 0 means "not present".  */
  size_t *slot = encoder->map->get (node);
  if (!slot || !*slot)
    {
      lto_encoder_entry entry = {node, false, false, false};
      ref = encoder->nodes.length ();
      if (!slot)
	encoder->map->put (node, ref + 1);
      encoder->nodes.safe_push (entry);
    }
  else
    ref = *slot - 1;

  return ref;
}
130
131 /* Remove NODE from encoder. */
132
133 bool
lto_symtab_encoder_delete_node(lto_symtab_encoder_t encoder,symtab_node * node)134 lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
135 symtab_node *node)
136 {
137 int index;
138 lto_encoder_entry last_node;
139
140 size_t *slot = encoder->map->get (node);
141 if (slot == NULL || !*slot)
142 return false;
143
144 index = *slot - 1;
145 gcc_checking_assert (encoder->nodes[index].node == node);
146
147 /* Remove from vector. We do this by swapping node with the last element
148 of the vector. */
149 last_node = encoder->nodes.pop ();
150 if (last_node.node != node)
151 {
152 gcc_assert (encoder->map->put (last_node.node, index + 1));
153
154 /* Move the last element to the original spot of NODE. */
155 encoder->nodes[index] = last_node;
156 }
157
158 /* Remove element from hash table. */
159 encoder->map->remove (node);
160 return true;
161 }
162
163
164 /* Return TRUE if we should encode the body of NODE (if any). */
165
166 bool
lto_symtab_encoder_encode_body_p(lto_symtab_encoder_t encoder,struct cgraph_node * node)167 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
168 struct cgraph_node *node)
169 {
170 int index = lto_symtab_encoder_lookup (encoder, node);
171 return encoder->nodes[index].body;
172 }
173
174 /* Specify that we encode the body of NODE in this partition. */
175
176 static void
lto_set_symtab_encoder_encode_body(lto_symtab_encoder_t encoder,struct cgraph_node * node)177 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
178 struct cgraph_node *node)
179 {
180 int index = lto_symtab_encoder_encode (encoder, node);
181 gcc_checking_assert (encoder->nodes[index].node == node);
182 encoder->nodes[index].body = true;
183 }
184
185 /* Return TRUE if we should encode initializer of NODE (if any). */
186
187 bool
lto_symtab_encoder_encode_initializer_p(lto_symtab_encoder_t encoder,varpool_node * node)188 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
189 varpool_node *node)
190 {
191 int index = lto_symtab_encoder_lookup (encoder, node);
192 if (index == LCC_NOT_FOUND)
193 return false;
194 return encoder->nodes[index].initializer;
195 }
196
197 /* Specify that we should encode initializer of NODE (if any). */
198
199 static void
lto_set_symtab_encoder_encode_initializer(lto_symtab_encoder_t encoder,varpool_node * node)200 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
201 varpool_node *node)
202 {
203 int index = lto_symtab_encoder_lookup (encoder, node);
204 encoder->nodes[index].initializer = true;
205 }
206
207 /* Return TRUE if NODE is in this partition. */
208
209 bool
lto_symtab_encoder_in_partition_p(lto_symtab_encoder_t encoder,symtab_node * node)210 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
211 symtab_node *node)
212 {
213 int index = lto_symtab_encoder_lookup (encoder, node);
214 if (index == LCC_NOT_FOUND)
215 return false;
216 return encoder->nodes[index].in_partition;
217 }
218
219 /* Specify that NODE is in this partition. */
220
221 void
lto_set_symtab_encoder_in_partition(lto_symtab_encoder_t encoder,symtab_node * node)222 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
223 symtab_node *node)
224 {
225 int index = lto_symtab_encoder_encode (encoder, node);
226 encoder->nodes[index].in_partition = true;
227 }
228
/* Output the cgraph EDGE to OB using ENCODER.  The exact order of writes
   here defines the on-disk format and must match the reader.  */

static void
lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
		 lto_symtab_encoder_t encoder)
{
  unsigned int uid;
  intptr_t ref;
  struct bitpack_d bp;

  /* Tag first, so the reader can dispatch on direct vs. indirect edge.  */
  if (edge->indirect_unknown_callee)
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_indirect_edge);
  else
    streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
			 LTO_symtab_edge);

  /* The caller must already be in the encoder.  */
  ref = lto_symtab_encoder_lookup (encoder, edge->caller);
  gcc_assert (ref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, ref);

  /* Only direct edges have a known callee to stream.  */
  if (!edge->indirect_unknown_callee)
    {
      ref = lto_symtab_encoder_lookup (encoder, edge->callee);
      gcc_assert (ref != LCC_NOT_FOUND);
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  edge->count.stream_out (ob->main_stream);

  bp = bitpack_create (ob->main_stream);
  /* Statement UIDs are biased by 1 so that 0 can mean "no statement".
     Edges whose body is not in gimple yet carry a pre-recorded
     lto_stmt_uid instead.  */
  uid = !edge->call_stmt ? edge->lto_stmt_uid
			 : gimple_uid (edge->call_stmt) + 1;
  bp_pack_enum (&bp, cgraph_inline_failed_t,
		CIF_N_REASONS, edge->inline_failed);
  /* Only thunk edges may legitimately lack a statement UID.  */
  gcc_checking_assert (uid || edge->caller->thunk.thunk_p);
  bp_pack_var_len_unsigned (&bp, uid);
  bp_pack_value (&bp, edge->speculative_id, 16);
  bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
  bp_pack_value (&bp, edge->speculative, 1);
  bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
  gcc_assert (!edge->call_stmt_cannot_inline_p
	      || edge->inline_failed != CIF_BODY_NOT_AVAILABLE);
  bp_pack_value (&bp, edge->can_throw_external, 1);
  bp_pack_value (&bp, edge->in_polymorphic_cdtor, 1);
  if (edge->indirect_unknown_callee)
    {
      /* Stream the ECF flags that are meaningful for an unresolved call
	 site, one bit each.  */
      int flags = edge->indirect_info->ecf_flags;
      bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
      bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
      /* Flags that should not appear on indirect calls.  */
      gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
			     | ECF_MAY_BE_ALLOCA
			     | ECF_SIBCALL
			     | ECF_LEAF
			     | ECF_NOVOPS)));

      bp_pack_value (&bp, edge->indirect_info->num_speculative_call_targets,
		     16);
    }
  streamer_write_bitpack (&bp);
}
295
296 /* Return if NODE contain references from other partitions. */
297
298 bool
referenced_from_other_partition_p(symtab_node * node,lto_symtab_encoder_t encoder)299 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
300 {
301 int i;
302 struct ipa_ref *ref = NULL;
303
304 for (i = 0; node->iterate_referring (i, ref); i++)
305 {
306 /* Ignore references from non-offloadable nodes while streaming NODE into
307 offload LTO section. */
308 if (!ref->referring->need_lto_streaming)
309 continue;
310
311 if (ref->referring->in_other_partition
312 || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
313 return true;
314 }
315 return false;
316 }
317
318 /* Return true when node is reachable from other partition. */
319
320 bool
reachable_from_other_partition_p(struct cgraph_node * node,lto_symtab_encoder_t encoder)321 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
322 {
323 struct cgraph_edge *e;
324 if (!node->definition)
325 return false;
326 if (node->inlined_to)
327 return false;
328 for (e = node->callers; e; e = e->next_caller)
329 {
330 /* Ignore references from non-offloadable nodes while streaming NODE into
331 offload LTO section. */
332 if (!e->caller->need_lto_streaming)
333 continue;
334
335 if (e->caller->in_other_partition
336 || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
337 return true;
338 }
339 return false;
340 }
341
/* Return true if NODE is referenced from within this partition, i.e. by
   some node ENCODER marks as in-partition.  (The previous comment was a
   copy-paste of the "other partition" variant above.)  */

bool
referenced_from_this_partition_p (symtab_node *node,
				  lto_symtab_encoder_t encoder)
{
  int i;
  struct ipa_ref *ref = NULL;

  for (i = 0; node->iterate_referring (i, ref); i++)
    if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
      return true;
  return false;
}
356
/* Return true when NODE is called from within this partition, i.e. has a
   caller ENCODER marks as in-partition.  (The previous comment was a
   copy-paste of the "other partition" variant above.)  */

bool
reachable_from_this_partition_p (struct cgraph_node *node,
				 lto_symtab_encoder_t encoder)
{
  struct cgraph_edge *e;
  for (e = node->callers; e; e = e->next_caller)
    if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
      return true;
  return false;
}
368
/* Output the cgraph NODE to OB.  ENCODER is used to find the
   reference number of NODE->inlined_to.  SET is the set of nodes we
   are writing to the current file.  If NODE is not in SET, then NODE
   is a boundary of a cgraph_node_set and we pretend NODE just has a
   decl and no callees.  WRITTEN_DECLS is the set of FUNCTION_DECLs
   that have had their callgraph node written so far.  This is used to
   determine if NODE is a clone of a previously written node.

   The exact sequence of writes is the on-disk format and must stay in
   sync with the corresponding reader.  */

static void
lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
		 lto_symtab_encoder_t encoder)
{
  unsigned int tag;
  struct bitpack_d bp;
  bool boundary_p;
  intptr_t ref;
  bool in_other_partition = false;
  struct cgraph_node *clone_of, *ultimate_clone_of;
  ipa_opt_pass_d *pass;
  int i;
  const char *comdat;
  const char *section;
  tree group;

  boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);

  /* Aliases and non-inlined thunks keep the "analyzed" tag even on the
     boundary; everything else on the boundary is streamed as
     unavailable.  */
  if (node->analyzed && (!boundary_p || node->alias
			 || (node->thunk.thunk_p && !node->inlined_to)))
    tag = LTO_symtab_analyzed_node;
  else
    tag = LTO_symtab_unavail_node;

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       tag);
  streamer_write_hwi_stream (ob->main_stream, node->order);

  /* In WPA mode, we only output part of the call-graph.  Also, we
     fake cgraph node attributes.  There are two cases that we care.

     Boundary nodes: There are nodes that are not part of SET but are
     called from within SET.  We artificially make them look like
     externally visible nodes with no function body.

     Cherry-picked nodes: These are nodes we pulled from other
     translation units into SET during IPA-inlining.  We make them as
     local static nodes to prevent clashes with other local statics.  */
  if (boundary_p && node->analyzed
      && node->get_partitioning_class () == SYMBOL_PARTITION)
    {
      /* Inline clones cannot be part of boundary.
	 gcc_assert (!node->inlined_to);

	 FIXME: At the moment they can be, when partition contains an inline
	 clone that is clone of inline clone from outside partition.  We can
	 reshape the clone tree and make other tree to be the root, but it
	 needs a bit extra work and will be promplty done by cgraph_remove_node
	 after reading back.  */
      in_other_partition = 1;
    }

  /* Find the nearest ancestor in the clone tree that is present in the
     encoder; REF receives its index when the loop exits with a non-null
     CLONE_OF.  */
  clone_of = node->clone_of;
  while (clone_of
	 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
    if (clone_of->prev_sibling_clone)
      clone_of = clone_of->prev_sibling_clone;
    else
      clone_of = clone_of->clone_of;

  /* See if body of the master function is output.  If not, we are seeing only
     an declaration and we do not need to pass down clone tree.  */
  ultimate_clone_of = clone_of;
  while (ultimate_clone_of && ultimate_clone_of->clone_of)
    ultimate_clone_of = ultimate_clone_of->clone_of;

  if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
    clone_of = NULL;

  if (tag == LTO_symtab_analyzed_node)
    gcc_assert (clone_of || !node->clone_of);
  if (!clone_of)
    streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
  else
    streamer_write_hwi_stream (ob->main_stream, ref);


  lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
  node->count.stream_out (ob->main_stream);
  streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);

  /* Stream the list of IPA transform passes still to be applied to this
     node's body: length first, then the static pass numbers.  */
  streamer_write_hwi_stream (ob->main_stream,
			     node->ipa_transforms_to_apply.length ());
  FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
    streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);

  if (tag == LTO_symtab_analyzed_node)
    {
      /* Reference to the function this node was inlined into, or
	 LCC_NOT_FOUND for a toplevel node.  */
      if (node->inlined_to)
	{
	  ref = lto_symtab_encoder_lookup (encoder, node->inlined_to);
	  gcc_assert (ref != LCC_NOT_FOUND);
	}
      else
	ref = LCC_NOT_FOUND;

      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  /* Comdat group name; empty string when the node is not in a group.  */
  group = node->get_comdat_group ();
  if (group)
    comdat = IDENTIFIER_POINTER (group);
  else
    comdat = "";
  streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);

  if (group)
    {
      if (node->same_comdat_group)
	{
	  /* Stream the first encoded member of the same_comdat_group ring
	     other than NODE itself.  */
	  ref = LCC_NOT_FOUND;
	  for (struct symtab_node *n = node->same_comdat_group;
	       ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
	    ref = lto_symtab_encoder_lookup (encoder, n);
	}
      else
	ref = LCC_NOT_FOUND;
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  /* SECTION is streamed later, after the bitpack.  */
  section = node->get_section ();
  if (!section)
    section = "";

  streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->local, 1);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->no_reorder, 1);
  bp_pack_value (&bp, node->definition, 1);
  bp_pack_value (&bp, node->versionable, 1);
  bp_pack_value (&bp, node->can_change_signature, 1);
  bp_pack_value (&bp, node->redefined_extern_inline, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->forced_by_abi, 1);
  bp_pack_value (&bp, node->unique_name, 1);
  bp_pack_value (&bp, node->body_removed, 1);
  bp_pack_value (&bp, node->implicit_section, 1);
  bp_pack_value (&bp, node->address_taken, 1);
  /* used_from_other_partition bit: computed, not a stored flag.  */
  bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
		 && node->get_partitioning_class () == SYMBOL_PARTITION
		 && (reachable_from_other_partition_p (node, encoder)
		     || referenced_from_other_partition_p (node, encoder)), 1);
  bp_pack_value (&bp, node->lowered, 1);
  bp_pack_value (&bp, in_other_partition, 1);
  bp_pack_value (&bp, node->alias, 1);
  bp_pack_value (&bp, node->transparent_alias, 1);
  bp_pack_value (&bp, node->weakref, 1);
  bp_pack_value (&bp, node->symver, 1);
  bp_pack_value (&bp, node->frequency, 2);
  bp_pack_value (&bp, node->only_called_at_startup, 1);
  bp_pack_value (&bp, node->only_called_at_exit, 1);
  bp_pack_value (&bp, node->tm_clone, 1);
  bp_pack_value (&bp, node->calls_comdat_local, 1);
  bp_pack_value (&bp, node->icf_merged, 1);
  bp_pack_value (&bp, node->nonfreeing_fn, 1);
  bp_pack_value (&bp, node->merged_comdat, 1);
  bp_pack_value (&bp, node->merged_extern_inline, 1);
  bp_pack_value (&bp, node->thunk.thunk_p, 1);
  bp_pack_value (&bp, node->parallelized_function, 1);
  bp_pack_enum (&bp, ld_plugin_symbol_resolution,
		LDPR_NUM_KNOWN,
		/* When doing incremental link, we will get new resolution
		   info next time we process the file.  */
		flag_incremental_link ? LDPR_UNKNOWN : node->resolution);
  bp_pack_value (&bp, node->split_part, 1);
  streamer_write_bitpack (&bp);
  streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);

  /* Stream thunk info always because we use it in
     ipa_polymorphic_call_context::ipa_polymorphic_call_context
     to properly interpret THIS pointers for thunks that has been converted
     to Gimple.  */
  if (node->definition)
    {
      /* Pack the three thunk booleans into one value: bit 0 always set,
	 bit 1 = this_adjusting, bit 2 = virtual_offset_p.  */
      streamer_write_uhwi_stream
	(ob->main_stream,
	 1 + (node->thunk.this_adjusting != 0) * 2
	 + (node->thunk.virtual_offset_p != 0) * 4);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
      streamer_write_uhwi_stream (ob->main_stream, node->thunk.indirect_offset);
    }
  streamer_write_hwi_stream (ob->main_stream, node->profile_id);
  streamer_write_hwi_stream (ob->main_stream, node->unit_id);
  /* Constructor/destructor priorities are streamed only when the
     corresponding DECL flag is set; the reader mirrors this.  */
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());
}
568
/* Output the varpool NODE to OB.
   If NODE is not in SET, then NODE is a boundary.  The write order is the
   on-disk format and must match the reader.  */

static void
lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
			 lto_symtab_encoder_t encoder)
{
  bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
  bool encode_initializer_p
    = (node->definition
       && lto_symtab_encoder_encode_initializer_p (encoder, node));
  struct bitpack_d bp;
  int ref;
  const char *comdat;
  const char *section;
  tree group;

  gcc_assert (!encode_initializer_p || node->definition);
  gcc_assert (boundary_p || encode_initializer_p);

  streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
		       LTO_symtab_variable);
  streamer_write_hwi_stream (ob->main_stream, node->order);
  lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, node->externally_visible, 1);
  bp_pack_value (&bp, node->no_reorder, 1);
  bp_pack_value (&bp, node->force_output, 1);
  bp_pack_value (&bp, node->forced_by_abi, 1);
  bp_pack_value (&bp, node->unique_name, 1);
  /* body_removed: also set when a defined non-alias variable is streamed
     without its initializer, so the reader sees it as body-less.  */
  bp_pack_value (&bp,
		 node->body_removed
		 || (!encode_initializer_p && !node->alias && node->definition),
		 1);
  bp_pack_value (&bp, node->implicit_section, 1);
  bp_pack_value (&bp, node->writeonly, 1);
  /* definition bit: kept only when the initializer (or alias) travels
     along.  */
  bp_pack_value (&bp, node->definition && (encode_initializer_p || node->alias),
		 1);
  bp_pack_value (&bp, node->alias, 1);
  bp_pack_value (&bp, node->transparent_alias, 1);
  bp_pack_value (&bp, node->weakref, 1);
  bp_pack_value (&bp, node->symver, 1);
  /* analyzed bit is dropped for boundary non-aliases.  */
  bp_pack_value (&bp, node->analyzed && (!boundary_p || node->alias), 1);
  gcc_assert (node->definition || !node->analyzed);
  /* Constant pool initializers can be de-unified into individual ltrans units.
     FIXME: Alternatively at -Os we may want to avoid generating for them the local
     labels and share them across LTRANS partitions.  */
  if (node->get_partitioning_class () != SYMBOL_PARTITION)
    {
      bp_pack_value (&bp, 0, 1);  /* used_from_other_partition.  */
      bp_pack_value (&bp, 0, 1);  /* in_other_partition.  */
    }
  else
    {
      bp_pack_value (&bp, node->definition
		     && referenced_from_other_partition_p (node, encoder), 1);
      bp_pack_value (&bp, node->analyzed
		     && boundary_p && !DECL_EXTERNAL (node->decl), 1);
	  /* in_other_partition.  */
    }
  bp_pack_value (&bp, node->tls_model, 3);
  bp_pack_value (&bp, node->used_by_single_function, 1);
  bp_pack_value (&bp, node->dynamically_initialized, 1);
  streamer_write_bitpack (&bp);

  /* Comdat group name; empty string when not in a group.  */
  group = node->get_comdat_group ();
  if (group)
    comdat = IDENTIFIER_POINTER (group);
  else
    comdat = "";
  streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);

  if (group)
    {
      if (node->same_comdat_group)
	{
	  /* Stream the first encoded member of the same_comdat_group ring
	     other than NODE itself.  */
	  ref = LCC_NOT_FOUND;
	  for (struct symtab_node *n = node->same_comdat_group;
	       ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
	    ref = lto_symtab_encoder_lookup (encoder, n);
	}
      else
	ref = LCC_NOT_FOUND;
      streamer_write_hwi_stream (ob->main_stream, ref);
    }

  section = node->get_section ();
  if (!section)
    section = "";
  streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);

  streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
		       LDPR_NUM_KNOWN, node->resolution);
}
663
/* Output the IPA reference REF to OB using ENCODER.  (The previous
   comment was a copy-paste of the varpool-node writer above.)  */

static void
lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
		lto_symtab_encoder_t encoder)
{
  struct bitpack_d bp;
  int nref;
  /* Statement UIDs are biased by 1 so 0 means "no statement"; references
     not yet in gimple use the pre-recorded lto_stmt_uid.  */
  int uid = !ref->stmt ? ref->lto_stmt_uid : gimple_uid (ref->stmt) + 1;
  struct cgraph_node *node;

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ref->use, 3);
  bp_pack_value (&bp, ref->speculative, 1);
  streamer_write_bitpack (&bp);
  nref = lto_symtab_encoder_lookup (encoder, ref->referred);
  gcc_assert (nref != LCC_NOT_FOUND);
  streamer_write_hwi_stream (ob->main_stream, nref);

  /* Extra payload only when the referring symbol is a function: the
     statement UID and a second bitpack with the speculative id.  */
  node = dyn_cast <cgraph_node *> (ref->referring);
  if (node)
    {
      if (ref->stmt)
	uid = gimple_uid (ref->stmt) + 1;
      streamer_write_hwi_stream (ob->main_stream, uid);
      bp_pack_value (&bp, ref->speculative_id, 16);
      streamer_write_bitpack (&bp);
    }
}
694
695 /* Stream out profile_summary to OB. */
696
697 static void
output_profile_summary(struct lto_simple_output_block * ob)698 output_profile_summary (struct lto_simple_output_block *ob)
699 {
700 if (profile_info)
701 {
702 /* We do not output num and run_max, they are not used by
703 GCC profile feedback and they are difficult to merge from multiple
704 units. */
705 unsigned runs = (profile_info->runs);
706 streamer_write_uhwi_stream (ob->main_stream, runs);
707
708 /* IPA-profile computes hot bb threshold based on cumulated
709 whole program profile. We need to stream it down to ltrans. */
710 if (flag_wpa)
711 streamer_write_gcov_count_stream (ob->main_stream,
712 get_hot_bb_threshold ());
713 }
714 else
715 streamer_write_uhwi_stream (ob->main_stream, 0);
716 }
717
718 /* Output all callees or indirect outgoing edges. EDGE must be the first such
719 edge. */
720
721 static void
output_outgoing_cgraph_edges(struct cgraph_edge * edge,struct lto_simple_output_block * ob,lto_symtab_encoder_t encoder)722 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
723 struct lto_simple_output_block *ob,
724 lto_symtab_encoder_t encoder)
725 {
726 if (!edge)
727 return;
728
729 /* Output edges in backward direction, so the reconstructed callgraph match
730 and it is easy to associate call sites in the IPA pass summaries. */
731 while (edge->next_callee)
732 edge = edge->next_callee;
733 for (; edge; edge = edge->prev_callee)
734 lto_output_edge (ob, edge, encoder);
735 }
736
/* Output the IPA reference lists for the part of the cgraph in ENCODER
   to the LTO_section_refs section.  Each record is: reference count,
   referring-node index, then the references; a trailing 0 terminates
   the section.  */

static void
output_refs (lto_symtab_encoder_t encoder)
{
  struct lto_simple_output_block *ob;
  int count;
  struct ipa_ref *ref;

  ob = lto_create_simple_output_block (LTO_section_refs);

  for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);

      /* IPA_REF_ALIAS references are always preserved
	 in the boundary.  Alias node can't have other references and
	 can be always handled as if it's not in the boundary.  */
      if (!node->alias && !lto_symtab_encoder_in_partition_p (encoder, node))
	continue;

      count = node->ref_list.nreferences ();
      if (count)
	{
	  streamer_write_gcov_count_stream (ob->main_stream, count);
	  streamer_write_uhwi_stream (ob->main_stream,
				      lto_symtab_encoder_lookup (encoder, node));
	  /* Inner I intentionally shadows the outer loop index.  */
	  for (int i = 0; node->iterate_reference (i, ref); i++)
	    lto_output_ref (ob, ref, encoder);
	}
    }

  /* Zero count terminates the section.  */
  streamer_write_uhwi_stream (ob->main_stream, 0);

  lto_destroy_simple_output_block (ob);
}
773
774 /* Add NODE into encoder as well as nodes it is cloned from.
775 Do it in a way so clones appear first. */
776
777 static void
add_node_to(lto_symtab_encoder_t encoder,struct cgraph_node * node,bool include_body)778 add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
779 bool include_body)
780 {
781 if (node->clone_of)
782 add_node_to (encoder, node->clone_of, include_body);
783 else if (include_body)
784 lto_set_symtab_encoder_encode_body (encoder, node);
785 lto_symtab_encoder_encode (encoder, node);
786 }
787
788 /* Add all references in NODE to encoders. */
789
790 static void
create_references(lto_symtab_encoder_t encoder,symtab_node * node)791 create_references (lto_symtab_encoder_t encoder, symtab_node *node)
792 {
793 int i;
794 struct ipa_ref *ref = NULL;
795 for (i = 0; node->iterate_reference (i, ref); i++)
796 if (is_a <cgraph_node *> (ref->referred))
797 add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
798 else
799 lto_symtab_encoder_encode (encoder, ref->referred);
800 }
801
802 /* Select what needs to be streamed out. In regular lto mode stream everything.
803 In offload lto mode stream only nodes marked as offloadable. */
804 void
select_what_to_stream(void)805 select_what_to_stream (void)
806 {
807 struct symtab_node *snode;
808 FOR_EACH_SYMBOL (snode)
809 snode->need_lto_streaming = !lto_stream_offload_p || snode->offloadable;
810 }
811
/* Find all symbols we want to stream into given partition and insert them
   to encoders.

   The function actually replaces IN_ENCODER by new one.  The reason is that
   streaming code needs clone's origin to be streamed before clone.  This
   means that we need to insert the nodes in specific order.  This order is
   ignored by the partitioning logic earlier.  */

lto_symtab_encoder_t
compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
{
  struct cgraph_edge *edge;
  int i;
  lto_symtab_encoder_t encoder;
  lto_symtab_encoder_iterator lsei;
  hash_set<void *> reachable_call_targets;

  encoder = lto_symtab_encoder_new (false);

  /* Go over all entries in the IN_ENCODER and duplicate them to
     ENCODER.  At the same time insert masters of clones so
     every master appears before clone.  */
  for (lsei = lsei_start_function_in_partition (in_encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
    {
      struct cgraph_node *node = lsei_cgraph_node (lsei);
      if (!node->need_lto_streaming)
	continue;
      add_node_to (encoder, node, true);
      lto_set_symtab_encoder_in_partition (encoder, node);
      create_references (encoder, node);
    }
  for (lsei = lsei_start_variable_in_partition (in_encoder);
       !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
    {
      varpool_node *vnode = lsei_varpool_node (lsei);

      if (!vnode->need_lto_streaming)
	continue;
      lto_set_symtab_encoder_in_partition (encoder, vnode);
      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
      create_references (encoder, vnode);
    }
  /* Pickle in also the initializer of all referenced readonly variables
     to help folding.  Constant pool variables are not shared, so we must
     pickle those too.  */
  /* NOTE: lto_symtab_encoder_size may grow while we iterate, since
     create_references appends; the loop deliberately re-reads it.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
	{
	  if (!lto_symtab_encoder_encode_initializer_p (encoder,
							vnode)
	      && (((vnode->ctor_useable_for_folding_p ()
		   && (!DECL_VIRTUAL_P (vnode->decl)
		       || !flag_wpa
		       || flag_ltrans_devirtualize)))))
	    {
	      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
	      create_references (encoder, vnode);
	    }
	}
    }

  /* Go over all the nodes again to include callees that are not in
     SET.  */
  for (lsei = lsei_start_function_in_partition (encoder);
       !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
    {
      struct cgraph_node *node = lsei_cgraph_node (lsei);
      for (edge = node->callees; edge; edge = edge->next_callee)
	{
	  struct cgraph_node *callee = edge->callee;
	  if (!lto_symtab_encoder_in_partition_p (encoder, callee))
	    {
	      /* We should have moved all the inlines.  */
	      gcc_assert (!callee->inlined_to);
	      add_node_to (encoder, callee, false);
	    }
	}
      /* Add all possible targets for late devirtualization.  */
      if (flag_ltrans_devirtualize || !flag_wpa)
	for (edge = node->indirect_calls; edge; edge = edge->next_callee)
	  if (edge->indirect_info->polymorphic)
	    {
	      unsigned int i;
	      void *cache_token;
	      bool final;
	      vec <cgraph_node *>targets
		= possible_polymorphic_call_targets
		    (edge, &final, &cache_token);
	      /* CACHE_TOKEN dedups target sets: process each set once.  */
	      if (!reachable_call_targets.add (cache_token))
		{
		  for (i = 0; i < targets.length (); i++)
		    {
		      struct cgraph_node *callee = targets[i];

		      /* Adding an external declarations into the unit serves
			 no purpose and just increases its boundary.  */
		      if (callee->definition
			  && !lto_symtab_encoder_in_partition_p
			       (encoder, callee))
			{
			  gcc_assert (!callee->inlined_to);
			  add_node_to (encoder, callee, false);
			}
		    }
		}
	    }
    }
  /* Be sure to also insert alias targert and thunk callees.  These needs
     to stay to aid local calling conventions.  */
  for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node *> (node);

      if (node->alias && node->analyzed)
	create_references (encoder, node);
      if (cnode
	  && cnode->thunk.thunk_p && !cnode->inlined_to)
	add_node_to (encoder, cnode->callees->callee, false);
      /* Follow transparent-alias chains all the way to the target.  */
      while (node->transparent_alias && node->analyzed)
	{
	  node = node->get_alias_target ();
	  if (is_a <cgraph_node *> (node))
	    add_node_to (encoder, dyn_cast <cgraph_node *> (node),
			 false);
	  else
	    lto_symtab_encoder_encode (encoder, node);
	}
    }
  lto_symtab_encoder_delete (in_encoder);
  return encoder;
}
947
948 /* Output the part of the symtab in SET and VSET. */
949
950 void
output_symtab(void)951 output_symtab (void)
952 {
953 struct cgraph_node *node;
954 struct lto_simple_output_block *ob;
955 int i, n_nodes;
956 lto_symtab_encoder_t encoder;
957
958 if (flag_wpa)
959 output_cgraph_opt_summary ();
960
961 ob = lto_create_simple_output_block (LTO_section_symtab_nodes);
962
963 output_profile_summary (ob);
964
965 /* An encoder for cgraph nodes should have been created by
966 ipa_write_summaries_1. */
967 gcc_assert (ob->decl_state->symtab_node_encoder);
968 encoder = ob->decl_state->symtab_node_encoder;
969
970 /* Write out the nodes. We must first output a node and then its clones,
971 otherwise at a time reading back the node there would be nothing to clone
972 from. */
973 n_nodes = lto_symtab_encoder_size (encoder);
974 for (i = 0; i < n_nodes; i++)
975 {
976 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
977 if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
978 lto_output_node (ob, cnode, encoder);
979 else
980 lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
981 }
982
983 /* Go over the nodes in SET again to write edges. */
984 for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
985 {
986 node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder, i));
987 if (node
988 && ((node->thunk.thunk_p && !node->inlined_to)
989 || lto_symtab_encoder_in_partition_p (encoder, node)))
990 {
991 output_outgoing_cgraph_edges (node->callees, ob, encoder);
992 output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
993 }
994 }
995
996 streamer_write_uhwi_stream (ob->main_stream, 0);
997
998 lto_destroy_simple_output_block (ob);
999
1000 /* Emit toplevel asms.
1001 When doing WPA we must output every asm just once. Since we do not partition asm
1002 nodes at all, output them to first output. This is kind of hack, but should work
1003 well. */
1004 if (!asm_nodes_output)
1005 {
1006 asm_nodes_output = true;
1007 lto_output_toplevel_asms ();
1008 }
1009
1010 output_refs (encoder);
1011 }
1012
1013 /* Return identifier encoded in IB as a plain string. */
1014
1015 static tree
read_identifier(class lto_input_block * ib)1016 read_identifier (class lto_input_block *ib)
1017 {
1018 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1019 tree id;
1020
1021 if (ib->data[ib->p + len])
1022 lto_section_overrun (ib);
1023 if (!len)
1024 {
1025 ib->p++;
1026 return NULL;
1027 }
1028 id = get_identifier (ib->data + ib->p);
1029 ib->p += len + 1;
1030 return id;
1031 }
1032
1033 /* Return string encoded in IB, NULL if string is empty. */
1034
1035 static const char *
read_string(class lto_input_block * ib)1036 read_string (class lto_input_block *ib)
1037 {
1038 unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1039 const char *str;
1040
1041 if (ib->data[ib->p + len])
1042 lto_section_overrun (ib);
1043 if (!len)
1044 {
1045 ib->p++;
1046 return NULL;
1047 }
1048 str = ib->data + ib->p;
1049 ib->p += len + 1;
1050 return str;
1051 }
1052
1053 /* Output function/variable tables that will allow libgomp to look up offload
1054 target code.
1055 OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
1056 varpool_node::get_create. In WHOPR (partitioned) mode during the WPA stage
1057 both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables. */
1058
1059 void
output_offload_tables(void)1060 output_offload_tables (void)
1061 {
1062 if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars))
1063 return;
1064
1065 struct lto_simple_output_block *ob
1066 = lto_create_simple_output_block (LTO_section_offload_table);
1067
1068 for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
1069 {
1070 symtab_node *node = symtab_node::get ((*offload_funcs)[i]);
1071 if (!node)
1072 continue;
1073 node->force_output = true;
1074 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1075 LTO_symtab_last_tag, LTO_symtab_unavail_node);
1076 lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
1077 (*offload_funcs)[i]);
1078 }
1079
1080 for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
1081 {
1082 symtab_node *node = symtab_node::get ((*offload_vars)[i]);
1083 if (!node)
1084 continue;
1085 node->force_output = true;
1086 streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1087 LTO_symtab_last_tag, LTO_symtab_variable);
1088 lto_output_var_decl_index (ob->decl_state, ob->main_stream,
1089 (*offload_vars)[i]);
1090 }
1091
1092 streamer_write_uhwi_stream (ob->main_stream, 0);
1093 lto_destroy_simple_output_block (ob);
1094
1095 /* In WHOPR mode during the WPA stage the joint offload tables need to be
1096 streamed to one partition only. That's why we free offload_funcs and
1097 offload_vars after the first call of output_offload_tables. */
1098 if (flag_wpa)
1099 {
1100 vec_free (offload_funcs);
1101 vec_free (offload_vars);
1102 }
1103 }
1104
/* Verify the partitioning of NODE.  Outside of LTRANS a node from another
   partition must not appear; for the accel (offload) compiler this is a
   user-visible diagnostic, otherwise it is an internal consistency check.  */

static inline void
verify_node_partition (symtab_node *node)
{
  /* During LTRANS cross-partition references are expected.  */
  if (flag_ltrans)
    return;

#ifdef ACCEL_COMPILER
  /* On the offload compiler, a symbol in another partition means the user
     referenced it from offloaded code without marking it for offloading.  */
  if (node->in_other_partition)
    {
      if (TREE_CODE (node->decl) == FUNCTION_DECL)
	error_at (DECL_SOURCE_LOCATION (node->decl),
		  "function %qs has been referenced in offloaded code but"
		  " hasn%'t been marked to be included in the offloaded code",
		  node->name ());
      else if (VAR_P (node->decl))
	error_at (DECL_SOURCE_LOCATION (node->decl),
		  "variable %qs has been referenced in offloaded code but"
		  " hasn%'t been marked to be included in the offloaded code",
		  node->name ());
      else
	gcc_unreachable ();
    }
#else
  /* Host compile outside LTRANS: partitions do not exist yet, so neither
     flag may be set.  */
  gcc_assert (!node->in_other_partition
	      && !node->used_from_other_partition);
#endif
}
1134
/* Overwrite the information in NODE based on FILE_DATA, TAG, FLAGS,
   STACK_SIZE, SELF_TIME and SELF_SIZE.  This is called either to initialize
   NODE or to replace the values in it, for instance because the first
   time we saw it, the function body was not available but now it
   is.  BP is a bitpack with all the bitflags for NODE read from the
   stream.  The unpack order below must match the pack order on the
   writer side exactly.  */

static void
input_overwrite_node (struct lto_file_decl_data *file_data,
		      struct cgraph_node *node,
		      enum LTO_symtab_tags tag,
		      struct bitpack_d *bp)
{
  /* Stash the tag in AUX; input_cgraph_1 uses it to detect duplicates
     and clears it during fixup.  */
  node->aux = (void *) tag;
  node->lto_file_data = file_data;

  node->local = bp_unpack_value (bp, 1);
  node->externally_visible = bp_unpack_value (bp, 1);
  node->no_reorder = bp_unpack_value (bp, 1);
  node->definition = bp_unpack_value (bp, 1);
  node->versionable = bp_unpack_value (bp, 1);
  node->can_change_signature = bp_unpack_value (bp, 1);
  node->redefined_extern_inline = bp_unpack_value (bp, 1);
  node->force_output = bp_unpack_value (bp, 1);
  node->forced_by_abi = bp_unpack_value (bp, 1);
  node->unique_name = bp_unpack_value (bp, 1);
  node->body_removed = bp_unpack_value (bp, 1);
  node->implicit_section = bp_unpack_value (bp, 1);
  node->address_taken = bp_unpack_value (bp, 1);
  node->used_from_other_partition = bp_unpack_value (bp, 1);
  node->lowered = bp_unpack_value (bp, 1);
  /* "Analyzed" is implied by the tag rather than streamed as a bit.  */
  node->analyzed = tag == LTO_symtab_analyzed_node;
  node->in_other_partition = bp_unpack_value (bp, 1);
  if (node->in_other_partition
      /* Avoid updating decl when we are seeing just inline clone.
	 When inlining function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into different unit and
	 we might end up streaming inline clone from other partition
	 to support clone we are interested in.  */
      && (!node->clone_of
	  || node->clone_of->decl != node->decl))
    {
      /* The body lives elsewhere: demote the decl to an external
	 reference in this partition.  */
      DECL_EXTERNAL (node->decl) = 1;
      TREE_STATIC (node->decl) = 0;
    }
  node->alias = bp_unpack_value (bp, 1);
  node->transparent_alias = bp_unpack_value (bp, 1);
  node->weakref = bp_unpack_value (bp, 1);
  node->symver = bp_unpack_value (bp, 1);
  node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
  node->only_called_at_startup = bp_unpack_value (bp, 1);
  node->only_called_at_exit = bp_unpack_value (bp, 1);
  node->tm_clone = bp_unpack_value (bp, 1);
  node->calls_comdat_local = bp_unpack_value (bp, 1);
  node->icf_merged = bp_unpack_value (bp, 1);
  node->nonfreeing_fn = bp_unpack_value (bp, 1);
  node->merged_comdat = bp_unpack_value (bp, 1);
  node->merged_extern_inline = bp_unpack_value (bp, 1);
  node->thunk.thunk_p = bp_unpack_value (bp, 1);
  node->parallelized_function = bp_unpack_value (bp, 1);
  node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
				     LDPR_NUM_KNOWN);
  node->split_part = bp_unpack_value (bp, 1);
  verify_node_partition (node);
}
1202
1203 /* Return string alias is alias of. */
1204
1205 static tree
get_alias_symbol(tree decl)1206 get_alias_symbol (tree decl)
1207 {
1208 tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1209 return get_identifier (TREE_STRING_POINTER
1210 (TREE_VALUE (TREE_VALUE (alias))));
1211 }
1212
/* Read a node from input_block IB.  TAG is the node's tag just read.
   Return the node read or overwritten.  NODES is the vector of symtab
   nodes streamed in so far; clone references index into it.  */

static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
	    class lto_input_block *ib,
	    enum LTO_symtab_tags tag,
	    vec<symtab_node *> nodes)
{
  gcc::pass_manager *passes = g->get_passes ();
  tree fn_decl;
  struct cgraph_node *node;
  struct bitpack_d bp;
  unsigned decl_index;
  int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
  int clone_ref;
  int order;
  int i, count;
  tree group;
  const char *section;
  /* Orders are file-local; rebase into the global order space.  */
  order = streamer_read_hwi (ib) + file_data->order_base;
  clone_ref = streamer_read_hwi (ib);

  decl_index = streamer_read_uhwi (ib);
  fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);

  if (clone_ref != LCC_NOT_FOUND)
    {
      /* The node is a clone of an already-read node; materialize it as
	 such so the clone tree is reconstructed.  */
      node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
	profile_count::uninitialized (), false,
	vNULL, false, NULL, NULL);
    }
  else
    {
      /* Declaration of functions can be already merged with a declaration
	 from other input file.  We keep cgraph unmerged until after streaming
	 of ipa passes is done.  Always forcefully create a fresh node.  */
      node = symtab->create_empty ();
      node->decl = fn_decl;
      if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (fn_decl)))
	node->ifunc_resolver = 1;
      node->register_symbol ();
    }

  node->order = order;
  if (order >= symtab->order)
    symtab->order = order + 1;

  node->count = profile_count::stream_in (ib);
  node->count_materialization_scale = streamer_read_hwi (ib);

  /* Read the list of IPA transform passes still to be applied, encoded
     as pass ids.  */
  count = streamer_read_hwi (ib);
  node->ipa_transforms_to_apply = vNULL;
  for (i = 0; i < count; i++)
    {
      opt_pass *pass;
      int pid = streamer_read_hwi (ib);

      gcc_assert (pid < passes->passes_by_id_size);
      pass = passes->passes_by_id[pid];
      node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
    }

  /* Only analyzed nodes carry an inlined_to reference.  */
  if (tag == LTO_symtab_analyzed_node)
    ref = streamer_read_hwi (ib);

  group = read_identifier (ib);
  if (group)
    ref2 = streamer_read_hwi (ib);

  /* Make sure that we have not read this node before.  Nodes that
     have already been read will have their tag stored in the 'aux'
     field.  Since built-in functions can be referenced in multiple
     functions, they are expected to be read more than once.  */
  if (node->aux && !fndecl_built_in_p (node->decl))
    internal_error ("bytecode stream: found multiple instances of cgraph "
		    "node with uid %d", node->get_uid ());

  node->tp_first_run = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);

  input_overwrite_node (file_data, node, tag, &bp);

  /* Store a reference for now, and fix up later to be a pointer.  */
  node->inlined_to = (cgraph_node *) (intptr_t) ref;

  if (group)
    {
      node->set_comdat_group (group);
      /* Store a reference for now, and fix up later to be a pointer.  */
      node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
    }
  else
    node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
  section = read_string (ib);
  if (section)
    node->set_section_for_node (section);

  /* Thunk data is streamed for every definition; the bits are only
     meaningful when thunk.thunk_p is set.  */
  if (node->definition)
    {
      int type = streamer_read_uhwi (ib);
      HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
      HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
      HOST_WIDE_INT indirect_offset = streamer_read_uhwi (ib);

      node->thunk.fixed_offset = fixed_offset;
      node->thunk.virtual_value = virtual_value;
      node->thunk.indirect_offset = indirect_offset;
      node->thunk.this_adjusting = (type & 2);
      node->thunk.virtual_offset_p = (type & 4);
    }
  /* For weakrefs without a body, recover the alias target from the decl's
     "alias" attribute.  */
  if (node->alias && !node->analyzed && node->weakref)
    node->alias_target = get_alias_symbol (node->decl);
  node->profile_id = streamer_read_hwi (ib);
  /* Unit ids are file-local; rebase them as well.  */
  node->unit_id = streamer_read_hwi (ib) + file_data->unit_base;
  if (symtab->max_unit < node->unit_id)
    symtab->max_unit = node->unit_id;
  if (DECL_STATIC_CONSTRUCTOR (node->decl))
    node->set_init_priority (streamer_read_hwi (ib));
  if (DECL_STATIC_DESTRUCTOR (node->decl))
    node->set_fini_priority (streamer_read_hwi (ib));

  return node;
}
1338
1339 /* Read a node from input_block IB. TAG is the node's tag just read.
1340 Return the node read or overwriten. */
1341
1342 static varpool_node *
input_varpool_node(struct lto_file_decl_data * file_data,class lto_input_block * ib)1343 input_varpool_node (struct lto_file_decl_data *file_data,
1344 class lto_input_block *ib)
1345 {
1346 int decl_index;
1347 tree var_decl;
1348 varpool_node *node;
1349 struct bitpack_d bp;
1350 int ref = LCC_NOT_FOUND;
1351 int order;
1352 tree group;
1353 const char *section;
1354
1355 order = streamer_read_hwi (ib) + file_data->order_base;
1356 decl_index = streamer_read_uhwi (ib);
1357 var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1358
1359 /* Declaration of functions can be already merged with a declaration
1360 from other input file. We keep cgraph unmerged until after streaming
1361 of ipa passes is done. Alays forcingly create a fresh node. */
1362 node = varpool_node::create_empty ();
1363 node->decl = var_decl;
1364 node->register_symbol ();
1365
1366 node->order = order;
1367 if (order >= symtab->order)
1368 symtab->order = order + 1;
1369 node->lto_file_data = file_data;
1370
1371 bp = streamer_read_bitpack (ib);
1372 node->externally_visible = bp_unpack_value (&bp, 1);
1373 node->no_reorder = bp_unpack_value (&bp, 1);
1374 node->force_output = bp_unpack_value (&bp, 1);
1375 node->forced_by_abi = bp_unpack_value (&bp, 1);
1376 node->unique_name = bp_unpack_value (&bp, 1);
1377 node->body_removed = bp_unpack_value (&bp, 1);
1378 node->implicit_section = bp_unpack_value (&bp, 1);
1379 node->writeonly = bp_unpack_value (&bp, 1);
1380 node->definition = bp_unpack_value (&bp, 1);
1381 node->alias = bp_unpack_value (&bp, 1);
1382 node->transparent_alias = bp_unpack_value (&bp, 1);
1383 node->weakref = bp_unpack_value (&bp, 1);
1384 node->symver = bp_unpack_value (&bp, 1);
1385 node->analyzed = bp_unpack_value (&bp, 1);
1386 node->used_from_other_partition = bp_unpack_value (&bp, 1);
1387 node->in_other_partition = bp_unpack_value (&bp, 1);
1388 if (node->in_other_partition)
1389 {
1390 DECL_EXTERNAL (node->decl) = 1;
1391 TREE_STATIC (node->decl) = 0;
1392 }
1393 if (node->alias && !node->analyzed && node->weakref)
1394 node->alias_target = get_alias_symbol (node->decl);
1395 node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1396 node->used_by_single_function = (enum tls_model)bp_unpack_value (&bp, 1);
1397 node->dynamically_initialized = bp_unpack_value (&bp, 1);
1398 group = read_identifier (ib);
1399 if (group)
1400 {
1401 node->set_comdat_group (group);
1402 ref = streamer_read_hwi (ib);
1403 /* Store a reference for now, and fix up later to be a pointer. */
1404 node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1405 }
1406 else
1407 node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1408 section = read_string (ib);
1409 if (section)
1410 node->set_section_for_node (section);
1411 node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1412 LDPR_NUM_KNOWN);
1413 verify_node_partition (node);
1414 return node;
1415 }
1416
1417 /* Read a node from input_block IB. TAG is the node's tag just read.
1418 Return the node read or overwriten. */
1419
1420 static void
input_ref(class lto_input_block * ib,symtab_node * referring_node,vec<symtab_node * > nodes)1421 input_ref (class lto_input_block *ib,
1422 symtab_node *referring_node,
1423 vec<symtab_node *> nodes)
1424 {
1425 symtab_node *node = NULL;
1426 struct bitpack_d bp;
1427 enum ipa_ref_use use;
1428 bool speculative;
1429 struct ipa_ref *ref;
1430
1431 bp = streamer_read_bitpack (ib);
1432 use = (enum ipa_ref_use) bp_unpack_value (&bp, 3);
1433 speculative = (enum ipa_ref_use) bp_unpack_value (&bp, 1);
1434 node = nodes[streamer_read_hwi (ib)];
1435 ref = referring_node->create_reference (node, use);
1436 ref->speculative = speculative;
1437 if (is_a <cgraph_node *> (referring_node))
1438 {
1439 ref->lto_stmt_uid = streamer_read_hwi (ib);
1440 bp = streamer_read_bitpack (ib);
1441 ref->speculative_id = bp_unpack_value (&bp, 16);
1442 }
1443 }
1444
/* Read an edge from IB.  NODES points to a vector of previously read nodes for
   decoding caller and callee of the edge to be read.  If INDIRECT is true, the
   edge being read is indirect (in the sense that it has
   indirect_unknown_callee set).  */

static void
input_edge (class lto_input_block *ib, vec<symtab_node *> nodes,
	    bool indirect)
{
  struct cgraph_node *caller, *callee;
  struct cgraph_edge *edge;
  unsigned int stmt_id, speculative_id;
  profile_count count;
  cgraph_inline_failed_t inline_failed;
  struct bitpack_d bp;
  int ecf_flags = 0;

  /* Caller is encoded as an index into NODES.  */
  caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
  if (caller == NULL || caller->decl == NULL_TREE)
    internal_error ("bytecode stream: no caller found while reading edge");

  if (!indirect)
    {
      callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
      if (callee == NULL || callee->decl == NULL_TREE)
	internal_error ("bytecode stream: no callee found while reading edge");
    }
  else
    callee = NULL;

  count = profile_count::stream_in (ib);

  /* Unpack edge flags; order must match the writer side exactly.  */
  bp = streamer_read_bitpack (ib);
  inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
  stmt_id = bp_unpack_var_len_unsigned (&bp);
  speculative_id = bp_unpack_value (&bp, 16);

  if (indirect)
    edge = caller->create_indirect_edge (NULL, 0, count);
  else
    edge = caller->create_edge (callee, NULL, count);

  edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
  edge->speculative = bp_unpack_value (&bp, 1);
  /* Statement uids get remapped to real statements when bodies are read.  */
  edge->lto_stmt_uid = stmt_id;
  edge->speculative_id = speculative_id;
  edge->inline_failed = inline_failed;
  edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
  edge->can_throw_external = bp_unpack_value (&bp, 1);
  edge->in_polymorphic_cdtor = bp_unpack_value (&bp, 1);
  if (indirect)
    {
      /* ECF flags of the (unknown) callee are streamed bit by bit for
	 indirect edges only.  */
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_CONST;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_PURE;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NORETURN;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_MALLOC;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_NOTHROW;
      if (bp_unpack_value (&bp, 1))
	ecf_flags |= ECF_RETURNS_TWICE;
      edge->indirect_info->ecf_flags = ecf_flags;

      edge->indirect_info->num_speculative_call_targets
	= bp_unpack_value (&bp, 16);
    }
}
1515
1516
/* Read a cgraph from IB using the info in FILE_DATA.  Returns the vector of
   all symtab nodes read, in stream order, so callers can decode references
   (e.g. input_refs) against it.  */

static vec<symtab_node *>
input_cgraph_1 (struct lto_file_decl_data *file_data,
		class lto_input_block *ib)
{
  enum LTO_symtab_tags tag;
  vec<symtab_node *> nodes = vNULL;
  symtab_node *node;
  unsigned i;

  tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
  /* Orders and unit ids are file-local; record the bases used to rebase
     them into the global spaces.  */
  file_data->order_base = symtab->order;
  file_data->unit_base = symtab->max_unit + 1;
  /* A zero tag terminates the stream.  */
  while (tag)
    {
      if (tag == LTO_symtab_edge)
	input_edge (ib, nodes, false);
      else if (tag == LTO_symtab_indirect_edge)
	input_edge (ib, nodes, true);
      else if (tag == LTO_symtab_variable)
	{
	  node = input_varpool_node (file_data, ib);
	  nodes.safe_push (node);
	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
	}
      else
	{
	  node = input_node (file_data, ib, tag, nodes);
	  if (node == NULL || node->decl == NULL_TREE)
	    internal_error ("bytecode stream: found empty cgraph node");
	  nodes.safe_push (node);
	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
	}

      tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
    }

  lto_input_toplevel_asms (file_data, file_data->order_base);

  /* AUX pointers should be all non-zero for function nodes read from the stream.  */
  if (flag_checking)
    {
      FOR_EACH_VEC_ELT (nodes, i, node)
	gcc_assert (node->aux || !is_a <cgraph_node *> (node));
    }
  /* Turn the streamed integer references (stored through the pointer
     fields during reading) back into real pointers.  */
  FOR_EACH_VEC_ELT (nodes, i, node)
    {
      int ref;
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  ref = (int) (intptr_t) cnode->inlined_to;

	  /* We share declaration of builtins, so we may read same node twice.  */
	  if (!node->aux)
	    continue;
	  node->aux = NULL;

	  /* Fixup inlined_to from reference to pointer.  */
	  if (ref != LCC_NOT_FOUND)
	    dyn_cast<cgraph_node *> (node)->inlined_to
	      = dyn_cast<cgraph_node *> (nodes[ref]);
	  else
	    cnode->inlined_to = NULL;
	}

      ref = (int) (intptr_t) node->same_comdat_group;

      /* Fixup same_comdat_group from reference to pointer.  */
      if (ref != LCC_NOT_FOUND)
	node->same_comdat_group = nodes[ref];
      else
	node->same_comdat_group = NULL;
    }
  /* Mark function nodes so input_symtab can later tell which nodes came
     from the stream (see the aux-clearing loop there).  */
  FOR_EACH_VEC_ELT (nodes, i, node)
    node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
  return nodes;
}
1595
1596 /* Input ipa_refs. */
1597
1598 static void
input_refs(class lto_input_block * ib,vec<symtab_node * > nodes)1599 input_refs (class lto_input_block *ib,
1600 vec<symtab_node *> nodes)
1601 {
1602 int count;
1603 int idx;
1604 while (true)
1605 {
1606 symtab_node *node;
1607 count = streamer_read_uhwi (ib);
1608 if (!count)
1609 break;
1610 idx = streamer_read_uhwi (ib);
1611 node = nodes[idx];
1612 while (count)
1613 {
1614 input_ref (ib, node, nodes);
1615 count--;
1616 }
1617 }
1618 }
1619
1620 /* Input profile_info from IB. */
1621 static void
input_profile_summary(class lto_input_block * ib,struct lto_file_decl_data * file_data)1622 input_profile_summary (class lto_input_block *ib,
1623 struct lto_file_decl_data *file_data)
1624 {
1625 unsigned int runs = streamer_read_uhwi (ib);
1626 if (runs)
1627 {
1628 file_data->profile_info.runs = runs;
1629
1630 /* IPA-profile computes hot bb threshold based on cumulated
1631 whole program profile. We need to stream it down to ltrans. */
1632 if (flag_ltrans)
1633 set_hot_bb_threshold (streamer_read_gcov_count (ib));
1634 }
1635
1636 }
1637
1638 /* Rescale profile summaries to the same number of runs in the whole unit. */
1639
1640 static void
merge_profile_summaries(struct lto_file_decl_data ** file_data_vec)1641 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1642 {
1643 struct lto_file_decl_data *file_data;
1644 unsigned int j;
1645 gcov_unsigned_t max_runs = 0;
1646 struct cgraph_node *node;
1647 struct cgraph_edge *edge;
1648
1649 /* Find unit with maximal number of runs. If we ever get serious about
1650 roundoff errors, we might also consider computing smallest common
1651 multiply. */
1652 for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1653 if (max_runs < file_data->profile_info.runs)
1654 max_runs = file_data->profile_info.runs;
1655
1656 if (!max_runs)
1657 return;
1658
1659 /* Simple overflow check. We probably don't need to support that many train
1660 runs. Such a large value probably imply data corruption anyway. */
1661 if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1662 {
1663 sorry ("At most %i profile runs is supported. Perhaps corrupted profile?",
1664 INT_MAX / REG_BR_PROB_BASE);
1665 return;
1666 }
1667
1668 profile_info = XCNEW (gcov_summary);
1669 profile_info->runs = max_runs;
1670
1671 /* If merging already happent at WPA time, we are done. */
1672 if (flag_ltrans)
1673 return;
1674
1675 /* Now compute count_materialization_scale of each node.
1676 During LTRANS we already have values of count_materialization_scale
1677 computed, so just update them. */
1678 FOR_EACH_FUNCTION (node)
1679 if (node->lto_file_data
1680 && node->lto_file_data->profile_info.runs)
1681 {
1682 int scale;
1683
1684 scale = RDIV (node->count_materialization_scale * max_runs,
1685 node->lto_file_data->profile_info.runs);
1686 node->count_materialization_scale = scale;
1687 if (scale < 0)
1688 fatal_error (input_location, "Profile information in %s corrupted",
1689 file_data->file_name);
1690
1691 if (scale == REG_BR_PROB_BASE)
1692 continue;
1693 for (edge = node->callees; edge; edge = edge->next_callee)
1694 if (edge->count.ipa ().nonzero_p ())
1695 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1696 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1697 if (edge->count.ipa ().nonzero_p ())
1698 edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1699 if (node->count.ipa ().nonzero_p ())
1700 node->count = node->count.apply_scale (scale, REG_BR_PROB_BASE);
1701 }
1702 }
1703
/* Input and merge the symtab from each of the .o files passed to
   lto1.  */

void
input_symtab (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;
  struct cgraph_node *node;

  /* Read nodes, edges and references from every input file in turn.  */
  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib;
      vec<symtab_node *> nodes;

      ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
					  &data, &len);
      if (!ib)
	fatal_error (input_location,
		     "cannot find LTO cgraph in %s", file_data->file_name);
      input_profile_summary (ib, file_data);
      file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
      nodes = input_cgraph_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
				      ib, data, len);

      /* The refs section is decoded against the node vector just read.  */
      ib = lto_create_simple_input_block (file_data, LTO_section_refs,
					  &data, &len);
      if (!ib)
	fatal_error (input_location, "cannot find LTO section refs in %s",
		     file_data->file_name);
      input_refs (ib, nodes);
      lto_destroy_simple_input_block (file_data, LTO_section_refs,
				      ib, data, len);
      if (flag_ltrans)
	input_cgraph_opt_summary (nodes);
      nodes.release ();
    }

  merge_profile_summaries (file_data_vec);

  /* Clear out the aux field that was used to store enough state to
     tell which nodes should be overwritten.  */
  FOR_EACH_FUNCTION (node)
    {
      /* Some nodes may have been created by cgraph_node.  This
	 happens when the callgraph contains nested functions.  If the
	 node for the parent function was never emitted to the gimple
	 file, cgraph_node will create a node for it when setting the
	 context of the nested function.  */
      if (node->lto_file_data)
	node->aux = NULL;
    }
}
1761
/* Input function/variable tables that will allow libgomp to look up offload
   target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS.
   When DO_FORCE_OUTPUT is set, mark the read symbols as force-output so
   IPA cannot remove them as unreachable.  */

void
input_offload_tables (bool do_force_output)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      class lto_input_block *ib
	= lto_create_simple_input_block (file_data, LTO_section_offload_table,
					 &data, &len);
      /* Not every input file has an offload table.  */
      if (!ib)
	continue;

      enum LTO_symtab_tags tag
	= streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
      /* A zero tag terminates the table.  */
      while (tag)
	{
	  if (tag == LTO_symtab_unavail_node)
	    {
	      int decl_index = streamer_read_uhwi (ib);
	      tree fn_decl
		= lto_file_decl_data_get_fn_decl (file_data, decl_index);
	      vec_safe_push (offload_funcs, fn_decl);

	      /* Prevent IPA from removing fn_decl as unreachable, since there
		 may be no refs from the parent function to child_fn in offload
		 LTO mode.  */
	      if (do_force_output)
		cgraph_node::get (fn_decl)->mark_force_output ();
	    }
	  else if (tag == LTO_symtab_variable)
	    {
	      int decl_index = streamer_read_uhwi (ib);
	      tree var_decl
		= lto_file_decl_data_get_var_decl (file_data, decl_index);
	      vec_safe_push (offload_vars, var_decl);

	      /* Prevent IPA from removing var_decl as unused, since there
		 may be no refs to var_decl in offload LTO mode.  */
	      if (do_force_output)
		varpool_node::get (var_decl)->force_output = 1;
	    }
	  else
	    fatal_error (input_location,
			 "invalid offload table in %s", file_data->file_name);

	  tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
	}

      lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
				      ib, data, len);
    }
}
1822
1823 /* True when we need optimization summary for NODE. */
1824
1825 static int
output_cgraph_opt_summary_p(struct cgraph_node * node)1826 output_cgraph_opt_summary_p (struct cgraph_node *node)
1827 {
1828 return ((node->clone_of || node->former_clone_of)
1829 && (node->clone.tree_map
1830 || node->clone.param_adjustments));
1831 }
1832
/* Output optimization summary for EDGE to OB.  No per-edge data is
   currently streamed; this placeholder keeps the writer symmetrical with
   input_edge_opt_summary so per-edge data can be added later without
   changing the traversal in output_node_opt_summary.  */
static void
output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
{
}
1839
1840 /* Output optimization summary for NODE to OB. */
1841
1842 static void
output_node_opt_summary(struct output_block * ob,struct cgraph_node * node,lto_symtab_encoder_t encoder)1843 output_node_opt_summary (struct output_block *ob,
1844 struct cgraph_node *node,
1845 lto_symtab_encoder_t encoder)
1846 {
1847 struct ipa_replace_map *map;
1848 int i;
1849 struct cgraph_edge *e;
1850
1851 /* TODO: Should this code be moved to ipa-param-manipulation? */
1852 struct bitpack_d bp;
1853 bp = bitpack_create (ob->main_stream);
1854 bp_pack_value (&bp, (node->clone.param_adjustments != NULL), 1);
1855 streamer_write_bitpack (&bp);
1856 if (ipa_param_adjustments *adjustments = node->clone.param_adjustments)
1857 {
1858 streamer_write_uhwi (ob, vec_safe_length (adjustments->m_adj_params));
1859 ipa_adjusted_param *adj;
1860 FOR_EACH_VEC_SAFE_ELT (adjustments->m_adj_params, i, adj)
1861 {
1862 bp = bitpack_create (ob->main_stream);
1863 bp_pack_value (&bp, adj->base_index, IPA_PARAM_MAX_INDEX_BITS);
1864 bp_pack_value (&bp, adj->prev_clone_index, IPA_PARAM_MAX_INDEX_BITS);
1865 bp_pack_value (&bp, adj->op, 2);
1866 bp_pack_value (&bp, adj->param_prefix_index, 2);
1867 bp_pack_value (&bp, adj->prev_clone_adjustment, 1);
1868 bp_pack_value (&bp, adj->reverse, 1);
1869 bp_pack_value (&bp, adj->user_flag, 1);
1870 streamer_write_bitpack (&bp);
1871 if (adj->op == IPA_PARAM_OP_SPLIT
1872 || adj->op == IPA_PARAM_OP_NEW)
1873 {
1874 stream_write_tree (ob, adj->type, true);
1875 if (adj->op == IPA_PARAM_OP_SPLIT)
1876 {
1877 stream_write_tree (ob, adj->alias_ptr_type, true);
1878 streamer_write_uhwi (ob, adj->unit_offset);
1879 }
1880 }
1881 }
1882 streamer_write_hwi (ob, adjustments->m_always_copy_start);
1883 bp = bitpack_create (ob->main_stream);
1884 bp_pack_value (&bp, node->clone.param_adjustments->m_skip_return, 1);
1885 streamer_write_bitpack (&bp);
1886 }
1887
1888 streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
1889 FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
1890 {
1891 streamer_write_uhwi (ob, map->parm_num);
1892 gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
1893 stream_write_tree (ob, map->new_tree, true);
1894 }
1895
1896 if (lto_symtab_encoder_in_partition_p (encoder, node))
1897 {
1898 for (e = node->callees; e; e = e->next_callee)
1899 output_edge_opt_summary (ob, e);
1900 for (e = node->indirect_calls; e; e = e->next_callee)
1901 output_edge_opt_summary (ob, e);
1902 }
1903 }
1904
1905 /* Output optimization summaries stored in callgraph.
1906 At the moment it is the clone info structure. */
1907
1908 static void
output_cgraph_opt_summary(void)1909 output_cgraph_opt_summary (void)
1910 {
1911 int i, n_nodes;
1912 lto_symtab_encoder_t encoder;
1913 struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1914 unsigned count = 0;
1915
1916 ob->symbol = NULL;
1917 encoder = ob->decl_state->symtab_node_encoder;
1918 n_nodes = lto_symtab_encoder_size (encoder);
1919 for (i = 0; i < n_nodes; i++)
1920 {
1921 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1922 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1923 if (cnode && output_cgraph_opt_summary_p (cnode))
1924 count++;
1925 }
1926 streamer_write_uhwi (ob, count);
1927 for (i = 0; i < n_nodes; i++)
1928 {
1929 symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1930 cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1931 if (cnode && output_cgraph_opt_summary_p (cnode))
1932 {
1933 streamer_write_uhwi (ob, i);
1934 output_node_opt_summary (ob, cnode, encoder);
1935 }
1936 }
1937 produce_asm (ob, NULL);
1938 destroy_output_block (ob);
1939 }
1940
/* Input optimisation summary of EDGE.  No per-edge data is currently
   streamed (see output_edge_opt_summary); this placeholder keeps the
   reader in sync with the writer's edge traversal.  */

static void
input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
			class lto_input_block *ib_main ATTRIBUTE_UNUSED)
{
}
1948
/* Input optimisation summary of NODE from IB_MAIN, using DATA_IN for tree
   streaming.  Restores the clone info (parameter adjustments and parameter
   replacement maps) in exactly the order output_node_opt_summary wrote it.  */

static void
input_node_opt_summary (struct cgraph_node *node,
			class lto_input_block *ib_main,
			class data_in *data_in)
{
  int i;
  int count;
  struct cgraph_edge *e;

  /* TODO: Should this code be moved to ipa-param-manipulation?  */
  struct bitpack_d bp;
  bp = streamer_read_bitpack (ib_main);
  /* A single presence bit tells whether param adjustments follow.  */
  bool have_adjustments = bp_unpack_value (&bp, 1);
  if (have_adjustments)
    {
      count = streamer_read_uhwi (ib_main);
      vec<ipa_adjusted_param, va_gc> *new_params = NULL;
      for (i = 0; i < count; i++)
	{
	  ipa_adjusted_param adj;
	  /* Clear all fields; only those streamed below are restored.  */
	  memset (&adj, 0, sizeof (adj));
	  bp = streamer_read_bitpack (ib_main);
	  adj.base_index = bp_unpack_value (&bp, IPA_PARAM_MAX_INDEX_BITS);
	  adj.prev_clone_index
	    = bp_unpack_value (&bp, IPA_PARAM_MAX_INDEX_BITS);
	  adj.op = (enum ipa_parm_op) bp_unpack_value (&bp, 2);
	  adj.param_prefix_index = bp_unpack_value (&bp, 2);
	  adj.prev_clone_adjustment = bp_unpack_value (&bp, 1);
	  adj.reverse = bp_unpack_value (&bp, 1);
	  adj.user_flag = bp_unpack_value (&bp, 1);
	  /* Split and new parameters also carry their type; splits
	     additionally stream the alias pointer type and unit offset.  */
	  if (adj.op == IPA_PARAM_OP_SPLIT
	      || adj.op == IPA_PARAM_OP_NEW)
	    {
	      adj.type = stream_read_tree (ib_main, data_in);
	      if (adj.op == IPA_PARAM_OP_SPLIT)
		{
		  adj.alias_ptr_type = stream_read_tree (ib_main, data_in);
		  adj.unit_offset = streamer_read_uhwi (ib_main);
		}
	    }
	  vec_safe_push (new_params, adj);
	}
      int always_copy_start = streamer_read_hwi (ib_main);
      bp = streamer_read_bitpack (ib_main);
      bool skip_return = bp_unpack_value (&bp, 1);
      /* Rebuild the GC-allocated adjustments object for the node.  */
      node->clone.param_adjustments
	= (new (ggc_alloc <ipa_param_adjustments> ())
	   ipa_param_adjustments (new_params, always_copy_start, skip_return));
    }

  /* Read the parameter replacement maps.  */
  count = streamer_read_uhwi (ib_main);
  for (i = 0; i < count; i++)
    {
      struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();

      vec_safe_push (node->clone.tree_map, map);
      map->parm_num = streamer_read_uhwi (ib_main);
      map->new_tree = stream_read_tree (ib_main, data_in);
    }
  /* Per-edge summaries are currently empty, but keep the traversal in
     sync with the writer.  */
  for (e = node->callees; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
  for (e = node->indirect_calls; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
}
2015
2016 /* Read section in file FILE_DATA of length LEN with data DATA. */
2017
2018 static void
input_cgraph_opt_section(struct lto_file_decl_data * file_data,const char * data,size_t len,vec<symtab_node * > nodes)2019 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
2020 const char *data, size_t len,
2021 vec<symtab_node *> nodes)
2022 {
2023 const struct lto_function_header *header =
2024 (const struct lto_function_header *) data;
2025 const int cfg_offset = sizeof (struct lto_function_header);
2026 const int main_offset = cfg_offset + header->cfg_size;
2027 const int string_offset = main_offset + header->main_size;
2028 class data_in *data_in;
2029 unsigned int i;
2030 unsigned int count;
2031
2032 lto_input_block ib_main ((const char *) data + main_offset,
2033 header->main_size, file_data->mode_table);
2034
2035 data_in =
2036 lto_data_in_create (file_data, (const char *) data + string_offset,
2037 header->string_size, vNULL);
2038 count = streamer_read_uhwi (&ib_main);
2039
2040 for (i = 0; i < count; i++)
2041 {
2042 int ref = streamer_read_uhwi (&ib_main);
2043 input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
2044 &ib_main, data_in);
2045 }
2046 lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
2047 len);
2048 lto_data_in_delete (data_in);
2049 }
2050
2051 /* Input optimization summary of cgraph. */
2052
2053 static void
input_cgraph_opt_summary(vec<symtab_node * > nodes)2054 input_cgraph_opt_summary (vec<symtab_node *> nodes)
2055 {
2056 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2057 struct lto_file_decl_data *file_data;
2058 unsigned int j = 0;
2059
2060 while ((file_data = file_data_vec[j++]))
2061 {
2062 size_t len;
2063 const char *data
2064 = lto_get_summary_section_data (file_data, LTO_section_cgraph_opt_sum,
2065 &len);
2066 if (data)
2067 input_cgraph_opt_section (file_data, data, len, nodes);
2068 }
2069 }
2070