1 /* Write and read the cgraph to the memory mapped representation of a
2    .o file.
3 
4    Copyright (C) 2009-2019 Free Software Foundation, Inc.
5    Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "predict.h"
31 #include "stringpool.h"
32 #include "tree-streamer.h"
33 #include "cgraph.h"
34 #include "tree-pass.h"
35 #include "profile.h"
36 #include "context.h"
37 #include "pass_manager.h"
38 #include "ipa-utils.h"
39 #include "omp-offload.h"
40 #include "stringpool.h"
41 #include "attribs.h"
42 
43 /* True when asm nodes have been output.  */
44 bool asm_nodes_output = false;
45 
46 static void output_cgraph_opt_summary (void);
47 static void input_cgraph_opt_summary (vec<symtab_node *>  nodes);
48 
49 /* Number of LDPR values known to GCC.  */
50 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
51 
52 /* All node orders are offset by ORDER_BASE.  */
53 static int order_base;
54 
55 /* Cgraph streaming is organized as a set of records whose type
56    is indicated by a tag.  */
57 enum LTO_symtab_tags
58 {
59   /* Must leave 0 for the stopper.  */
60 
61   /* Cgraph node without body available.  */
62   LTO_symtab_unavail_node = 1,
63   /* Cgraph node with function body.  */
64   LTO_symtab_analyzed_node,
65   /* Cgraph edges.  */
66   LTO_symtab_edge,
67   LTO_symtab_indirect_edge,
68   LTO_symtab_variable,
69   LTO_symtab_last_tag
70 };
71 
72 /* Create a new symtab encoder.
73    If FOR_INPUT, the encoder allocates only the data structures needed
74    to read the symtab.  */
75 
76 lto_symtab_encoder_t
77 lto_symtab_encoder_new (bool for_input)
78 {
79   lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
80 
81   if (!for_input)
82     encoder->map = new hash_map<symtab_node *, size_t>;
83   encoder->nodes.create (0);
84   return encoder;
85 }
86 
87 
88 /* Delete ENCODER and its components.  */
89 
90 void
91 lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
92 {
93    encoder->nodes.release ();
94    if (encoder->map)
95      delete encoder->map;
96    free (encoder);
97 }
98 
99 
100 /* Return the existing reference number of NODE in the symtab ENCODER.
101    Assign a new reference if this is the first time
102    NODE is encoded.  */
103 
104 int
105 lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
106 			   symtab_node *node)
107 {
108   int ref;
109 
110   if (!encoder->map)
111     {
112       lto_encoder_entry entry = {node, false, false, false};
113 
114       ref = encoder->nodes.length ();
115       encoder->nodes.safe_push (entry);
116       return ref;
117     }
118 
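  /* References in the map are biased by 1 so that the default value of 0
     returned for a missing entry can be told apart from a valid reference;
     hence the "+ 1" when storing and the "- 1" when reading a slot back.  */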
119   size_t *slot = encoder->map->get (node);
120   if (!slot || !*slot)
121     {
122       lto_encoder_entry entry = {node, false, false, false};
123       ref = encoder->nodes.length ();
124       if (!slot)
125         encoder->map->put (node, ref + 1);
126       encoder->nodes.safe_push (entry);
127     }
128   else
129     ref = *slot - 1;
130 
131   return ref;
132 }
133 
134 /* Remove NODE from encoder.  */
135 
136 bool
137 lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
138 			        symtab_node *node)
139 {
140   int index;
141   lto_encoder_entry last_node;
142 
143   size_t *slot = encoder->map->get (node);
144   if (slot == NULL || !*slot)
145     return false;
146 
147   index = *slot - 1;
148   gcc_checking_assert (encoder->nodes[index].node == node);
149 
150   /* Remove from vector. We do this by swapping node with the last element
151      of the vector.  */
152   last_node = encoder->nodes.pop ();
153   if (last_node.node != node)
154     {
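      /* hash_map::put returns true when the key was already present; the node
	 being moved must already have a mapping, which is updated here to its
	 new index (again biased by 1).  */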
155       gcc_assert (encoder->map->put (last_node.node, index + 1));
156 
157       /* Move the last element to the original spot of NODE.  */
158       encoder->nodes[index] = last_node;
159     }
160 
161   /* Remove element from hash table.  */
162   encoder->map->remove (node);
163   return true;
164 }
165 
166 
167 /* Return TRUE if we should encode the body of NODE (if any).  */
168 
169 bool
170 lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
171 				  struct cgraph_node *node)
172 {
173   int index = lto_symtab_encoder_lookup (encoder, node);
174   return encoder->nodes[index].body;
175 }
176 
177 /* Specify that we encode the body of NODE in this partition.  */
178 
179 static void
180 lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
181 				    struct cgraph_node *node)
182 {
183   int index = lto_symtab_encoder_encode (encoder, node);
184   gcc_checking_assert (encoder->nodes[index].node == node);
185   encoder->nodes[index].body = true;
186 }
187 
188 /* Return TRUE if we should encode initializer of NODE (if any).  */
189 
190 bool
191 lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
192 					 varpool_node *node)
193 {
194   int index = lto_symtab_encoder_lookup (encoder, node);
195   if (index == LCC_NOT_FOUND)
196     return false;
197   return encoder->nodes[index].initializer;
198 }
199 
200 /* Specify that we should encode initializer of NODE (if any).  */
201 
202 static void
203 lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
204 					   varpool_node *node)
205 {
206   int index = lto_symtab_encoder_lookup (encoder, node);
207   encoder->nodes[index].initializer = true;
208 }
209 
210 /* Return TRUE if NODE is in this partition.  */
211 
212 bool
213 lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
214 				   symtab_node *node)
215 {
216   int index = lto_symtab_encoder_lookup (encoder, node);
217   if (index == LCC_NOT_FOUND)
218     return false;
219   return encoder->nodes[index].in_partition;
220 }
221 
222 /* Specify that NODE is in this partition.  */
223 
224 void
225 lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
226 				     symtab_node *node)
227 {
228   int index = lto_symtab_encoder_encode (encoder, node);
229   encoder->nodes[index].in_partition = true;
230 }
231 
232 /* Output the cgraph EDGE to OB using ENCODER.  */
233 
234 static void
235 lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
236 		 lto_symtab_encoder_t encoder)
237 {
238   unsigned int uid;
239   intptr_t ref;
240   struct bitpack_d bp;
241 
242   if (edge->indirect_unknown_callee)
243     streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
244 			 LTO_symtab_indirect_edge);
245   else
246     streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
247 			 LTO_symtab_edge);
248 
249   ref = lto_symtab_encoder_lookup (encoder, edge->caller);
250   gcc_assert (ref != LCC_NOT_FOUND);
251   streamer_write_hwi_stream (ob->main_stream, ref);
252 
253   if (!edge->indirect_unknown_callee)
254     {
255       ref = lto_symtab_encoder_lookup (encoder, edge->callee);
256       gcc_assert (ref != LCC_NOT_FOUND);
257       streamer_write_hwi_stream (ob->main_stream, ref);
258     }
259 
260   edge->count.stream_out (ob->main_stream);
261 
262   bp = bitpack_create (ob->main_stream);
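  /* Refer to the call statement by gimple_uid + 1 while the caller still has
     a gimple body; otherwise (no body, or a thunk) reuse the uid recorded
     when the edge was streamed in earlier.  */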
263   uid = (!gimple_has_body_p (edge->caller->decl) || edge->caller->thunk.thunk_p
264 	 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt) + 1);
265   bp_pack_enum (&bp, cgraph_inline_failed_t,
266 	        CIF_N_REASONS, edge->inline_failed);
267   bp_pack_var_len_unsigned (&bp, uid);
268   bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
269   bp_pack_value (&bp, edge->speculative, 1);
270   bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
271   gcc_assert (!edge->call_stmt_cannot_inline_p
272 	      || edge->inline_failed != CIF_BODY_NOT_AVAILABLE);
273   bp_pack_value (&bp, edge->can_throw_external, 1);
274   bp_pack_value (&bp, edge->in_polymorphic_cdtor, 1);
275   if (edge->indirect_unknown_callee)
276     {
277       int flags = edge->indirect_info->ecf_flags;
278       bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
279       bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
280       bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
281       bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
282       bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
283       bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
284       /* Flags that should not appear on indirect calls.  */
285       gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
286 			     | ECF_MAY_BE_ALLOCA
287 			     | ECF_SIBCALL
288 			     | ECF_LEAF
289 			     | ECF_NOVOPS)));
290     }
291   streamer_write_bitpack (&bp);
292   if (edge->indirect_unknown_callee)
293     {
294       streamer_write_hwi_stream (ob->main_stream,
295 			         edge->indirect_info->common_target_id);
296       if (edge->indirect_info->common_target_id)
297 	streamer_write_hwi_stream
298 	   (ob->main_stream, edge->indirect_info->common_target_probability);
299     }
300 }
301 
302 /* Return true if NODE contains references from other partitions.  */
303 
304 bool
305 referenced_from_other_partition_p (symtab_node *node, lto_symtab_encoder_t encoder)
306 {
307   int i;
308   struct ipa_ref *ref = NULL;
309 
310   for (i = 0; node->iterate_referring (i, ref); i++)
311     {
312       /* Ignore references from non-offloadable nodes while streaming NODE into
313 	 offload LTO section.  */
314       if (!ref->referring->need_lto_streaming)
315 	continue;
316 
317       if (ref->referring->in_other_partition
318           || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
319 	return true;
320     }
321   return false;
322 }
323 
324 /* Return true when NODE is reachable from another partition.  */
325 
326 bool
327 reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
328 {
329   struct cgraph_edge *e;
330   if (!node->definition)
331     return false;
332   if (node->global.inlined_to)
333     return false;
334   for (e = node->callers; e; e = e->next_caller)
335     {
336       /* Ignore references from non-offloadable nodes while streaming NODE into
337 	 offload LTO section.  */
338       if (!e->caller->need_lto_streaming)
339 	continue;
340 
341       if (e->caller->in_other_partition
342 	  || !lto_symtab_encoder_in_partition_p (encoder, e->caller))
343 	return true;
344     }
345   return false;
346 }
347 
348 /* Return true if NODE is referenced from this partition.  */
349 
350 bool
351 referenced_from_this_partition_p (symtab_node *node,
352 				  lto_symtab_encoder_t encoder)
353 {
354   int i;
355   struct ipa_ref *ref = NULL;
356 
357   for (i = 0; node->iterate_referring (i, ref); i++)
358     if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
359       return true;
360   return false;
361 }
362 
363 /* Return true when NODE is reachable from this partition.  */
364 
365 bool
366 reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
367 {
368   struct cgraph_edge *e;
369   for (e = node->callers; e; e = e->next_caller)
370     if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
371       return true;
372   return false;
373 }
374 
375 /* Output the cgraph NODE to OB.  ENCODER is used to find the
376    reference numbers of NODE->inlined_to and of NODE's clone origin,
377    which is how the reader can tell whether NODE is a clone of a
378    previously written node.  If NODE is not in the partition described
379    by ENCODER, then NODE is a boundary node and we pretend NODE just
380    has a decl and no callees.  */
382 
383 static void
384 lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
385 		 lto_symtab_encoder_t encoder)
386 {
387   unsigned int tag;
388   struct bitpack_d bp;
389   bool boundary_p;
390   intptr_t ref;
391   bool in_other_partition = false;
392   struct cgraph_node *clone_of, *ultimate_clone_of;
393   ipa_opt_pass_d *pass;
394   int i;
395   const char *comdat;
396   const char *section;
397   tree group;
398 
399   boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
400 
401   if (node->analyzed && (!boundary_p || node->alias
402 			 || (node->thunk.thunk_p && !node->global.inlined_to)))
403     tag = LTO_symtab_analyzed_node;
404   else
405     tag = LTO_symtab_unavail_node;
406 
407   streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
408 		       tag);
409   streamer_write_hwi_stream (ob->main_stream, node->order);
410 
411   /* In WPA mode, we only output part of the call-graph.  Also, we
412      fake cgraph node attributes.  There are two cases we care about.
413 
414      Boundary nodes: There are nodes that are not part of SET but are
415      called from within SET.  We artificially make them look like
416      externally visible nodes with no function body.
417 
418      Cherry-picked nodes:  These are nodes we pulled from other
419      translation units into SET during IPA-inlining.  We make them
420      local static nodes to prevent clashes with other local statics.  */
421   if (boundary_p && node->analyzed
422       && node->get_partitioning_class () == SYMBOL_PARTITION)
423     {
424       /* Inline clones cannot be part of boundary.
425          gcc_assert (!node->global.inlined_to);
426 
427 	 FIXME: At the moment they can be, when a partition contains an inline
428 	 clone that is a clone of an inline clone from outside the partition.
429 	 We can reshape the clone tree and make another node the root, but it
430 	 needs a bit of extra work and will be promptly done by cgraph_remove_node
431 	 after reading back.  */
432       in_other_partition = 1;
433     }
434 
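  /* Find a clone origin for NODE that is itself present in ENCODER, walking
     earlier sibling clones before moving up the clone tree.  */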
435   clone_of = node->clone_of;
436   while (clone_of
437 	 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
438     if (clone_of->prev_sibling_clone)
439       clone_of = clone_of->prev_sibling_clone;
440     else
441       clone_of = clone_of->clone_of;
442 
443   /* See if body of the master function is output.  If not, we are seeing only
444      a declaration and we do not need to pass down the clone tree.  */
445   ultimate_clone_of = clone_of;
446   while (ultimate_clone_of && ultimate_clone_of->clone_of)
447     ultimate_clone_of = ultimate_clone_of->clone_of;
448 
449   if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
450     clone_of = NULL;
451 
452   if (tag == LTO_symtab_analyzed_node)
453     gcc_assert (clone_of || !node->clone_of);
454   if (!clone_of)
455     streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
456   else
457     streamer_write_hwi_stream (ob->main_stream, ref);
458 
459 
460   lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
461   node->count.stream_out (ob->main_stream);
462   streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
463 
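  /* Stream the pending IPA transforms as a count followed by the static pass
     numbers; input_node maps them back through passes_by_id.  */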
464   streamer_write_hwi_stream (ob->main_stream,
465 			     node->ipa_transforms_to_apply.length ());
466   FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
467     streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);
468 
469   if (tag == LTO_symtab_analyzed_node)
470     {
471       if (node->global.inlined_to)
472 	{
473 	  ref = lto_symtab_encoder_lookup (encoder, node->global.inlined_to);
474 	  gcc_assert (ref != LCC_NOT_FOUND);
475 	}
476       else
477 	ref = LCC_NOT_FOUND;
478 
479       streamer_write_hwi_stream (ob->main_stream, ref);
480     }
481 
482   group = node->get_comdat_group ();
483   if (group)
484     comdat = IDENTIFIER_POINTER (group);
485   else
486     comdat = "";
487   streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
488 
489   if (group)
490     {
491       if (node->same_comdat_group)
492 	{
493 	  ref = LCC_NOT_FOUND;
494 	  for (struct symtab_node *n = node->same_comdat_group;
495 	       ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
496 	    ref = lto_symtab_encoder_lookup (encoder, n);
497 	}
498       else
499 	ref = LCC_NOT_FOUND;
500       streamer_write_hwi_stream (ob->main_stream, ref);
501     }
502 
503   section = node->get_section ();
504   if (!section)
505     section = "";
506 
507   streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);
508 
509   bp = bitpack_create (ob->main_stream);
510   bp_pack_value (&bp, node->local.local, 1);
511   bp_pack_value (&bp, node->externally_visible, 1);
512   bp_pack_value (&bp, node->no_reorder, 1);
513   bp_pack_value (&bp, node->definition, 1);
514   bp_pack_value (&bp, node->local.versionable, 1);
515   bp_pack_value (&bp, node->local.can_change_signature, 1);
516   bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
517   bp_pack_value (&bp, node->force_output, 1);
518   bp_pack_value (&bp, node->forced_by_abi, 1);
519   bp_pack_value (&bp, node->unique_name, 1);
520   bp_pack_value (&bp, node->body_removed, 1);
521   bp_pack_value (&bp, node->implicit_section, 1);
522   bp_pack_value (&bp, node->address_taken, 1);
523   bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
524 		 && node->get_partitioning_class () == SYMBOL_PARTITION
525 		 && (reachable_from_other_partition_p (node, encoder)
526 		     || referenced_from_other_partition_p (node, encoder)), 1);
527   bp_pack_value (&bp, node->lowered, 1);
528   bp_pack_value (&bp, in_other_partition, 1);
529   bp_pack_value (&bp, node->alias, 1);
530   bp_pack_value (&bp, node->transparent_alias, 1);
531   bp_pack_value (&bp, node->weakref, 1);
532   bp_pack_value (&bp, node->frequency, 2);
533   bp_pack_value (&bp, node->only_called_at_startup, 1);
534   bp_pack_value (&bp, node->only_called_at_exit, 1);
535   bp_pack_value (&bp, node->tm_clone, 1);
536   bp_pack_value (&bp, node->calls_comdat_local, 1);
537   bp_pack_value (&bp, node->icf_merged, 1);
538   bp_pack_value (&bp, node->nonfreeing_fn, 1);
539   bp_pack_value (&bp, node->thunk.thunk_p, 1);
540   bp_pack_value (&bp, node->parallelized_function, 1);
541   bp_pack_enum (&bp, ld_plugin_symbol_resolution,
542 	        LDPR_NUM_KNOWN,
543 		/* When doing incremental link, we will get new resolution
544 		   info next time we process the file.  */
545 		flag_incremental_link ? LDPR_UNKNOWN : node->resolution);
546   bp_pack_value (&bp, node->split_part, 1);
547   streamer_write_bitpack (&bp);
548   streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
549 
550   /* Stream thunk info always because we use it in
551      ipa_polymorphic_call_context::ipa_polymorphic_call_context
552      to properly interpret THIS pointers for thunks that have been converted
553      to Gimple.  */
554   if (node->definition)
555     {
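      /* Pack the thunk flags into one integer: bit 0 is always set, bit 1 is
	 this_adjusting and bit 2 is virtual_offset_p; input_node decodes the
	 same layout.  */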
556       streamer_write_uhwi_stream
557 	 (ob->main_stream,
558 	  1 + (node->thunk.this_adjusting != 0) * 2
559 	  + (node->thunk.virtual_offset_p != 0) * 4);
560       streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
561       streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
562       streamer_write_uhwi_stream (ob->main_stream, node->thunk.indirect_offset);
563     }
564   streamer_write_hwi_stream (ob->main_stream, node->profile_id);
565   if (DECL_STATIC_CONSTRUCTOR (node->decl))
566     streamer_write_hwi_stream (ob->main_stream, node->get_init_priority ());
567   if (DECL_STATIC_DESTRUCTOR (node->decl))
568     streamer_write_hwi_stream (ob->main_stream, node->get_fini_priority ());
569 }
570 
571 /* Output the varpool NODE to OB.
572    If NODE is not in the partition described by ENCODER, then NODE is a boundary.  */
573 
574 static void
575 lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
576 			 lto_symtab_encoder_t encoder)
577 {
578   bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
579   bool encode_initializer_p
580 	 = (node->definition
581 	    && lto_symtab_encoder_encode_initializer_p (encoder, node));
582   struct bitpack_d bp;
583   int ref;
584   const char *comdat;
585   const char *section;
586   tree group;
587 
588   gcc_assert (!encode_initializer_p || node->definition);
589   gcc_assert (boundary_p || encode_initializer_p);
590 
591   streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
592 		       LTO_symtab_variable);
593   streamer_write_hwi_stream (ob->main_stream, node->order);
594   lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
595   bp = bitpack_create (ob->main_stream);
596   bp_pack_value (&bp, node->externally_visible, 1);
597   bp_pack_value (&bp, node->no_reorder, 1);
598   bp_pack_value (&bp, node->force_output, 1);
599   bp_pack_value (&bp, node->forced_by_abi, 1);
600   bp_pack_value (&bp, node->unique_name, 1);
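  /* A definition whose initializer is not streamed (and that is not an alias)
     is presented to the reader as if its body had been removed; the matching
     "definition" bit below is cleared in the same situation.  */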
601   bp_pack_value (&bp,
602 		 node->body_removed
603 		 || (!encode_initializer_p && !node->alias && node->definition),
604 		 1);
605   bp_pack_value (&bp, node->implicit_section, 1);
606   bp_pack_value (&bp, node->writeonly, 1);
607   bp_pack_value (&bp, node->definition && (encode_initializer_p || node->alias),
608 		 1);
609   bp_pack_value (&bp, node->alias, 1);
610   bp_pack_value (&bp, node->transparent_alias, 1);
611   bp_pack_value (&bp, node->weakref, 1);
612   bp_pack_value (&bp, node->analyzed && (!boundary_p || node->alias), 1);
613   gcc_assert (node->definition || !node->analyzed);
614   /* Constant pool initializers can be de-unified into individual ltrans units.
615      FIXME: Alternatively, at -Os we may want to avoid generating the local
616      labels for them and share them across LTRANS partitions.  */
617   if (node->get_partitioning_class () != SYMBOL_PARTITION)
618     {
619       bp_pack_value (&bp, 0, 1);  /* used_from_other_partition.  */
620       bp_pack_value (&bp, 0, 1);  /* in_other_partition.  */
621     }
622   else
623     {
624       bp_pack_value (&bp, node->definition
625 		     && referenced_from_other_partition_p (node, encoder), 1);
626       bp_pack_value (&bp, node->analyzed
627 		     && boundary_p && !DECL_EXTERNAL (node->decl), 1);
628 	  /* in_other_partition.  */
629     }
630   bp_pack_value (&bp, node->tls_model, 3);
631   bp_pack_value (&bp, node->used_by_single_function, 1);
632   bp_pack_value (&bp, node->dynamically_initialized, 1);
633   streamer_write_bitpack (&bp);
634 
635   group = node->get_comdat_group ();
636   if (group)
637     comdat = IDENTIFIER_POINTER (group);
638   else
639     comdat = "";
640   streamer_write_data_stream (ob->main_stream, comdat, strlen (comdat) + 1);
641 
642   if (group)
643     {
644       if (node->same_comdat_group)
645 	{
646 	  ref = LCC_NOT_FOUND;
647 	  for (struct symtab_node *n = node->same_comdat_group;
648 	       ref == LCC_NOT_FOUND && n != node; n = n->same_comdat_group)
649 	    ref = lto_symtab_encoder_lookup (encoder, n);
650 	}
651       else
652 	ref = LCC_NOT_FOUND;
653       streamer_write_hwi_stream (ob->main_stream, ref);
654     }
655 
656   section = node->get_section ();
657   if (!section)
658     section = "";
659   streamer_write_data_stream (ob->main_stream, section, strlen (section) + 1);
660 
661   streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
662 		       LDPR_NUM_KNOWN, node->resolution);
663 }
664 
665 /* Output the reference REF to OB, using ENCODER to look up the
666    reference number of the referred symbol.  */
667 
668 static void
669 lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
670 		lto_symtab_encoder_t encoder)
671 {
672   struct bitpack_d bp;
673   int nref;
674   int uid = ref->lto_stmt_uid;
675   struct cgraph_node *node;
676 
677   bp = bitpack_create (ob->main_stream);
678   bp_pack_value (&bp, ref->use, 3);
679   bp_pack_value (&bp, ref->speculative, 1);
680   streamer_write_bitpack (&bp);
681   nref = lto_symtab_encoder_lookup (encoder, ref->referred);
682   gcc_assert (nref != LCC_NOT_FOUND);
683   streamer_write_hwi_stream (ob->main_stream, nref);
684 
685   node = dyn_cast <cgraph_node *> (ref->referring);
686   if (node)
687     {
688       if (ref->stmt)
689 	uid = gimple_uid (ref->stmt) + 1;
690       streamer_write_hwi_stream (ob->main_stream, uid);
691     }
692 }
693 
694 /* Stream out profile_summary to OB.  */
695 
696 static void
697 output_profile_summary (struct lto_simple_output_block *ob)
698 {
699   if (profile_info)
700     {
701       /* We do not output num and run_max, they are not used by
702          GCC profile feedback and they are difficult to merge from multiple
703          units.  */
704       unsigned runs = (profile_info->runs);
705       streamer_write_uhwi_stream (ob->main_stream, runs);
706 
707       /* IPA-profile computes the hot bb threshold based on the cumulated
708 	 whole-program profile.  We need to stream it down to ltrans.  */
709        if (flag_wpa)
710          streamer_write_gcov_count_stream (ob->main_stream,
711 					   get_hot_bb_threshold ());
712     }
713   else
714     streamer_write_uhwi_stream (ob->main_stream, 0);
715 }
716 
717 /* Output all callees or indirect outgoing edges.  EDGE must be the first such
718    edge.  */
719 
720 static void
721 output_outgoing_cgraph_edges (struct cgraph_edge *edge,
722 			      struct lto_simple_output_block *ob,
723 			      lto_symtab_encoder_t encoder)
724 {
725   if (!edge)
726     return;
727 
728   /* Output edges in backward direction, so the reconstructed callgraph matches
729      and it is easy to associate call sites with the IPA pass summaries.  */
730   while (edge->next_callee)
731     edge = edge->next_callee;
732   for (; edge; edge = edge->prev_callee)
733     lto_output_edge (ob, edge, encoder);
734 }
735 
736 /* Output the ipa references of the nodes in ENCODER.  */
737 
738 static void
739 output_refs (lto_symtab_encoder_t encoder)
740 {
741   struct lto_simple_output_block *ob;
742   int count;
743   struct ipa_ref *ref;
744 
745   ob = lto_create_simple_output_block (LTO_section_refs);
746 
747   for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
748     {
749       symtab_node *node = lto_symtab_encoder_deref (encoder, i);
750 
751       /* IPA_REF_ALIAS references are always preserved
752 	 in the boundary.  An alias node can't have other references and
753 	 can always be handled as if it were not in the boundary.  */
754       if (!node->alias && !lto_symtab_encoder_in_partition_p (encoder, node))
755 	continue;
756 
757       count = node->ref_list.nreferences ();
758       if (count)
759 	{
760 	  streamer_write_gcov_count_stream (ob->main_stream, count);
761 	  streamer_write_uhwi_stream (ob->main_stream,
762 				     lto_symtab_encoder_lookup (encoder, node));
763 	  for (int i = 0; node->iterate_reference (i, ref); i++)
764 	    lto_output_ref (ob, ref, encoder);
765 	}
766     }
767 
768   streamer_write_uhwi_stream (ob->main_stream, 0);
769 
770   lto_destroy_simple_output_block (ob);
771 }
772 
773 /* Add NODE into ENCODER as well as the nodes it is cloned from.
774    Do it so that clone origins appear before their clones.  */
775 
776 static void
777 add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
778 	     bool include_body)
779 {
780   if (node->clone_of)
781     add_node_to (encoder, node->clone_of, include_body);
782   else if (include_body)
783     lto_set_symtab_encoder_encode_body (encoder, node);
784   lto_symtab_encoder_encode (encoder, node);
785 }
786 
787 /* Add all symbols referenced from NODE to ENCODER.  */
788 
789 static void
790 create_references (lto_symtab_encoder_t encoder, symtab_node *node)
791 {
792   int i;
793   struct ipa_ref *ref = NULL;
794   for (i = 0; node->iterate_reference (i, ref); i++)
795     if (is_a <cgraph_node *> (ref->referred))
796       add_node_to (encoder, dyn_cast <cgraph_node *> (ref->referred), false);
797     else
798       lto_symtab_encoder_encode (encoder, ref->referred);
799 }
800 
801 /* Select what needs to be streamed out.  In regular LTO mode stream everything.
802    In offload LTO mode stream only nodes marked as offloadable.  */
803 void
804 select_what_to_stream (void)
805 {
806   struct symtab_node *snode;
807   FOR_EACH_SYMBOL (snode)
808     snode->need_lto_streaming = !lto_stream_offload_p || snode->offloadable;
809 }
810 
811 /* Find all symbols we want to stream into the given partition and insert them
812    into the encoder.
813 
814    The function actually replaces IN_ENCODER by a new one.  The reason is that
815    the streaming code needs a clone's origin to be streamed before the clone
816    itself.  This means that we need to insert the nodes in a specific order,
817    which the earlier partitioning logic ignores.  */
818 
819 lto_symtab_encoder_t
820 compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
821 {
822   struct cgraph_edge *edge;
823   int i;
824   lto_symtab_encoder_t encoder;
825   lto_symtab_encoder_iterator lsei;
826   hash_set<void *> reachable_call_targets;
827 
828   encoder = lto_symtab_encoder_new (false);
829 
830   /* Go over all entries in the IN_ENCODER and duplicate them to
831      ENCODER. At the same time insert masters of clones so
832      every master appears before its clones.  */
833   for (lsei = lsei_start_function_in_partition (in_encoder);
834        !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
835     {
836       struct cgraph_node *node = lsei_cgraph_node (lsei);
837       if (!node->need_lto_streaming)
838 	continue;
839       add_node_to (encoder, node, true);
840       lto_set_symtab_encoder_in_partition (encoder, node);
841       create_references (encoder, node);
842     }
843   for (lsei = lsei_start_variable_in_partition (in_encoder);
844        !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
845     {
846       varpool_node *vnode = lsei_varpool_node (lsei);
847 
848       if (!vnode->need_lto_streaming)
849 	continue;
850       lto_set_symtab_encoder_in_partition (encoder, vnode);
851       lto_set_symtab_encoder_encode_initializer (encoder, vnode);
852       create_references (encoder, vnode);
853     }
854   /* Also pickle in the initializers of all referenced read-only variables
855      to help folding.  Constant pool variables are not shared, so we must
856      pickle those too.  */
857   for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
858     {
859       symtab_node *node = lto_symtab_encoder_deref (encoder, i);
860       if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
861 	{
862 	  if (!lto_symtab_encoder_encode_initializer_p (encoder,
863 							vnode)
864 	      && (((vnode->ctor_useable_for_folding_p ()
865 		   && (!DECL_VIRTUAL_P (vnode->decl)
866 		       || !flag_wpa
867 		       || flag_ltrans_devirtualize)))))
868 	    {
869 	      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
870 	      create_references (encoder, vnode);
871 	    }
872        }
873     }
874 
875   /* Go over all the nodes again to include callees that are not in
876      SET.  */
877   for (lsei = lsei_start_function_in_partition (encoder);
878        !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
879     {
880       struct cgraph_node *node = lsei_cgraph_node (lsei);
881       for (edge = node->callees; edge; edge = edge->next_callee)
882 	{
883 	  struct cgraph_node *callee = edge->callee;
884 	  if (!lto_symtab_encoder_in_partition_p (encoder, callee))
885 	    {
886 	      /* We should have moved all the inlines.  */
887 	      gcc_assert (!callee->global.inlined_to);
888 	      add_node_to (encoder, callee, false);
889 	    }
890 	}
891       /* Add all possible targets for late devirtualization.  */
892       if (flag_ltrans_devirtualize || !flag_wpa)
893 	for (edge = node->indirect_calls; edge; edge = edge->next_callee)
894 	  if (edge->indirect_info->polymorphic)
895 	    {
896 	      unsigned int i;
897 	      void *cache_token;
898 	      bool final;
899 	      vec <cgraph_node *>targets
900 		= possible_polymorphic_call_targets
901 		    (edge, &final, &cache_token);
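	      /* hash_set::add returns true when the token was already present,
		 so each set of possible targets is added to the boundary only
		 once.  */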
902 	      if (!reachable_call_targets.add (cache_token))
903 		{
904 		  for (i = 0; i < targets.length (); i++)
905 		    {
906 		      struct cgraph_node *callee = targets[i];
907 
908 		      /* Adding external declarations into the unit serves
909 			 no purpose and just increases its boundary.  */
910 		      if (callee->definition
911 			  && !lto_symtab_encoder_in_partition_p
912 			       (encoder, callee))
913 			{
914 			  gcc_assert (!callee->global.inlined_to);
915 			  add_node_to (encoder, callee, false);
916 			}
917 		    }
918 		}
919 	    }
920     }
921   /* Be sure to also insert alias targets and thunk callees.  These need
922      to stay to aid local calling conventions.  */
923   for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
924     {
925       symtab_node *node = lto_symtab_encoder_deref (encoder, i);
926       cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
927 
928       if (node->alias && node->analyzed)
929 	create_references (encoder, node);
930       if (cnode
931 	  && cnode->thunk.thunk_p && !cnode->global.inlined_to)
932 	add_node_to (encoder, cnode->callees->callee, false);
933       while (node->transparent_alias && node->analyzed)
934 	{
935 	  node = node->get_alias_target ();
936 	  if (is_a <cgraph_node *> (node))
937 	    add_node_to (encoder, dyn_cast <cgraph_node *> (node),
938 			 false);
939 	  else
940 	    lto_symtab_encoder_encode (encoder, node);
941 	}
942     }
943   lto_symtab_encoder_delete (in_encoder);
944   return encoder;
945 }
946 
947 /* Output the symbol table for the current partition.  */
948 
949 void
950 output_symtab (void)
951 {
952   struct cgraph_node *node;
953   struct lto_simple_output_block *ob;
954   int i, n_nodes;
955   lto_symtab_encoder_t encoder;
956 
957   if (flag_wpa)
958     output_cgraph_opt_summary ();
959 
960   ob = lto_create_simple_output_block (LTO_section_symtab_nodes);
961 
962   output_profile_summary (ob);
963 
964   /* An encoder for cgraph nodes should have been created by
965      ipa_write_summaries_1.  */
966   gcc_assert (ob->decl_state->symtab_node_encoder);
967   encoder = ob->decl_state->symtab_node_encoder;
968 
969   /* Write out the nodes.  We must first output a node and then its clones,
970      otherwise when reading back a clone there would be nothing to clone
971      from.  */
972   n_nodes = lto_symtab_encoder_size (encoder);
973   for (i = 0; i < n_nodes; i++)
974     {
975       symtab_node *node = lto_symtab_encoder_deref (encoder, i);
976       if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
977         lto_output_node (ob, cnode, encoder);
978       else
979 	lto_output_varpool_node (ob, dyn_cast<varpool_node *> (node), encoder);
980     }
981 
982   /* Go over the nodes in SET again to write edges.  */
983   for (int i = 0; i < lto_symtab_encoder_size (encoder); i++)
984     {
985       node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder, i));
986       if (node
987 	  && ((node->thunk.thunk_p && !node->global.inlined_to)
988 	      || lto_symtab_encoder_in_partition_p (encoder, node)))
989 	{
990 	  output_outgoing_cgraph_edges (node->callees, ob, encoder);
991 	  output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
992 	}
993     }
994 
995   streamer_write_uhwi_stream (ob->main_stream, 0);
996 
997   lto_destroy_simple_output_block (ob);
998 
999   /* Emit toplevel asms.
1000      When doing WPA we must output every asm just once.  Since we do not partition asm
1001      nodes at all, output them to the first output.  This is a kind of hack, but it
1002      should work well.  */
1003   if (!asm_nodes_output)
1004     {
1005       asm_nodes_output = true;
1006       lto_output_toplevel_asms ();
1007     }
1008 
1009   output_refs (encoder);
1010 }
1011 
1012 /* Read a plain string from IB and return it as an identifier; NULL if empty.  */
1013 
1014 static tree
1015 read_identifier (struct lto_input_block *ib)
1016 {
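  /* Scan for the terminating NUL without running past the section; if none is
     found before the end, report a section overrun.  */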
1017   unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1018   tree id;
1019 
1020   if (ib->data[ib->p + len])
1021     lto_section_overrun (ib);
1022   if (!len)
1023     {
1024       ib->p++;
1025       return NULL;
1026     }
1027   id = get_identifier (ib->data + ib->p);
1028   ib->p += len + 1;
1029   return id;
1030 }
1031 
1032 /* Return string encoded in IB, NULL if string is empty.  */
1033 
1034 static const char *
1035 read_string (struct lto_input_block *ib)
1036 {
1037   unsigned int len = strnlen (ib->data + ib->p, ib->len - ib->p - 1);
1038   const char *str;
1039 
1040   if (ib->data[ib->p + len])
1041     lto_section_overrun (ib);
1042   if (!len)
1043     {
1044       ib->p++;
1045       return NULL;
1046     }
1047   str = ib->data + ib->p;
1048   ib->p += len + 1;
1049   return str;
1050 }
1051 
1052 /* Output function/variable tables that will allow libgomp to look up offload
1053    target code.
1054    OFFLOAD_FUNCS is filled in expand_omp_target, OFFLOAD_VARS is filled in
1055    varpool_node::get_create.  In WHOPR (partitioned) mode during the WPA stage
1056    both OFFLOAD_FUNCS and OFFLOAD_VARS are filled by input_offload_tables.  */
1057 
1058 void
1059 output_offload_tables (void)
1060 {
1061   if (vec_safe_is_empty (offload_funcs) && vec_safe_is_empty (offload_vars))
1062     return;
1063 
1064   struct lto_simple_output_block *ob
1065     = lto_create_simple_output_block (LTO_section_offload_table);
1066 
1067   for (unsigned i = 0; i < vec_safe_length (offload_funcs); i++)
1068     {
1069       streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1070 			   LTO_symtab_last_tag, LTO_symtab_unavail_node);
1071       lto_output_fn_decl_index (ob->decl_state, ob->main_stream,
1072 				(*offload_funcs)[i]);
1073     }
1074 
1075   for (unsigned i = 0; i < vec_safe_length (offload_vars); i++)
1076     {
1077       streamer_write_enum (ob->main_stream, LTO_symtab_tags,
1078 			   LTO_symtab_last_tag, LTO_symtab_variable);
1079       lto_output_var_decl_index (ob->decl_state, ob->main_stream,
1080 				 (*offload_vars)[i]);
1081     }
1082 
1083   streamer_write_uhwi_stream (ob->main_stream, 0);
1084   lto_destroy_simple_output_block (ob);
1085 
1086   /* In WHOPR mode during the WPA stage the joint offload tables need to be
1087      streamed to one partition only.  That's why we free offload_funcs and
1088      offload_vars after the first call of output_offload_tables.  */
1089   if (flag_wpa)
1090     {
1091       vec_free (offload_funcs);
1092       vec_free (offload_vars);
1093     }
1094 }
1095 
1096 /* Verify the partitioning of NODE.  */
1097 
1098 static inline void
1099 verify_node_partition (symtab_node *node)
1100 {
1101   if (flag_ltrans)
1102     return;
1103 
1104 #ifdef ACCEL_COMPILER
1105   if (node->in_other_partition)
1106     {
1107       if (TREE_CODE (node->decl) == FUNCTION_DECL)
1108 	error_at (DECL_SOURCE_LOCATION (node->decl),
1109 		  "function %qs has been referenced in offloaded code but"
1110 		  " hasn%'t been marked to be included in the offloaded code",
1111 		  node->name ());
1112       else if (VAR_P (node->decl))
1113 	error_at (DECL_SOURCE_LOCATION (node->decl),
1114 		  "variable %qs has been referenced in offloaded code but"
1115 		  " hasn%'t been marked to be included in the offloaded code",
1116 		  node->name ());
1117       else
1118 	gcc_unreachable ();
1119     }
1120 #else
1121   gcc_assert (!node->in_other_partition
1122 	      && !node->used_from_other_partition);
1123 #endif
1124 }
1125 
1126 /* Overwrite the information in NODE based on FILE_DATA and TAG.
1127    This is called either to initialize
1128    NODE or to replace the values in it, for instance because the first
1129    time we saw it, the function body was not available but now it
1130    is.  BP is a bitpack with all the bitflags for NODE read from the
1131    stream.  */
1132 
1133 static void
1134 input_overwrite_node (struct lto_file_decl_data *file_data,
1135 		      struct cgraph_node *node,
1136 		      enum LTO_symtab_tags tag,
1137 		      struct bitpack_d *bp)
1138 {
1139   node->aux = (void *) tag;
1140   node->lto_file_data = file_data;
1141 
1142   node->local.local = bp_unpack_value (bp, 1);
1143   node->externally_visible = bp_unpack_value (bp, 1);
1144   node->no_reorder = bp_unpack_value (bp, 1);
1145   node->definition = bp_unpack_value (bp, 1);
1146   node->local.versionable = bp_unpack_value (bp, 1);
1147   node->local.can_change_signature = bp_unpack_value (bp, 1);
1148   node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
1149   node->force_output = bp_unpack_value (bp, 1);
1150   node->forced_by_abi = bp_unpack_value (bp, 1);
1151   node->unique_name = bp_unpack_value (bp, 1);
1152   node->body_removed = bp_unpack_value (bp, 1);
1153   node->implicit_section = bp_unpack_value (bp, 1);
1154   node->address_taken = bp_unpack_value (bp, 1);
1155   node->used_from_other_partition = bp_unpack_value (bp, 1);
1156   node->lowered = bp_unpack_value (bp, 1);
1157   node->analyzed = tag == LTO_symtab_analyzed_node;
1158   node->in_other_partition = bp_unpack_value (bp, 1);
1159   if (node->in_other_partition
1160       /* Avoid updating the decl when we are seeing just an inline clone.
1161 	 When inlining a function that has functions already inlined into it,
1162 	 we produce clones of inline clones.
1163 
1164 	 WPA partitioning might put each clone into a different unit and
1165 	 we might end up streaming an inline clone from another partition
1166 	 to support the clone we are interested in.  */
1167       && (!node->clone_of
1168 	  || node->clone_of->decl != node->decl))
1169     {
1170       DECL_EXTERNAL (node->decl) = 1;
1171       TREE_STATIC (node->decl) = 0;
1172     }
1173   node->alias = bp_unpack_value (bp, 1);
1174   node->transparent_alias = bp_unpack_value (bp, 1);
1175   node->weakref = bp_unpack_value (bp, 1);
1176   node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
1177   node->only_called_at_startup = bp_unpack_value (bp, 1);
1178   node->only_called_at_exit = bp_unpack_value (bp, 1);
1179   node->tm_clone = bp_unpack_value (bp, 1);
1180   node->calls_comdat_local = bp_unpack_value (bp, 1);
1181   node->icf_merged = bp_unpack_value (bp, 1);
1182   node->nonfreeing_fn = bp_unpack_value (bp, 1);
1183   node->thunk.thunk_p = bp_unpack_value (bp, 1);
1184   node->parallelized_function = bp_unpack_value (bp, 1);
1185   node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
1186 				     LDPR_NUM_KNOWN);
1187   node->split_part = bp_unpack_value (bp, 1);
1188   verify_node_partition (node);
1189 }
1190 
1191 /* Return the identifier naming the symbol that DECL is an alias of.  */
1192 
1193 static tree
1194 get_alias_symbol (tree decl)
1195 {
1196   tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1197   return get_identifier (TREE_STRING_POINTER
1198 			  (TREE_VALUE (TREE_VALUE (alias))));
1199 }
1200 
1201 /* Read a node from input_block IB.  TAG is the node's tag just read.
1202    Return the node read or overwritten.  */
1203 
1204 static struct cgraph_node *
1205 input_node (struct lto_file_decl_data *file_data,
1206 	    struct lto_input_block *ib,
1207 	    enum LTO_symtab_tags tag,
1208 	    vec<symtab_node *> nodes)
1209 {
1210   gcc::pass_manager *passes = g->get_passes ();
1211   tree fn_decl;
1212   struct cgraph_node *node;
1213   struct bitpack_d bp;
1214   unsigned decl_index;
1215   int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
1216   int clone_ref;
1217   int order;
1218   int i, count;
1219   tree group;
1220   const char *section;
1221   order = streamer_read_hwi (ib) + order_base;
1222   clone_ref = streamer_read_hwi (ib);
1223 
1224   decl_index = streamer_read_uhwi (ib);
1225   fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1226 
1227   if (clone_ref != LCC_NOT_FOUND)
1228     {
1229       node = dyn_cast<cgraph_node *> (nodes[clone_ref])->create_clone (fn_decl,
1230 	profile_count::uninitialized (), false,
1231 	vNULL, false, NULL, NULL);
1232     }
1233   else
1234     {
1235       /* Declarations of functions can already be merged with a declaration
1236 	 from another input file.  We keep the cgraph unmerged until after streaming
1237 	 of IPA passes is done.  Always forcibly create a fresh node.  */
1238       node = symtab->create_empty ();
1239       node->decl = fn_decl;
1240       if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (fn_decl)))
1241 	node->ifunc_resolver = 1;
1242       node->register_symbol ();
1243     }
1244 
1245   node->order = order;
1246   if (order >= symtab->order)
1247     symtab->order = order + 1;
1248 
1249   node->count = profile_count::stream_in (ib);
1250   node->count_materialization_scale = streamer_read_hwi (ib);
1251 
1252   count = streamer_read_hwi (ib);
1253   node->ipa_transforms_to_apply = vNULL;
1254   for (i = 0; i < count; i++)
1255     {
1256       opt_pass *pass;
1257       int pid = streamer_read_hwi (ib);
1258 
1259       gcc_assert (pid < passes->passes_by_id_size);
1260       pass = passes->passes_by_id[pid];
1261       node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
1262     }
1263 
1264   if (tag == LTO_symtab_analyzed_node)
1265     ref = streamer_read_hwi (ib);
1266 
1267   group = read_identifier (ib);
1268   if (group)
1269     ref2 = streamer_read_hwi (ib);
1270 
1271   /* Make sure that we have not read this node before.  Nodes that
1272      have already been read will have their tag stored in the 'aux'
1273      field.  Since built-in functions can be referenced in multiple
1274      functions, they are expected to be read more than once.  */
1275   if (node->aux && !fndecl_built_in_p (node->decl))
1276     internal_error ("bytecode stream: found multiple instances of cgraph "
1277 		    "node with uid %d", node->get_uid ());
1278 
1279   node->tp_first_run = streamer_read_uhwi (ib);
1280 
1281   bp = streamer_read_bitpack (ib);
1282 
1283   input_overwrite_node (file_data, node, tag, &bp);
1284 
1285   /* Store a reference for now, and fix up later to be a pointer.  */
1286   node->global.inlined_to = (cgraph_node *) (intptr_t) ref;
1287 
1288   if (group)
1289     {
1290       node->set_comdat_group (group);
1291       /* Store a reference for now, and fix up later to be a pointer.  */
1292       node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
1293     }
1294   else
1295     node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1296   section = read_string (ib);
1297   if (section)
1298     node->set_section_for_node (section);
1299 
1300   if (node->definition)
1301     {
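      /* Decode the thunk flag word written by lto_output_node: bit 1 is
	 this_adjusting, bit 2 is virtual_offset_p.  */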
1302       int type = streamer_read_uhwi (ib);
1303       HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
1304       HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
1305       HOST_WIDE_INT indirect_offset = streamer_read_uhwi (ib);
1306 
1307       node->thunk.fixed_offset = fixed_offset;
1308       node->thunk.virtual_value = virtual_value;
1309       node->thunk.indirect_offset = indirect_offset;
1310       node->thunk.this_adjusting = (type & 2);
1311       node->thunk.virtual_offset_p = (type & 4);
1312     }
1313   if (node->alias && !node->analyzed && node->weakref)
1314     node->alias_target = get_alias_symbol (node->decl);
1315   node->profile_id = streamer_read_hwi (ib);
1316   if (DECL_STATIC_CONSTRUCTOR (node->decl))
1317     node->set_init_priority (streamer_read_hwi (ib));
1318   if (DECL_STATIC_DESTRUCTOR (node->decl))
1319     node->set_fini_priority (streamer_read_hwi (ib));
1320 
1321   return node;
1322 }
1323 
1324 /* Read a varpool node from input_block IB.
1325    Return the node read or overwritten.  */
1326 
1327 static varpool_node *
1328 input_varpool_node (struct lto_file_decl_data *file_data,
1329 		    struct lto_input_block *ib)
1330 {
1331   int decl_index;
1332   tree var_decl;
1333   varpool_node *node;
1334   struct bitpack_d bp;
1335   int ref = LCC_NOT_FOUND;
1336   int order;
1337   tree group;
1338   const char *section;
1339 
1340   order = streamer_read_hwi (ib) + order_base;
1341   decl_index = streamer_read_uhwi (ib);
1342   var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1343 
1344   /* Declarations of variables can already be merged with a declaration
1345      from another input file.  We keep the symbol table unmerged until after
1346      streaming of IPA passes is done.  Always forcibly create a fresh node.  */
1347   node = varpool_node::create_empty ();
1348   node->decl = var_decl;
1349   node->register_symbol ();
1350 
1351   node->order = order;
1352   if (order >= symtab->order)
1353     symtab->order = order + 1;
1354   node->lto_file_data = file_data;
1355 
1356   bp = streamer_read_bitpack (ib);
1357   node->externally_visible = bp_unpack_value (&bp, 1);
1358   node->no_reorder = bp_unpack_value (&bp, 1);
1359   node->force_output = bp_unpack_value (&bp, 1);
1360   node->forced_by_abi = bp_unpack_value (&bp, 1);
1361   node->unique_name = bp_unpack_value (&bp, 1);
1362   node->body_removed = bp_unpack_value (&bp, 1);
1363   node->implicit_section = bp_unpack_value (&bp, 1);
1364   node->writeonly = bp_unpack_value (&bp, 1);
1365   node->definition = bp_unpack_value (&bp, 1);
1366   node->alias = bp_unpack_value (&bp, 1);
1367   node->transparent_alias = bp_unpack_value (&bp, 1);
1368   node->weakref = bp_unpack_value (&bp, 1);
1369   node->analyzed = bp_unpack_value (&bp, 1);
1370   node->used_from_other_partition = bp_unpack_value (&bp, 1);
1371   node->in_other_partition = bp_unpack_value (&bp, 1);
1372   if (node->in_other_partition)
1373     {
1374       DECL_EXTERNAL (node->decl) = 1;
1375       TREE_STATIC (node->decl) = 0;
1376     }
1377   if (node->alias && !node->analyzed && node->weakref)
1378     node->alias_target = get_alias_symbol (node->decl);
1379   node->tls_model = (enum tls_model)bp_unpack_value (&bp, 3);
1380   node->used_by_single_function = bp_unpack_value (&bp, 1);
1381   node->dynamically_initialized = bp_unpack_value (&bp, 1);
1382   group = read_identifier (ib);
1383   if (group)
1384     {
1385       node->set_comdat_group (group);
1386       ref = streamer_read_hwi (ib);
1387       /* Store a reference for now, and fix up later to be a pointer.  */
1388       node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1389     }
1390   else
1391     node->same_comdat_group = (symtab_node *) (intptr_t) LCC_NOT_FOUND;
1392   section = read_string (ib);
1393   if (section)
1394     node->set_section_for_node (section);
1395   node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1396 					        LDPR_NUM_KNOWN);
1397   verify_node_partition (node);
1398   return node;
1399 }
1400 
1401 /* Read a reference made by REFERRING_NODE from input_block IB.  NODES is the
1402    vector of previously read symtab nodes used to resolve the referred symbol.  */
1403 
1404 static void
1405 input_ref (struct lto_input_block *ib,
1406 	   symtab_node *referring_node,
1407 	   vec<symtab_node *> nodes)
1408 {
1409   symtab_node *node = NULL;
1410   struct bitpack_d bp;
1411   enum ipa_ref_use use;
1412   bool speculative;
1413   struct ipa_ref *ref;
1414 
1415   bp = streamer_read_bitpack (ib);
1416   use = (enum ipa_ref_use) bp_unpack_value (&bp, 3);
1417   speculative = bp_unpack_value (&bp, 1);
1418   node = nodes[streamer_read_hwi (ib)];
1419   ref = referring_node->create_reference (node, use);
1420   ref->speculative = speculative;
1421   if (is_a <cgraph_node *> (referring_node))
1422     ref->lto_stmt_uid = streamer_read_hwi (ib);
1423 }
1424 
1425 /* Read an edge from IB.  NODES points to a vector of previously read nodes for
1426    decoding caller and callee of the edge to be read.  If INDIRECT is true, the
1427    edge being read is indirect (in the sense that it has
1428    indirect_unknown_callee set).  */
1429 
1430 static void
1431 input_edge (struct lto_input_block *ib, vec<symtab_node *> nodes,
1432 	    bool indirect)
1433 {
1434   struct cgraph_node *caller, *callee;
1435   struct cgraph_edge *edge;
1436   unsigned int stmt_id;
1437   profile_count count;
1438   cgraph_inline_failed_t inline_failed;
1439   struct bitpack_d bp;
1440   int ecf_flags = 0;
1441 
1442   caller = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1443   if (caller == NULL || caller->decl == NULL_TREE)
1444     internal_error ("bytecode stream: no caller found while reading edge");
1445 
1446   if (!indirect)
1447     {
1448       callee = dyn_cast<cgraph_node *> (nodes[streamer_read_hwi (ib)]);
1449       if (callee == NULL || callee->decl == NULL_TREE)
1450 	internal_error ("bytecode stream: no callee found while reading edge");
1451     }
1452   else
1453     callee = NULL;
1454 
1455   count = profile_count::stream_in (ib);
1456 
1457   bp = streamer_read_bitpack (ib);
1458   inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
1459   stmt_id = bp_unpack_var_len_unsigned (&bp);
1460 
1461   if (indirect)
1462     edge = caller->create_indirect_edge (NULL, 0, count);
1463   else
1464     edge = caller->create_edge (callee, NULL, count);
1465 
1466   edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1467   edge->speculative = bp_unpack_value (&bp, 1);
1468   edge->lto_stmt_uid = stmt_id;
1469   edge->inline_failed = inline_failed;
1470   edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1471   edge->can_throw_external = bp_unpack_value (&bp, 1);
1472   edge->in_polymorphic_cdtor = bp_unpack_value (&bp, 1);
1473   if (indirect)
1474     {
1475       if (bp_unpack_value (&bp, 1))
1476 	ecf_flags |= ECF_CONST;
1477       if (bp_unpack_value (&bp, 1))
1478 	ecf_flags |= ECF_PURE;
1479       if (bp_unpack_value (&bp, 1))
1480 	ecf_flags |= ECF_NORETURN;
1481       if (bp_unpack_value (&bp, 1))
1482 	ecf_flags |= ECF_MALLOC;
1483       if (bp_unpack_value (&bp, 1))
1484 	ecf_flags |= ECF_NOTHROW;
1485       if (bp_unpack_value (&bp, 1))
1486 	ecf_flags |= ECF_RETURNS_TWICE;
1487       edge->indirect_info->ecf_flags = ecf_flags;
1488       edge->indirect_info->common_target_id = streamer_read_hwi (ib);
1489       if (edge->indirect_info->common_target_id)
1490         edge->indirect_info->common_target_probability = streamer_read_hwi (ib);
1491     }
1492 }
1493 
1494 
1495 /* Read the symbol table from IB using the info in FILE_DATA.  Return the vector of nodes read.  */
1496 
1497 static vec<symtab_node *>
1498 input_cgraph_1 (struct lto_file_decl_data *file_data,
1499 		struct lto_input_block *ib)
1500 {
1501   enum LTO_symtab_tags tag;
1502   vec<symtab_node *> nodes = vNULL;
1503   symtab_node *node;
1504   unsigned i;
1505 
1506   tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1507   order_base = symtab->order;
1508   while (tag)
1509     {
1510       if (tag == LTO_symtab_edge)
1511         input_edge (ib, nodes, false);
1512       else if (tag == LTO_symtab_indirect_edge)
1513         input_edge (ib, nodes, true);
1514       else if (tag == LTO_symtab_variable)
1515         {
1516 	  node = input_varpool_node (file_data, ib);
1517           nodes.safe_push (node);
1518 	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1519         }
1520       else
1521 	{
1522 	  node = input_node (file_data, ib, tag, nodes);
1523 	  if (node == NULL || node->decl == NULL_TREE)
1524 	    internal_error ("bytecode stream: found empty cgraph node");
1525 	  nodes.safe_push (node);
1526 	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1527 	}
1528 
1529       tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1530     }
1531 
1532   lto_input_toplevel_asms (file_data, order_base);
1533 
1534   /* AUX pointers should all be non-zero for function nodes read from the stream.  */
1535   if (flag_checking)
1536     {
1537       FOR_EACH_VEC_ELT (nodes, i, node)
1538 	gcc_assert (node->aux || !is_a <cgraph_node *> (node));
1539     }
1540   FOR_EACH_VEC_ELT (nodes, i, node)
1541     {
1542       int ref;
1543       if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
1544 	{
1545 	  ref = (int) (intptr_t) cnode->global.inlined_to;
1546 
1547 	  /* We share declarations of builtins, so we may read the same node twice.  */
1548 	  if (!node->aux)
1549 	    continue;
1550 	  node->aux = NULL;
1551 
1552 	  /* Fixup inlined_to from reference to pointer.  */
1553 	  if (ref != LCC_NOT_FOUND)
1554 	    cnode->global.inlined_to
1555 	      = dyn_cast<cgraph_node *> (nodes[ref]);
1556 	  else
1557 	    cnode->global.inlined_to = NULL;
1558 	}
1559 
1560       ref = (int) (intptr_t) node->same_comdat_group;
1561 
1562       /* Fixup same_comdat_group from reference to pointer.  */
1563       if (ref != LCC_NOT_FOUND)
1564 	node->same_comdat_group = nodes[ref];
1565       else
1566 	node->same_comdat_group = NULL;
1567     }
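  /* Leave a non-NULL aux on every function node read from this file;
     input_symtab later uses it to tell which nodes should be overwritten
     and clears it again.  */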
1568   FOR_EACH_VEC_ELT (nodes, i, node)
1569     node->aux = is_a <cgraph_node *> (node) ? (void *)1 : NULL;
1570   return nodes;
1571 }
1572 
1573 /* Read the IPA reference lists for NODES from IB.  */
1574 
1575 static void
1576 input_refs (struct lto_input_block *ib,
1577 	    vec<symtab_node *> nodes)
1578 {
1579   int count;
1580   int idx;
1581   while (true)
1582     {
1583       symtab_node *node;
1584       count = streamer_read_uhwi (ib);
1585       if (!count)
1586 	break;
1587       idx = streamer_read_uhwi (ib);
1588       node = nodes[idx];
1589       while (count)
1590 	{
1591 	  input_ref (ib, node, nodes);
1592 	  count--;
1593 	}
1594     }
1595 }
1596 
1597 /* Input profile_info from IB.  */
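/* The summary read below is the number of train runs and, when that is
   non-zero and we are reading an ltrans unit, the hot basic-block
   threshold already computed at WPA time.  */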
1598 static void
1599 input_profile_summary (struct lto_input_block *ib,
1600 		       struct lto_file_decl_data *file_data)
1601 {
1602   unsigned int runs = streamer_read_uhwi (ib);
1603   if (runs)
1604     {
1605       file_data->profile_info.runs = runs;
1606 
1607       /* IPA-profile computes the hot basic-block threshold from the cumulated
1608 	 whole-program profile.  It needs to be streamed down to the ltrans units.  */
1609       if (flag_ltrans)
1610 	set_hot_bb_threshold (streamer_read_gcov_count (ib));
1611     }
1612 
1613 }
1614 
1615 /* Rescale profile summaries to the same number of runs in the whole unit.  */
1616 
1617 static void
1618 merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1619 {
1620   struct lto_file_decl_data *file_data;
1621   unsigned int j;
1622   gcov_unsigned_t max_runs = 0;
1623   struct cgraph_node *node;
1624   struct cgraph_edge *edge;
1625 
1626   /* Find the unit with the maximal number of runs.  If we ever get serious
1627      about roundoff errors, we might also consider computing the least
1628      common multiple.  */
1629   for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1630     if (max_runs < file_data->profile_info.runs)
1631       max_runs = file_data->profile_info.runs;
1632 
1633   if (!max_runs)
1634     return;
1635 
1636   /* Simple overflow check.  We probably don't need to support that many train
1637      runs.  Such a large value probably implies data corruption anyway.  */
1638   if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1639     {
1640       sorry ("At most %i profile runs are supported.  Perhaps corrupted profile?",
1641 	     INT_MAX / REG_BR_PROB_BASE);
1642       return;
1643     }
1644 
1645   profile_info = XCNEW (gcov_summary);
1646   profile_info->runs = max_runs;
1647 
1648   /* If merging already happened at WPA time, we are done.  */
1649   if (flag_ltrans)
1650     return;
1651 
1652   /* Now compute count_materialization_scale of each node.
1653      During LTRANS we already have values of count_materialization_scale
1654      computed, so just update them.  */
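  /* For instance, assuming count_materialization_scale starts out as
     REG_BR_PROB_BASE, a unit trained with 2 runs in a program whose maximum
     is 6 runs gets scale = 3 * REG_BR_PROB_BASE, so its counts below are
     multiplied by 3.  */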
1655   FOR_EACH_FUNCTION (node)
1656     if (node->lto_file_data
1657 	&& node->lto_file_data->profile_info.runs)
1658       {
1659 	int scale;
1660 
1661 	scale = RDIV (node->count_materialization_scale * max_runs,
1662                       node->lto_file_data->profile_info.runs);
1663 	node->count_materialization_scale = scale;
1664 	if (scale < 0)
1665 	  fatal_error (input_location, "Profile information in %s corrupted",
1666 		       file_data->file_name);
1667 
1668 	if (scale == REG_BR_PROB_BASE)
1669 	  continue;
1670 	for (edge = node->callees; edge; edge = edge->next_callee)
1671 	  if (edge->count.ipa ().nonzero_p ())
1672 	    edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1673 	for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1674 	  if (edge->count.ipa ().nonzero_p ())
1675 	    edge->count = edge->count.apply_scale (scale, REG_BR_PROB_BASE);
1676 	if (node->count.ipa ().nonzero_p ())
1677 	  node->count = node->count.apply_scale (scale, REG_BR_PROB_BASE);
1678       }
1679 }
1680 
1681 /* Input and merge the symtab from each of the .o files passed to
1682    lto1.  */
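/* For each file this reads, in order: the profile summary, the symtab
   nodes and edges, and the reference lists; when compiling an ltrans unit
   it also reads the cgraph optimization summary.  */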
1683 
1684 void
1685 input_symtab (void)
1686 {
1687   struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1688   struct lto_file_decl_data *file_data;
1689   unsigned int j = 0;
1690   struct cgraph_node *node;
1691 
1692   while ((file_data = file_data_vec[j++]))
1693     {
1694       const char *data;
1695       size_t len;
1696       struct lto_input_block *ib;
1697       vec<symtab_node *> nodes;
1698 
1699       ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
1700 					  &data, &len);
1701       if (!ib)
1702 	fatal_error (input_location,
1703 		     "cannot find LTO cgraph in %s", file_data->file_name);
1704       input_profile_summary (ib, file_data);
1705       file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
1706       nodes = input_cgraph_1 (file_data, ib);
1707       lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
1708 				      ib, data, len);
1709 
1710       ib = lto_create_simple_input_block (file_data, LTO_section_refs,
1711 					  &data, &len);
1712       if (!ib)
1713 	fatal_error (input_location, "cannot find LTO section refs in %s",
1714 		     file_data->file_name);
1715       input_refs (ib, nodes);
1716       lto_destroy_simple_input_block (file_data, LTO_section_refs,
1717 				      ib, data, len);
1718       if (flag_ltrans)
1719 	input_cgraph_opt_summary (nodes);
1720       nodes.release ();
1721     }
1722 
1723   merge_profile_summaries (file_data_vec);
1724 
1725   /* Clear out the aux field that was used to store enough state to
1726      tell which nodes should be overwritten.  */
1727   FOR_EACH_FUNCTION (node)
1728     {
1729       /* Some nodes may have been created by cgraph_node.  This
1730 	 happens when the callgraph contains nested functions.  If the
1731 	 node for the parent function was never emitted to the gimple
1732 	 file, cgraph_node will create a node for it when setting the
1733 	 context of the nested function.  */
1734       if (node->lto_file_data)
1735 	node->aux = NULL;
1736     }
1737 }
1738 
1739 /* Input function/variable tables that will allow libgomp to look up offload
1740    target code, and store them into OFFLOAD_FUNCS and OFFLOAD_VARS.  */
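/* Each offload table is a sequence of records terminated by a zero tag:
   LTO_symtab_unavail_node followed by a function decl index, or
   LTO_symtab_variable followed by a variable decl index.  */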
1741 
1742 void
1743 input_offload_tables (bool do_force_output)
1744 {
1745   struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
1746   struct lto_file_decl_data *file_data;
1747   unsigned int j = 0;
1748 
1749   while ((file_data = file_data_vec[j++]))
1750     {
1751       const char *data;
1752       size_t len;
1753       struct lto_input_block *ib
1754 	= lto_create_simple_input_block (file_data, LTO_section_offload_table,
1755 					 &data, &len);
1756       if (!ib)
1757 	continue;
1758 
1759       enum LTO_symtab_tags tag
1760 	= streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1761       while (tag)
1762 	{
1763 	  if (tag == LTO_symtab_unavail_node)
1764 	    {
1765 	      int decl_index = streamer_read_uhwi (ib);
1766 	      tree fn_decl
1767 		= lto_file_decl_data_get_fn_decl (file_data, decl_index);
1768 	      vec_safe_push (offload_funcs, fn_decl);
1769 
1770 	      /* Prevent IPA from removing fn_decl as unreachable, since there
1771 		 may be no refs from the parent function to child_fn in offload
1772 		 LTO mode.  */
1773 	      if (do_force_output)
1774 		cgraph_node::get (fn_decl)->mark_force_output ();
1775 	    }
1776 	  else if (tag == LTO_symtab_variable)
1777 	    {
1778 	      int decl_index = streamer_read_uhwi (ib);
1779 	      tree var_decl
1780 		= lto_file_decl_data_get_var_decl (file_data, decl_index);
1781 	      vec_safe_push (offload_vars, var_decl);
1782 
1783 	      /* Prevent IPA from removing var_decl as unused, since there
1784 		 may be no refs to var_decl in offload LTO mode.  */
1785 	      if (do_force_output)
1786 		varpool_node::get (var_decl)->force_output = 1;
1787 	    }
1788 	  else
1789 	    fatal_error (input_location,
1790 			 "invalid offload table in %s", file_data->file_name);
1791 
1792 	  tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1793 	}
1794 
1795       lto_destroy_simple_input_block (file_data, LTO_section_offload_table,
1796 				      ib, data, len);
1797     }
1798 }
1799 
1800 /* Return true when we need an optimization summary for NODE.  */
1801 
1802 static int
1803 output_cgraph_opt_summary_p (struct cgraph_node *node)
1804 {
1805   return ((node->clone_of || node->former_clone_of)
1806 	  && (node->clone.tree_map
1807 	      || node->clone.args_to_skip
1808 	      || node->clone.combined_args_to_skip));
1809 }
1810 
1811 /* Output optimization summary for EDGE to OB.  */
1812 static void
1813 output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
1814 			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
1815 {
1816 }
1817 
1818 /* Output optimization summary for NODE to OB.  */
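/* The record written below is: the set bits of args_to_skip (a count, then
   the indices), the same for combined_args_to_skip, and the tree_map
   entries (a count, then for each entry the parameter number, the
   replacement tree and a bitpack with the replace_p and ref_p flags).  */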
1819 
1820 static void
1821 output_node_opt_summary (struct output_block *ob,
1822 			 struct cgraph_node *node,
1823 			 lto_symtab_encoder_t encoder)
1824 {
1825   unsigned int index;
1826   bitmap_iterator bi;
1827   struct ipa_replace_map *map;
1828   struct bitpack_d bp;
1829   int i;
1830   struct cgraph_edge *e;
1831 
1832   if (node->clone.args_to_skip)
1833     {
1834       streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
1835       EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
1836 	streamer_write_uhwi (ob, index);
1837     }
1838   else
1839     streamer_write_uhwi (ob, 0);
1840   if (node->clone.combined_args_to_skip)
1841     {
1842       streamer_write_uhwi (ob, bitmap_count_bits (node->clone.combined_args_to_skip));
1843       EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
1844 	streamer_write_uhwi (ob, index);
1845     }
1846   else
1847     streamer_write_uhwi (ob, 0);
1848   streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
1849   FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
1850     {
1851       /* At the moment we assume all old trees are PARM_DECLs, because we have
1852          no mechanism to store function-local declarations in summaries.  */
1853       gcc_assert (!map->old_tree);
1854       streamer_write_uhwi (ob, map->parm_num);
1855       gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
1856       stream_write_tree (ob, map->new_tree, true);
1857       bp = bitpack_create (ob->main_stream);
1858       bp_pack_value (&bp, map->replace_p, 1);
1859       bp_pack_value (&bp, map->ref_p, 1);
1860       streamer_write_bitpack (&bp);
1861     }
1862 
1863   if (lto_symtab_encoder_in_partition_p (encoder, node))
1864     {
1865       for (e = node->callees; e; e = e->next_callee)
1866 	output_edge_opt_summary (ob, e);
1867       for (e = node->indirect_calls; e; e = e->next_callee)
1868 	output_edge_opt_summary (ob, e);
1869     }
1870 }
1871 
1872 /* Output the optimization summaries stored in the callgraph.
1873    At the moment this is just the clone info structure.  */
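/* The section written below is a count of nodes that have a summary,
   followed by each such node's index in the symtab encoder and its summary
   as produced by output_node_opt_summary.  */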
1874 
1875 static void
1876 output_cgraph_opt_summary (void)
1877 {
1878   int i, n_nodes;
1879   lto_symtab_encoder_t encoder;
1880   struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
1881   unsigned count = 0;
1882 
1883   ob->symbol = NULL;
1884   encoder = ob->decl_state->symtab_node_encoder;
1885   n_nodes = lto_symtab_encoder_size (encoder);
1886   for (i = 0; i < n_nodes; i++)
1887     {
1888       symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1889       cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1890       if (cnode && output_cgraph_opt_summary_p (cnode))
1891 	count++;
1892     }
1893   streamer_write_uhwi (ob, count);
1894   for (i = 0; i < n_nodes; i++)
1895     {
1896       symtab_node *node = lto_symtab_encoder_deref (encoder, i);
1897       cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
1898       if (cnode && output_cgraph_opt_summary_p (cnode))
1899 	{
1900 	  streamer_write_uhwi (ob, i);
1901 	  output_node_opt_summary (ob, cnode, encoder);
1902 	}
1903     }
1904   produce_asm (ob, NULL);
1905   destroy_output_block (ob);
1906 }
1907 
1908 /* Input the optimization summary of EDGE.  */
1909 
1910 static void
1911 input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
1912 			struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
1913 {
1914 }
1915 
1916 /* Input the optimization summary of NODE.  */
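/* This must read back exactly what output_node_opt_summary wrote:
   args_to_skip, combined_args_to_skip and the tree_map entries.  */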
1917 
1918 static void
1919 input_node_opt_summary (struct cgraph_node *node,
1920 			struct lto_input_block *ib_main,
1921 			struct data_in *data_in)
1922 {
1923   int i;
1924   int count;
1925   int bit;
1926   struct bitpack_d bp;
1927   struct cgraph_edge *e;
1928 
1929   count = streamer_read_uhwi (ib_main);
1930   if (count)
1931     node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
1932   for (i = 0; i < count; i++)
1933     {
1934       bit = streamer_read_uhwi (ib_main);
1935       bitmap_set_bit (node->clone.args_to_skip, bit);
1936     }
1937   count = streamer_read_uhwi (ib_main);
1938   if (count)
1939     node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
1940   for (i = 0; i < count; i++)
1941     {
1942       bit = streamer_read_uhwi (ib_main);
1943       bitmap_set_bit (node->clone.combined_args_to_skip, bit);
1944     }
1945   count = streamer_read_uhwi (ib_main);
1946   for (i = 0; i < count; i++)
1947     {
1948       struct ipa_replace_map *map = ggc_alloc<ipa_replace_map> ();
1949 
1950       vec_safe_push (node->clone.tree_map, map);
1951       map->parm_num = streamer_read_uhwi (ib_main);
1952       map->old_tree = NULL;
1953       map->new_tree = stream_read_tree (ib_main, data_in);
1954       bp = streamer_read_bitpack (ib_main);
1955       map->replace_p = bp_unpack_value (&bp, 1);
1956       map->ref_p = bp_unpack_value (&bp, 1);
1957     }
1958   for (e = node->callees; e; e = e->next_callee)
1959     input_edge_opt_summary (e, ib_main);
1960   for (e = node->indirect_calls; e; e = e->next_callee)
1961     input_edge_opt_summary (e, ib_main);
1962 }
1963 
1964 /* Read the cgraph optimization summary section of FILE_DATA, whose contents are DATA of length LEN, and apply it to NODES.  */
1965 
1966 static void
1967 input_cgraph_opt_section (struct lto_file_decl_data *file_data,
1968 			  const char *data, size_t len,
1969 			  vec<symtab_node *> nodes)
1970 {
1971   const struct lto_function_header *header =
1972     (const struct lto_function_header *) data;
1973   const int cfg_offset = sizeof (struct lto_function_header);
1974   const int main_offset = cfg_offset + header->cfg_size;
1975   const int string_offset = main_offset + header->main_size;
1976   struct data_in *data_in;
1977   unsigned int i;
1978   unsigned int count;
1979 
1980   lto_input_block ib_main ((const char *) data + main_offset,
1981 			   header->main_size, file_data->mode_table);
1982 
1983   data_in =
1984     lto_data_in_create (file_data, (const char *) data + string_offset,
1985 			header->string_size, vNULL);
1986   count = streamer_read_uhwi (&ib_main);
1987 
1988   for (i = 0; i < count; i++)
1989     {
1990       int ref = streamer_read_uhwi (&ib_main);
1991       input_node_opt_summary (dyn_cast<cgraph_node *> (nodes[ref]),
1992 			      &ib_main, data_in);
1993     }
1994   lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
1995 			 len);
1996   lto_data_in_delete (data_in);
1997 }
1998 
1999 /* Input optimization summary of cgraph.  */
2000 
2001 static void
2002 input_cgraph_opt_summary (vec<symtab_node *> nodes)
2003 {
2004   struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
2005   struct lto_file_decl_data *file_data;
2006   unsigned int j = 0;
2007 
2008   while ((file_data = file_data_vec[j++]))
2009     {
2010       size_t len;
2011       const char *data =
2012 	lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
2013 			      &len);
2014 
2015       if (data)
2016 	input_cgraph_opt_section (file_data, data, len, nodes);
2017     }
2018 }
2019