1 /* Write and read the cgraph to the memory mapped representation of a
2    .o file.
3 
4    Copyright (C) 2009-2014 Free Software Foundation, Inc.
5    Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "stringpool.h"
29 #include "basic-block.h"
30 #include "tree-ssa-alias.h"
31 #include "internal-fn.h"
32 #include "gimple-expr.h"
33 #include "is-a.h"
34 #include "gimple.h"
35 #include "expr.h"
36 #include "flags.h"
37 #include "params.h"
38 #include "input.h"
39 #include "hashtab.h"
40 #include "langhooks.h"
41 #include "bitmap.h"
42 #include "function.h"
43 #include "diagnostic-core.h"
44 #include "except.h"
45 #include "timevar.h"
46 #include "lto-streamer.h"
47 #include "data-streamer.h"
48 #include "tree-streamer.h"
49 #include "gcov-io.h"
50 #include "tree-pass.h"
51 #include "profile.h"
52 #include "context.h"
53 #include "pass_manager.h"
54 #include "ipa-utils.h"
55 
/* True when asm nodes have been output.  */
57 bool asm_nodes_output = false;
58 
59 static void output_cgraph_opt_summary (void);
60 static void input_cgraph_opt_summary (vec<symtab_node *>  nodes);
61 
62 /* Number of LDPR values known to GCC.  */
63 #define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
64 
/* All node orders are offset by ORDER_BASE.  */
66 static int order_base;
67 
/* Cgraph streaming is organized as a set of records whose type
   is indicated by a tag.  */
70 enum LTO_symtab_tags
71 {
72   /* Must leave 0 for the stopper.  */
73 
74   /* Cgraph node without body available.  */
75   LTO_symtab_unavail_node = 1,
76   /* Cgraph node with function body.  */
77   LTO_symtab_analyzed_node,
78   /* Cgraph edges.  */
79   LTO_symtab_edge,
80   LTO_symtab_indirect_edge,
81   LTO_symtab_variable,
82   LTO_symtab_last_tag
83 };
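
/* Informal sketch of the stream layout produced by this file (not a
   normative description): the symtab section written by output_symtab is a
   sequence of tagged records terminated by a zero tag,

       <tag> <node record>  <tag> <node record> ...
       <tag> <edge record>  ...
       0

   where each <tag> is one of the LTO_symtab_tags above and the payload
   layout is defined by lto_output_node, lto_output_varpool_node and
   lto_output_edge below.  input_cgraph_1 keeps reading records until it
   sees the zero stopper.  */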
84 
/* Create a new symtab encoder.
   If FOR_INPUT, the encoder allocates only the data structures needed
   to read the symtab.  */
88 
89 lto_symtab_encoder_t
lto_symtab_encoder_new (bool for_input)
91 {
92   lto_symtab_encoder_t encoder = XCNEW (struct lto_symtab_encoder_d);
93 
94   if (!for_input)
95     encoder->map = pointer_map_create ();
96   encoder->nodes.create (0);
97   return encoder;
98 }
99 
100 
101 /* Delete ENCODER and its components.  */
102 
103 void
lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
105 {
106    encoder->nodes.release ();
107    if (encoder->map)
108      pointer_map_destroy (encoder->map);
109    free (encoder);
110 }
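
/* A minimal usage sketch of the encoder API (illustrative only; N stands
   for some symtab_node that is already in the symbol table):

     lto_symtab_encoder_t encoder = lto_symtab_encoder_new (false);
     int ref = lto_symtab_encoder_encode (encoder, N);
     lto_set_symtab_encoder_in_partition (encoder, N);
     ...
     lto_symtab_encoder_delete (encoder);

   REF is the small integer later written into the stream in place of a
   pointer to the node.  */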
111 
112 
113 /* Return the existing reference number of NODE in the symtab encoder in
114    output block OB.  Assign a new reference if this is the first time
115    NODE is encoded.  */
116 
117 int
lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
119 			   symtab_node *node)
120 {
121   int ref;
122   void **slot;
123 
124   if (!encoder->map)
125     {
126       lto_encoder_entry entry = {node, false, false, false};
127 
128       ref = encoder->nodes.length ();
129       encoder->nodes.safe_push (entry);
130       return ref;
131     }
132 
133   slot = pointer_map_contains (encoder->map, node);
134   if (!slot || !*slot)
135     {
136       lto_encoder_entry entry = {node, false, false, false};
137       ref = encoder->nodes.length ();
138       if (!slot)
139         slot = pointer_map_insert (encoder->map, node);
140       *slot = (void *) (intptr_t) (ref + 1);
141       encoder->nodes.safe_push (entry);
142     }
143   else
144     ref = (size_t) *slot - 1;
145 
146   return ref;
147 }
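
/* Note on the encoding above: the pointer map stores REF + 1 rather than
   REF so that an empty slot (zero) can mean "not present".  The lookup
   side therefore subtracts one again, roughly (illustrative sketch):

     void **slot = pointer_map_contains (encoder->map, node);
     int ref = (slot && *slot) ? (int) (intptr_t) *slot - 1 : LCC_NOT_FOUND;
*/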
148 
149 /* Remove NODE from encoder.  */
150 
151 bool
lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
153 			        symtab_node *node)
154 {
155   void **slot, **last_slot;
156   int index;
157   lto_encoder_entry last_node;
158 
159   slot = pointer_map_contains (encoder->map, node);
160   if (slot == NULL || !*slot)
161     return false;
162 
163   index = (size_t) *slot - 1;
164   gcc_checking_assert (encoder->nodes[index].node == node);
165 
166   /* Remove from vector. We do this by swapping node with the last element
167      of the vector.  */
168   last_node = encoder->nodes.pop ();
169   if (last_node.node != node)
170     {
171       last_slot = pointer_map_contains (encoder->map, last_node.node);
172       gcc_checking_assert (last_slot && *last_slot);
173       *last_slot = (void *)(size_t) (index + 1);
174 
175       /* Move the last element to the original spot of NODE.  */
176       encoder->nodes[index] = last_node;
177     }
178 
179   /* Remove element from hash table.  */
180   *slot = NULL;
181   return true;
182 }
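
/* Worked example of the O(1) removal above: deleting B from the encoder
   vector [A, B, C, D] pops D, stores it in B's slot giving [A, D, C], and
   redirects D's map entry to B's former index; B's own map slot is then
   cleared.  */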
183 
184 
/* Return TRUE if we should encode the body of NODE (if any).  */
186 
187 bool
lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
189 				  struct cgraph_node *node)
190 {
191   int index = lto_symtab_encoder_lookup (encoder, node);
192   return encoder->nodes[index].body;
193 }
194 
/* Specify that we should encode the body of NODE (if any).  */
196 
197 static void
lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
199 				    struct cgraph_node *node)
200 {
201   int index = lto_symtab_encoder_encode (encoder, node);
202   gcc_checking_assert (encoder->nodes[index].node == node);
203   encoder->nodes[index].body = true;
204 }
205 
206 /* Return TRUE if we should encode initializer of NODE (if any).  */
207 
208 bool
lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
210 					 varpool_node *node)
211 {
212   int index = lto_symtab_encoder_lookup (encoder, node);
213   if (index == LCC_NOT_FOUND)
214     return false;
215   return encoder->nodes[index].initializer;
216 }
217 
/* Specify that we should encode the initializer of NODE (if any).  */
219 
220 static void
lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
222 					   varpool_node *node)
223 {
224   int index = lto_symtab_encoder_lookup (encoder, node);
225   encoder->nodes[index].initializer = true;
226 }
227 
/* Return TRUE if NODE is in this partition.  */
229 
230 bool
lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
232 				   symtab_node *node)
233 {
234   int index = lto_symtab_encoder_lookup (encoder, node);
235   if (index == LCC_NOT_FOUND)
236     return false;
237   return encoder->nodes[index].in_partition;
238 }
239 
/* Specify that NODE is in this partition.  */
241 
242 void
lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
244 				     symtab_node *node)
245 {
246   int index = lto_symtab_encoder_encode (encoder, node);
247   encoder->nodes[index].in_partition = true;
248 }
249 
250 /* Output the cgraph EDGE to OB using ENCODER.  */
251 
252 static void
lto_output_edge (struct lto_simple_output_block *ob, struct cgraph_edge *edge,
254 		 lto_symtab_encoder_t encoder)
255 {
256   unsigned int uid;
257   intptr_t ref;
258   struct bitpack_d bp;
259 
260   if (edge->indirect_unknown_callee)
261     streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
262 			 LTO_symtab_indirect_edge);
263   else
264     streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
265 			 LTO_symtab_edge);
266 
267   ref = lto_symtab_encoder_lookup (encoder, edge->caller);
268   gcc_assert (ref != LCC_NOT_FOUND);
269   streamer_write_hwi_stream (ob->main_stream, ref);
270 
271   if (!edge->indirect_unknown_callee)
272     {
273       ref = lto_symtab_encoder_lookup (encoder, edge->callee);
274       gcc_assert (ref != LCC_NOT_FOUND);
275       streamer_write_hwi_stream (ob->main_stream, ref);
276     }
277 
278   streamer_write_gcov_count_stream (ob->main_stream, edge->count);
279 
280   bp = bitpack_create (ob->main_stream);
281   uid = (!gimple_has_body_p (edge->caller->decl)
282 	 ? edge->lto_stmt_uid : gimple_uid (edge->call_stmt) + 1);
283   bp_pack_enum (&bp, cgraph_inline_failed_t,
284 	        CIF_N_REASONS, edge->inline_failed);
285   bp_pack_var_len_unsigned (&bp, uid);
286   bp_pack_var_len_unsigned (&bp, edge->frequency);
287   bp_pack_value (&bp, edge->indirect_inlining_edge, 1);
288   bp_pack_value (&bp, edge->speculative, 1);
289   bp_pack_value (&bp, edge->call_stmt_cannot_inline_p, 1);
290   bp_pack_value (&bp, edge->can_throw_external, 1);
291   if (edge->indirect_unknown_callee)
292     {
293       int flags = edge->indirect_info->ecf_flags;
294       bp_pack_value (&bp, (flags & ECF_CONST) != 0, 1);
295       bp_pack_value (&bp, (flags & ECF_PURE) != 0, 1);
296       bp_pack_value (&bp, (flags & ECF_NORETURN) != 0, 1);
297       bp_pack_value (&bp, (flags & ECF_MALLOC) != 0, 1);
298       bp_pack_value (&bp, (flags & ECF_NOTHROW) != 0, 1);
299       bp_pack_value (&bp, (flags & ECF_RETURNS_TWICE) != 0, 1);
300       /* Flags that should not appear on indirect calls.  */
301       gcc_assert (!(flags & (ECF_LOOPING_CONST_OR_PURE
302 			     | ECF_MAY_BE_ALLOCA
303 			     | ECF_SIBCALL
304 			     | ECF_LEAF
305 			     | ECF_NOVOPS)));
306     }
307   streamer_write_bitpack (&bp);
308   if (edge->indirect_unknown_callee)
309     {
310       streamer_write_hwi_stream (ob->main_stream,
311 			         edge->indirect_info->common_target_id);
312       if (edge->indirect_info->common_target_id)
313 	streamer_write_hwi_stream
314 	   (ob->main_stream, edge->indirect_info->common_target_probability);
315     }
316 }
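
/* For reference, the edge record written above is, in stream order (an
   informal sketch, not a normative description):

     tag            LTO_symtab_edge or LTO_symtab_indirect_edge
     caller ref     hwi
     callee ref     hwi, direct edges only
     count          gcov count
     bitpack        inline_failed, stmt uid, frequency and flags
                    (plus ECF_* bits for indirect edges)
     target id      hwi, indirect edges only, followed by its probability
                    when non-zero

   input_edge below unpacks the record in the same order.  */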
317 
/* Return true if LIST contains references from other partitions.  */
319 
320 bool
referenced_from_other_partition_p (struct ipa_ref_list *list, lto_symtab_encoder_t encoder)
322 {
323   int i;
324   struct ipa_ref *ref;
325   for (i = 0; ipa_ref_list_referring_iterate (list, i, ref); i++)
326     {
327       if (ref->referring->in_other_partition
328           || !lto_symtab_encoder_in_partition_p (encoder, ref->referring))
329 	return true;
330     }
331   return false;
332 }
333 
/* Return true when NODE is reachable from another partition.  */
335 
336 bool
reachable_from_other_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
338 {
339   struct cgraph_edge *e;
340   if (!node->definition)
341     return false;
342   if (node->global.inlined_to)
343     return false;
344   for (e = node->callers; e; e = e->next_caller)
345     if (e->caller->in_other_partition
346 	|| !lto_symtab_encoder_in_partition_p (encoder, e->caller))
347       return true;
348   return false;
349 }
350 
/* Return true if LIST contains references from this partition.  */
352 
353 bool
referenced_from_this_partition_p (struct ipa_ref_list *list,
355 				  lto_symtab_encoder_t encoder)
356 {
357   int i;
358   struct ipa_ref *ref;
359   for (i = 0; ipa_ref_list_referring_iterate (list, i, ref); i++)
360     if (lto_symtab_encoder_in_partition_p (encoder, ref->referring))
361       return true;
362   return false;
363 }
364 
/* Return true when NODE is reachable from this partition.  */
366 
367 bool
reachable_from_this_partition_p (struct cgraph_node *node, lto_symtab_encoder_t encoder)
369 {
370   struct cgraph_edge *e;
371   for (e = node->callers; e; e = e->next_caller)
372     if (lto_symtab_encoder_in_partition_p (encoder, e->caller))
373       return true;
374   return false;
375 }
376 
377 /* Output the cgraph NODE to OB.  ENCODER is used to find the
378    reference number of NODE->inlined_to.  SET is the set of nodes we
379    are writing to the current file.  If NODE is not in SET, then NODE
380    is a boundary of a cgraph_node_set and we pretend NODE just has a
381    decl and no callees.  WRITTEN_DECLS is the set of FUNCTION_DECLs
382    that have had their callgraph node written so far.  This is used to
383    determine if NODE is a clone of a previously written node.  */
384 
385 static void
lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
387 		 lto_symtab_encoder_t encoder)
388 {
389   unsigned int tag;
390   struct bitpack_d bp;
391   bool boundary_p;
392   intptr_t ref;
393   bool in_other_partition = false;
394   struct cgraph_node *clone_of, *ultimate_clone_of;
395   ipa_opt_pass_d *pass;
396   int i;
397   bool alias_p;
398 
399   boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
400 
401   if (node->analyzed && !boundary_p)
402     tag = LTO_symtab_analyzed_node;
403   else
404     tag = LTO_symtab_unavail_node;
405 
406   streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
407 		       tag);
408   streamer_write_hwi_stream (ob->main_stream, node->order);
409 
410   /* In WPA mode, we only output part of the call-graph.  Also, we
     fake cgraph node attributes.  There are two cases we care about.
412 
413      Boundary nodes: There are nodes that are not part of SET but are
414      called from within SET.  We artificially make them look like
415      externally visible nodes with no function body.
416 
417      Cherry-picked nodes:  These are nodes we pulled from other
     translation units into SET during IPA-inlining.  We make them
     local static nodes to prevent clashes with other local statics.  */
420   if (boundary_p && node->analyzed
421       && symtab_get_symbol_partitioning_class (node) == SYMBOL_PARTITION)
422     {
      /* Inline clones can not be part of the boundary.
         gcc_assert (!node->global.inlined_to);

         FIXME: At the moment they can be, when the partition contains an
         inline clone that is a clone of an inline clone from outside the
         partition.  We can reshape the clone tree and make another node the
         root, but it needs a bit of extra work and will be promptly done by
         cgraph_remove_node after reading back.  */
431       in_other_partition = 1;
432     }
433 
434   clone_of = node->clone_of;
435   while (clone_of
436 	 && (ref = lto_symtab_encoder_lookup (encoder, clone_of)) == LCC_NOT_FOUND)
437     if (clone_of->prev_sibling_clone)
438       clone_of = clone_of->prev_sibling_clone;
439     else
440       clone_of = clone_of->clone_of;
441 
  /* See if the body of the master function is output.  If not, we are seeing
     only a declaration and we do not need to pass down the clone tree. */
444   ultimate_clone_of = clone_of;
445   while (ultimate_clone_of && ultimate_clone_of->clone_of)
446     ultimate_clone_of = ultimate_clone_of->clone_of;
447 
448   if (clone_of && !lto_symtab_encoder_encode_body_p (encoder, ultimate_clone_of))
449     clone_of = NULL;
450 
451   if (tag == LTO_symtab_analyzed_node)
452     gcc_assert (clone_of || !node->clone_of);
453   if (!clone_of)
454     streamer_write_hwi_stream (ob->main_stream, LCC_NOT_FOUND);
455   else
456     streamer_write_hwi_stream (ob->main_stream, ref);
457 
458 
459   lto_output_fn_decl_index (ob->decl_state, ob->main_stream, node->decl);
460   streamer_write_gcov_count_stream (ob->main_stream, node->count);
461   streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
462 
463   streamer_write_hwi_stream (ob->main_stream,
464 			     node->ipa_transforms_to_apply.length ());
465   FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
466     streamer_write_hwi_stream (ob->main_stream, pass->static_pass_number);
467 
468   if (tag == LTO_symtab_analyzed_node)
469     {
470       if (node->global.inlined_to)
471 	{
472 	  ref = lto_symtab_encoder_lookup (encoder, node->global.inlined_to);
473 	  gcc_assert (ref != LCC_NOT_FOUND);
474 	}
475       else
476 	ref = LCC_NOT_FOUND;
477 
478       streamer_write_hwi_stream (ob->main_stream, ref);
479     }
480 
481   if (node->same_comdat_group && !boundary_p)
482     {
483       ref = lto_symtab_encoder_lookup (encoder,
484 				       node->same_comdat_group);
485       gcc_assert (ref != LCC_NOT_FOUND);
486     }
487   else
488     ref = LCC_NOT_FOUND;
489   streamer_write_hwi_stream (ob->main_stream, ref);
490 
491   streamer_write_hwi_stream (ob->main_stream, node->tp_first_run);
492 
493   bp = bitpack_create (ob->main_stream);
494   bp_pack_value (&bp, node->local.local, 1);
495   bp_pack_value (&bp, node->externally_visible, 1);
496   bp_pack_value (&bp, node->definition, 1);
497   bp_pack_value (&bp, node->local.versionable, 1);
498   bp_pack_value (&bp, node->local.can_change_signature, 1);
499   bp_pack_value (&bp, node->local.redefined_extern_inline, 1);
500   bp_pack_value (&bp, node->force_output, 1);
501   bp_pack_value (&bp, node->forced_by_abi, 1);
502   bp_pack_value (&bp, node->unique_name, 1);
503   bp_pack_value (&bp, node->body_removed, 1);
504   bp_pack_value (&bp, node->address_taken, 1);
505   bp_pack_value (&bp, tag == LTO_symtab_analyzed_node
506 		 && symtab_get_symbol_partitioning_class (node) == SYMBOL_PARTITION
507 		 && (reachable_from_other_partition_p (node, encoder)
508 		     || referenced_from_other_partition_p (&node->ref_list,
509 							   encoder)), 1);
510   bp_pack_value (&bp, node->lowered, 1);
511   bp_pack_value (&bp, in_other_partition, 1);
  /* Real aliases in a boundary become non-aliases.  However we still stream
     alias info on weakrefs.
     TODO: We lose a bit of information here - when we know that a variable is
     defined in another unit, we may use the info on aliases to resolve
     symbol1 != symbol2 type tests that we otherwise can do only for locally
     defined objects.  */
518   alias_p = node->alias && (!boundary_p || node->weakref);
519   bp_pack_value (&bp, alias_p, 1);
520   bp_pack_value (&bp, node->weakref, 1);
521   bp_pack_value (&bp, node->frequency, 2);
522   bp_pack_value (&bp, node->only_called_at_startup, 1);
523   bp_pack_value (&bp, node->only_called_at_exit, 1);
524   bp_pack_value (&bp, node->tm_clone, 1);
525   bp_pack_value (&bp, node->calls_comdat_local, 1);
526   bp_pack_value (&bp, node->thunk.thunk_p && !boundary_p, 1);
527   bp_pack_enum (&bp, ld_plugin_symbol_resolution,
528 	        LDPR_NUM_KNOWN, node->resolution);
529   streamer_write_bitpack (&bp);
530 
531   if (node->thunk.thunk_p && !boundary_p)
532     {
533       streamer_write_uhwi_stream
534 	 (ob->main_stream,
535 	  1 + (node->thunk.this_adjusting != 0) * 2
536 	  + (node->thunk.virtual_offset_p != 0) * 4);
537       streamer_write_uhwi_stream (ob->main_stream, node->thunk.fixed_offset);
538       streamer_write_uhwi_stream (ob->main_stream, node->thunk.virtual_value);
539     }
540   streamer_write_hwi_stream (ob->main_stream, node->profile_id);
541 }
542 
543 /* Output the varpool NODE to OB.
544    If NODE is not in SET, then NODE is a boundary.  */
545 
546 static void
lto_output_varpool_node (struct lto_simple_output_block *ob, varpool_node *node,
548 			 lto_symtab_encoder_t encoder)
549 {
550   bool boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
551   struct bitpack_d bp;
552   int ref;
553   bool alias_p;
554 
555   streamer_write_enum (ob->main_stream, LTO_symtab_tags, LTO_symtab_last_tag,
556 		       LTO_symtab_variable);
557   streamer_write_hwi_stream (ob->main_stream, node->order);
558   lto_output_var_decl_index (ob->decl_state, ob->main_stream, node->decl);
559   bp = bitpack_create (ob->main_stream);
560   bp_pack_value (&bp, node->externally_visible, 1);
561   bp_pack_value (&bp, node->force_output, 1);
562   bp_pack_value (&bp, node->forced_by_abi, 1);
563   bp_pack_value (&bp, node->unique_name, 1);
564   bp_pack_value (&bp, node->body_removed, 1);
565   bp_pack_value (&bp, node->definition, 1);
566   alias_p = node->alias && (!boundary_p || node->weakref);
567   bp_pack_value (&bp, alias_p, 1);
568   bp_pack_value (&bp, node->weakref, 1);
569   bp_pack_value (&bp, node->analyzed && !boundary_p, 1);
570   gcc_assert (node->definition || !node->analyzed);
  /* Constant pool initializers can be de-unified into individual ltrans units.
     FIXME: Alternatively, at -Os we may want to avoid generating local labels
     for them and share them across LTRANS partitions.  */
574   if (symtab_get_symbol_partitioning_class (node) != SYMBOL_PARTITION)
575     {
      bp_pack_value (&bp, 0, 1);  /* used_from_other_partition.  */
577       bp_pack_value (&bp, 0, 1);  /* in_other_partition.  */
578     }
579   else
580     {
581       bp_pack_value (&bp, node->definition
582 		     && referenced_from_other_partition_p (&node->ref_list,
583 							   encoder), 1);
584       bp_pack_value (&bp, node->analyzed
585 		     && boundary_p && !DECL_EXTERNAL (node->decl), 1);
586 	  /* in_other_partition.  */
587     }
588   streamer_write_bitpack (&bp);
589   if (node->same_comdat_group && !boundary_p)
590     {
591       ref = lto_symtab_encoder_lookup (encoder,
592 				       node->same_comdat_group);
593       gcc_assert (ref != LCC_NOT_FOUND);
594     }
595   else
596     ref = LCC_NOT_FOUND;
597   streamer_write_hwi_stream (ob->main_stream, ref);
598   streamer_write_enum (ob->main_stream, ld_plugin_symbol_resolution,
599 		       LDPR_NUM_KNOWN, node->resolution);
600 }
601 
/* Output the IPA reference REF to OB using ENCODER.  */
604 
605 static void
lto_output_ref (struct lto_simple_output_block *ob, struct ipa_ref *ref,
607 		lto_symtab_encoder_t encoder)
608 {
609   struct bitpack_d bp;
610   int nref;
611   int uid = ref->lto_stmt_uid;
612   struct cgraph_node *node;
613 
614   bp = bitpack_create (ob->main_stream);
615   bp_pack_value (&bp, ref->use, 2);
616   bp_pack_value (&bp, ref->speculative, 1);
617   streamer_write_bitpack (&bp);
618   nref = lto_symtab_encoder_lookup (encoder, ref->referred);
619   gcc_assert (nref != LCC_NOT_FOUND);
620   streamer_write_hwi_stream (ob->main_stream, nref);
621 
622   node = dyn_cast <cgraph_node> (ref->referring);
623   if (node)
624     {
625       if (ref->stmt)
626 	uid = gimple_uid (ref->stmt) + 1;
627       streamer_write_hwi_stream (ob->main_stream, uid);
628     }
629 }
630 
631 /* Stream out profile_summary to OB.  */
632 
633 static void
output_profile_summary (struct lto_simple_output_block *ob)
635 {
636   unsigned h_ix;
637   struct bitpack_d bp;
638 
639   if (profile_info)
640     {
641       /* We do not output num and run_max, they are not used by
642          GCC profile feedback and they are difficult to merge from multiple
643          units.  */
644       gcc_assert (profile_info->runs);
645       streamer_write_uhwi_stream (ob->main_stream, profile_info->runs);
646       streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_max);
647 
648       /* sum_all is needed for computing the working set with the
649          histogram.  */
650       streamer_write_gcov_count_stream (ob->main_stream, profile_info->sum_all);
651 
      /* Create and output a bitpack of the indices of non-zero histogram
         entries.  */
653       bp = bitpack_create (ob->main_stream);
654       for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
655         bp_pack_value (&bp, profile_info->histogram[h_ix].num_counters > 0, 1);
656       streamer_write_bitpack (&bp);
657       /* Now stream out only those non-zero entries.  */
658       for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
659         {
660           if (!profile_info->histogram[h_ix].num_counters)
661             continue;
662           streamer_write_gcov_count_stream (ob->main_stream,
663                                       profile_info->histogram[h_ix].num_counters);
664           streamer_write_gcov_count_stream (ob->main_stream,
665                                       profile_info->histogram[h_ix].min_value);
666           streamer_write_gcov_count_stream (ob->main_stream,
667                                       profile_info->histogram[h_ix].cum_value);
668          }
669       /* IPA-profile computes hot bb threshold based on cumulated
670 	 whole program profile.  We need to stream it down to ltrans.  */
671        if (flag_wpa)
672          streamer_write_gcov_count_stream (ob->main_stream,
673 					   get_hot_bb_threshold ());
674     }
675   else
676     streamer_write_uhwi_stream (ob->main_stream, 0);
677 }
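
/* The summary written above is, in order: runs, sum_max, sum_all, a
   GCOV_HISTOGRAM_SIZE-bit bitpack flagging the non-empty histogram buckets,
   a (num_counters, min_value, cum_value) triple for each flagged bucket
   and, when streaming from WPA, the hot BB threshold.  A zero "runs" value
   means no profile is present.  input_profile_summary below reads the same
   layout.  */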
678 
679 /* Output all callees or indirect outgoing edges.  EDGE must be the first such
680    edge.  */
681 
682 static void
output_outgoing_cgraph_edges (struct cgraph_edge *edge,
684 			      struct lto_simple_output_block *ob,
685 			      lto_symtab_encoder_t encoder)
686 {
687   if (!edge)
688     return;
689 
  /* Output edges in backward direction, so the reconstructed callgraph matches
     and it is easy to associate call sites with the IPA pass summaries.  */
692   while (edge->next_callee)
693     edge = edge->next_callee;
694   for (; edge; edge = edge->prev_callee)
695     lto_output_edge (ob, edge, encoder);
696 }
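
/* Why backwards: when the edges are read back in, input_edge recreates
   them with cgraph_create_edge / cgraph_create_indirect_edge, which (at
   least in the current implementation) push each new edge at the head of
   the caller's list, so writing the edges last-to-first reconstructs them
   in their original order.  */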
697 
/* Output the IPA references of the nodes in the partition described by
   ENCODER.  */
699 
700 static void
output_refs (lto_symtab_encoder_t encoder)
702 {
703   lto_symtab_encoder_iterator lsei;
704   struct lto_simple_output_block *ob;
705   int count;
706   struct ipa_ref *ref;
707   int i;
708 
709   ob = lto_create_simple_output_block (LTO_section_refs);
710 
711   for (lsei = lsei_start_in_partition (encoder); !lsei_end_p (lsei);
712        lsei_next_in_partition (&lsei))
713     {
714       symtab_node *node = lsei_node (lsei);
715 
716       count = ipa_ref_list_nreferences (&node->ref_list);
717       if (count)
718 	{
719 	  streamer_write_gcov_count_stream (ob->main_stream, count);
720 	  streamer_write_uhwi_stream (ob->main_stream,
721 				     lto_symtab_encoder_lookup (encoder, node));
722 	  for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list,
723 						      i, ref); i++)
724 	    lto_output_ref (ob, ref, encoder);
725 	}
726     }
727 
728   streamer_write_uhwi_stream (ob->main_stream, 0);
729 
730   lto_destroy_simple_output_block (ob);
731 }
732 
/* Add NODE into encoder as well as nodes it is cloned from.
   Do it in a way so that each clone origin (master) appears before its
   clones.  */
735 
736 static void
add_node_to (lto_symtab_encoder_t encoder, struct cgraph_node *node,
738 	     bool include_body)
739 {
740   if (node->clone_of)
741     add_node_to (encoder, node->clone_of, include_body);
742   else if (include_body)
743     lto_set_symtab_encoder_encode_body (encoder, node);
744   lto_symtab_encoder_encode (encoder, node);
745 }
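
/* Illustrative example (function names are hypothetical): for a clone chain
   foo_clone2 -> foo_clone1 -> foo, calling

     add_node_to (encoder, foo_clone2, true);

   encodes foo first, then foo_clone1, then foo_clone2, so every node's
   clone origin gets a smaller reference number and is streamed earlier.
   Only the root of the chain (foo) is marked to have its body encoded.  */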
746 
747 /* Add all references in LIST to encoders.  */
748 
749 static void
add_references (lto_symtab_encoder_t encoder,
751 		struct ipa_ref_list *list)
752 {
753   int i;
754   struct ipa_ref *ref;
755   for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
756     if (is_a <cgraph_node> (ref->referred))
757       add_node_to (encoder, ipa_ref_node (ref), false);
758     else
759       lto_symtab_encoder_encode (encoder, ref->referred);
760 }
761 
762 /* Find all symbols we want to stream into given partition and insert them
763    to encoders.
764 
   The function actually replaces IN_ENCODER by a new one.  The reason is that
   the streaming code needs a clone's origin to be streamed before the clone
   itself.  This means that we need to insert the nodes in a specific order.
   This order is ignored by the partitioning logic earlier.  */
769 
770 lto_symtab_encoder_t
compute_ltrans_boundary (lto_symtab_encoder_t in_encoder)
772 {
773   struct cgraph_node *node;
774   struct cgraph_edge *edge;
775   int i;
776   lto_symtab_encoder_t encoder;
777   lto_symtab_encoder_iterator lsei;
778   struct pointer_set_t *reachable_call_targets = pointer_set_create ();
779 
780   encoder = lto_symtab_encoder_new (false);
781 
  /* Go over all entries in IN_ENCODER and duplicate them into
     ENCODER.  At the same time insert the masters of clones so that
     every master appears before its clones.  */
785   for (lsei = lsei_start_function_in_partition (in_encoder);
786        !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
787     {
788       node = lsei_cgraph_node (lsei);
789       add_node_to (encoder, node, true);
790       lto_set_symtab_encoder_in_partition (encoder, node);
791       add_references (encoder, &node->ref_list);
792       /* For proper debug info, we need to ship the origins, too.  */
793       if (DECL_ABSTRACT_ORIGIN (node->decl))
794 	{
795 	  struct cgraph_node *origin_node
796 	  = cgraph_get_node (DECL_ABSTRACT_ORIGIN (node->decl));
797 	  add_node_to (encoder, origin_node, true);
798 	}
799     }
800   for (lsei = lsei_start_variable_in_partition (in_encoder);
801        !lsei_end_p (lsei); lsei_next_variable_in_partition (&lsei))
802     {
803       varpool_node *vnode = lsei_varpool_node (lsei);
804 
805       lto_set_symtab_encoder_in_partition (encoder, vnode);
806       lto_set_symtab_encoder_encode_initializer (encoder, vnode);
807       add_references (encoder, &vnode->ref_list);
808       /* For proper debug info, we need to ship the origins, too.  */
809       if (DECL_ABSTRACT_ORIGIN (vnode->decl))
810 	{
811 	  varpool_node *origin_node
812 	  = varpool_get_node (DECL_ABSTRACT_ORIGIN (node->decl));
813 	  lto_set_symtab_encoder_in_partition (encoder, origin_node);
814 	}
815     }
  /* Also pickle in the initializers of all referenced read-only variables
     to help folding.  Constant pool variables are not shared, so we must
     pickle those too.  */
819   for (i = 0; i < lto_symtab_encoder_size (encoder); i++)
820     {
821       symtab_node *node = lto_symtab_encoder_deref (encoder, i);
822       if (varpool_node *vnode = dyn_cast <varpool_node> (node))
823 	{
824 	  if (!lto_symtab_encoder_encode_initializer_p (encoder,
825 							vnode)
826 	      && ctor_for_folding (vnode->decl) != error_mark_node)
827 	    {
828 	      lto_set_symtab_encoder_encode_initializer (encoder, vnode);
829 	      add_references (encoder, &vnode->ref_list);
830 	    }
831        }
832     }
833 
834   /* Go over all the nodes again to include callees that are not in
835      SET.  */
836   for (lsei = lsei_start_function_in_partition (encoder);
837        !lsei_end_p (lsei); lsei_next_function_in_partition (&lsei))
838     {
839       node = lsei_cgraph_node (lsei);
840       for (edge = node->callees; edge; edge = edge->next_callee)
841 	{
842 	  struct cgraph_node *callee = edge->callee;
843 	  if (!lto_symtab_encoder_in_partition_p (encoder, callee))
844 	    {
845 	      /* We should have moved all the inlines.  */
846 	      gcc_assert (!callee->global.inlined_to);
847 	      add_node_to (encoder, callee, false);
848 	    }
849 	}
850       /* Add all possible targets for late devirtualization.  */
851       if (flag_devirtualize)
852 	for (edge = node->indirect_calls; edge; edge = edge->next_callee)
853 	  if (edge->indirect_info->polymorphic)
854 	    {
855 	      unsigned int i;
856 	      void *cache_token;
857 	      bool final;
858 	      vec <cgraph_node *>targets
859 		= possible_polymorphic_call_targets
860 		    (edge, &final, &cache_token);
861 	      if (!pointer_set_insert (reachable_call_targets,
862 				       cache_token))
863 		{
864 		  for (i = 0; i < targets.length (); i++)
865 		    {
866 		      struct cgraph_node *callee = targets[i];
867 
		      /* Adding external declarations into the unit serves
			 no purpose and just increases its boundary.  */
870 		      if (callee->definition
871 			  && !lto_symtab_encoder_in_partition_p
872 			       (encoder, callee))
873 			{
874 			  gcc_assert (!callee->global.inlined_to);
875 			  add_node_to (encoder, callee, false);
876 			}
877 		    }
878 		}
879 	    }
880     }
881   lto_symtab_encoder_delete (in_encoder);
882   pointer_set_destroy (reachable_call_targets);
883   return encoder;
884 }
885 
/* Output the part of the symtab in the current partition.  */
887 
888 void
output_symtab (void)
890 {
891   struct cgraph_node *node;
892   struct lto_simple_output_block *ob;
893   lto_symtab_encoder_iterator lsei;
894   int i, n_nodes;
895   lto_symtab_encoder_t encoder;
896 
897   if (flag_wpa)
898     output_cgraph_opt_summary ();
899 
900   ob = lto_create_simple_output_block (LTO_section_symtab_nodes);
901 
902   output_profile_summary (ob);
903 
904   /* An encoder for cgraph nodes should have been created by
905      ipa_write_summaries_1.  */
906   gcc_assert (ob->decl_state->symtab_node_encoder);
907   encoder = ob->decl_state->symtab_node_encoder;
908 
  /* Write out the nodes.  We must first output a node and then its clones,
     otherwise at the time of reading back a clone there would be nothing to
     clone from.  */
912   n_nodes = lto_symtab_encoder_size (encoder);
913   for (i = 0; i < n_nodes; i++)
914     {
915       symtab_node *node = lto_symtab_encoder_deref (encoder, i);
916       if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
917         lto_output_node (ob, cnode, encoder);
918       else
919         lto_output_varpool_node (ob, varpool (node), encoder);
920 
921     }
922 
923   /* Go over the nodes in SET again to write edges.  */
924   for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
925        lsei_next_function_in_partition (&lsei))
926     {
927       node = lsei_cgraph_node (lsei);
928       output_outgoing_cgraph_edges (node->callees, ob, encoder);
929       output_outgoing_cgraph_edges (node->indirect_calls, ob, encoder);
930     }
931 
932   streamer_write_uhwi_stream (ob->main_stream, 0);
933 
934   lto_destroy_simple_output_block (ob);
935 
  /* Emit toplevel asms.
     When doing WPA we must output every asm just once.  Since we do not
     partition asm nodes at all, output them to the first output.  This is
     kind of a hack, but it should work well.  */
940   if (!asm_nodes_output)
941     {
942       asm_nodes_output = true;
943       lto_output_toplevel_asms ();
944     }
945 
946   output_refs (encoder);
947 }
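
/* Overall layout of the LTO_section_symtab_nodes section produced above
   (informal sketch): the profile summary, then one record per node in
   encoder order (functions and variables interleaved), then the outgoing
   edges of every function in the partition, then a zero stopper.  The IPA
   references are streamed separately by output_refs into the
   LTO_section_refs section.  */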
948 
/* Overwrite the information in NODE based on FILE_DATA and TAG.  This is
   called either to initialize NODE or to replace the values in it, for
   instance because the first time we saw it, the function body was not
   available but now it is.  BP is a bitpack with all the bitflags for NODE
   read from the stream.  */
955 
956 static void
input_overwrite_node (struct lto_file_decl_data *file_data,
958 		      struct cgraph_node *node,
959 		      enum LTO_symtab_tags tag,
960 		      struct bitpack_d *bp)
961 {
962   node->aux = (void *) tag;
963   node->lto_file_data = file_data;
964 
965   node->local.local = bp_unpack_value (bp, 1);
966   node->externally_visible = bp_unpack_value (bp, 1);
967   node->definition = bp_unpack_value (bp, 1);
968   node->local.versionable = bp_unpack_value (bp, 1);
969   node->local.can_change_signature = bp_unpack_value (bp, 1);
970   node->local.redefined_extern_inline = bp_unpack_value (bp, 1);
971   node->force_output = bp_unpack_value (bp, 1);
972   node->forced_by_abi = bp_unpack_value (bp, 1);
973   node->unique_name = bp_unpack_value (bp, 1);
974   node->body_removed = bp_unpack_value (bp, 1);
975   node->address_taken = bp_unpack_value (bp, 1);
976   node->used_from_other_partition = bp_unpack_value (bp, 1);
977   node->lowered = bp_unpack_value (bp, 1);
978   node->analyzed = tag == LTO_symtab_analyzed_node;
979   node->in_other_partition = bp_unpack_value (bp, 1);
980   if (node->in_other_partition
      /* Avoid updating the decl when we are seeing just an inline clone.
	 When inlining a function that has functions already inlined into it,
	 we produce clones of inline clones.

	 WPA partitioning might put each clone into a different unit and
	 we might end up streaming an inline clone from another partition
	 to support the clone we are interested in. */
988       && (!node->clone_of
989 	  || node->clone_of->decl != node->decl))
990     {
991       DECL_EXTERNAL (node->decl) = 1;
992       TREE_STATIC (node->decl) = 0;
993     }
994   node->alias = bp_unpack_value (bp, 1);
995   node->weakref = bp_unpack_value (bp, 1);
996   node->frequency = (enum node_frequency)bp_unpack_value (bp, 2);
997   node->only_called_at_startup = bp_unpack_value (bp, 1);
998   node->only_called_at_exit = bp_unpack_value (bp, 1);
999   node->tm_clone = bp_unpack_value (bp, 1);
1000   node->calls_comdat_local = bp_unpack_value (bp, 1);
1001   node->thunk.thunk_p = bp_unpack_value (bp, 1);
1002   node->resolution = bp_unpack_enum (bp, ld_plugin_symbol_resolution,
1003 				     LDPR_NUM_KNOWN);
1004   gcc_assert (flag_ltrans
1005 	      || (!node->in_other_partition
1006 		  && !node->used_from_other_partition));
1007 }
1008 
/* Return the symbol name (as an identifier) that DECL is an alias of.  */
1010 
1011 static tree
get_alias_symbol (tree decl)
1013 {
1014   tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
1015   return get_identifier (TREE_STRING_POINTER
1016 			  (TREE_VALUE (TREE_VALUE (alias))));
1017 }
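
/* For example, given a declaration such as

     void f (void) __attribute__ ((alias ("target")));

   this returns the identifier "target".  */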
1018 
/* Read a node from input_block IB.  TAG is the node's tag just read.
   Return the node read or overwritten.  */
1021 
1022 static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
1024 	    struct lto_input_block *ib,
1025 	    enum LTO_symtab_tags tag,
1026 	    vec<symtab_node *> nodes)
1027 {
1028   gcc::pass_manager *passes = g->get_passes ();
1029   tree fn_decl;
1030   struct cgraph_node *node;
1031   struct bitpack_d bp;
1032   unsigned decl_index;
1033   int ref = LCC_NOT_FOUND, ref2 = LCC_NOT_FOUND;
1034   int clone_ref;
1035   int order;
1036   int i, count;
1037 
1038   order = streamer_read_hwi (ib) + order_base;
1039   clone_ref = streamer_read_hwi (ib);
1040 
1041   decl_index = streamer_read_uhwi (ib);
1042   fn_decl = lto_file_decl_data_get_fn_decl (file_data, decl_index);
1043 
1044   if (clone_ref != LCC_NOT_FOUND)
1045     {
1046       node = cgraph_clone_node (cgraph (nodes[clone_ref]), fn_decl,
1047 				0, CGRAPH_FREQ_BASE, false,
1048 				vNULL, false, NULL, NULL);
1049     }
1050   else
1051     {
      /* Declarations of functions can already be merged with a declaration
	 from another input file.  We keep the cgraph unmerged until after
	 streaming of IPA passes is done.  Always forcibly create a fresh
	 node.  */
1055       node = cgraph_create_empty_node ();
1056       node->decl = fn_decl;
1057       symtab_register_node (node);
1058     }
1059 
1060   node->order = order;
1061   if (order >= symtab_order)
1062     symtab_order = order + 1;
1063 
1064   node->count = streamer_read_gcov_count (ib);
1065   node->count_materialization_scale = streamer_read_hwi (ib);
1066 
1067   count = streamer_read_hwi (ib);
1068   node->ipa_transforms_to_apply = vNULL;
1069   for (i = 0; i < count; i++)
1070     {
1071       opt_pass *pass;
1072       int pid = streamer_read_hwi (ib);
1073 
1074       gcc_assert (pid < passes->passes_by_id_size);
1075       pass = passes->passes_by_id[pid];
1076       node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass_d *) pass);
1077     }
1078 
1079   if (tag == LTO_symtab_analyzed_node)
1080     ref = streamer_read_hwi (ib);
1081 
1082   ref2 = streamer_read_hwi (ib);
1083 
1084   /* Make sure that we have not read this node before.  Nodes that
1085      have already been read will have their tag stored in the 'aux'
1086      field.  Since built-in functions can be referenced in multiple
1087      functions, they are expected to be read more than once.  */
1088   if (node->aux && !DECL_BUILT_IN (node->decl))
1089     internal_error ("bytecode stream: found multiple instances of cgraph "
1090 		    "node with uid %d", node->uid);
1091 
1092   node->tp_first_run = streamer_read_uhwi (ib);
1093 
1094   bp = streamer_read_bitpack (ib);
1095 
1096   input_overwrite_node (file_data, node, tag, &bp);
1097 
1098   /* Store a reference for now, and fix up later to be a pointer.  */
1099   node->global.inlined_to = (cgraph_node_ptr) (intptr_t) ref;
1100 
1101   /* Store a reference for now, and fix up later to be a pointer.  */
1102   node->same_comdat_group = (symtab_node *) (intptr_t) ref2;
1103 
1104   if (node->thunk.thunk_p)
1105     {
1106       int type = streamer_read_uhwi (ib);
1107       HOST_WIDE_INT fixed_offset = streamer_read_uhwi (ib);
1108       HOST_WIDE_INT virtual_value = streamer_read_uhwi (ib);
1109 
1110       node->thunk.fixed_offset = fixed_offset;
1111       node->thunk.this_adjusting = (type & 2);
1112       node->thunk.virtual_value = virtual_value;
1113       node->thunk.virtual_offset_p = (type & 4);
1114     }
1115   if (node->alias && !node->analyzed && node->weakref)
1116     node->alias_target = get_alias_symbol (node->decl);
1117   node->profile_id = streamer_read_hwi (ib);
1118   return node;
1119 }
1120 
/* Read a varpool node from input_block IB.
   Return the node read or overwritten.  */
1123 
1124 static varpool_node *
input_varpool_node (struct lto_file_decl_data *file_data,
1126 		    struct lto_input_block *ib)
1127 {
1128   int decl_index;
1129   tree var_decl;
1130   varpool_node *node;
1131   struct bitpack_d bp;
1132   int ref = LCC_NOT_FOUND;
1133   int order;
1134 
1135   order = streamer_read_hwi (ib) + order_base;
1136   decl_index = streamer_read_uhwi (ib);
1137   var_decl = lto_file_decl_data_get_var_decl (file_data, decl_index);
1138 
  /* Declarations of variables can already be merged with a declaration
     from another input file.  We keep the symtab unmerged until after
     streaming of IPA passes is done.  Always forcibly create a fresh
     node.  */
1142   node = varpool_create_empty_node ();
1143   node->decl = var_decl;
1144   symtab_register_node (node);
1145 
1146   node->order = order;
1147   if (order >= symtab_order)
1148     symtab_order = order + 1;
1149   node->lto_file_data = file_data;
1150 
1151   bp = streamer_read_bitpack (ib);
1152   node->externally_visible = bp_unpack_value (&bp, 1);
1153   node->force_output = bp_unpack_value (&bp, 1);
1154   node->forced_by_abi = bp_unpack_value (&bp, 1);
1155   node->unique_name = bp_unpack_value (&bp, 1);
1156   node->body_removed = bp_unpack_value (&bp, 1);
1157   node->definition = bp_unpack_value (&bp, 1);
1158   node->alias = bp_unpack_value (&bp, 1);
1159   node->weakref = bp_unpack_value (&bp, 1);
1160   node->analyzed = bp_unpack_value (&bp, 1);
1161   node->used_from_other_partition = bp_unpack_value (&bp, 1);
1162   node->in_other_partition = bp_unpack_value (&bp, 1);
1163   if (node->in_other_partition)
1164     {
1165       DECL_EXTERNAL (node->decl) = 1;
1166       TREE_STATIC (node->decl) = 0;
1167     }
1168   if (node->alias && !node->analyzed && node->weakref)
1169     node->alias_target = get_alias_symbol (node->decl);
1170   ref = streamer_read_hwi (ib);
1171   /* Store a reference for now, and fix up later to be a pointer.  */
1172   node->same_comdat_group = (symtab_node *) (intptr_t) ref;
1173   node->resolution = streamer_read_enum (ib, ld_plugin_symbol_resolution,
1174 					        LDPR_NUM_KNOWN);
1175   gcc_assert (flag_ltrans
1176 	      || (!node->in_other_partition
1177 		  && !node->used_from_other_partition));
1178 
1179   return node;
1180 }
1181 
/* Read an IPA reference for REFERRING_NODE from input_block IB.  NODES is
   the vector of previously read symtab nodes used to resolve the referred
   symbol.  */
1184 
1185 static void
input_ref (struct lto_input_block *ib,
1187 	   symtab_node *referring_node,
1188 	   vec<symtab_node *> nodes)
1189 {
1190   symtab_node *node = NULL;
1191   struct bitpack_d bp;
1192   enum ipa_ref_use use;
1193   bool speculative;
1194   struct ipa_ref *ref;
1195 
1196   bp = streamer_read_bitpack (ib);
1197   use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
1198   speculative = (enum ipa_ref_use) bp_unpack_value (&bp, 1);
1199   node = nodes[streamer_read_hwi (ib)];
1200   ref = ipa_record_reference (referring_node, node, use, NULL);
1201   ref->speculative = speculative;
1202   if (is_a <cgraph_node> (referring_node))
1203     ref->lto_stmt_uid = streamer_read_hwi (ib);
1204 }
1205 
1206 /* Read an edge from IB.  NODES points to a vector of previously read nodes for
1207    decoding caller and callee of the edge to be read.  If INDIRECT is true, the
1208    edge being read is indirect (in the sense that it has
1209    indirect_unknown_callee set).  */
1210 
1211 static void
input_edge (struct lto_input_block *ib, vec<symtab_node *> nodes,
1213 	    bool indirect)
1214 {
1215   struct cgraph_node *caller, *callee;
1216   struct cgraph_edge *edge;
1217   unsigned int stmt_id;
1218   gcov_type count;
1219   int freq;
1220   cgraph_inline_failed_t inline_failed;
1221   struct bitpack_d bp;
1222   int ecf_flags = 0;
1223 
1224   caller = cgraph (nodes[streamer_read_hwi (ib)]);
1225   if (caller == NULL || caller->decl == NULL_TREE)
1226     internal_error ("bytecode stream: no caller found while reading edge");
1227 
1228   if (!indirect)
1229     {
1230       callee = cgraph (nodes[streamer_read_hwi (ib)]);
1231       if (callee == NULL || callee->decl == NULL_TREE)
1232 	internal_error ("bytecode stream: no callee found while reading edge");
1233     }
1234   else
1235     callee = NULL;
1236 
1237   count = streamer_read_gcov_count (ib);
1238 
1239   bp = streamer_read_bitpack (ib);
1240   inline_failed = bp_unpack_enum (&bp, cgraph_inline_failed_t, CIF_N_REASONS);
1241   stmt_id = bp_unpack_var_len_unsigned (&bp);
1242   freq = (int) bp_unpack_var_len_unsigned (&bp);
1243 
1244   if (indirect)
1245     edge = cgraph_create_indirect_edge (caller, NULL, 0, count, freq);
1246   else
1247     edge = cgraph_create_edge (caller, callee, NULL, count, freq);
1248 
1249   edge->indirect_inlining_edge = bp_unpack_value (&bp, 1);
1250   edge->speculative = bp_unpack_value (&bp, 1);
1251   edge->lto_stmt_uid = stmt_id;
1252   edge->inline_failed = inline_failed;
1253   edge->call_stmt_cannot_inline_p = bp_unpack_value (&bp, 1);
1254   edge->can_throw_external = bp_unpack_value (&bp, 1);
1255   if (indirect)
1256     {
1257       if (bp_unpack_value (&bp, 1))
1258 	ecf_flags |= ECF_CONST;
1259       if (bp_unpack_value (&bp, 1))
1260 	ecf_flags |= ECF_PURE;
1261       if (bp_unpack_value (&bp, 1))
1262 	ecf_flags |= ECF_NORETURN;
1263       if (bp_unpack_value (&bp, 1))
1264 	ecf_flags |= ECF_MALLOC;
1265       if (bp_unpack_value (&bp, 1))
1266 	ecf_flags |= ECF_NOTHROW;
1267       if (bp_unpack_value (&bp, 1))
1268 	ecf_flags |= ECF_RETURNS_TWICE;
1269       edge->indirect_info->ecf_flags = ecf_flags;
1270       edge->indirect_info->common_target_id = streamer_read_hwi (ib);
1271       if (edge->indirect_info->common_target_id)
1272         edge->indirect_info->common_target_probability = streamer_read_hwi (ib);
1273     }
1274 }
1275 
1276 
1277 /* Read a cgraph from IB using the info in FILE_DATA.  */
1278 
1279 static vec<symtab_node *>
input_cgraph_1 (struct lto_file_decl_data *file_data,
1281 		struct lto_input_block *ib)
1282 {
1283   enum LTO_symtab_tags tag;
1284   vec<symtab_node *> nodes = vNULL;
1285   symtab_node *node;
1286   unsigned i;
1287 
1288   tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1289   order_base = symtab_order;
1290   while (tag)
1291     {
1292       if (tag == LTO_symtab_edge)
1293         input_edge (ib, nodes, false);
1294       else if (tag == LTO_symtab_indirect_edge)
1295         input_edge (ib, nodes, true);
1296       else if (tag == LTO_symtab_variable)
1297         {
1298 	  node = input_varpool_node (file_data, ib);
1299           nodes.safe_push (node);
1300 	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1301         }
1302       else
1303 	{
1304 	  node = input_node (file_data, ib, tag, nodes);
1305 	  if (node == NULL || node->decl == NULL_TREE)
1306 	    internal_error ("bytecode stream: found empty cgraph node");
1307 	  nodes.safe_push (node);
1308 	  lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
1309 	}
1310 
1311       tag = streamer_read_enum (ib, LTO_symtab_tags, LTO_symtab_last_tag);
1312     }
1313 
1314   lto_input_toplevel_asms (file_data, order_base);
1315 
  /* AUX pointers should all be non-zero for function nodes read from the
     stream.  */
1317 #ifdef ENABLE_CHECKING
1318   FOR_EACH_VEC_ELT (nodes, i, node)
1319     gcc_assert (node->aux || !is_a <cgraph_node> (node));
1320 #endif
1321   FOR_EACH_VEC_ELT (nodes, i, node)
1322     {
1323       int ref;
1324       if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
1325 	{
1326 	  ref = (int) (intptr_t) cnode->global.inlined_to;
1327 
	  /* We share declarations of builtins, so we may read the same node
	     twice.  */
1329 	  if (!node->aux)
1330 	    continue;
1331 	  node->aux = NULL;
1332 
1333 	  /* Fixup inlined_to from reference to pointer.  */
1334 	  if (ref != LCC_NOT_FOUND)
1335 	    cgraph (node)->global.inlined_to = cgraph (nodes[ref]);
1336 	  else
1337 	    cnode->global.inlined_to = NULL;
1338 	}
1339 
1340       ref = (int) (intptr_t) node->same_comdat_group;
1341 
1342       /* Fixup same_comdat_group from reference to pointer.  */
1343       if (ref != LCC_NOT_FOUND)
1344 	node->same_comdat_group = nodes[ref];
1345       else
1346 	node->same_comdat_group = NULL;
1347     }
1348   FOR_EACH_VEC_ELT (nodes, i, node)
1349     node->aux = is_a <cgraph_node> (node) ? (void *)1 : NULL;
1350   return nodes;
1351 }
1352 
1353 /* Input ipa_refs.  */
1354 
1355 static void
input_refs (struct lto_input_block *ib,
1357 	    vec<symtab_node *> nodes)
1358 {
1359   int count;
1360   int idx;
1361   while (true)
1362     {
1363       symtab_node *node;
1364       count = streamer_read_uhwi (ib);
1365       if (!count)
1366 	break;
1367       idx = streamer_read_uhwi (ib);
1368       node = nodes[idx];
1369       while (count)
1370 	{
1371 	  input_ref (ib, node, nodes);
1372 	  count--;
1373 	}
1374     }
1375 }
1376 
1377 
1378 static struct gcov_ctr_summary lto_gcov_summary;
1379 
1380 /* Input profile_info from IB.  */
1381 static void
input_profile_summary (struct lto_input_block *ib,
1383 		       struct lto_file_decl_data *file_data)
1384 {
1385   unsigned h_ix;
1386   struct bitpack_d bp;
1387   unsigned int runs = streamer_read_uhwi (ib);
1388   if (runs)
1389     {
1390       file_data->profile_info.runs = runs;
1391       file_data->profile_info.sum_max = streamer_read_gcov_count (ib);
1392       file_data->profile_info.sum_all = streamer_read_gcov_count (ib);
1393 
1394       memset (file_data->profile_info.histogram, 0,
1395               sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1396       /* Input the bitpack of non-zero histogram indices.  */
1397       bp = streamer_read_bitpack (ib);
1398       /* Read in and unpack the full bitpack, flagging non-zero
1399          histogram entries by setting the num_counters non-zero.  */
1400       for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1401         {
1402           file_data->profile_info.histogram[h_ix].num_counters
1403               = bp_unpack_value (&bp, 1);
1404         }
1405       for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
1406         {
1407           if (!file_data->profile_info.histogram[h_ix].num_counters)
1408             continue;
1409 
1410           file_data->profile_info.histogram[h_ix].num_counters
1411               = streamer_read_gcov_count (ib);
1412           file_data->profile_info.histogram[h_ix].min_value
1413               = streamer_read_gcov_count (ib);
1414           file_data->profile_info.histogram[h_ix].cum_value
1415               = streamer_read_gcov_count (ib);
1416         }
1417       /* IPA-profile computes hot bb threshold based on cumulated
1418 	 whole program profile.  We need to stream it down to ltrans.  */
1419       if (flag_ltrans)
1420 	set_hot_bb_threshold (streamer_read_gcov_count (ib));
1421     }
1422 
1423 }
1424 
1425 /* Rescale profile summaries to the same number of runs in the whole unit.  */
1426 
1427 static void
merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
1429 {
1430   struct lto_file_decl_data *file_data;
1431   unsigned int j, h_ix;
1432   gcov_unsigned_t max_runs = 0;
1433   struct cgraph_node *node;
1434   struct cgraph_edge *edge;
1435   gcov_type saved_sum_all = 0;
1436   gcov_ctr_summary *saved_profile_info = 0;
1437   int saved_scale = 0;
1438 
  /* Find the unit with the maximal number of runs.  If we ever get serious
     about roundoff errors, we might also consider computing the least common
     multiple.  */
1442   for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
1443     if (max_runs < file_data->profile_info.runs)
1444       max_runs = file_data->profile_info.runs;
1445 
1446   if (!max_runs)
1447     return;
1448 
  /* Simple overflow check.  We probably don't need to support that many train
     runs.  Such a large value probably implies data corruption anyway.  */
1451   if (max_runs > INT_MAX / REG_BR_PROB_BASE)
1452     {
      sorry ("At most %i profile runs are supported. Perhaps corrupted profile?",
1454 	     INT_MAX / REG_BR_PROB_BASE);
1455       return;
1456     }
1457 
1458   profile_info = &lto_gcov_summary;
1459   lto_gcov_summary.runs = max_runs;
1460   lto_gcov_summary.sum_max = 0;
1461   memset (lto_gcov_summary.histogram, 0,
1462           sizeof (gcov_bucket_type) * GCOV_HISTOGRAM_SIZE);
1463 
  /* Rescale all units to the maximal number of runs.
     sum_max cannot be easily merged, as we have no idea which files come from
     the same run.  We do not use the info anyway, so leave it 0.  */
  for (j = 0; (file_data = file_data_vec[j]) != NULL; j++)
    if (file_data->profile_info.runs)
      {
	int scale = GCOV_COMPUTE_SCALE (max_runs,
					file_data->profile_info.runs);
	lto_gcov_summary.sum_max
	    = MAX (lto_gcov_summary.sum_max,
		   apply_scale (file_data->profile_info.sum_max, scale));
	lto_gcov_summary.sum_all
	    = MAX (lto_gcov_summary.sum_all,
		   apply_scale (file_data->profile_info.sum_all, scale));
	/* Save a pointer to the profile_info with the largest
	   scaled sum_all and the scale for use in merging the
	   histogram.  */
	if (!saved_profile_info
	    || lto_gcov_summary.sum_all > saved_sum_all)
	  {
	    saved_profile_info = &file_data->profile_info;
	    saved_sum_all = lto_gcov_summary.sum_all;
	    saved_scale = scale;
	  }
      }

  gcc_assert (saved_profile_info);

  /* Scale up the histogram from the profile that had the largest
     scaled sum_all above.  */
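  /* Sketch of what the loop below does, with made-up numbers: if bucket
     h_ix recorded min_value 100 and saved_scale corresponds to a 4x
     rescale, scaled_min is 400 and gcov_histo_index (400) picks the
     (typically larger) destination bucket; the counters and the scaled
     cumulative value are accumulated there, merging with any other source
     bucket that happens to map to the same index.  */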
  for (h_ix = 0; h_ix < GCOV_HISTOGRAM_SIZE; h_ix++)
    {
      /* Scale up the min value as we did the corresponding sum_all
         above. Use that to find the new histogram index.  */
      gcov_type scaled_min
          = apply_scale (saved_profile_info->histogram[h_ix].min_value,
                         saved_scale);
      /* The new index may be shared with another scaled histogram entry,
         so we need to account for a non-zero histogram entry at new_ix.  */
      unsigned new_ix = gcov_histo_index (scaled_min);
      lto_gcov_summary.histogram[new_ix].min_value
          = (lto_gcov_summary.histogram[new_ix].num_counters
             ? MIN (lto_gcov_summary.histogram[new_ix].min_value, scaled_min)
             : scaled_min);
      /* Some of the scaled counter values would ostensibly need to be placed
         into different (larger) histogram buckets, but we keep things simple
         here and place the scaled cumulative counter value in the bucket
         corresponding to the scaled minimum counter value.  */
      lto_gcov_summary.histogram[new_ix].cum_value
          += apply_scale (saved_profile_info->histogram[h_ix].cum_value,
                          saved_scale);
      lto_gcov_summary.histogram[new_ix].num_counters
          += saved_profile_info->histogram[h_ix].num_counters;
    }

  /* Watch roundoff errors.  */
  if (lto_gcov_summary.sum_max < max_runs)
    lto_gcov_summary.sum_max = max_runs;

  /* If merging already happened at WPA time, we are done.  */
  if (flag_ltrans)
    return;

  /* Now compute count_materialization_scale of each node.
     During LTRANS we already have values of count_materialization_scale
     computed, so just update them.  */
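  /* Hypothetical example of the update below: for a node whose unit was
     trained with 2 runs in a link where max_runs is 8, and whose
     count_materialization_scale started at the default REG_BR_PROB_BASE,
     the new scale is 4 * REG_BR_PROB_BASE, and the node count and callee
     edge counts are multiplied by 4 so every unit looks as if it had been
     trained the same number of times.  */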
  FOR_EACH_FUNCTION (node)
    if (node->lto_file_data
	&& node->lto_file_data->profile_info.runs)
      {
	int scale;

	scale = RDIV (node->count_materialization_scale * max_runs,
		      node->lto_file_data->profile_info.runs);
	node->count_materialization_scale = scale;
	if (scale < 0)
	  fatal_error ("Profile information in %s corrupted",
		       file_data->file_name);

	if (scale == REG_BR_PROB_BASE)
	  continue;
	for (edge = node->callees; edge; edge = edge->next_callee)
	  edge->count = apply_scale (edge->count, scale);
	node->count = apply_scale (node->count, scale);
      }
}

/* Input and merge the symtab from each of the .o files passed to
   lto1.  */
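/* For orientation, each file is processed in the order visible below:
   first the profile summary, then the symtab node table (building the
   per-file node encoder), then the reference lists, and, when running
   the LTRANS stage, the optimization summaries.  */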

void
input_symtab (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;
  struct cgraph_node *node;

  while ((file_data = file_data_vec[j++]))
    {
      const char *data;
      size_t len;
      struct lto_input_block *ib;
      vec<symtab_node *> nodes;

      ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
					  &data, &len);
      if (!ib)
	fatal_error ("cannot find LTO cgraph in %s", file_data->file_name);
      input_profile_summary (ib, file_data);
      file_data->symtab_node_encoder = lto_symtab_encoder_new (true);
      nodes = input_cgraph_1 (file_data, ib);
      lto_destroy_simple_input_block (file_data, LTO_section_symtab_nodes,
				      ib, data, len);

      ib = lto_create_simple_input_block (file_data, LTO_section_refs,
					  &data, &len);
      if (!ib)
	fatal_error ("cannot find LTO section refs in %s",
		     file_data->file_name);
      input_refs (ib, nodes);
      lto_destroy_simple_input_block (file_data, LTO_section_refs,
				      ib, data, len);
      if (flag_ltrans)
	input_cgraph_opt_summary (nodes);
      nodes.release ();
    }

  merge_profile_summaries (file_data_vec);
  get_working_sets ();

  /* Clear out the aux field that was used to store enough state to
     tell which nodes should be overwritten.  */
  FOR_EACH_FUNCTION (node)
    {
      /* Some nodes may have been created by cgraph_node.  This
	 happens when the callgraph contains nested functions.  If the
	 node for the parent function was never emitted to the gimple
	 file, cgraph_node will create a node for it when setting the
	 context of the nested function.  */
      if (node->lto_file_data)
	node->aux = NULL;
    }
}

/* True when we need optimization summary for NODE.  */

static int
output_cgraph_opt_summary_p (struct cgraph_node *node)
{
  return (node->clone_of
	  && (node->clone.tree_map
	      || node->clone.args_to_skip
	      || node->clone.combined_args_to_skip));
}

/* Output optimization summary for EDGE to OB.  */
static void
output_edge_opt_summary (struct output_block *ob ATTRIBUTE_UNUSED,
			 struct cgraph_edge *edge ATTRIBUTE_UNUSED)
{
}

/* Output optimization summary for NODE to OB.  */
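/* The per-node record written below (and read back by
   input_node_opt_summary) is, in order: the number of args_to_skip bits
   followed by their indices, the same for combined_args_to_skip, the
   length of the tree_map vector, and for each replace map its parameter
   number, the replacement tree and a bitpack holding replace_p and ref_p;
   edge summaries (currently empty) follow for nodes inside the
   partition.  */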

static void
output_node_opt_summary (struct output_block *ob,
			 struct cgraph_node *node,
			 lto_symtab_encoder_t encoder)
{
  unsigned int index;
  bitmap_iterator bi;
  struct ipa_replace_map *map;
  struct bitpack_d bp;
  int i;
  struct cgraph_edge *e;

  if (node->clone.args_to_skip)
    {
      streamer_write_uhwi (ob, bitmap_count_bits (node->clone.args_to_skip));
      EXECUTE_IF_SET_IN_BITMAP (node->clone.args_to_skip, 0, index, bi)
	streamer_write_uhwi (ob, index);
    }
  else
    streamer_write_uhwi (ob, 0);
  if (node->clone.combined_args_to_skip)
    {
      streamer_write_uhwi (ob,
			   bitmap_count_bits (node->clone.combined_args_to_skip));
      EXECUTE_IF_SET_IN_BITMAP (node->clone.combined_args_to_skip, 0, index, bi)
	streamer_write_uhwi (ob, index);
    }
  else
    streamer_write_uhwi (ob, 0);
  streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
  FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
    {
      /* At the moment we assume all old trees to be PARM_DECLs, because we
	 have no mechanism to store function local declarations into
	 summaries.  */
      gcc_assert (!map->old_tree);
      streamer_write_uhwi (ob, map->parm_num);
      gcc_assert (EXPR_LOCATION (map->new_tree) == UNKNOWN_LOCATION);
      stream_write_tree (ob, map->new_tree, true);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, map->replace_p, 1);
      bp_pack_value (&bp, map->ref_p, 1);
      streamer_write_bitpack (&bp);
    }

  if (lto_symtab_encoder_in_partition_p (encoder, node))
    {
      for (e = node->callees; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
      for (e = node->indirect_calls; e; e = e->next_callee)
	output_edge_opt_summary (ob, e);
    }
}

/* Output optimization summaries stored in callgraph.
   At the moment it is the clone info structure.  */
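/* The section produced below starts with the number of nodes that have a
   summary, followed by (node reference, node summary) pairs, where the
   reference is the node's index in the symtab encoder.  */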

static void
output_cgraph_opt_summary (void)
{
  int i, n_nodes;
  lto_symtab_encoder_t encoder;
  struct output_block *ob = create_output_block (LTO_section_cgraph_opt_sum);
  unsigned count = 0;

  ob->cgraph_node = NULL;
  encoder = ob->decl_state->symtab_node_encoder;
  n_nodes = lto_symtab_encoder_size (encoder);
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node> (node);
      if (cnode && output_cgraph_opt_summary_p (cnode))
	count++;
    }
  streamer_write_uhwi (ob, count);
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *node = lto_symtab_encoder_deref (encoder, i);
      cgraph_node *cnode = dyn_cast <cgraph_node> (node);
      if (cnode && output_cgraph_opt_summary_p (cnode))
	{
	  streamer_write_uhwi (ob, i);
	  output_node_opt_summary (ob, cnode, encoder);
	}
    }
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Input optimization summary of EDGE.  */

static void
input_edge_opt_summary (struct cgraph_edge *edge ATTRIBUTE_UNUSED,
			struct lto_input_block *ib_main ATTRIBUTE_UNUSED)
{
}

/* Input optimization summary of NODE.  */

static void
input_node_opt_summary (struct cgraph_node *node,
			struct lto_input_block *ib_main,
			struct data_in *data_in)
{
  int i;
  int count;
  int bit;
  struct bitpack_d bp;
  struct cgraph_edge *e;

  count = streamer_read_uhwi (ib_main);
  if (count)
    node->clone.args_to_skip = BITMAP_GGC_ALLOC ();
  for (i = 0; i < count; i++)
    {
      bit = streamer_read_uhwi (ib_main);
      bitmap_set_bit (node->clone.args_to_skip, bit);
    }
  count = streamer_read_uhwi (ib_main);
  if (count)
    node->clone.combined_args_to_skip = BITMAP_GGC_ALLOC ();
  for (i = 0; i < count; i++)
    {
      bit = streamer_read_uhwi (ib_main);
      bitmap_set_bit (node->clone.combined_args_to_skip, bit);
    }
  count = streamer_read_uhwi (ib_main);
  for (i = 0; i < count; i++)
    {
      struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();

      vec_safe_push (node->clone.tree_map, map);
      map->parm_num = streamer_read_uhwi (ib_main);
      map->old_tree = NULL;
      map->new_tree = stream_read_tree (ib_main, data_in);
      bp = streamer_read_bitpack (ib_main);
      map->replace_p = bp_unpack_value (&bp, 1);
      map->ref_p = bp_unpack_value (&bp, 1);
    }
  for (e = node->callees; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
  for (e = node->indirect_calls; e; e = e->next_callee)
    input_edge_opt_summary (e, ib_main);
}

/* Read section in file FILE_DATA of length LEN with data DATA.  */
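/* The section is laid out as an lto_function_header followed by the CFG
   stream (not read here), the main stream, and finally the string table;
   the offsets computed below recover each part.  */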

static void
input_cgraph_opt_section (struct lto_file_decl_data *file_data,
			  const char *data, size_t len,
			  vec<symtab_node *> nodes)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
			header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      int ref = streamer_read_uhwi (&ib_main);
      input_node_opt_summary (cgraph (nodes[ref]),
			      &ib_main, data_in);
    }
  lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}

/* Input optimization summary of cgraph.  */

static void
input_cgraph_opt_summary (vec<symtab_node *> nodes)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data =
	lto_get_section_data (file_data, LTO_section_cgraph_opt_sum, NULL,
			      &len);

      if (data)
	input_cgraph_opt_section (file_data, data, len, nodes);
    }
}