1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2020 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
43 #include "debug.h"
44 #include "omp-offload.h"
45 #include "print-tree.h"
46 #include "tree-dfa.h"
47 #include "file-prefix-map.h" /* remap_debug_filename() */
48 #include "output.h"
49 #include "ipa-utils.h"
50 #include "toplev.h"
51
52
53 static void lto_write_tree (struct output_block*, tree, bool);
54
55 /* Clear the line info stored in DATA_IN. */
56
57 static void
clear_line_info(struct output_block * ob)58 clear_line_info (struct output_block *ob)
59 {
60 ob->current_file = NULL;
61 ob->current_line = 0;
62 ob->current_col = 0;
63 ob->current_sysp = false;
64 ob->reset_locus = true;
65 ob->emit_pwd = true;
66 /* Initialize to something that will never appear as block,
67 so that the first location with block in a function etc.
68 always streams a change_block bit and the first block. */
69 ob->current_block = void_node;
70 }
71
72
73 /* Create the output block and return it. SECTION_TYPE is
74 LTO_section_function_body or LTO_static_initializer. */
75
76 struct output_block *
create_output_block(enum lto_section_type section_type)77 create_output_block (enum lto_section_type section_type)
78 {
79 struct output_block *ob = XCNEW (struct output_block);
80 if (streamer_dump_file)
81 fprintf (streamer_dump_file, "Creating output block for %s\n",
82 lto_section_name [section_type]);
83
84 ob->section_type = section_type;
85 ob->decl_state = lto_get_out_decl_state ();
86 /* Only global decl stream in non-wpa will ever be considered by tree
87 merging. */
88 if (!flag_wpa && section_type == LTO_section_decls)
89 ob->local_trees = new (hash_set <tree>);
90 ob->main_stream = XCNEW (struct lto_output_stream);
91 ob->string_stream = XCNEW (struct lto_output_stream);
92 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
93
94 if (section_type == LTO_section_function_body)
95 ob->cfg_stream = XCNEW (struct lto_output_stream);
96
97 clear_line_info (ob);
98
99 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
100 gcc_obstack_init (&ob->obstack);
101
102 return ob;
103 }
104
105
106 /* Destroy the output block OB. */
107
108 void
destroy_output_block(struct output_block * ob)109 destroy_output_block (struct output_block *ob)
110 {
111 enum lto_section_type section_type = ob->section_type;
112
113 delete ob->string_hash_table;
114 ob->string_hash_table = NULL;
115 delete ob->local_trees;
116
117 free (ob->main_stream);
118 free (ob->string_stream);
119 if (section_type == LTO_section_function_body)
120 free (ob->cfg_stream);
121
122 streamer_tree_cache_delete (ob->writer_cache);
123 obstack_free (&ob->obstack, NULL);
124
125 free (ob);
126 }
127
128
/* Look up NODE in the type table and write the index for it to OB.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  /* Emit an LTO_type_ref marker followed by NODE's index in the
     out-decl-state type table.  */
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
137
138 /* Wrapper around variably_modified_type_p avoiding type modification
139 during WPA streaming. */
140
141 static bool
lto_variably_modified_type_p(tree type)142 lto_variably_modified_type_p (tree type)
143 {
144 return (in_lto_p
145 ? TYPE_LANG_FLAG_0 (TYPE_MAIN_VARIANT (type))
146 : variably_modified_type_p (type, NULL_TREE));
147 }
148
149
/* Return true if tree node T is written to various tables.  For these
   nodes, we sometimes want to write their phyiscal representation
   (via lto_output_tree), and sometimes we need to emit an index
   reference into a table (via lto_output_tree_ref).  */

static bool
tree_is_indexable (tree t)
{
  /* Parameters and return values of functions of variably modified types
     must go to global stream, because they may be used in the type
     definition.  */
  if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
      && DECL_CONTEXT (t))
    return lto_variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)));
  /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.
     We should no longer need to stream it.  */
  else if (TREE_CODE (t) == IMPORTED_DECL)
    gcc_unreachable ();
  /* Only labels that can be referenced from other functions need to
     be indexable.  */
  else if (TREE_CODE (t) == LABEL_DECL)
    return FORCED_LABEL (t) || DECL_NONLOCAL (t);
  /* Function-local decls of these kinds are streamed with the body,
     not indexed.  */
  else if (((VAR_P (t) && !TREE_STATIC (t))
	    || TREE_CODE (t) == TYPE_DECL
	    || TREE_CODE (t) == CONST_DECL
	    || TREE_CODE (t) == NAMELIST_DECL)
	   && decl_function_context (t))
    return false;
  else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
    return false;
  /* Variably modified types need to be streamed alongside function
     bodies because they can refer to local entities.  Together with
     them we have to localize their members as well.
     ??? In theory that includes non-FIELD_DECLs as well.  */
  else if (TYPE_P (t)
	   && lto_variably_modified_type_p (t))
    return false;
  else if (TREE_CODE (t) == FIELD_DECL
	   && lto_variably_modified_type_p (DECL_CONTEXT (t)))
    return false;
  /* Everything else: all types, decls and SSA names are indexable.  */
  else
    return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
}
191
192
/* Output info about new location into bitpack BP.
   After outputting bitpack, lto_output_location_data has
   to be done to output actual data.

   The location is streamed as a delta against the state cached in OB
   (current_file/current_line/current_col), so bits are only spent on
   the components that changed.  If BLOCK_P, additionally stream the
   LOCATION_BLOCK of ORIG_LOC and write out BP.  */

static void
lto_output_location_1 (struct output_block *ob, struct bitpack_d *bp,
		       location_t orig_loc, bool block_p)
{
  location_t loc = LOCATION_LOCUS (orig_loc);

  if (loc >= RESERVED_LOCATION_COUNT)
    {
      expanded_location xloc = expand_location (loc);

      /* After clear_line_info, force the first real location to stream
	 each component by making the cached values differ from XLOC.  */
      if (ob->reset_locus)
	{
	  if (xloc.file == NULL)
	    ob->current_file = "";
	  if (xloc.line == 0)
	    ob->current_line = 1;
	  if (xloc.column == 0)
	    ob->current_col = 1;
	  ob->reset_locus = false;
	}

      /* As RESERVED_LOCATION_COUNT is 2, we can use the spare value of
	 3 without wasting additional bits to signalize file change.
	 If RESERVED_LOCATION_COUNT changes, reconsider this.  */
      gcc_checking_assert (RESERVED_LOCATION_COUNT == 2);
      bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT + 1,
			    RESERVED_LOCATION_COUNT
			    + (ob->current_file != xloc.file));

      /* One change-bit each for line and column.  */
      bp_pack_value (bp, ob->current_line != xloc.line, 1);
      bp_pack_value (bp, ob->current_col != xloc.column, 1);

      if (ob->current_file != xloc.file)
	{
	  bool stream_pwd = false;
	  const char *remapped = remap_debug_filename (xloc.file);
	  /* Stream the working directory once, the first time a
	     relative path is emitted, so the reader can absolutize.  */
	  if (ob->emit_pwd && remapped && !IS_ABSOLUTE_PATH (remapped))
	    {
	      stream_pwd = true;
	      ob->emit_pwd = false;
	    }
	  bp_pack_value (bp, stream_pwd, 1);
	  if (stream_pwd)
	    bp_pack_string (ob, bp, get_src_pwd (), true);
	  bp_pack_string (ob, bp, remapped, true);
	  bp_pack_value (bp, xloc.sysp, 1);
	}
      ob->current_file = xloc.file;
      ob->current_sysp = xloc.sysp;

      if (ob->current_line != xloc.line)
	bp_pack_var_len_unsigned (bp, xloc.line);
      ob->current_line = xloc.line;

      if (ob->current_col != xloc.column)
	bp_pack_var_len_unsigned (bp, xloc.column);
      ob->current_col = xloc.column;
    }
  else
    /* Reserved locations (UNKNOWN_LOCATION, BUILTINS_LOCATION) are
       streamed directly as their small ordinal.  */
    bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT + 1, loc);

  if (block_p)
    {
      tree block = LOCATION_BLOCK (orig_loc);
      bp_pack_value (bp, ob->current_block != block, 1);
      /* The bitpack must be flushed before the block tree is streamed
	 out-of-band.  */
      streamer_write_bitpack (bp);
      if (ob->current_block != block)
	lto_output_tree (ob, block, true, true);
      ob->current_block = block;
    }
}
268
/* Output info about new location into bitpack BP.
   After outputting bitpack, lto_output_location_data has
   to be done to output actual data.  Does not stream the
   LOCATION_BLOCK and does not flush BP.  */

void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
		     location_t loc)
{
  lto_output_location_1 (ob, bp, loc, false);
}
279
/* Output info about new location into bitpack BP.
   After outputting bitpack, lto_output_location_data has
   to be done to output actual data.  Like lto_output_location, but
   additionally output LOCATION_BLOCK info too and write the BP bitpack.  */

void
lto_output_location_and_block (struct output_block *ob, struct bitpack_d *bp,
			       location_t loc)
{
  lto_output_location_1 (ob, bp, loc, true);
}
291
292
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.

   Each reference is a record tag identifying the table followed by
   EXPR's index in that table; the tags must match what the reader in
   lto-common.c expects.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  /* All types go through the type table.  */
  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      /* SSA names are referenced by version number only; the name
	 itself is materialized from the function body.  */
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      /* Only static or global variables may be referenced via the
	 global decl table; tree_is_indexable guarantees this.  */
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* FALLTHRU */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMELIST_DECL:
      streamer_write_record_start (ob, LTO_namelist_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
382
383
384 /* Return true if EXPR is a tree node that can be written to disk. */
385
386 static inline bool
lto_is_streamable(tree expr)387 lto_is_streamable (tree expr)
388 {
389 enum tree_code code = TREE_CODE (expr);
390
391 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
392 name version in lto_output_tree_ref (see output_ssa_names). */
393 return !is_lang_specific (expr)
394 && code != SSA_NAME
395 && code != LANG_TYPE
396 && code != MODIFY_EXPR
397 && code != INIT_EXPR
398 && code != TARGET_EXPR
399 && code != BIND_EXPR
400 && code != WITH_CLEANUP_EXPR
401 && code != STATEMENT_LIST
402 && (code == CASE_LABEL_EXPR
403 || code == DECL_EXPR
404 || TREE_CODE_CLASS (code) != tcc_statement);
405 }
406
407 /* Very rough estimate of streaming size of the initializer. If we ignored
408 presence of strings, we could simply just count number of non-indexable
409 tree nodes and number of references to indexable nodes. Strings however
410 may be very large and we do not want to dump them int othe global stream.
411
412 Count the size of initializer until the size in DATA is positive. */
413
414 static tree
subtract_estimated_size(tree * tp,int * ws,void * data)415 subtract_estimated_size (tree *tp, int *ws, void *data)
416 {
417 long *sum = (long *)data;
418 if (tree_is_indexable (*tp))
419 {
420 /* Indexable tree is one reference to global stream.
421 Guess it may be about 4 bytes. */
422 *sum -= 4;
423 *ws = 0;
424 }
425 /* String table entry + base of tree node needs to be streamed. */
426 if (TREE_CODE (*tp) == STRING_CST)
427 *sum -= TREE_STRING_LENGTH (*tp) + 8;
428 else
429 {
430 /* Identifiers are also variable length but should not appear
431 naked in constructor. */
432 gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
433 /* We do not really make attempt to work out size of pickled tree, as
434 it is very variable. Make it bigger than the reference. */
435 *sum -= 16;
436 }
437 if (*sum < 0)
438 return *tp;
439 return NULL_TREE;
440 }
441
442
443 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
444
445 static tree
get_symbol_initial_value(lto_symtab_encoder_t encoder,tree expr)446 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
447 {
448 gcc_checking_assert (DECL_P (expr)
449 && TREE_CODE (expr) != FUNCTION_DECL
450 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
451
452 /* Handle DECL_INITIAL for symbols. */
453 tree initial = DECL_INITIAL (expr);
454 if (VAR_P (expr)
455 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
456 && !DECL_IN_CONSTANT_POOL (expr)
457 && initial)
458 {
459 varpool_node *vnode;
460 /* Extra section needs about 30 bytes; do not produce it for simple
461 scalar values. */
462 if (!(vnode = varpool_node::get (expr))
463 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
464 initial = error_mark_node;
465 if (initial != error_mark_node)
466 {
467 long max_size = 30;
468 if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
469 NULL))
470 initial = error_mark_node;
471 }
472 }
473
474 return initial;
475 }
476
477
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  IX is the index into the streamer cache
   where EXPR is stored.  The header for EXPR must already have been
   written by the caller (streamer_write_tree_header).  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  streamer_write_tree_bitfields (ob, expr);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols.  May stream error_mark_node
	 when the initializer is dropped; see get_symbol_initial_value.  */
      tree initial = get_symbol_initial_value
			 (ob->decl_state->symtab_node_encoder, expr);
      stream_write_tree (ob, initial, ref_p);
    }

  /* Stream references to early generated DIEs.  Keep in sync with the
     trees handled in dwarf2out_die_ref_for_decl.  */
  if ((DECL_P (expr)
       && TREE_CODE (expr) != FIELD_DECL
       && TREE_CODE (expr) != DEBUG_EXPR_DECL
       && TREE_CODE (expr) != TYPE_DECL)
      || TREE_CODE (expr) == BLOCK)
    {
      const char *sym;
      unsigned HOST_WIDE_INT off;
      if (debug_info_level > DINFO_LEVEL_NONE
	  && debug_hooks->die_ref_for_decl (expr, &sym, &off))
	{
	  streamer_write_string (ob, ob->main_stream, sym, true);
	  streamer_write_uhwi (ob, off);
	}
      else
	/* An empty string marks "no early DIE" for the reader.  */
	streamer_write_string (ob, ob->main_stream, NULL, true);
    }
}
524
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  IX is the index into the streamer cache
   where EXPR is stored.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  /* Trees that cannot be represented in an LTO stream are a hard
     internal error, not a silent drop.  */
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  lto_write_tree_1 (ob, expr, ref_p);
}
543
/* Emit the physical representation of tree node EXPR to output block OB,
   If THIS_REF_P is true, the leaves of EXPR are emitted as references via
   lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.
   HASH is EXPR's SCC hash, recorded in the writer cache.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  /* EXPR must be a fresh, non-indexable node when streamed by value.  */
  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  gcc_assert (!exists_p);
  if (TREE_CODE (expr) == INTEGER_CST
      && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}
575
/* Depth-first walker that discovers strongly connected components of
   trees (Tarjan's algorithm, iterative via an explicit worklist) and
   streams each SCC to the output block as it is completed.  The work
   happens entirely in the constructor; SCCs found are left on
   SCCSTACK.  */

class DFS
{
public:
  DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
       bool single_p);
  ~DFS ();

  /* One tree on the SCC stack together with its streaming hash.  */
  struct scc_entry
  {
    tree t;
    hashval_t hash;
  };
  /* Tarjan stack of trees whose SCC has not yet been completed.  */
  auto_vec<scc_entry,32> sccstack;

private:
  /* Per-tree DFS state: discovery number and low-link value.  */
  struct sccs
  {
    unsigned int dfsnum;
    unsigned int low;
  };
  /* One pending edge of the explicit (non-recursive) DFS.  */
  struct worklist
  {
    tree expr;
    sccs *from_state;
    sccs *cstate;
    bool ref_p;
    bool this_ref_p;
  };
  /* Maximum index of scc stack containing a local tree.  */
  int max_local_entry;

  static int scc_entry_compare (const void *, const void *);

  void DFS_write_tree_body (struct output_block *ob,
			    tree expr, sccs *expr_state, bool ref_p);

  void DFS_write_tree (struct output_block *ob, sccs *from_state,
		       tree expr, bool ref_p, bool this_ref_p);

  hashval_t
  hash_scc (struct output_block *ob, unsigned first, unsigned size,
	    bool ref_p, bool this_ref_p);

  /* Map from tree to its DFS state; states live on SCCSTATE_OBSTACK.  */
  hash_map<tree, sccs *> sccstate;
  auto_vec<worklist, 32> worklist_vec;
  struct obstack sccstate_obstack;
};
623
624 /* Return true if type can not be merged with structurally same tree in
625 other translation unit. During stream out this information is propagated
626 to all trees referring to T and they are not streamed with additional
627 information needed by the tree merging in lto-common.c (in particular,
628 scc hash codes are not streamed).
629
630 TRANSLATION_UNIT_DECL is handled specially since references to it does
631 not make other trees local as well. */
632
633 static bool
local_tree_p(tree t)634 local_tree_p (tree t)
635 {
636 switch (TREE_CODE (t))
637 {
638 case LABEL_DECL:
639 return true;
640 case NAMESPACE_DECL:
641 return !DECL_NAME (t);
642 case VAR_DECL:
643 case FUNCTION_DECL:
644 return !TREE_PUBLIC (t) && !DECL_EXTERNAL (t);
645 case RECORD_TYPE:
646 case UNION_TYPE:
647 case ENUMERAL_TYPE:
648 /* Anonymous namespace types are local.
649 Only work hard for main variants;
650 variant types will inherit locality. */
651 return TYPE_MAIN_VARIANT (t) == t
652 && odr_type_p (t) && type_with_linkage_p (t)
653 && type_in_anonymous_namespace_p (t);
654 default:
655 return false;
656 }
657 }
658
/* Emit the physical representation of tree node EXPR to output block OB,
   using depth-first search on the subgraph.  If THIS_REF_P is true, the
   leaves of EXPR are emitted as references via lto_output_tree_ref.
   REF_P is used for streaming siblings of EXPR.  If SINGLE_P is true,
   this is for a rewalk of a single leaf SCC.

   This is an iterative Tarjan SCC walk: WORKLIST_VEC replaces the call
   stack, SCCSTACK holds trees whose component is still open, and each
   completed SCC is streamed immediately.  */

DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
	  bool single_p)
{
  unsigned int next_dfs_num = 1;

  max_local_entry = -1;
  gcc_obstack_init (&sccstate_obstack);
  DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
  while (!worklist_vec.is_empty ())
    {
      worklist &w = worklist_vec.last ();
      expr = w.expr;
      sccs *from_state = w.from_state;
      sccs *cstate = w.cstate;
      ref_p = w.ref_p;
      this_ref_p = w.this_ref_p;
      if (cstate == NULL)
	{
	  /* First visit of this worklist entry: either EXPR was seen
	     before (update low-link and drop the entry) or it is new
	     (push it on the SCC stack and expand its edges).  */
	  sccs **slot = &sccstate.get_or_insert (expr);
	  cstate = *slot;
	  if (cstate)
	    {
	      gcc_checking_assert (from_state);
	      if (cstate->dfsnum < from_state->dfsnum)
		from_state->low = MIN (cstate->dfsnum, from_state->low);
	      worklist_vec.pop ();
	      continue;
	    }

	  scc_entry e = { expr, 0 };
	  /* Not yet visited.  DFS recurse and push it onto the stack.  */
	  *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
	  if (ob->local_trees && local_tree_p (expr))
	    max_local_entry = sccstack.length ();
	  sccstack.safe_push (e);
	  cstate->dfsnum = next_dfs_num++;
	  cstate->low = cstate->dfsnum;
	  w.cstate = cstate;

	  if (TREE_CODE (expr) == INTEGER_CST
	      && !TREE_OVERFLOW (expr))
	    /* Shared INTEGER_CSTs only need their type edge walked.  */
	    DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
	  else
	    {
	      DFS_write_tree_body (ob, expr, cstate, ref_p);

	      /* Walk any LTO-specific edges.  */
	      if (DECL_P (expr)
		  && TREE_CODE (expr) != FUNCTION_DECL
		  && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
		{
		  /* Handle DECL_INITIAL for symbols.  */
		  tree initial
		    = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
						expr);
		  DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
		}
	    }
	  continue;
	}

      /* Second visit: all children of EXPR have been processed.
	 See if we found an SCC.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* If we are re-walking a single leaf SCC just pop it,
	     let earlier worklist item access the sccstack.  */
	  if (single_p)
	    {
	      worklist_vec.pop ();
	      continue;
	    }

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  bool local_to_unit = !ob->local_trees
			       || max_local_entry >= (int)first;

	  /* Remember that trees are local so info gets propagated to other
	     SCCs.  */
	  if (local_to_unit && ob->local_trees)
	    {
	      for (unsigned i = 0; i < size; ++i)
		ob->local_trees->add (sccstack[first + i].t);
	    }

	  /* As a special case do not stream TRANSLATION_UNIT_DECL as shared
	     tree.  We can not mark it local because references to it does not
	     make other trees local (all global decls reffer to it via
	     CONTEXT).  */
	  if (size == 1
	      && TREE_CODE (sccstack[first].t) == TRANSLATION_UNIT_DECL)
	    local_to_unit = true;

	  if (!local_to_unit)
	    {
	      scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);

	      /* Put the entries with the least number of collisions first.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		std::swap (sccstack[first + i],
			   sccstack[first + entry_start + i]);

	      /* We already sorted SCC deterministically in hash_scc.  */

	      /* Check that we have only one SCC.
		 Naturally we may have conflicts if hash function is not
		 strong enough.  Lets see how far this gets.  */
	      gcc_checking_assert (scc_entry_len == 1);
	    }

	  worklist_vec.pop ();

	  /* Only global decl sections are considered by tree merging.  */
	  if (ob->section_type != LTO_section_decls)
	    {
	      /* If this is the original tree we stream and it forms SCC
		 by itself then we do not need to stream SCC at all.  */
	      if (worklist_vec.is_empty () && first == 0 && size == 1)
		return;
	      streamer_write_record_start (ob, LTO_trees);
	      streamer_write_uhwi (ob, size);
	    }
	  /* Write LTO_tree_scc if tree merging is going to be performed.  */
	  else if (!local_to_unit
		   /* These are special since sharing is not done by tree
		      merging machinery.  We can not special case them earlier
		      because we still need to compute hash for further sharing
		      of trees referring to them.  */
		   && (size != 1
		       || (TREE_CODE (sccstack[first].t) != IDENTIFIER_NODE
			   && (TREE_CODE (sccstack[first].t) != INTEGER_CST
			       || TREE_OVERFLOW (sccstack[first].t)))))

	    {
	      gcc_checking_assert (ob->section_type == LTO_section_decls);
	      streamer_write_record_start (ob, LTO_tree_scc);
	      /* In wast majority of cases scc_entry_len is 1 and size is small
		 integer.  Use extra bit of size to stream info about
		 exceptions.  */
	      streamer_write_uhwi (ob, size * 2 + (scc_entry_len != 1));
	      if (scc_entry_len != 1)
		streamer_write_uhwi (ob, scc_entry_len);
	      streamer_write_uhwi (ob, scc_hash);
	    }
	  /* Non-trivial SCCs must be packed to trees blocks so forward
	     references work correctly.  */
	  else if (size != 1)
	    {
	      streamer_write_record_start (ob, LTO_trees);
	      streamer_write_uhwi (ob, size);
	    }

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.  */
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, NULL);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
	    }

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);
	  /* Keep max_local_entry consistent with the truncated stack.  */
	  if ((int)first <= max_local_entry)
	    max_local_entry = first - 1;

	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  continue;
	}

      /* Not an SCC root: propagate the low-link value to the parent.  */
      gcc_checking_assert (from_state);
      from_state->low = MIN (from_state->low, cstate->low);
      if (cstate->dfsnum < from_state->dfsnum)
	from_state->low = MIN (cstate->dfsnum, from_state->low);
      worklist_vec.pop ();
    }
}
896
~DFS()897 DFS::~DFS ()
898 {
899 obstack_free (&sccstate_obstack, NULL);
900 }
901
902 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
903 DFS recurse for all tree edges originating from it. */
904
905 void
DFS_write_tree_body(struct output_block * ob,tree expr,sccs * expr_state,bool ref_p)906 DFS::DFS_write_tree_body (struct output_block *ob,
907 tree expr, sccs *expr_state, bool ref_p)
908 {
909 #define DFS_follow_tree_edge(DEST) \
910 DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
911
912 enum tree_code code;
913
914 if (streamer_dump_file)
915 {
916 print_node_brief (streamer_dump_file, " Streaming ",
917 expr, 4);
918 fprintf (streamer_dump_file, " to %s\n",
919 lto_section_name [ob->section_type]);
920 }
921
922 code = TREE_CODE (expr);
923
924 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
925 {
926 if (TREE_CODE (expr) != IDENTIFIER_NODE)
927 DFS_follow_tree_edge (TREE_TYPE (expr));
928 }
929
930 if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
931 {
932 unsigned int count = vector_cst_encoded_nelts (expr);
933 for (unsigned int i = 0; i < count; ++i)
934 DFS_follow_tree_edge (VECTOR_CST_ENCODED_ELT (expr, i));
935 }
936
937 if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
938 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
939 DFS_follow_tree_edge (POLY_INT_CST_COEFF (expr, i));
940
941 if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
942 {
943 DFS_follow_tree_edge (TREE_REALPART (expr));
944 DFS_follow_tree_edge (TREE_IMAGPART (expr));
945 }
946
947 if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
948 {
949 /* Drop names that were created for anonymous entities. */
950 if (DECL_NAME (expr)
951 && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
952 && IDENTIFIER_ANON_P (DECL_NAME (expr)))
953 ;
954 else
955 DFS_follow_tree_edge (DECL_NAME (expr));
956 if (TREE_CODE (expr) != TRANSLATION_UNIT_DECL
957 && ! DECL_CONTEXT (expr))
958 DFS_follow_tree_edge ((*all_translation_units)[0]);
959 else
960 DFS_follow_tree_edge (DECL_CONTEXT (expr));
961 }
962
963 if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
964 {
965 DFS_follow_tree_edge (DECL_SIZE (expr));
966 DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
967
968 /* Note, DECL_INITIAL is not handled here. Since DECL_INITIAL needs
969 special handling in LTO, it must be handled by streamer hooks. */
970
971 DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
972
973 /* We use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
974 declarations which should be eliminated by decl merging. Be sure none
975 leaks to this point. */
976 gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
977 DFS_follow_tree_edge (DECL_ABSTRACT_ORIGIN (expr));
978
979 if ((VAR_P (expr)
980 || TREE_CODE (expr) == PARM_DECL)
981 && DECL_HAS_VALUE_EXPR_P (expr))
982 DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
983 if (VAR_P (expr)
984 && DECL_HAS_DEBUG_EXPR_P (expr))
985 DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
986 }
987
988 if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
989 {
990 /* Make sure we don't inadvertently set the assembler name. */
991 if (DECL_ASSEMBLER_NAME_SET_P (expr))
992 DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
993 }
994
995 if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
996 {
997 DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
998 DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
999 DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
1000 DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
1001 gcc_checking_assert (!DECL_FCONTEXT (expr));
1002 }
1003
1004 if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1005 {
1006 gcc_checking_assert (DECL_VINDEX (expr) == NULL);
1007 DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
1008 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
1009 DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
1010 }
1011
1012 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1013 {
1014 DFS_follow_tree_edge (TYPE_SIZE (expr));
1015 DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
1016 DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
1017 DFS_follow_tree_edge (TYPE_NAME (expr));
1018 /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO. They will be
1019 reconstructed during fixup. */
1020 /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
1021 during fixup. */
1022 DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
1023 DFS_follow_tree_edge (TYPE_CONTEXT (expr));
1024 /* TYPE_CANONICAL is re-computed during type merging, so no need
1025 to follow it here. */
1026 /* Do not stream TYPE_STUB_DECL; it is not needed by LTO but currently
1027 it cannot be freed by free_lang_data without triggering ICEs in
1028 langhooks. */
1029 }
1030
1031 if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1032 {
1033 if (TREE_CODE (expr) == ARRAY_TYPE)
1034 DFS_follow_tree_edge (TYPE_DOMAIN (expr));
1035 else if (RECORD_OR_UNION_TYPE_P (expr))
1036 for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
1037 DFS_follow_tree_edge (t);
1038 else if (TREE_CODE (expr) == FUNCTION_TYPE
1039 || TREE_CODE (expr) == METHOD_TYPE)
1040 DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
1041
1042 if (!POINTER_TYPE_P (expr))
1043 DFS_follow_tree_edge (TYPE_MIN_VALUE_RAW (expr));
1044 DFS_follow_tree_edge (TYPE_MAX_VALUE_RAW (expr));
1045 }
1046
1047 if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1048 {
1049 DFS_follow_tree_edge (TREE_PURPOSE (expr));
1050 DFS_follow_tree_edge (TREE_VALUE (expr));
1051 DFS_follow_tree_edge (TREE_CHAIN (expr));
1052 }
1053
1054 if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1055 {
1056 for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
1057 DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
1058 }
1059
1060 if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1061 {
1062 for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
1063 DFS_follow_tree_edge (TREE_OPERAND (expr, i));
1064 DFS_follow_tree_edge (TREE_BLOCK (expr));
1065 }
1066
1067 if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
1068 {
1069 for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
1070 {
1071 /* We would have to stream externals in the block chain as
1072 non-references but we should have dropped them in
1073 free-lang-data. */
1074 gcc_assert (!VAR_OR_FUNCTION_DECL_P (t) || !DECL_EXTERNAL (t));
1075 DFS_follow_tree_edge (t);
1076 }
1077
1078 DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
1079 DFS_follow_tree_edge (BLOCK_ABSTRACT_ORIGIN (expr));
1080
1081 /* Do not follow BLOCK_NONLOCALIZED_VARS. We cannot handle debug
1082 information for early inlined BLOCKs so drop it on the floor instead
1083 of ICEing in dwarf2out.c. */
1084
1085 /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
1086 streaming time. */
1087
1088 /* Do not output BLOCK_SUBBLOCKS. Instead on streaming-in this
1089 list is re-constructed from BLOCK_SUPERCONTEXT. */
1090 }
1091
1092 if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1093 {
1094 unsigned i;
1095 tree t;
1096
1097 /* Note that the number of BINFO slots has already been emitted in
1098 EXPR's header (see streamer_write_tree_header) because this length
1099 is needed to build the empty BINFO node on the reader side. */
1100 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
1101 DFS_follow_tree_edge (t);
1102 DFS_follow_tree_edge (BINFO_OFFSET (expr));
1103 DFS_follow_tree_edge (BINFO_VTABLE (expr));
1104
1105 /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX,
1106 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
1107 by C++ FE only. */
1108 }
1109
1110 if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1111 {
1112 unsigned i;
1113 tree index, value;
1114
1115 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
1116 {
1117 DFS_follow_tree_edge (index);
1118 DFS_follow_tree_edge (value);
1119 }
1120 }
1121
1122 if (code == OMP_CLAUSE)
1123 {
1124 int i;
1125 for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
1126 DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
1127 DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
1128 }
1129
1130 #undef DFS_follow_tree_edge
1131 }
1132
/* Return a hash value for the tree T.
   CACHE holds hash values of trees outside current SCC.  MAP, if non-NULL,
   may hold hash values of trees inside current SCC.  */
1136
static hashval_t
hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
{
  inchash::hash hstate;

  /* Mix the hash of SIBLING into HSTATE.  A sibling already streamed
     (present in CACHE) contributes its cached hash; a sibling inside the
     current SCC contributes its tentative hash from MAP when MAP is
     supplied; otherwise fixed constants are mixed in so the result
     depends only on local structure.  */
#define visit(SIBLING) \
  do { \
    unsigned ix; \
    if (!SIBLING) \
      hstate.add_int (0); \
    else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
      hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
    else if (map) \
      hstate.add_int (*map->get (SIBLING)); \
    else \
      hstate.add_int (1); \
  } while (0)

  /* Hash TS_BASE.  */
  enum tree_code code = TREE_CODE (t);
  hstate.add_int (code);
  if (!TYPE_P (t))
    {
      hstate.add_flag (TREE_SIDE_EFFECTS (t));
      hstate.add_flag (TREE_CONSTANT (t));
      hstate.add_flag (TREE_READONLY (t));
      hstate.add_flag (TREE_PUBLIC (t));
    }
  hstate.add_flag (TREE_ADDRESSABLE (t));
  hstate.add_flag (TREE_THIS_VOLATILE (t));
  if (DECL_P (t))
    hstate.add_flag (DECL_UNSIGNED (t));
  else if (TYPE_P (t))
    hstate.add_flag (TYPE_UNSIGNED (t));
  if (TYPE_P (t))
    hstate.add_flag (TYPE_ARTIFICIAL (t));
  else
    hstate.add_flag (TREE_NO_WARNING (t));
  hstate.add_flag (TREE_NOTHROW (t));
  hstate.add_flag (TREE_STATIC (t));
  hstate.add_flag (TREE_PROTECTED (t));
  hstate.add_flag (TREE_DEPRECATED (t));
  /* TREE_PRIVATE overlays BINFO_VIRTUAL_P on TREE_BINFO nodes, so skip
     it there.  */
  if (code != TREE_BINFO)
    hstate.add_flag (TREE_PRIVATE (t));
  if (TYPE_P (t))
    {
      hstate.add_flag (AGGREGATE_TYPE_P (t)
		       ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
      hstate.add_flag (TYPE_ADDR_SPACE (t));
    }
  else if (code == SSA_NAME)
    hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
  hstate.commit_flag ();

  /* Hash the values of constants.  */
  if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
    hstate.add_wide_int (wi::to_widest (t));

  if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
    {
      REAL_VALUE_TYPE r = TREE_REAL_CST (t);
      hstate.add_flag (r.cl);
      hstate.add_flag (r.sign);
      hstate.add_flag (r.signalling);
      hstate.add_flag (r.canonical);
      hstate.commit_flag ();
      hstate.add_int (r.uexp);
      hstate.add (r.sig, sizeof (r.sig));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
    {
      FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
      hstate.add_int (f.mode);
      hstate.add_int (f.data.low);
      hstate.add_int (f.data.high);
    }

  /* Hash the non-tree fields of declarations.  */
  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      hstate.add_hwi (DECL_MODE (t));
      hstate.add_flag (DECL_NONLOCAL (t));
      hstate.add_flag (DECL_VIRTUAL_P (t));
      hstate.add_flag (DECL_IGNORED_P (t));
      hstate.add_flag (DECL_ABSTRACT_P (t));
      hstate.add_flag (DECL_ARTIFICIAL (t));
      hstate.add_flag (DECL_USER_ALIGN (t));
      hstate.add_flag (DECL_PRESERVE_P (t));
      hstate.add_flag (DECL_EXTERNAL (t));
      hstate.add_flag (DECL_GIMPLE_REG_P (t));
      hstate.commit_flag ();
      hstate.add_int (DECL_ALIGN (t));
      if (code == LABEL_DECL)
	{
	  hstate.add_int (EH_LANDING_PAD_NR (t));
	  hstate.add_int (LABEL_DECL_UID (t));
	}
      else if (code == FIELD_DECL)
	{
	  hstate.add_flag (DECL_PACKED (t));
	  hstate.add_flag (DECL_NONADDRESSABLE_P (t));
	  hstate.add_flag (DECL_PADDING_P (t));
	  hstate.add_flag (DECL_FIELD_ABI_IGNORED (t));
	  hstate.add_int (DECL_OFFSET_ALIGN (t));
	}
      else if (code == VAR_DECL)
	{
	  hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
	  hstate.add_flag (DECL_NONLOCAL_FRAME (t));
	}
      if (code == RESULT_DECL
	  || code == PARM_DECL
	  || code == VAR_DECL)
	{
	  hstate.add_flag (DECL_BY_REFERENCE (t));
	  if (code == VAR_DECL
	      || code == PARM_DECL)
	    hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
	}
      hstate.commit_flag ();
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
    hstate.add_int (DECL_REGISTER (t));

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      hstate.add_flag (DECL_COMMON (t));
      hstate.add_flag (DECL_DLLIMPORT_P (t));
      hstate.add_flag (DECL_WEAK (t));
      hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
      hstate.add_flag (DECL_COMDAT (t));
      hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
      hstate.add_int (DECL_VISIBILITY (t));
      if (code == VAR_DECL)
	{
	  /* DECL_IN_TEXT_SECTION is set during final asm output only.  */
	  hstate.add_flag (DECL_HARD_REGISTER (t));
	  hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
	}
      if (TREE_CODE (t) == FUNCTION_DECL)
	{
	  hstate.add_flag (DECL_FINAL_P (t));
	  hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
	  hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
	}
      hstate.commit_flag ();
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      hstate.add_int (DECL_BUILT_IN_CLASS (t));
      hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
      hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
      hstate.add_flag (FUNCTION_DECL_DECL_TYPE (t));
      hstate.add_flag (DECL_UNINLINABLE (t));
      hstate.add_flag (DECL_POSSIBLY_INLINED (t));
      hstate.add_flag (DECL_IS_NOVOPS (t));
      hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
      hstate.add_flag (DECL_IS_MALLOC (t));
      hstate.add_flag (DECL_DECLARED_INLINE_P (t));
      hstate.add_flag (DECL_STATIC_CHAIN (t));
      hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
      hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
      hstate.add_flag (DECL_NO_LIMIT_STACK (t));
      hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
      hstate.add_flag (DECL_PURE_P (t));
      hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
      hstate.commit_flag ();
      if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
	hstate.add_int (DECL_UNCHECKED_FUNCTION_CODE (t));
    }

  /* Hash the non-tree fields of types.  */
  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      hstate.add_hwi (TYPE_MODE (t));
      /* TYPE_NO_FORCE_BLK is private to stor-layout and need
	 no streaming.  */
      hstate.add_flag (TYPE_PACKED (t));
      hstate.add_flag (TYPE_RESTRICT (t));
      hstate.add_flag (TYPE_USER_ALIGN (t));
      hstate.add_flag (TYPE_READONLY (t));
      if (RECORD_OR_UNION_TYPE_P (t))
	{
	  hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
	  hstate.add_flag (TYPE_FINAL_P (t));
	  hstate.add_flag (TYPE_CXX_ODR_P (t));
	}
      else if (code == ARRAY_TYPE)
	hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
      if (code == ARRAY_TYPE || code == INTEGER_TYPE)
	hstate.add_flag (TYPE_STRING_FLAG (t));
      if (AGGREGATE_TYPE_P (t))
	hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
      hstate.commit_flag ();
      hstate.add_int (TYPE_PRECISION (t));
      hstate.add_int (TYPE_ALIGN (t));
      hstate.add_int (TYPE_EMPTY_P (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
    hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
		strlen (TRANSLATION_UNIT_LANGUAGE (t)));

  if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
      /* We don't stream these when passing things to a different target.  */
      && !lto_stream_offload_p)
    hstate.add_hwi (cl_target_option_hash (TREE_TARGET_OPTION (t)));

  if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
    hstate.add_hwi (cl_optimization_hash (TREE_OPTIMIZATION (t)));

  if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
    hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));

  if (CODE_CONTAINS_STRUCT (code, TS_STRING))
    hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));

  /* From here on, mix in the hashes of the tree edges, mirroring the
     edges walked by the DFS so trees that stream identically hash
     identically.  */
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (code != IDENTIFIER_NODE)
	visit (TREE_TYPE (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      unsigned int count = vector_cst_encoded_nelts (t);
      for (unsigned int i = 0; i < count; ++i)
	visit (VECTOR_CST_ENCODED_ELT (t, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
    for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
      visit (POLY_INT_CST_COEFF (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      visit (TREE_REALPART (t));
      visit (TREE_IMAGPART (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (t)
	  && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
	  && IDENTIFIER_ANON_P (DECL_NAME (t)))
	;
      else
	visit (DECL_NAME (t));
      if (DECL_FILE_SCOPE_P (t))
	;
      else
	visit (DECL_CONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      visit (DECL_SIZE (t));
      visit (DECL_SIZE_UNIT (t));
      visit (DECL_ATTRIBUTES (t));
      if ((code == VAR_DECL
	   || code == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (t))
	visit (DECL_VALUE_EXPR (t));
      if (code == VAR_DECL
	  && DECL_HAS_DEBUG_EXPR_P (t))
	visit (DECL_DEBUG_EXPR (t));
      /* ??? Hash DECL_INITIAL as streamed.  Needs the output-block to
         be able to call get_symbol_initial_value.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      if (DECL_ASSEMBLER_NAME_SET_P (t))
	visit (DECL_ASSEMBLER_NAME (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      visit (DECL_FIELD_OFFSET (t));
      visit (DECL_BIT_FIELD_TYPE (t));
      visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
      visit (DECL_FIELD_BIT_OFFSET (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      visit (DECL_FUNCTION_PERSONALITY (t));
      visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
      visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      visit (TYPE_SIZE (t));
      visit (TYPE_SIZE_UNIT (t));
      visit (TYPE_ATTRIBUTES (t));
      visit (TYPE_NAME (t));
      visit (TYPE_MAIN_VARIANT (t));
      if (TYPE_FILE_SCOPE_P (t))
	;
      else
	visit (TYPE_CONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (code == ARRAY_TYPE)
	visit (TYPE_DOMAIN (t));
      else if (RECORD_OR_UNION_TYPE_P (t))
	for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
	  visit (f);
      else if (code == FUNCTION_TYPE
	       || code == METHOD_TYPE)
	visit (TYPE_ARG_TYPES (t));
      if (!POINTER_TYPE_P (t))
	visit (TYPE_MIN_VALUE_RAW (t));
      visit (TYPE_MAX_VALUE_RAW (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      visit (TREE_PURPOSE (t));
      visit (TREE_VALUE (t));
      visit (TREE_CHAIN (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
      visit (TREE_VEC_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      hstate.add_hwi (TREE_OPERAND_LENGTH (t));
      for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
	visit (TREE_OPERAND (t, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree b;
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
	visit (b);
      visit (BINFO_OFFSET (t));
      visit (BINFO_VTABLE (t));
      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
	 by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;
      hstate.add_hwi (CONSTRUCTOR_NELTS (t));
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
	{
	  visit (index);
	  visit (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      HOST_WIDE_INT val;

      /* Mix in the clause code and its kind-specific discriminator
	 before the operands.  */
      hstate.add_hwi (OMP_CLAUSE_CODE (t));
      switch (OMP_CLAUSE_CODE (t))
	{
	case OMP_CLAUSE_DEFAULT:
	  val = OMP_CLAUSE_DEFAULT_KIND (t);
	  break;
	case OMP_CLAUSE_SCHEDULE:
	  val = OMP_CLAUSE_SCHEDULE_KIND (t);
	  break;
	case OMP_CLAUSE_DEPEND:
	  val = OMP_CLAUSE_DEPEND_KIND (t);
	  break;
	case OMP_CLAUSE_MAP:
	  val = OMP_CLAUSE_MAP_KIND (t);
	  break;
	case OMP_CLAUSE_PROC_BIND:
	  val = OMP_CLAUSE_PROC_BIND_KIND (t);
	  break;
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  val = OMP_CLAUSE_REDUCTION_CODE (t);
	  break;
	default:
	  val = 0;
	  break;
	}
      hstate.add_hwi (val);
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
	visit (OMP_CLAUSE_OPERAND (t, i));
      visit (OMP_CLAUSE_CHAIN (t));
    }

  return hstate.end ();

#undef visit
}
1542
1543 /* Compare two SCC entries by their hash value for qsorting them. */
1544
1545 int
scc_entry_compare(const void * p1_,const void * p2_)1546 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1547 {
1548 const scc_entry *p1 = (const scc_entry *) p1_;
1549 const scc_entry *p2 = (const scc_entry *) p2_;
1550 if (p1->hash < p2->hash)
1551 return -1;
1552 else if (p1->hash > p2->hash)
1553 return 1;
1554 return 0;
1555 }
1556
1557 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1558 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1559
hashval_t
DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
	       bool ref_p, bool this_ref_p)
{
  unsigned int last_classes = 0, iterations = 0;

  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash
      = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);

  /* A singleton SCC needs no disambiguation; its tree hash is the
     SCC hash.  */
  if (size == 1)
    return sccstack[first].hash;

  /* We aim to get unique hash for every tree within SCC and compute hash value
     of the whole SCC by combining all values together in a stable (entry-point
     independent) order.  This guarantees that the same SCC regions within
     different translation units will get the same hash values and therefore
     will be merged at WPA time.

     Often the hashes are already unique.  In that case we compute the SCC hash
     by combining individual hash values in an increasing order.

     If there are duplicates, we seek at least one tree with unique hash (and
     pick one with minimal hash and this property).  Then we obtain a stable
     order by DFS walk starting from this unique tree and then use the index
     within this order to make individual hash values unique.

     If there is no tree with unique hash, we iteratively propagate the hash
     values across the internal edges of SCC.  This usually quickly leads
     to unique hashes.  Consider, for example, an SCC containing two pointers
     that are identical except for the types they point to and assume that
     these types are also part of the SCC.  The propagation will add the
     points-to type information into their hash values.  */
  do
    {
      /* Sort the SCC so we can easily check for uniqueness.  */
      qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);

      unsigned int classes = 1;
      int firstunique = -1;

      /* Find the tree with lowest unique hash (if it exists) and compute
	 the number of equivalence classes.  */
      if (sccstack[first].hash != sccstack[first+1].hash)
	firstunique = 0;
      for (unsigned i = 1; i < size; ++i)
	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
	  {
	    classes++;
	    if (firstunique == -1
		&& (i == size - 1
		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
	      firstunique = i;
	  }

      /* If we found a tree with unique hash, stop the iteration.  */
      if (firstunique != -1
	  /* Also terminate if we run out of iterations or if the number of
	     equivalence classes is no longer increasing.
	     For example a cyclic list of trees that are all equivalent will
	     never have unique entry point; we however do not build such SCCs
	     in our IL.  */
	  || classes <= last_classes || iterations > 16)
	{
	  hashval_t scc_hash;

	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
	     starting from FIRSTUNIQUE to obtain a stable order.  */
	  if (classes != size && firstunique != -1)
	    {
	      hash_map <tree, hashval_t> map(size*2);

	      /* Store hash values into a map, so we can associate them with
		 the reordered SCC.  */
	      for (unsigned i = 0; i < size; ++i)
		map.put (sccstack[first+i].t, sccstack[first+i].hash);

	      /* Re-walk the SCC from the unique entry point; the walk
	         visits exactly the same trees, but in an order that no
	         longer depends on which member we entered the SCC
	         through.  */
	      DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
			 true);
	      gcc_assert (again.sccstack.length () == size);

	      memcpy (sccstack.address () + first,
		      again.sccstack.address (),
		      sizeof (scc_entry) * size);

	      /* Update hash values of individual members by hashing in the
		 index within the stable order.  This ensures uniqueness.
		 Also compute the SCC hash by mixing in all hash values in
		 the stable order we obtained.  */
	      sccstack[first].hash = *map.get (sccstack[first].t);
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		{
		  sccstack[first+i].hash
		    = iterative_hash_hashval_t (i,
						*map.get (sccstack[first+i].t));
		  scc_hash
		    = iterative_hash_hashval_t (scc_hash,
						sccstack[first+i].hash);
		}
	    }
	  /* If we got a unique hash value for each tree, then sort already
	     ensured entry-point independent order.  Only compute the final
	     SCC hash.

	     If we failed to find the unique entry point, we go by the same
	     route.  We will eventually introduce unwanted hash conflicts.  */
	  else
	    {
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		scc_hash
		  = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);

	      /* We cannot 100% guarantee that the hash won't conflict so as
		 to make it impossible to find a unique hash.  This however
		 should be an extremely rare case.  ICE for now so possible
		 issues are found and evaluated.  */
	      gcc_checking_assert (classes == size);
	    }

	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
	     hash into the hash of each element.  */
	  for (unsigned i = 0; i < size; ++i)
	    sccstack[first+i].hash
	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
	  return scc_hash;
	}

      last_classes = classes;
      iterations++;

      /* We failed to identify the entry point; propagate hash values across
	 the edges.  */
      hash_map <tree, hashval_t> map(size*2);

      for (unsigned i = 0; i < size; ++i)
	map.put (sccstack[first+i].t, sccstack[first+i].hash);

      /* Recompute each member's hash with MAP supplied, so the hash of
	 every intra-SCC neighbour flows into it; repeated rounds spread
	 distinguishing information through the whole SCC.  */
      for (unsigned i = 0; i < size; i++)
	sccstack[first+i].hash
	  = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
    }
  while (true);
}
1706
1707 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1708 already in the streamer cache. Main routine called for
1709 each visit of EXPR. */
1710
void
DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
		     tree expr, bool ref_p, bool this_ref_p)
{
  /* Handle special cases.  */
  if (expr == NULL_TREE)
    return;

  /* Do not DFS walk into indexable trees.  */
  if (this_ref_p && tree_is_indexable (expr))
    return;

  /* Check if we already streamed EXPR.  */
  if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
    {
      /* Reference to a local tree makes entry also local.  We always process
	 top of stack entry, so set max to number of entries in stack - 1.  */
      if (ob->local_trees
	  && ob->local_trees->contains (expr))
	max_local_entry = sccstack.length () - 1;
      return;
    }

  /* EXPR has not been streamed yet: queue it on the worklist for a
     later DFS visit instead of recursing, keeping the walk iterative.  */
  worklist w;
  w.expr = expr;
  w.from_state = from_state;
  w.cstate = NULL;
  w.ref_p = ref_p;
  w.this_ref_p = this_ref_p;
  worklist_vec.safe_push (w);
}
1742
1743
1744 /* Emit the physical representation of tree node EXPR to output block OB.
1745 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1746 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1747
void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  /* A NULL tree is streamed as a lone LTO_null record.  */
  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees (declarations etc.) are emitted as references into
     per-kind index tables rather than by value.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      if (streamer_debugging)
	streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			     lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      if (streamer_dump_file)
	{
	  print_node_brief (streamer_dump_file, "   Streaming SCC of ",
			    expr, 4);
	  fprintf (streamer_dump_file, "\n");
	}

      /* Start the DFS walk.  The DFS constructor streams out all SCCs
	 of trees reachable from EXPR that are not yet in the writer
	 cache; the guard above ensures this cannot re-enter.  */
      in_dfs_walk = true;
      DFS (ob, expr, ref_p, this_ref_p, false);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);

      /* DFS walk above possibly skipped streaming EXPR itself to let us inline
	 it.  */
      if (!existed_p)
	lto_output_tree_1 (ob, expr, 0, ref_p, this_ref_p);
      else if (this_ref_p)
	{
	  /* EXPR was streamed as part of an SCC; refer to its cache
	     slot instead of writing it again.  */
	  streamer_write_record_start (ob, LTO_tree_pickle_reference);
	  streamer_write_uhwi (ob, ix);
	  if (streamer_debugging)
	    streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
				 lto_tree_code_to_tag (TREE_CODE (expr)));
	}
      if (streamer_dump_file)
	{
	  print_node_brief (streamer_dump_file, "   Finished SCC of ",
			    expr, 4);
	  fprintf (streamer_dump_file, "\n\n");
	}
      lto_stats.num_pickle_refs_output++;
    }
}
1829
1830
1831 /* Output to OB a list of try/catch handlers starting with FIRST. */
1832
1833 static void
output_eh_try_list(struct output_block * ob,eh_catch first)1834 output_eh_try_list (struct output_block *ob, eh_catch first)
1835 {
1836 eh_catch n;
1837
1838 for (n = first; n; n = n->next_catch)
1839 {
1840 streamer_write_record_start (ob, LTO_eh_catch);
1841 stream_write_tree (ob, n->type_list, true);
1842 stream_write_tree (ob, n->filter_list, true);
1843 stream_write_tree (ob, n->label, true);
1844 }
1845
1846 streamer_write_record_start (ob, LTO_null);
1847 }
1848
1849
1850 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1851 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1852 detect EH region sharing. */
1853
1854 static void
output_eh_region(struct output_block * ob,eh_region r)1855 output_eh_region (struct output_block *ob, eh_region r)
1856 {
1857 enum LTO_tags tag;
1858
1859 if (r == NULL)
1860 {
1861 streamer_write_record_start (ob, LTO_null);
1862 return;
1863 }
1864
1865 if (r->type == ERT_CLEANUP)
1866 tag = LTO_ert_cleanup;
1867 else if (r->type == ERT_TRY)
1868 tag = LTO_ert_try;
1869 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1870 tag = LTO_ert_allowed_exceptions;
1871 else if (r->type == ERT_MUST_NOT_THROW)
1872 tag = LTO_ert_must_not_throw;
1873 else
1874 gcc_unreachable ();
1875
1876 streamer_write_record_start (ob, tag);
1877 streamer_write_hwi (ob, r->index);
1878
1879 if (r->outer)
1880 streamer_write_hwi (ob, r->outer->index);
1881 else
1882 streamer_write_zero (ob);
1883
1884 if (r->inner)
1885 streamer_write_hwi (ob, r->inner->index);
1886 else
1887 streamer_write_zero (ob);
1888
1889 if (r->next_peer)
1890 streamer_write_hwi (ob, r->next_peer->index);
1891 else
1892 streamer_write_zero (ob);
1893
1894 if (r->type == ERT_TRY)
1895 {
1896 output_eh_try_list (ob, r->u.eh_try.first_catch);
1897 }
1898 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1899 {
1900 stream_write_tree (ob, r->u.allowed.type_list, true);
1901 stream_write_tree (ob, r->u.allowed.label, true);
1902 streamer_write_uhwi (ob, r->u.allowed.filter);
1903 }
1904 else if (r->type == ERT_MUST_NOT_THROW)
1905 {
1906 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1907 bitpack_d bp = bitpack_create (ob->main_stream);
1908 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1909 streamer_write_bitpack (&bp);
1910 }
1911
1912 if (r->landing_pads)
1913 streamer_write_hwi (ob, r->landing_pads->index);
1914 else
1915 streamer_write_zero (ob);
1916 }
1917
1918
1919 /* Output landing pad LP to OB. */
1920
1921 static void
output_eh_lp(struct output_block * ob,eh_landing_pad lp)1922 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1923 {
1924 if (lp == NULL)
1925 {
1926 streamer_write_record_start (ob, LTO_null);
1927 return;
1928 }
1929
1930 streamer_write_record_start (ob, LTO_eh_landing_pad);
1931 streamer_write_hwi (ob, lp->index);
1932 if (lp->next_lp)
1933 streamer_write_hwi (ob, lp->next_lp->index);
1934 else
1935 streamer_write_zero (ob);
1936
1937 if (lp->region)
1938 streamer_write_hwi (ob, lp->region->index);
1939 else
1940 streamer_write_zero (ob);
1941
1942 stream_write_tree (ob, lp->post_landing_pad, true);
1943 }
1944
1945
1946 /* Output the existing eh_table to OB. */
1947
static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree.  */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array, each preceded by
	 the array length so the reader can size its table.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
	output_eh_region (ob, eh);

      /* Emit all landing pads.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
	output_eh_lp (ob, lp);

      /* Emit all the runtime type data.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
	stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  The ARM EABI unwinder stores
	 trees here, all other targets store raw bytes.  */
      if (targetm.arm_eabi_unwinder)
	{
	  tree t;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
	    stream_write_tree (ob, t, true);
	}
      else
	{
	  uchar c;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
	    streamer_write_char_stream (ob->main_stream, c);
	}
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all.  */
  streamer_write_record_start (ob, LTO_null);
}
1999
2000
2001 /* Output all of the active ssa names to the ssa_names stream. */
2002
static void
output_ssa_names (struct output_block *ob, struct function *fn)
{
  unsigned int i, len;

  len = vec_safe_length (SSANAMES (fn));
  streamer_write_uhwi (ob, len);

  /* SSA name indices start at 1; the zero written at the end below
     terminates the list.  */
  for (i = 1; i < len; i++)
    {
      tree ptr = (*SSANAMES (fn))[i];

      /* Skip names the reader does not need: freed names, virtual
	 operands, and names whose defining statement is no longer in
	 the CFG.  */
      if (ptr == NULL_TREE
	  || SSA_NAME_IN_FREE_LIST (ptr)
	  || virtual_operand_p (ptr)
	  /* Simply skip unreleased SSA names.  */
	  || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
	      && (! SSA_NAME_DEF_STMT (ptr)
		  || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
	continue;

      /* Each entry is its index, a default-def flag byte, and either
	 the underlying variable or (lacking one) the name's type.  */
      streamer_write_uhwi (ob, i);
      streamer_write_char_stream (ob->main_stream,
				  SSA_NAME_IS_DEFAULT_DEF (ptr));
      if (SSA_NAME_VAR (ptr))
	stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
      else
	/* ??? This drops SSA_NAME_IDENTIFIER on the floor.  */
	stream_write_tree (ob, TREE_TYPE (ptr), true);
    }

  streamer_write_zero (ob);
}
2036
2037
2038
2039 /* Output the cfg. */
2040
static void
output_cfg (struct output_block *ob, struct function *fn)
{
  /* Redirect the main stream to the dedicated CFG stream for the
     duration of this function; restored before returning.  */
  struct lto_output_stream *tmp_stream = ob->main_stream;
  basic_block bb;

  ob->main_stream = ob->cfg_stream;

  streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
		       profile_status_for_fn (fn));

  /* Output the number of the highest basic block.  */
  streamer_write_uhwi (ob, last_basic_block_for_fn (fn));

  FOR_ALL_BB_FN (bb, fn)
    {
      edge_iterator ei;
      edge e;

      streamer_write_hwi (ob, bb->index);

      /* Output the successors and the edge flags.  */
      streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  /* Destination index, flags and goto locus are bit-packed;
	     the probability is streamed separately.  */
	  bitpack_d bp = bitpack_create (ob->main_stream);
	  bp_pack_var_len_unsigned (&bp, e->dest->index);
	  bp_pack_var_len_unsigned (&bp, e->flags);
	  stream_output_location_and_block (ob, &bp, e->goto_locus);
	  e->probability.stream_out (ob);
	}
    }

  /* -1 terminates the per-block records.  */
  streamer_write_hwi (ob, -1);

  /* Emit the basic-block chain starting at the entry block, again
     terminated by -1.  */
  bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
  while (bb->next_bb)
    {
      streamer_write_hwi (ob, bb->next_bb->index);
      bb = bb->next_bb;
    }

  streamer_write_hwi (ob, -1);

  /* Output the number of loops.  */
  streamer_write_uhwi (ob, number_of_loops (fn));

  /* Output each loop, skipping the tree root which has number zero.  */
  for (unsigned i = 1; i < number_of_loops (fn); ++i)
    {
      class loop *loop = get_loop (fn, i);

      /* Write the index of the loop header.  That's enough to rebuild
	 the loop tree on the reader side.  Stream -1 for an unused
	 loop entry.  */
      if (!loop)
	{
	  streamer_write_hwi (ob, -1);
	  continue;
	}
      else
	streamer_write_hwi (ob, loop->header->index);

      /* Write everything copy_loop_info copies.  Each optional
	 widest-int is guarded by its any_* flag.  */
      streamer_write_enum (ob->main_stream,
			   loop_estimation, EST_LAST, loop->estimate_state);
      streamer_write_hwi (ob, loop->any_upper_bound);
      if (loop->any_upper_bound)
	streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
      streamer_write_hwi (ob, loop->any_likely_upper_bound);
      if (loop->any_likely_upper_bound)
	streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
      streamer_write_hwi (ob, loop->any_estimate);
      if (loop->any_estimate)
	streamer_write_widest_int (ob, loop->nb_iterations_estimate);

      /* Write OMP SIMD related info.  */
      streamer_write_hwi (ob, loop->safelen);
      streamer_write_hwi (ob, loop->unroll);
      streamer_write_hwi (ob, loop->owned_clique);
      streamer_write_hwi (ob, loop->dont_vectorize);
      streamer_write_hwi (ob, loop->force_vectorize);
      streamer_write_hwi (ob, loop->finite_p);
      stream_write_tree (ob, loop->simduid, true);
    }

  ob->main_stream = tmp_stream;
}
2129
2130
2131 /* Create the header in the file using OB. If the section type is for
2132 a function, set FN to the decl for that function. */
2133
void
produce_asm (struct output_block *ob, tree fn)
{
  enum lto_section_type section_type = ob->section_type;
  struct lto_function_header header;
  char *section_name;

  /* Function bodies go to a per-function section named after the
     function's assembler name; other section types use a generic
     section name.  */
  if (section_type == LTO_section_function_body)
    {
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
      section_name = lto_get_section_name (section_type, name,
					   symtab_node::get (fn)->order,
					   NULL);
    }
  else
    section_name = lto_get_section_name (section_type, NULL, 0, NULL);

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header is stream computed here.  */
  memset (&header, 0, sizeof (struct lto_function_header));

  /* Only function-body sections carry a CFG stream.  */
  if (section_type == LTO_section_function_body)
    header.cfg_size = ob->cfg_stream->total_size;
  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  if (section_type == LTO_section_function_body)
    lto_write_stream (ob->cfg_stream);
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
}
2172
2173
2174 /* Output the base body of struct function FN using output block OB. */
2175
static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN.  The reader must unpack these
     bits in exactly this order.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->calls_eh_return, 1);
  bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
  bp_pack_value (&bp, fn->has_simduid_loops, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);
  bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  /* Save the instance discriminator if present.  A presence bit is
     always written; the value follows only when set.  */
  int *instance_number_p = NULL;
  if (decl_to_instance_map)
    instance_number_p = decl_to_instance_map->get (fn->decl);
  bp_pack_value (&bp, !!instance_number_p, 1);
  if (instance_number_p)
    bp_pack_value (&bp, *instance_number_p, sizeof (int) * CHAR_BIT);

  streamer_write_bitpack (&bp);
}
2231
2232
2233 /* Collect all leaf BLOCKs beyond ROOT into LEAFS. */
2234
2235 static void
collect_block_tree_leafs(tree root,vec<tree> & leafs)2236 collect_block_tree_leafs (tree root, vec<tree> &leafs)
2237 {
2238 for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2239 if (! BLOCK_SUBBLOCKS (root))
2240 leafs.safe_push (root);
2241 else
2242 collect_block_tree_leafs (root, leafs);
2243 }
2244
2245 /* This performs function body modifications that are needed for streaming
2246 to work. */
2247
void
lto_prepare_function_for_streaming (struct cgraph_node *node)
{
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
  basic_block bb;

  /* Drop the loop structures; only the CFG-derived information that
     output_cfg streams survives.  */
  if (number_of_loops (fn))
    {
      push_cfun (fn);
      loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
      loop_optimizer_finalize ();
      pop_cfun ();
    }
  /* We will renumber the statements.  The code that does this uses
     the same ordering that we use for serializing them so we can use
     the same code on the other end and not have to write out the
     statement numbers.  We do not assign UIDs to PHIs here because
     virtual PHIs get re-computed on-the-fly which would make numbers
     inconsistent.  */
  set_gimple_stmt_max_uid (fn, 0);
  FOR_ALL_BB_FN (bb, fn)
    {
      /* First pass: non-virtual PHIs, then ordinary statements, in
	 the exact order the streamer will emit them.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *stmt = gsi.phi ();

	  /* Virtual PHIs are not going to be streamed.  */
	  if (!virtual_operand_p (gimple_phi_result (stmt)))
	    gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
	}
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
	}
    }
  /* To avoid keeping duplicate gimple IDs in the statements, renumber
     virtual phis now.  These UIDs come after all streamed statements.  */
  FOR_ALL_BB_FN (bb, fn)
    {
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *stmt = gsi.phi ();
	  if (virtual_operand_p (gimple_phi_result (stmt)))
	    gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
	}
    }

}
2300
2301 /* Output the body of function NODE->DECL. */
2302
static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  if (streamer_dump_file)
    fprintf (streamer_dump_file, "\nStreaming body of %s\n",
	     node->dump_name ());

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  ob->symbol = node;

  /* We must not be inside any function here.  */
  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output debug args if available.  A zero count means none.  */
  vec<tree, va_gc> **debugargs = decl_debug_args_lookup (function);
  if (! debugargs)
    streamer_write_uhwi (ob, 0);
  else
    {
      streamer_write_uhwi (ob, (*debugargs)->length ());
      for (unsigned i = 0; i < (*debugargs)->length (); ++i)
	stream_write_tree (ob, (**debugargs)[i], true);
    }

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);
  /* As we do not recurse into BLOCK_SUBBLOCKS but only BLOCK_SUPERCONTEXT
     collect block tree leafs and stream those.  */
  auto_vec<tree> block_tree_leafs;
  if (DECL_INITIAL (function))
    collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
  streamer_write_uhwi (ob, block_tree_leafs.length ());
  for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
    stream_write_tree (ob, block_tree_leafs[i], true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  A leading 1/0 tells the reader whether a body follows.  */
  if (gimple_has_body_p (function))
    {
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      output_cfg (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);
    }
  else
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
  if (streamer_dump_file)
    fprintf (streamer_dump_file, "Finished streaming %s\n",
	     node->dump_name ());
}
2388
/* Output the initializer of variable NODE->DECL.  */
2390
static void
output_constructor (struct varpool_node *node)
{
  tree var = node->decl;
  struct output_block *ob;

  if (streamer_dump_file)
    fprintf (streamer_dump_file, "\nStreaming constructor of %s\n",
	     node->dump_name ());

  timevar_push (TV_IPA_LTO_CTORS_OUT);
  ob = create_output_block (LTO_section_function_body);

  ob->symbol = node;

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* Output DECL_INITIAL of the variable, i.e. its initializer.  */
  stream_write_tree (ob, DECL_INITIAL (var), true);

  /* Create a section to hold the pickled output of this constructor.  */
  produce_asm (ob, var);

  destroy_output_block (ob);
  if (streamer_dump_file)
    fprintf (streamer_dump_file, "Finished streaming %s\n",
	     node->dump_name ());
  timevar_pop (TV_IPA_LTO_CTORS_OUT);
}
2422
2423
2424 /* Emit toplevel asms. */
2425
void
lto_output_toplevel_asms (void)
{
  struct output_block *ob;
  struct asm_node *can;
  char *section_name;
  struct lto_simple_header_with_strings header;

  /* Nothing to do when there are no toplevel asm statements.  */
  if (!symtab->first_asm_symbol ())
    return;

  ob = create_output_block (LTO_section_asm);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* Emit each asm string together with its symbol-table order.  */
  for (can = symtab->first_asm_symbol (); can; can = can->next)
    {
      streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
      streamer_write_hwi (ob, can->order);
    }

  /* A NULL string terminates the list.  */
  streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);

  section_name = lto_get_section_name (LTO_section_asm, NULL, 0, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  memset (&header, 0, sizeof (header));

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  destroy_output_block (ob);
}
2470
2471
2472 /* Copy the function body or variable constructor of NODE without deserializing. */
2473
2474 static void
copy_function_or_variable(struct symtab_node * node)2475 copy_function_or_variable (struct symtab_node *node)
2476 {
2477 tree function = node->decl;
2478 struct lto_file_decl_data *file_data = node->lto_file_data;
2479 const char *data;
2480 size_t len;
2481 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2482 char *section_name =
2483 lto_get_section_name (LTO_section_function_body, name, node->order, NULL);
2484 size_t i, j;
2485 struct lto_in_decl_state *in_state;
2486 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2487
2488 if (streamer_dump_file)
2489 fprintf (streamer_dump_file, "Copying section for %s\n", name);
2490 lto_begin_section (section_name, false);
2491 free (section_name);
2492
2493 /* We may have renamed the declaration, e.g., a static function. */
2494 name = lto_get_decl_name_mapping (file_data, name);
2495
2496 data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2497 name, node->order - file_data->order_base,
2498 &len);
2499 gcc_assert (data);
2500
2501 /* Do a bit copy of the function body. */
2502 lto_write_raw_data (data, len);
2503
2504 /* Copy decls. */
2505 in_state =
2506 lto_get_function_in_decl_state (node->lto_file_data, function);
2507 out_state->compressed = in_state->compressed;
2508 gcc_assert (in_state);
2509
2510 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2511 {
2512 size_t n = vec_safe_length (in_state->streams[i]);
2513 vec<tree, va_gc> *trees = in_state->streams[i];
2514 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2515
2516 /* The out state must have the same indices and the in state.
2517 So just copy the vector. All the encoders in the in state
2518 must be empty where we reach here. */
2519 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2520 encoder->trees.reserve_exact (n);
2521 for (j = 0; j < n; j++)
2522 encoder->trees.safe_push ((*trees)[j]);
2523 }
2524
2525 lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2526 data, len);
2527 lto_end_section ();
2528 }
2529
2530 /* Wrap symbol references in *TP inside a type-preserving MEM_REF. */
2531
2532 static tree
wrap_refs(tree * tp,int * ws,void *)2533 wrap_refs (tree *tp, int *ws, void *)
2534 {
2535 tree t = *tp;
2536 if (handled_component_p (t)
2537 && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2538 && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2539 {
2540 tree decl = TREE_OPERAND (t, 0);
2541 tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2542 TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2543 build1 (ADDR_EXPR, ptrtype, decl),
2544 build_int_cst (ptrtype, 0));
2545 TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2546 *ws = 0;
2547 }
2548 else if (TREE_CODE (t) == CONSTRUCTOR)
2549 ;
2550 else if (!EXPR_P (t))
2551 *ws = 0;
2552 return NULL_TREE;
2553 }
2554
2555 /* Remove functions that are no longer used from offload_funcs, and mark the
2556 remaining ones with DECL_PRESERVE_P. */
2557
static void
prune_offload_funcs (void)
{
  /* Nothing to do when no offload functions were recorded.  */
  if (!offload_funcs)
    return;

  /* Drop entries whose cgraph node no longer exists, preserving the
     relative order of the remaining entries.  */
  unsigned ix, ix2;
  tree *elem_ptr;
  VEC_ORDERED_REMOVE_IF (*offload_funcs, ix, ix2, elem_ptr,
			 cgraph_node::get (*elem_ptr) == NULL);

  /* Mark the survivors so they are kept.  */
  tree fn_decl;
  FOR_EACH_VEC_ELT (*offload_funcs, ix, fn_decl)
    DECL_PRESERVE_P (fn_decl) = 1;
}
2573
2574 /* Produce LTO section that contains global information
2575 about LTO bytecode. */
2576
static void
produce_lto_section ()
{
  /* Stream LTO meta section.  */
  output_block *ob = create_output_block (LTO_section_lto);

  char * section_name = lto_get_section_name (LTO_section_lto, NULL, 0, NULL);
  lto_begin_section (section_name, false);
  free (section_name);

  /* Prefer ZSTD when it was available at configure time, otherwise
     fall back to ZLIB.  */
#ifdef HAVE_ZSTD_H
  lto_compression compression = ZSTD;
#else
  lto_compression compression = ZLIB;
#endif

  /* Record version, slim/fat object mode and compression scheme.  */
  bool slim_object = flag_generate_lto && !flag_fat_lto_objects;
  lto_section s
    = { LTO_major_version, LTO_minor_version, slim_object, 0 };
  s.set_compression (compression);
  lto_write_data (&s, sizeof s);
  lto_end_section ();
  destroy_output_block (ob);
}
2601
2602 /* Compare symbols to get them sorted by filename (to optimize streaming) */
2603
2604 static int
cmp_symbol_files(const void * pn1,const void * pn2,void * id_map_)2605 cmp_symbol_files (const void *pn1, const void *pn2, void *id_map_)
2606 {
2607 const symtab_node *n1 = *(const symtab_node * const *)pn1;
2608 const symtab_node *n2 = *(const symtab_node * const *)pn2;
2609 hash_map<lto_file_decl_data *, int> *id_map
2610 = (hash_map<lto_file_decl_data *, int> *)id_map_;
2611
2612 int file_order1 = n1->lto_file_data ? n1->lto_file_data->order : -1;
2613 int file_order2 = n2->lto_file_data ? n2->lto_file_data->order : -1;
2614
2615 /* Order files same way as they appeared in the command line to reduce
2616 seeking while copying sections. */
2617 if (file_order1 != file_order2)
2618 return file_order1 - file_order2;
2619
2620 /* Order within static library. */
2621 if (n1->lto_file_data && n1->lto_file_data->id != n2->lto_file_data->id)
2622 return *id_map->get (n1->lto_file_data) - *id_map->get (n2->lto_file_data);
2623
2624 /* And finaly order by the definition order. */
2625 return n1->order - n2->order;
2626 }
2627
2628 /* Main entry point from the pass manager. */
2629
void
lto_output (void)
{
  struct lto_out_decl_state *decl_state;
  /* With checking, a bitmap of DECL_UIDs already emitted, used to
     assert each symbol is output at most once.  */
  bitmap output = NULL;
  bitmap_obstack output_obstack;
  unsigned int i, n_nodes;
  lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
  auto_vec<symtab_node *> symbols_to_copy;

  prune_offload_funcs ();

  if (flag_checking)
    {
      bitmap_obstack_initialize (&output_obstack);
      output = BITMAP_ALLOC (&output_obstack);
    }

  /* Initialize the streamer.  */
  lto_streamer_init ();

  produce_lto_section ();

  n_nodes = lto_symtab_encoder_size (encoder);
  /* Prepare vector of functions to output and then sort it to optimize
     section copying.  */
  for (i = 0; i < n_nodes; i++)
    {
      symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
      /* Aliases have no body of their own.  */
      if (snode->alias)
	continue;
      if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
	{
	  if (lto_symtab_encoder_encode_body_p (encoder, node))
	    symbols_to_copy.safe_push (node);
	}
      else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
	{
	  /* Wrap symbol references inside the ctor in a type
	     preserving MEM_REF.  */
	  tree ctor = DECL_INITIAL (node->decl);
	  if (ctor && !in_lto_p)
	    walk_tree (&ctor, wrap_refs, NULL, NULL);
	  if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
	      && lto_symtab_encoder_encode_initializer_p (encoder, node))
	    symbols_to_copy.safe_push (node);
	}
    }
  /* Map the section hash to an order it appears in symbols_to_copy
     since we want to sort same ID symbols next to each other but need
     to avoid making overall order depend on the actual hash value.  */
  int order = 0;
  hash_map<lto_file_decl_data *, int> id_map;
  for (i = 0; i < symbols_to_copy.length (); ++i)
    {
      symtab_node *snode = symbols_to_copy[i];
      if (snode->lto_file_data)
	{
	  bool existed_p = false;
	  int &ord = id_map.get_or_insert (snode->lto_file_data, &existed_p);
	  if (!existed_p)
	    ord = order++;
	}
    }
  symbols_to_copy.sort (cmp_symbol_files, (void *)&id_map);
  for (i = 0; i < symbols_to_copy.length (); i++)
    {
      symtab_node *snode = symbols_to_copy[i];
      cgraph_node *cnode;
      varpool_node *vnode;

      if (flag_checking)
	gcc_assert (bitmap_set_bit (output, DECL_UID (snode->decl)));

      /* Each symbol gets its own out-decl-state, pushed for the
	 duration of its streaming and recorded afterwards.  */
      decl_state = lto_new_out_decl_state ();
      lto_push_out_decl_state (decl_state);

      if ((cnode = dyn_cast <cgraph_node *> (snode))
	  && (gimple_has_body_p (cnode->decl)
	      || (!flag_wpa
		  && flag_incremental_link != INCREMENTAL_LINK_LTO)
	      /* Thunks have no body but they may be synthetized
		 at WPA time.  */
	      || DECL_ARGUMENTS (cnode->decl)))
	output_function (cnode);
      else if ((vnode = dyn_cast <varpool_node *> (snode))
	       && (DECL_INITIAL (vnode->decl) != error_mark_node
		   || (!flag_wpa
		       && flag_incremental_link != INCREMENTAL_LINK_LTO)))
	output_constructor (vnode);
      else
	/* No IL available; copy the section bytes verbatim.  */
	copy_function_or_variable (snode);
      gcc_assert (lto_get_out_decl_state () == decl_state);
      lto_pop_out_decl_state ();
      lto_record_function_out_decl_state (snode->decl, decl_state);
    }

  /* Emit the callgraph after emitting function bodies.  This needs to
     be done now to make sure that all the statements in every function
     have been renumbered so that edges can be associated with call
     statements using the statement UIDs.  */
  output_symtab ();

  output_offload_tables ();

  if (flag_checking)
    {
      BITMAP_FREE (output);
      bitmap_obstack_release (&output_obstack);
    }
}
2741
2742 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2743 from it and required for correct representation of its semantics.
2744 Each node in ENCODER must be a global declaration or a type. A node
2745 is written only once, even if it appears multiple times in the
2746 vector. Certain transitively-reachable nodes, such as those
2747 representing expressions, may be duplicated, but such nodes
2748 must not appear in ENCODER itself. */
2749
2750 static void
write_global_stream(struct output_block * ob,struct lto_tree_ref_encoder * encoder)2751 write_global_stream (struct output_block *ob,
2752 struct lto_tree_ref_encoder *encoder)
2753 {
2754 tree t;
2755 size_t index;
2756 const size_t size = lto_tree_ref_encoder_size (encoder);
2757
2758 for (index = 0; index < size; index++)
2759 {
2760 t = lto_tree_ref_encoder_get_tree (encoder, index);
2761 if (streamer_dump_file)
2762 {
2763 fprintf (streamer_dump_file, " %i:", (int)index);
2764 print_node_brief (streamer_dump_file, "", t, 4);
2765 fprintf (streamer_dump_file, "\n");
2766 }
2767 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2768 stream_write_tree (ob, t, false);
2769 }
2770 }
2771
2772
2773 /* Write a sequence of indices into the globals vector corresponding
2774 to the trees in ENCODER. These are used by the reader to map the
2775 indices used to refer to global entities within function bodies to
2776 their referents. */
2777
2778 static void
write_global_references(struct output_block * ob,struct lto_tree_ref_encoder * encoder)2779 write_global_references (struct output_block *ob,
2780 struct lto_tree_ref_encoder *encoder)
2781 {
2782 tree t;
2783 uint32_t index;
2784 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2785
2786 /* Write size and slot indexes as 32-bit unsigned numbers. */
2787 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2788 data[0] = size;
2789
2790 for (index = 0; index < size; index++)
2791 {
2792 unsigned slot_num;
2793
2794 t = lto_tree_ref_encoder_get_tree (encoder, index);
2795 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2796 gcc_assert (slot_num != (unsigned)-1);
2797 data[index + 1] = slot_num;
2798 }
2799
2800 lto_write_data (data, sizeof (int32_t) * (size + 1));
2801 free (data);
2802 }
2803
2804
2805 /* Write all the streams in an lto_out_decl_state STATE using
2806 output block OB and output stream OUT_STREAM. */
2807
2808 void
lto_output_decl_state_streams(struct output_block * ob,struct lto_out_decl_state * state)2809 lto_output_decl_state_streams (struct output_block *ob,
2810 struct lto_out_decl_state *state)
2811 {
2812 int i;
2813
2814 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2815 write_global_stream (ob, &state->streams[i]);
2816 }
2817
2818
2819 /* Write all the references in an lto_out_decl_state STATE using
2820 output block OB and output stream OUT_STREAM. */
2821
2822 void
lto_output_decl_state_refs(struct output_block * ob,struct lto_out_decl_state * state)2823 lto_output_decl_state_refs (struct output_block *ob,
2824 struct lto_out_decl_state *state)
2825 {
2826 unsigned i;
2827 unsigned ref;
2828 tree decl;
2829
2830 /* Write reference to FUNCTION_DECL. If there is not function,
2831 write reference to void_type_node. */
2832 decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2833 streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2834 gcc_assert (ref != (unsigned)-1);
2835 ref = ref * 2 + (state->compressed ? 1 : 0);
2836 lto_write_data (&ref, sizeof (uint32_t));
2837
2838 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2839 write_global_references (ob, &state->streams[i]);
2840 }
2841
2842
2843 /* Return the written size of STATE. */
2844
2845 static size_t
lto_out_decl_state_written_size(struct lto_out_decl_state * state)2846 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2847 {
2848 int i;
2849 size_t size;
2850
2851 size = sizeof (int32_t); /* fn_ref. */
2852 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2853 {
2854 size += sizeof (int32_t); /* vector size. */
2855 size += (lto_tree_ref_encoder_size (&state->streams[i])
2856 * sizeof (int32_t));
2857 }
2858 return size;
2859 }
2860
2861
2862 /* Write symbol T into STREAM in CACHE. SEEN specifies symbols we wrote
2863 so far. */
2864
static void
write_symbol (struct streamer_tree_cache_d *cache,
	      tree t, hash_set<const char *> *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
  unsigned slot_num;
  uint64_t size;
  const char *comdat;
  unsigned char c;

  gcc_assert (VAR_OR_FUNCTION_DECL_P (t));

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does.  */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* Emit each symbol name at most once.  */
  if (seen->add (name))
    return;

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol for the linker-plugin interface.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_node::get (t)
		      && cgraph_node::get (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right.  */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Only common symbols carry a size; for everything else emit 0.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
  else
    comdat = "";

  /* Record layout: NUL-terminated name and comdat group, one byte
     each for kind and visibility, 8 bytes of size, 4 bytes of slot
     number.  */
  lto_write_data (name, strlen (name) + 1);
  lto_write_data (comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_write_data (&c, 1);
  c = (unsigned char) visibility;
  lto_write_data (&c, 1);
  lto_write_data (&size, 8);
  lto_write_data (&slot_num, 4);
}
2962
2963 /* Write extension information for symbols (symbol type, section flags). */
2964
2965 static void
write_symbol_extension_info(tree t)2966 write_symbol_extension_info (tree t)
2967 {
2968 unsigned char c;
2969 c = ((unsigned char) TREE_CODE (t) == VAR_DECL
2970 ? GCCST_VARIABLE : GCCST_FUNCTION);
2971 lto_write_data (&c, 1);
2972 unsigned char section_kind = 0;
2973 if (TREE_CODE (t) == VAR_DECL)
2974 {
2975 section *s = get_variable_section (t, false);
2976 if (s->common.flags & SECTION_BSS)
2977 section_kind |= GCCSSK_BSS;
2978 }
2979 lto_write_data (§ion_kind, 1);
2980 }
2981
2982 /* Write an IL symbol table to OB.
2983 SET and VSET are cgraph/varpool node sets we are outputting. */
2984
2985 static unsigned int
produce_symtab(struct output_block * ob)2986 produce_symtab (struct output_block *ob)
2987 {
2988 unsigned int streamed_symbols = 0;
2989 struct streamer_tree_cache_d *cache = ob->writer_cache;
2990 char *section_name = lto_get_section_name (LTO_section_symtab, NULL, 0, NULL);
2991 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2992 lto_symtab_encoder_iterator lsei;
2993
2994 lto_begin_section (section_name, false);
2995 free (section_name);
2996
2997 hash_set<const char *> seen;
2998
2999 /* Write the symbol table.
3000 First write everything defined and then all declarations.
3001 This is necessary to handle cases where we have duplicated symbols. */
3002 for (lsei = lsei_start (encoder);
3003 !lsei_end_p (lsei); lsei_next (&lsei))
3004 {
3005 symtab_node *node = lsei_node (lsei);
3006
3007 if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3008 continue;
3009 write_symbol (cache, node->decl, &seen, false);
3010 ++streamed_symbols;
3011 }
3012 for (lsei = lsei_start (encoder);
3013 !lsei_end_p (lsei); lsei_next (&lsei))
3014 {
3015 symtab_node *node = lsei_node (lsei);
3016
3017 if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3018 continue;
3019 write_symbol (cache, node->decl, &seen, false);
3020 ++streamed_symbols;
3021 }
3022
3023 lto_end_section ();
3024
3025 return streamed_symbols;
3026 }
3027
3028 /* Symtab extension version. */
3029 #define LTO_SYMTAB_EXTENSION_VERSION 1
3030
3031 /* Write an IL symbol table extension to OB.
3032 SET and VSET are cgraph/varpool node sets we are outputting. */
3033
3034 static void
produce_symtab_extension(struct output_block * ob,unsigned int previous_streamed_symbols)3035 produce_symtab_extension (struct output_block *ob,
3036 unsigned int previous_streamed_symbols)
3037 {
3038 unsigned int streamed_symbols = 0;
3039 char *section_name = lto_get_section_name (LTO_section_symtab_extension,
3040 NULL, 0, NULL);
3041 lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
3042 lto_symtab_encoder_iterator lsei;
3043
3044 lto_begin_section (section_name, false);
3045 free (section_name);
3046
3047 unsigned char version = LTO_SYMTAB_EXTENSION_VERSION;
3048 lto_write_data (&version, 1);
3049
3050 /* Write the symbol table.
3051 First write everything defined and then all declarations.
3052 This is necessary to handle cases where we have duplicated symbols. */
3053 for (lsei = lsei_start (encoder);
3054 !lsei_end_p (lsei); lsei_next (&lsei))
3055 {
3056 symtab_node *node = lsei_node (lsei);
3057
3058 if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3059 continue;
3060 write_symbol_extension_info (node->decl);
3061 ++streamed_symbols;
3062 }
3063 for (lsei = lsei_start (encoder);
3064 !lsei_end_p (lsei); lsei_next (&lsei))
3065 {
3066 symtab_node *node = lsei_node (lsei);
3067
3068 if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3069 continue;
3070 write_symbol_extension_info (node->decl);
3071 ++streamed_symbols;
3072 }
3073
3074 gcc_assert (previous_streamed_symbols == streamed_symbols);
3075 lto_end_section ();
3076 }
3077
3078
3079 /* Init the streamer_mode_table for output, where we collect info on what
3080 machine_mode values have been streamed. */
3081 void
lto_output_init_mode_table(void)3082 lto_output_init_mode_table (void)
3083 {
3084 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
3085 }
3086
3087
/* Write the mode table: a self-describing record of every machine_mode
   that was streamed (as collected in streamer_mode_table), so the
   reader can map mode numbers from this host to its own.  The bitpack
   layout here is the on-disk format; the pack order must match the
   reader exactly.  */
static void
lto_write_mode_table (void)
{
  struct output_block *ob;
  ob = create_output_block (LTO_section_mode_table);
  bitpack_d bp = bitpack_create (ob->main_stream);

  /* Ensure that for GET_MODE_INNER (m) != m we have
     also the inner mode marked.  */
  for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
    if (streamer_mode_table[i])
      {
	machine_mode m = (machine_mode) i;
	machine_mode inner_m = GET_MODE_INNER (m);
	if (inner_m != m)
	  streamer_mode_table[(int) inner_m] = 1;
      }
  /* First stream modes that have GET_MODE_INNER (m) == m,
     so that we can refer to them afterwards.  VOIDmode and BLKmode are
     never streamed; VOIDmode doubles as the end-of-table marker below.  */
  for (int pass = 0; pass < 2; pass++)
    for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
      if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
	{
	  machine_mode m = (machine_mode) i;
	  /* Pass 0 takes scalar modes (inner == self); pass 1 takes the
	     composite modes whose inner mode was emitted in pass 0.  */
	  if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
	    continue;
	  /* Per-mode record: number, class, size, precision, inner mode,
	     nunits, then class-specific extras, then the mode's name.  */
	  bp_pack_value (&bp, m, 8);
	  bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
	  bp_pack_poly_value (&bp, GET_MODE_SIZE (m), 16);
	  bp_pack_poly_value (&bp, GET_MODE_PRECISION (m), 16);
	  bp_pack_value (&bp, GET_MODE_INNER (m), 8);
	  bp_pack_poly_value (&bp, GET_MODE_NUNITS (m), 16);
	  switch (GET_MODE_CLASS (m))
	    {
	    case MODE_FRACT:
	    case MODE_UFRACT:
	    case MODE_ACCUM:
	    case MODE_UACCUM:
	      /* Fixed-point modes also carry integral/fractional bits.  */
	      bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
	      bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
	      break;
	    case MODE_FLOAT:
	    case MODE_DECIMAL_FLOAT:
	      /* Float modes are identified by their real format name.  */
	      bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
	      break;
	    default:
	      break;
	    }
	  bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
	}
  /* Terminate the table with VOIDmode.  */
  bp_pack_value (&bp, VOIDmode, 8);

  streamer_write_bitpack (&bp);

  char *section_name
    = lto_get_section_name (LTO_section_mode_table, NULL, 0, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  struct lto_simple_header_with_strings header;
  memset (&header, 0, sizeof (header));

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
  destroy_output_block (ob);
}
3164
3165
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.  The section layout is: decl header,
   decl-state records, main stream, string stream — followed (outside
   this section) by the symbol table and, for offload streams, the
   mode table.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, 0, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* All alias pairs must have been resolved into symtab nodes by now.  */
  gcc_assert (!alias_pairs);

  /* Get rid of the global decl state hash tables to save some memory.
     The streams are only appended to from here on, so the lookup
     tables are no longer needed.  */
  out_state = lto_get_out_decl_state ();
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    if (out_state->streams[i].tree_hash_table)
      {
	delete out_state->streams[i].tree_hash_table;
	out_state->streams[i].tree_hash_table = NULL;
      }

  /* Write the global symbols.  */
  if (streamer_dump_file)
    fprintf (streamer_dump_file, "Outputting global stream\n");
  lto_output_decl_state_streams (ob, out_state);
  num_fns = lto_function_decl_states.length ();
  /* Then the per-function decl streams, in the order the functions
     were serialized.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      if (streamer_dump_file)
	fprintf (streamer_dump_file, "Outputting stream for %s\n",
		 IDENTIFIER_POINTER
		     (DECL_ASSEMBLER_NAME (fn_out_state->fn_decl)));
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states: the decl-state
     count plus the serialized size of each state.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  lto_write_data (&header, sizeof header);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  num_decl_states = num_fns + 1;
  lto_write_data (&num_decl_states, sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state = lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, fn_out_state);
    }

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    {
      unsigned int streamed_symbols = produce_symtab (ob);
      produce_symtab_extension (ob, streamed_symbols);
    }

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
  /* The mode table is only needed when streaming to an offload target,
     which may have different mode numbering.  */
  if (lto_stream_offload_p)
    lto_write_mode_table ();
}
3282