1 /* Write the GIMPLE representation to a file stream.
2
3 Copyright (C) 2009-2018 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
43 #include "debug.h"
44 #include "omp-offload.h"
45
46
47 static void lto_write_tree (struct output_block*, tree, bool);
48
/* Reset the location-tracking state in OB so that the next location
   streamed by lto_output_location is emitted in full rather than as a
   delta against a previous location.  */

static void
clear_line_info (struct output_block *ob)
{
  ob->current_file = NULL;
  ob->current_line = 0;
  ob->current_col = 0;
  ob->current_sysp = false;
}
59
60
61 /* Create the output block and return it. SECTION_TYPE is
62 LTO_section_function_body or LTO_static_initializer. */
63
64 struct output_block *
create_output_block(enum lto_section_type section_type)65 create_output_block (enum lto_section_type section_type)
66 {
67 struct output_block *ob = XCNEW (struct output_block);
68
69 ob->section_type = section_type;
70 ob->decl_state = lto_get_out_decl_state ();
71 ob->main_stream = XCNEW (struct lto_output_stream);
72 ob->string_stream = XCNEW (struct lto_output_stream);
73 ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
74
75 if (section_type == LTO_section_function_body)
76 ob->cfg_stream = XCNEW (struct lto_output_stream);
77
78 clear_line_info (ob);
79
80 ob->string_hash_table = new hash_table<string_slot_hasher> (37);
81 gcc_obstack_init (&ob->obstack);
82
83 return ob;
84 }
85
86
87 /* Destroy the output block OB. */
88
89 void
destroy_output_block(struct output_block * ob)90 destroy_output_block (struct output_block *ob)
91 {
92 enum lto_section_type section_type = ob->section_type;
93
94 delete ob->string_hash_table;
95 ob->string_hash_table = NULL;
96
97 free (ob->main_stream);
98 free (ob->string_stream);
99 if (section_type == LTO_section_function_body)
100 free (ob->cfg_stream);
101
102 streamer_tree_cache_delete (ob->writer_cache);
103 obstack_free (&ob->obstack, NULL);
104
105 free (ob);
106 }
107
108
/* Look up NODE in the type table and write the index for it to OB.

   Emits an LTO_type_ref record tag followed by NODE's index in the
   decl state's type table.  */

static void
output_type_ref (struct output_block *ob, tree node)
{
  streamer_write_record_start (ob, LTO_type_ref);
  lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
}
117
118
119 /* Return true if tree node T is written to various tables. For these
120 nodes, we sometimes want to write their phyiscal representation
121 (via lto_output_tree), and sometimes we need to emit an index
122 reference into a table (via lto_output_tree_ref). */
123
124 static bool
tree_is_indexable(tree t)125 tree_is_indexable (tree t)
126 {
127 /* Parameters and return values of functions of variably modified types
128 must go to global stream, because they may be used in the type
129 definition. */
130 if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
131 && DECL_CONTEXT (t))
132 return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
133 /* IMPORTED_DECL is put into BLOCK and thus it never can be shared. */
134 else if (TREE_CODE (t) == IMPORTED_DECL)
135 return false;
136 else if (((VAR_P (t) && !TREE_STATIC (t))
137 || TREE_CODE (t) == TYPE_DECL
138 || TREE_CODE (t) == CONST_DECL
139 || TREE_CODE (t) == NAMELIST_DECL)
140 && decl_function_context (t))
141 return false;
142 else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
143 return false;
144 /* Variably modified types need to be streamed alongside function
145 bodies because they can refer to local entities. Together with
146 them we have to localize their members as well.
147 ??? In theory that includes non-FIELD_DECLs as well. */
148 else if (TYPE_P (t)
149 && variably_modified_type_p (t, NULL_TREE))
150 return false;
151 else if (TREE_CODE (t) == FIELD_DECL
152 && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
153 return false;
154 else
155 return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
156 }
157
158
/* Output info about new location into bitpack BP.
   After outputting bitpack, lto_output_location_data has
   to be done to output actual data.  */

void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
		     location_t loc)
{
  expanded_location xloc;

  /* Only the locus part of LOC is streamed.  */
  loc = LOCATION_LOCUS (loc);
  /* Reserved locations are streamed as their own small code; any other
     location is streamed as the RESERVED_LOCATION_COUNT marker followed
     by the expanded location data below.  */
  bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
			loc < RESERVED_LOCATION_COUNT
			? loc : RESERVED_LOCATION_COUNT);
  if (loc < RESERVED_LOCATION_COUNT)
    return;

  xloc = expand_location (loc);

  /* Delta encoding against the previously streamed location: one
     changed-bit per field, then the new value for each field that
     changed.  The reader must mirror this state exactly.  */
  bp_pack_value (bp, ob->current_file != xloc.file, 1);
  bp_pack_value (bp, ob->current_line != xloc.line, 1);
  bp_pack_value (bp, ob->current_col != xloc.column, 1);

  if (ob->current_file != xloc.file)
    {
      bp_pack_string (ob, bp, xloc.file, true);
      /* The system-header flag is streamed only together with a new
	 file name.  */
      bp_pack_value (bp, xloc.sysp, 1);
    }
  ob->current_file = xloc.file;
  ob->current_sysp = xloc.sysp;

  if (ob->current_line != xloc.line)
    bp_pack_var_len_unsigned (bp, xloc.line);
  ob->current_line = xloc.line;

  if (ob->current_col != xloc.column)
    bp_pack_var_len_unsigned (bp, xloc.column);
  ob->current_col = xloc.column;
}
198
199
/* If EXPR is an indexable tree node, output a reference to it to
   output block OB.  Otherwise, output the physical representation of
   EXPR to OB.  */

static void
lto_output_tree_ref (struct output_block *ob, tree expr)
{
  enum tree_code code;

  /* Types are always referenced through the type table.  */
  if (TYPE_P (expr))
    {
      output_type_ref (ob, expr);
      return;
    }

  /* Every other indexable node is streamed as a record tag identifying
     the kind of reference, followed by its index in the corresponding
     decl table (SSA names use their version number instead).  */
  code = TREE_CODE (expr);
  switch (code)
    {
    case SSA_NAME:
      streamer_write_record_start (ob, LTO_ssa_name_ref);
      streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
      break;

    case FIELD_DECL:
      streamer_write_record_start (ob, LTO_field_decl_ref);
      lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case FUNCTION_DECL:
      streamer_write_record_start (ob, LTO_function_decl_ref);
      lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case VAR_DECL:
    case DEBUG_EXPR_DECL:
      /* Only file-scope or static variables may be referenced through
	 the global decl table.  */
      gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
      /* FALLTHRU */
    case PARM_DECL:
      streamer_write_record_start (ob, LTO_global_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case CONST_DECL:
      streamer_write_record_start (ob, LTO_const_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case IMPORTED_DECL:
      gcc_assert (decl_function_context (expr) == NULL);
      streamer_write_record_start (ob, LTO_imported_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TYPE_DECL:
      streamer_write_record_start (ob, LTO_type_decl_ref);
      lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMELIST_DECL:
      streamer_write_record_start (ob, LTO_namelist_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case NAMESPACE_DECL:
      streamer_write_record_start (ob, LTO_namespace_decl_ref);
      lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case LABEL_DECL:
      streamer_write_record_start (ob, LTO_label_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case RESULT_DECL:
      streamer_write_record_start (ob, LTO_result_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    case TRANSLATION_UNIT_DECL:
      streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
      lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
      break;

    default:
      /* No other node is indexable, so it should have been handled by
	 lto_output_tree.  */
      gcc_unreachable ();
    }
}
289
290
291 /* Return true if EXPR is a tree node that can be written to disk. */
292
293 static inline bool
lto_is_streamable(tree expr)294 lto_is_streamable (tree expr)
295 {
296 enum tree_code code = TREE_CODE (expr);
297
298 /* Notice that we reject SSA_NAMEs as well. We only emit the SSA
299 name version in lto_output_tree_ref (see output_ssa_names). */
300 return !is_lang_specific (expr)
301 && code != SSA_NAME
302 && code != LANG_TYPE
303 && code != MODIFY_EXPR
304 && code != INIT_EXPR
305 && code != TARGET_EXPR
306 && code != BIND_EXPR
307 && code != WITH_CLEANUP_EXPR
308 && code != STATEMENT_LIST
309 && (code == CASE_LABEL_EXPR
310 || code == DECL_EXPR
311 || TREE_CODE_CLASS (code) != tcc_statement);
312 }
313
314 /* Very rough estimate of streaming size of the initializer. If we ignored
315 presence of strings, we could simply just count number of non-indexable
316 tree nodes and number of references to indexable nodes. Strings however
317 may be very large and we do not want to dump them int othe global stream.
318
319 Count the size of initializer until the size in DATA is positive. */
320
321 static tree
subtract_estimated_size(tree * tp,int * ws,void * data)322 subtract_estimated_size (tree *tp, int *ws, void *data)
323 {
324 long *sum = (long *)data;
325 if (tree_is_indexable (*tp))
326 {
327 /* Indexable tree is one reference to global stream.
328 Guess it may be about 4 bytes. */
329 *sum -= 4;
330 *ws = 0;
331 }
332 /* String table entry + base of tree node needs to be streamed. */
333 if (TREE_CODE (*tp) == STRING_CST)
334 *sum -= TREE_STRING_LENGTH (*tp) + 8;
335 else
336 {
337 /* Identifiers are also variable length but should not appear
338 naked in constructor. */
339 gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
340 /* We do not really make attempt to work out size of pickled tree, as
341 it is very variable. Make it bigger than the reference. */
342 *sum -= 16;
343 }
344 if (*sum < 0)
345 return *tp;
346 return NULL_TREE;
347 }
348
349
350 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL. */
351
352 static tree
get_symbol_initial_value(lto_symtab_encoder_t encoder,tree expr)353 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
354 {
355 gcc_checking_assert (DECL_P (expr)
356 && TREE_CODE (expr) != FUNCTION_DECL
357 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
358
359 /* Handle DECL_INITIAL for symbols. */
360 tree initial = DECL_INITIAL (expr);
361 if (VAR_P (expr)
362 && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
363 && !DECL_IN_CONSTANT_POOL (expr)
364 && initial)
365 {
366 varpool_node *vnode;
367 /* Extra section needs about 30 bytes; do not produce it for simple
368 scalar values. */
369 if (!(vnode = varpool_node::get (expr))
370 || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
371 initial = error_mark_node;
372 if (initial != error_mark_node)
373 {
374 long max_size = 30;
375 if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
376 NULL))
377 initial = error_mark_node;
378 }
379 }
380
381 return initial;
382 }
383
384
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  The header of EXPR must already have been
   written by the caller (see lto_write_tree).  */

static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
{
  /* Pack all the non-pointer fields in EXPR into a bitpack and write
     the resulting bitpack.  */
  streamer_write_tree_bitfields (ob, expr);

  /* Write all the pointer fields in EXPR.  */
  streamer_write_tree_body (ob, expr, ref_p);

  /* Write any LTO-specific data to OB.  */
  if (DECL_P (expr)
      && TREE_CODE (expr) != FUNCTION_DECL
      && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
    {
      /* Handle DECL_INITIAL for symbols.  This may substitute
	 error_mark_node for dropped initializers.  */
      tree initial = get_symbol_initial_value
			 (ob->decl_state->symtab_node_encoder, expr);
      stream_write_tree (ob, initial, ref_p);
    }

  /* Stream references to early generated DIEs.  Keep in sync with the
     trees handled in dwarf2out_die_ref_for_decl.  */
  if ((DECL_P (expr)
       && TREE_CODE (expr) != FIELD_DECL
       && TREE_CODE (expr) != DEBUG_EXPR_DECL
       && TREE_CODE (expr) != TYPE_DECL)
      || TREE_CODE (expr) == BLOCK)
    {
      const char *sym;
      unsigned HOST_WIDE_INT off;
      if (debug_info_level > DINFO_LEVEL_NONE
	  && debug_hooks->die_ref_for_decl (expr, &sym, &off))
	{
	  /* Symbol plus offset locating the early debug DIE.  */
	  streamer_write_string (ob, ob->main_stream, sym, true);
	  streamer_write_uhwi (ob, off);
	}
      else
	/* A NULL string tells the reader there is no DIE reference.  */
	streamer_write_string (ob, ob->main_stream, NULL, true);
    }
}
431
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  */

static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
{
  if (!lto_is_streamable (expr))
    internal_error ("tree code %qs is not supported in LTO streams",
		    get_tree_code_name (TREE_CODE (expr)));

  /* Write the header, containing everything needed to materialize
     EXPR on the reading side.  */
  streamer_write_tree_header (ob, expr);

  /* Then the bitfields, the body and the LTO-specific trailing data.  */
  lto_write_tree_1 (ob, expr, ref_p);

  /* Mark the end of EXPR.  */
  streamer_write_zero (ob);
}
453
/* Emit the physical representation of tree node EXPR to output block OB,
   If THIS_REF_P is true, the leaves of EXPR are emitted as references via
   lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.
   HASH is the streaming hash to record for EXPR in the writer cache.  */

static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
		   bool ref_p, bool this_ref_p)
{
  unsigned ix;

  /* Indexable trees must have been emitted as references by the caller,
     not streamed physically here.  */
  gcc_checking_assert (expr != NULL_TREE
		       && !(this_ref_p && tree_is_indexable (expr)));

  /* Register EXPR in the writer cache so later occurrences become
     back-references; it must not have been streamed before.  */
  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
					      expr, hash, &ix);
  gcc_assert (!exists_p);
  if (TREE_CODE (expr) == INTEGER_CST
      && !TREE_OVERFLOW (expr))
    {
      /* Shared INTEGER_CST nodes are special because they need their
	 original type to be materialized by the reader (to implement
	 TYPE_CACHED_VALUES).  */
      streamer_write_integer_cst (ob, expr, ref_p);
    }
  else
    {
      /* This is the first time we see EXPR, write its fields
	 to OB.  */
      lto_write_tree (ob, expr, ref_p);
    }
}
485
/* Worklist-based depth-first walker over the tree graph that discovers
   strongly connected components and streams them as LTO_tree_scc
   records.  All the work happens in the constructor; SCCSTACK is
   public so that a caller re-walking a single leaf SCC (SINGLE_P) can
   inspect the entries left on it.  */

class DFS
{
public:
  DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
       bool single_p);
  ~DFS ();

  /* One tree on the SCC stack together with its streaming hash.  */
  struct scc_entry
  {
    tree t;
    hashval_t hash;
  };
  /* Stack of visited-but-not-yet-emitted trees, in DFS order.  */
  vec<scc_entry> sccstack;

private:
  /* Per-node Tarjan state: DFS preorder number and low-link.  */
  struct sccs
  {
    unsigned int dfsnum;
    unsigned int low;
  };
  /* An explicit work item standing in for a recursive DFS call.  */
  struct worklist
  {
    tree expr;
    sccs *from_state;
    sccs *cstate;
    bool ref_p;
    bool this_ref_p;
  };

  static int scc_entry_compare (const void *, const void *);

  /* Walk all outgoing tree edges of EXPR.  */
  void DFS_write_tree_body (struct output_block *ob,
			    tree expr, sccs *expr_state, bool ref_p);

  /* Handle one edge to EXPR, scheduling a visit when needed.  */
  void DFS_write_tree (struct output_block *ob, sccs *from_state,
		       tree expr, bool ref_p, bool this_ref_p);

  /* Hash the SCC occupying [FIRST, FIRST + SIZE) of SCCSTACK.  */
  hashval_t
  hash_scc (struct output_block *ob, unsigned first, unsigned size,
	    bool ref_p, bool this_ref_p);

  /* Map from tree node to its Tarjan state.  */
  hash_map<tree, sccs *> sccstate;
  /* Pending work items; the simulated call stack.  */
  vec<worklist> worklist_vec;
  /* Allocation arena for sccs records.  */
  struct obstack sccstate_obstack;
};
531
/* Emit the physical representation of tree node EXPR to output block OB,
   using depth-first search on the subgraph.  If THIS_REF_P is true, the
   leaves of EXPR are emitted as references via lto_output_tree_ref.
   REF_P is used for streaming siblings of EXPR.  If SINGLE_P is true,
   this is for a rewalk of a single leaf SCC.

   This is an iterative implementation of Tarjan's SCC algorithm:
   recursion is simulated with WORKLIST_VEC.  Each item is processed
   twice — once with a NULL cstate (first visit: push children), and
   once after its children are done (post-order: detect an SCC root and
   stream the component).  */

DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
	  bool single_p)
{
  unsigned int next_dfs_num = 1;
  sccstack.create (0);
  gcc_obstack_init (&sccstate_obstack);
  worklist_vec = vNULL;
  DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
  while (!worklist_vec.is_empty ())
    {
      worklist &w = worklist_vec.last ();
      expr = w.expr;
      sccs *from_state = w.from_state;
      sccs *cstate = w.cstate;
      ref_p = w.ref_p;
      this_ref_p = w.this_ref_p;
      if (cstate == NULL)
	{
	  /* First visit of this work item.  If the node was already
	     visited elsewhere, just update the parent's low-link.  */
	  sccs **slot = &sccstate.get_or_insert (expr);
	  cstate = *slot;
	  if (cstate)
	    {
	      gcc_checking_assert (from_state);
	      if (cstate->dfsnum < from_state->dfsnum)
		from_state->low = MIN (cstate->dfsnum, from_state->low);
	      worklist_vec.pop ();
	      continue;
	    }

	  scc_entry e = { expr, 0 };
	  /* Not yet visited.  DFS recurse and push it onto the stack.  */
	  *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
	  sccstack.safe_push (e);
	  cstate->dfsnum = next_dfs_num++;
	  cstate->low = cstate->dfsnum;
	  /* Keep the item on the worklist for the post-order pass.  */
	  w.cstate = cstate;

	  if (TREE_CODE (expr) == INTEGER_CST
	      && !TREE_OVERFLOW (expr))
	    /* INTEGER_CSTs only have their type as an outgoing edge.  */
	    DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
	  else
	    {
	      DFS_write_tree_body (ob, expr, cstate, ref_p);

	      /* Walk any LTO-specific edges.  */
	      if (DECL_P (expr)
		  && TREE_CODE (expr) != FUNCTION_DECL
		  && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
		{
		  /* Handle DECL_INITIAL for symbols.  */
		  tree initial
		    = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
						expr);
		  DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
		}
	    }
	  continue;
	}

      /* Post-order pass.  See if we found an SCC.  */
      if (cstate->low == cstate->dfsnum)
	{
	  unsigned first, size;
	  tree x;

	  /* If we are re-walking a single leaf SCC just pop it,
	     let earlier worklist item access the sccstack.  */
	  if (single_p)
	    {
	      worklist_vec.pop ();
	      continue;
	    }

	  /* Pop the SCC and compute its size.  */
	  first = sccstack.length ();
	  do
	    {
	      x = sccstack[--first].t;
	    }
	  while (x != expr);
	  size = sccstack.length () - first;

	  /* No need to compute hashes for LTRANS units, we don't perform
	     any merging there.  */
	  hashval_t scc_hash = 0;
	  unsigned scc_entry_len = 0;
	  if (!flag_wpa)
	    {
	      scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);

	      /* Put the entries with the least number of collisions first.  */
	      unsigned entry_start = 0;
	      scc_entry_len = size + 1;
	      for (unsigned i = 0; i < size;)
		{
		  /* Skip over the run of entries sharing a hash value.  */
		  unsigned from = i;
		  for (i = i + 1; i < size
		       && (sccstack[first + i].hash
			   == sccstack[first + from].hash); ++i)
		    ;
		  if (i - from < scc_entry_len)
		    {
		      scc_entry_len = i - from;
		      entry_start = from;
		    }
		}
	      /* Rotate the least-colliding run to the front.  */
	      for (unsigned i = 0; i < scc_entry_len; ++i)
		std::swap (sccstack[first + i],
			   sccstack[first + entry_start + i]);

	      /* We already sorted SCC deterministically in hash_scc.  */

	      /* Check that we have only one SCC.
		 Naturally we may have conflicts if hash function is not
		 strong enough.  Let's see how far this gets.  */
	      gcc_checking_assert (scc_entry_len == 1);
	    }

	  /* Write LTO_tree_scc.  */
	  streamer_write_record_start (ob, LTO_tree_scc);
	  streamer_write_uhwi (ob, size);
	  streamer_write_uhwi (ob, scc_hash);

	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
	     All INTEGER_CSTs need to be handled this way as we need
	     their type to materialize them.  Also builtins are handled
	     this way.
	     ??? We still wrap these in LTO_tree_scc so at the
	     input side we can properly identify the tree we want
	     to ultimately return.  */
	  if (size == 1)
	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
	  else
	    {
	      /* Write the size of the SCC entry candidates.  */
	      streamer_write_uhwi (ob, scc_entry_len);

	      /* Write all headers and populate the streamer cache.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  hashval_t hash = sccstack[first+i].hash;
		  tree t = sccstack[first+i].t;
		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
							      t, hash, NULL);
		  gcc_assert (!exists_p);

		  if (!lto_is_streamable (t))
		    internal_error ("tree code %qs is not supported "
				    "in LTO streams",
				    get_tree_code_name (TREE_CODE (t)));

		  /* Write the header, containing everything needed to
		     materialize EXPR on the reading side.  */
		  streamer_write_tree_header (ob, t);
		}

	      /* Write the bitpacks and tree references.  */
	      for (unsigned i = 0; i < size; ++i)
		{
		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);

		  /* Mark the end of the tree.  */
		  streamer_write_zero (ob);
		}
	    }

	  /* Finally truncate the vector.  */
	  sccstack.truncate (first);

	  if (from_state)
	    from_state->low = MIN (from_state->low, cstate->low);
	  worklist_vec.pop ();
	  continue;
	}

      /* Not an SCC root; propagate the low-link up to the parent.  */
      gcc_checking_assert (from_state);
      from_state->low = MIN (from_state->low, cstate->low);
      if (cstate->dfsnum < from_state->dfsnum)
	from_state->low = MIN (cstate->dfsnum, from_state->low);
      worklist_vec.pop ();
    }
  worklist_vec.release ();
}
721
/* Release the stack and the per-node state arena acquired by the
   DFS walk.  */

DFS::~DFS ()
{
  sccstack.release ();
  obstack_free (&sccstate_obstack, NULL);
}
727
/* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
   DFS recurse for all tree edges originating from it.

   The set and order of edges followed here defines the stream layout;
   it must be kept in sync with the reader and with
   streamer_write_tree_body.  Edges are dispatched by the structures
   EXPR's tree code contains (CODE_CONTAINS_STRUCT).  */

void
DFS::DFS_write_tree_body (struct output_block *ob,
			  tree expr, sccs *expr_state, bool ref_p)
{
#define DFS_follow_tree_edge(DEST) \
  DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)

  enum tree_code code;

  code = TREE_CODE (expr);

  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (TREE_CODE (expr) != IDENTIFIER_NODE)
	DFS_follow_tree_edge (TREE_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      /* Only the encoded elements of the vector are streamed.  */
      unsigned int count = vector_cst_encoded_nelts (expr);
      for (unsigned int i = 0; i < count; ++i)
	DFS_follow_tree_edge (VECTOR_CST_ENCODED_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
    for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
      DFS_follow_tree_edge (POLY_INT_CST_COEFF (expr, i));

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      DFS_follow_tree_edge (TREE_REALPART (expr));
      DFS_follow_tree_edge (TREE_IMAGPART (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (expr)
	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
	  && anon_aggrname_p (DECL_NAME (expr)))
	;
      else
	DFS_follow_tree_edge (DECL_NAME (expr));
      /* Decls without a context are anchored at the single
	 TRANSLATION_UNIT_DECL instead.  */
      if (TREE_CODE (expr) != TRANSLATION_UNIT_DECL
	  && ! DECL_CONTEXT (expr))
	DFS_follow_tree_edge ((*all_translation_units)[0]);
      else
	DFS_follow_tree_edge (DECL_CONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      DFS_follow_tree_edge (DECL_SIZE (expr));
      DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));

      /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
	 special handling in LTO, it must be handled by streamer hooks.  */

      DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));

      /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug
	 information for early inlining so drop it on the floor instead
	 of ICEing in dwarf2out.c.
	 We however use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
	 declarations which should be eliminated by decl merging.  Be sure
	 none leaks to this point.  */
      gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
      DFS_follow_tree_edge (DECL_ABSTRACT_ORIGIN (expr));

      if ((VAR_P (expr)
	   || TREE_CODE (expr) == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
      if (VAR_P (expr)
	  && DECL_HAS_DEBUG_EXPR_P (expr))
	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (TREE_CODE (expr) == TYPE_DECL)
	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      /* Make sure we don't inadvertently set the assembler name.  */
      if (DECL_ASSEMBLER_NAME_SET_P (expr))
	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
      DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
      DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
      DFS_follow_tree_edge (DECL_FCONTEXT (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      DFS_follow_tree_edge (DECL_VINDEX (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
      DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      DFS_follow_tree_edge (TYPE_SIZE (expr));
      DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
      DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
      DFS_follow_tree_edge (TYPE_NAME (expr));
      /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
	 reconstructed during fixup.  */
      /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
	 during fixup.  */
      DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
      DFS_follow_tree_edge (TYPE_CONTEXT (expr));
      /* TYPE_CANONICAL is re-computed during type merging, so no need
	 to follow it here.  */
      DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	DFS_follow_tree_edge (TYPE_VALUES (expr));
      else if (TREE_CODE (expr) == ARRAY_TYPE)
	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
      else if (RECORD_OR_UNION_TYPE_P (expr))
	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
	  DFS_follow_tree_edge (t);
      else if (TREE_CODE (expr) == FUNCTION_TYPE
	       || TREE_CODE (expr) == METHOD_TYPE)
	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));

      /* NOTE(review): the TYPE_MIN_VALUE_RAW slot is skipped for
	 pointer types — presumably because that slot is reused for
	 other data there; confirm against tree.h.  */
      if (!POINTER_TYPE_P (expr))
	DFS_follow_tree_edge (TYPE_MIN_VALUE_RAW (expr));
      DFS_follow_tree_edge (TYPE_MAX_VALUE_RAW (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      DFS_follow_tree_edge (TREE_PURPOSE (expr));
      DFS_follow_tree_edge (TREE_VALUE (expr));
      DFS_follow_tree_edge (TREE_CHAIN (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    {
      for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
      DFS_follow_tree_edge (TREE_BLOCK (expr));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
    {
      for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
	if (VAR_OR_FUNCTION_DECL_P (t)
	    && DECL_EXTERNAL (t))
	  /* We have to stream externals in the block chain as
	     non-references.  See also
	     tree-streamer-out.c:streamer_write_chain.  */
	  DFS_write_tree (ob, expr_state, t, ref_p, false);
	else
	  DFS_follow_tree_edge (t);

      DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));

      /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
	 handle - those that represent inlined function scopes.
	 For the rest, drop them on the floor instead of ICEing
	 in dwarf2out.c, but keep the notion of whether the block
	 is an inlined block by referring to itself for the sake of
	 tree_nonartificial_location.  */
      if (inlined_function_outer_scope_p (expr))
	{
	  tree ultimate_origin = block_ultimate_origin (expr);
	  DFS_follow_tree_edge (ultimate_origin);
	}
      else if (BLOCK_ABSTRACT_ORIGIN (expr))
	DFS_follow_tree_edge (expr);
      /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
	 information for early inlined BLOCKs so drop it on the floor instead
	 of ICEing in dwarf2out.c.  */

      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
	 streaming time.  */

      /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree t;

      /* Note that the number of BINFO slots has already been emitted in
	 EXPR's header (see streamer_write_tree_header) because this length
	 is needed to build the empty BINFO node on the reader side.  */
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
	DFS_follow_tree_edge (t);
      DFS_follow_tree_edge (BINFO_OFFSET (expr));
      DFS_follow_tree_edge (BINFO_VTABLE (expr));
      DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));

      /* The number of BINFO_BASE_ACCESSES has already been emitted in
	 EXPR's bitfield section.  */
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
	DFS_follow_tree_edge (t);

      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;

      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
	{
	  DFS_follow_tree_edge (index);
	  DFS_follow_tree_edge (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
      DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
    }

#undef DFS_follow_tree_edge
}
977
978 /* Return a hash value for the tree T.
979    CACHE holds hash values of trees outside the current SCC.  MAP, if non-NULL,
980    may hold hash values of trees inside the current SCC.  */
981
static hashval_t
hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
{
  inchash::hash hstate;

  /* Mix in a hash for the tree edge target SIBLING: 0 for a NULL edge,
     the final cached hash when SIBLING lies outside the current SCC,
     the tentative per-SCC hash from MAP when available, and the
     constant 1 otherwise (first pass, no MAP yet).  */
#define visit(SIBLING) \
  do { \
    unsigned ix; \
    if (!SIBLING) \
      hstate.add_int (0); \
    else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
      hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
    else if (map) \
      hstate.add_int (*map->get (SIBLING)); \
    else \
      hstate.add_int (1); \
  } while (0)

  /* Hash TS_BASE: the tree code and the common flag bits.  The set of
     flags hashed here must mirror what the streamer writes for
     TS_BASE so equal streams get equal hashes.  */
  enum tree_code code = TREE_CODE (t);
  hstate.add_int (code);
  if (!TYPE_P (t))
    {
      hstate.add_flag (TREE_SIDE_EFFECTS (t));
      hstate.add_flag (TREE_CONSTANT (t));
      hstate.add_flag (TREE_READONLY (t));
      hstate.add_flag (TREE_PUBLIC (t));
    }
  hstate.add_flag (TREE_ADDRESSABLE (t));
  hstate.add_flag (TREE_THIS_VOLATILE (t));
  if (DECL_P (t))
    hstate.add_flag (DECL_UNSIGNED (t));
  else if (TYPE_P (t))
    hstate.add_flag (TYPE_UNSIGNED (t));
  if (TYPE_P (t))
    hstate.add_flag (TYPE_ARTIFICIAL (t));
  else
    hstate.add_flag (TREE_NO_WARNING (t));
  hstate.add_flag (TREE_NOTHROW (t));
  hstate.add_flag (TREE_STATIC (t));
  hstate.add_flag (TREE_PROTECTED (t));
  hstate.add_flag (TREE_DEPRECATED (t));
  if (code != TREE_BINFO)
    hstate.add_flag (TREE_PRIVATE (t));
  if (TYPE_P (t))
    {
      hstate.add_flag (AGGREGATE_TYPE_P (t)
		       ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
      hstate.add_flag (TYPE_ADDR_SPACE (t));
    }
  else if (code == SSA_NAME)
    hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
  hstate.commit_flag ();

  /* Hash the constant value payloads.  */
  if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
    hstate.add_wide_int (wi::to_widest (t));

  if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
    {
      REAL_VALUE_TYPE r = TREE_REAL_CST (t);
      hstate.add_flag (r.cl);
      hstate.add_flag (r.sign);
      hstate.add_flag (r.signalling);
      hstate.add_flag (r.canonical);
      hstate.commit_flag ();
      hstate.add_int (r.uexp);
      hstate.add (r.sig, sizeof (r.sig));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
    {
      FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
      hstate.add_int (f.mode);
      hstate.add_int (f.data.low);
      hstate.add_int (f.data.high);
    }

  /* Hash the declaration bits (TS_DECL_COMMON).  */
  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      hstate.add_hwi (DECL_MODE (t));
      hstate.add_flag (DECL_NONLOCAL (t));
      hstate.add_flag (DECL_VIRTUAL_P (t));
      hstate.add_flag (DECL_IGNORED_P (t));
      hstate.add_flag (DECL_ABSTRACT_P (t));
      hstate.add_flag (DECL_ARTIFICIAL (t));
      hstate.add_flag (DECL_USER_ALIGN (t));
      hstate.add_flag (DECL_PRESERVE_P (t));
      hstate.add_flag (DECL_EXTERNAL (t));
      hstate.add_flag (DECL_GIMPLE_REG_P (t));
      hstate.commit_flag ();
      hstate.add_int (DECL_ALIGN (t));
      if (code == LABEL_DECL)
	{
	  hstate.add_int (EH_LANDING_PAD_NR (t));
	  hstate.add_int (LABEL_DECL_UID (t));
	}
      else if (code == FIELD_DECL)
	{
	  hstate.add_flag (DECL_PACKED (t));
	  hstate.add_flag (DECL_NONADDRESSABLE_P (t));
	  hstate.add_flag (DECL_PADDING_P (t));
	  hstate.add_int (DECL_OFFSET_ALIGN (t));
	}
      else if (code == VAR_DECL)
	{
	  hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
	  hstate.add_flag (DECL_NONLOCAL_FRAME (t));
	}
      if (code == RESULT_DECL
	  || code == PARM_DECL
	  || code == VAR_DECL)
	{
	  hstate.add_flag (DECL_BY_REFERENCE (t));
	  if (code == VAR_DECL
	      || code == PARM_DECL)
	    hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
	}
      hstate.commit_flag ();
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
    hstate.add_int (DECL_REGISTER (t));

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      hstate.add_flag (DECL_COMMON (t));
      hstate.add_flag (DECL_DLLIMPORT_P (t));
      hstate.add_flag (DECL_WEAK (t));
      hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
      hstate.add_flag (DECL_COMDAT (t));
      hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
      hstate.add_int (DECL_VISIBILITY (t));
      if (code == VAR_DECL)
	{
	  /* DECL_IN_TEXT_SECTION is set during final asm output only.  */
	  hstate.add_flag (DECL_HARD_REGISTER (t));
	  hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
	}
      if (TREE_CODE (t) == FUNCTION_DECL)
	{
	  hstate.add_flag (DECL_FINAL_P (t));
	  hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
	  hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
	}
      hstate.commit_flag ();
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      hstate.add_int (DECL_BUILT_IN_CLASS (t));
      hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
      hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
      hstate.add_flag (DECL_UNINLINABLE (t));
      hstate.add_flag (DECL_POSSIBLY_INLINED (t));
      hstate.add_flag (DECL_IS_NOVOPS (t));
      hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
      hstate.add_flag (DECL_IS_MALLOC (t));
      hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
      hstate.add_flag (DECL_DECLARED_INLINE_P (t));
      hstate.add_flag (DECL_STATIC_CHAIN (t));
      hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
      hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
      hstate.add_flag (DECL_NO_LIMIT_STACK (t));
      hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
      hstate.add_flag (DECL_PURE_P (t));
      hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
      hstate.commit_flag ();
      if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
	hstate.add_int (DECL_FUNCTION_CODE (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      hstate.add_hwi (TYPE_MODE (t));
      hstate.add_flag (TYPE_STRING_FLAG (t));
      /* TYPE_NO_FORCE_BLK is private to stor-layout and need
	 no streaming.  */
      hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
      hstate.add_flag (TYPE_PACKED (t));
      hstate.add_flag (TYPE_RESTRICT (t));
      hstate.add_flag (TYPE_USER_ALIGN (t));
      hstate.add_flag (TYPE_READONLY (t));
      if (RECORD_OR_UNION_TYPE_P (t))
	{
	  hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
	  hstate.add_flag (TYPE_FINAL_P (t));
	}
      else if (code == ARRAY_TYPE)
	hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
      if (AGGREGATE_TYPE_P (t))
	hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
      hstate.commit_flag ();
      hstate.add_int (TYPE_PRECISION (t));
      hstate.add_int (TYPE_ALIGN (t));
      hstate.add_int (TYPE_EMPTY_P (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
    hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
		strlen (TRANSLATION_UNIT_LANGUAGE (t)));

  if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
      /* We don't stream these when passing things to a different target.  */
      && !lto_stream_offload_p)
    hstate.add_hwi (cl_target_option_hash (TREE_TARGET_OPTION (t)));

  if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
    hstate.add_hwi (cl_optimization_hash (TREE_OPTIMIZATION (t)));

  if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
    hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));

  if (CODE_CONTAINS_STRUCT (code, TS_STRING))
    hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));

  /* From here on the hash follows outgoing tree edges via visit ().  */
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    {
      if (code != IDENTIFIER_NODE)
	visit (TREE_TYPE (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
    {
      unsigned int count = vector_cst_encoded_nelts (t);
      for (unsigned int i = 0; i < count; ++i)
	visit (VECTOR_CST_ENCODED_ELT (t, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
    for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
      visit (POLY_INT_CST_COEFF (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
    {
      visit (TREE_REALPART (t));
      visit (TREE_IMAGPART (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
    {
      /* Drop names that were created for anonymous entities.  */
      if (DECL_NAME (t)
	  && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
	  && anon_aggrname_p (DECL_NAME (t)))
	;
      else
	visit (DECL_NAME (t));
      if (DECL_FILE_SCOPE_P (t))
	;
      else
	visit (DECL_CONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
    {
      visit (DECL_SIZE (t));
      visit (DECL_SIZE_UNIT (t));
      visit (DECL_ATTRIBUTES (t));
      if ((code == VAR_DECL
	   || code == PARM_DECL)
	  && DECL_HAS_VALUE_EXPR_P (t))
	visit (DECL_VALUE_EXPR (t));
      if (code == VAR_DECL
	  && DECL_HAS_DEBUG_EXPR_P (t))
	visit (DECL_DEBUG_EXPR (t));
      /* ??? Hash DECL_INITIAL as streamed.  Needs the output-block to
         be able to call get_symbol_initial_value.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
    {
      if (code == TYPE_DECL)
	visit (DECL_ORIGINAL_TYPE (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
    {
      if (DECL_ASSEMBLER_NAME_SET_P (t))
	visit (DECL_ASSEMBLER_NAME (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
    {
      visit (DECL_FIELD_OFFSET (t));
      visit (DECL_BIT_FIELD_TYPE (t));
      visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
      visit (DECL_FIELD_BIT_OFFSET (t));
      visit (DECL_FCONTEXT (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
    {
      visit (DECL_VINDEX (t));
      visit (DECL_FUNCTION_PERSONALITY (t));
      visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
      visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
    {
      visit (TYPE_SIZE (t));
      visit (TYPE_SIZE_UNIT (t));
      visit (TYPE_ATTRIBUTES (t));
      visit (TYPE_NAME (t));
      visit (TYPE_MAIN_VARIANT (t));
      if (TYPE_FILE_SCOPE_P (t))
	;
      else
	visit (TYPE_CONTEXT (t));
      visit (TYPE_STUB_DECL (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
    {
      if (code == ENUMERAL_TYPE)
	visit (TYPE_VALUES (t));
      else if (code == ARRAY_TYPE)
	visit (TYPE_DOMAIN (t));
      else if (RECORD_OR_UNION_TYPE_P (t))
	for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
	  visit (f);
      else if (code == FUNCTION_TYPE
	       || code == METHOD_TYPE)
	visit (TYPE_ARG_TYPES (t));
      if (!POINTER_TYPE_P (t))
	visit (TYPE_MIN_VALUE_RAW (t));
      visit (TYPE_MAX_VALUE_RAW (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_LIST))
    {
      visit (TREE_PURPOSE (t));
      visit (TREE_VALUE (t));
      visit (TREE_CHAIN (t));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_VEC))
    for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
      visit (TREE_VEC_ELT (t, i));

  if (CODE_CONTAINS_STRUCT (code, TS_EXP))
    {
      hstate.add_hwi (TREE_OPERAND_LENGTH (t));
      for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
	visit (TREE_OPERAND (t, i));
    }

  if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
    {
      unsigned i;
      tree b;
      FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
	visit (b);
      visit (BINFO_OFFSET (t));
      visit (BINFO_VTABLE (t));
      visit (BINFO_VPTR_FIELD (t));
      FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
	visit (b);
      /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
    }

  if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
    {
      unsigned i;
      tree index, value;
      hstate.add_hwi (CONSTRUCTOR_NELTS (t));
      FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
	{
	  visit (index);
	  visit (value);
	}
    }

  if (code == OMP_CLAUSE)
    {
      int i;
      HOST_WIDE_INT val;

      hstate.add_hwi (OMP_CLAUSE_CODE (t));
      /* Fold the clause sub-kind into the hash; clauses without a
	 sub-kind contribute the constant zero.  */
      switch (OMP_CLAUSE_CODE (t))
	{
	case OMP_CLAUSE_DEFAULT:
	  val = OMP_CLAUSE_DEFAULT_KIND (t);
	  break;
	case OMP_CLAUSE_SCHEDULE:
	  val = OMP_CLAUSE_SCHEDULE_KIND (t);
	  break;
	case OMP_CLAUSE_DEPEND:
	  val = OMP_CLAUSE_DEPEND_KIND (t);
	  break;
	case OMP_CLAUSE_MAP:
	  val = OMP_CLAUSE_MAP_KIND (t);
	  break;
	case OMP_CLAUSE_PROC_BIND:
	  val = OMP_CLAUSE_PROC_BIND_KIND (t);
	  break;
	case OMP_CLAUSE_REDUCTION:
	  val = OMP_CLAUSE_REDUCTION_CODE (t);
	  break;
	default:
	  val = 0;
	  break;
	}
      hstate.add_hwi (val);
      for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
	visit (OMP_CLAUSE_OPERAND (t, i));
      visit (OMP_CLAUSE_CHAIN (t));
    }

  return hstate.end ();

#undef visit
}
1396
1397 /* Compare two SCC entries by their hash value for qsorting them. */
1398
1399 int
scc_entry_compare(const void * p1_,const void * p2_)1400 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1401 {
1402 const scc_entry *p1 = (const scc_entry *) p1_;
1403 const scc_entry *p2 = (const scc_entry *) p2_;
1404 if (p1->hash < p2->hash)
1405 return -1;
1406 else if (p1->hash > p2->hash)
1407 return 1;
1408 return 0;
1409 }
1410
1411 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1412 THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST. */
1413
hashval_t
DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
	       bool ref_p, bool this_ref_p)
{
  unsigned int last_classes = 0, iterations = 0;

  /* Compute hash values for the SCC members.  */
  for (unsigned i = 0; i < size; ++i)
    sccstack[first+i].hash
      = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);

  /* A singleton SCC needs no canonicalization.  */
  if (size == 1)
    return sccstack[first].hash;

  /* We aim to get unique hash for every tree within SCC and compute hash value
     of the whole SCC by combining all values together in a stable (entry-point
     independent) order.  This guarantees that the same SCC regions within
     different translation units will get the same hash values and therefore
     will be merged at WPA time.

     Often the hashes are already unique.  In that case we compute the SCC hash
     by combining individual hash values in an increasing order.

     If there are duplicates, we seek at least one tree with unique hash (and
     pick one with minimal hash and this property).  Then we obtain a stable
     order by DFS walk starting from this unique tree and then use the index
     within this order to make individual hash values unique.

     If there is no tree with unique hash, we iteratively propagate the hash
     values across the internal edges of SCC.  This usually quickly leads
     to unique hashes.  Consider, for example, an SCC containing two pointers
     that are identical except for the types they point to and assume that
     these types are also part of the SCC.  The propagation will add the
     points-to type information into their hash values.  */
  do
    {
      /* Sort the SCC so we can easily check for uniqueness.  */
      qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);

      unsigned int classes = 1;
      int firstunique = -1;

      /* Find the tree with lowest unique hash (if it exists) and compute
	 the number of equivalence classes.  */
      if (sccstack[first].hash != sccstack[first+1].hash)
	firstunique = 0;
      for (unsigned i = 1; i < size; ++i)
	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
	  {
	    classes++;
	    if (firstunique == -1
		&& (i == size - 1
		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
	      firstunique = i;
	  }

      /* If we found a tree with unique hash, stop the iteration.  */
      if (firstunique != -1
	  /* Also terminate if we run out of iterations or if the number of
	     equivalence classes is no longer increasing.
	     For example a cyclic list of trees that are all equivalent will
	     never have unique entry point; we however do not build such SCCs
	     in our IL.  */
	  || classes <= last_classes || iterations > 16)
	{
	  hashval_t scc_hash;

	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
	     starting from FIRSTUNIQUE to obtain a stable order.  */
	  if (classes != size && firstunique != -1)
	    {
	      hash_map <tree, hashval_t> map(size*2);

	      /* Store hash values into a map, so we can associate them with
		 the reordered SCC.  */
	      for (unsigned i = 0; i < size; ++i)
		map.put (sccstack[first+i].t, sccstack[first+i].hash);

	      /* Re-walk the SCC from the unique entry point; the walk
		 produces the SCC members in a deterministic order.  */
	      DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
			 true);
	      gcc_assert (again.sccstack.length () == size);

	      memcpy (sccstack.address () + first,
		      again.sccstack.address (),
		      sizeof (scc_entry) * size);

	      /* Update hash values of individual members by hashing in the
		 index within the stable order.  This ensures uniqueness.
		 Also compute the SCC hash by mixing in all hash values in
		 the stable order we obtained.  */
	      sccstack[first].hash = *map.get (sccstack[first].t);
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		{
		  sccstack[first+i].hash
		    = iterative_hash_hashval_t (i,
						*map.get (sccstack[first+i].t));
		  scc_hash
		    = iterative_hash_hashval_t (scc_hash,
						sccstack[first+i].hash);
		}
	    }
	  /* If we got a unique hash value for each tree, then sort already
	     ensured entry-point independent order.  Only compute the final
	     SCC hash.

	     If we failed to find the unique entry point, we go by the same
	     route.  We will eventually introduce unwanted hash conflicts.  */
	  else
	    {
	      scc_hash = sccstack[first].hash;
	      for (unsigned i = 1; i < size; ++i)
		scc_hash
		  = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);

	      /* We cannot 100% guarantee that the hash won't conflict so as
		 to make it impossible to find a unique hash.  This however
		 should be an extremely rare case.  ICE for now so possible
		 issues are found and evaluated.  */
	      gcc_checking_assert (classes == size);
	    }

	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
	     hash into the hash of each element.  */
	  for (unsigned i = 0; i < size; ++i)
	    sccstack[first+i].hash
	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
	  return scc_hash;
	}

      last_classes = classes;
      iterations++;

      /* We failed to identify the entry point; propagate hash values across
	 the edges.  */
      hash_map <tree, hashval_t> map(size*2);

      for (unsigned i = 0; i < size; ++i)
	map.put (sccstack[first+i].t, sccstack[first+i].hash);

      /* Re-hash each member with the tentative hashes of its SCC-internal
	 neighbors folded in (MAP is consulted by hash_tree's visit).  */
      for (unsigned i = 0; i < size; i++)
	sccstack[first+i].hash
	  = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
    }
  while (true);
}
1560
1561 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1562 already in the streamer cache. Main routine called for
1563 each visit of EXPR. */
1564
1565 void
DFS_write_tree(struct output_block * ob,sccs * from_state,tree expr,bool ref_p,bool this_ref_p)1566 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1567 tree expr, bool ref_p, bool this_ref_p)
1568 {
1569 /* Handle special cases. */
1570 if (expr == NULL_TREE)
1571 return;
1572
1573 /* Do not DFS walk into indexable trees. */
1574 if (this_ref_p && tree_is_indexable (expr))
1575 return;
1576
1577 /* Check if we already streamed EXPR. */
1578 if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1579 return;
1580
1581 worklist w;
1582 w.expr = expr;
1583 w.from_state = from_state;
1584 w.cstate = NULL;
1585 w.ref_p = ref_p;
1586 w.this_ref_p = this_ref_p;
1587 worklist_vec.safe_push (w);
1588 }
1589
1590
1591 /* Emit the physical representation of tree node EXPR to output block OB.
1592 If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1593 lto_output_tree_ref. REF_P is used for streaming siblings of EXPR. */
1594
void
lto_output_tree (struct output_block *ob, tree expr,
		 bool ref_p, bool this_ref_p)
{
  unsigned ix;
  bool existed_p;

  /* A NULL tree is represented by a single LTO_null record.  */
  if (expr == NULL_TREE)
    {
      streamer_write_record_start (ob, LTO_null);
      return;
    }

  /* Indexable trees are written as references into their own
     sections instead of being pickled here.  */
  if (this_ref_p && tree_is_indexable (expr))
    {
      lto_output_tree_ref (ob, expr);
      return;
    }

  existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
  if (existed_p)
    {
      /* If a node has already been streamed out, make sure that
	 we don't write it more than once.  Otherwise, the reader
	 will instantiate two different nodes for the same object.  */
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
  else
    {
      /* This is the first time we see EXPR, write all reachable
	 trees to OB.  */
      static bool in_dfs_walk;

      /* Protect against recursion which means disconnect between
	 what tree edges we walk in the DFS walk and what edges
	 we stream out.  */
      gcc_assert (!in_dfs_walk);

      /* Start the DFS walk; the DFS constructor streams out all SCCs
	 of trees reachable from EXPR that are not yet in the cache.  */
      in_dfs_walk = true;
      DFS (ob, expr, ref_p, this_ref_p, false);
      in_dfs_walk = false;

      /* Finally append a reference to the tree we were writing.
	 ??? If expr ended up as a singleton we could have
	 inlined it here and avoid outputting a reference.  */
      existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
      gcc_assert (existed_p);
      streamer_write_record_start (ob, LTO_tree_pickle_reference);
      streamer_write_uhwi (ob, ix);
      streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
			   lto_tree_code_to_tag (TREE_CODE (expr)));
      lto_stats.num_pickle_refs_output++;
    }
}
1656
1657
1658 /* Output to OB a list of try/catch handlers starting with FIRST. */
1659
1660 static void
output_eh_try_list(struct output_block * ob,eh_catch first)1661 output_eh_try_list (struct output_block *ob, eh_catch first)
1662 {
1663 eh_catch n;
1664
1665 for (n = first; n; n = n->next_catch)
1666 {
1667 streamer_write_record_start (ob, LTO_eh_catch);
1668 stream_write_tree (ob, n->type_list, true);
1669 stream_write_tree (ob, n->filter_list, true);
1670 stream_write_tree (ob, n->label, true);
1671 }
1672
1673 streamer_write_record_start (ob, LTO_null);
1674 }
1675
1676
1677 /* Output EH region R in function FN to OB. CURR_RN is the slot index
1678 that is being emitted in FN->EH->REGION_ARRAY. This is used to
1679 detect EH region sharing. */
1680
1681 static void
output_eh_region(struct output_block * ob,eh_region r)1682 output_eh_region (struct output_block *ob, eh_region r)
1683 {
1684 enum LTO_tags tag;
1685
1686 if (r == NULL)
1687 {
1688 streamer_write_record_start (ob, LTO_null);
1689 return;
1690 }
1691
1692 if (r->type == ERT_CLEANUP)
1693 tag = LTO_ert_cleanup;
1694 else if (r->type == ERT_TRY)
1695 tag = LTO_ert_try;
1696 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1697 tag = LTO_ert_allowed_exceptions;
1698 else if (r->type == ERT_MUST_NOT_THROW)
1699 tag = LTO_ert_must_not_throw;
1700 else
1701 gcc_unreachable ();
1702
1703 streamer_write_record_start (ob, tag);
1704 streamer_write_hwi (ob, r->index);
1705
1706 if (r->outer)
1707 streamer_write_hwi (ob, r->outer->index);
1708 else
1709 streamer_write_zero (ob);
1710
1711 if (r->inner)
1712 streamer_write_hwi (ob, r->inner->index);
1713 else
1714 streamer_write_zero (ob);
1715
1716 if (r->next_peer)
1717 streamer_write_hwi (ob, r->next_peer->index);
1718 else
1719 streamer_write_zero (ob);
1720
1721 if (r->type == ERT_TRY)
1722 {
1723 output_eh_try_list (ob, r->u.eh_try.first_catch);
1724 }
1725 else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1726 {
1727 stream_write_tree (ob, r->u.allowed.type_list, true);
1728 stream_write_tree (ob, r->u.allowed.label, true);
1729 streamer_write_uhwi (ob, r->u.allowed.filter);
1730 }
1731 else if (r->type == ERT_MUST_NOT_THROW)
1732 {
1733 stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1734 bitpack_d bp = bitpack_create (ob->main_stream);
1735 stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1736 streamer_write_bitpack (&bp);
1737 }
1738
1739 if (r->landing_pads)
1740 streamer_write_hwi (ob, r->landing_pads->index);
1741 else
1742 streamer_write_zero (ob);
1743 }
1744
1745
1746 /* Output landing pad LP to OB. */
1747
1748 static void
output_eh_lp(struct output_block * ob,eh_landing_pad lp)1749 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1750 {
1751 if (lp == NULL)
1752 {
1753 streamer_write_record_start (ob, LTO_null);
1754 return;
1755 }
1756
1757 streamer_write_record_start (ob, LTO_eh_landing_pad);
1758 streamer_write_hwi (ob, lp->index);
1759 if (lp->next_lp)
1760 streamer_write_hwi (ob, lp->next_lp->index);
1761 else
1762 streamer_write_zero (ob);
1763
1764 if (lp->region)
1765 streamer_write_hwi (ob, lp->region->index);
1766 else
1767 streamer_write_zero (ob);
1768
1769 stream_write_tree (ob, lp->post_landing_pad, true);
1770 }
1771
1772
1773 /* Output the existing eh_table to OB. */
1774
static void
output_eh_regions (struct output_block *ob, struct function *fn)
{
  if (fn->eh && fn->eh->region_tree)
    {
      unsigned i;
      eh_region eh;
      eh_landing_pad lp;
      tree ttype;

      streamer_write_record_start (ob, LTO_eh_table);

      /* Emit the index of the root of the EH region tree.  */
      streamer_write_hwi (ob, fn->eh->region_tree->index);

      /* Emit all the EH regions in the region array.  Each array is
	 emitted as its length followed by its elements.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
	output_eh_region (ob, eh);

      /* Emit all landing pads.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
	output_eh_lp (ob, lp);

      /* Emit all the runtime type data.  */
      streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
      FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
	stream_write_tree (ob, ttype, true);

      /* Emit the table of action chains.  The representation differs
	 for the ARM EABI unwinder (trees) vs. others (raw bytes).  */
      if (targetm.arm_eabi_unwinder)
	{
	  tree t;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
	    stream_write_tree (ob, t, true);
	}
      else
	{
	  uchar c;
	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
	    streamer_write_char_stream (ob->main_stream, c);
	}
    }

  /* The LTO_null either terminates the record or indicates that there
     are no eh_records at all.  */
  streamer_write_record_start (ob, LTO_null);
}
1826
1827
1828 /* Output all of the active ssa names to the ssa_names stream. */
1829
static void
output_ssa_names (struct output_block *ob, struct function *fn)
{
  unsigned int i, len;

  /* Emit the length of the SSA-name array first so the reader can
     pre-size its table.  */
  len = vec_safe_length (SSANAMES (fn));
  streamer_write_uhwi (ob, len);

  /* Slot 0 is never a valid SSA name; start at 1.  */
  for (i = 1; i < len; i++)
    {
      tree ptr = (*SSANAMES (fn))[i];

      /* Skip freed names, virtual operands, and names whose defining
	 statement is gone or no longer in a basic block.  */
      if (ptr == NULL_TREE
	  || SSA_NAME_IN_FREE_LIST (ptr)
	  || virtual_operand_p (ptr)
	  /* Simply skip unreleased SSA names.  */
	  || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
	      && (! SSA_NAME_DEF_STMT (ptr)
		  || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
	continue;

      /* Each emitted name is its index, its default-def flag, and
	 either its variable or (failing that) its type.  */
      streamer_write_uhwi (ob, i);
      streamer_write_char_stream (ob->main_stream,
				  SSA_NAME_IS_DEFAULT_DEF (ptr));
      if (SSA_NAME_VAR (ptr))
	stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
      else
	/* ??? This drops SSA_NAME_IDENTIFIER on the floor.  */
	stream_write_tree (ob, TREE_TYPE (ptr), true);
    }

  /* Index zero terminates the list.  */
  streamer_write_zero (ob);
}
1863
1864
1865
1866 /* Output the cfg. */
1867
1868 static void
output_cfg(struct output_block * ob,struct function * fn)1869 output_cfg (struct output_block *ob, struct function *fn)
1870 {
1871 struct lto_output_stream *tmp_stream = ob->main_stream;
1872 basic_block bb;
1873
1874 ob->main_stream = ob->cfg_stream;
1875
1876 streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1877 profile_status_for_fn (fn));
1878
1879 /* Output the number of the highest basic block. */
1880 streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1881
1882 FOR_ALL_BB_FN (bb, fn)
1883 {
1884 edge_iterator ei;
1885 edge e;
1886
1887 streamer_write_hwi (ob, bb->index);
1888
1889 /* Output the successors and the edge flags. */
1890 streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1891 FOR_EACH_EDGE (e, ei, bb->succs)
1892 {
1893 streamer_write_uhwi (ob, e->dest->index);
1894 e->probability.stream_out (ob);
1895 streamer_write_uhwi (ob, e->flags);
1896 }
1897 }
1898
1899 streamer_write_hwi (ob, -1);
1900
1901 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1902 while (bb->next_bb)
1903 {
1904 streamer_write_hwi (ob, bb->next_bb->index);
1905 bb = bb->next_bb;
1906 }
1907
1908 streamer_write_hwi (ob, -1);
1909
1910 /* ??? The cfgloop interface is tied to cfun. */
1911 gcc_assert (cfun == fn);
1912
1913 /* Output the number of loops. */
1914 streamer_write_uhwi (ob, number_of_loops (fn));
1915
1916 /* Output each loop, skipping the tree root which has number zero. */
1917 for (unsigned i = 1; i < number_of_loops (fn); ++i)
1918 {
1919 struct loop *loop = get_loop (fn, i);
1920
1921 /* Write the index of the loop header. That's enough to rebuild
1922 the loop tree on the reader side. Stream -1 for an unused
1923 loop entry. */
1924 if (!loop)
1925 {
1926 streamer_write_hwi (ob, -1);
1927 continue;
1928 }
1929 else
1930 streamer_write_hwi (ob, loop->header->index);
1931
1932 /* Write everything copy_loop_info copies. */
1933 streamer_write_enum (ob->main_stream,
1934 loop_estimation, EST_LAST, loop->estimate_state);
1935 streamer_write_hwi (ob, loop->any_upper_bound);
1936 if (loop->any_upper_bound)
1937 streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
1938 streamer_write_hwi (ob, loop->any_likely_upper_bound);
1939 if (loop->any_likely_upper_bound)
1940 streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
1941 streamer_write_hwi (ob, loop->any_estimate);
1942 if (loop->any_estimate)
1943 streamer_write_widest_int (ob, loop->nb_iterations_estimate);
1944
1945 /* Write OMP SIMD related info. */
1946 streamer_write_hwi (ob, loop->safelen);
1947 streamer_write_hwi (ob, loop->unroll);
1948 streamer_write_hwi (ob, loop->owned_clique);
1949 streamer_write_hwi (ob, loop->dont_vectorize);
1950 streamer_write_hwi (ob, loop->force_vectorize);
1951 stream_write_tree (ob, loop->simduid, true);
1952 }
1953
1954 ob->main_stream = tmp_stream;
1955 }
1956
1957
/* Create the header in the file using OB.  If the section type is for
   a function, set FN to the decl for that function.  */

void
produce_asm (struct output_block *ob, tree fn)
{
  enum lto_section_type section_type = ob->section_type;
  struct lto_function_header header;
  char *section_name;

  /* Function bodies go into per-function sections named after the
     function's assembler name; other section types share one name.  */
  if (section_type == LTO_section_function_body)
    {
      const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
      section_name = lto_get_section_name (section_type, name, NULL);
    }
  else
    section_name = lto_get_section_name (section_type, NULL, NULL);

  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header is stream computed here.  */
  memset (&header, 0, sizeof (struct lto_function_header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  /* Only function bodies carry a CFG stream; for other sections
     cfg_size stays zero from the memset above.  */
  if (section_type == LTO_section_function_body)
    header.cfg_size = ob->cfg_stream->total_size;
  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  The order (CFG if present, main, strings) must
     match what the reader expects after the header.  */
  if (section_type == LTO_section_function_body)
    lto_write_stream (ob->cfg_stream);
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
}
2001
2002
/* Output the base body of struct function FN using output block OB.
   NOTE: the order of writes here (trees, lengths, then the bitpack
   fields) is the wire format; the reader consumes them in exactly
   this sequence.  */

static void
output_struct_function_base (struct output_block *ob, struct function *fn)
{
  struct bitpack_d bp;
  unsigned i;
  tree t;

  /* Output the static chain and non-local goto save area.  */
  stream_write_tree (ob, fn->static_chain_decl, true);
  stream_write_tree (ob, fn->nonlocal_goto_save_area, true);

  /* Output all the local variables in the function: a count followed
     by the decls themselves.  */
  streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
  FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
    stream_write_tree (ob, t, true);

  /* Output current IL state of the function.  */
  streamer_write_uhwi (ob, fn->curr_properties);

  /* Write all the attributes for FN.  Single-bit flags first, then
     the multi-bit fields, all packed into one bitpack.  */
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, fn->is_thunk, 1);
  bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
  bp_pack_value (&bp, fn->returns_pcc_struct, 1);
  bp_pack_value (&bp, fn->returns_struct, 1);
  bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
  bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
  bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
  bp_pack_value (&bp, fn->after_inlining, 1);
  bp_pack_value (&bp, fn->stdarg, 1);
  bp_pack_value (&bp, fn->has_nonlocal_label, 1);
  bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
  bp_pack_value (&bp, fn->calls_alloca, 1);
  bp_pack_value (&bp, fn->calls_setjmp, 1);
  bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
  bp_pack_value (&bp, fn->has_simduid_loops, 1);
  bp_pack_value (&bp, fn->va_list_fpr_size, 8);
  bp_pack_value (&bp, fn->va_list_gpr_size, 8);
  bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);

  /* Output the function start and end loci.  */
  stream_output_location (ob, &bp, fn->function_start_locus);
  stream_output_location (ob, &bp, fn->function_end_locus);

  streamer_write_bitpack (&bp);
}
2051
2052
2053 /* Collect all leaf BLOCKs beyond ROOT into LEAFS. */
2054
2055 static void
collect_block_tree_leafs(tree root,vec<tree> & leafs)2056 collect_block_tree_leafs (tree root, vec<tree> &leafs)
2057 {
2058 for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2059 if (! BLOCK_SUBBLOCKS (root))
2060 leafs.safe_push (root);
2061 else
2062 collect_block_tree_leafs (BLOCK_SUBBLOCKS (root), leafs);
2063 }
2064
/* Output the body of function NODE->DECL.  Serializes the function
   header, block tree, base struct-function data, SSA names, EH
   regions and all basic blocks into a fresh LTO function-body
   section, then tears the output block down again.  */

static void
output_function (struct cgraph_node *node)
{
  tree function;
  struct function *fn;
  basic_block bb;
  struct output_block *ob;

  function = node->decl;
  fn = DECL_STRUCT_FUNCTION (function);
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  /* We must not already be inside a function when streaming one.  */
  gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);

  /* Set current_function_decl and cfun.  */
  push_cfun (fn);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  streamer_write_record_start (ob, LTO_function);

  /* Output decls for parameters and args.  */
  stream_write_tree (ob, DECL_RESULT (function), true);
  streamer_write_chain (ob, DECL_ARGUMENTS (function), true);

  /* Output debug args if available.  A leading count of 0 means
     "none" on the reader side.  */
  vec<tree, va_gc> **debugargs = decl_debug_args_lookup (function);
  if (! debugargs)
    streamer_write_uhwi (ob, 0);
  else
    {
      streamer_write_uhwi (ob, (*debugargs)->length ());
      for (unsigned i = 0; i < (*debugargs)->length (); ++i)
	stream_write_tree (ob, (**debugargs)[i], true);
    }

  /* Output DECL_INITIAL for the function, which contains the tree of
     lexical scopes.  */
  stream_write_tree (ob, DECL_INITIAL (function), true);
  /* As we do not recurse into BLOCK_SUBBLOCKS but only BLOCK_SUPERCONTEXT
     collect block tree leafs and stream those.  */
  auto_vec<tree> block_tree_leafs;
  if (DECL_INITIAL (function))
    collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
  streamer_write_uhwi (ob, block_tree_leafs.length ());
  for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
    stream_write_tree (ob, block_tree_leafs[i], true);

  /* We also stream abstract functions where we stream only stuff needed for
     debug info.  The uhwi 1/0 below tells the reader whether a real
     body follows.  */
  if (gimple_has_body_p (function))
    {
      streamer_write_uhwi (ob, 1);
      output_struct_function_base (ob, fn);

      /* Output all the SSA names used in the function.  */
      output_ssa_names (ob, fn);

      /* Output any exception handling regions.  */
      output_eh_regions (ob, fn);


      /* We will renumber the statements.  The code that does this uses
	 the same ordering that we use for serializing them so we can use
	 the same code on the other end and not have to write out the
	 statement numbers.  We do not assign UIDs to PHIs here because
	 virtual PHIs get re-computed on-the-fly which would make numbers
	 inconsistent.  */
      set_gimple_stmt_max_uid (cfun, 0);
      FOR_ALL_BB_FN (bb, cfun)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *stmt = gsi.phi ();

	      /* Virtual PHIs are not going to be streamed.  */
	      if (!virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gimple *stmt = gsi_stmt (gsi);
	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}
      /* To avoid keeping duplicate gimple IDs in the statements, renumber
	 virtual phis now.  */
      FOR_ALL_BB_FN (bb, cfun)
	{
	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	       gsi_next (&gsi))
	    {
	      gphi *stmt = gsi.phi ();
	      if (virtual_operand_p (gimple_phi_result (stmt)))
	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
	    }
	}

      /* Output the code for the function.  */
      FOR_ALL_BB_FN (bb, fn)
	output_bb (ob, bb, fn);

      /* The terminator for this function.  */
      streamer_write_record_start (ob, LTO_null);

      output_cfg (ob, fn);

      pop_cfun ();
   }
  else
    streamer_write_uhwi (ob, 0);

  /* Create a section to hold the pickled output of this function.  */
  produce_asm (ob, function);

  destroy_output_block (ob);
}
2190
/* Output the initializer (constructor) of variable NODE->DECL into
   its own function-body section.  This mirrors output_function but
   only streams DECL_INITIAL.  */

static void
output_constructor (struct varpool_node *node)
{
  tree var = node->decl;
  struct output_block *ob;

  /* Variable initializers share the function-body section type.  */
  ob = create_output_block (LTO_section_function_body);

  clear_line_info (ob);
  ob->symbol = node;

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* Output DECL_INITIAL for the variable, which contains the
     initializer expression.  */
  stream_write_tree (ob, DECL_INITIAL (var), true);

  /* Create a section to hold the pickled output of this initializer.  */
  produce_asm (ob, var);

  destroy_output_block (ob);
}
2216
2217
/* Emit toplevel asms.  Streams every top-level asm statement (string
   plus symtab order) into the LTO asm section; does nothing when the
   unit has no toplevel asms.  */

void
lto_output_toplevel_asms (void)
{
  struct output_block *ob;
  struct asm_node *can;
  char *section_name;
  struct lto_simple_header_with_strings header;

  /* Nothing to do if there are no toplevel asm statements.  */
  if (!symtab->first_asm_symbol ())
    return;

  ob = create_output_block (LTO_section_asm);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  for (can = symtab->first_asm_symbol (); can; can = can->next)
    {
      streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
      streamer_write_hwi (ob, can->order);
    }

  /* NULL_TREE terminates the list of asm records for the reader.  */
  streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);

  section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  destroy_output_block (ob);
}
2268
2269
2270 /* Copy the function body or variable constructor of NODE without deserializing. */
2271
2272 static void
copy_function_or_variable(struct symtab_node * node)2273 copy_function_or_variable (struct symtab_node *node)
2274 {
2275 tree function = node->decl;
2276 struct lto_file_decl_data *file_data = node->lto_file_data;
2277 const char *data;
2278 size_t len;
2279 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2280 char *section_name =
2281 lto_get_section_name (LTO_section_function_body, name, NULL);
2282 size_t i, j;
2283 struct lto_in_decl_state *in_state;
2284 struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2285
2286 lto_begin_section (section_name, false);
2287 free (section_name);
2288
2289 /* We may have renamed the declaration, e.g., a static function. */
2290 name = lto_get_decl_name_mapping (file_data, name);
2291
2292 data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2293 name, &len);
2294 gcc_assert (data);
2295
2296 /* Do a bit copy of the function body. */
2297 lto_write_raw_data (data, len);
2298
2299 /* Copy decls. */
2300 in_state =
2301 lto_get_function_in_decl_state (node->lto_file_data, function);
2302 out_state->compressed = in_state->compressed;
2303 gcc_assert (in_state);
2304
2305 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2306 {
2307 size_t n = vec_safe_length (in_state->streams[i]);
2308 vec<tree, va_gc> *trees = in_state->streams[i];
2309 struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2310
2311 /* The out state must have the same indices and the in state.
2312 So just copy the vector. All the encoders in the in state
2313 must be empty where we reach here. */
2314 gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2315 encoder->trees.reserve_exact (n);
2316 for (j = 0; j < n; j++)
2317 encoder->trees.safe_push ((*trees)[j]);
2318 }
2319
2320 lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2321 data, len);
2322 lto_end_section ();
2323 }
2324
/* Wrap symbol references in *TP inside a type-preserving MEM_REF.
   walk_tree callback: rewrites handled components whose base is a
   public VAR_DECL so the base is accessed through
   MEM_REF[&decl, 0] of the decl's own type, preserving the access
   type across symbol merging.  Sets *WS to 0 to stop the walk from
   descending where no further wrapping can be needed.  */

static tree
wrap_refs (tree *tp, int *ws, void *)
{
  tree t = *tp;
  if (handled_component_p (t)
      && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
      && TREE_PUBLIC (TREE_OPERAND (t, 0)))
    {
      tree decl = TREE_OPERAND (t, 0);
      tree ptrtype = build_pointer_type (TREE_TYPE (decl));
      TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
				    build1 (ADDR_EXPR, ptrtype, decl),
				    build_int_cst (ptrtype, 0));
      /* Carry over volatility of the original decl access.  */
      TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
      *ws = 0;
    }
  else if (TREE_CODE (t) == CONSTRUCTOR)
    /* Keep walking into constructor elements.  */
    ;
  else if (!EXPR_P (t))
    /* Non-expressions cannot contain component refs; prune.  */
    *ws = 0;
  return NULL_TREE;
}
2349
2350 /* Remove functions that are no longer used from offload_funcs, and mark the
2351 remaining ones with DECL_PRESERVE_P. */
2352
2353 static void
prune_offload_funcs(void)2354 prune_offload_funcs (void)
2355 {
2356 if (!offload_funcs)
2357 return;
2358
2359 unsigned int write_index = 0;
2360 for (unsigned read_index = 0; read_index < vec_safe_length (offload_funcs);
2361 read_index++)
2362 {
2363 tree fn_decl = (*offload_funcs)[read_index];
2364 bool remove_p = cgraph_node::get (fn_decl) == NULL;
2365 if (remove_p)
2366 continue;
2367
2368 DECL_PRESERVE_P (fn_decl) = 1;
2369
2370 if (write_index != read_index)
2371 (*offload_funcs)[write_index] = (*offload_funcs)[read_index];
2372
2373 write_index++;
2374 }
2375
2376 offload_funcs->truncate (write_index);
2377 }
2378
2379 /* Main entry point from the pass manager. */
2380
2381 void
lto_output(void)2382 lto_output (void)
2383 {
2384 struct lto_out_decl_state *decl_state;
2385 bitmap output = NULL;
2386 int i, n_nodes;
2387 lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2388
2389 prune_offload_funcs ();
2390
2391 if (flag_checking)
2392 output = lto_bitmap_alloc ();
2393
2394 /* Initialize the streamer. */
2395 lto_streamer_init ();
2396
2397 n_nodes = lto_symtab_encoder_size (encoder);
2398 /* Process only the functions with bodies. */
2399 for (i = 0; i < n_nodes; i++)
2400 {
2401 symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2402 if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2403 {
2404 if (lto_symtab_encoder_encode_body_p (encoder, node)
2405 && !node->alias
2406 && (!node->thunk.thunk_p || !node->thunk.add_pointer_bounds_args))
2407 {
2408 if (flag_checking)
2409 {
2410 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2411 bitmap_set_bit (output, DECL_UID (node->decl));
2412 }
2413 decl_state = lto_new_out_decl_state ();
2414 lto_push_out_decl_state (decl_state);
2415 if (gimple_has_body_p (node->decl) || !flag_wpa
2416 /* Thunks have no body but they may be synthetized
2417 at WPA time. */
2418 || DECL_ARGUMENTS (node->decl))
2419 output_function (node);
2420 else
2421 copy_function_or_variable (node);
2422 gcc_assert (lto_get_out_decl_state () == decl_state);
2423 lto_pop_out_decl_state ();
2424 lto_record_function_out_decl_state (node->decl, decl_state);
2425 }
2426 }
2427 else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2428 {
2429 /* Wrap symbol references inside the ctor in a type
2430 preserving MEM_REF. */
2431 tree ctor = DECL_INITIAL (node->decl);
2432 if (ctor && !in_lto_p)
2433 walk_tree (&ctor, wrap_refs, NULL, NULL);
2434 if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2435 && lto_symtab_encoder_encode_initializer_p (encoder, node)
2436 && !node->alias)
2437 {
2438 timevar_push (TV_IPA_LTO_CTORS_OUT);
2439 if (flag_checking)
2440 {
2441 gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2442 bitmap_set_bit (output, DECL_UID (node->decl));
2443 }
2444 decl_state = lto_new_out_decl_state ();
2445 lto_push_out_decl_state (decl_state);
2446 if (DECL_INITIAL (node->decl) != error_mark_node
2447 || !flag_wpa)
2448 output_constructor (node);
2449 else
2450 copy_function_or_variable (node);
2451 gcc_assert (lto_get_out_decl_state () == decl_state);
2452 lto_pop_out_decl_state ();
2453 lto_record_function_out_decl_state (node->decl, decl_state);
2454 timevar_pop (TV_IPA_LTO_CTORS_OUT);
2455 }
2456 }
2457 }
2458
2459 /* Emit the callgraph after emitting function bodies. This needs to
2460 be done now to make sure that all the statements in every function
2461 have been renumbered so that edges can be associated with call
2462 statements using the statement UIDs. */
2463 output_symtab ();
2464
2465 output_offload_tables ();
2466
2467 #if CHECKING_P
2468 lto_bitmap_free (output);
2469 #endif
2470 }
2471
2472 /* Write each node in encoded by ENCODER to OB, as well as those reachable
2473 from it and required for correct representation of its semantics.
2474 Each node in ENCODER must be a global declaration or a type. A node
2475 is written only once, even if it appears multiple times in the
2476 vector. Certain transitively-reachable nodes, such as those
2477 representing expressions, may be duplicated, but such nodes
2478 must not appear in ENCODER itself. */
2479
2480 static void
write_global_stream(struct output_block * ob,struct lto_tree_ref_encoder * encoder)2481 write_global_stream (struct output_block *ob,
2482 struct lto_tree_ref_encoder *encoder)
2483 {
2484 tree t;
2485 size_t index;
2486 const size_t size = lto_tree_ref_encoder_size (encoder);
2487
2488 for (index = 0; index < size; index++)
2489 {
2490 t = lto_tree_ref_encoder_get_tree (encoder, index);
2491 if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2492 stream_write_tree (ob, t, false);
2493 }
2494 }
2495
2496
2497 /* Write a sequence of indices into the globals vector corresponding
2498 to the trees in ENCODER. These are used by the reader to map the
2499 indices used to refer to global entities within function bodies to
2500 their referents. */
2501
2502 static void
write_global_references(struct output_block * ob,struct lto_tree_ref_encoder * encoder)2503 write_global_references (struct output_block *ob,
2504 struct lto_tree_ref_encoder *encoder)
2505 {
2506 tree t;
2507 uint32_t index;
2508 const uint32_t size = lto_tree_ref_encoder_size (encoder);
2509
2510 /* Write size and slot indexes as 32-bit unsigned numbers. */
2511 uint32_t *data = XNEWVEC (uint32_t, size + 1);
2512 data[0] = size;
2513
2514 for (index = 0; index < size; index++)
2515 {
2516 unsigned slot_num;
2517
2518 t = lto_tree_ref_encoder_get_tree (encoder, index);
2519 streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2520 gcc_assert (slot_num != (unsigned)-1);
2521 data[index + 1] = slot_num;
2522 }
2523
2524 lto_write_data (data, sizeof (int32_t) * (size + 1));
2525 free (data);
2526 }
2527
2528
2529 /* Write all the streams in an lto_out_decl_state STATE using
2530 output block OB and output stream OUT_STREAM. */
2531
2532 void
lto_output_decl_state_streams(struct output_block * ob,struct lto_out_decl_state * state)2533 lto_output_decl_state_streams (struct output_block *ob,
2534 struct lto_out_decl_state *state)
2535 {
2536 int i;
2537
2538 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2539 write_global_stream (ob, &state->streams[i]);
2540 }
2541
2542
/* Write all the references in an lto_out_decl_state STATE using
   output block OB and output stream OUT_STREAM.  The record is the
   function reference (cache slot * 2, low bit = compressed flag)
   followed by one reference vector per decl stream.  */

void
lto_output_decl_state_refs (struct output_block *ob,
			    struct lto_out_decl_state *state)
{
  unsigned i;
  unsigned ref;
  tree decl;

  /* Write reference to FUNCTION_DECL.  If there is not function,
     write reference to void_type_node.  */
  decl = (state->fn_decl) ? state->fn_decl : void_type_node;
  streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
  gcc_assert (ref != (unsigned)-1);
  /* Encode the compressed flag in the low bit of the slot number.  */
  ref = ref * 2 + (state->compressed ? 1 : 0);
  lto_write_data (&ref, sizeof (uint32_t));

  for (i = 0; i < LTO_N_DECL_STREAMS; i++)
    write_global_references (ob, &state->streams[i]);
}
2565
2566
2567 /* Return the written size of STATE. */
2568
2569 static size_t
lto_out_decl_state_written_size(struct lto_out_decl_state * state)2570 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2571 {
2572 int i;
2573 size_t size;
2574
2575 size = sizeof (int32_t); /* fn_ref. */
2576 for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2577 {
2578 size += sizeof (int32_t); /* vector size. */
2579 size += (lto_tree_ref_encoder_size (&state->streams[i])
2580 * sizeof (int32_t));
2581 }
2582 return size;
2583 }
2584
2585
/* Write symbol T into STREAM in CACHE.  SEEN specifies symbols we wrote
   so far, keyed by mangled assembler name; a name already in SEEN is
   skipped.  ALIAS says whether T is being written as an alias target
   (relaxes the "defined symbols have a symtab node" checks).
   The record layout (name, comdat, kind byte, visibility byte, 8-byte
   size, 4-byte slot) is the linker-plugin ABI and must not change.  */

static void
write_symbol (struct streamer_tree_cache_d *cache,
	      tree t, hash_set<const char *> *seen, bool alias)
{
  const char *name;
  enum gcc_plugin_symbol_kind kind;
  enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
  unsigned slot_num;
  uint64_t size;
  const char *comdat;
  unsigned char c;

  /* Only public, non-builtin, concrete variables/functions that can
     live in the symbol table are expected here.  */
  gcc_checking_assert (TREE_PUBLIC (t)
		       && !is_builtin_fn (t)
		       && !DECL_ABSTRACT_P (t)
		       && (!VAR_P (t) || !DECL_HARD_REGISTER (t)));

  gcc_assert (VAR_OR_FUNCTION_DECL_P (t));

  name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));

  /* This behaves like assemble_name_raw in varasm.c, performing the
     same name manipulations that ASM_OUTPUT_LABELREF does. */
  name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));

  /* Emit each symbol name at most once.  */
  if (seen->add (name))
    return;

  streamer_tree_cache_lookup (cache, t, &slot_num);
  gcc_assert (slot_num != (unsigned)-1);

  /* Classify the symbol for the plugin: undefined vs. defined, with
     weak/common refinements.  */
  if (DECL_EXTERNAL (t))
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKUNDEF;
      else
	kind = GCCPK_UNDEF;
    }
  else
    {
      if (DECL_WEAK (t))
	kind = GCCPK_WEAKDEF;
      else if (DECL_COMMON (t))
	kind = GCCPK_COMMON;
      else
	kind = GCCPK_DEF;

      /* When something is defined, it should have node attached.  */
      gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
      gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
		  || (cgraph_node::get (t)
		      && cgraph_node::get (t)->definition));
    }

  /* Imitate what default_elf_asm_output_external do.
     When symbol is external, we need to output it with DEFAULT visibility
     when compiling with -fvisibility=default, while with HIDDEN visibility
     when symbol has attribute (visibility("hidden")) specified.
     targetm.binds_local_p check DECL_VISIBILITY_SPECIFIED and gets this
     right. */

  if (DECL_EXTERNAL (t)
      && !targetm.binds_local_p (t))
    visibility = GCCPV_DEFAULT;
  else
    switch (DECL_VISIBILITY (t))
      {
      case VISIBILITY_DEFAULT:
	visibility = GCCPV_DEFAULT;
	break;
      case VISIBILITY_PROTECTED:
	visibility = GCCPV_PROTECTED;
	break;
      case VISIBILITY_HIDDEN:
	visibility = GCCPV_HIDDEN;
	break;
      case VISIBILITY_INTERNAL:
	visibility = GCCPV_INTERNAL;
	break;
      }

  /* Common symbols carry their size so the linker can pick the
     largest; everyone else writes zero.  */
  if (kind == GCCPK_COMMON
      && DECL_SIZE_UNIT (t)
      && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
    size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
  else
    size = 0;

  if (DECL_ONE_ONLY (t))
    comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
  else
    comdat = "";

  /* Emit the fixed-layout plugin record.  */
  lto_write_data (name, strlen (name) + 1);
  lto_write_data (comdat, strlen (comdat) + 1);
  c = (unsigned char) kind;
  lto_write_data (&c, 1);
  c = (unsigned char) visibility;
  lto_write_data (&c, 1);
  lto_write_data (&size, 8);
  lto_write_data (&slot_num, 4);
}
2691
/* Write an IL symbol table to OB.
   SET and VSET are cgraph/varpool node sets we are outputting.
   Defined symbols are written before undefined ones so that, when a
   name appears both defined and undefined, the linker plugin sees the
   definition first.  */

static void
produce_symtab (struct output_block *ob)
{
  struct streamer_tree_cache_d *cache = ob->writer_cache;
  char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
  lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
  lto_symtab_encoder_iterator lsei;

  lto_begin_section (section_name, false);
  free (section_name);

  /* Deduplicates by mangled name across both passes below.  */
  hash_set<const char *> seen;

  /* Write the symbol table.
     First write everything defined and then all declarations.
     This is necessary to handle cases where we have duplicated symbols.  */
  for (lsei = lsei_start (encoder);
       !lsei_end_p (lsei); lsei_next (&lsei))
    {
      symtab_node *node = lsei_node (lsei);

      if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
	continue;
      write_symbol (cache, node->decl, &seen, false);
    }
  for (lsei = lsei_start (encoder);
       !lsei_end_p (lsei); lsei_next (&lsei))
    {
      symtab_node *node = lsei_node (lsei);

      if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
	continue;
      write_symbol (cache, node->decl, &seen, false);
    }

  lto_end_section ();
}
2732
2733
2734 /* Init the streamer_mode_table for output, where we collect info on what
2735 machine_mode values have been streamed. */
2736 void
lto_output_init_mode_table(void)2737 lto_output_init_mode_table (void)
2738 {
2739 memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2740 }
2741
2742
/* Write the mode table.  Streams a description of every machine_mode
   used during streaming (marked in streamer_mode_table) so the
   offload target can map host mode numbers to its own.  Component
   (inner) modes are streamed before the modes built from them; a
   VOIDmode entry terminates the table.  */
static void
lto_write_mode_table (void)
{
  struct output_block *ob;
  ob = create_output_block (LTO_section_mode_table);
  bitpack_d bp = bitpack_create (ob->main_stream);

  /* Ensure that for GET_MODE_INNER (m) != m we have
     also the inner mode marked.  */
  for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
    if (streamer_mode_table[i])
      {
	machine_mode m = (machine_mode) i;
	machine_mode inner_m = GET_MODE_INNER (m);
	if (inner_m != m)
	  streamer_mode_table[(int) inner_m] = 1;
      }
  /* First stream modes that have GET_MODE_INNER (m) == m,
     so that we can refer to them afterwards.  VOIDmode and BLKmode
     are implicit and never streamed.  */
  for (int pass = 0; pass < 2; pass++)
    for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
      if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
	{
	  machine_mode m = (machine_mode) i;
	  /* Pass 0 takes self-inner modes, pass 1 the rest.  */
	  if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
	    continue;
	  bp_pack_value (&bp, m, 8);
	  bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
	  bp_pack_poly_value (&bp, GET_MODE_SIZE (m), 16);
	  bp_pack_poly_value (&bp, GET_MODE_PRECISION (m), 16);
	  bp_pack_value (&bp, GET_MODE_INNER (m), 8);
	  bp_pack_poly_value (&bp, GET_MODE_NUNITS (m), 16);
	  switch (GET_MODE_CLASS (m))
	    {
	    case MODE_FRACT:
	    case MODE_UFRACT:
	    case MODE_ACCUM:
	    case MODE_UACCUM:
	      /* Fixed-point modes carry their integral/fractional
		 bit counts.  */
	      bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
	      bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
	      break;
	    case MODE_FLOAT:
	    case MODE_DECIMAL_FLOAT:
	      /* Float modes are identified by their real format name.  */
	      bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
	      break;
	    default:
	      break;
	    }
	  bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
	}
  /* Terminator record.  */
  bp_pack_value (&bp, VOIDmode, 8);

  streamer_write_bitpack (&bp);

  char *section_name
    = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  struct lto_simple_header_with_strings header;
  memset (&header, 0, sizeof (header));

  /* Write the header.  */
  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put all of the gimple and the string table out the asm file as a
     block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
  destroy_output_block (ob);
}
2823
2824
/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  This pass
   causes the vector of all of the global decls and types used from
   this file to be written in to a section that can then be read in to
   recover these on other side.  The section is: header, decl-state
   count, global out-state refs, per-function out-state refs, main
   stream, string stream; followed (outside WPA) by the plugin symbol
   table and the command-line options section.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

  /* All aliases must have been resolved before we get here.  */
  gcc_assert (!alias_pairs);

  /* Get rid of the global decl state hash tables to save some memory.  */
  out_state = lto_get_out_decl_state ();
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    if (out_state->streams[i].tree_hash_table)
      {
	delete out_state->streams[i].tree_hash_table;
	out_state->streams[i].tree_hash_table = NULL;
      }

  /* Write the global symbols.  */
  lto_output_decl_state_streams (ob, out_state);
  num_fns = lto_function_decl_states.length ();
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  header.major_version = LTO_major_version;
  header.minor_version = LTO_minor_version;

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  */
  decl_state_size = sizeof (int32_t);
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  lto_write_data (&header, sizeof header);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  num_decl_states = num_fns + 1;
  lto_write_data (&num_decl_states, sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state = lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, fn_out_state);
    }

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by linker to determine dependencies
     and thus we can skip it for WPA.  */
  if (!flag_wpa)
    produce_symtab (ob);

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
	lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
  /* The mode table is only needed when streaming for offload targets.  */
  if (lto_stream_offload_p)
    lto_write_mode_table ();
}
2935