1 /* Write the GIMPLE representation to a file stream.
2 
3    Copyright (C) 2009-2021 Free Software Foundation, Inc.
4    Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5    Re-implemented by Diego Novillo <dnovillo@google.com>
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
43 #include "debug.h"
44 #include "omp-offload.h"
45 #include "print-tree.h"
46 #include "tree-dfa.h"
47 #include "file-prefix-map.h" /* remap_debug_filename()  */
48 #include "output.h"
49 #include "ipa-utils.h"
50 #include "toplev.h"
51 
52 
53 static void lto_write_tree (struct output_block*, tree, bool);
54 
55 /* Clear the line info stored in OB.  */
56 
57 static void
58 clear_line_info (struct output_block *ob)
59 {
60   ob->current_file = NULL;
61   ob->current_line = 0;
62   ob->current_col = 0;
63   ob->current_sysp = false;
64   ob->reset_locus = true;
65   ob->emit_pwd = true;
66   /* Initialize to something that will never appear as a block,
67      so that the first location with a block in a function etc.
68      always streams a change_block bit and the first block.  */
69   ob->current_block = void_node;
70 }
71 
72 
73 /* Create the output block and return it.  SECTION_TYPE is
74    LTO_section_function_body or LTO_section_static_initializer.  */
75 
76 struct output_block *
77 create_output_block (enum lto_section_type section_type)
78 {
79   struct output_block *ob = XCNEW (struct output_block);
80   if (streamer_dump_file)
81     fprintf (streamer_dump_file, "Creating output block for %s\n",
82 	     lto_section_name[section_type]);
83 
84   ob->section_type = section_type;
85   ob->decl_state = lto_get_out_decl_state ();
86   /* Only global decl stream in non-wpa will ever be considered by tree
87      merging.  */
88   if (!flag_wpa && section_type == LTO_section_decls)
89     ob->local_trees = new (hash_set <tree>);
90   ob->main_stream = XCNEW (struct lto_output_stream);
91   ob->string_stream = XCNEW (struct lto_output_stream);
92   ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
93 
94   if (section_type == LTO_section_function_body)
95     ob->cfg_stream = XCNEW (struct lto_output_stream);
96 
97   clear_line_info (ob);
98 
99   ob->string_hash_table = new hash_table<string_slot_hasher> (37);
100   gcc_obstack_init (&ob->obstack);
101 
102   return ob;
103 }
104 
105 
106 /* Destroy the output block OB.  */
107 
108 void
109 destroy_output_block (struct output_block *ob)
110 {
111   enum lto_section_type section_type = ob->section_type;
112 
113   delete ob->string_hash_table;
114   ob->string_hash_table = NULL;
115   delete ob->local_trees;
116 
117   free (ob->main_stream);
118   free (ob->string_stream);
119   if (section_type == LTO_section_function_body)
120     free (ob->cfg_stream);
121 
122   streamer_tree_cache_delete (ob->writer_cache);
123   obstack_free (&ob->obstack, NULL);
124 
125   free (ob);
126 }
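
/* Illustrative sketch, not part of the streamer itself: writers in this file
   typically pair the two functions above roughly as follows.  The names used
   here (ob->symbol, produce_asm) are assumptions drawn from the surrounding
   code, not a fixed recipe.

     struct output_block *ob = create_output_block (LTO_section_function_body);
     ob->symbol = node;
     ... stream the header, bitpacks and tree references into ob ...
     produce_asm (ob, fn);
     destroy_output_block (ob);  */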
127 
128 
129 /* Wrapper around variably_modified_type_p avoiding type modification
130    during WPA streaming.  */
131 
132 static bool
133 lto_variably_modified_type_p (tree type)
134 {
135   return (in_lto_p
136 	  ? TYPE_LANG_FLAG_0 (TYPE_MAIN_VARIANT (type))
137 	  : variably_modified_type_p (type, NULL_TREE));
138 }
139 
140 
141 /* Return true if tree node T is written to various tables.  For these
142    nodes, we sometimes want to write their physical representation
143    (via lto_output_tree), and sometimes we need to emit an index
144    reference into a table (via lto_output_tree_ref).  */
145 
146 static bool
147 tree_is_indexable (tree t)
148 {
149   /* Parameters and return values of functions of variably modified types
150      must go to global stream, because they may be used in the type
151      definition.  */
152   if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
153       && DECL_CONTEXT (t))
154     return lto_variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)));
155   /* IMPORTED_DECL is put into BLOCK and thus it can never be shared.
156      We should no longer need to stream it.  */
157   else if (TREE_CODE (t) == IMPORTED_DECL)
158     gcc_unreachable ();
159   else if (TREE_CODE (t) == LABEL_DECL)
160     return FORCED_LABEL (t) || DECL_NONLOCAL (t);
161   else if (((VAR_P (t) && !TREE_STATIC (t))
162 	    || TREE_CODE (t) == TYPE_DECL
163 	    || TREE_CODE (t) == CONST_DECL
164 	    || TREE_CODE (t) == NAMELIST_DECL)
165 	   && decl_function_context (t))
166     return false;
167   else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
168     return false;
169   /* Variably modified types need to be streamed alongside function
170      bodies because they can refer to local entities.  Together with
171      them we have to localize their members as well.
172      ???  In theory that includes non-FIELD_DECLs as well.  */
173   else if (TYPE_P (t)
174 	   && lto_variably_modified_type_p (t))
175     return false;
176   else if (TREE_CODE (t) == FIELD_DECL
177 	   && lto_variably_modified_type_p (DECL_CONTEXT (t)))
178     return false;
179   else
180     return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
181 }
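
/* A few concrete examples of the predicate above (a reader's sketch, not an
   exhaustive specification): a TREE_STATIC VAR_DECL or a FUNCTION_DECL is
   indexable and is emitted as a reference into the global decl stream; an
   SSA_NAME is indexable via its version number; a function-local TYPE_DECL,
   CONST_DECL or non-static VAR_DECL is not indexable and is streamed as part
   of the function body, as are FIELD_DECLs of variably modified types.  */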
182 
183 
184 /* Output info about new location into bitpack BP.
185    After outputting bitpack, lto_output_location_data has
186    to be done to output actual data.  */
187 
188 static void
189 lto_output_location_1 (struct output_block *ob, struct bitpack_d *bp,
190 		       location_t orig_loc, bool block_p)
191 {
192   location_t loc = LOCATION_LOCUS (orig_loc);
193 
194   if (loc >= RESERVED_LOCATION_COUNT)
195     {
196       expanded_location xloc = expand_location (loc);
197 
198       if (ob->reset_locus)
199 	{
200 	  if (xloc.file == NULL)
201 	    ob->current_file = "";
202 	  if (xloc.line == 0)
203 	    ob->current_line = 1;
204 	  if (xloc.column == 0)
205 	    ob->current_col = 1;
206 	  ob->reset_locus = false;
207 	}
208 
209       /* As RESERVED_LOCATION_COUNT is 2, we can use the spare value of
210 	 3 without wasting additional bits to signal a file change.
211 	 If RESERVED_LOCATION_COUNT changes, reconsider this.  */
212       gcc_checking_assert (RESERVED_LOCATION_COUNT == 2);
213       bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT + 1,
214 			    RESERVED_LOCATION_COUNT
215 			    + (ob->current_file != xloc.file));
216 
217       bp_pack_value (bp, ob->current_line != xloc.line, 1);
218       bp_pack_value (bp, ob->current_col != xloc.column, 1);
219 
220       if (ob->current_file != xloc.file)
221 	{
222 	  bool stream_pwd = false;
223 	  const char *remapped = remap_debug_filename (xloc.file);
224 	  if (ob->emit_pwd && remapped && !IS_ABSOLUTE_PATH (remapped))
225 	    {
226 	      stream_pwd = true;
227 	      ob->emit_pwd = false;
228 	    }
229 	  bp_pack_value (bp, stream_pwd, 1);
230 	  if (stream_pwd)
231 	    bp_pack_string (ob, bp, get_src_pwd (), true);
232 	  bp_pack_string (ob, bp, remapped, true);
233 	  bp_pack_value (bp, xloc.sysp, 1);
234 	}
235       ob->current_file = xloc.file;
236       ob->current_sysp = xloc.sysp;
237 
238       if (ob->current_line != xloc.line)
239 	bp_pack_var_len_unsigned (bp, xloc.line);
240       ob->current_line = xloc.line;
241 
242       if (ob->current_col != xloc.column)
243 	bp_pack_var_len_unsigned (bp, xloc.column);
244       ob->current_col = xloc.column;
245     }
246   else
247     bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT + 1, loc);
248 
249   if (block_p)
250     {
251       tree block = LOCATION_BLOCK (orig_loc);
252       bp_pack_value (bp, ob->current_block != block, 1);
253       streamer_write_bitpack (bp);
254       if (ob->current_block != block)
255 	lto_output_tree (ob, block, true, true);
256       ob->current_block = block;
257     }
258 }
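
/* Reading aid, derived from lto_output_location_1 above (the reader has to
   agree with this layout): for a location at or above RESERVED_LOCATION_COUNT
   the bitpack contains
     - a value in [0, RESERVED_LOCATION_COUNT + 1], where the reserved values
       encode reserved locations, RESERVED_LOCATION_COUNT means "same file"
       and RESERVED_LOCATION_COUNT + 1 means "file changed";
     - one bit each for "line changed" and "column changed";
     - on a file change: a "pwd streamed" bit, optionally the pwd, then the
       remapped file name and a system-header bit;
     - the new line and column as variable-length integers, only if changed;
   and, when BLOCK_P is set, a "block changed" bit followed by the BLOCK
   streamed via lto_output_tree.  */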
259 
260 /* Output info about new location into bitpack BP.
261    After outputting bitpack, lto_output_location_data has
262    to be done to output actual data.  */
263 
264 void
265 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
266 		     location_t loc)
267 {
268   lto_output_location_1 (ob, bp, loc, false);
269 }
270 
271 /* Output info about new location into bitpack BP.
272    After outputting bitpack, lto_output_location_data has
273    to be done to output actual data.  Like lto_output_location, but
274    additionally output LOCATION_BLOCK info too and write the BP bitpack.  */
275 
276 void
277 lto_output_location_and_block (struct output_block *ob, struct bitpack_d *bp,
278 			       location_t loc)
279 {
280   lto_output_location_1 (ob, bp, loc, true);
281 }
282 
283 
284 /* Look up T in ENCODER.  If T is not found, create a new entry in
285    ENCODER for T with the next available index of ENCODER and note
286    the new encoding in the streamer dump file, if enabled.
287    Return the index.  */
288 
289 
290 static unsigned
291 lto_get_index (struct lto_tree_ref_encoder *encoder, tree t)
292 {
293   bool existed_p;
294 
295   unsigned int &index
296     = encoder->tree_hash_table->get_or_insert (t, &existed_p);
297   if (!existed_p)
298     {
299       index = encoder->trees.length ();
300       if (streamer_dump_file)
301 	{
302 	  print_node_brief (streamer_dump_file, "     Encoding indexable ",
303 			    t, 4);
304 	  fprintf (streamer_dump_file, "  as %i \n", index);
305 	}
306       encoder->trees.safe_push (t);
307     }
308 
309   return index;
310 }
311 
312 
313 /* For the indexable tree node EXPR, set *TAG and *INDEX to the reference
314    tag and index to be used when streaming a reference to EXPR from
315    output block OB.  */
316 
317 static void
318 lto_indexable_tree_ref (struct output_block *ob, tree expr,
319 			enum LTO_tags *tag, unsigned *index)
320 {
321   gcc_checking_assert (tree_is_indexable (expr));
322 
323   if (TREE_CODE (expr) == SSA_NAME)
324     {
325       *tag = LTO_ssa_name_ref;
326       *index = SSA_NAME_VERSION (expr);
327     }
328   else
329     {
330       *tag = LTO_global_stream_ref;
331       *index = lto_get_index (&ob->decl_state->streams[LTO_DECL_STREAM], expr);
332     }
333 }
334 
335 
336 /* Output a reference to the static or extern variable DECL to OBS.  */
337 
338 void
339 lto_output_var_decl_ref (struct lto_out_decl_state *decl_state,
340 			 struct lto_output_stream * obs, tree decl)
341 {
342   gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
343   streamer_write_uhwi_stream
344      (obs, lto_get_index (&decl_state->streams[LTO_DECL_STREAM],
345 			  decl));
346 }
347 
348 
349 /* Output a reference to the static or extern function DECL to OBS.  */
350 
351 void
352 lto_output_fn_decl_ref (struct lto_out_decl_state *decl_state,
353 			struct lto_output_stream * obs, tree decl)
354 {
355   gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL);
356   streamer_write_uhwi_stream
357      (obs, lto_get_index (&decl_state->streams[LTO_DECL_STREAM], decl));
358 }
359 
360 /* Return true if EXPR is a tree node that can be written to disk.  */
361 
362 static inline bool
363 lto_is_streamable (tree expr)
364 {
365   enum tree_code code = TREE_CODE (expr);
366 
367   /* Notice that we reject SSA_NAMEs as well.  We only emit the SSA
368      name version in lto_output_tree_ref (see output_ssa_names).  */
369   return !is_lang_specific (expr)
370 	 && code != SSA_NAME
371 	 && code != LANG_TYPE
372 	 && code != MODIFY_EXPR
373 	 && code != INIT_EXPR
374 	 && code != TARGET_EXPR
375 	 && code != BIND_EXPR
376 	 && code != WITH_CLEANUP_EXPR
377 	 && code != STATEMENT_LIST
378 	 && (code == CASE_LABEL_EXPR
379 	     || code == DECL_EXPR
380 	     || TREE_CODE_CLASS (code) != tcc_statement);
381 }
382 
383 /* Very rough estimate of the streaming size of the initializer.  If we
384    ignored the presence of strings, we could simply count the number of
385    non-indexable tree nodes and references to indexable nodes.  Strings,
386    however, may be very large and we do not want to dump them into the
387    global stream.
388    Keep subtracting from the size budget in DATA and stop once it goes negative.  */
389 
390 static tree
391 subtract_estimated_size (tree *tp, int *ws, void *data)
392 {
393   long *sum = (long *)data;
394   if (tree_is_indexable (*tp))
395     {
396       /* An indexable tree is one reference to the global stream.
397 	 Guess it may be about 4 bytes.  */
398       *sum -= 4;
399       *ws = 0;
400     }
401   /* String table entry + base of tree node needs to be streamed.  */
402   if (TREE_CODE (*tp) == STRING_CST)
403     *sum -= TREE_STRING_LENGTH (*tp) + 8;
404   else
405     {
406       /* Identifiers are also variable length but should not appear
407 	 naked in a constructor.  */
408       gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
409       /* We make no real attempt to work out the size of a pickled tree,
410 	 as it is very variable.  Make it bigger than the reference.  */
411       *sum -= 16;
412     }
413   if (*sum < 0)
414     return *tp;
415   return NULL_TREE;
416 }
417 
418 
419 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL.  */
420 
421 static tree
422 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
423 {
424   gcc_checking_assert (DECL_P (expr)
425 		       && TREE_CODE (expr) != FUNCTION_DECL
426 		       && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
427 
428   /* Handle DECL_INITIAL for symbols.  */
429   tree initial = DECL_INITIAL (expr);
430   if (VAR_P (expr)
431       && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
432       && !DECL_IN_CONSTANT_POOL (expr)
433       && initial)
434     {
435       varpool_node *vnode;
436       /* Extra section needs about 30 bytes; do not produce it for simple
437 	 scalar values.  */
438       if (!(vnode = varpool_node::get (expr))
439 	  || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
440         initial = error_mark_node;
441       if (initial != error_mark_node)
442 	{
443 	  long max_size = 30;
444 	  if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
445 			 NULL))
446 	    initial = error_mark_node;
447 	}
448     }
449 
450   return initial;
451 }
452 
453 
454 /* Output reference to tree T to the stream.
455    Assume that T is already in encoder cache.
456    This is used to stream tree bodies where we know the DFS walk arranged
457    everything to cache.  Must be matched with stream_read_tree_ref.  */
458 
459 void
460 stream_write_tree_ref (struct output_block *ob, tree t)
461 {
462   if (!t)
463     streamer_write_zero (ob);
464   else
465     {
466       unsigned int ix;
467       bool existed_p = streamer_tree_cache_lookup (ob->writer_cache, t, &ix);
468       if (existed_p)
469 	streamer_write_hwi (ob, ix + 1);
470       else
471 	{
472 	  enum LTO_tags tag;
473 	  unsigned ix;
474 	  int id = 0;
475 
476 	  lto_indexable_tree_ref (ob, t, &tag, &ix);
477 	  if (tag == LTO_ssa_name_ref)
478 	    id = 1;
479 	  else
480 	    gcc_checking_assert (tag == LTO_global_stream_ref);
481 	  streamer_write_hwi (ob, -(int)(ix * 2 + id + 1));
482 	}
483       if (streamer_debugging)
484 	streamer_write_uhwi (ob, TREE_CODE (t));
485     }
486 }
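
/* The integer encoding used by stream_write_tree_ref above, written out as a
   sketch of the matching decode step (the real counterpart is
   stream_read_tree_ref on the reader side; the names ib and "cache entry"
   below are placeholders, not actual API):

     HOST_WIDE_INT v = streamer_read_hwi (ib);
     if (v == 0)
       t = NULL_TREE;                        // zero means "no tree"
     else if (v > 0)
       t = cache entry v - 1;                // positive: writer-cache reference
     else
       {
	 unsigned u = -v - 1;
	 if (u & 1)
	   t = SSA name with version u / 2;  // odd: LTO_ssa_name_ref
	 else
	   t = global stream entry u / 2;    // even: LTO_global_stream_ref
       }
*/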
487 
488 
489 
490 /* Write a physical representation of the body of tree node EXPR to
491    output block OB.  If REF_P is true, the leaves of EXPR are emitted
492    as references via lto_output_tree_ref.  The header of EXPR must
493    have been written already (see lto_write_tree).  */
494 
495 static void
496 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
497 {
498   if (streamer_dump_file)
499     {
500       print_node_brief (streamer_dump_file, "     Streaming body of ",
501 			expr, 4);
502       fprintf (streamer_dump_file, "  to %s\n",
503 	       lto_section_name[ob->section_type]);
504     }
505 
506   /* Pack all the non-pointer fields in EXPR into a bitpack and write
507      the resulting bitpack.  */
508   streamer_write_tree_bitfields (ob, expr);
509 
510   /* Write all the pointer fields in EXPR.  */
511   streamer_write_tree_body (ob, expr);
512 
513   /* Write any LTO-specific data to OB.  */
514   if (DECL_P (expr)
515       && TREE_CODE (expr) != FUNCTION_DECL
516       && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
517     {
518       /* Handle DECL_INITIAL for symbols.  */
519       tree initial = get_symbol_initial_value
520 			 (ob->decl_state->symtab_node_encoder, expr);
521       stream_write_tree (ob, initial, ref_p);
522     }
523 
524   /* Stream references to early generated DIEs.  Keep in sync with the
525      trees handled in dwarf2out_die_ref_for_decl.  */
526   if ((DECL_P (expr)
527        && TREE_CODE (expr) != FIELD_DECL
528        && TREE_CODE (expr) != DEBUG_EXPR_DECL
529        && TREE_CODE (expr) != TYPE_DECL)
530       || TREE_CODE (expr) == BLOCK)
531     {
532       const char *sym;
533       unsigned HOST_WIDE_INT off;
534       if (debug_info_level > DINFO_LEVEL_NONE
535 	  && debug_hooks->die_ref_for_decl (expr, &sym, &off))
536 	{
537 	  streamer_write_string (ob, ob->main_stream, sym, true);
538 	  streamer_write_uhwi (ob, off);
539 	}
540       else
541 	streamer_write_string (ob, ob->main_stream, NULL, true);
542     }
543 }
544 
545 /* Write a physical representation of tree node EXPR to output block
546    OB, including its header.  If REF_P is true, the leaves of EXPR
547    are emitted as references via lto_output_tree_ref.  See also
548    lto_write_tree_1, which writes only the body.  */
549 
550 static void
551 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
552 {
553   if (!lto_is_streamable (expr))
554     internal_error ("tree code %qs is not supported in LTO streams",
555 		    get_tree_code_name (TREE_CODE (expr)));
556 
557   /* Write the header, containing everything needed to materialize
558      EXPR on the reading side.  */
559   streamer_write_tree_header (ob, expr);
560 
561   lto_write_tree_1 (ob, expr, ref_p);
562 }
563 
564 /* Emit the physical representation of tree node EXPR to output block OB.
565    If THIS_REF_P is true, the leaves of EXPR are emitted as references via
566    lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */
567 
568 static void
569 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
570 		   bool ref_p, bool this_ref_p)
571 {
572   unsigned ix;
573 
574   gcc_checking_assert (expr != NULL_TREE
575 		       && !(this_ref_p && tree_is_indexable (expr)));
576 
577   bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
578 					      expr, hash, &ix);
579   gcc_assert (!exists_p);
580   if (TREE_CODE (expr) == INTEGER_CST
581       && !TREE_OVERFLOW (expr))
582     {
583       /* Shared INTEGER_CST nodes are special because they need their
584 	 original type to be materialized by the reader (to implement
585 	 TYPE_CACHED_VALUES).  */
586       streamer_write_integer_cst (ob, expr);
587     }
588   else
589     {
590       /* This is the first time we see EXPR, write its fields
591 	 to OB.  */
592       lto_write_tree (ob, expr, ref_p);
593     }
594 }
595 
596 class DFS
597 {
598 public:
599   DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
600        bool single_p);
601   ~DFS ();
602 
603   struct scc_entry
604   {
605     tree t;
606     hashval_t hash;
607   };
608   auto_vec<scc_entry,32> sccstack;
609 
610 private:
611   struct sccs
612   {
613     unsigned int dfsnum;
614     unsigned int low;
615   };
616   struct worklist
617   {
618     tree expr;
619     sccs *from_state;
620     sccs *cstate;
621     bool ref_p;
622     bool this_ref_p;
623   };
624   /* Maximum index of scc stack containing a local tree.  */
625   int max_local_entry;
626 
627   static int scc_entry_compare (const void *, const void *);
628 
629   void DFS_write_tree_body (struct output_block *ob,
630 			    tree expr, sccs *expr_state, bool ref_p);
631 
632   void DFS_write_tree (struct output_block *ob, sccs *from_state,
633 		       tree expr, bool ref_p, bool this_ref_p);
634 
635   hashval_t
636   hash_scc (struct output_block *ob, unsigned first, unsigned size,
637 	    bool ref_p, bool this_ref_p);
638 
639   hash_map<tree, sccs *> sccstate;
640   auto_vec<worklist, 32> worklist_vec;
641   struct obstack sccstate_obstack;
642 };
643 
644 /* Return true if T cannot be merged with a structurally identical tree in
645    another translation unit.  During stream-out this information is
646    propagated to all trees referring to T, and those trees are not streamed
647    with the additional information needed by the tree merging in
648    lto-common.c (in particular, SCC hash codes are not streamed).
649 
650    TRANSLATION_UNIT_DECL is handled specially since references to it do
651    not make other trees local as well.  */
652 
653 static bool
654 local_tree_p (tree t)
655 {
656   switch (TREE_CODE (t))
657     {
658     case LABEL_DECL:
659       return true;
660     case NAMESPACE_DECL:
661       return !DECL_NAME (t);
662     case VAR_DECL:
663     case FUNCTION_DECL:
664       return !TREE_PUBLIC (t) && !DECL_EXTERNAL (t);
665     case RECORD_TYPE:
666     case UNION_TYPE:
667     case ENUMERAL_TYPE:
668       /* Anonymous namespace types are local.
669 	 Only work hard for main variants;
670 	 variant types will inherit locality.  */
671       return TYPE_MAIN_VARIANT (t) == t
672 	     && odr_type_p (t) && type_with_linkage_p (t)
673 	     && type_in_anonymous_namespace_p (t);
674     default:
675       return false;
676     }
677 }
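
/* For example, a static function, an unnamed NAMESPACE_DECL or a class type
   in an anonymous namespace is local in the sense above, which keeps every
   SCC it participates in out of the tree-merging path, while an exported
   FUNCTION_DECL or a named RECORD_TYPE with linkage is not.  (Reader's
   summary of local_tree_p, not an exhaustive list.)  */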
678 
679 /* Emit the physical representation of tree node EXPR to output block OB,
680    using depth-first search on the subgraph.  If THIS_REF_P is true, the
681    leaves of EXPR are emitted as references via lto_output_tree_ref.
682    REF_P is used for streaming siblings of EXPR.  If SINGLE_P is true,
683    this is for a rewalk of a single leaf SCC.  */
684 
685 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
686 	  bool single_p)
687 {
688   unsigned int next_dfs_num = 1;
689 
690   max_local_entry = -1;
691   gcc_obstack_init (&sccstate_obstack);
692   DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
693   while (!worklist_vec.is_empty ())
694     {
695       worklist &w = worklist_vec.last ();
696       expr = w.expr;
697       sccs *from_state = w.from_state;
698       sccs *cstate = w.cstate;
699       ref_p = w.ref_p;
700       this_ref_p = w.this_ref_p;
701       if (cstate == NULL)
702 	{
703 	  sccs **slot = &sccstate.get_or_insert (expr);
704 	  cstate = *slot;
705 	  if (cstate)
706 	    {
707 	      gcc_checking_assert (from_state);
708 	      if (cstate->dfsnum < from_state->dfsnum)
709 		from_state->low = MIN (cstate->dfsnum, from_state->low);
710 	      worklist_vec.pop ();
711 	      continue;
712 	    }
713 
714 	  scc_entry e = { expr, 0 };
715 	  /* Not yet visited.  DFS recurse and push it onto the stack.  */
716 	  *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
717 	  if (ob->local_trees && local_tree_p (expr))
718 	    max_local_entry = sccstack.length ();
719 	  sccstack.safe_push (e);
720 	  cstate->dfsnum = next_dfs_num++;
721 	  cstate->low = cstate->dfsnum;
722 	  w.cstate = cstate;
723 
724 	  if (TREE_CODE (expr) == INTEGER_CST
725 	      && !TREE_OVERFLOW (expr))
726 	    DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
727 	  else
728 	    {
729 	      DFS_write_tree_body (ob, expr, cstate, ref_p);
730 
731 	      /* Walk any LTO-specific edges.  */
732 	      if (DECL_P (expr)
733 		  && TREE_CODE (expr) != FUNCTION_DECL
734 		  && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
735 		{
736 		  /* Handle DECL_INITIAL for symbols.  */
737 		  tree initial
738 		    = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
739 						expr);
740 		  DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
741 		}
742 	    }
743 	  continue;
744 	}
745 
746       /* See if we found an SCC.  */
747       if (cstate->low == cstate->dfsnum)
748 	{
749 	  unsigned first, size;
750 	  tree x;
751 
752 	  /* If we are re-walking a single leaf SCC just pop it,
753 	     let earlier worklist item access the sccstack.  */
754 	  if (single_p)
755 	    {
756 	      worklist_vec.pop ();
757 	      continue;
758 	    }
759 
760 	  /* Pop the SCC and compute its size.  */
761 	  first = sccstack.length ();
762 	  do
763 	    {
764 	      x = sccstack[--first].t;
765 	    }
766 	  while (x != expr);
767 	  size = sccstack.length () - first;
768 
769 	  /* No need to compute hashes for LTRANS units, we don't perform
770 	     any merging there.  */
771 	  hashval_t scc_hash = 0;
772 	  unsigned scc_entry_len = 0;
773 	  bool local_to_unit = !ob->local_trees
774 			       || max_local_entry >= (int)first;
775 
776 	  /* Remember that trees are local so info gets propagated to other
777 	     SCCs.  */
778 	  if (local_to_unit && ob->local_trees)
779 	    {
780 	      for (unsigned i = 0; i < size; ++i)
781 		ob->local_trees->add (sccstack[first + i].t);
782 	    }
783 
784 	  /* As a special case do not stream TRANSLATION_UNIT_DECL as a shared
785 	     tree.  We cannot mark it local because references to it do not
786 	     make other trees local (all global decls refer to it via
787 	     CONTEXT).  */
788 	  if (size == 1
789 	      && TREE_CODE (sccstack[first].t) == TRANSLATION_UNIT_DECL)
790 	    local_to_unit = true;
791 
792 	  if (!local_to_unit)
793 	    {
794 	      scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
795 
796 	      /* Put the entries with the least number of collisions first.  */
797 	      unsigned entry_start = 0;
798 	      scc_entry_len = size + 1;
799 	      for (unsigned i = 0; i < size;)
800 		{
801 		  unsigned from = i;
802 		  for (i = i + 1; i < size
803 		       && (sccstack[first + i].hash
804 			   == sccstack[first + from].hash); ++i)
805 		    ;
806 		  if (i - from < scc_entry_len)
807 		    {
808 		      scc_entry_len = i - from;
809 		      entry_start = from;
810 		    }
811 		}
812 	      for (unsigned i = 0; i < scc_entry_len; ++i)
813 		std::swap (sccstack[first + i],
814 			   sccstack[first + entry_start + i]);
815 
816 	      /* We already sorted SCC deterministically in hash_scc.  */
817 
818 	      /* Check that we have only one SCC.
819 		 Naturally we may have conflicts if the hash function is not
820 		 strong enough.  Let's see how far this gets.  */
821 	      gcc_checking_assert (scc_entry_len == 1);
822 	    }
823 
824 	  worklist_vec.pop ();
825 
826 	  unsigned int prev_size = ob->main_stream->total_size;
827 
828 	  /* Only global decl sections are considered by tree merging.  */
829 	  if (ob->section_type != LTO_section_decls)
830 	    {
831 	      /* If this is the original tree we stream and it forms an SCC
832 		 by itself then we do not need to stream the SCC at all.  */
833 	      if (worklist_vec.is_empty () && first == 0 && size == 1)
834 		 return;
835 	      if (streamer_dump_file)
836 		{
837 		  fprintf (streamer_dump_file,
838 			   "     Start of LTO_trees of size %i\n", size);
839 		}
840 	      streamer_write_record_start (ob, LTO_trees);
841 	      streamer_write_uhwi (ob, size);
842 	    }
843 	  /* Write LTO_tree_scc if tree merging is going to be performed.  */
844 	  else if (!local_to_unit
845 		   /* These are special since sharing is not done by the tree
846 		      merging machinery.  We cannot special-case them earlier
847 		      because we still need to compute hashes for further
848 		      sharing of trees referring to them.  */
849 		   && (size != 1
850 		       || (TREE_CODE (sccstack[first].t) != IDENTIFIER_NODE
851 			   && (TREE_CODE (sccstack[first].t) != INTEGER_CST
852 			       || TREE_OVERFLOW (sccstack[first].t)))))
853 
854 	    {
855 	      gcc_checking_assert (ob->section_type == LTO_section_decls);
856 	      if (streamer_dump_file)
857 		{
858 		  fprintf (streamer_dump_file,
859 			   "     Start of LTO_tree_scc of size %i\n", size);
860 		}
861 	      streamer_write_record_start (ob, LTO_tree_scc);
862 	      /* In the vast majority of cases scc_entry_len is 1 and size is
863 		 a small integer.  Use the extra bit of size to stream info
864 		 about the exceptions.  */
865 	      streamer_write_uhwi (ob, size * 2 + (scc_entry_len != 1));
866 	      if (scc_entry_len != 1)
867 		streamer_write_uhwi (ob, scc_entry_len);
868 	      streamer_write_uhwi (ob, scc_hash);
869 	    }
870 	  /* Non-trivial SCCs must be packed into LTO_trees blocks so forward
871 	     references work correctly.  */
872 	  else if (size != 1)
873 	    {
874 	      if (streamer_dump_file)
875 		{
876 		  fprintf (streamer_dump_file,
877 			   "     Start of LTO_trees of size %i\n", size);
878 		}
879 	      streamer_write_record_start (ob, LTO_trees);
880 	      streamer_write_uhwi (ob, size);
881 	    }
882 	  else if (streamer_dump_file)
883 	    {
884 	      fprintf (streamer_dump_file, "     Streaming single tree\n");
885 	    }
886 
887 	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
888 	     All INTEGER_CSTs need to be handled this way as we need
889 	     their type to materialize them.  Also builtins are handled
890 	     this way.  */
891 	  if (size == 1)
892 	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
893 	  else
894 	    {
895 
896 	      /* Write all headers and populate the streamer cache.  */
897 	      for (unsigned i = 0; i < size; ++i)
898 		{
899 		  hashval_t hash = sccstack[first+i].hash;
900 		  tree t = sccstack[first+i].t;
901 		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
902 							      t, hash, NULL);
903 		  gcc_assert (!exists_p);
904 
905 		  if (!lto_is_streamable (t))
906 		    internal_error ("tree code %qs is not supported "
907 				    "in LTO streams",
908 				    get_tree_code_name (TREE_CODE (t)));
909 
910 		  /* Write the header, containing everything needed to
911 		     materialize EXPR on the reading side.  */
912 		  streamer_write_tree_header (ob, t);
913 		}
914 
915 	      /* Write the bitpacks and tree references.  */
916 	      for (unsigned i = 0; i < size; ++i)
917 		lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
918 	    }
919 	  if (streamer_dump_file)
920 	    fprintf (streamer_dump_file, "     %u bytes\n",
921 		     ob->main_stream->total_size - prev_size);
922 
923 	  /* Finally truncate the vector.  */
924 	  sccstack.truncate (first);
925 	  if ((int)first <= max_local_entry)
926 	    max_local_entry = first - 1;
927 
928 	  if (from_state)
929 	    from_state->low = MIN (from_state->low, cstate->low);
930 	  continue;
931 	}
932 
933       gcc_checking_assert (from_state);
934       from_state->low = MIN (from_state->low, cstate->low);
935       if (cstate->dfsnum < from_state->dfsnum)
936 	from_state->low = MIN (cstate->dfsnum, from_state->low);
937       worklist_vec.pop ();
938     }
939 }
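
/* Summary of the records the walk above emits for each completed SCC, kept
   here only as a reading aid derived from the code:
     - outside the global decl section: an LTO_trees record with the SCC
       size, or nothing at all when the root tree forms a singleton SCC
       (the root is then handled by the caller);
     - in the decl section, for SCCs that may be merged: an LTO_tree_scc
       record carrying size * 2 + (scc_entry_len != 1), the optional entry
       length and the SCC hash;
     - otherwise an LTO_trees record for non-trivial SCCs, or a bare tree
       for singletons (INTEGER_CSTs and builtins always take this path).  */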
940 
941 DFS::~DFS ()
942 {
943   obstack_free (&sccstate_obstack, NULL);
944 }
945 
946 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
947    DFS recurse for all tree edges originating from it.  */
948 
949 void
950 DFS::DFS_write_tree_body (struct output_block *ob,
951 			  tree expr, sccs *expr_state, bool ref_p)
952 {
953 #define DFS_follow_tree_edge(DEST) \
954   DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
955 
956   enum tree_code code;
957 
958   code = TREE_CODE (expr);
959 
960   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
961     {
962       if (TREE_CODE (expr) != IDENTIFIER_NODE)
963 	DFS_follow_tree_edge (TREE_TYPE (expr));
964     }
965 
966   if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
967     {
968       unsigned int count = vector_cst_encoded_nelts (expr);
969       for (unsigned int i = 0; i < count; ++i)
970 	DFS_follow_tree_edge (VECTOR_CST_ENCODED_ELT (expr, i));
971     }
972 
973   if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
974     for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
975       DFS_follow_tree_edge (POLY_INT_CST_COEFF (expr, i));
976 
977   if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
978     {
979       DFS_follow_tree_edge (TREE_REALPART (expr));
980       DFS_follow_tree_edge (TREE_IMAGPART (expr));
981     }
982 
983   if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
984     {
985       /* Drop names that were created for anonymous entities.  */
986       if (DECL_NAME (expr)
987 	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
988 	  && IDENTIFIER_ANON_P (DECL_NAME (expr)))
989 	;
990       else
991 	DFS_follow_tree_edge (DECL_NAME (expr));
992       if (TREE_CODE (expr) != TRANSLATION_UNIT_DECL
993 	  && ! DECL_CONTEXT (expr))
994 	DFS_follow_tree_edge ((*all_translation_units)[0]);
995       else
996 	DFS_follow_tree_edge (DECL_CONTEXT (expr));
997     }
998 
999   if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1000     {
1001       DFS_follow_tree_edge (DECL_SIZE (expr));
1002       DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
1003 
1004       /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
1005 	 special handling in LTO, it must be handled by streamer hooks.  */
1006 
1007       DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
1008 
1009       /* We use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
1010 	 declarations which should be eliminated by decl merging. Be sure none
1011 	 leaks to this point.  */
1012       gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
1013       DFS_follow_tree_edge (DECL_ABSTRACT_ORIGIN (expr));
1014 
1015       if ((VAR_P (expr)
1016 	   || TREE_CODE (expr) == PARM_DECL)
1017 	  && DECL_HAS_VALUE_EXPR_P (expr))
1018 	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
1019       if (VAR_P (expr)
1020 	  && DECL_HAS_DEBUG_EXPR_P (expr))
1021 	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
1022     }
1023 
1024   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1025     {
1026       /* Make sure we don't inadvertently set the assembler name.  */
1027       if (DECL_ASSEMBLER_NAME_SET_P (expr))
1028 	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
1029     }
1030 
1031   if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1032     {
1033       DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
1034       DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
1035       DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
1036       DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
1037       gcc_checking_assert (!DECL_FCONTEXT (expr));
1038     }
1039 
1040   if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1041     {
1042       gcc_checking_assert (DECL_VINDEX (expr) == NULL);
1043       DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
1044       DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
1045       DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
1046     }
1047 
1048   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1049     {
1050       DFS_follow_tree_edge (TYPE_SIZE (expr));
1051       DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
1052       DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
1053       DFS_follow_tree_edge (TYPE_NAME (expr));
1054       /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
1055 	 reconstructed during fixup.  */
1056       /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
1057 	 during fixup.  */
1058       DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
1059       DFS_follow_tree_edge (TYPE_CONTEXT (expr));
1060       /* TYPE_CANONICAL is re-computed during type merging, so no need
1061 	 to follow it here.  */
1062       /* Do not stream TYPE_STUB_DECL; it is not needed by LTO but currently
1063 	 it cannot be freed by free_lang_data without triggering ICEs in
1064 	 langhooks.  */
1065     }
1066 
1067   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1068     {
1069       if (TREE_CODE (expr) == ARRAY_TYPE)
1070 	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
1071       else if (RECORD_OR_UNION_TYPE_P (expr))
1072 	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
1073 	  DFS_follow_tree_edge (t);
1074       else if (TREE_CODE (expr) == FUNCTION_TYPE
1075 	       || TREE_CODE (expr) == METHOD_TYPE)
1076 	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
1077 
1078       if (!POINTER_TYPE_P (expr))
1079 	DFS_follow_tree_edge (TYPE_MIN_VALUE_RAW (expr));
1080       DFS_follow_tree_edge (TYPE_MAX_VALUE_RAW (expr));
1081     }
1082 
1083   if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1084     {
1085       DFS_follow_tree_edge (TREE_PURPOSE (expr));
1086       DFS_follow_tree_edge (TREE_VALUE (expr));
1087       DFS_follow_tree_edge (TREE_CHAIN (expr));
1088     }
1089 
1090   if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1091     {
1092       for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
1093 	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
1094     }
1095 
1096   if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1097     {
1098       for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
1099 	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
1100       DFS_follow_tree_edge (TREE_BLOCK (expr));
1101     }
1102 
1103   if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
1104     {
1105       for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
1106 	{
1107 	  /* We would have to stream externals in the block chain as
1108 	     non-references but we should have dropped them in
1109 	     free-lang-data.  */
1110 	  gcc_assert (!VAR_OR_FUNCTION_DECL_P (t) || !DECL_EXTERNAL (t));
1111 	  DFS_follow_tree_edge (t);
1112 	}
1113 
1114       DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
1115       DFS_follow_tree_edge (BLOCK_ABSTRACT_ORIGIN (expr));
1116 
1117       /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
1118 	 information for early inlined BLOCKs so drop it on the floor instead
1119 	 of ICEing in dwarf2out.c.  */
1120 
1121       /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
1122 	 streaming time.  */
1123 
1124       /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
1125 	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
1126     }
1127 
1128   if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1129     {
1130       unsigned i;
1131       tree t;
1132 
1133       /* Note that the number of BINFO slots has already been emitted in
1134 	 EXPR's header (see streamer_write_tree_header) because this length
1135 	 is needed to build the empty BINFO node on the reader side.  */
1136       FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
1137 	DFS_follow_tree_edge (t);
1138       DFS_follow_tree_edge (BINFO_OFFSET (expr));
1139       DFS_follow_tree_edge (BINFO_VTABLE (expr));
1140 
1141       /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX,
1142 	 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
1143 	 by C++ FE only.  */
1144     }
1145 
1146   if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1147     {
1148       unsigned i;
1149       tree index, value;
1150 
1151       FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
1152 	{
1153 	  DFS_follow_tree_edge (index);
1154 	  DFS_follow_tree_edge (value);
1155 	}
1156     }
1157 
1158   if (code == OMP_CLAUSE)
1159     {
1160       int i;
1161       for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
1162 	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
1163       DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
1164     }
1165 
1166 #undef DFS_follow_tree_edge
1167 }
1168 
1169 /* Return a hash value for the tree T.
1170    CACHE holds hash values of trees outside the current SCC.  MAP, if
1171    non-NULL, may hold hash values of trees inside the current SCC.  */
1172 
1173 static hashval_t
1174 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
1175 {
1176   inchash::hash hstate;
1177 
1178 #define visit(SIBLING) \
1179   do { \
1180     unsigned ix; \
1181     if (!SIBLING) \
1182       hstate.add_int (0); \
1183     else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
1184       hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
1185     else if (map) \
1186       hstate.add_int (*map->get (SIBLING)); \
1187     else \
1188       hstate.add_int (1); \
1189   } while (0)
1190 
1191   /* Hash TS_BASE.  */
1192   enum tree_code code = TREE_CODE (t);
1193   hstate.add_int (code);
1194   if (!TYPE_P (t))
1195     {
1196       hstate.add_flag (TREE_SIDE_EFFECTS (t));
1197       hstate.add_flag (TREE_CONSTANT (t));
1198       hstate.add_flag (TREE_READONLY (t));
1199       hstate.add_flag (TREE_PUBLIC (t));
1200     }
1201   hstate.add_flag (TREE_ADDRESSABLE (t));
1202   hstate.add_flag (TREE_THIS_VOLATILE (t));
1203   if (DECL_P (t))
1204     hstate.add_flag (DECL_UNSIGNED (t));
1205   else if (TYPE_P (t))
1206     hstate.add_flag (TYPE_UNSIGNED (t));
1207   if (TYPE_P (t))
1208     hstate.add_flag (TYPE_ARTIFICIAL (t));
1209   else
1210     hstate.add_flag (TREE_NO_WARNING (t));
1211   hstate.add_flag (TREE_NOTHROW (t));
1212   hstate.add_flag (TREE_STATIC (t));
1213   hstate.add_flag (TREE_PROTECTED (t));
1214   hstate.add_flag (TREE_DEPRECATED (t));
1215   if (code != TREE_BINFO)
1216     hstate.add_flag (TREE_PRIVATE (t));
1217   if (TYPE_P (t))
1218     {
1219       hstate.add_flag (AGGREGATE_TYPE_P (t)
1220 		       ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
1221       hstate.add_flag (TYPE_ADDR_SPACE (t));
1222     }
1223   else if (code == SSA_NAME)
1224     hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
1225   hstate.commit_flag ();
1226 
1227   if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1228     hstate.add_wide_int (wi::to_widest (t));
1229 
1230   if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1231     {
1232       REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1233       hstate.add_flag (r.cl);
1234       hstate.add_flag (r.sign);
1235       hstate.add_flag (r.signalling);
1236       hstate.add_flag (r.canonical);
1237       hstate.commit_flag ();
1238       hstate.add_int (r.uexp);
1239       hstate.add (r.sig, sizeof (r.sig));
1240     }
1241 
1242   if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1243     {
1244       FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1245       hstate.add_int (f.mode);
1246       hstate.add_int (f.data.low);
1247       hstate.add_int (f.data.high);
1248     }
1249 
1250   if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1251     {
1252       hstate.add_hwi (DECL_MODE (t));
1253       hstate.add_flag (DECL_NONLOCAL (t));
1254       hstate.add_flag (DECL_VIRTUAL_P (t));
1255       hstate.add_flag (DECL_IGNORED_P (t));
1256       hstate.add_flag (DECL_ABSTRACT_P (t));
1257       hstate.add_flag (DECL_ARTIFICIAL (t));
1258       hstate.add_flag (DECL_USER_ALIGN (t));
1259       hstate.add_flag (DECL_PRESERVE_P (t));
1260       hstate.add_flag (DECL_EXTERNAL (t));
1261       hstate.add_flag (DECL_NOT_GIMPLE_REG_P (t));
1262       hstate.commit_flag ();
1263       hstate.add_int (DECL_ALIGN (t));
1264       if (code == LABEL_DECL)
1265 	{
1266           hstate.add_int (EH_LANDING_PAD_NR (t));
1267 	  hstate.add_int (LABEL_DECL_UID (t));
1268 	}
1269       else if (code == FIELD_DECL)
1270 	{
1271 	  hstate.add_flag (DECL_PACKED (t));
1272 	  hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1273 	  hstate.add_flag (DECL_PADDING_P (t));
1274 	  hstate.add_flag (DECL_FIELD_ABI_IGNORED (t));
1275 	  hstate.add_int (DECL_OFFSET_ALIGN (t));
1276 	}
1277       else if (code == VAR_DECL)
1278 	{
1279 	  hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1280 	  hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1281 	}
1282       if (code == RESULT_DECL
1283 	  || code == PARM_DECL
1284 	  || code == VAR_DECL)
1285 	{
1286 	  hstate.add_flag (DECL_BY_REFERENCE (t));
1287 	  if (code == VAR_DECL
1288 	      || code == PARM_DECL)
1289 	    hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1290 	}
1291       hstate.commit_flag ();
1292     }
1293 
1294   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1295     hstate.add_int (DECL_REGISTER (t));
1296 
1297   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1298     {
1299       hstate.add_flag (DECL_COMMON (t));
1300       hstate.add_flag (DECL_DLLIMPORT_P (t));
1301       hstate.add_flag (DECL_WEAK (t));
1302       hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1303       hstate.add_flag (DECL_COMDAT (t));
1304       hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1305       hstate.add_int (DECL_VISIBILITY (t));
1306       if (code == VAR_DECL)
1307 	{
1308 	  /* DECL_IN_TEXT_SECTION is set during final asm output only.  */
1309 	  hstate.add_flag (DECL_HARD_REGISTER (t));
1310 	  hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1311 	}
1312       if (TREE_CODE (t) == FUNCTION_DECL)
1313         {
1314 	  hstate.add_flag (DECL_FINAL_P (t));
1315 	  hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1316 	  hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1317 	}
1318       hstate.commit_flag ();
1319     }
1320 
1321   if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1322     {
1323       hstate.add_int (DECL_BUILT_IN_CLASS (t));
1324       hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1325       hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1326       hstate.add_flag (FUNCTION_DECL_DECL_TYPE (t));
1327       hstate.add_flag (DECL_UNINLINABLE (t));
1328       hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1329       hstate.add_flag (DECL_IS_NOVOPS (t));
1330       hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1331       hstate.add_flag (DECL_IS_MALLOC (t));
1332       hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1333       hstate.add_flag (DECL_STATIC_CHAIN (t));
1334       hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1335       hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1336       hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1337       hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1338       hstate.add_flag (DECL_PURE_P (t));
1339       hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1340       hstate.commit_flag ();
1341       if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1342 	hstate.add_int (DECL_UNCHECKED_FUNCTION_CODE (t));
1343     }
1344 
1345   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1346     {
1347       hstate.add_hwi (TYPE_MODE (t));
1348       /* TYPE_NO_FORCE_BLK is private to stor-layout and needs
1349 	 no streaming.  */
1350       hstate.add_flag (TYPE_PACKED (t));
1351       hstate.add_flag (TYPE_RESTRICT (t));
1352       hstate.add_flag (TYPE_USER_ALIGN (t));
1353       hstate.add_flag (TYPE_READONLY (t));
1354       if (RECORD_OR_UNION_TYPE_P (t))
1355 	{
1356 	  hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1357 	  hstate.add_flag (TYPE_FINAL_P (t));
1358           hstate.add_flag (TYPE_CXX_ODR_P (t));
1359 	}
1360       else if (code == ARRAY_TYPE)
1361 	hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1362       if (code == ARRAY_TYPE || code == INTEGER_TYPE)
1363         hstate.add_flag (TYPE_STRING_FLAG (t));
1364       if (AGGREGATE_TYPE_P (t))
1365 	hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
1366       hstate.commit_flag ();
1367       hstate.add_int (TYPE_PRECISION (t));
1368       hstate.add_int (TYPE_ALIGN (t));
1369       hstate.add_int (TYPE_EMPTY_P (t));
1370     }
1371 
1372   if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1373     hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1374 			strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1375 
1376   if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1377       /* We don't stream these when passing things to a different target.  */
1378       && !lto_stream_offload_p)
1379     hstate.add_hwi (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1380 
1381   if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1382     hstate.add_hwi (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1383 
1384   if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1385     hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1386 
1387   if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1388     hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1389 
1390   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1391     {
1392       if (code != IDENTIFIER_NODE)
1393 	visit (TREE_TYPE (t));
1394     }
1395 
1396   if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1397     {
1398       unsigned int count = vector_cst_encoded_nelts (t);
1399       for (unsigned int i = 0; i < count; ++i)
1400 	visit (VECTOR_CST_ENCODED_ELT (t, i));
1401     }
1402 
1403   if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
1404     for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1405       visit (POLY_INT_CST_COEFF (t, i));
1406 
1407   if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1408     {
1409       visit (TREE_REALPART (t));
1410       visit (TREE_IMAGPART (t));
1411     }
1412 
1413   if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1414     {
1415       /* Drop names that were created for anonymous entities.  */
1416       if (DECL_NAME (t)
1417 	  && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1418 	  && IDENTIFIER_ANON_P (DECL_NAME (t)))
1419 	;
1420       else
1421 	visit (DECL_NAME (t));
1422       if (DECL_FILE_SCOPE_P (t))
1423 	;
1424       else
1425 	visit (DECL_CONTEXT (t));
1426     }
1427 
1428   if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1429     {
1430       visit (DECL_SIZE (t));
1431       visit (DECL_SIZE_UNIT (t));
1432       visit (DECL_ATTRIBUTES (t));
1433       if ((code == VAR_DECL
1434 	   || code == PARM_DECL)
1435 	  && DECL_HAS_VALUE_EXPR_P (t))
1436 	visit (DECL_VALUE_EXPR (t));
1437       if (code == VAR_DECL
1438 	  && DECL_HAS_DEBUG_EXPR_P (t))
1439 	visit (DECL_DEBUG_EXPR (t));
1440       /* ???  Hash DECL_INITIAL as streamed.  Needs the output-block to
1441          be able to call get_symbol_initial_value.  */
1442     }
1443 
1444   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1445     {
1446       if (DECL_ASSEMBLER_NAME_SET_P (t))
1447 	visit (DECL_ASSEMBLER_NAME (t));
1448     }
1449 
1450   if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1451     {
1452       visit (DECL_FIELD_OFFSET (t));
1453       visit (DECL_BIT_FIELD_TYPE (t));
1454       visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1455       visit (DECL_FIELD_BIT_OFFSET (t));
1456     }
1457 
1458   if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1459     {
1460       visit (DECL_FUNCTION_PERSONALITY (t));
1461       visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1462       visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1463     }
1464 
1465   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1466     {
1467       visit (TYPE_SIZE (t));
1468       visit (TYPE_SIZE_UNIT (t));
1469       visit (TYPE_ATTRIBUTES (t));
1470       visit (TYPE_NAME (t));
1471       visit (TYPE_MAIN_VARIANT (t));
1472       if (TYPE_FILE_SCOPE_P (t))
1473 	;
1474       else
1475 	visit (TYPE_CONTEXT (t));
1476     }
1477 
1478   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1479     {
1480       if (code == ARRAY_TYPE)
1481 	visit (TYPE_DOMAIN (t));
1482       else if (RECORD_OR_UNION_TYPE_P (t))
1483 	for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1484 	  visit (f);
1485       else if (code == FUNCTION_TYPE
1486 	       || code == METHOD_TYPE)
1487 	visit (TYPE_ARG_TYPES (t));
1488       if (!POINTER_TYPE_P (t))
1489 	visit (TYPE_MIN_VALUE_RAW (t));
1490       visit (TYPE_MAX_VALUE_RAW (t));
1491     }
1492 
1493   if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1494     {
1495       visit (TREE_PURPOSE (t));
1496       visit (TREE_VALUE (t));
1497       visit (TREE_CHAIN (t));
1498     }
1499 
1500   if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1501     for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1502       visit (TREE_VEC_ELT (t, i));
1503 
1504   if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1505     {
1506       hstate.add_hwi (TREE_OPERAND_LENGTH (t));
1507       for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1508 	visit (TREE_OPERAND (t, i));
1509     }
1510 
1511   if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1512     {
1513       unsigned i;
1514       tree b;
1515       FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1516 	visit (b);
1517       visit (BINFO_OFFSET (t));
1518       visit (BINFO_VTABLE (t));
1519       /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1520 	 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
1521 	 by C++ FE only.  */
1522     }
1523 
1524   if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1525     {
1526       unsigned i;
1527       tree index, value;
1528       hstate.add_hwi (CONSTRUCTOR_NELTS (t));
1529       FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1530 	{
1531 	  visit (index);
1532 	  visit (value);
1533 	}
1534     }
1535 
1536   if (code == OMP_CLAUSE)
1537     {
1538       int i;
1539       HOST_WIDE_INT val;
1540 
1541       hstate.add_hwi (OMP_CLAUSE_CODE (t));
1542       switch (OMP_CLAUSE_CODE (t))
1543 	{
1544 	case OMP_CLAUSE_DEFAULT:
1545 	  val = OMP_CLAUSE_DEFAULT_KIND (t);
1546 	  break;
1547 	case OMP_CLAUSE_SCHEDULE:
1548 	  val = OMP_CLAUSE_SCHEDULE_KIND (t);
1549 	  break;
1550 	case OMP_CLAUSE_DEPEND:
1551 	  val = OMP_CLAUSE_DEPEND_KIND (t);
1552 	  break;
1553 	case OMP_CLAUSE_MAP:
1554 	  val = OMP_CLAUSE_MAP_KIND (t);
1555 	  break;
1556 	case OMP_CLAUSE_PROC_BIND:
1557 	  val = OMP_CLAUSE_PROC_BIND_KIND (t);
1558 	  break;
1559 	case OMP_CLAUSE_REDUCTION:
1560 	case OMP_CLAUSE_TASK_REDUCTION:
1561 	case OMP_CLAUSE_IN_REDUCTION:
1562 	  val = OMP_CLAUSE_REDUCTION_CODE (t);
1563 	  break;
1564 	default:
1565 	  val = 0;
1566 	  break;
1567 	}
1568       hstate.add_hwi (val);
1569       for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1570 	visit (OMP_CLAUSE_OPERAND (t, i));
1571       visit (OMP_CLAUSE_CHAIN (t));
1572     }
1573 
1574   return hstate.end ();
1575 
1576 #undef visit
1577 }
1578 
1579 /* Compare two SCC entries by their hash value for qsorting them.  */
1580 
1581 int
1582 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1583 {
1584   const scc_entry *p1 = (const scc_entry *) p1_;
1585   const scc_entry *p2 = (const scc_entry *) p2_;
1586   if (p1->hash < p2->hash)
1587     return -1;
1588   else if (p1->hash > p2->hash)
1589     return 1;
1590   return 0;
1591 }
1592 
1593 /* Return a hash value for the SCC of SIZE entries starting at FIRST on the
1594    SCC stack.  THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST.  */
1595 
1596 hashval_t
1597 DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1598 	       bool ref_p, bool this_ref_p)
1599 {
1600   unsigned int last_classes = 0, iterations = 0;
1601 
1602   /* Compute hash values for the SCC members.  */
1603   for (unsigned i = 0; i < size; ++i)
1604     sccstack[first+i].hash
1605       = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1606 
1607   if (size == 1)
1608     return sccstack[first].hash;
1609 
1610   /* We aim to get unique hash for every tree within SCC and compute hash value
1611      of the whole SCC by combining all values together in a stable (entry-point
1612      independent) order.  This guarantees that the same SCC regions within
1613      different translation units will get the same hash values and therefore
1614      will be merged at WPA time.
1615 
1616      Often the hashes are already unique.  In that case we compute the SCC hash
1617      by combining individual hash values in an increasing order.
1618 
1619      If there are duplicates, we seek at least one tree with unique hash (and
1620      pick one with minimal hash and this property).  Then we obtain a stable
1621      order by DFS walk starting from this unique tree and then use the index
1622      within this order to make individual hash values unique.
1623 
1624      If there is no tree with unique hash, we iteratively propagate the hash
1625      values across the internal edges of SCC.  This usually quickly leads
1626      to unique hashes.  Consider, for example, an SCC containing two pointers
1627      that are identical except for the types they point to and assume that
1628      these types are also part of the SCC.  The propagation will add the
1629      points-to type information into their hash values.  */
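  /* The loop below alternates between sorting the SCC members by hash,
     checking whether a member with a unique hash exists, and, failing
     that, propagating hashes across the SCC's internal edges.  It is
     bounded: it stops as soon as a unique entry point is found, when the
     number of equivalence classes stops growing, or after a fixed small
     number of propagation passes.  */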
1630   do
1631     {
1632       /* Sort the SCC so we can easily check for uniqueness.  */
1633       qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1634 
1635       unsigned int classes = 1;
1636       int firstunique = -1;
1637 
1638       /* Find the tree with lowest unique hash (if it exists) and compute
1639 	 the number of equivalence classes.  */
1640       if (sccstack[first].hash != sccstack[first+1].hash)
1641 	firstunique = 0;
1642       for (unsigned i = 1; i < size; ++i)
1643 	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1644 	  {
1645 	    classes++;
1646 	    if (firstunique == -1
1647 		&& (i == size - 1
1648 		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
1649 	      firstunique = i;
1650 	  }
1651 
1652       /* If we found a tree with unique hash, stop the iteration.  */
1653       if (firstunique != -1
1654 	  /* Also terminate if we run out of iterations or if the number of
1655 	     equivalence classes is no longer increasing.
1656 	     For example, a cyclic list of trees that are all equivalent will
1657 	     never have a unique entry point; we do not, however, build such
1658 	     SCCs in our IL.  */
1659 	  || classes <= last_classes || iterations > 16)
1660 	{
1661           hashval_t scc_hash;
1662 
1663 	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1664 	     starting from FIRSTUNIQUE to obtain a stable order.  */
1665 	  if (classes != size && firstunique != -1)
1666 	    {
1667 	      hash_map <tree, hashval_t> map(size*2);
1668 
1669 	      /* Store hash values into a map, so we can associate them with
1670 		 the reordered SCC.  */
1671 	      for (unsigned i = 0; i < size; ++i)
1672 		map.put (sccstack[first+i].t, sccstack[first+i].hash);
1673 
1674 	      DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1675 			 true);
1676 	      gcc_assert (again.sccstack.length () == size);
1677 
1678 	      memcpy (sccstack.address () + first,
1679 		      again.sccstack.address (),
1680 		      sizeof (scc_entry) * size);
1681 
1682 	      /* Update hash values of individual members by hashing in the
1683 		 index within the stable order.  This ensures uniqueness.
1684 		 Also compute the SCC hash by mixing in all hash values in
1685 		 the stable order we obtained.  */
1686 	      sccstack[first].hash = *map.get (sccstack[first].t);
1687 	      scc_hash = sccstack[first].hash;
1688 	      for (unsigned i = 1; i < size; ++i)
1689 		{
1690 		  sccstack[first+i].hash
1691 		    = iterative_hash_hashval_t (i,
1692 						*map.get (sccstack[first+i].t));
1693 		  scc_hash
1694 		    = iterative_hash_hashval_t (scc_hash,
1695 						sccstack[first+i].hash);
1696 		}
1697 	    }
1698 	  /* If we got a unique hash value for each tree, then the sort
1699 	     already ensured an entry-point-independent order.  Only the
1700 	     final SCC hash remains to be computed.
1701 
1702 	     If we failed to find a unique entry point, we go the same
1703 	     route; this will eventually introduce unwanted hash conflicts.  */
1704 	  else
1705 	    {
1706 	      scc_hash = sccstack[first].hash;
1707 	      for (unsigned i = 1; i < size; ++i)
1708 		scc_hash
1709 		  = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1710 
1711 	      /* We cannot guarantee 100% that hash conflicts will not make
1712 		 it impossible to find a unique entry point.  This, however,
1713 		 should be an extremely rare case.  ICE for now so possible
1714 		 issues are found and evaluated.  */
1715 	      gcc_checking_assert (classes == size);
1716 	    }
1717 
1718 	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1719 	     hash into the hash of each element.  */
1720 	  for (unsigned i = 0; i < size; ++i)
1721 	    sccstack[first+i].hash
1722 	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1723 	  return scc_hash;
1724 	}
1725 
1726       last_classes = classes;
1727       iterations++;
1728 
1729       /* We failed to identify the entry point; propagate hash values across
1730 	 the edges.  */
1731       hash_map <tree, hashval_t> map(size*2);
1732 
1733       for (unsigned i = 0; i < size; ++i)
1734 	map.put (sccstack[first+i].t, sccstack[first+i].hash);
1735 
1736       for (unsigned i = 0; i < size; i++)
1737 	sccstack[first+i].hash
1738 	  = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1739     }
1740   while (true);
1741 }
1742 
1743 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1744    already in the streamer cache.  Main routine called for
1745    each visit of EXPR.  */
1746 
1747 void
1748 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1749 		     tree expr, bool ref_p, bool this_ref_p)
1750 {
1751   /* Handle special cases.  */
1752   if (expr == NULL_TREE)
1753     return;
1754 
1755   /* Do not DFS walk into indexable trees.  */
1756   if (this_ref_p && tree_is_indexable (expr))
1757     return;
1758 
1759   /* Check if we already streamed EXPR.  */
1760   if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1761     {
1762       /* A reference to a local tree makes the entry local as well.  We always
1763 	 process the top-of-stack entry, so set max to stack length - 1.  */
1764       if (ob->local_trees
1765 	  && ob->local_trees->contains (expr))
1766 	max_local_entry = sccstack.length () - 1;
1767       return;
1768     }
1769 
1770   worklist w;
1771   w.expr = expr;
1772   w.from_state = from_state;
1773   w.cstate = NULL;
1774   w.ref_p = ref_p;
1775   w.this_ref_p = this_ref_p;
1776   worklist_vec.safe_push (w);
1777 }
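
/* Note: DFS_write_tree only records a worklist entry; the actual
   depth-first walk is driven iteratively from worklist_vec by the DFS
   constructor, which helps avoid unbounded recursion on deeply nested
   expression graphs.  */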
1778 
1779 
1780 /* Emit the physical representation of tree node EXPR to output block OB.
1781    If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1782    lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */
1783 
1784 void
1785 lto_output_tree (struct output_block *ob, tree expr,
1786 		 bool ref_p, bool this_ref_p)
1787 {
1788   unsigned ix;
1789   bool existed_p;
1790   unsigned int size = ob->main_stream->total_size;
1791   /* This is the first time we see EXPR, write all reachable
1792      trees to OB.  */
1793   static bool in_dfs_walk;
1794 
1795   if (expr == NULL_TREE)
1796     {
1797       streamer_write_record_start (ob, LTO_null);
1798       return;
1799     }
1800 
1801   if (this_ref_p && tree_is_indexable (expr))
1802     {
1803       enum LTO_tags tag;
1804       unsigned ix;
1805 
1806       lto_indexable_tree_ref (ob, expr, &tag, &ix);
1807       streamer_write_record_start (ob, tag);
1808       streamer_write_uhwi (ob, ix);
1809       return;
1810     }
1811 
1812   existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1813   if (existed_p)
1814     {
1815       if (streamer_dump_file)
1816 	{
1817 	  if (in_dfs_walk)
1818 	    print_node_brief (streamer_dump_file, "     Streaming ref to ",
1819 			      expr, 4);
1820 	  else
1821 	    print_node_brief (streamer_dump_file, "   Streaming ref to ",
1822 			      expr, 4);
1823 	  fprintf (streamer_dump_file, "\n");
1824 	}
1825       /* If a node has already been streamed out, make sure that
1826 	 we don't write it more than once.  Otherwise, the reader
1827 	 will instantiate two different nodes for the same object.  */
1828       streamer_write_record_start (ob, LTO_tree_pickle_reference);
1829       streamer_write_uhwi (ob, ix);
1830       if (streamer_debugging)
1831 	streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1832 			     lto_tree_code_to_tag (TREE_CODE (expr)));
1833       lto_stats.num_pickle_refs_output++;
1834     }
1835   else
1836     {
1837       /* Protect against recursion, which would mean a disconnect between
1838 	 the tree edges we walk in the DFS walk and the edges we
1839 	 stream out.  */
1840       gcc_assert (!in_dfs_walk);
1841 
1842       if (streamer_dump_file)
1843 	{
1844 	  print_node_brief (streamer_dump_file, "   Streaming tree ",
1845 			    expr, 4);
1846 	  fprintf (streamer_dump_file, "\n");
1847 	}
1848 
1849       /* Start the DFS walk.  */
1852       in_dfs_walk = true;
1853       DFS (ob, expr, ref_p, this_ref_p, false);
1854 
1855       /* Finally append a reference to the tree we were writing.  */
1856       existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1857 
1858       /* DFS walk above possibly skipped streaming EXPR itself to let us inline
1859 	 it.  */
1860       if (!existed_p)
1861 	lto_output_tree_1 (ob, expr, 0, ref_p, this_ref_p);
1862       else if (this_ref_p)
1863 	{
1864 	  if (streamer_dump_file)
1865 	    {
1866 	      print_node_brief (streamer_dump_file,
1867 				"   Streaming final ref to ",
1868 				expr, 4);
1869 	      fprintf (streamer_dump_file, "\n");
1870 	    }
1871 	  streamer_write_record_start (ob, LTO_tree_pickle_reference);
1872 	  streamer_write_uhwi (ob, ix);
1873 	  if (streamer_debugging)
1874 	    streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1875 				 lto_tree_code_to_tag (TREE_CODE (expr)));
1876 	}
1877       in_dfs_walk = false;
1878       lto_stats.num_pickle_refs_output++;
1879     }
1880   if (streamer_dump_file && !in_dfs_walk)
1881     fprintf (streamer_dump_file, "    %u bytes\n",
1882 	     ob->main_stream->total_size - size);
1883 }
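
/* For reference, the records emitted above are, in order: LTO_null for a
   null tree; an indexable-tree tag plus index for indexable trees; an
   LTO_tree_pickle_reference plus cache index (and, with streamer
   debugging, the tree code tag) for already-streamed trees; and otherwise
   the SCCs found by the DFS walk followed by a final pickle reference.  */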
1884 
1885 
1886 /* Output to OB a list of try/catch handlers starting with FIRST.  */
1887 
1888 static void
1889 output_eh_try_list (struct output_block *ob, eh_catch first)
1890 {
1891   eh_catch n;
1892 
1893   for (n = first; n; n = n->next_catch)
1894     {
1895       streamer_write_record_start (ob, LTO_eh_catch);
1896       stream_write_tree (ob, n->type_list, true);
1897       stream_write_tree (ob, n->filter_list, true);
1898       stream_write_tree (ob, n->label, true);
1899     }
1900 
1901   streamer_write_record_start (ob, LTO_null);
1902 }
1903 
1904 
1905 /* Output EH region R to OB.  */
1908 
1909 static void
1910 output_eh_region (struct output_block *ob, eh_region r)
1911 {
1912   enum LTO_tags tag;
1913 
1914   if (r == NULL)
1915     {
1916       streamer_write_record_start (ob, LTO_null);
1917       return;
1918     }
1919 
1920   if (r->type == ERT_CLEANUP)
1921     tag = LTO_ert_cleanup;
1922   else if (r->type == ERT_TRY)
1923     tag = LTO_ert_try;
1924   else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1925     tag = LTO_ert_allowed_exceptions;
1926   else if (r->type == ERT_MUST_NOT_THROW)
1927     tag = LTO_ert_must_not_throw;
1928   else
1929     gcc_unreachable ();
1930 
1931   streamer_write_record_start (ob, tag);
1932   streamer_write_hwi (ob, r->index);
1933 
1934   if (r->outer)
1935     streamer_write_hwi (ob, r->outer->index);
1936   else
1937     streamer_write_zero (ob);
1938 
1939   if (r->inner)
1940     streamer_write_hwi (ob, r->inner->index);
1941   else
1942     streamer_write_zero (ob);
1943 
1944   if (r->next_peer)
1945     streamer_write_hwi (ob, r->next_peer->index);
1946   else
1947     streamer_write_zero (ob);
1948 
1949   if (r->type == ERT_TRY)
1950     {
1951       output_eh_try_list (ob, r->u.eh_try.first_catch);
1952     }
1953   else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1954     {
1955       stream_write_tree (ob, r->u.allowed.type_list, true);
1956       stream_write_tree (ob, r->u.allowed.label, true);
1957       streamer_write_uhwi (ob, r->u.allowed.filter);
1958     }
1959   else if (r->type == ERT_MUST_NOT_THROW)
1960     {
1961       stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1962       bitpack_d bp = bitpack_create (ob->main_stream);
1963       stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1964       streamer_write_bitpack (&bp);
1965     }
1966 
1967   if (r->landing_pads)
1968     streamer_write_hwi (ob, r->landing_pads->index);
1969   else
1970     streamer_write_zero (ob);
1971 }
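
/* Neighbouring regions and landing pads are referenced above only by their
   indices into the region and landing-pad arrays; the reader side is
   expected to turn those indices back into pointers once the whole arrays
   have been read in.  */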
1972 
1973 
1974 /* Output landing pad LP to OB.  */
1975 
1976 static void
1977 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1978 {
1979   if (lp == NULL)
1980     {
1981       streamer_write_record_start (ob, LTO_null);
1982       return;
1983     }
1984 
1985   streamer_write_record_start (ob, LTO_eh_landing_pad);
1986   streamer_write_hwi (ob, lp->index);
1987   if (lp->next_lp)
1988     streamer_write_hwi (ob, lp->next_lp->index);
1989   else
1990     streamer_write_zero (ob);
1991 
1992   if (lp->region)
1993     streamer_write_hwi (ob, lp->region->index);
1994   else
1995     streamer_write_zero (ob);
1996 
1997   stream_write_tree (ob, lp->post_landing_pad, true);
1998 }
1999 
2000 
2001 /* Output the existing eh_table to OB.  */
2002 
2003 static void
2004 output_eh_regions (struct output_block *ob, struct function *fn)
2005 {
2006   if (fn->eh && fn->eh->region_tree)
2007     {
2008       unsigned i;
2009       eh_region eh;
2010       eh_landing_pad lp;
2011       tree ttype;
2012 
2013       streamer_write_record_start (ob, LTO_eh_table);
2014 
2015       /* Emit the index of the root of the EH region tree.  */
2016       streamer_write_hwi (ob, fn->eh->region_tree->index);
2017 
2018       /* Emit all the EH regions in the region array.  */
2019       streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
2020       FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
2021 	output_eh_region (ob, eh);
2022 
2023       /* Emit all landing pads.  */
2024       streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
2025       FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
2026 	output_eh_lp (ob, lp);
2027 
2028       /* Emit all the runtime type data.  */
2029       streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
2030       FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
2031 	stream_write_tree (ob, ttype, true);
2032 
2033       /* Emit the table of action chains.  */
2034       if (targetm.arm_eabi_unwinder)
2035 	{
2036 	  tree t;
2037 	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
2038 	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
2039 	    stream_write_tree (ob, t, true);
2040 	}
2041       else
2042 	{
2043 	  uchar c;
2044 	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
2045 	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
2046 	    streamer_write_char_stream (ob->main_stream, c);
2047 	}
2048     }
2049 
2050   /* The LTO_null either terminates the record or indicates that there
2051      are no eh_records at all.  */
2052   streamer_write_record_start (ob, LTO_null);
2053 }
2054 
2055 
2056 /* Output all of the active ssa names to the ssa_names stream.  */
2057 
2058 static void
2059 output_ssa_names (struct output_block *ob, struct function *fn)
2060 {
2061   unsigned int i, len;
2062 
2063   len = vec_safe_length (SSANAMES (fn));
2064   streamer_write_uhwi (ob, len);
2065 
2066   for (i = 1; i < len; i++)
2067     {
2068       tree ptr = (*SSANAMES (fn))[i];
2069 
2070       if (ptr == NULL_TREE
2071 	  || SSA_NAME_IN_FREE_LIST (ptr)
2072 	  || virtual_operand_p (ptr)
2073 	  /* Simply skip unreleased SSA names.  */
2074 	  || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
2075 	      && (! SSA_NAME_DEF_STMT (ptr)
2076 		  || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
2077 	continue;
2078 
2079       streamer_write_uhwi (ob, i);
2080       streamer_write_char_stream (ob->main_stream,
2081 				  SSA_NAME_IS_DEFAULT_DEF (ptr));
2082       if (SSA_NAME_VAR (ptr))
2083 	stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
2084       else
2085 	/* ???  This drops SSA_NAME_IDENTIFIER on the floor.  */
2086 	stream_write_tree (ob, TREE_TYPE (ptr), true);
2087     }
2088 
2089   streamer_write_zero (ob);
2090 }
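
/* The SSA name stream thus consists of the total length of the SSA name
   vector followed by (index, default-def flag, var-or-type) records for
   the names that were not skipped, terminated by a zero index.  */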
2091 
2092 
2093 
2094 /* Output the cfg.  */
2095 
2096 static void
2097 output_cfg (struct output_block *ob, struct function *fn)
2098 {
2099   struct lto_output_stream *tmp_stream = ob->main_stream;
2100   basic_block bb;
2101 
2102   ob->main_stream = ob->cfg_stream;
2103 
2104   streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
2105 		       profile_status_for_fn (fn));
2106 
2107   /* Output the number of the highest basic block.  */
2108   streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
2109 
2110   FOR_ALL_BB_FN (bb, fn)
2111     {
2112       edge_iterator ei;
2113       edge e;
2114 
2115       streamer_write_hwi (ob, bb->index);
2116 
2117       /* Output the successors and the edge flags.  */
2118       streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
2119       FOR_EACH_EDGE (e, ei, bb->succs)
2120 	{
2121 	  bitpack_d bp = bitpack_create (ob->main_stream);
2122 	  bp_pack_var_len_unsigned (&bp, e->dest->index);
2123 	  bp_pack_var_len_unsigned (&bp, e->flags);
2124 	  stream_output_location_and_block (ob, &bp, e->goto_locus);
2125 	  e->probability.stream_out (ob);
2126 	}
2127     }
2128 
2129   streamer_write_hwi (ob, -1);
2130 
2131   bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
2132   while (bb->next_bb)
2133     {
2134       streamer_write_hwi (ob, bb->next_bb->index);
2135       bb = bb->next_bb;
2136     }
2137 
2138   streamer_write_hwi (ob, -1);
2139 
2140   /* Output the number of loops.  */
2141   streamer_write_uhwi (ob, number_of_loops (fn));
2142 
2143   /* Output each loop, skipping the tree root which has number zero.  */
2144   for (unsigned i = 1; i < number_of_loops (fn); ++i)
2145     {
2146       class loop *loop = get_loop (fn, i);
2147 
2148       /* Write the index of the loop header.  That's enough to rebuild
2149          the loop tree on the reader side.  Stream -1 for an unused
2150 	 loop entry.  */
2151       if (!loop)
2152 	{
2153 	  streamer_write_hwi (ob, -1);
2154 	  continue;
2155 	}
2156       else
2157 	streamer_write_hwi (ob, loop->header->index);
2158 
2159       /* Write everything copy_loop_info copies.  */
2160       streamer_write_enum (ob->main_stream,
2161 			   loop_estimation, EST_LAST, loop->estimate_state);
2162       streamer_write_hwi (ob, loop->any_upper_bound);
2163       if (loop->any_upper_bound)
2164 	streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
2165       streamer_write_hwi (ob, loop->any_likely_upper_bound);
2166       if (loop->any_likely_upper_bound)
2167 	streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
2168       streamer_write_hwi (ob, loop->any_estimate);
2169       if (loop->any_estimate)
2170 	streamer_write_widest_int (ob, loop->nb_iterations_estimate);
2171 
2172       /* Write OMP SIMD related info.  */
2173       streamer_write_hwi (ob, loop->safelen);
2174       streamer_write_hwi (ob, loop->unroll);
2175       streamer_write_hwi (ob, loop->owned_clique);
2176       streamer_write_hwi (ob, loop->dont_vectorize);
2177       streamer_write_hwi (ob, loop->force_vectorize);
2178       streamer_write_hwi (ob, loop->finite_p);
2179       stream_write_tree (ob, loop->simduid, true);
2180     }
2181 
2182   ob->main_stream = tmp_stream;
2183 }
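
/* Summary of the CFG stream layout produced above: the profile status, the
   highest basic block number, then for every block its index, its successor
   count and per-edge (destination, flags, goto locus, probability) records,
   a -1 terminator, the basic block chain as a list of indices terminated by
   -1, and finally the loop tree information.  */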
2184 
2185 
2186 /* Create the header in the file using OB.  If the section type is for
2187    a function, set FN to the decl for that function.  */
2188 
2189 void
2190 produce_asm (struct output_block *ob, tree fn)
2191 {
2192   enum lto_section_type section_type = ob->section_type;
2193   struct lto_function_header header;
2194   char *section_name;
2195 
2196   if (section_type == LTO_section_function_body)
2197     {
2198       const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
2199       section_name = lto_get_section_name (section_type, name,
2200 					   symtab_node::get (fn)->order,
2201 					   NULL);
2202     }
2203   else
2204     section_name = lto_get_section_name (section_type, NULL, 0, NULL);
2205 
2206   lto_begin_section (section_name, !flag_wpa);
2207   free (section_name);
2208 
2209   /* The entire header stream is computed here.  */
2210   memset (&header, 0, sizeof (struct lto_function_header));
2211 
2212   if (section_type == LTO_section_function_body)
2213     header.cfg_size = ob->cfg_stream->total_size;
2214   header.main_size = ob->main_stream->total_size;
2215   header.string_size = ob->string_stream->total_size;
2216   lto_write_data (&header, sizeof header);
2217 
2218   /* Put all of the gimple and the string table out to the asm file as a
2219      block of text.  */
2220   if (section_type == LTO_section_function_body)
2221     lto_write_stream (ob->cfg_stream);
2222   lto_write_stream (ob->main_stream);
2223   lto_write_stream (ob->string_stream);
2224 
2225   lto_end_section ();
2226 }
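
/* The resulting section therefore starts with a struct lto_function_header
   giving the sizes of the individual streams, followed by the raw bytes of
   the CFG stream (for function bodies), the main stream and the string
   stream, in that order.  */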
2227 
2228 
2229 /* Output the base body of struct function FN using output block OB.  */
2230 
2231 static void
2232 output_struct_function_base (struct output_block *ob, struct function *fn)
2233 {
2234   struct bitpack_d bp;
2235   unsigned i;
2236   tree t;
2237 
2238   /* Output the static chain and non-local goto save area.  */
2239   stream_write_tree (ob, fn->static_chain_decl, true);
2240   stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
2241 
2242   /* Output all the local variables in the function.  */
2243   streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
2244   FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
2245     stream_write_tree (ob, t, true);
2246 
2247   /* Output current IL state of the function.  */
2248   streamer_write_uhwi (ob, fn->curr_properties);
2249 
2250   /* Write all the attributes for FN.  */
2251   bp = bitpack_create (ob->main_stream);
2252   bp_pack_value (&bp, fn->is_thunk, 1);
2253   bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
2254   bp_pack_value (&bp, fn->returns_pcc_struct, 1);
2255   bp_pack_value (&bp, fn->returns_struct, 1);
2256   bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
2257   bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
2258   bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
2259   bp_pack_value (&bp, fn->after_inlining, 1);
2260   bp_pack_value (&bp, fn->stdarg, 1);
2261   bp_pack_value (&bp, fn->has_nonlocal_label, 1);
2262   bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
2263   bp_pack_value (&bp, fn->calls_alloca, 1);
2264   bp_pack_value (&bp, fn->calls_setjmp, 1);
2265   bp_pack_value (&bp, fn->calls_eh_return, 1);
2266   bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
2267   bp_pack_value (&bp, fn->has_simduid_loops, 1);
2268   bp_pack_value (&bp, fn->va_list_fpr_size, 8);
2269   bp_pack_value (&bp, fn->va_list_gpr_size, 8);
2270   bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
2271 
2272   /* Output the function start and end loci.  */
2273   stream_output_location (ob, &bp, fn->function_start_locus);
2274   stream_output_location (ob, &bp, fn->function_end_locus);
2275 
2276   /* Save the instance discriminator if present.  */
2277   int *instance_number_p = NULL;
2278   if (decl_to_instance_map)
2279     instance_number_p = decl_to_instance_map->get (fn->decl);
2280   bp_pack_value (&bp, !!instance_number_p, 1);
2281   if (instance_number_p)
2282     bp_pack_value (&bp, *instance_number_p, sizeof (int) * CHAR_BIT);
2283 
2284   streamer_write_bitpack (&bp);
2285 }
2286 
2287 
2288 /* Collect all leaf BLOCKs beyond ROOT into LEAFS.  */
2289 
2290 static void
2291 collect_block_tree_leafs (tree root, vec<tree> &leafs)
2292 {
2293   for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2294     if (! BLOCK_SUBBLOCKS (root))
2295       leafs.safe_push (root);
2296     else
2297       collect_block_tree_leafs (root, leafs);
2298 }
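
/* Only the leaf BLOCKs are collected here; their non-leaf ancestors remain
   reachable from each leaf through BLOCK_SUPERCONTEXT, which is the
   direction the streamer walks (see output_function below).  */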
2299 
2300 /* This performs function body modifications that are needed for streaming
2301    to work.  */
2302 
2303 void
2304 lto_prepare_function_for_streaming (struct cgraph_node *node)
2305 {
2306   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2307   basic_block bb;
2308 
2309   if (number_of_loops (fn))
2310     {
2311       push_cfun (fn);
2312       loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
2313       loop_optimizer_finalize ();
2314       pop_cfun ();
2315     }
2316   /* We will renumber the statements.  The code that does this uses
2317      the same ordering that we use for serializing them so we can use
2318      the same code on the other end and not have to write out the
2319      statement numbers.  We do not assign UIDs to PHIs here because
2320      virtual PHIs get re-computed on-the-fly which would make numbers
2321      inconsistent.  */
2322   set_gimple_stmt_max_uid (fn, 0);
2323   FOR_ALL_BB_FN (bb, fn)
2324     {
2325       for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2326 	   gsi_next (&gsi))
2327 	{
2328 	  gphi *stmt = gsi.phi ();
2329 
2330 	  /* Virtual PHIs are not going to be streamed.  */
2331 	  if (!virtual_operand_p (gimple_phi_result (stmt)))
2332 	    gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2333 	}
2334       for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2335 	   gsi_next (&gsi))
2336 	{
2337 	  gimple *stmt = gsi_stmt (gsi);
2338 	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2339 	}
2340     }
2341   /* To avoid keeping duplicate gimple IDs in the statements, renumber
2342      virtual phis now.  */
2343   FOR_ALL_BB_FN (bb, fn)
2344     {
2345       for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2346 	   gsi_next (&gsi))
2347 	{
2348 	  gphi *stmt = gsi.phi ();
2349 	  if (virtual_operand_p (gimple_phi_result (stmt)))
2350 	    gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2351 	}
2352     }
2353 
2354 }
2355 
2356 /* Emit the chain of tree nodes starting at T.  OB is the output block
2357    to write to.  REF_P is true if chain elements should be emitted
2358    as references.  */
2359 
2360 static void
2361 streamer_write_chain (struct output_block *ob, tree t, bool ref_p)
2362 {
2363   while (t)
2364     {
2365       /* We avoid outputting external vars or functions by reference
2366 	 to the global decls section as we do not want to have them
2367 	 enter decl merging.  We should not need to do this anymore because
2368 	 free_lang_data removes them from block scopes.  */
2369       gcc_assert (!VAR_OR_FUNCTION_DECL_P (t) || !DECL_EXTERNAL (t));
2370       stream_write_tree (ob, t, ref_p);
2371 
2372       t = TREE_CHAIN (t);
2373     }
2374 
2375   /* Write a sentinel to terminate the chain.  */
2376   stream_write_tree (ob, NULL_TREE, ref_p);
2377 }
2378 
2379 /* Output the body of function NODE->DECL.  */
2380 
2381 static void
2382 output_function (struct cgraph_node *node)
2383 {
2384   tree function;
2385   struct function *fn;
2386   basic_block bb;
2387   struct output_block *ob;
2388 
2389   if (streamer_dump_file)
2390     fprintf (streamer_dump_file, "\nStreaming body of %s\n",
2391 	     node->dump_name ());
2392 
2393   function = node->decl;
2394   fn = DECL_STRUCT_FUNCTION (function);
2395   ob = create_output_block (LTO_section_function_body);
2396 
2397   ob->symbol = node;
2398 
2399   gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2400 
2401   /* Make string 0 be a NULL string.  */
2402   streamer_write_char_stream (ob->string_stream, 0);
2403 
2404   streamer_write_record_start (ob, LTO_function);
2405 
2406   /* Output the result decl and the argument decls.  */
2407   stream_write_tree (ob, DECL_RESULT (function), true);
2408   streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2409 
2410   /* Output debug args if available. */
2411   vec<tree, va_gc> **debugargs = decl_debug_args_lookup (function);
2412   if (! debugargs)
2413     streamer_write_uhwi (ob, 0);
2414   else
2415     {
2416       streamer_write_uhwi (ob, (*debugargs)->length ());
2417       for (unsigned i = 0; i < (*debugargs)->length (); ++i)
2418 	stream_write_tree (ob, (**debugargs)[i], true);
2419     }
2420 
2421   /* Output DECL_INITIAL for the function, which contains the tree of
2422      lexical scopes.  */
2423   stream_write_tree (ob, DECL_INITIAL (function), true);
2424   /* As we do not recurse into BLOCK_SUBBLOCKS but only into BLOCK_SUPERCONTEXT,
2425      collect the block tree leafs and stream those.  */
2426   auto_vec<tree> block_tree_leafs;
2427   if (DECL_INITIAL (function) && DECL_INITIAL (function) != error_mark_node)
2428     collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
2429   streamer_write_uhwi (ob, block_tree_leafs.length ());
2430   for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
2431     stream_write_tree (ob, block_tree_leafs[i], true);
2432 
2433   /* We also stream abstract functions, for which we stream only the parts
2434      needed for debug info.  */
2435   if (gimple_has_body_p (function))
2436     {
2437       streamer_write_uhwi (ob, 1);
2438       output_struct_function_base (ob, fn);
2439 
2440       output_cfg (ob, fn);
2441 
2442       /* Output all the SSA names used in the function.  */
2443       output_ssa_names (ob, fn);
2444 
2445       /* Output any exception handling regions.  */
2446       output_eh_regions (ob, fn);
2447 
2448       /* Output the code for the function.  */
2449       FOR_ALL_BB_FN (bb, fn)
2450 	output_bb (ob, bb, fn);
2451 
2452       /* The terminator for this function.  */
2453       streamer_write_record_start (ob, LTO_null);
2454    }
2455   else
2456     streamer_write_uhwi (ob, 0);
2457 
2458   /* Create a section to hold the pickled output of this function.   */
2459   produce_asm (ob, function);
2460 
2461   destroy_output_block (ob);
2462   if (streamer_dump_file)
2463     fprintf (streamer_dump_file, "Finished streaming %s\n",
2464 	     node->dump_name ());
2465 }
2466 
2467 /* Output the initializer of variable NODE->DECL.  */
2468 
2469 static void
2470 output_constructor (struct varpool_node *node)
2471 {
2472   tree var = node->decl;
2473   struct output_block *ob;
2474 
2475   if (streamer_dump_file)
2476     fprintf (streamer_dump_file, "\nStreaming constructor of %s\n",
2477 	     node->dump_name ());
2478 
2479   timevar_push (TV_IPA_LTO_CTORS_OUT);
2480   ob = create_output_block (LTO_section_function_body);
2481 
2482   ob->symbol = node;
2483 
2484   /* Make string 0 be a NULL string.  */
2485   streamer_write_char_stream (ob->string_stream, 0);
2486 
2487   /* Output DECL_INITIAL for the variable, which contains its
2488      initializer.  */
2489   stream_write_tree (ob, DECL_INITIAL (var), true);
2490 
2491   /* Create a section to hold the pickled output of this initializer.  */
2492   produce_asm (ob, var);
2493 
2494   destroy_output_block (ob);
2495   if (streamer_dump_file)
2496     fprintf (streamer_dump_file, "Finished streaming %s\n",
2497 	     node->dump_name ());
2498   timevar_pop (TV_IPA_LTO_CTORS_OUT);
2499 }
2500 
2501 
2502 /* Emit toplevel asms.  */
2503 
2504 void
2505 lto_output_toplevel_asms (void)
2506 {
2507   struct output_block *ob;
2508   struct asm_node *can;
2509   char *section_name;
2510   struct lto_simple_header_with_strings header;
2511 
2512   if (!symtab->first_asm_symbol ())
2513     return;
2514 
2515   ob = create_output_block (LTO_section_asm);
2516 
2517   /* Make string 0 be a NULL string.  */
2518   streamer_write_char_stream (ob->string_stream, 0);
2519 
2520   for (can = symtab->first_asm_symbol (); can; can = can->next)
2521     {
2522       streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2523       streamer_write_hwi (ob, can->order);
2524     }
2525 
2526   streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2527 
2528   section_name = lto_get_section_name (LTO_section_asm, NULL, 0, NULL);
2529   lto_begin_section (section_name, !flag_wpa);
2530   free (section_name);
2531 
2532   /* The entire header stream is computed here.  */
2533   memset (&header, 0, sizeof (header));
2534 
2535   header.main_size = ob->main_stream->total_size;
2536   header.string_size = ob->string_stream->total_size;
2537   lto_write_data (&header, sizeof header);
2538 
2539   /* Put all of the gimple and the string table out to the asm file as a
2540      block of text.  */
2541   lto_write_stream (ob->main_stream);
2542   lto_write_stream (ob->string_stream);
2543 
2544   lto_end_section ();
2545 
2546   destroy_output_block (ob);
2547 }
2548 
2549 
2550 /* Copy the function body or variable constructor of NODE without deserializing. */
2551 
2552 static void
2553 copy_function_or_variable (struct symtab_node *node)
2554 {
2555   tree function = node->decl;
2556   struct lto_file_decl_data *file_data = node->lto_file_data;
2557   const char *data;
2558   size_t len;
2559   const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2560   char *section_name =
2561     lto_get_section_name (LTO_section_function_body, name, node->order, NULL);
2562   size_t i, j;
2563   struct lto_in_decl_state *in_state;
2564   struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2565 
2566   if (streamer_dump_file)
2567     fprintf (streamer_dump_file, "Copying section for %s\n", name);
2568   lto_begin_section (section_name, false);
2569   free (section_name);
2570 
2571   /* We may have renamed the declaration, e.g., a static function.  */
2572   name = lto_get_decl_name_mapping (file_data, name);
2573 
2574   data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2575 				   name, node->order - file_data->order_base,
2576 				   &len);
2577   gcc_assert (data);
2578 
2579   /* Do a bit copy of the function body.  */
2580   lto_write_raw_data (data, len);
2581 
2582   /* Copy decls. */
2583   in_state =
2584     lto_get_function_in_decl_state (node->lto_file_data, function);
2585   gcc_assert (in_state);
2586   out_state->compressed = in_state->compressed;
2587 
2588   for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2589     {
2590       size_t n = vec_safe_length (in_state->streams[i]);
2591       vec<tree, va_gc> *trees = in_state->streams[i];
2592       struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2593 
2594       /* The out state must have the same indices as the in state.
2595 	 So just copy the vector.  All the encoders in the in state
2596 	 must be empty when we reach here.  */
2597       gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2598       encoder->trees.reserve_exact (n);
2599       for (j = 0; j < n; j++)
2600 	encoder->trees.safe_push ((*trees)[j]);
2601     }
2602 
2603   lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2604 			     data, len);
2605   lto_end_section ();
2606 }
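
/* This path is taken when the body of NODE was never materialized in this
   compilation (for example at WPA time): the original section bytes are
   forwarded verbatim and only the decl reference vectors are rebuilt so
   that the indices recorded in the copied section stay meaningful.  */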
2607 
2608 /* Wrap symbol references in *TP inside a type-preserving MEM_REF.  */
2609 
2610 static tree
2611 wrap_refs (tree *tp, int *ws, void *)
2612 {
2613   tree t = *tp;
2614   if (handled_component_p (t)
2615       && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2616       && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2617     {
2618       tree decl = TREE_OPERAND (t, 0);
2619       tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2620       TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2621 				    build1 (ADDR_EXPR, ptrtype, decl),
2622 				    build_int_cst (ptrtype, 0));
2623       TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2624       *ws = 0;
2625     }
2626   else if (TREE_CODE (t) == CONSTRUCTOR)
2627     ;
2628   else if (!EXPR_P (t))
2629     *ws = 0;
2630   return NULL_TREE;
2631 }
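
/* Wrapping the base in a MEM_REF of the decl's own type keeps the access
   type explicit, so the access stays well-typed even if the public
   variable is later merged with a differently-typed declaration at link
   time; only handled components based on public VAR_DECLs need this.  */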
2632 
2633 /* Remove functions that are no longer used from offload_funcs, and mark the
2634    remaining ones with DECL_PRESERVE_P.  */
2635 
2636 static void
2637 prune_offload_funcs (void)
2638 {
2639   if (!offload_funcs)
2640     return;
2641 
2642   unsigned ix, ix2;
2643   tree *elem_ptr;
2644   VEC_ORDERED_REMOVE_IF (*offload_funcs, ix, ix2, elem_ptr,
2645 			 cgraph_node::get (*elem_ptr) == NULL);
2646 
2647   tree fn_decl;
2648   FOR_EACH_VEC_ELT (*offload_funcs, ix, fn_decl)
2649     DECL_PRESERVE_P (fn_decl) = 1;
2650 }
2651 
2652 /* Produce LTO section that contains global information
2653    about LTO bytecode.  */
2654 
2655 static void
2656 produce_lto_section ()
2657 {
2658   /* Stream LTO meta section.  */
2659   output_block *ob = create_output_block (LTO_section_lto);
2660 
2661   char * section_name = lto_get_section_name (LTO_section_lto, NULL, 0, NULL);
2662   lto_begin_section (section_name, false);
2663   free (section_name);
2664 
2665 #ifdef HAVE_ZSTD_H
2666   lto_compression compression = ZSTD;
2667 #else
2668   lto_compression compression = ZLIB;
2669 #endif
2670 
2671   bool slim_object = flag_generate_lto && !flag_fat_lto_objects;
2672   lto_section s
2673     = { LTO_major_version, LTO_minor_version, slim_object, 0, 0 };
2674   s.set_compression (compression);
2675   lto_write_data (&s, sizeof s);
2676   lto_end_section ();
2677   destroy_output_block (ob);
2678 }
2679 
2680 /* Compare symbols to sort them by the file they come from (to optimize streaming).  */
2681 
2682 static int
2683 cmp_symbol_files (const void *pn1, const void *pn2, void *id_map_)
2684 {
2685   const symtab_node *n1 = *(const symtab_node * const *)pn1;
2686   const symtab_node *n2 = *(const symtab_node * const *)pn2;
2687   hash_map<lto_file_decl_data *, int> *id_map
2688     = (hash_map<lto_file_decl_data *, int> *)id_map_;
2689 
2690   int file_order1 = n1->lto_file_data ? n1->lto_file_data->order : -1;
2691   int file_order2 = n2->lto_file_data ? n2->lto_file_data->order : -1;
2692 
2693   /* Order files same way as they appeared in the command line to reduce
2694      seeking while copying sections.  */
2695   if (file_order1 != file_order2)
2696     return file_order1 - file_order2;
2697 
2698   /* Order within static library.  */
2699   if (n1->lto_file_data && n1->lto_file_data->id != n2->lto_file_data->id)
2700     return *id_map->get (n1->lto_file_data) - *id_map->get (n2->lto_file_data);
2701 
2702   /* And finally order by the definition order.  */
2703   return n1->order - n2->order;
2704 }
2705 
2706 /* Main entry point from the pass manager.  */
2707 
2708 void
2709 lto_output (void)
2710 {
2711   struct lto_out_decl_state *decl_state;
2712   bitmap output = NULL;
2713   bitmap_obstack output_obstack;
2714   unsigned int i, n_nodes;
2715   lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2716   auto_vec<symtab_node *> symbols_to_copy;
2717 
2718   prune_offload_funcs ();
2719 
2720   if (flag_checking)
2721     {
2722       bitmap_obstack_initialize (&output_obstack);
2723       output = BITMAP_ALLOC (&output_obstack);
2724     }
2725 
2726   /* Initialize the streamer.  */
2727   lto_streamer_init ();
2728 
2729   produce_lto_section ();
2730 
2731   n_nodes = lto_symtab_encoder_size (encoder);
2732   /* Prepare vector of functions to output and then sort it to optimize
2733      section copying.  */
2734   for (i = 0; i < n_nodes; i++)
2735     {
2736       symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2737       if (snode->alias)
2738 	continue;
2739       if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2740 	{
2741 	  if (lto_symtab_encoder_encode_body_p (encoder, node))
2742 	    symbols_to_copy.safe_push (node);
2743 	}
2744       else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2745 	{
2746 	  /* Wrap symbol references inside the ctor in a type
2747 	     preserving MEM_REF.  */
2748 	  tree ctor = DECL_INITIAL (node->decl);
2749 	  if (ctor && !in_lto_p)
2750 	    walk_tree (&ctor, wrap_refs, NULL, NULL);
2751 	  if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2752 	      && lto_symtab_encoder_encode_initializer_p (encoder, node))
2753 	    symbols_to_copy.safe_push (node);
2754 	}
2755     }
2756   /* Map each file ID (a hash) to the order it first appears in
2757      symbols_to_copy; we want symbols with the same ID sorted next to each
2758      other without making the overall order depend on the hash value.  */
2759   int order = 0;
2760   hash_map<lto_file_decl_data *, int> id_map;
2761   for (i = 0; i < symbols_to_copy.length (); ++i)
2762     {
2763       symtab_node *snode = symbols_to_copy[i];
2764       if (snode->lto_file_data)
2765 	{
2766 	  bool existed_p = false;
2767 	  int &ord = id_map.get_or_insert (snode->lto_file_data, &existed_p);
2768 	  if (!existed_p)
2769 	    ord = order++;
2770 	}
2771     }
2772   symbols_to_copy.sort (cmp_symbol_files, (void *)&id_map);
2773   for (i = 0; i < symbols_to_copy.length (); i++)
2774     {
2775       symtab_node *snode = symbols_to_copy[i];
2776       cgraph_node *cnode;
2777       varpool_node *vnode;
2778 
2779       if (flag_checking)
2780 	gcc_assert (bitmap_set_bit (output, DECL_UID (snode->decl)));
2781 
2782       decl_state = lto_new_out_decl_state ();
2783       lto_push_out_decl_state (decl_state);
2784 
2785       if ((cnode = dyn_cast <cgraph_node *> (snode))
2786 	  && (gimple_has_body_p (cnode->decl)
2787 	      || (!flag_wpa
2788 		  && flag_incremental_link != INCREMENTAL_LINK_LTO)
2789 	      /* Thunks have no body but they may be synthesized
2790 		 at WPA time.  */
2791 	      || DECL_ARGUMENTS (cnode->decl)
2792 	      || cnode->declare_variant_alt))
2793 	output_function (cnode);
2794       else if ((vnode = dyn_cast <varpool_node *> (snode))
2795 	       && (DECL_INITIAL (vnode->decl) != error_mark_node
2796 		   || (!flag_wpa
2797 		       && flag_incremental_link != INCREMENTAL_LINK_LTO)))
2798 	output_constructor (vnode);
2799       else
2800 	copy_function_or_variable (snode);
2801       gcc_assert (lto_get_out_decl_state () == decl_state);
2802       lto_pop_out_decl_state ();
2803       lto_record_function_out_decl_state (snode->decl, decl_state);
2804     }
2805 
2806   /* Emit the callgraph after emitting function bodies.  This needs to
2807      be done now to make sure that all the statements in every function
2808      have been renumbered so that edges can be associated with call
2809      statements using the statement UIDs.  */
2810   output_symtab ();
2811 
2812   output_offload_tables ();
2813 
2814   if (flag_checking)
2815     {
2816       BITMAP_FREE (output);
2817       bitmap_obstack_release (&output_obstack);
2818     }
2819 }
2820 
2821 /* Write each node encoded by ENCODER to OB, as well as those reachable
2822    from it and required for correct representation of its semantics.
2823    Each node in ENCODER must be a global declaration or a type.  A node
2824    is written only once, even if it appears multiple times in the
2825    vector.  Certain transitively-reachable nodes, such as those
2826    representing expressions, may be duplicated, but such nodes
2827    must not appear in ENCODER itself.  */
2828 
2829 static void
2830 write_global_stream (struct output_block *ob,
2831 		     struct lto_tree_ref_encoder *encoder)
2832 {
2833   tree t;
2834   size_t index;
2835   const size_t size = lto_tree_ref_encoder_size (encoder);
2836 
2837   for (index = 0; index < size; index++)
2838     {
2839       t = lto_tree_ref_encoder_get_tree (encoder, index);
2840       if (streamer_dump_file)
2841 	{
2842           fprintf (streamer_dump_file, " %i:", (int)index);
2843 	  print_node_brief (streamer_dump_file, "", t, 4);
2844           fprintf (streamer_dump_file, "\n");
2845 	}
2846       if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2847 	stream_write_tree (ob, t, false);
2848     }
2849 }
2850 
2851 
2852 /* Write a sequence of indices into the globals vector corresponding
2853    to the trees in ENCODER.  These are used by the reader to map the
2854    indices used to refer to global entities within function bodies to
2855    their referents.  */
2856 
2857 static void
2858 write_global_references (struct output_block *ob,
2859 			 struct lto_tree_ref_encoder *encoder)
2860 {
2861   tree t;
2862   uint32_t index;
2863   const uint32_t size = lto_tree_ref_encoder_size (encoder);
2864 
2865   /* Write size and slot indexes as 32-bit unsigned numbers. */
2866   uint32_t *data = XNEWVEC (uint32_t, size + 1);
2867   data[0] = size;
2868 
2869   for (index = 0; index < size; index++)
2870     {
2871       unsigned slot_num;
2872 
2873       t = lto_tree_ref_encoder_get_tree (encoder, index);
2874       streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2875       gcc_assert (slot_num != (unsigned)-1);
2876       data[index + 1] = slot_num;
2877     }
2878 
2879   lto_write_data (data, sizeof (int32_t) * (size + 1));
2880   free (data);
2881 }
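
/* The reference block written above is therefore a vector of 32-bit
   values: the element count followed by one writer-cache slot number per
   tree in the encoder, in encoder order.  */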
2882 
2883 
2884 /* Write all the streams in an lto_out_decl_state STATE using
2885    output block OB.  */
2886 
2887 void
2888 lto_output_decl_state_streams (struct output_block *ob,
2889 			       struct lto_out_decl_state *state)
2890 {
2891   int i;
2892 
2893   for (i = 0;  i < LTO_N_DECL_STREAMS; i++)
2894     write_global_stream (ob, &state->streams[i]);
2895 }
2896 
2897 
2898 /* Write all the references in an lto_out_decl_state STATE using
2899    output block OB.  */
2900 
2901 void
2902 lto_output_decl_state_refs (struct output_block *ob,
2903 			    struct lto_out_decl_state *state)
2904 {
2905   unsigned i;
2906   unsigned ref;
2907   tree decl;
2908 
2909   /* Write a reference to FUNCTION_DECL.  If there is no function,
2910      write a reference to void_type_node.  */
2911   decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2912   streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2913   gcc_assert (ref != (unsigned)-1);
2914   ref = ref * 2 + (state->compressed ? 1 : 0);
2915   lto_write_data (&ref, sizeof (uint32_t));
2916 
2917   for (i = 0;  i < LTO_N_DECL_STREAMS; i++)
2918     write_global_references (ob, &state->streams[i]);
2919 }
2920 
2921 
2922 /* Return the written size of STATE. */
2923 
2924 static size_t
2925 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2926 {
2927   int i;
2928   size_t size;
2929 
2930   size = sizeof (int32_t);	/* fn_ref. */
2931   for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2932     {
2933       size += sizeof (int32_t); /* vector size. */
2934       size += (lto_tree_ref_encoder_size (&state->streams[i])
2935 	       * sizeof (int32_t));
2936     }
2937   return size;
2938 }
2939 
2940 
2941 /* Write symbol T to the symbol table section, using CACHE to look up its
2942    slot number.  SEEN specifies the symbols we have written so far.  */
2943 
2944 static void
2945 write_symbol (struct streamer_tree_cache_d *cache,
2946 	      tree t, hash_set<const char *> *seen, bool alias)
2947 {
2948   const char *name;
2949   enum gcc_plugin_symbol_kind kind;
2950   enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2951   unsigned slot_num;
2952   uint64_t size;
2953   const char *comdat;
2954   unsigned char c;
2955 
2956   gcc_assert (VAR_OR_FUNCTION_DECL_P (t));
2957 
2958   name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2959 
2960   /* This behaves like assemble_name_raw in varasm.c, performing the
2961      same name manipulations that ASM_OUTPUT_LABELREF does. */
2962   name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2963 
2964   if (seen->add (name))
2965     return;
2966 
2967   streamer_tree_cache_lookup (cache, t, &slot_num);
2968   gcc_assert (slot_num != (unsigned)-1);
2969 
2970   if (DECL_EXTERNAL (t))
2971     {
2972       if (DECL_WEAK (t))
2973 	kind = GCCPK_WEAKUNDEF;
2974       else
2975 	kind = GCCPK_UNDEF;
2976     }
2977   else
2978     {
2979       if (DECL_WEAK (t))
2980 	kind = GCCPK_WEAKDEF;
2981       else if (DECL_COMMON (t))
2982 	kind = GCCPK_COMMON;
2983       else
2984 	kind = GCCPK_DEF;
2985 
2986       /* When something is defined, it should have a node attached.  */
2987       gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
2988       gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2989 		  || (cgraph_node::get (t)
2990 		      && cgraph_node::get (t)->definition));
2991     }
2992 
2993   /* Imitate what default_elf_asm_output_external does.
2994      When a symbol is external, we need to output it with DEFAULT visibility
2995      when compiling with -fvisibility=default, but with HIDDEN visibility
2996      when the symbol has attribute ((visibility ("hidden"))) specified.
2997      targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2998      right.  */
2999 
3000   if (DECL_EXTERNAL (t)
3001       && !targetm.binds_local_p (t))
3002     visibility = GCCPV_DEFAULT;
3003   else
3004     switch (DECL_VISIBILITY (t))
3005       {
3006       case VISIBILITY_DEFAULT:
3007 	visibility = GCCPV_DEFAULT;
3008 	break;
3009       case VISIBILITY_PROTECTED:
3010 	visibility = GCCPV_PROTECTED;
3011 	break;
3012       case VISIBILITY_HIDDEN:
3013 	visibility = GCCPV_HIDDEN;
3014 	break;
3015       case VISIBILITY_INTERNAL:
3016 	visibility = GCCPV_INTERNAL;
3017 	break;
3018       }
3019 
3020   if (kind == GCCPK_COMMON
3021       && DECL_SIZE_UNIT (t)
3022       && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
3023     size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
3024   else
3025     size = 0;
3026 
3027   if (DECL_ONE_ONLY (t))
3028     comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
3029   else
3030     comdat = "";
3031 
3032   lto_write_data (name, strlen (name) + 1);
3033   lto_write_data (comdat, strlen (comdat) + 1);
3034   c = (unsigned char) kind;
3035   lto_write_data (&c, 1);
3036   c = (unsigned char) visibility;
3037   lto_write_data (&c, 1);
3038   lto_write_data (&size, 8);
3039   lto_write_data (&slot_num, 4);
3040 }
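
/* Each symbol table entry written above consists of the NUL-terminated
   assembler name, the NUL-terminated comdat group name (possibly empty),
   one byte each for the symbol kind and visibility, an 8-byte size and a
   4-byte slot number.  */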
3041 
3042 /* Write extension information for symbols (symbol type, section flags).  */
3043 
3044 static void
3045 write_symbol_extension_info (tree t)
3046 {
3047   unsigned char c;
3048   c = ((unsigned char) TREE_CODE (t) == VAR_DECL
3049        ? GCCST_VARIABLE : GCCST_FUNCTION);
3050   lto_write_data (&c, 1);
3051   unsigned char section_kind = 0;
3052   if (TREE_CODE (t) == VAR_DECL)
3053     {
3054       section *s = get_variable_section (t, false);
3055       if (s->common.flags & SECTION_BSS)
3056 	section_kind |= GCCSSK_BSS;
3057     }
3058   lto_write_data (&section_kind, 1);
3059 }
3060 
3061 /* Write an IL symbol table to OB, containing the symbols recorded in
3062    OB's symtab node encoder.  */
3063 
3064 static unsigned int
3065 produce_symtab (struct output_block *ob)
3066 {
3067   unsigned int streamed_symbols = 0;
3068   struct streamer_tree_cache_d *cache = ob->writer_cache;
3069   char *section_name = lto_get_section_name (LTO_section_symtab, NULL, 0, NULL);
3070   lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
3071   lto_symtab_encoder_iterator lsei;
3072 
3073   lto_begin_section (section_name, false);
3074   free (section_name);
3075 
3076   hash_set<const char *> seen;
3077 
3078   /* Write the symbol table.
3079      First write everything defined and then all declarations.
3080      This is necessary to handle cases where we have duplicated symbols.  */
3081   for (lsei = lsei_start (encoder);
3082        !lsei_end_p (lsei); lsei_next (&lsei))
3083     {
3084       symtab_node *node = lsei_node (lsei);
3085 
3086       if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3087 	continue;
3088       write_symbol (cache, node->decl, &seen, false);
3089       ++streamed_symbols;
3090     }
3091   for (lsei = lsei_start (encoder);
3092        !lsei_end_p (lsei); lsei_next (&lsei))
3093     {
3094       symtab_node *node = lsei_node (lsei);
3095 
3096       if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3097 	continue;
3098       write_symbol (cache, node->decl, &seen, false);
3099       ++streamed_symbols;
3100     }
3101 
3102   lto_end_section ();
3103 
3104   return streamed_symbols;
3105 }
3106 
3107 /* Symtab extension version.  */
3108 #define LTO_SYMTAB_EXTENSION_VERSION 1
3109 
3110 /* Write an IL symbol table extension to OB, covering the same symbols
3111    as produce_symtab.  */
3112 
3113 static void
3114 produce_symtab_extension (struct output_block *ob,
3115 			  unsigned int previous_streamed_symbols)
3116 {
3117   unsigned int streamed_symbols = 0;
3118   char *section_name = lto_get_section_name (LTO_section_symtab_extension,
3119 					     NULL, 0, NULL);
3120   lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
3121   lto_symtab_encoder_iterator lsei;
3122 
3123   lto_begin_section (section_name, false);
3124   free (section_name);
3125 
3126   unsigned char version = LTO_SYMTAB_EXTENSION_VERSION;
3127   lto_write_data (&version, 1);
3128 
3129   /* Write the symbol table.
3130      First write everything defined and then all declarations.
3131      This is necessary to handle cases where we have duplicated symbols.  */
3132   for (lsei = lsei_start (encoder);
3133        !lsei_end_p (lsei); lsei_next (&lsei))
3134     {
3135       symtab_node *node = lsei_node (lsei);
3136 
3137       if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3138 	continue;
3139       write_symbol_extension_info (node->decl);
3140       ++streamed_symbols;
3141     }
3142   for (lsei = lsei_start (encoder);
3143        !lsei_end_p (lsei); lsei_next (&lsei))
3144     {
3145       symtab_node *node = lsei_node (lsei);
3146 
3147       if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3148 	continue;
3149       write_symbol_extension_info (node->decl);
3150       ++streamed_symbols;
3151     }
3152 
  gcc_assert (previous_streamed_symbols == streamed_symbols);
  lto_end_section ();
}


/* Init the streamer_mode_table for output, where we collect info on what
   machine_mode values have been streamed.  */
void
lto_output_init_mode_table (void)
{
  memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
}


/* Write the mode table.  */
static void
lto_write_mode_table (void)
{
  struct output_block *ob;
  ob = create_output_block (LTO_section_mode_table);
  bitpack_d bp = bitpack_create (ob->main_stream);

  /* Ensure that whenever GET_MODE_INNER (m) != m the inner mode is
     marked for streaming as well.  */
  for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
    if (streamer_mode_table[i])
      {
        machine_mode m = (machine_mode) i;
        machine_mode inner_m = GET_MODE_INNER (m);
        if (inner_m != m)
          streamer_mode_table[(int) inner_m] = 1;
      }
  /* First stream modes that have GET_MODE_INNER (m) == m,
     so that we can refer to them afterwards.  */
  for (int pass = 0; pass < 2; pass++)
    for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
      if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
        {
          machine_mode m = (machine_mode) i;
          if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
            continue;
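          /* Each mode is a bit-packed record: the mode value and its class,
             size, precision, inner mode and number of units, followed by
             class-specific fields and the mode name.  */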
          bp_pack_value (&bp, m, 8);
          bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
          bp_pack_poly_value (&bp, GET_MODE_SIZE (m), 16);
          bp_pack_poly_value (&bp, GET_MODE_PRECISION (m), 16);
          bp_pack_value (&bp, GET_MODE_INNER (m), 8);
          bp_pack_poly_value (&bp, GET_MODE_NUNITS (m), 16);
          switch (GET_MODE_CLASS (m))
            {
            case MODE_FRACT:
            case MODE_UFRACT:
            case MODE_ACCUM:
            case MODE_UACCUM:
              bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
              bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
              break;
            case MODE_FLOAT:
            case MODE_DECIMAL_FLOAT:
              bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
              break;
            default:
              break;
            }
          bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
        }
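  /* VOIDmode is excluded from the loop above and written once here as the
     end-of-table marker.  */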
  bp_pack_value (&bp, VOIDmode, 8);

  streamer_write_bitpack (&bp);

  char *section_name
    = lto_get_section_name (LTO_section_mode_table, NULL, 0, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* The entire header stream is computed here.  */
  struct lto_simple_header_with_strings header;
  memset (&header, 0, sizeof (header));

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;
  lto_write_data (&header, sizeof header);

  /* Put the bit-packed mode records and the string table out to the asm
     file as a block of text.  */
  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();
  destroy_output_block (ob);
}


/* This pass is run after all of the functions are serialized and all
   of the IPA passes have written their serialized forms.  It writes
   the vector of all global decls and types used from this file into a
   section that can be read back to recover them on the other side.  */

void
produce_asm_for_decls (void)
{
  struct lto_out_decl_state *out_state;
  struct lto_out_decl_state *fn_out_state;
  struct lto_decl_header header;
  char *section_name;
  struct output_block *ob;
  unsigned idx, num_fns;
  size_t decl_state_size;
  int32_t num_decl_states;

  ob = create_output_block (LTO_section_decls);

  memset (&header, 0, sizeof (struct lto_decl_header));

  section_name = lto_get_section_name (LTO_section_decls, NULL, 0, NULL);
  lto_begin_section (section_name, !flag_wpa);
  free (section_name);

  /* Make string 0 be a NULL string.  */
  streamer_write_char_stream (ob->string_stream, 0);

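  /* By now every alias pair should already have been processed, leaving
     the alias_pairs vector empty.  */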
  gcc_assert (!alias_pairs);

  /* Get rid of the global decl state hash tables to save some memory.  */
  out_state = lto_get_out_decl_state ();
  for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
    if (out_state->streams[i].tree_hash_table)
      {
        delete out_state->streams[i].tree_hash_table;
        out_state->streams[i].tree_hash_table = NULL;
      }

  /* Write the global symbols.  */
  if (streamer_dump_file)
    fprintf (streamer_dump_file, "Outputting global stream\n");
  lto_output_decl_state_streams (ob, out_state);
  num_fns = lto_function_decl_states.length ();
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
        lto_function_decl_states[idx];
      if (streamer_dump_file)
        fprintf (streamer_dump_file, "Outputting stream for %s\n",
                 IDENTIFIER_POINTER
                    (DECL_ASSEMBLER_NAME (fn_out_state->fn_decl)));
      lto_output_decl_state_streams (ob, fn_out_state);
    }

  /* Currently not used.  This field would allow us to preallocate
     the globals vector, so that it need not be resized as it is extended.  */
  header.num_nodes = -1;

  /* Compute the total size of all decl out states.  */
  decl_state_size = sizeof (int32_t);
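  /* The sizeof (int32_t) above accounts for the decl state count that is
     written just before the decl state references below.  */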
  decl_state_size += lto_out_decl_state_written_size (out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
        lto_function_decl_states[idx];
      decl_state_size += lto_out_decl_state_written_size (fn_out_state);
    }
  header.decl_state_size = decl_state_size;

  header.main_size = ob->main_stream->total_size;
  header.string_size = ob->string_stream->total_size;

  lto_write_data (&header, sizeof header);

  /* Write the main out-decl state, followed by out-decl states of
     functions.  */
  num_decl_states = num_fns + 1;
  lto_write_data (&num_decl_states, sizeof (num_decl_states));
  lto_output_decl_state_refs (ob, out_state);
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state = lto_function_decl_states[idx];
      lto_output_decl_state_refs (ob, fn_out_state);
    }

  lto_write_stream (ob->main_stream);
  lto_write_stream (ob->string_stream);

  lto_end_section ();

  /* Write the symbol table.  It is used by the linker to determine
     dependencies, so it can be skipped for WPA.  */
  if (!flag_wpa)
    {
      unsigned int streamed_symbols = produce_symtab (ob);
      produce_symtab_extension (ob, streamed_symbols);
    }

  /* Write command line opts.  */
  lto_write_options ();

  /* Deallocate memory and clean up.  */
  for (idx = 0; idx < num_fns; idx++)
    {
      fn_out_state =
        lto_function_decl_states[idx];
      lto_delete_out_decl_state (fn_out_state);
    }
  lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
  lto_function_decl_states.release ();
  destroy_output_block (ob);
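  /* The mode table is only needed when streaming to an offload compiler,
     which may number machine modes differently from the host.  */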
  if (lto_stream_offload_p)
    lto_write_mode_table ();
}