1 /* Write the GIMPLE representation to a file stream.
2 
3    Copyright (C) 2009-2021 Free Software Foundation, Inc.
4    Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5    Re-implemented by Diego Novillo <dnovillo@google.com>
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
43 #include "debug.h"
44 #include "omp-offload.h"
45 #include "print-tree.h"
46 #include "tree-dfa.h"
47 #include "file-prefix-map.h" /* remap_debug_filename()  */
48 #include "output.h"
49 #include "ipa-utils.h"
50 #include "toplev.h"
51 
52 
53 static void lto_write_tree (struct output_block*, tree, bool);
54 
/* Clear the line info stored in output block OB.  */
56 
57 static void
clear_line_info (struct output_block *ob)
59 {
60   ob->current_file = NULL;
61   ob->current_line = 0;
62   ob->current_col = 0;
63   ob->current_sysp = false;
64   ob->reset_locus = true;
65   ob->emit_pwd = true;
  /* Initialize to something that will never appear as a block,
     so that the first location with a block in a function etc.
     always streams a change_block bit and the first block.  */
69   ob->current_block = void_node;
70 }
71 
72 
/* Create the output block and return it.  SECTION_TYPE is
   LTO_section_function_body or LTO_section_static_initializer.  */
75 
76 struct output_block *
create_output_block (enum lto_section_type section_type)
78 {
79   struct output_block *ob = XCNEW (struct output_block);
80   if (streamer_dump_file)
81     fprintf (streamer_dump_file, "Creating output block for %s\n",
82 	     lto_section_name[section_type]);
83 
84   ob->section_type = section_type;
85   ob->decl_state = lto_get_out_decl_state ();
  /* Only the global decl stream in non-WPA mode will ever be considered
     by tree merging.  */
88   if (!flag_wpa && section_type == LTO_section_decls)
89     ob->local_trees = new (hash_set <tree>);
90   ob->main_stream = XCNEW (struct lto_output_stream);
91   ob->string_stream = XCNEW (struct lto_output_stream);
92   ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
93 
94   if (section_type == LTO_section_function_body)
95     ob->cfg_stream = XCNEW (struct lto_output_stream);
96 
97   clear_line_info (ob);
98 
99   ob->string_hash_table = new hash_table<string_slot_hasher> (37);
100   gcc_obstack_init (&ob->obstack);
101 
102   return ob;
103 }
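/* A typical lifecycle, as a sketch: an output block is created for a
   section, the streamer_* routines append data to its streams, and the
   block is released with destroy_output_block once the section has been
   written out, e.g.

     struct output_block *ob = create_output_block (LTO_section_function_body);
     ... stream the function body into ob->main_stream ...
     destroy_output_block (ob);  */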
104 
105 
106 /* Destroy the output block OB.  */
107 
108 void
destroy_output_block (struct output_block *ob)
110 {
111   enum lto_section_type section_type = ob->section_type;
112 
113   delete ob->string_hash_table;
114   ob->string_hash_table = NULL;
115   delete ob->local_trees;
116 
117   free (ob->main_stream);
118   free (ob->string_stream);
119   if (section_type == LTO_section_function_body)
120     free (ob->cfg_stream);
121 
122   streamer_tree_cache_delete (ob->writer_cache);
123   obstack_free (&ob->obstack, NULL);
124 
125   free (ob);
126 }
127 
128 
129 /* Wrapper around variably_modified_type_p avoiding type modification
130    during WPA streaming.  */
131 
132 static bool
lto_variably_modified_type_p (tree type)
134 {
135   return (in_lto_p
136 	  ? TYPE_LANG_FLAG_0 (TYPE_MAIN_VARIANT (type))
137 	  : variably_modified_type_p (type, NULL_TREE));
138 }
139 
140 
/* Return true if tree node T is written to various tables.  For these
   nodes, we sometimes want to write their physical representation
   (via lto_output_tree), and sometimes we need to emit an index
   reference into a table (via lto_output_tree_ref).  */
145 
146 static bool
tree_is_indexable (tree t)
148 {
  /* Parameters and return values of functions of variably modified types
     must go to the global stream, because they may be used in the type
     definition.  */
152   if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
153       && DECL_CONTEXT (t))
154     return lto_variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)));
155   /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.
156      We should no longer need to stream it.  */
157   else if (TREE_CODE (t) == IMPORTED_DECL)
158     gcc_unreachable ();
159   else if (TREE_CODE (t) == LABEL_DECL)
160     return FORCED_LABEL (t) || DECL_NONLOCAL (t);
161   else if (((VAR_P (t) && !TREE_STATIC (t))
162 	    || TREE_CODE (t) == TYPE_DECL
163 	    || TREE_CODE (t) == CONST_DECL
164 	    || TREE_CODE (t) == NAMELIST_DECL)
165 	   && decl_function_context (t))
166     return false;
167   else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
168     return false;
169   /* Variably modified types need to be streamed alongside function
170      bodies because they can refer to local entities.  Together with
171      them we have to localize their members as well.
172      ???  In theory that includes non-FIELD_DECLs as well.  */
173   else if (TYPE_P (t)
174 	   && lto_variably_modified_type_p (t))
175     return false;
176   else if (TREE_CODE (t) == FIELD_DECL
177 	   && lto_variably_modified_type_p (DECL_CONTEXT (t)))
178     return false;
179   else
180     return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
181 }
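/* For example, a TREE_STATIC VAR_DECL is indexable and is referenced
   through the global decl stream, whereas a function-local TYPE_DECL or a
   DEBUG_EXPR_DECL is not and is streamed as part of the function body.  */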
182 
183 
184 /* Output info about new location into bitpack BP.
185    After outputting bitpack, lto_output_location_data has
186    to be done to output actual data.  */
187 
188 static void
lto_output_location_1 (struct output_block *ob, struct bitpack_d *bp,
190 		       location_t orig_loc, bool block_p)
191 {
192   location_t loc = LOCATION_LOCUS (orig_loc);
193 
194   if (loc >= RESERVED_LOCATION_COUNT)
195     {
196       expanded_location xloc = expand_location (loc);
197 
198       if (ob->reset_locus)
199 	{
200 	  if (xloc.file == NULL)
201 	    ob->current_file = "";
202 	  if (xloc.line == 0)
203 	    ob->current_line = 1;
204 	  if (xloc.column == 0)
205 	    ob->current_col = 1;
206 	  ob->reset_locus = false;
207 	}
208 
      /* As RESERVED_LOCATION_COUNT is 2, we can use the spare value of
	 3 without wasting additional bits to signal a file change.
	 If RESERVED_LOCATION_COUNT changes, reconsider this.  */
212       gcc_checking_assert (RESERVED_LOCATION_COUNT == 2);
213       bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT + 1,
214 			    RESERVED_LOCATION_COUNT
215 			    + (ob->current_file != xloc.file));
216 
217       bp_pack_value (bp, ob->current_line != xloc.line, 1);
218       bp_pack_value (bp, ob->current_col != xloc.column, 1);
219 
220       if (ob->current_file != xloc.file)
221 	{
222 	  bool stream_pwd = false;
223 	  const char *remapped = remap_debug_filename (xloc.file);
224 	  if (ob->emit_pwd && remapped && !IS_ABSOLUTE_PATH (remapped))
225 	    {
226 	      stream_pwd = true;
227 	      ob->emit_pwd = false;
228 	    }
229 	  bp_pack_value (bp, stream_pwd, 1);
230 	  if (stream_pwd)
231 	    bp_pack_string (ob, bp, get_src_pwd (), true);
232 	  bp_pack_string (ob, bp, remapped, true);
233 	  bp_pack_value (bp, xloc.sysp, 1);
234 	}
235       ob->current_file = xloc.file;
236       ob->current_sysp = xloc.sysp;
237 
238       if (ob->current_line != xloc.line)
239 	bp_pack_var_len_unsigned (bp, xloc.line);
240       ob->current_line = xloc.line;
241 
242       if (ob->current_col != xloc.column)
243 	bp_pack_var_len_unsigned (bp, xloc.column);
244       ob->current_col = xloc.column;
245     }
246   else
247     bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT + 1, loc);
248 
249   if (block_p)
250     {
251       tree block = LOCATION_BLOCK (orig_loc);
252       bp_pack_value (bp, ob->current_block != block, 1);
253       streamer_write_bitpack (bp);
254       if (ob->current_block != block)
255 	lto_output_tree (ob, block, true, true);
256       ob->current_block = block;
257     }
258 }
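/* A sketch of the bitpack layout produced above for a non-reserved
   location (RESERVED_LOCATION_COUNT == 2 is asserted above):

     [0..3]   2 = same file, 3 = file changed
     [1 bit]  line changed
     [1 bit]  column changed
     if the file changed:   pwd-streamed bit, optional pwd string,
			    remapped file name, sysp bit
     if the line changed:   variable-length unsigned line number
     if the column changed: variable-length unsigned column number

   Reserved locations stream only the first field, holding the location
   value itself.  */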
259 
260 /* Output info about new location into bitpack BP.
261    After outputting bitpack, lto_output_location_data has
262    to be done to output actual data.  */
263 
264 void
lto_output_location (struct output_block *ob, struct bitpack_d *bp,
266 		     location_t loc)
267 {
268   lto_output_location_1 (ob, bp, loc, false);
269 }
270 
271 /* Output info about new location into bitpack BP.
272    After outputting bitpack, lto_output_location_data has
273    to be done to output actual data.  Like lto_output_location, but
274    additionally output LOCATION_BLOCK info too and write the BP bitpack.  */
275 
276 void
lto_output_location_and_block (struct output_block *ob, struct bitpack_d *bp,
278 			       location_t loc)
279 {
280   lto_output_location_1 (ob, bp, loc, true);
281 }
282 
283 
/* Look up T in ENCODER.  If T is not found, create a new entry in
   ENCODER for T with the next available index.
   Return the index.  */
288 
289 
290 static unsigned
lto_get_index (struct lto_tree_ref_encoder *encoder, tree t)
292 {
293   bool existed_p;
294 
295   unsigned int &index
296     = encoder->tree_hash_table->get_or_insert (t, &existed_p);
297   if (!existed_p)
298     {
299       index = encoder->trees.length ();
300       if (streamer_dump_file)
301 	{
302 	  print_node_brief (streamer_dump_file, "     Encoding indexable ",
303 			    t, 4);
304 	  fprintf (streamer_dump_file, "  as %i \n", index);
305 	}
306       encoder->trees.safe_push (t);
307     }
308 
309   return index;
310 }
311 
312 
/* EXPR is an indexable tree node.  Store in *TAG and *INDEX the tag and
   index to be used when emitting a reference to EXPR from output
   block OB.  */
316 
317 static void
lto_indexable_tree_ref (struct output_block *ob, tree expr,
319 			enum LTO_tags *tag, unsigned *index)
320 {
321   gcc_checking_assert (tree_is_indexable (expr));
322 
323   if (TREE_CODE (expr) == SSA_NAME)
324     {
325       *tag = LTO_ssa_name_ref;
326       *index = SSA_NAME_VERSION (expr);
327     }
328   else
329     {
330       *tag = LTO_global_stream_ref;
331       *index = lto_get_index (&ob->decl_state->streams[LTO_DECL_STREAM], expr);
332     }
333 }
334 
335 
/* Output a reference to the static or extern variable DECL to OBS.  */
337 
338 void
lto_output_var_decl_ref (struct lto_out_decl_state *decl_state,
340 			 struct lto_output_stream * obs, tree decl)
341 {
342   gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
343   streamer_write_uhwi_stream
344      (obs, lto_get_index (&decl_state->streams[LTO_DECL_STREAM],
345 			  decl));
346 }
347 
348 
/* Output a reference to the function DECL to OBS.  */
350 
351 void
lto_output_fn_decl_ref (struct lto_out_decl_state *decl_state,
353 			struct lto_output_stream * obs, tree decl)
354 {
355   gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL);
356   streamer_write_uhwi_stream
357      (obs, lto_get_index (&decl_state->streams[LTO_DECL_STREAM], decl));
358 }
359 
360 /* Return true if EXPR is a tree node that can be written to disk.  */
361 
362 static inline bool
lto_is_streamable (tree expr)
364 {
365   enum tree_code code = TREE_CODE (expr);
366 
367   /* Notice that we reject SSA_NAMEs as well.  We only emit the SSA
368      name version in lto_output_tree_ref (see output_ssa_names).  */
369   return !is_lang_specific (expr)
370 	 && code != SSA_NAME
371 	 && code != LANG_TYPE
372 	 && code != MODIFY_EXPR
373 	 && code != INIT_EXPR
374 	 && code != TARGET_EXPR
375 	 && code != BIND_EXPR
376 	 && code != WITH_CLEANUP_EXPR
377 	 && code != STATEMENT_LIST
378 	 && (code == CASE_LABEL_EXPR
379 	     || code == DECL_EXPR
380 	     || TREE_CODE_CLASS (code) != tcc_statement);
381 }
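/* For instance, codes that do not appear in the GIMPLE bodies LTO streams,
   such as BIND_EXPR, STATEMENT_LIST and MODIFY_EXPR, are rejected here,
   while CASE_LABEL_EXPR and DECL_EXPR are accepted even though they belong
   to the tcc_statement class.  */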
382 
/* Very rough estimate of the streaming size of the initializer.  If we
   ignored the presence of strings, we could simply count the number of
   non-indexable tree nodes and the number of references to indexable
   nodes.  Strings, however, may be very large and we do not want to dump
   them into the global stream.

   Count the size of the initializer while the remaining budget in DATA
   stays positive.  */
389 
390 static tree
subtract_estimated_size (tree *tp, int *ws, void *data)
392 {
393   long *sum = (long *)data;
394   if (tree_is_indexable (*tp))
395     {
      /* An indexable tree is a single reference into the global stream.
	 Guess it may be about 4 bytes.  */
398       *sum -= 4;
399       *ws = 0;
400     }
401   /* String table entry + base of tree node needs to be streamed.  */
402   if (TREE_CODE (*tp) == STRING_CST)
403     *sum -= TREE_STRING_LENGTH (*tp) + 8;
404   else
405     {
      /* Identifiers are also variable length but should not appear
	 naked in a constructor.  */
408       gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
      /* We do not really attempt to work out the size of a pickled tree,
	 as it is very variable.  Make it bigger than a reference.  */
411       *sum -= 16;
412     }
413   if (*sum < 0)
414     return *tp;
415   return NULL_TREE;
416 }
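/* For example, a caller can walk an initializer with a byte budget and
   treat a non-NULL result as "too large to stream inline", as
   get_symbol_initial_value does below with a budget of 30 bytes:

     long max_size = 30;
     if (walk_tree (&initial, subtract_estimated_size, (void *) &max_size,
		    NULL))
       initial = error_mark_node;  */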
417 
418 
/* For EXPR, look up and return what we want to stream as its DECL_INITIAL.  */
420 
421 static tree
get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
423 {
424   gcc_checking_assert (DECL_P (expr)
425 		       && TREE_CODE (expr) != FUNCTION_DECL
426 		       && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
427 
428   /* Handle DECL_INITIAL for symbols.  */
429   tree initial = DECL_INITIAL (expr);
430   if (VAR_P (expr)
431       && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
432       && !DECL_IN_CONSTANT_POOL (expr)
433       && initial)
434     {
435       varpool_node *vnode;
436       /* Extra section needs about 30 bytes; do not produce it for simple
437 	 scalar values.  */
438       if (!(vnode = varpool_node::get (expr))
439 	  || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
440         initial = error_mark_node;
441       if (initial != error_mark_node)
442 	{
443 	  long max_size = 30;
444 	  if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
445 			 NULL))
446 	    initial = error_mark_node;
447 	}
448     }
449 
450   return initial;
451 }
452 
453 
454 /* Output reference to tree T to the stream.
455    Assume that T is already in encoder cache.
456    This is used to stream tree bodies where we know the DFS walk arranged
457    everything to cache.  Must be matched with stream_read_tree_ref.  */
458 
459 void
stream_write_tree_ref (struct output_block *ob, tree t)
461 {
462   if (!t)
463     streamer_write_zero (ob);
464   else
465     {
466       unsigned int ix;
467       bool existed_p = streamer_tree_cache_lookup (ob->writer_cache, t, &ix);
468       if (existed_p)
469 	streamer_write_hwi (ob, ix + 1);
470       else
471 	{
472 	  enum LTO_tags tag;
473 	  unsigned ix;
474 	  int id = 0;
475 
476 	  lto_indexable_tree_ref (ob, t, &tag, &ix);
477 	  if (tag == LTO_ssa_name_ref)
478 	    id = 1;
479 	  else
480 	    gcc_checking_assert (tag == LTO_global_stream_ref);
481 	  streamer_write_hwi (ob, -(int)(ix * 2 + id + 1));
482 	}
483       if (streamer_debugging)
484 	streamer_write_uhwi (ob, TREE_CODE (t));
485     }
486 }
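/* A note on the encoding used above: 0 stands for a NULL tree, a positive
   value N refers to slot N - 1 of the writer cache, and a negative value
   -N encodes an indexable reference where (N - 1) / 2 is the index and the
   low bit of N - 1 selects the SSA name stream (1) over the global decl
   stream (0).  */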
487 
488 
489 
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  */
494 
495 static void
lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
497 {
498   if (streamer_dump_file)
499     {
500       print_node_brief (streamer_dump_file, "     Streaming body of ",
501 			expr, 4);
502       fprintf (streamer_dump_file, "  to %s\n",
503 	       lto_section_name[ob->section_type]);
504     }
505 
506   /* Pack all the non-pointer fields in EXPR into a bitpack and write
507      the resulting bitpack.  */
508   streamer_write_tree_bitfields (ob, expr);
509 
510   /* Write all the pointer fields in EXPR.  */
511   streamer_write_tree_body (ob, expr);
512 
513   /* Write any LTO-specific data to OB.  */
514   if (DECL_P (expr)
515       && TREE_CODE (expr) != FUNCTION_DECL
516       && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
517     {
518       /* Handle DECL_INITIAL for symbols.  */
519       tree initial = get_symbol_initial_value
520 			 (ob->decl_state->symtab_node_encoder, expr);
521       stream_write_tree (ob, initial, ref_p);
522     }
523 
524   /* Stream references to early generated DIEs.  Keep in sync with the
525      trees handled in dwarf2out_die_ref_for_decl.  */
526   if ((DECL_P (expr)
527        && TREE_CODE (expr) != FIELD_DECL
528        && TREE_CODE (expr) != DEBUG_EXPR_DECL
529        && TREE_CODE (expr) != TYPE_DECL)
530       || TREE_CODE (expr) == BLOCK)
531     {
532       const char *sym;
533       unsigned HOST_WIDE_INT off;
534       if (debug_info_level > DINFO_LEVEL_NONE
535 	  && debug_hooks->die_ref_for_decl (expr, &sym, &off))
536 	{
537 	  streamer_write_string (ob, ob->main_stream, sym, true);
538 	  streamer_write_uhwi (ob, off);
539 	}
540       else
541 	streamer_write_string (ob, ob->main_stream, NULL, true);
542     }
543 }
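/* Putting the pieces together: a streamed tree body consists of the
   bitpacked non-pointer fields, the pointer fields (written as references
   or nested trees), the DECL_INITIAL replacement for eligible decls, and,
   when early debug is present, either a DIE symbol plus offset pair or an
   empty string.  */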
544 
/* Write a physical representation of tree node EXPR to output block
   OB.  If REF_P is true, the leaves of EXPR are emitted as references
   via lto_output_tree_ref.  */
549 
550 static void
lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
552 {
553   if (!lto_is_streamable (expr))
554     internal_error ("tree code %qs is not supported in LTO streams",
555 		    get_tree_code_name (TREE_CODE (expr)));
556 
557   /* Write the header, containing everything needed to materialize
558      EXPR on the reading side.  */
559   streamer_write_tree_header (ob, expr);
560 
561   lto_write_tree_1 (ob, expr, ref_p);
562 }
563 
/* Emit the physical representation of tree node EXPR to output block OB.
   If THIS_REF_P is true, the leaves of EXPR are emitted as references via
   lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */
567 
568 static void
lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
570 		   bool ref_p, bool this_ref_p)
571 {
572   unsigned ix;
573 
574   gcc_checking_assert (expr != NULL_TREE
575 		       && !(this_ref_p && tree_is_indexable (expr)));
576 
577   bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
578 					      expr, hash, &ix);
579   gcc_assert (!exists_p);
580   if (TREE_CODE (expr) == INTEGER_CST
581       && !TREE_OVERFLOW (expr))
582     {
583       /* Shared INTEGER_CST nodes are special because they need their
584 	 original type to be materialized by the reader (to implement
585 	 TYPE_CACHED_VALUES).  */
586       streamer_write_integer_cst (ob, expr);
587     }
588   else
589     {
590       /* This is the first time we see EXPR, write its fields
591 	 to OB.  */
592       lto_write_tree (ob, expr, ref_p);
593     }
594 }
595 
596 class DFS
597 {
598 public:
599   DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
600        bool single_p);
601   ~DFS ();
602 
603   struct scc_entry
604   {
605     tree t;
606     hashval_t hash;
607   };
608   auto_vec<scc_entry,32> sccstack;
609 
610 private:
611   struct sccs
612   {
613     unsigned int dfsnum;
614     unsigned int low;
615   };
616   struct worklist
617   {
618     tree expr;
619     sccs *from_state;
620     sccs *cstate;
621     bool ref_p;
622     bool this_ref_p;
623   };
  /* Maximum index into the SCC stack that contains a local tree.  */
625   int max_local_entry;
626 
627   static int scc_entry_compare (const void *, const void *);
628 
629   void DFS_write_tree_body (struct output_block *ob,
630 			    tree expr, sccs *expr_state, bool ref_p);
631 
632   void DFS_write_tree (struct output_block *ob, sccs *from_state,
633 		       tree expr, bool ref_p, bool this_ref_p);
634 
635   hashval_t
636   hash_scc (struct output_block *ob, unsigned first, unsigned size,
637 	    bool ref_p, bool this_ref_p);
638 
639   hash_map<tree, sccs *> sccstate;
640   auto_vec<worklist, 32> worklist_vec;
641   struct obstack sccstate_obstack;
642 };
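/* The streaming DFS below is an iterative variant of Tarjan's SCC
   algorithm: worklist_vec replaces recursion, sccs::dfsnum and sccs::low
   play their usual roles, and each completed SCC is popped from sccstack
   and streamed either as a single tree or wrapped in an LTO_trees or
   LTO_tree_scc record.  */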
643 
/* Return true if tree T cannot be merged with a structurally identical tree
   in another translation unit.  During stream-out this information is
   propagated to all trees referring to T, and those are then streamed
   without the additional information needed by the tree merging in
   lto-common.c (in particular, SCC hash codes are not streamed).

   TRANSLATION_UNIT_DECL is handled specially since references to it do
   not make other trees local as well.  */
652 
653 static bool
local_tree_p (tree t)
655 {
656   switch (TREE_CODE (t))
657     {
658     case LABEL_DECL:
659       return true;
660     case NAMESPACE_DECL:
661       return !DECL_NAME (t);
662     case VAR_DECL:
663     case FUNCTION_DECL:
664       return !TREE_PUBLIC (t) && !DECL_EXTERNAL (t);
665     case RECORD_TYPE:
666     case UNION_TYPE:
667     case ENUMERAL_TYPE:
668       /* Anonymous namespace types are local.
669 	 Only work hard for main variants;
670 	 variant types will inherit locality.  */
671       return TYPE_MAIN_VARIANT (t) == t
672 	     && odr_type_p (t) && type_with_linkage_p (t)
673 	     && type_in_anonymous_namespace_p (t);
674     default:
675       return false;
676     }
677 }
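/* For example, a static function, an unnamed namespace, or an ODR type
   declared in an anonymous namespace is local in this sense and therefore
   exempt from cross-unit tree merging.  */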
678 
679 /* Emit the physical representation of tree node EXPR to output block OB,
680    using depth-first search on the subgraph.  If THIS_REF_P is true, the
681    leaves of EXPR are emitted as references via lto_output_tree_ref.
682    REF_P is used for streaming siblings of EXPR.  If SINGLE_P is true,
683    this is for a rewalk of a single leaf SCC.  */
684 
DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
686 	  bool single_p)
687 {
688   unsigned int next_dfs_num = 1;
689 
690   max_local_entry = -1;
691   gcc_obstack_init (&sccstate_obstack);
692   DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
693   while (!worklist_vec.is_empty ())
694     {
695       worklist &w = worklist_vec.last ();
696       expr = w.expr;
697       sccs *from_state = w.from_state;
698       sccs *cstate = w.cstate;
699       ref_p = w.ref_p;
700       this_ref_p = w.this_ref_p;
701       if (cstate == NULL)
702 	{
703 	  sccs **slot = &sccstate.get_or_insert (expr);
704 	  cstate = *slot;
705 	  if (cstate)
706 	    {
707 	      gcc_checking_assert (from_state);
708 	      if (cstate->dfsnum < from_state->dfsnum)
709 		from_state->low = MIN (cstate->dfsnum, from_state->low);
710 	      worklist_vec.pop ();
711 	      continue;
712 	    }
713 
714 	  scc_entry e = { expr, 0 };
715 	  /* Not yet visited.  DFS recurse and push it onto the stack.  */
716 	  *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
717 	  if (ob->local_trees && local_tree_p (expr))
718 	    max_local_entry = sccstack.length ();
719 	  sccstack.safe_push (e);
720 	  cstate->dfsnum = next_dfs_num++;
721 	  cstate->low = cstate->dfsnum;
722 	  w.cstate = cstate;
723 
724 	  if (TREE_CODE (expr) == INTEGER_CST
725 	      && !TREE_OVERFLOW (expr))
726 	    DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
727 	  else
728 	    {
729 	      DFS_write_tree_body (ob, expr, cstate, ref_p);
730 
731 	      /* Walk any LTO-specific edges.  */
732 	      if (DECL_P (expr)
733 		  && TREE_CODE (expr) != FUNCTION_DECL
734 		  && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
735 		{
736 		  /* Handle DECL_INITIAL for symbols.  */
737 		  tree initial
738 		    = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
739 						expr);
740 		  DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
741 		}
742 	    }
743 	  continue;
744 	}
745 
746       /* See if we found an SCC.  */
747       if (cstate->low == cstate->dfsnum)
748 	{
749 	  unsigned first, size;
750 	  tree x;
751 
	  /* If we are re-walking a single leaf SCC just pop it and
	     let the earlier worklist item access the sccstack.  */
754 	  if (single_p)
755 	    {
756 	      worklist_vec.pop ();
757 	      continue;
758 	    }
759 
760 	  /* Pop the SCC and compute its size.  */
761 	  first = sccstack.length ();
762 	  do
763 	    {
764 	      x = sccstack[--first].t;
765 	    }
766 	  while (x != expr);
767 	  size = sccstack.length () - first;
768 
769 	  /* No need to compute hashes for LTRANS units, we don't perform
770 	     any merging there.  */
771 	  hashval_t scc_hash = 0;
772 	  unsigned scc_entry_len = 0;
773 	  bool local_to_unit = !ob->local_trees
774 			       || max_local_entry >= (int)first;
775 
776 	  /* Remember that trees are local so info gets propagated to other
777 	     SCCs.  */
778 	  if (local_to_unit && ob->local_trees)
779 	    {
780 	      for (unsigned i = 0; i < size; ++i)
781 		ob->local_trees->add (sccstack[first + i].t);
782 	    }
783 
	  /* As a special case do not stream TRANSLATION_UNIT_DECL as a shared
	     tree.  We cannot mark it local because references to it do not
	     make other trees local (all global decls refer to it via
	     CONTEXT).  */
788 	  if (size == 1
789 	      && TREE_CODE (sccstack[first].t) == TRANSLATION_UNIT_DECL)
790 	    local_to_unit = true;
791 
792 	  if (!local_to_unit)
793 	    {
794 	      scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
795 
796 	      /* Put the entries with the least number of collisions first.  */
797 	      unsigned entry_start = 0;
798 	      scc_entry_len = size + 1;
799 	      for (unsigned i = 0; i < size;)
800 		{
801 		  unsigned from = i;
802 		  for (i = i + 1; i < size
803 		       && (sccstack[first + i].hash
804 			   == sccstack[first + from].hash); ++i)
805 		    ;
806 		  if (i - from < scc_entry_len)
807 		    {
808 		      scc_entry_len = i - from;
809 		      entry_start = from;
810 		    }
811 		}
812 	      for (unsigned i = 0; i < scc_entry_len; ++i)
813 		std::swap (sccstack[first + i],
814 			   sccstack[first + entry_start + i]);
815 
816 	      /* We already sorted SCC deterministically in hash_scc.  */
817 
818 	      /* Check that we have only one SCC.
		 Naturally we may have conflicts if the hash function is not
		 strong enough.  Let's see how far this gets.  */
821 	      gcc_checking_assert (scc_entry_len == 1);
822 	    }
823 
824 	  worklist_vec.pop ();
825 
826 	  unsigned int prev_size = ob->main_stream->total_size;
827 
828 	  /* Only global decl sections are considered by tree merging.  */
829 	  if (ob->section_type != LTO_section_decls)
830 	    {
	      /* If this is the original tree we stream and it forms an SCC
		 by itself then we do not need to stream the SCC at all.  */
833 	      if (worklist_vec.is_empty () && first == 0 && size == 1)
834 		 return;
835 	      if (streamer_dump_file)
836 		{
837 		  fprintf (streamer_dump_file,
838 			   "     Start of LTO_trees of size %i\n", size);
839 		}
840 	      streamer_write_record_start (ob, LTO_trees);
841 	      streamer_write_uhwi (ob, size);
842 	    }
843 	  /* Write LTO_tree_scc if tree merging is going to be performed.  */
844 	  else if (!local_to_unit
845 		   /* These are special since sharing is not done by tree
		      merging machinery.  We cannot special-case them earlier
847 		      because we still need to compute hash for further sharing
848 		      of trees referring to them.  */
849 		   && (size != 1
850 		       || (TREE_CODE (sccstack[first].t) != IDENTIFIER_NODE
851 			   && (TREE_CODE (sccstack[first].t) != INTEGER_CST
852 			       || TREE_OVERFLOW (sccstack[first].t)))))
853 
854 	    {
855 	      gcc_checking_assert (ob->section_type == LTO_section_decls);
856 	      if (streamer_dump_file)
857 		{
858 		  fprintf (streamer_dump_file,
859 			   "     Start of LTO_tree_scc of size %i\n", size);
860 		}
861 	      streamer_write_record_start (ob, LTO_tree_scc);
	      /* In the vast majority of cases scc_entry_len is 1 and size is
		 a small integer.  Use the extra bit of size to stream info
		 about the exceptions.  */
865 	      streamer_write_uhwi (ob, size * 2 + (scc_entry_len != 1));
866 	      if (scc_entry_len != 1)
867 		streamer_write_uhwi (ob, scc_entry_len);
868 	      streamer_write_uhwi (ob, scc_hash);
869 	    }
	  /* Non-trivial SCCs must be packed into LTO_trees blocks so that
	     forward references work correctly.  */
872 	  else if (size != 1)
873 	    {
874 	      if (streamer_dump_file)
875 		{
876 		  fprintf (streamer_dump_file,
877 			   "     Start of LTO_trees of size %i\n", size);
878 		}
879 	      streamer_write_record_start (ob, LTO_trees);
880 	      streamer_write_uhwi (ob, size);
881 	    }
882 	  else if (streamer_dump_file)
883 	    {
884 	      fprintf (streamer_dump_file, "     Streaming single tree\n");
885 	    }
886 
887 	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
888 	     All INTEGER_CSTs need to be handled this way as we need
889 	     their type to materialize them.  Also builtins are handled
890 	     this way.  */
891 	  if (size == 1)
892 	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
893 	  else
894 	    {
895 
896 	      /* Write all headers and populate the streamer cache.  */
897 	      for (unsigned i = 0; i < size; ++i)
898 		{
899 		  hashval_t hash = sccstack[first+i].hash;
900 		  tree t = sccstack[first+i].t;
901 		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
902 							      t, hash, NULL);
903 		  gcc_assert (!exists_p);
904 
905 		  if (!lto_is_streamable (t))
906 		    internal_error ("tree code %qs is not supported "
907 				    "in LTO streams",
908 				    get_tree_code_name (TREE_CODE (t)));
909 
910 		  /* Write the header, containing everything needed to
911 		     materialize EXPR on the reading side.  */
912 		  streamer_write_tree_header (ob, t);
913 		}
914 
915 	      /* Write the bitpacks and tree references.  */
916 	      for (unsigned i = 0; i < size; ++i)
917 		lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
918 	    }
919 	  if (streamer_dump_file)
920 	    fprintf (streamer_dump_file, "     %u bytes\n",
921 		     ob->main_stream->total_size - prev_size);
922 
923 	  /* Finally truncate the vector.  */
924 	  sccstack.truncate (first);
925 	  if ((int)first <= max_local_entry)
926 	    max_local_entry = first - 1;
927 
928 	  if (from_state)
929 	    from_state->low = MIN (from_state->low, cstate->low);
930 	  continue;
931 	}
932 
933       gcc_checking_assert (from_state);
934       from_state->low = MIN (from_state->low, cstate->low);
935       if (cstate->dfsnum < from_state->dfsnum)
936 	from_state->low = MIN (cstate->dfsnum, from_state->low);
937       worklist_vec.pop ();
938     }
939 }
940 
DFS::~DFS ()
942 {
943   obstack_free (&sccstate_obstack, NULL);
944 }
945 
946 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
947    DFS recurse for all tree edges originating from it.  */
948 
949 void
DFS::DFS_write_tree_body (struct output_block *ob,
951 			  tree expr, sccs *expr_state, bool ref_p)
952 {
953 #define DFS_follow_tree_edge(DEST) \
954   DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
955 
956   enum tree_code code;
957 
958   code = TREE_CODE (expr);
959 
960   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
961     {
962       if (TREE_CODE (expr) != IDENTIFIER_NODE)
963 	DFS_follow_tree_edge (TREE_TYPE (expr));
964     }
965 
966   if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
967     {
968       unsigned int count = vector_cst_encoded_nelts (expr);
969       for (unsigned int i = 0; i < count; ++i)
970 	DFS_follow_tree_edge (VECTOR_CST_ENCODED_ELT (expr, i));
971     }
972 
973   if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
974     for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
975       DFS_follow_tree_edge (POLY_INT_CST_COEFF (expr, i));
976 
977   if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
978     {
979       DFS_follow_tree_edge (TREE_REALPART (expr));
980       DFS_follow_tree_edge (TREE_IMAGPART (expr));
981     }
982 
983   if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
984     {
985       /* Drop names that were created for anonymous entities.  */
986       if (DECL_NAME (expr)
987 	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
988 	  && IDENTIFIER_ANON_P (DECL_NAME (expr)))
989 	;
990       else
991 	DFS_follow_tree_edge (DECL_NAME (expr));
992       if (TREE_CODE (expr) != TRANSLATION_UNIT_DECL
993 	  && ! DECL_CONTEXT (expr))
994 	DFS_follow_tree_edge ((*all_translation_units)[0]);
995       else
996 	DFS_follow_tree_edge (DECL_CONTEXT (expr));
997     }
998 
999   if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1000     {
1001       DFS_follow_tree_edge (DECL_SIZE (expr));
1002       DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
1003 
1004       /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
1005 	 special handling in LTO, it must be handled by streamer hooks.  */
1006 
1007       DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
1008 
1009       /* We use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
1010 	 declarations which should be eliminated by decl merging. Be sure none
1011 	 leaks to this point.  */
1012       gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
1013       DFS_follow_tree_edge (DECL_ABSTRACT_ORIGIN (expr));
1014 
1015       if ((VAR_P (expr)
1016 	   || TREE_CODE (expr) == PARM_DECL)
1017 	  && DECL_HAS_VALUE_EXPR_P (expr))
1018 	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
1019       if (VAR_P (expr)
1020 	  && DECL_HAS_DEBUG_EXPR_P (expr))
1021 	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
1022     }
1023 
1024   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1025     {
1026       /* Make sure we don't inadvertently set the assembler name.  */
1027       if (DECL_ASSEMBLER_NAME_SET_P (expr))
1028 	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
1029     }
1030 
1031   if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1032     {
1033       DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
1034       DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
1035       DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
1036       DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
1037       gcc_checking_assert (!DECL_FCONTEXT (expr));
1038     }
1039 
1040   if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1041     {
1042       gcc_checking_assert (DECL_VINDEX (expr) == NULL);
1043       DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
1044       DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
1045       DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
1046     }
1047 
1048   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1049     {
1050       DFS_follow_tree_edge (TYPE_SIZE (expr));
1051       DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
1052       DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
1053       DFS_follow_tree_edge (TYPE_NAME (expr));
1054       /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
1055 	 reconstructed during fixup.  */
1056       /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
1057 	 during fixup.  */
1058       DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
1059       DFS_follow_tree_edge (TYPE_CONTEXT (expr));
1060       /* TYPE_CANONICAL is re-computed during type merging, so no need
1061 	 to follow it here.  */
1062       /* Do not stream TYPE_STUB_DECL; it is not needed by LTO but currently
1063 	 it cannot be freed by free_lang_data without triggering ICEs in
1064 	 langhooks.  */
1065     }
1066 
1067   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1068     {
1069       if (TREE_CODE (expr) == ARRAY_TYPE)
1070 	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
1071       else if (RECORD_OR_UNION_TYPE_P (expr))
1072 	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
1073 	  DFS_follow_tree_edge (t);
1074       else if (TREE_CODE (expr) == FUNCTION_TYPE
1075 	       || TREE_CODE (expr) == METHOD_TYPE)
1076 	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
1077 
1078       if (!POINTER_TYPE_P (expr))
1079 	DFS_follow_tree_edge (TYPE_MIN_VALUE_RAW (expr));
1080       DFS_follow_tree_edge (TYPE_MAX_VALUE_RAW (expr));
1081     }
1082 
1083   if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1084     {
1085       DFS_follow_tree_edge (TREE_PURPOSE (expr));
1086       DFS_follow_tree_edge (TREE_VALUE (expr));
1087       DFS_follow_tree_edge (TREE_CHAIN (expr));
1088     }
1089 
1090   if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1091     {
1092       for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
1093 	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
1094     }
1095 
1096   if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1097     {
1098       for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
1099 	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
1100       DFS_follow_tree_edge (TREE_BLOCK (expr));
1101     }
1102 
1103   if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
1104     {
1105       for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
1106 	{
1107 	  /* We would have to stream externals in the block chain as
1108 	     non-references but we should have dropped them in
1109 	     free-lang-data.  */
1110 	  gcc_assert (!VAR_OR_FUNCTION_DECL_P (t) || !DECL_EXTERNAL (t));
1111 	  DFS_follow_tree_edge (t);
1112 	}
1113 
1114       DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
1115       DFS_follow_tree_edge (BLOCK_ABSTRACT_ORIGIN (expr));
1116 
1117       /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
1118 	 information for early inlined BLOCKs so drop it on the floor instead
1119 	 of ICEing in dwarf2out.c.  */
1120 
      /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN are not live at LTO
	 streaming time.  */
1123 
1124       /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
1125 	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
1126     }
1127 
1128   if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1129     {
1130       unsigned i;
1131       tree t;
1132 
1133       /* Note that the number of BINFO slots has already been emitted in
1134 	 EXPR's header (see streamer_write_tree_header) because this length
1135 	 is needed to build the empty BINFO node on the reader side.  */
1136       FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
1137 	DFS_follow_tree_edge (t);
1138       DFS_follow_tree_edge (BINFO_OFFSET (expr));
1139       DFS_follow_tree_edge (BINFO_VTABLE (expr));
1140 
1141       /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX,
1142 	 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
1143 	 by C++ FE only.  */
1144     }
1145 
1146   if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1147     {
1148       unsigned i;
1149       tree index, value;
1150 
1151       FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
1152 	{
1153 	  DFS_follow_tree_edge (index);
1154 	  DFS_follow_tree_edge (value);
1155 	}
1156     }
1157 
1158   if (code == OMP_CLAUSE)
1159     {
1160       int i;
1161       for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
1162 	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
1163       DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
1164     }
1165 
1166 #undef DFS_follow_tree_edge
1167 }
1168 
/* Return a hash value for the tree T.
   CACHE holds hash values of trees outside the current SCC.  MAP, if
   non-NULL, may hold hash values of trees inside the current SCC.  */
1172 
1173 static hashval_t
hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
1175 {
1176   inchash::hash hstate;
1177 
1178 #define visit(SIBLING) \
1179   do { \
1180     unsigned ix; \
1181     if (!SIBLING) \
1182       hstate.add_int (0); \
1183     else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
1184       hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
1185     else if (map) \
1186       hstate.add_int (*map->get (SIBLING)); \
1187     else \
1188       hstate.add_int (1); \
1189   } while (0)
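  /* In other words: an edge to a tree already outside the current SCC
     contributes that tree's cached hash, an edge to a tree inside the SCC
     contributes the value recorded in MAP on a previous iteration, and in
     the first pass (MAP == NULL) every intra-SCC edge hashes to the
     constant 1.  */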
1190 
1191   /* Hash TS_BASE.  */
1192   enum tree_code code = TREE_CODE (t);
1193   hstate.add_int (code);
1194   if (!TYPE_P (t))
1195     {
1196       hstate.add_flag (TREE_SIDE_EFFECTS (t));
1197       hstate.add_flag (TREE_CONSTANT (t));
1198       hstate.add_flag (TREE_READONLY (t));
1199       hstate.add_flag (TREE_PUBLIC (t));
1200     }
1201   hstate.add_flag (TREE_ADDRESSABLE (t));
1202   hstate.add_flag (TREE_THIS_VOLATILE (t));
1203   if (DECL_P (t))
1204     hstate.add_flag (DECL_UNSIGNED (t));
1205   else if (TYPE_P (t))
1206     hstate.add_flag (TYPE_UNSIGNED (t));
1207   if (TYPE_P (t))
1208     hstate.add_flag (TYPE_ARTIFICIAL (t));
1209   else
1210     hstate.add_flag (TREE_NO_WARNING (t));
1211   hstate.add_flag (TREE_NOTHROW (t));
1212   hstate.add_flag (TREE_STATIC (t));
1213   hstate.add_flag (TREE_PROTECTED (t));
1214   hstate.add_flag (TREE_DEPRECATED (t));
1215   if (code != TREE_BINFO)
1216     hstate.add_flag (TREE_PRIVATE (t));
1217   if (TYPE_P (t))
1218     {
1219       hstate.add_flag (AGGREGATE_TYPE_P (t)
1220 		       ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
1221       hstate.add_flag (TYPE_ADDR_SPACE (t));
1222     }
1223   else if (code == SSA_NAME)
1224     hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
1225   hstate.commit_flag ();
1226 
1227   if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1228     hstate.add_wide_int (wi::to_widest (t));
1229 
1230   if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1231     {
1232       REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1233       hstate.add_flag (r.cl);
1234       hstate.add_flag (r.sign);
1235       hstate.add_flag (r.signalling);
1236       hstate.add_flag (r.canonical);
1237       hstate.commit_flag ();
1238       hstate.add_int (r.uexp);
1239       hstate.add (r.sig, sizeof (r.sig));
1240     }
1241 
1242   if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1243     {
1244       FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1245       hstate.add_int (f.mode);
1246       hstate.add_int (f.data.low);
1247       hstate.add_int (f.data.high);
1248     }
1249 
1250   if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1251     {
1252       hstate.add_hwi (DECL_MODE (t));
1253       hstate.add_flag (DECL_NONLOCAL (t));
1254       hstate.add_flag (DECL_VIRTUAL_P (t));
1255       hstate.add_flag (DECL_IGNORED_P (t));
1256       hstate.add_flag (DECL_ABSTRACT_P (t));
1257       hstate.add_flag (DECL_ARTIFICIAL (t));
1258       hstate.add_flag (DECL_USER_ALIGN (t));
1259       hstate.add_flag (DECL_PRESERVE_P (t));
1260       hstate.add_flag (DECL_EXTERNAL (t));
1261       hstate.add_flag (DECL_NOT_GIMPLE_REG_P (t));
1262       hstate.commit_flag ();
1263       hstate.add_int (DECL_ALIGN (t));
1264       if (code == LABEL_DECL)
1265 	{
1266           hstate.add_int (EH_LANDING_PAD_NR (t));
1267 	  hstate.add_int (LABEL_DECL_UID (t));
1268 	}
1269       else if (code == FIELD_DECL)
1270 	{
1271 	  hstate.add_flag (DECL_PACKED (t));
1272 	  hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1273 	  hstate.add_flag (DECL_PADDING_P (t));
1274 	  if (DECL_BIT_FIELD (t))
1275 	    hstate.add_flag (DECL_FIELD_CXX_ZERO_WIDTH_BIT_FIELD (t));
1276 	  else
1277 	    hstate.add_flag (DECL_FIELD_ABI_IGNORED (t));
1278 	  hstate.add_int (DECL_OFFSET_ALIGN (t));
1279 	}
1280       else if (code == VAR_DECL)
1281 	{
1282 	  hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1283 	  hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1284 	}
1285       if (code == RESULT_DECL
1286 	  || code == PARM_DECL
1287 	  || code == VAR_DECL)
1288 	{
1289 	  hstate.add_flag (DECL_BY_REFERENCE (t));
1290 	  if (code == VAR_DECL
1291 	      || code == PARM_DECL)
1292 	    hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1293 	}
1294       hstate.commit_flag ();
1295     }
1296 
1297   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1298     hstate.add_int (DECL_REGISTER (t));
1299 
1300   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1301     {
1302       hstate.add_flag (DECL_COMMON (t));
1303       hstate.add_flag (DECL_DLLIMPORT_P (t));
1304       hstate.add_flag (DECL_WEAK (t));
1305       hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1306       hstate.add_flag (DECL_COMDAT (t));
1307       hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1308       hstate.add_int (DECL_VISIBILITY (t));
1309       if (code == VAR_DECL)
1310 	{
1311 	  /* DECL_IN_TEXT_SECTION is set during final asm output only.  */
1312 	  hstate.add_flag (DECL_HARD_REGISTER (t));
1313 	  hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1314 	}
1315       if (TREE_CODE (t) == FUNCTION_DECL)
1316         {
1317 	  hstate.add_flag (DECL_FINAL_P (t));
1318 	  hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1319 	  hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1320 	}
1321       hstate.commit_flag ();
1322     }
1323 
1324   if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1325     {
1326       hstate.add_int (DECL_BUILT_IN_CLASS (t));
1327       hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1328       hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1329       hstate.add_flag (FUNCTION_DECL_DECL_TYPE (t));
1330       hstate.add_flag (DECL_UNINLINABLE (t));
1331       hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1332       hstate.add_flag (DECL_IS_NOVOPS (t));
1333       hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1334       hstate.add_flag (DECL_IS_MALLOC (t));
1335       hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1336       hstate.add_flag (DECL_STATIC_CHAIN (t));
1337       hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1338       hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1339       hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1340       hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1341       hstate.add_flag (DECL_PURE_P (t));
1342       hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1343       hstate.commit_flag ();
1344       if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1345 	hstate.add_int (DECL_UNCHECKED_FUNCTION_CODE (t));
1346     }
1347 
1348   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1349     {
1350       hstate.add_hwi (TYPE_MODE (t));
      /* TYPE_NO_FORCE_BLK is private to stor-layout and needs
	 no streaming.  */
1353       hstate.add_flag (TYPE_PACKED (t));
1354       hstate.add_flag (TYPE_RESTRICT (t));
1355       hstate.add_flag (TYPE_USER_ALIGN (t));
1356       hstate.add_flag (TYPE_READONLY (t));
1357       if (RECORD_OR_UNION_TYPE_P (t))
1358 	{
1359 	  hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1360 	  hstate.add_flag (TYPE_FINAL_P (t));
1361           hstate.add_flag (TYPE_CXX_ODR_P (t));
1362 	}
1363       else if (code == ARRAY_TYPE)
1364 	hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1365       if (code == ARRAY_TYPE || code == INTEGER_TYPE)
1366         hstate.add_flag (TYPE_STRING_FLAG (t));
1367       if (AGGREGATE_TYPE_P (t))
1368 	hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
1369       hstate.commit_flag ();
1370       hstate.add_int (TYPE_PRECISION (t));
1371       hstate.add_int (TYPE_ALIGN (t));
1372       hstate.add_int (TYPE_EMPTY_P (t));
1373     }
1374 
1375   if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1376     hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1377 			strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1378 
1379   if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1380       /* We don't stream these when passing things to a different target.  */
1381       && !lto_stream_offload_p)
1382     hstate.add_hwi (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1383 
1384   if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1385     hstate.add_hwi (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1386 
1387   if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1388     hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1389 
1390   if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1391     hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1392 
1393   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1394     {
1395       if (code != IDENTIFIER_NODE)
1396 	visit (TREE_TYPE (t));
1397     }
1398 
1399   if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1400     {
1401       unsigned int count = vector_cst_encoded_nelts (t);
1402       for (unsigned int i = 0; i < count; ++i)
1403 	visit (VECTOR_CST_ENCODED_ELT (t, i));
1404     }
1405 
1406   if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
1407     for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1408       visit (POLY_INT_CST_COEFF (t, i));
1409 
1410   if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1411     {
1412       visit (TREE_REALPART (t));
1413       visit (TREE_IMAGPART (t));
1414     }
1415 
1416   if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1417     {
1418       /* Drop names that were created for anonymous entities.  */
1419       if (DECL_NAME (t)
1420 	  && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1421 	  && IDENTIFIER_ANON_P (DECL_NAME (t)))
1422 	;
1423       else
1424 	visit (DECL_NAME (t));
1425       if (DECL_FILE_SCOPE_P (t))
1426 	;
1427       else
1428 	visit (DECL_CONTEXT (t));
1429     }
1430 
1431   if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1432     {
1433       visit (DECL_SIZE (t));
1434       visit (DECL_SIZE_UNIT (t));
1435       visit (DECL_ATTRIBUTES (t));
1436       if ((code == VAR_DECL
1437 	   || code == PARM_DECL)
1438 	  && DECL_HAS_VALUE_EXPR_P (t))
1439 	visit (DECL_VALUE_EXPR (t));
1440       if (code == VAR_DECL
1441 	  && DECL_HAS_DEBUG_EXPR_P (t))
1442 	visit (DECL_DEBUG_EXPR (t));
1443       /* ???  Hash DECL_INITIAL as streamed.  Needs the output-block to
1444          be able to call get_symbol_initial_value.  */
1445     }
1446 
1447   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1448     {
1449       if (DECL_ASSEMBLER_NAME_SET_P (t))
1450 	visit (DECL_ASSEMBLER_NAME (t));
1451     }
1452 
1453   if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1454     {
1455       visit (DECL_FIELD_OFFSET (t));
1456       visit (DECL_BIT_FIELD_TYPE (t));
1457       visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1458       visit (DECL_FIELD_BIT_OFFSET (t));
1459     }
1460 
1461   if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1462     {
1463       visit (DECL_FUNCTION_PERSONALITY (t));
1464       visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1465       visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1466     }
1467 
1468   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1469     {
1470       visit (TYPE_SIZE (t));
1471       visit (TYPE_SIZE_UNIT (t));
1472       visit (TYPE_ATTRIBUTES (t));
1473       visit (TYPE_NAME (t));
1474       visit (TYPE_MAIN_VARIANT (t));
1475       if (TYPE_FILE_SCOPE_P (t))
1476 	;
1477       else
1478 	visit (TYPE_CONTEXT (t));
1479     }
1480 
1481   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1482     {
1483       if (code == ARRAY_TYPE)
1484 	visit (TYPE_DOMAIN (t));
1485       else if (RECORD_OR_UNION_TYPE_P (t))
1486 	for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1487 	  visit (f);
1488       else if (code == FUNCTION_TYPE
1489 	       || code == METHOD_TYPE)
1490 	visit (TYPE_ARG_TYPES (t));
1491       if (!POINTER_TYPE_P (t))
1492 	visit (TYPE_MIN_VALUE_RAW (t));
1493       visit (TYPE_MAX_VALUE_RAW (t));
1494     }
1495 
1496   if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1497     {
1498       visit (TREE_PURPOSE (t));
1499       visit (TREE_VALUE (t));
1500       visit (TREE_CHAIN (t));
1501     }
1502 
1503   if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1504     for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1505       visit (TREE_VEC_ELT (t, i));
1506 
1507   if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1508     {
1509       hstate.add_hwi (TREE_OPERAND_LENGTH (t));
1510       for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1511 	visit (TREE_OPERAND (t, i));
1512     }
1513 
1514   if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1515     {
1516       unsigned i;
1517       tree b;
1518       FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1519 	visit (b);
1520       visit (BINFO_OFFSET (t));
1521       visit (BINFO_VTABLE (t));
1522       /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1523 	 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
1524 	 by C++ FE only.  */
1525     }
1526 
1527   if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1528     {
1529       unsigned i;
1530       tree index, value;
1531       hstate.add_hwi (CONSTRUCTOR_NELTS (t));
1532       FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1533 	{
1534 	  visit (index);
1535 	  visit (value);
1536 	}
1537     }
1538 
1539   if (code == OMP_CLAUSE)
1540     {
1541       int i;
1542       HOST_WIDE_INT val;
1543 
1544       hstate.add_hwi (OMP_CLAUSE_CODE (t));
1545       switch (OMP_CLAUSE_CODE (t))
1546 	{
1547 	case OMP_CLAUSE_DEFAULT:
1548 	  val = OMP_CLAUSE_DEFAULT_KIND (t);
1549 	  break;
1550 	case OMP_CLAUSE_SCHEDULE:
1551 	  val = OMP_CLAUSE_SCHEDULE_KIND (t);
1552 	  break;
1553 	case OMP_CLAUSE_DEPEND:
1554 	  val = OMP_CLAUSE_DEPEND_KIND (t);
1555 	  break;
1556 	case OMP_CLAUSE_MAP:
1557 	  val = OMP_CLAUSE_MAP_KIND (t);
1558 	  break;
1559 	case OMP_CLAUSE_PROC_BIND:
1560 	  val = OMP_CLAUSE_PROC_BIND_KIND (t);
1561 	  break;
1562 	case OMP_CLAUSE_REDUCTION:
1563 	case OMP_CLAUSE_TASK_REDUCTION:
1564 	case OMP_CLAUSE_IN_REDUCTION:
1565 	  val = OMP_CLAUSE_REDUCTION_CODE (t);
1566 	  break;
1567 	default:
1568 	  val = 0;
1569 	  break;
1570 	}
1571       hstate.add_hwi (val);
1572       for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1573 	visit (OMP_CLAUSE_OPERAND (t, i));
1574       visit (OMP_CLAUSE_CHAIN (t));
1575     }
1576 
1577   return hstate.end ();
1578 
1579 #undef visit
1580 }
1581 
1582 /* Compare two SCC entries by their hash value for qsorting them.  */
1583 
1584 int
DFS::scc_entry_compare (const void *p1_, const void *p2_)
1586 {
1587   const scc_entry *p1 = (const scc_entry *) p1_;
1588   const scc_entry *p2 = (const scc_entry *) p2_;
1589   if (p1->hash < p2->hash)
1590     return -1;
1591   else if (p1->hash > p2->hash)
1592     return 1;
1593   return 0;
1594 }
1595 
1596 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1597    THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST.  */
1598 
1599 hashval_t
DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1601 	       bool ref_p, bool this_ref_p)
1602 {
1603   unsigned int last_classes = 0, iterations = 0;
1604 
1605   /* Compute hash values for the SCC members.  */
1606   for (unsigned i = 0; i < size; ++i)
1607     sccstack[first+i].hash
1608       = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1609 
1610   if (size == 1)
1611     return sccstack[first].hash;
1612 
1613   /* We aim to get unique hash for every tree within SCC and compute hash value
1614      of the whole SCC by combining all values together in a stable (entry-point
1615      independent) order.  This guarantees that the same SCC regions within
1616      different translation units will get the same hash values and therefore
1617      will be merged at WPA time.
1618 
1619      Often the hashes are already unique.  In that case we compute the SCC hash
1620      by combining individual hash values in an increasing order.
1621 
1622      If there are duplicates, we seek at least one tree with unique hash (and
1623      pick one with minimal hash and this property).  Then we obtain a stable
1624      order by DFS walk starting from this unique tree and then use the index
1625      within this order to make individual hash values unique.
1626 
1627      If there is no tree with unique hash, we iteratively propagate the hash
1628      values across the internal edges of SCC.  This usually quickly leads
1629      to unique hashes.  Consider, for example, an SCC containing two pointers
1630      that are identical except for the types they point to and assume that
1631      these types are also part of the SCC.  The propagation will add the
1632      points-to type information into their hash values.  */
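  /* A concrete sketch of the iteration below: with member hashes
     { 7, 7, 9 } the first pass finds two equivalence classes and the tree
     hashing to 9 becomes FIRSTUNIQUE; with { 7, 7, 7 } no unique entry
     exists, so hash_tree is re-run with a map of the current hashes,
     mixing every member's neighbor hashes into its own value until the
     classes separate or the iteration limit is reached.  */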
1633   do
1634     {
1635       /* Sort the SCC so we can easily check for uniqueness.  */
1636       qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1637 
1638       unsigned int classes = 1;
1639       int firstunique = -1;
1640 
1641       /* Find the tree with lowest unique hash (if it exists) and compute
1642 	 the number of equivalence classes.  */
1643       if (sccstack[first].hash != sccstack[first+1].hash)
1644 	firstunique = 0;
1645       for (unsigned i = 1; i < size; ++i)
1646 	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1647 	  {
1648 	    classes++;
1649 	    if (firstunique == -1
1650 		&& (i == size - 1
1651 		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
1652 	      firstunique = i;
1653 	  }
1654 
1655       /* If we found a tree with unique hash, stop the iteration.  */
1656       if (firstunique != -1
1657 	  /* Also terminate if we run out of iterations or if the number of
1658 	     equivalence classes is no longer increasing.
1659 	     For example a cyclic list of trees that are all equivalent will
1660 	     never have unique entry point; we however do not build such SCCs
1661 	     in our IL.  */
1662 	  || classes <= last_classes || iterations > 16)
1663 	{
1664           hashval_t scc_hash;
1665 
1666 	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1667 	     starting from FIRSTUNIQUE to obtain a stable order.  */
1668 	  if (classes != size && firstunique != -1)
1669 	    {
1670 	      hash_map <tree, hashval_t> map(size*2);
1671 
1672 	      /* Store hash values into a map, so we can associate them with
1673 		 the reordered SCC.  */
1674 	      for (unsigned i = 0; i < size; ++i)
1675 		map.put (sccstack[first+i].t, sccstack[first+i].hash);
1676 
1677 	      DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1678 			 true);
1679 	      gcc_assert (again.sccstack.length () == size);
1680 
1681 	      memcpy (sccstack.address () + first,
1682 		      again.sccstack.address (),
1683 		      sizeof (scc_entry) * size);
1684 
1685 	      /* Update hash values of individual members by hashing in the
1686 		 index within the stable order.  This ensures uniqueness.
1687 		 Also compute the SCC hash by mixing in all hash values in
1688 		 the stable order we obtained.  */
1689 	      sccstack[first].hash = *map.get (sccstack[first].t);
1690 	      scc_hash = sccstack[first].hash;
1691 	      for (unsigned i = 1; i < size; ++i)
1692 		{
1693 		  sccstack[first+i].hash
1694 		    = iterative_hash_hashval_t (i,
1695 						*map.get (sccstack[first+i].t));
1696 		  scc_hash
1697 		    = iterative_hash_hashval_t (scc_hash,
1698 						sccstack[first+i].hash);
1699 		}
1700 	    }
1701 	  /* If we got a unique hash value for each tree, then the sort above
1702 	     already ensured an entry-point-independent order.  Only the final
1703 	     SCC hash needs to be computed here.
1704 
1705 	     If we failed to find a unique entry point, we take the same
1706 	     route, which may eventually introduce unwanted hash conflicts.  */
1707 	  else
1708 	    {
1709 	      scc_hash = sccstack[first].hash;
1710 	      for (unsigned i = 1; i < size; ++i)
1711 		scc_hash
1712 		  = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1713 
1714 	      /* We cannot fully rule out hash conflicts that make it
1715 		 impossible to find a unique entry point.  That should be an
1716 		 extremely rare case, so ICE for now so that possible issues
1717 		 are found and evaluated.  */
1718 	      gcc_checking_assert (classes == size);
1719 	    }
1720 
1721 	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1722 	     hash into the hash of each element.  */
1723 	  for (unsigned i = 0; i < size; ++i)
1724 	    sccstack[first+i].hash
1725 	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1726 	  return scc_hash;
1727 	}
1728 
1729       last_classes = classes;
1730       iterations++;
1731 
1732       /* We failed to identify the entry point; propagate hash values across
1733 	 the edges.  */
1734       hash_map <tree, hashval_t> map(size*2);
1735 
1736       for (unsigned i = 0; i < size; ++i)
1737 	map.put (sccstack[first+i].t, sccstack[first+i].hash);
1738 
1739       for (unsigned i = 0; i < size; i++)
1740 	sccstack[first+i].hash
1741 	  = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1742     }
1743   while (true);
1744 }
1745 
1746 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1747    already in the streamer cache.  Main routine called for
1748    each visit of EXPR.  */
1749 
1750 void
1751 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1752 		     tree expr, bool ref_p, bool this_ref_p)
1753 {
1754   /* Handle special cases.  */
1755   if (expr == NULL_TREE)
1756     return;
1757 
1758   /* Do not DFS walk into indexable trees.  */
1759   if (this_ref_p && tree_is_indexable (expr))
1760     return;
1761 
1762   /* Check if we already streamed EXPR.  */
1763   if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1764     {
1765       /* A reference to a local tree makes this entry local as well.  We always
1766 	 process the top-of-stack entry, so set max to stack entries - 1.  */
1767       if (ob->local_trees
1768 	  && ob->local_trees->contains (expr))
1769 	max_local_entry = sccstack.length () - 1;
1770       return;
1771     }
1772 
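  /* EXPR is not in the cache yet.  Defer the walk by pushing a worklist
     entry; the iterative loop in the DFS constructor (rather than direct
     recursion here) later expands EXPR's edges, which keeps the walk from
     overflowing the stack on deep tree graphs.  */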
1773   worklist w;
1774   w.expr = expr;
1775   w.from_state = from_state;
1776   w.cstate = NULL;
1777   w.ref_p = ref_p;
1778   w.this_ref_p = this_ref_p;
1779   worklist_vec.safe_push (w);
1780 }
1781 
1782 
1783 /* Emit the physical representation of tree node EXPR to output block OB.
1784    If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1785    lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */
1786 
1787 void
1788 lto_output_tree (struct output_block *ob, tree expr,
1789 		 bool ref_p, bool this_ref_p)
1790 {
1791   unsigned ix;
1792   bool existed_p;
1793   unsigned int size = ob->main_stream->total_size;
1794   /* This is the first time we see EXPR, write all reachable
1795      trees to OB.  */
1796   static bool in_dfs_walk;
1797 
1798   if (expr == NULL_TREE)
1799     {
1800       streamer_write_record_start (ob, LTO_null);
1801       return;
1802     }
1803 
1804   if (this_ref_p && tree_is_indexable (expr))
1805     {
1806       enum LTO_tags tag;
1807       unsigned ix;
1808 
1809       lto_indexable_tree_ref (ob, expr, &tag, &ix);
1810       streamer_write_record_start (ob, tag);
1811       streamer_write_uhwi (ob, ix);
1812       return;
1813     }
1814 
1815   existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1816   if (existed_p)
1817     {
1818       if (streamer_dump_file)
1819 	{
1820 	  if (in_dfs_walk)
1821 	    print_node_brief (streamer_dump_file, "     Streaming ref to ",
1822 			      expr, 4);
1823 	  else
1824 	    print_node_brief (streamer_dump_file, "   Streaming ref to ",
1825 			      expr, 4);
1826 	  fprintf (streamer_dump_file, "\n");
1827 	}
1828       /* If a node has already been streamed out, make sure that
1829 	 we don't write it more than once.  Otherwise, the reader
1830 	 will instantiate two different nodes for the same object.  */
1831       streamer_write_record_start (ob, LTO_tree_pickle_reference);
1832       streamer_write_uhwi (ob, ix);
1833       if (streamer_debugging)
1834 	streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1835 			     lto_tree_code_to_tag (TREE_CODE (expr)));
1836       lto_stats.num_pickle_refs_output++;
1837     }
1838   else
1839     {
1840       /* Protect against recursion, which would mean a disconnect between
1841 	 the tree edges we walk in the DFS walk and the edges we
1842 	 stream out.  */
1843       gcc_assert (!in_dfs_walk);
1844 
1845       if (streamer_dump_file)
1846 	{
1847 	  print_node_brief (streamer_dump_file, "   Streaming tree ",
1848 			    expr, 4);
1849 	  fprintf (streamer_dump_file, "\n");
1850 	}
1851 
1852       /* Start the DFS walk.  */
1855       in_dfs_walk = true;
1856       DFS (ob, expr, ref_p, this_ref_p, false);
1857 
1858       /* Finally append a reference to the tree we were writing.  */
1859       existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1860 
1861       /* DFS walk above possibly skipped streaming EXPR itself to let us inline
1862 	 it.  */
1863       if (!existed_p)
1864 	lto_output_tree_1 (ob, expr, 0, ref_p, this_ref_p);
1865       else if (this_ref_p)
1866 	{
1867 	  if (streamer_dump_file)
1868 	    {
1869 	      print_node_brief (streamer_dump_file,
1870 				"   Streaming final ref to ",
1871 				expr, 4);
1872 	      fprintf (streamer_dump_file, "\n");
1873 	    }
1874 	  streamer_write_record_start (ob, LTO_tree_pickle_reference);
1875 	  streamer_write_uhwi (ob, ix);
1876 	  if (streamer_debugging)
1877 	    streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1878 				 lto_tree_code_to_tag (TREE_CODE (expr)));
1879 	}
1880       in_dfs_walk = false;
1881       lto_stats.num_pickle_refs_output++;
1882     }
1883   if (streamer_dump_file && !in_dfs_walk)
1884     fprintf (streamer_dump_file, "    %u bytes\n",
1885 	     ob->main_stream->total_size - size);
1886 }
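
/* To summarize the encodings produced above: LTO_null for a NULL tree, an
   indexable-tree tag followed by an index into the appropriate decl stream,
   LTO_tree_pickle_reference followed by a writer-cache slot for already
   streamed nodes, or the inline SCC stream emitted by the DFS walk for a
   first occurrence.  The reader dispatches on the leading record tag.  */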
1887 
1888 
1889 /* Output to OB a list of try/catch handlers starting with FIRST.  */
1890 
1891 static void
1892 output_eh_try_list (struct output_block *ob, eh_catch first)
1893 {
1894   eh_catch n;
1895 
1896   for (n = first; n; n = n->next_catch)
1897     {
1898       streamer_write_record_start (ob, LTO_eh_catch);
1899       stream_write_tree (ob, n->type_list, true);
1900       stream_write_tree (ob, n->filter_list, true);
1901       stream_write_tree (ob, n->label, true);
1902     }
1903 
1904   streamer_write_record_start (ob, LTO_null);
1905 }
1906 
1907 
1908 /* Output EH region R to OB, or an LTO_null record if R is NULL.  R's index
1909    in the region array is streamed so that the reader can detect EH region
1910    sharing.  */
1911 
1912 static void
1913 output_eh_region (struct output_block *ob, eh_region r)
1914 {
1915   enum LTO_tags tag;
1916 
1917   if (r == NULL)
1918     {
1919       streamer_write_record_start (ob, LTO_null);
1920       return;
1921     }
1922 
1923   if (r->type == ERT_CLEANUP)
1924     tag = LTO_ert_cleanup;
1925   else if (r->type == ERT_TRY)
1926     tag = LTO_ert_try;
1927   else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1928     tag = LTO_ert_allowed_exceptions;
1929   else if (r->type == ERT_MUST_NOT_THROW)
1930     tag = LTO_ert_must_not_throw;
1931   else
1932     gcc_unreachable ();
1933 
1934   streamer_write_record_start (ob, tag);
1935   streamer_write_hwi (ob, r->index);
1936 
1937   if (r->outer)
1938     streamer_write_hwi (ob, r->outer->index);
1939   else
1940     streamer_write_zero (ob);
1941 
1942   if (r->inner)
1943     streamer_write_hwi (ob, r->inner->index);
1944   else
1945     streamer_write_zero (ob);
1946 
1947   if (r->next_peer)
1948     streamer_write_hwi (ob, r->next_peer->index);
1949   else
1950     streamer_write_zero (ob);
1951 
1952   if (r->type == ERT_TRY)
1953     {
1954       output_eh_try_list (ob, r->u.eh_try.first_catch);
1955     }
1956   else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1957     {
1958       stream_write_tree (ob, r->u.allowed.type_list, true);
1959       stream_write_tree (ob, r->u.allowed.label, true);
1960       streamer_write_uhwi (ob, r->u.allowed.filter);
1961     }
1962   else if (r->type == ERT_MUST_NOT_THROW)
1963     {
1964       stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1965       bitpack_d bp = bitpack_create (ob->main_stream);
1966       stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1967       streamer_write_bitpack (&bp);
1968     }
1969 
1970   if (r->landing_pads)
1971     streamer_write_hwi (ob, r->landing_pads->index);
1972   else
1973     streamer_write_zero (ob);
1974 }
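
/* The record written above is, in order: the region-kind tag, the region's
   own index, the outer, inner and next-peer indices (0 when absent), a
   kind-specific payload, and the index of the first landing pad (again 0
   when absent).  Zero works as the "none" marker because live regions and
   landing pads are numbered starting at 1.  */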
1975 
1976 
1977 /* Output landing pad LP to OB.  */
1978 
1979 static void
1980 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1981 {
1982   if (lp == NULL)
1983     {
1984       streamer_write_record_start (ob, LTO_null);
1985       return;
1986     }
1987 
1988   streamer_write_record_start (ob, LTO_eh_landing_pad);
1989   streamer_write_hwi (ob, lp->index);
1990   if (lp->next_lp)
1991     streamer_write_hwi (ob, lp->next_lp->index);
1992   else
1993     streamer_write_zero (ob);
1994 
1995   if (lp->region)
1996     streamer_write_hwi (ob, lp->region->index);
1997   else
1998     streamer_write_zero (ob);
1999 
2000   stream_write_tree (ob, lp->post_landing_pad, true);
2001 }
2002 
2003 
2004 /* Output the existing eh_table to OB.  */
2005 
2006 static void
2007 output_eh_regions (struct output_block *ob, struct function *fn)
2008 {
2009   if (fn->eh && fn->eh->region_tree)
2010     {
2011       unsigned i;
2012       eh_region eh;
2013       eh_landing_pad lp;
2014       tree ttype;
2015 
2016       streamer_write_record_start (ob, LTO_eh_table);
2017 
2018       /* Emit the index of the root of the EH region tree.  */
2019       streamer_write_hwi (ob, fn->eh->region_tree->index);
2020 
2021       /* Emit all the EH regions in the region array.  */
2022       streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
2023       FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
2024 	output_eh_region (ob, eh);
2025 
2026       /* Emit all landing pads.  */
2027       streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
2028       FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
2029 	output_eh_lp (ob, lp);
2030 
2031       /* Emit all the runtime type data.  */
2032       streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
2033       FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
2034 	stream_write_tree (ob, ttype, true);
2035 
2036       /* Emit the table of action chains.  */
2037       if (targetm.arm_eabi_unwinder)
2038 	{
2039 	  tree t;
2040 	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
2041 	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
2042 	    stream_write_tree (ob, t, true);
2043 	}
2044       else
2045 	{
2046 	  uchar c;
2047 	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
2048 	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
2049 	    streamer_write_char_stream (ob->main_stream, c);
2050 	}
2051     }
2052 
2053   /* The LTO_null either terminates the record or indicates that there
2054      are no eh_records at all.  */
2055   streamer_write_record_start (ob, LTO_null);
2056 }
2057 
2058 
2059 /* Output all of the active SSA names in FN to OB.  */
2060 
2061 static void
2062 output_ssa_names (struct output_block *ob, struct function *fn)
2063 {
2064   unsigned int i, len;
2065 
2066   len = vec_safe_length (SSANAMES (fn));
2067   streamer_write_uhwi (ob, len);
2068 
2069   for (i = 1; i < len; i++)
2070     {
2071       tree ptr = (*SSANAMES (fn))[i];
2072 
2073       if (ptr == NULL_TREE
2074 	  || SSA_NAME_IN_FREE_LIST (ptr)
2075 	  || virtual_operand_p (ptr)
2076 	  /* Simply skip unreleased SSA names.  */
2077 	  || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
2078 	      && (! SSA_NAME_DEF_STMT (ptr)
2079 		  || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
2080 	continue;
2081 
2082       streamer_write_uhwi (ob, i);
2083       streamer_write_char_stream (ob->main_stream,
2084 				  SSA_NAME_IS_DEFAULT_DEF (ptr));
2085       if (SSA_NAME_VAR (ptr))
2086 	stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
2087       else
2088 	/* ???  This drops SSA_NAME_IDENTIFIER on the floor.  */
2089 	stream_write_tree (ob, TREE_TYPE (ptr), true);
2090     }
2091 
2092   streamer_write_zero (ob);
2093 }
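
/* The SSA name stream written above is thus the vector length, followed by
   an (index, default-def flag, SSA_NAME_VAR or type) record for every name
   that survives the filter, and a terminating zero index (the loop starts
   at 1, so 0 is free to serve as the sentinel).  */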
2094 
2095 
2096 
2097 /* Output the cfg.  */
2098 
2099 static void
2100 output_cfg (struct output_block *ob, struct function *fn)
2101 {
2102   struct lto_output_stream *tmp_stream = ob->main_stream;
2103   basic_block bb;
2104 
2105   ob->main_stream = ob->cfg_stream;
2106 
2107   streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
2108 		       profile_status_for_fn (fn));
2109 
2110   /* Output the number of the highest basic block.  */
2111   streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
2112 
2113   FOR_ALL_BB_FN (bb, fn)
2114     {
2115       edge_iterator ei;
2116       edge e;
2117 
2118       streamer_write_hwi (ob, bb->index);
2119 
2120       /* Output the successors and the edge flags.  */
2121       streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
2122       FOR_EACH_EDGE (e, ei, bb->succs)
2123 	{
2124 	  bitpack_d bp = bitpack_create (ob->main_stream);
2125 	  bp_pack_var_len_unsigned (&bp, e->dest->index);
2126 	  bp_pack_var_len_unsigned (&bp, e->flags);
2127 	  stream_output_location_and_block (ob, &bp, e->goto_locus);
2128 	  e->probability.stream_out (ob);
2129 	}
2130     }
2131 
2132   streamer_write_hwi (ob, -1);
2133 
2134   bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
2135   while (bb->next_bb)
2136     {
2137       streamer_write_hwi (ob, bb->next_bb->index);
2138       bb = bb->next_bb;
2139     }
2140 
2141   streamer_write_hwi (ob, -1);
2142 
2143   /* Output the number of loops.  */
2144   streamer_write_uhwi (ob, number_of_loops (fn));
2145 
2146   /* Output each loop, skipping the tree root which has number zero.  */
2147   for (unsigned i = 1; i < number_of_loops (fn); ++i)
2148     {
2149       class loop *loop = get_loop (fn, i);
2150 
2151       /* Write the index of the loop header.  That's enough to rebuild
2152          the loop tree on the reader side.  Stream -1 for an unused
2153 	 loop entry.  */
2154       if (!loop)
2155 	{
2156 	  streamer_write_hwi (ob, -1);
2157 	  continue;
2158 	}
2159       else
2160 	streamer_write_hwi (ob, loop->header->index);
2161 
2162       /* Write everything copy_loop_info copies.  */
2163       streamer_write_enum (ob->main_stream,
2164 			   loop_estimation, EST_LAST, loop->estimate_state);
2165       streamer_write_hwi (ob, loop->any_upper_bound);
2166       if (loop->any_upper_bound)
2167 	streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
2168       streamer_write_hwi (ob, loop->any_likely_upper_bound);
2169       if (loop->any_likely_upper_bound)
2170 	streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
2171       streamer_write_hwi (ob, loop->any_estimate);
2172       if (loop->any_estimate)
2173 	streamer_write_widest_int (ob, loop->nb_iterations_estimate);
2174 
2175       /* Write OMP SIMD related info.  */
2176       streamer_write_hwi (ob, loop->safelen);
2177       streamer_write_hwi (ob, loop->unroll);
2178       streamer_write_hwi (ob, loop->owned_clique);
2179       streamer_write_hwi (ob, loop->dont_vectorize);
2180       streamer_write_hwi (ob, loop->force_vectorize);
2181       streamer_write_hwi (ob, loop->finite_p);
2182       stream_write_tree (ob, loop->simduid, true);
2183     }
2184 
2185   ob->main_stream = tmp_stream;
2186 }
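
/* In summary, output_cfg writes to the separate cfg stream: the profile
   status, last_basic_block_for_fn, a per-block list of successor edges
   (destination, flags, goto locus and probability), a -1 sentinel, the
   bb->next_bb chain followed by another -1, and finally the loop tree
   encoded as one record per loop, keyed by the loop header's block index.  */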
2187 
2188 
2189 /* Create the header in the file using OB.  If the section type is for
2190    a function, set FN to the decl for that function.  */
2191 
2192 void
2193 produce_asm (struct output_block *ob, tree fn)
2194 {
2195   enum lto_section_type section_type = ob->section_type;
2196   struct lto_function_header header;
2197   char *section_name;
2198 
2199   if (section_type == LTO_section_function_body)
2200     {
2201       const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
2202       section_name = lto_get_section_name (section_type, name,
2203 					   symtab_node::get (fn)->order,
2204 					   NULL);
2205     }
2206   else
2207     section_name = lto_get_section_name (section_type, NULL, 0, NULL);
2208 
2209   lto_begin_section (section_name, !flag_wpa);
2210   free (section_name);
2211 
2212   /* The entire header stream is computed here.  */
2213   memset (&header, 0, sizeof (struct lto_function_header));
2214 
2215   if (section_type == LTO_section_function_body)
2216     header.cfg_size = ob->cfg_stream->total_size;
2217   header.main_size = ob->main_stream->total_size;
2218   header.string_size = ob->string_stream->total_size;
2219   lto_write_data (&header, sizeof header);
2220 
2221   /* Put all of the gimple and the string table out to the asm file as a
2222      block of text.  */
2223   if (section_type == LTO_section_function_body)
2224     lto_write_stream (ob->cfg_stream);
2225   lto_write_stream (ob->main_stream);
2226   lto_write_stream (ob->string_stream);
2227 
2228   lto_end_section ();
2229 }
2230 
2231 
2232 /* Output the base body of struct function FN using output block OB.  */
2233 
2234 static void
2235 output_struct_function_base (struct output_block *ob, struct function *fn)
2236 {
2237   struct bitpack_d bp;
2238   unsigned i;
2239   tree t;
2240 
2241   /* Output the static chain and non-local goto save area.  */
2242   stream_write_tree (ob, fn->static_chain_decl, true);
2243   stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
2244 
2245   /* Output all the local variables in the function.  */
2246   streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
2247   FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
2248     stream_write_tree (ob, t, true);
2249 
2250   /* Output current IL state of the function.  */
2251   streamer_write_uhwi (ob, fn->curr_properties);
2252 
2253   /* Write all the attributes for FN.  */
2254   bp = bitpack_create (ob->main_stream);
2255   bp_pack_value (&bp, fn->is_thunk, 1);
2256   bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
2257   bp_pack_value (&bp, fn->returns_pcc_struct, 1);
2258   bp_pack_value (&bp, fn->returns_struct, 1);
2259   bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
2260   bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
2261   bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
2262   bp_pack_value (&bp, fn->after_inlining, 1);
2263   bp_pack_value (&bp, fn->stdarg, 1);
2264   bp_pack_value (&bp, fn->has_nonlocal_label, 1);
2265   bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
2266   bp_pack_value (&bp, fn->calls_alloca, 1);
2267   bp_pack_value (&bp, fn->calls_setjmp, 1);
2268   bp_pack_value (&bp, fn->calls_eh_return, 1);
2269   bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
2270   bp_pack_value (&bp, fn->has_simduid_loops, 1);
2271   bp_pack_value (&bp, fn->va_list_fpr_size, 8);
2272   bp_pack_value (&bp, fn->va_list_gpr_size, 8);
2273   bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
2274 
2275   /* Output the function start and end loci.  */
2276   stream_output_location (ob, &bp, fn->function_start_locus);
2277   stream_output_location (ob, &bp, fn->function_end_locus);
2278 
2279   /* Save the instance discriminator if present.  */
2280   int *instance_number_p = NULL;
2281   if (decl_to_instance_map)
2282     instance_number_p = decl_to_instance_map->get (fn->decl);
2283   bp_pack_value (&bp, !!instance_number_p, 1);
2284   if (instance_number_p)
2285     bp_pack_value (&bp, *instance_number_p, sizeof (int) * CHAR_BIT);
2286 
2287   streamer_write_bitpack (&bp);
2288 }
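
/* The bitpack layout above has to stay in sync with the reader side
   (input_struct_function_base in lto-streamer-in.c), which unpacks the
   flags, the va_list sizes, the clique number, both locations and the
   optional instance discriminator in exactly this order.  */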
2289 
2290 
2291 /* Collect all leaf BLOCKs beyond ROOT into LEAFS.  */
2292 
2293 static void
2294 collect_block_tree_leafs (tree root, vec<tree> &leafs)
2295 {
2296   for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2297     if (! BLOCK_SUBBLOCKS (root))
2298       leafs.safe_push (root);
2299     else
2300       collect_block_tree_leafs (root, leafs);
2301 }
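
/* For example, given the block tree

     BLOCK #1
       BLOCK #2
         BLOCK #4
       BLOCK #3

   only #4 and #3 are pushed; the reader can re-create the inner blocks by
   walking BLOCK_SUPERCONTEXT from each streamed leaf.  */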
2302 
2303 /* This performs function body modifications that are needed for streaming
2304    to work.  */
2305 
2306 void
2307 lto_prepare_function_for_streaming (struct cgraph_node *node)
2308 {
2309   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2310   basic_block bb;
2311 
2312   if (number_of_loops (fn))
2313     {
2314       push_cfun (fn);
2315       loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
2316       loop_optimizer_finalize ();
2317       pop_cfun ();
2318     }
2319   /* We will renumber the statements.  The code that does this uses
2320      the same ordering that we use for serializing them so we can use
2321      the same code on the other end and not have to write out the
2322      statement numbers.  We do not assign UIDs to PHIs here because
2323      virtual PHIs get re-computed on-the-fly which would make numbers
2324      inconsistent.  */
2325   set_gimple_stmt_max_uid (fn, 0);
2326   FOR_ALL_BB_FN (bb, fn)
2327     {
2328       for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2329 	   gsi_next (&gsi))
2330 	{
2331 	  gphi *stmt = gsi.phi ();
2332 
2333 	  /* Virtual PHIs are not going to be streamed.  */
2334 	  if (!virtual_operand_p (gimple_phi_result (stmt)))
2335 	    gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2336 	}
2337       for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2338 	   gsi_next (&gsi))
2339 	{
2340 	  gimple *stmt = gsi_stmt (gsi);
2341 	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2342 	}
2343     }
2344   /* To avoid keeping duplicate gimple IDs in the statements, renumber
2345      virtual phis now.  */
2346   FOR_ALL_BB_FN (bb, fn)
2347     {
2348       for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2349 	   gsi_next (&gsi))
2350 	{
2351 	  gphi *stmt = gsi.phi ();
2352 	  if (virtual_operand_p (gimple_phi_result (stmt)))
2353 	    gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2354 	}
2355     }
2356 
2357 }
2358 
2359 /* Emit the chain of tree nodes starting at T.  OB is the output block
2360    to write to.  REF_P is true if chain elements should be emitted
2361    as references.  */
2362 
2363 static void
2364 streamer_write_chain (struct output_block *ob, tree t, bool ref_p)
2365 {
2366   while (t)
2367     {
2368       /* We avoid outputting external vars or functions by reference
2369 	 to the global decls section as we do not want to have them
2370 	 enter decl merging.  We should not need to do this anymore because
2371 	 free_lang_data removes them from block scopes.  */
2372       gcc_assert (!VAR_OR_FUNCTION_DECL_P (t) || !DECL_EXTERNAL (t));
2373       stream_write_tree (ob, t, ref_p);
2374 
2375       t = TREE_CHAIN (t);
2376     }
2377 
2378   /* Write a sentinel to terminate the chain.  */
2379   stream_write_tree (ob, NULL_TREE, ref_p);
2380 }
2381 
2382 /* Output the body of function NODE->DECL.  */
2383 
2384 static void
2385 output_function (struct cgraph_node *node)
2386 {
2387   tree function;
2388   struct function *fn;
2389   basic_block bb;
2390   struct output_block *ob;
2391 
2392   if (streamer_dump_file)
2393     fprintf (streamer_dump_file, "\nStreaming body of %s\n",
2394 	     node->dump_name ());
2395 
2396   function = node->decl;
2397   fn = DECL_STRUCT_FUNCTION (function);
2398   ob = create_output_block (LTO_section_function_body);
2399 
2400   ob->symbol = node;
2401 
2402   gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2403 
2404   /* Make string 0 be a NULL string.  */
2405   streamer_write_char_stream (ob->string_stream, 0);
2406 
2407   streamer_write_record_start (ob, LTO_function);
2408 
2409   /* Output decls for parameters and args.  */
2410   stream_write_tree (ob, DECL_RESULT (function), true);
2411   streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2412 
2413   /* Output debug args if available. */
2414   vec<tree, va_gc> **debugargs = decl_debug_args_lookup (function);
2415   if (! debugargs)
2416     streamer_write_uhwi (ob, 0);
2417   else
2418     {
2419       streamer_write_uhwi (ob, (*debugargs)->length ());
2420       for (unsigned i = 0; i < (*debugargs)->length (); ++i)
2421 	stream_write_tree (ob, (**debugargs)[i], true);
2422     }
2423 
2424   /* Output DECL_INITIAL for the function, which contains the tree of
2425      lexical scopes.  */
2426   stream_write_tree (ob, DECL_INITIAL (function), true);
2427   /* Since we do not recurse into BLOCK_SUBBLOCKS but only follow
2428      BLOCK_SUPERCONTEXT, collect the block tree leaves and stream those.  */
2429   auto_vec<tree> block_tree_leafs;
2430   if (DECL_INITIAL (function) && DECL_INITIAL (function) != error_mark_node)
2431     collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
2432   streamer_write_uhwi (ob, block_tree_leafs.length ());
2433   for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
2434     stream_write_tree (ob, block_tree_leafs[i], true);
2435 
2436   /* We also stream abstract functions, for which only the parts needed for
2437      debug info are streamed.  */
2438   if (gimple_has_body_p (function))
2439     {
2440       streamer_write_uhwi (ob, 1);
2441       output_struct_function_base (ob, fn);
2442 
2443       output_cfg (ob, fn);
2444 
2445       /* Output all the SSA names used in the function.  */
2446       output_ssa_names (ob, fn);
2447 
2448       /* Output any exception handling regions.  */
2449       output_eh_regions (ob, fn);
2450 
2451       /* Output the code for the function.  */
2452       FOR_ALL_BB_FN (bb, fn)
2453 	output_bb (ob, bb, fn);
2454 
2455       /* The terminator for this function.  */
2456       streamer_write_record_start (ob, LTO_null);
2457    }
2458   else
2459     streamer_write_uhwi (ob, 0);
2460 
2461   /* Create a section to hold the pickled output of this function.   */
2462   produce_asm (ob, function);
2463 
2464   destroy_output_block (ob);
2465   if (streamer_dump_file)
2466     fprintf (streamer_dump_file, "Finished streaming %s\n",
2467 	     node->dump_name ());
2468 }
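
/* A function-body section therefore contains: an LTO_function record, the
   result and argument decls, the optional debug args, DECL_INITIAL plus the
   block tree leaves, then either a 1 followed by the struct function bits,
   CFG, SSA names, EH regions and every basic block's statements (terminated
   by LTO_null), or a 0 for abstract functions with no body.  */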
2469 
2470 /* Output the initializer of variable NODE->DECL.  */
2471 
2472 static void
2473 output_constructor (struct varpool_node *node)
2474 {
2475   tree var = node->decl;
2476   struct output_block *ob;
2477 
2478   if (streamer_dump_file)
2479     fprintf (streamer_dump_file, "\nStreaming constructor of %s\n",
2480 	     node->dump_name ());
2481 
2482   timevar_push (TV_IPA_LTO_CTORS_OUT);
2483   ob = create_output_block (LTO_section_function_body);
2484 
2485   ob->symbol = node;
2486 
2487   /* Make string 0 be a NULL string.  */
2488   streamer_write_char_stream (ob->string_stream, 0);
2489 
2490   /* Output DECL_INITIAL of the variable, which holds the constructor
2491      being streamed.  */
2492   stream_write_tree (ob, DECL_INITIAL (var), true);
2493 
2494   /* Create a section to hold the pickled output of this variable.  */
2495   produce_asm (ob, var);
2496 
2497   destroy_output_block (ob);
2498   if (streamer_dump_file)
2499     fprintf (streamer_dump_file, "Finished streaming %s\n",
2500 	     node->dump_name ());
2501   timevar_pop (TV_IPA_LTO_CTORS_OUT);
2502 }
2503 
2504 
2505 /* Emit toplevel asms.  */
2506 
2507 void
2508 lto_output_toplevel_asms (void)
2509 {
2510   struct output_block *ob;
2511   struct asm_node *can;
2512   char *section_name;
2513   struct lto_simple_header_with_strings header;
2514 
2515   if (!symtab->first_asm_symbol ())
2516     return;
2517 
2518   ob = create_output_block (LTO_section_asm);
2519 
2520   /* Make string 0 be a NULL string.  */
2521   streamer_write_char_stream (ob->string_stream, 0);
2522 
2523   for (can = symtab->first_asm_symbol (); can; can = can->next)
2524     {
2525       streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2526       streamer_write_hwi (ob, can->order);
2527     }
2528 
2529   streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2530 
2531   section_name = lto_get_section_name (LTO_section_asm, NULL, 0, NULL);
2532   lto_begin_section (section_name, !flag_wpa);
2533   free (section_name);
2534 
2535   /* The entire header stream is computed here.  */
2536   memset (&header, 0, sizeof (header));
2537 
2538   header.main_size = ob->main_stream->total_size;
2539   header.string_size = ob->string_stream->total_size;
2540   lto_write_data (&header, sizeof header);
2541 
2542   /* Put all of the gimple and the string table out to the asm file as a
2543      block of text.  */
2544   lto_write_stream (ob->main_stream);
2545   lto_write_stream (ob->string_stream);
2546 
2547   lto_end_section ();
2548 
2549   destroy_output_block (ob);
2550 }
2551 
2552 
2553 /* Copy the function body or variable constructor of NODE without deserializing. */
2554 
2555 static void
2556 copy_function_or_variable (struct symtab_node *node)
2557 {
2558   tree function = node->decl;
2559   struct lto_file_decl_data *file_data = node->lto_file_data;
2560   const char *data;
2561   size_t len;
2562   const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2563   char *section_name =
2564     lto_get_section_name (LTO_section_function_body, name, node->order, NULL);
2565   size_t i, j;
2566   struct lto_in_decl_state *in_state;
2567   struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2568 
2569   if (streamer_dump_file)
2570     fprintf (streamer_dump_file, "Copying section for %s\n", name);
2571   lto_begin_section (section_name, false);
2572   free (section_name);
2573 
2574   /* We may have renamed the declaration, e.g., a static function.  */
2575   name = lto_get_decl_name_mapping (file_data, name);
2576 
2577   data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2578 				   name, node->order - file_data->order_base,
2579 				   &len);
2580   gcc_assert (data);
2581 
2582   /* Do a bit copy of the function body.  */
2583   lto_write_raw_data (data, len);
2584 
2585   /* Copy decls. */
2586   in_state =
2587     lto_get_function_in_decl_state (node->lto_file_data, function);
2588   gcc_assert (in_state);
2589   out_state->compressed = in_state->compressed;
2590 
2591   for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2592     {
2593       size_t n = vec_safe_length (in_state->streams[i]);
2594       vec<tree, va_gc> *trees = in_state->streams[i];
2595       struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2596 
2597       /* The out state must have the same indices as the in state,
2598 	 so just copy the vector.  All the encoders in the out state
2599 	 must be empty when we reach here.  */
2600       gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2601       encoder->trees.reserve_exact (n);
2602       for (j = 0; j < n; j++)
2603 	encoder->trees.safe_push ((*trees)[j]);
2604     }
2605 
2606   lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2607 			     data, len);
2608   lto_end_section ();
2609 }
2610 
2611 /* Wrap symbol references in *TP inside a type-preserving MEM_REF.  */
2612 
2613 static tree
2614 wrap_refs (tree *tp, int *ws, void *)
2615 {
2616   tree t = *tp;
2617   if (handled_component_p (t)
2618       && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2619       && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2620     {
2621       tree decl = TREE_OPERAND (t, 0);
2622       tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2623       TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2624 				    build1 (ADDR_EXPR, ptrtype, decl),
2625 				    build_int_cst (ptrtype, 0));
2626       TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2627       *ws = 0;
2628     }
2629   else if (TREE_CODE (t) == CONSTRUCTOR)
2630     ;
2631   else if (!EXPR_P (t))
2632     *ws = 0;
2633   return NULL_TREE;
2634 }
2635 
2636 /* Remove functions that are no longer used from offload_funcs, and mark the
2637    remaining ones with DECL_PRESERVE_P.  */
2638 
2639 static void
2640 prune_offload_funcs (void)
2641 {
2642   if (!offload_funcs)
2643     return;
2644 
2645   unsigned ix, ix2;
2646   tree *elem_ptr;
2647   VEC_ORDERED_REMOVE_IF (*offload_funcs, ix, ix2, elem_ptr,
2648 			 cgraph_node::get (*elem_ptr) == NULL);
2649 
2650   tree fn_decl;
2651   FOR_EACH_VEC_ELT (*offload_funcs, ix, fn_decl)
2652     DECL_PRESERVE_P (fn_decl) = 1;
2653 }
2654 
2655 /* Produce LTO section that contains global information
2656    about LTO bytecode.  */
2657 
2658 static void
2659 produce_lto_section ()
2660 {
2661   /* Stream LTO meta section.  */
2662   output_block *ob = create_output_block (LTO_section_lto);
2663 
2664   char * section_name = lto_get_section_name (LTO_section_lto, NULL, 0, NULL);
2665   lto_begin_section (section_name, false);
2666   free (section_name);
2667 
2668 #ifdef HAVE_ZSTD_H
2669   lto_compression compression = ZSTD;
2670 #else
2671   lto_compression compression = ZLIB;
2672 #endif
2673 
2674   bool slim_object = flag_generate_lto && !flag_fat_lto_objects;
2675   lto_section s
2676     = { LTO_major_version, LTO_minor_version, slim_object, 0, 0 };
2677   s.set_compression (compression);
2678   lto_write_data (&s, sizeof s);
2679   lto_end_section ();
2680   destroy_output_block (ob);
2681 }
2682 
2683 /* Compare symbols to get them sorted by filename (to optimize streaming)  */
2684 
2685 static int
2686 cmp_symbol_files (const void *pn1, const void *pn2, void *id_map_)
2687 {
2688   const symtab_node *n1 = *(const symtab_node * const *)pn1;
2689   const symtab_node *n2 = *(const symtab_node * const *)pn2;
2690   hash_map<lto_file_decl_data *, int> *id_map
2691     = (hash_map<lto_file_decl_data *, int> *)id_map_;
2692 
2693   int file_order1 = n1->lto_file_data ? n1->lto_file_data->order : -1;
2694   int file_order2 = n2->lto_file_data ? n2->lto_file_data->order : -1;
2695 
2696   /* Order files the same way as they appeared on the command line, to reduce
2697      seeking while copying sections.  */
2698   if (file_order1 != file_order2)
2699     return file_order1 - file_order2;
2700 
2701   /* Order within static library.  */
2702   if (n1->lto_file_data && n1->lto_file_data->id != n2->lto_file_data->id)
2703     return *id_map->get (n1->lto_file_data) - *id_map->get (n2->lto_file_data);
2704 
2705   /* And finally order by the definition order.  */
2706   return n1->order - n2->order;
2707 }
2708 
2709 /* Main entry point from the pass manager.  */
2710 
2711 void
2712 lto_output (void)
2713 {
2714   struct lto_out_decl_state *decl_state;
2715   bitmap output = NULL;
2716   bitmap_obstack output_obstack;
2717   unsigned int i, n_nodes;
2718   lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2719   auto_vec<symtab_node *> symbols_to_copy;
2720 
2721   prune_offload_funcs ();
2722 
2723   if (flag_checking)
2724     {
2725       bitmap_obstack_initialize (&output_obstack);
2726       output = BITMAP_ALLOC (&output_obstack);
2727     }
2728 
2729   /* Initialize the streamer.  */
2730   lto_streamer_init ();
2731 
2732   produce_lto_section ();
2733 
2734   n_nodes = lto_symtab_encoder_size (encoder);
2735   /* Prepare vector of functions to output and then sort it to optimize
2736      section copying.  */
2737   for (i = 0; i < n_nodes; i++)
2738     {
2739       symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2740       if (snode->alias)
2741 	continue;
2742       if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2743 	{
2744 	  if (lto_symtab_encoder_encode_body_p (encoder, node))
2745 	    symbols_to_copy.safe_push (node);
2746 	}
2747       else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2748 	{
2749 	  /* Wrap symbol references inside the ctor in a type
2750 	     preserving MEM_REF.  */
2751 	  tree ctor = DECL_INITIAL (node->decl);
2752 	  if (ctor && !in_lto_p)
2753 	    walk_tree (&ctor, wrap_refs, NULL, NULL);
2754 	  if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2755 	      && lto_symtab_encoder_encode_initializer_p (encoder, node))
2756 	    symbols_to_copy.safe_push (node);
2757 	}
2758     }
2759   /* Map each lto_file_decl_data id to the order it first appears in
2760      symbols_to_copy; we want symbols with the same id sorted next to each
2761      other, without making the overall order depend on the actual hash value.  */
2762   int order = 0;
2763   hash_map<lto_file_decl_data *, int> id_map;
2764   for (i = 0; i < symbols_to_copy.length (); ++i)
2765     {
2766       symtab_node *snode = symbols_to_copy[i];
2767       if (snode->lto_file_data)
2768 	{
2769 	  bool existed_p = false;
2770 	  int &ord = id_map.get_or_insert (snode->lto_file_data, &existed_p);
2771 	  if (!existed_p)
2772 	    ord = order++;
2773 	}
2774     }
2775   symbols_to_copy.sort (cmp_symbol_files, (void *)&id_map);
2776   for (i = 0; i < symbols_to_copy.length (); i++)
2777     {
2778       symtab_node *snode = symbols_to_copy[i];
2779       cgraph_node *cnode;
2780       varpool_node *vnode;
2781 
2782       if (flag_checking)
2783 	gcc_assert (bitmap_set_bit (output, DECL_UID (snode->decl)));
2784 
2785       decl_state = lto_new_out_decl_state ();
2786       lto_push_out_decl_state (decl_state);
2787 
2788       if ((cnode = dyn_cast <cgraph_node *> (snode))
2789 	  && (gimple_has_body_p (cnode->decl)
2790 	      || (!flag_wpa
2791 		  && flag_incremental_link != INCREMENTAL_LINK_LTO)
2792 	      /* Thunks have no body but they may be synthesized
2793 		 at WPA time.  */
2794 	      || DECL_ARGUMENTS (cnode->decl)
2795 	      || cnode->declare_variant_alt))
2796 	output_function (cnode);
2797       else if ((vnode = dyn_cast <varpool_node *> (snode))
2798 	       && (DECL_INITIAL (vnode->decl) != error_mark_node
2799 		   || (!flag_wpa
2800 		       && flag_incremental_link != INCREMENTAL_LINK_LTO)))
2801 	output_constructor (vnode);
2802       else
2803 	copy_function_or_variable (snode);
2804       gcc_assert (lto_get_out_decl_state () == decl_state);
2805       lto_pop_out_decl_state ();
2806       lto_record_function_out_decl_state (snode->decl, decl_state);
2807     }
2808 
2809   /* Emit the callgraph after emitting function bodies.  This needs to
2810      be done now to make sure that all the statements in every function
2811      have been renumbered so that edges can be associated with call
2812      statements using the statement UIDs.  */
2813   output_symtab ();
2814 
2815   output_offload_tables ();
2816 
2817   if (flag_checking)
2818     {
2819       BITMAP_FREE (output);
2820       bitmap_obstack_release (&output_obstack);
2821     }
2822 }
2823 
2824 /* Write each node encoded by ENCODER to OB, as well as those reachable
2825    from it and required for correct representation of its semantics.
2826    Each node in ENCODER must be a global declaration or a type.  A node
2827    is written only once, even if it appears multiple times in the
2828    vector.  Certain transitively-reachable nodes, such as those
2829    representing expressions, may be duplicated, but such nodes
2830    must not appear in ENCODER itself.  */
2831 
2832 static void
2833 write_global_stream (struct output_block *ob,
2834 		     struct lto_tree_ref_encoder *encoder)
2835 {
2836   tree t;
2837   size_t index;
2838   const size_t size = lto_tree_ref_encoder_size (encoder);
2839 
2840   for (index = 0; index < size; index++)
2841     {
2842       t = lto_tree_ref_encoder_get_tree (encoder, index);
2843       if (streamer_dump_file)
2844 	{
2845           fprintf (streamer_dump_file, " %i:", (int)index);
2846 	  print_node_brief (streamer_dump_file, "", t, 4);
2847           fprintf (streamer_dump_file, "\n");
2848 	}
2849       if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2850 	stream_write_tree (ob, t, false);
2851     }
2852 }
2853 
2854 
2855 /* Write a sequence of indices into the globals vector corresponding
2856    to the trees in ENCODER.  These are used by the reader to map the
2857    indices used to refer to global entities within function bodies to
2858    their referents.  */
2859 
2860 static void
2861 write_global_references (struct output_block *ob,
2862 			 struct lto_tree_ref_encoder *encoder)
2863 {
2864   tree t;
2865   uint32_t index;
2866   const uint32_t size = lto_tree_ref_encoder_size (encoder);
2867 
2868   /* Write size and slot indexes as 32-bit unsigned numbers. */
2869   uint32_t *data = XNEWVEC (uint32_t, size + 1);
2870   data[0] = size;
2871 
2872   for (index = 0; index < size; index++)
2873     {
2874       unsigned slot_num;
2875 
2876       t = lto_tree_ref_encoder_get_tree (encoder, index);
2877       streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2878       gcc_assert (slot_num != (unsigned)-1);
2879       data[index + 1] = slot_num;
2880     }
2881 
2882   lto_write_data (data, sizeof (int32_t) * (size + 1));
2883   free (data);
2884 }
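
/* Each reference table written above is therefore a 32-bit element count
   followed by that many 32-bit writer-cache slot numbers; the reader uses
   them to map per-stream indices back to nodes in the global tree cache.  */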
2885 
2886 
2887 /* Write all the streams in an lto_out_decl_state STATE using
2888    output block OB and output stream OUT_STREAM.  */
2889 
2890 void
2891 lto_output_decl_state_streams (struct output_block *ob,
2892 			       struct lto_out_decl_state *state)
2893 {
2894   int i;
2895 
2896   for (i = 0;  i < LTO_N_DECL_STREAMS; i++)
2897     write_global_stream (ob, &state->streams[i]);
2898 }
2899 
2900 
2901 /* Write all the references in an lto_out_decl_state STATE using
2902    output block OB and output stream OUT_STREAM.  */
2903 
2904 void
2905 lto_output_decl_state_refs (struct output_block *ob,
2906 			    struct lto_out_decl_state *state)
2907 {
2908   unsigned i;
2909   unsigned ref;
2910   tree decl;
2911 
2912   /* Write a reference to FUNCTION_DECL.  If there is no function,
2913      write a reference to void_type_node.  */
2914   decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2915   streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2916   gcc_assert (ref != (unsigned)-1);
2917   ref = ref * 2 + (state->compressed ? 1 : 0);
2918   lto_write_data (&ref, sizeof (uint32_t));
2919 
2920   for (i = 0;  i < LTO_N_DECL_STREAMS; i++)
2921     write_global_references (ob, &state->streams[i]);
2922 }
2923 
2924 
2925 /* Return the written size of STATE. */
2926 
2927 static size_t
2928 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2929 {
2930   int i;
2931   size_t size;
2932 
2933   size = sizeof (int32_t);	/* fn_ref. */
2934   for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2935     {
2936       size += sizeof (int32_t); /* vector size. */
2937       size += (lto_tree_ref_encoder_size (&state->streams[i])
2938 	       * sizeof (int32_t));
2939     }
2940   return size;
2941 }
2942 
2943 
2944 /* Write symbol T, looked up in CACHE, to the symbol table section.  SEEN
2945    records names written so far; ALIAS is true if T is written for an alias.  */
2946 
2947 static void
2948 write_symbol (struct streamer_tree_cache_d *cache,
2949 	      tree t, hash_set<const char *> *seen, bool alias)
2950 {
2951   const char *name;
2952   enum gcc_plugin_symbol_kind kind;
2953   enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2954   unsigned slot_num;
2955   uint64_t size;
2956   const char *comdat;
2957   unsigned char c;
2958 
2959   gcc_assert (VAR_OR_FUNCTION_DECL_P (t));
2960 
2961   name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2962 
2963   /* This behaves like assemble_name_raw in varasm.c, performing the
2964      same name manipulations that ASM_OUTPUT_LABELREF does. */
2965   name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2966 
2967   if (seen->add (name))
2968     return;
2969 
2970   streamer_tree_cache_lookup (cache, t, &slot_num);
2971   gcc_assert (slot_num != (unsigned)-1);
2972 
2973   if (DECL_EXTERNAL (t))
2974     {
2975       if (DECL_WEAK (t))
2976 	kind = GCCPK_WEAKUNDEF;
2977       else
2978 	kind = GCCPK_UNDEF;
2979     }
2980   else
2981     {
2982       if (DECL_WEAK (t))
2983 	kind = GCCPK_WEAKDEF;
2984       else if (DECL_COMMON (t))
2985 	kind = GCCPK_COMMON;
2986       else
2987 	kind = GCCPK_DEF;
2988 
2989       /* When something is defined, it should have a node attached.  */
2990       gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
2991       gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2992 		  || (cgraph_node::get (t)
2993 		      && cgraph_node::get (t)->definition));
2994     }
2995 
2996   /* Imitate what default_elf_asm_output_external does.
2997      When a symbol is external, we need to output it with DEFAULT visibility
2998      when compiling with -fvisibility=default, but with HIDDEN visibility
2999      when the symbol has attribute ((visibility ("hidden"))) specified.
3000      targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
3001      right.  */
3002 
3003   if (DECL_EXTERNAL (t)
3004       && !targetm.binds_local_p (t))
3005     visibility = GCCPV_DEFAULT;
3006   else
3007     switch (DECL_VISIBILITY (t))
3008       {
3009       case VISIBILITY_DEFAULT:
3010 	visibility = GCCPV_DEFAULT;
3011 	break;
3012       case VISIBILITY_PROTECTED:
3013 	visibility = GCCPV_PROTECTED;
3014 	break;
3015       case VISIBILITY_HIDDEN:
3016 	visibility = GCCPV_HIDDEN;
3017 	break;
3018       case VISIBILITY_INTERNAL:
3019 	visibility = GCCPV_INTERNAL;
3020 	break;
3021       }
3022 
3023   if (kind == GCCPK_COMMON
3024       && DECL_SIZE_UNIT (t)
3025       && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
3026     size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
3027   else
3028     size = 0;
3029 
3030   if (DECL_ONE_ONLY (t))
3031     comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
3032   else
3033     comdat = "";
3034 
3035   lto_write_data (name, strlen (name) + 1);
3036   lto_write_data (comdat, strlen (comdat) + 1);
3037   c = (unsigned char) kind;
3038   lto_write_data (&c, 1);
3039   c = (unsigned char) visibility;
3040   lto_write_data (&c, 1);
3041   lto_write_data (&size, 8);
3042   lto_write_data (&slot_num, 4);
3043 }
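
/* Each symbol record written above is: the NUL-terminated assembler name,
   the NUL-terminated comdat group (possibly empty), one byte each for the
   kind and the visibility, an 8-byte size (nonzero only for commons) and a
   4-byte writer-cache slot number.  The same layout is expected by the LTO
   linker plugin when it scans the symbol table section.  */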
3044 
3045 /* Write extension information for symbols (symbol type, section flags).  */
3046 
3047 static void
3048 write_symbol_extension_info (tree t)
3049 {
3050   unsigned char c;
3051   c = ((unsigned char) TREE_CODE (t) == VAR_DECL
3052        ? GCCST_VARIABLE : GCCST_FUNCTION);
3053   lto_write_data (&c, 1);
3054   unsigned char section_kind = 0;
3055   if (TREE_CODE (t) == VAR_DECL)
3056     {
3057       section *s = get_variable_section (t, false);
3058       if (s->common.flags & SECTION_BSS)
3059 	section_kind |= GCCSSK_BSS;
3060     }
3061   lto_write_data (&section_kind, 1);
3062 }
3063 
3064 /* Write the IL symbol table to the current section, covering the symbols
3065    recorded in OB's symtab encoder.  Return the number of symbols written.  */
3066 
3067 static unsigned int
3068 produce_symtab (struct output_block *ob)
3069 {
3070   unsigned int streamed_symbols = 0;
3071   struct streamer_tree_cache_d *cache = ob->writer_cache;
3072   char *section_name = lto_get_section_name (LTO_section_symtab, NULL, 0, NULL);
3073   lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
3074   lto_symtab_encoder_iterator lsei;
3075 
3076   lto_begin_section (section_name, false);
3077   free (section_name);
3078 
3079   hash_set<const char *> seen;
3080 
3081   /* Write the symbol table.
3082      First write everything defined and then all declarations.
3083      This is necessary to handle cases where we have duplicated symbols.  */
3084   for (lsei = lsei_start (encoder);
3085        !lsei_end_p (lsei); lsei_next (&lsei))
3086     {
3087       symtab_node *node = lsei_node (lsei);
3088 
3089       if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3090 	continue;
3091       write_symbol (cache, node->decl, &seen, false);
3092       ++streamed_symbols;
3093     }
3094   for (lsei = lsei_start (encoder);
3095        !lsei_end_p (lsei); lsei_next (&lsei))
3096     {
3097       symtab_node *node = lsei_node (lsei);
3098 
3099       if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3100 	continue;
3101       write_symbol (cache, node->decl, &seen, false);
3102       ++streamed_symbols;
3103     }
3104 
3105   lto_end_section ();
3106 
3107   return streamed_symbols;
3108 }
3109 
3110 /* Symtab extension version.  */
3111 #define LTO_SYMTAB_EXTENSION_VERSION 1
3112 
3113 /* Write the IL symbol table extension to the current section.  The number
3114    of entries written must match PREVIOUS_STREAMED_SYMBOLS from produce_symtab.  */
3115 
3116 static void
3117 produce_symtab_extension (struct output_block *ob,
3118 			  unsigned int previous_streamed_symbols)
3119 {
3120   unsigned int streamed_symbols = 0;
3121   char *section_name = lto_get_section_name (LTO_section_symtab_extension,
3122 					     NULL, 0, NULL);
3123   lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
3124   lto_symtab_encoder_iterator lsei;
3125 
3126   lto_begin_section (section_name, false);
3127   free (section_name);
3128 
3129   unsigned char version = LTO_SYMTAB_EXTENSION_VERSION;
3130   lto_write_data (&version, 1);
3131 
3132   /* Write the symbol table.
3133      First write everything defined and then all declarations.
3134      This is necessary to handle cases where we have duplicated symbols.  */
3135   for (lsei = lsei_start (encoder);
3136        !lsei_end_p (lsei); lsei_next (&lsei))
3137     {
3138       symtab_node *node = lsei_node (lsei);
3139 
3140       if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3141 	continue;
3142       write_symbol_extension_info (node->decl);
3143       ++streamed_symbols;
3144     }
3145   for (lsei = lsei_start (encoder);
3146        !lsei_end_p (lsei); lsei_next (&lsei))
3147     {
3148       symtab_node *node = lsei_node (lsei);
3149 
3150       if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
3151 	continue;
3152       write_symbol_extension_info (node->decl);
3153       ++streamed_symbols;
3154     }
3155 
3156   gcc_assert (previous_streamed_symbols == streamed_symbols);
3157   lto_end_section ();
3158 }
3159 
3160 
3161 /* Initialize the streamer_mode_table for output; it records which
3162    machine_mode values have been streamed.  */
3163 void
3164 lto_output_init_mode_table (void)
3165 {
3166   memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
3167 }
3168 
3169 
3170 /* Write the mode table.  */
3171 static void
3172 lto_write_mode_table (void)
3173 {
3174   struct output_block *ob;
3175   ob = create_output_block (LTO_section_mode_table);
3176   bitpack_d bp = bitpack_create (ob->main_stream);
3177 
3178   /* Ensure that when GET_MODE_INNER (m) != m, the inner mode is
3179      marked as well.  */
3180   for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
3181     if (streamer_mode_table[i])
3182       {
3183 	machine_mode m = (machine_mode) i;
3184 	machine_mode inner_m = GET_MODE_INNER (m);
3185 	if (inner_m != m)
3186 	  streamer_mode_table[(int) inner_m] = 1;
3187       }
3188   /* First stream modes that have GET_MODE_INNER (m) == m (pass 0),
3189      so that we can refer to them from the remaining modes in pass 1.  */
3190   for (int pass = 0; pass < 2; pass++)
3191     for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
3192       if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
3193 	{
3194 	  machine_mode m = (machine_mode) i;
3195 	  if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
3196 	    continue;
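	  /* Stream one mode descriptor: the mode number, its class, size,
	     precision, inner mode and number of units, followed by
	     class-specific fields and the mode name.  */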
3197 	  bp_pack_value (&bp, m, 8);
3198 	  bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
3199 	  bp_pack_poly_value (&bp, GET_MODE_SIZE (m), 16);
3200 	  bp_pack_poly_value (&bp, GET_MODE_PRECISION (m), 16);
3201 	  bp_pack_value (&bp, GET_MODE_INNER (m), 8);
3202 	  bp_pack_poly_value (&bp, GET_MODE_NUNITS (m), 16);
3203 	  switch (GET_MODE_CLASS (m))
3204 	    {
3205 	    case MODE_FRACT:
3206 	    case MODE_UFRACT:
3207 	    case MODE_ACCUM:
3208 	    case MODE_UACCUM:
3209 	      bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
3210 	      bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
3211 	      break;
3212 	    case MODE_FLOAT:
3213 	    case MODE_DECIMAL_FLOAT:
3214 	      bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
3215 	      break;
3216 	    default:
3217 	      break;
3218 	    }
3219 	  bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
3220 	}
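  /* VOIDmode is never streamed by the loop above, so it serves as the
     terminator of the mode table.  */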
3221   bp_pack_value (&bp, VOIDmode, 8);
3222 
3223   streamer_write_bitpack (&bp);
3224 
3225   char *section_name
3226     = lto_get_section_name (LTO_section_mode_table, NULL, 0, NULL);
3227   lto_begin_section (section_name, !flag_wpa);
3228   free (section_name);
3229 
3230   /* The entire header stream is computed here.  */
3231   struct lto_simple_header_with_strings header;
3232   memset (&header, 0, sizeof (header));
3233 
3234   header.main_size = ob->main_stream->total_size;
3235   header.string_size = ob->string_stream->total_size;
3236   lto_write_data (&header, sizeof header);
3237 
3238   /* Put the main stream and the string table out to the asm file as a
3239      block of text.  */
3240   lto_write_stream (ob->main_stream);
3241   lto_write_stream (ob->string_stream);
3242 
3243   lto_end_section ();
3244   destroy_output_block (ob);
3245 }
3246 
3247 
3248 /* This pass is run after all of the functions are serialized and all
3249    of the IPA passes have written their serialized forms.  This pass
3250    causes the vector of all of the global decls and types used from
3251    this file to be written into a section that can then be read back
3252    to recover them on the other side.  */
3253 
3254 void
3255 produce_asm_for_decls (void)
3256 {
3257   struct lto_out_decl_state *out_state;
3258   struct lto_out_decl_state *fn_out_state;
3259   struct lto_decl_header header;
3260   char *section_name;
3261   struct output_block *ob;
3262   unsigned idx, num_fns;
3263   size_t decl_state_size;
3264   int32_t num_decl_states;
3265 
3266   ob = create_output_block (LTO_section_decls);
3267 
3268   memset (&header, 0, sizeof (struct lto_decl_header));
3269 
3270   section_name = lto_get_section_name (LTO_section_decls, NULL, 0, NULL);
3271   lto_begin_section (section_name, !flag_wpa);
3272   free (section_name);
3273 
3274   /* Make string 0 be a NULL string.  */
3275   streamer_write_char_stream (ob->string_stream, 0);
3276 
3277   gcc_assert (!alias_pairs);
3278 
3279   /* Get rid of the global decl state hash tables to save some memory.  */
3280   out_state = lto_get_out_decl_state ();
3281   for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
3282     if (out_state->streams[i].tree_hash_table)
3283       {
3284 	delete out_state->streams[i].tree_hash_table;
3285 	out_state->streams[i].tree_hash_table = NULL;
3286       }
3287 
3288   /* Write the global symbols.  */
3289   if (streamer_dump_file)
3290     fprintf (streamer_dump_file, "Outputting global stream\n");
3291   lto_output_decl_state_streams (ob, out_state);
3292   num_fns = lto_function_decl_states.length ();
3293   for (idx = 0; idx < num_fns; idx++)
3294     {
3295       fn_out_state =
3296 	lto_function_decl_states[idx];
3297       if (streamer_dump_file)
3298 	fprintf (streamer_dump_file, "Outputting stream for %s\n",
3299 		 IDENTIFIER_POINTER
3300 		    (DECL_ASSEMBLER_NAME (fn_out_state->fn_decl)));
3301       lto_output_decl_state_streams (ob, fn_out_state);
3302     }
3303 
3304   /* Currently not used.  This field would allow us to preallocate
3305      the globals vector, so that it need not be resized as it is extended.  */
3306   header.num_nodes = -1;
3307 
3308   /* Compute the total size of all decl out states. */
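  /* The initial int32_t accounts for the num_decl_states count that is
     written in front of the decl state references below.  */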
3309   decl_state_size = sizeof (int32_t);
3310   decl_state_size += lto_out_decl_state_written_size (out_state);
3311   for (idx = 0; idx < num_fns; idx++)
3312     {
3313       fn_out_state =
3314 	lto_function_decl_states[idx];
3315       decl_state_size += lto_out_decl_state_written_size (fn_out_state);
3316     }
3317   header.decl_state_size = decl_state_size;
3318 
3319   header.main_size = ob->main_stream->total_size;
3320   header.string_size = ob->string_stream->total_size;
3321 
3322   lto_write_data (&header, sizeof header);
3323 
3324   /* Write the main out-decl state, followed by out-decl states of
3325      functions. */
3326   num_decl_states = num_fns + 1;
3327   lto_write_data (&num_decl_states, sizeof (num_decl_states));
3328   lto_output_decl_state_refs (ob, out_state);
3329   for (idx = 0; idx < num_fns; idx++)
3330     {
3331       fn_out_state = lto_function_decl_states[idx];
3332       lto_output_decl_state_refs (ob, fn_out_state);
3333     }
3334 
3335   lto_write_stream (ob->main_stream);
3336   lto_write_stream (ob->string_stream);
3337 
3338   lto_end_section ();
3339 
3340   /* Write the symbol table.  It is used by the linker to determine
3341      dependencies, so we can skip it for WPA output.  */
3342   if (!flag_wpa)
3343     {
3344       unsigned int streamed_symbols = produce_symtab (ob);
3345       produce_symtab_extension (ob, streamed_symbols);
3346     }
3347 
3348   /* Write command line opts.  */
3349   lto_write_options ();
3350 
3351   /* Deallocate memory and clean up.  */
3352   for (idx = 0; idx < num_fns; idx++)
3353     {
3354       fn_out_state =
3355 	lto_function_decl_states[idx];
3356       lto_delete_out_decl_state (fn_out_state);
3357     }
3358   lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
3359   lto_function_decl_states.release ();
3360   destroy_output_block (ob);
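  /* The machine mode table is only needed for offload streams, where the
     accelerator's set of machine modes may differ from the host's.  */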
3361   if (lto_stream_offload_p)
3362     lto_write_mode_table ();
3363 }
3364