1 /* Write the GIMPLE representation to a file stream.
2 
3    Copyright (C) 2009-2019 Free Software Foundation, Inc.
4    Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5    Re-implemented by Diego Novillo <dnovillo@google.com>
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
43 #include "debug.h"
44 #include "omp-offload.h"
45 #include "print-tree.h"
46 #include "tree-dfa.h"
47 
48 
49 static void lto_write_tree (struct output_block*, tree, bool);
50 
51 /* Clear the line info stored in OB.  */
52 
53 static void
54 clear_line_info (struct output_block *ob)
55 {
56   ob->current_file = NULL;
57   ob->current_line = 0;
58   ob->current_col = 0;
59   ob->current_sysp = false;
60 }
61 
62 
63 /* Create the output block and return it.  SECTION_TYPE is
64    LTO_section_function_body or LTO_section_static_initializer.  */
65 
66 struct output_block *
67 create_output_block (enum lto_section_type section_type)
68 {
69   struct output_block *ob = XCNEW (struct output_block);
70   if (streamer_dump_file)
71     fprintf (streamer_dump_file, "Creating output block for %s\n",
72 	     lto_section_name [section_type]);
73 
74   ob->section_type = section_type;
75   ob->decl_state = lto_get_out_decl_state ();
76   ob->main_stream = XCNEW (struct lto_output_stream);
77   ob->string_stream = XCNEW (struct lto_output_stream);
78   ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
79 
80   if (section_type == LTO_section_function_body)
81     ob->cfg_stream = XCNEW (struct lto_output_stream);
82 
83   clear_line_info (ob);
84 
85   ob->string_hash_table = new hash_table<string_slot_hasher> (37);
86   gcc_obstack_init (&ob->obstack);
87 
88   return ob;
89 }
90 
91 
92 /* Destroy the output block OB.  */
93 
94 void
95 destroy_output_block (struct output_block *ob)
96 {
97   enum lto_section_type section_type = ob->section_type;
98 
99   delete ob->string_hash_table;
100   ob->string_hash_table = NULL;
101 
102   free (ob->main_stream);
103   free (ob->string_stream);
104   if (section_type == LTO_section_function_body)
105     free (ob->cfg_stream);
106 
107   streamer_tree_cache_delete (ob->writer_cache);
108   obstack_free (&ob->obstack, NULL);
109 
110   free (ob);
111 }
112 
113 
114 /* Look up NODE in the type table and write the index for it to OB.  */
115 
116 static void
117 output_type_ref (struct output_block *ob, tree node)
118 {
119   streamer_write_record_start (ob, LTO_type_ref);
120   lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
121 }
122 
123 
124 /* Return true if tree node T is written to various tables.  For these
125    nodes, we sometimes want to write their physical representation
126    (via lto_output_tree), and sometimes we need to emit an index
127    reference into a table (via lto_output_tree_ref).  */
128 
129 static bool
130 tree_is_indexable (tree t)
131 {
132   /* Parameters and return values of functions of variably modified types
133      must go to global stream, because they may be used in the type
134      definition.  */
135   if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
136       && DECL_CONTEXT (t))
137     return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
138   /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.
139      We should no longer need to stream it.  */
140   else if (TREE_CODE (t) == IMPORTED_DECL)
141     gcc_unreachable ();
142   else if (TREE_CODE (t) == LABEL_DECL)
143     return FORCED_LABEL (t) || DECL_NONLOCAL (t);
144   else if (((VAR_P (t) && !TREE_STATIC (t))
145 	    || TREE_CODE (t) == TYPE_DECL
146 	    || TREE_CODE (t) == CONST_DECL
147 	    || TREE_CODE (t) == NAMELIST_DECL)
148 	   && decl_function_context (t))
149     return false;
150   else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
151     return false;
152   /* Variably modified types need to be streamed alongside function
153      bodies because they can refer to local entities.  Together with
154      them we have to localize their members as well.
155      ???  In theory that includes non-FIELD_DECLs as well.  */
156   else if (TYPE_P (t)
157 	   && variably_modified_type_p (t, NULL_TREE))
158     return false;
159   else if (TREE_CODE (t) == FIELD_DECL
160 	   && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
161     return false;
162   else
163     return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
164 }
165 
166 
167 /* Output info about new location into bitpack BP.
168    After outputting bitpack, lto_output_location_data has
169    to be done to output actual data.  */
170 
171 void
172 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
173 		     location_t loc)
174 {
175   expanded_location xloc;
176 
177   loc = LOCATION_LOCUS (loc);
178   bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
179 		        loc < RESERVED_LOCATION_COUNT
180 			? loc : RESERVED_LOCATION_COUNT);
181   if (loc < RESERVED_LOCATION_COUNT)
182     return;
183 
184   xloc = expand_location (loc);
185 
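  /* Delta-encode against the location streamed last: flag which of the
     file, line and column components changed relative to OB's current
     state, and below write only the components that did change.  */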
186   bp_pack_value (bp, ob->current_file != xloc.file, 1);
187   bp_pack_value (bp, ob->current_line != xloc.line, 1);
188   bp_pack_value (bp, ob->current_col != xloc.column, 1);
189 
190   if (ob->current_file != xloc.file)
191     {
192       bp_pack_string (ob, bp, xloc.file, true);
193       bp_pack_value (bp, xloc.sysp, 1);
194     }
195   ob->current_file = xloc.file;
196   ob->current_sysp = xloc.sysp;
197 
198   if (ob->current_line != xloc.line)
199     bp_pack_var_len_unsigned (bp, xloc.line);
200   ob->current_line = xloc.line;
201 
202   if (ob->current_col != xloc.column)
203     bp_pack_var_len_unsigned (bp, xloc.column);
204   ob->current_col = xloc.column;
205 }
206 
207 
208 /* If EXPR is an indexable tree node, output a reference to it to
209    output block OB.  Otherwise, output the physical representation of
210    EXPR to OB.  */
211 
212 static void
213 lto_output_tree_ref (struct output_block *ob, tree expr)
214 {
215   enum tree_code code;
216 
217   if (TYPE_P (expr))
218     {
219       output_type_ref (ob, expr);
220       return;
221     }
222 
223   code = TREE_CODE (expr);
224   switch (code)
225     {
226     case SSA_NAME:
227       streamer_write_record_start (ob, LTO_ssa_name_ref);
228       streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
229       break;
230 
231     case FIELD_DECL:
232       streamer_write_record_start (ob, LTO_field_decl_ref);
233       lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
234       break;
235 
236     case FUNCTION_DECL:
237       streamer_write_record_start (ob, LTO_function_decl_ref);
238       lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
239       break;
240 
241     case VAR_DECL:
242     case DEBUG_EXPR_DECL:
243       gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
244       /* FALLTHRU */
245     case PARM_DECL:
246       streamer_write_record_start (ob, LTO_global_decl_ref);
247       lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
248       break;
249 
250     case CONST_DECL:
251       streamer_write_record_start (ob, LTO_const_decl_ref);
252       lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
253       break;
254 
255     case IMPORTED_DECL:
256       gcc_assert (decl_function_context (expr) == NULL);
257       streamer_write_record_start (ob, LTO_imported_decl_ref);
258       lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
259       break;
260 
261     case TYPE_DECL:
262       streamer_write_record_start (ob, LTO_type_decl_ref);
263       lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
264       break;
265 
266     case NAMELIST_DECL:
267       streamer_write_record_start (ob, LTO_namelist_decl_ref);
268       lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
269       break;
270 
271     case NAMESPACE_DECL:
272       streamer_write_record_start (ob, LTO_namespace_decl_ref);
273       lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
274       break;
275 
276     case LABEL_DECL:
277       streamer_write_record_start (ob, LTO_label_decl_ref);
278       lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
279       break;
280 
281     case RESULT_DECL:
282       streamer_write_record_start (ob, LTO_result_decl_ref);
283       lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
284       break;
285 
286     case TRANSLATION_UNIT_DECL:
287       streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
288       lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
289       break;
290 
291     default:
292       /* No other node is indexable, so it should have been handled by
293 	 lto_output_tree.  */
294       gcc_unreachable ();
295     }
296 }
297 
298 
299 /* Return true if EXPR is a tree node that can be written to disk.  */
300 
301 static inline bool
302 lto_is_streamable (tree expr)
303 {
304   enum tree_code code = TREE_CODE (expr);
305 
306   /* Notice that we reject SSA_NAMEs as well.  We only emit the SSA
307      name version in lto_output_tree_ref (see output_ssa_names).  */
308   return !is_lang_specific (expr)
309 	 && code != SSA_NAME
310 	 && code != LANG_TYPE
311 	 && code != MODIFY_EXPR
312 	 && code != INIT_EXPR
313 	 && code != TARGET_EXPR
314 	 && code != BIND_EXPR
315 	 && code != WITH_CLEANUP_EXPR
316 	 && code != STATEMENT_LIST
317 	 && (code == CASE_LABEL_EXPR
318 	     || code == DECL_EXPR
319 	     || TREE_CODE_CLASS (code) != tcc_statement);
320 }
321 
322 /* Very rough estimate of the streaming size of the initializer.  If we
323    ignored the presence of strings, we could simply count the number of
324    non-indexable tree nodes and number of references to indexable nodes.
325    Strings however may be very large and we do not want to dump them into the global stream.
326 
327    Count the size of the initializer until the budget in DATA is exhausted.  */
328 
329 static tree
330 subtract_estimated_size (tree *tp, int *ws, void *data)
331 {
332   long *sum = (long *)data;
333   if (tree_is_indexable (*tp))
334     {
335       /* Indexable tree is one reference to global stream.
336 	 Guess it may be about 4 bytes.  */
337       *sum -= 4;
338       *ws = 0;
339     }
340   /* String table entry + base of tree node needs to be streamed.  */
341   if (TREE_CODE (*tp) == STRING_CST)
342     *sum -= TREE_STRING_LENGTH (*tp) + 8;
343   else
344     {
345       /* Identifiers are also variable length but should not appear
346 	 naked in constructor.  */
347       gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
348       /* We do not really attempt to work out the size of the pickled tree,
349 	 as it is very variable.  Just make it bigger than a reference.  */
350       *sum -= 16;
351     }
352   if (*sum < 0)
353     return *tp;
354   return NULL_TREE;
355 }
356 
357 
358 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL.  */
359 
360 static tree
361 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
362 {
363   gcc_checking_assert (DECL_P (expr)
364 		       && TREE_CODE (expr) != FUNCTION_DECL
365 		       && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
366 
367   /* Handle DECL_INITIAL for symbols.  */
368   tree initial = DECL_INITIAL (expr);
369   if (VAR_P (expr)
370       && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
371       && !DECL_IN_CONSTANT_POOL (expr)
372       && initial)
373     {
374       varpool_node *vnode;
375       /* Extra section needs about 30 bytes; do not produce it for simple
376 	 scalar values.  */
377       if (!(vnode = varpool_node::get (expr))
378 	  || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
379         initial = error_mark_node;
380       if (initial != error_mark_node)
381 	{
382 	  long max_size = 30;
383 	  if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
384 			 NULL))
385 	    initial = error_mark_node;
386 	}
387     }
388 
389   return initial;
390 }
391 
392 
393 /* Write a physical representation of tree node EXPR to output block
394    OB.  If REF_P is true, the leaves of EXPR are emitted as references
395    via lto_output_tree_ref.  IX is the index into the streamer cache
396    where EXPR is stored.  */
397 
398 static void
399 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
400 {
401   /* Pack all the non-pointer fields in EXPR into a bitpack and write
402      the resulting bitpack.  */
403   streamer_write_tree_bitfields (ob, expr);
404 
405   /* Write all the pointer fields in EXPR.  */
406   streamer_write_tree_body (ob, expr, ref_p);
407 
408   /* Write any LTO-specific data to OB.  */
409   if (DECL_P (expr)
410       && TREE_CODE (expr) != FUNCTION_DECL
411       && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
412     {
413       /* Handle DECL_INITIAL for symbols.  */
414       tree initial = get_symbol_initial_value
415 			 (ob->decl_state->symtab_node_encoder, expr);
416       stream_write_tree (ob, initial, ref_p);
417     }
418 
419   /* Stream references to early generated DIEs.  Keep in sync with the
420      trees handled in dwarf2out_die_ref_for_decl.  */
421   if ((DECL_P (expr)
422        && TREE_CODE (expr) != FIELD_DECL
423        && TREE_CODE (expr) != DEBUG_EXPR_DECL
424        && TREE_CODE (expr) != TYPE_DECL)
425       || TREE_CODE (expr) == BLOCK)
426     {
427       const char *sym;
428       unsigned HOST_WIDE_INT off;
429       if (debug_info_level > DINFO_LEVEL_NONE
430 	  && debug_hooks->die_ref_for_decl (expr, &sym, &off))
431 	{
432 	  streamer_write_string (ob, ob->main_stream, sym, true);
433 	  streamer_write_uhwi (ob, off);
434 	}
435       else
436 	streamer_write_string (ob, ob->main_stream, NULL, true);
437     }
438 }
439 
440 /* Write a physical representation of tree node EXPR to output block
441    OB.  If REF_P is true, the leaves of EXPR are emitted as references
442    via lto_output_tree_ref.  IX is the index into the streamer cache
443    where EXPR is stored.  */
444 
445 static void
446 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
447 {
448   if (!lto_is_streamable (expr))
449     internal_error ("tree code %qs is not supported in LTO streams",
450 		    get_tree_code_name (TREE_CODE (expr)));
451 
452   /* Write the header, containing everything needed to materialize
453      EXPR on the reading side.  */
454   streamer_write_tree_header (ob, expr);
455 
456   lto_write_tree_1 (ob, expr, ref_p);
457 
458   /* Mark the end of EXPR.  */
459   streamer_write_zero (ob);
460 }
461 
462 /* Emit the physical representation of tree node EXPR to output block OB.
463    If THIS_REF_P is true, the leaves of EXPR are emitted as references via
464    lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */
465 
466 static void
467 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
468 		   bool ref_p, bool this_ref_p)
469 {
470   unsigned ix;
471 
472   gcc_checking_assert (expr != NULL_TREE
473 		       && !(this_ref_p && tree_is_indexable (expr)));
474 
475   bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
476 					      expr, hash, &ix);
477   gcc_assert (!exists_p);
478   if (TREE_CODE (expr) == INTEGER_CST
479       && !TREE_OVERFLOW (expr))
480     {
481       /* Shared INTEGER_CST nodes are special because they need their
482 	 original type to be materialized by the reader (to implement
483 	 TYPE_CACHED_VALUES).  */
484       streamer_write_integer_cst (ob, expr, ref_p);
485     }
486   else
487     {
488       /* This is the first time we see EXPR, write its fields
489 	 to OB.  */
490       lto_write_tree (ob, expr, ref_p);
491     }
492 }
493 
494 class DFS
495 {
496 public:
497   DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
498        bool single_p);
499   ~DFS ();
500 
501   struct scc_entry
502   {
503     tree t;
504     hashval_t hash;
505   };
506   vec<scc_entry> sccstack;
507 
508 private:
509   struct sccs
510   {
511     unsigned int dfsnum;
512     unsigned int low;
513   };
514   struct worklist
515   {
516     tree expr;
517     sccs *from_state;
518     sccs *cstate;
519     bool ref_p;
520     bool this_ref_p;
521   };
522 
523   static int scc_entry_compare (const void *, const void *);
524 
525   void DFS_write_tree_body (struct output_block *ob,
526 			    tree expr, sccs *expr_state, bool ref_p);
527 
528   void DFS_write_tree (struct output_block *ob, sccs *from_state,
529 		       tree expr, bool ref_p, bool this_ref_p);
530 
531   hashval_t
532   hash_scc (struct output_block *ob, unsigned first, unsigned size,
533 	    bool ref_p, bool this_ref_p);
534 
535   hash_map<tree, sccs *> sccstate;
536   vec<worklist> worklist_vec;
537   struct obstack sccstate_obstack;
538 };
539 
540 /* Emit the physical representation of tree node EXPR to output block OB,
541    using depth-first search on the subgraph.  If THIS_REF_P is true, the
542    leaves of EXPR are emitted as references via lto_output_tree_ref.
543    REF_P is used for streaming siblings of EXPR.  If SINGLE_P is true,
544    this is for a rewalk of a single leaf SCC.  */
545 
546 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
547 	  bool single_p)
548 {
549   unsigned int next_dfs_num = 1;
550   sccstack.create (0);
551   gcc_obstack_init (&sccstate_obstack);
552   worklist_vec = vNULL;
553   DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
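  /* Process an explicit worklist instead of recursing so that deeply
     nested trees cannot overflow the stack; this is an iterative form of
     Tarjan's SCC algorithm using the dfsnum/low numbering kept in sccs.  */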
554   while (!worklist_vec.is_empty ())
555     {
556       worklist &w = worklist_vec.last ();
557       expr = w.expr;
558       sccs *from_state = w.from_state;
559       sccs *cstate = w.cstate;
560       ref_p = w.ref_p;
561       this_ref_p = w.this_ref_p;
562       if (cstate == NULL)
563 	{
564 	  sccs **slot = &sccstate.get_or_insert (expr);
565 	  cstate = *slot;
566 	  if (cstate)
567 	    {
568 	      gcc_checking_assert (from_state);
569 	      if (cstate->dfsnum < from_state->dfsnum)
570 		from_state->low = MIN (cstate->dfsnum, from_state->low);
571 	      worklist_vec.pop ();
572 	      continue;
573 	    }
574 
575 	  scc_entry e = { expr, 0 };
576 	  /* Not yet visited.  DFS recurse and push it onto the stack.  */
577 	  *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
578 	  sccstack.safe_push (e);
579 	  cstate->dfsnum = next_dfs_num++;
580 	  cstate->low = cstate->dfsnum;
581 	  w.cstate = cstate;
582 
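	  /* Non-overflowed INTEGER_CSTs are streamed with
	     streamer_write_integer_cst, so only their type edge needs to
	     be walked here.  */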
583 	  if (TREE_CODE (expr) == INTEGER_CST
584 	      && !TREE_OVERFLOW (expr))
585 	    DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
586 	  else
587 	    {
588 	      DFS_write_tree_body (ob, expr, cstate, ref_p);
589 
590 	      /* Walk any LTO-specific edges.  */
591 	      if (DECL_P (expr)
592 		  && TREE_CODE (expr) != FUNCTION_DECL
593 		  && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
594 		{
595 		  /* Handle DECL_INITIAL for symbols.  */
596 		  tree initial
597 		    = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
598 						expr);
599 		  DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
600 		}
601 	    }
602 	  continue;
603 	}
604 
605       /* See if we found an SCC.  */
606       if (cstate->low == cstate->dfsnum)
607 	{
608 	  unsigned first, size;
609 	  tree x;
610 
611 	  /* If we are re-walking a single leaf SCC just pop it and
612 	     let the earlier worklist item access the sccstack.  */
613 	  if (single_p)
614 	    {
615 	      worklist_vec.pop ();
616 	      continue;
617 	    }
618 
619 	  /* Pop the SCC and compute its size.  */
620 	  first = sccstack.length ();
621 	  do
622 	    {
623 	      x = sccstack[--first].t;
624 	    }
625 	  while (x != expr);
626 	  size = sccstack.length () - first;
627 
628 	  /* No need to compute hashes for LTRANS units, we don't perform
629 	     any merging there.  */
630 	  hashval_t scc_hash = 0;
631 	  unsigned scc_entry_len = 0;
632 	  if (!flag_wpa)
633 	    {
634 	      scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
635 
636 	      /* Put the entries with the least number of collisions first.  */
637 	      unsigned entry_start = 0;
638 	      scc_entry_len = size + 1;
639 	      for (unsigned i = 0; i < size;)
640 		{
641 		  unsigned from = i;
642 		  for (i = i + 1; i < size
643 		       && (sccstack[first + i].hash
644 			   == sccstack[first + from].hash); ++i)
645 		    ;
646 		  if (i - from < scc_entry_len)
647 		    {
648 		      scc_entry_len = i - from;
649 		      entry_start = from;
650 		    }
651 		}
652 	      for (unsigned i = 0; i < scc_entry_len; ++i)
653 		std::swap (sccstack[first + i],
654 			   sccstack[first + entry_start + i]);
655 
656 	      /* We already sorted SCC deterministically in hash_scc.  */
657 
658 	      /* Check that we have only one SCC entry candidate.
659 		 Naturally we may have conflicts if the hash function is not
660 		 strong enough.  Let's see how far this gets.  */
661 	      gcc_checking_assert (scc_entry_len == 1);
662 	    }
663 
664 	  /* Write LTO_tree_scc.  */
665 	  streamer_write_record_start (ob, LTO_tree_scc);
666 	  streamer_write_uhwi (ob, size);
667 	  streamer_write_uhwi (ob, scc_hash);
668 
669 	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
670 	     All INTEGER_CSTs need to be handled this way as we need
671 	     their type to materialize them.  Also builtins are handled
672 	     this way.
673 	     ???  We still wrap these in LTO_tree_scc so at the
674 	     input side we can properly identify the tree we want
675 	     to ultimately return.  */
676 	  if (size == 1)
677 	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
678 	  else
679 	    {
680 	      /* Write the size of the SCC entry candidates.  */
681 	      streamer_write_uhwi (ob, scc_entry_len);
682 
683 	      /* Write all headers and populate the streamer cache.  */
684 	      for (unsigned i = 0; i < size; ++i)
685 		{
686 		  hashval_t hash = sccstack[first+i].hash;
687 		  tree t = sccstack[first+i].t;
688 		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
689 							      t, hash, NULL);
690 		  gcc_assert (!exists_p);
691 
692 		  if (!lto_is_streamable (t))
693 		    internal_error ("tree code %qs is not supported "
694 				    "in LTO streams",
695 				    get_tree_code_name (TREE_CODE (t)));
696 
697 		  /* Write the header, containing everything needed to
698 		     materialize EXPR on the reading side.  */
699 		  streamer_write_tree_header (ob, t);
700 		}
701 
702 	      /* Write the bitpacks and tree references.  */
703 	      for (unsigned i = 0; i < size; ++i)
704 		{
705 		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
706 
707 		  /* Mark the end of the tree.  */
708 		  streamer_write_zero (ob);
709 		}
710 	    }
711 
712 	  /* Finally truncate the vector.  */
713 	  sccstack.truncate (first);
714 
715 	  if (from_state)
716 	    from_state->low = MIN (from_state->low, cstate->low);
717 	  worklist_vec.pop ();
718 	  continue;
719 	}
720 
721       gcc_checking_assert (from_state);
722       from_state->low = MIN (from_state->low, cstate->low);
723       if (cstate->dfsnum < from_state->dfsnum)
724 	from_state->low = MIN (cstate->dfsnum, from_state->low);
725       worklist_vec.pop ();
726     }
727   worklist_vec.release ();
728 }
729 
730 DFS::~DFS ()
731 {
732   sccstack.release ();
733   obstack_free (&sccstate_obstack, NULL);
734 }
735 
736 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
737    DFS recurse for all tree edges originating from it.  */
738 
739 void
740 DFS::DFS_write_tree_body (struct output_block *ob,
741 			  tree expr, sccs *expr_state, bool ref_p)
742 {
743 #define DFS_follow_tree_edge(DEST) \
744   DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
745 
746   enum tree_code code;
747 
748   if (streamer_dump_file)
749     {
750       print_node_brief (streamer_dump_file, "    Streaming ",
751 	 		expr, 4);
752       fprintf (streamer_dump_file, "  to %s\n",
753 	       lto_section_name [ob->section_type]);
754     }
755 
756   code = TREE_CODE (expr);
757 
758   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
759     {
760       if (TREE_CODE (expr) != IDENTIFIER_NODE)
761 	DFS_follow_tree_edge (TREE_TYPE (expr));
762     }
763 
764   if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
765     {
766       unsigned int count = vector_cst_encoded_nelts (expr);
767       for (unsigned int i = 0; i < count; ++i)
768 	DFS_follow_tree_edge (VECTOR_CST_ENCODED_ELT (expr, i));
769     }
770 
771   if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
772     for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
773       DFS_follow_tree_edge (POLY_INT_CST_COEFF (expr, i));
774 
775   if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
776     {
777       DFS_follow_tree_edge (TREE_REALPART (expr));
778       DFS_follow_tree_edge (TREE_IMAGPART (expr));
779     }
780 
781   if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
782     {
783       /* Drop names that were created for anonymous entities.  */
784       if (DECL_NAME (expr)
785 	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
786 	  && anon_aggrname_p (DECL_NAME (expr)))
787 	;
788       else
789 	DFS_follow_tree_edge (DECL_NAME (expr));
790       if (TREE_CODE (expr) != TRANSLATION_UNIT_DECL
791 	  && ! DECL_CONTEXT (expr))
792 	DFS_follow_tree_edge ((*all_translation_units)[0]);
793       else
794 	DFS_follow_tree_edge (DECL_CONTEXT (expr));
795     }
796 
797   if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
798     {
799       DFS_follow_tree_edge (DECL_SIZE (expr));
800       DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
801 
802       /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
803 	 special handling in LTO, it must be handled by streamer hooks.  */
804 
805       DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
806 
807       /* We use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
808 	 declarations which should be eliminated by decl merging. Be sure none
809 	 leaks to this point.  */
810       gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
811       DFS_follow_tree_edge (DECL_ABSTRACT_ORIGIN (expr));
812 
813       if ((VAR_P (expr)
814 	   || TREE_CODE (expr) == PARM_DECL)
815 	  && DECL_HAS_VALUE_EXPR_P (expr))
816 	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
817       if (VAR_P (expr)
818 	  && DECL_HAS_DEBUG_EXPR_P (expr))
819 	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
820     }
821 
822   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
823     {
824       /* Make sure we don't inadvertently set the assembler name.  */
825       if (DECL_ASSEMBLER_NAME_SET_P (expr))
826 	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
827     }
828 
829   if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
830     {
831       DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
832       DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
833       DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
834       DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
835       gcc_checking_assert (!DECL_FCONTEXT (expr));
836     }
837 
838   if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
839     {
840       gcc_checking_assert (DECL_VINDEX (expr) == NULL);
841       DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
842       DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
843       DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
844     }
845 
846   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
847     {
848       DFS_follow_tree_edge (TYPE_SIZE (expr));
849       DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
850       DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
851       DFS_follow_tree_edge (TYPE_NAME (expr));
852       /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
853 	 reconstructed during fixup.  */
854       /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
855 	 during fixup.  */
856       DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
857       DFS_follow_tree_edge (TYPE_CONTEXT (expr));
858       /* TYPE_CANONICAL is re-computed during type merging, so no need
859 	 to follow it here.  */
860       /* Do not stream TYPE_STUB_DECL; it is not needed by LTO but currently
861 	 it cannot be freed by free_lang_data without triggering ICEs in
862 	 langhooks.  */
863     }
864 
865   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
866     {
867       if (TREE_CODE (expr) == ENUMERAL_TYPE)
868 	DFS_follow_tree_edge (TYPE_VALUES (expr));
869       else if (TREE_CODE (expr) == ARRAY_TYPE)
870 	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
871       else if (RECORD_OR_UNION_TYPE_P (expr))
872 	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
873 	  DFS_follow_tree_edge (t);
874       else if (TREE_CODE (expr) == FUNCTION_TYPE
875 	       || TREE_CODE (expr) == METHOD_TYPE)
876 	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
877 
878       if (!POINTER_TYPE_P (expr))
879 	DFS_follow_tree_edge (TYPE_MIN_VALUE_RAW (expr));
880       DFS_follow_tree_edge (TYPE_MAX_VALUE_RAW (expr));
881     }
882 
883   if (CODE_CONTAINS_STRUCT (code, TS_LIST))
884     {
885       DFS_follow_tree_edge (TREE_PURPOSE (expr));
886       DFS_follow_tree_edge (TREE_VALUE (expr));
887       DFS_follow_tree_edge (TREE_CHAIN (expr));
888     }
889 
890   if (CODE_CONTAINS_STRUCT (code, TS_VEC))
891     {
892       for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
893 	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
894     }
895 
896   if (CODE_CONTAINS_STRUCT (code, TS_EXP))
897     {
898       for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
899 	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
900       DFS_follow_tree_edge (TREE_BLOCK (expr));
901     }
902 
903   if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
904     {
905       for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
906 	{
907 	  /* We would have to stream externals in the block chain as
908 	     non-references but we should have dropped them in
909 	     free-lang-data.  */
910 	  gcc_assert (!VAR_OR_FUNCTION_DECL_P (t) || !DECL_EXTERNAL (t));
911 	  DFS_follow_tree_edge (t);
912 	}
913 
914       DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
915       DFS_follow_tree_edge (BLOCK_ABSTRACT_ORIGIN (expr));
916 
917       /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
918 	 information for early inlined BLOCKs so drop it on the floor instead
919 	 of ICEing in dwarf2out.c.  */
920 
921       /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN are not live at LTO
922 	 streaming time.  */
923 
924       /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
925 	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
926     }
927 
928   if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
929     {
930       unsigned i;
931       tree t;
932 
933       /* Note that the number of BINFO slots has already been emitted in
934 	 EXPR's header (see streamer_write_tree_header) because this length
935 	 is needed to build the empty BINFO node on the reader side.  */
936       FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
937 	DFS_follow_tree_edge (t);
938       DFS_follow_tree_edge (BINFO_OFFSET (expr));
939       DFS_follow_tree_edge (BINFO_VTABLE (expr));
940 
941       /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX,
942 	 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
943 	 by C++ FE only.  */
944     }
945 
946   if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
947     {
948       unsigned i;
949       tree index, value;
950 
951       FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
952 	{
953 	  DFS_follow_tree_edge (index);
954 	  DFS_follow_tree_edge (value);
955 	}
956     }
957 
958   if (code == OMP_CLAUSE)
959     {
960       int i;
961       for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
962 	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
963       DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
964     }
965 
966 #undef DFS_follow_tree_edge
967 }
968 
969 /* Return a hash value for the tree T.
970    CACHE holds hash values of trees outside current SCC.  MAP, if non-NULL,
971    may hold hash values of trees inside the current SCC.  */
972 
973 static hashval_t
974 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
975 {
976   inchash::hash hstate;
977 
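/* Mix the hash of the tree referenced by SIBLING into HSTATE: use the
   streamer cache hash when SIBLING lies outside the current SCC, the hash
   recorded in MAP when it lies inside, and a constant placeholder
   otherwise.  */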
978 #define visit(SIBLING) \
979   do { \
980     unsigned ix; \
981     if (!SIBLING) \
982       hstate.add_int (0); \
983     else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
984       hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
985     else if (map) \
986       hstate.add_int (*map->get (SIBLING)); \
987     else \
988       hstate.add_int (1); \
989   } while (0)
990 
991   /* Hash TS_BASE.  */
992   enum tree_code code = TREE_CODE (t);
993   hstate.add_int (code);
994   if (!TYPE_P (t))
995     {
996       hstate.add_flag (TREE_SIDE_EFFECTS (t));
997       hstate.add_flag (TREE_CONSTANT (t));
998       hstate.add_flag (TREE_READONLY (t));
999       hstate.add_flag (TREE_PUBLIC (t));
1000     }
1001   hstate.add_flag (TREE_ADDRESSABLE (t));
1002   hstate.add_flag (TREE_THIS_VOLATILE (t));
1003   if (DECL_P (t))
1004     hstate.add_flag (DECL_UNSIGNED (t));
1005   else if (TYPE_P (t))
1006     hstate.add_flag (TYPE_UNSIGNED (t));
1007   if (TYPE_P (t))
1008     hstate.add_flag (TYPE_ARTIFICIAL (t));
1009   else
1010     hstate.add_flag (TREE_NO_WARNING (t));
1011   hstate.add_flag (TREE_NOTHROW (t));
1012   hstate.add_flag (TREE_STATIC (t));
1013   hstate.add_flag (TREE_PROTECTED (t));
1014   hstate.add_flag (TREE_DEPRECATED (t));
1015   if (code != TREE_BINFO)
1016     hstate.add_flag (TREE_PRIVATE (t));
1017   if (TYPE_P (t))
1018     {
1019       hstate.add_flag (AGGREGATE_TYPE_P (t)
1020 		       ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
1021       hstate.add_flag (TYPE_ADDR_SPACE (t));
1022     }
1023   else if (code == SSA_NAME)
1024     hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
1025   hstate.commit_flag ();
1026 
1027   if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1028     hstate.add_wide_int (wi::to_widest (t));
1029 
1030   if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1031     {
1032       REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1033       hstate.add_flag (r.cl);
1034       hstate.add_flag (r.sign);
1035       hstate.add_flag (r.signalling);
1036       hstate.add_flag (r.canonical);
1037       hstate.commit_flag ();
1038       hstate.add_int (r.uexp);
1039       hstate.add (r.sig, sizeof (r.sig));
1040     }
1041 
1042   if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1043     {
1044       FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1045       hstate.add_int (f.mode);
1046       hstate.add_int (f.data.low);
1047       hstate.add_int (f.data.high);
1048     }
1049 
1050   if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1051     {
1052       hstate.add_hwi (DECL_MODE (t));
1053       hstate.add_flag (DECL_NONLOCAL (t));
1054       hstate.add_flag (DECL_VIRTUAL_P (t));
1055       hstate.add_flag (DECL_IGNORED_P (t));
1056       hstate.add_flag (DECL_ABSTRACT_P (t));
1057       hstate.add_flag (DECL_ARTIFICIAL (t));
1058       hstate.add_flag (DECL_USER_ALIGN (t));
1059       hstate.add_flag (DECL_PRESERVE_P (t));
1060       hstate.add_flag (DECL_EXTERNAL (t));
1061       hstate.add_flag (DECL_GIMPLE_REG_P (t));
1062       hstate.commit_flag ();
1063       hstate.add_int (DECL_ALIGN (t));
1064       if (code == LABEL_DECL)
1065 	{
1066           hstate.add_int (EH_LANDING_PAD_NR (t));
1067 	  hstate.add_int (LABEL_DECL_UID (t));
1068 	}
1069       else if (code == FIELD_DECL)
1070 	{
1071 	  hstate.add_flag (DECL_PACKED (t));
1072 	  hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1073 	  hstate.add_flag (DECL_PADDING_P (t));
1074 	  hstate.add_int (DECL_OFFSET_ALIGN (t));
1075 	}
1076       else if (code == VAR_DECL)
1077 	{
1078 	  hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1079 	  hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1080 	}
1081       if (code == RESULT_DECL
1082 	  || code == PARM_DECL
1083 	  || code == VAR_DECL)
1084 	{
1085 	  hstate.add_flag (DECL_BY_REFERENCE (t));
1086 	  if (code == VAR_DECL
1087 	      || code == PARM_DECL)
1088 	    hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1089 	}
1090       hstate.commit_flag ();
1091     }
1092 
1093   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1094     hstate.add_int (DECL_REGISTER (t));
1095 
1096   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1097     {
1098       hstate.add_flag (DECL_COMMON (t));
1099       hstate.add_flag (DECL_DLLIMPORT_P (t));
1100       hstate.add_flag (DECL_WEAK (t));
1101       hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1102       hstate.add_flag (DECL_COMDAT (t));
1103       hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1104       hstate.add_int (DECL_VISIBILITY (t));
1105       if (code == VAR_DECL)
1106 	{
1107 	  /* DECL_IN_TEXT_SECTION is set during final asm output only.  */
1108 	  hstate.add_flag (DECL_HARD_REGISTER (t));
1109 	  hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1110 	}
1111       if (TREE_CODE (t) == FUNCTION_DECL)
1112         {
1113 	  hstate.add_flag (DECL_FINAL_P (t));
1114 	  hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1115 	  hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1116 	}
1117       hstate.commit_flag ();
1118     }
1119 
1120   if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1121     {
1122       hstate.add_int (DECL_BUILT_IN_CLASS (t));
1123       hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1124       hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1125       hstate.add_flag (DECL_UNINLINABLE (t));
1126       hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1127       hstate.add_flag (DECL_IS_NOVOPS (t));
1128       hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1129       hstate.add_flag (DECL_IS_MALLOC (t));
1130       hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1131       hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1132       hstate.add_flag (DECL_STATIC_CHAIN (t));
1133       hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1134       hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1135       hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1136       hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1137       hstate.add_flag (DECL_PURE_P (t));
1138       hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1139       hstate.commit_flag ();
1140       if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1141 	hstate.add_int (DECL_FUNCTION_CODE (t));
1142     }
1143 
1144   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1145     {
1146       hstate.add_hwi (TYPE_MODE (t));
1147       hstate.add_flag (TYPE_STRING_FLAG (t));
1148       /* TYPE_NO_FORCE_BLK is private to stor-layout and needs
1149  	 no streaming.  */
1150       hstate.add_flag (TYPE_PACKED (t));
1151       hstate.add_flag (TYPE_RESTRICT (t));
1152       hstate.add_flag (TYPE_USER_ALIGN (t));
1153       hstate.add_flag (TYPE_READONLY (t));
1154       if (RECORD_OR_UNION_TYPE_P (t))
1155 	{
1156 	  hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1157 	  hstate.add_flag (TYPE_FINAL_P (t));
1158 	}
1159       else if (code == ARRAY_TYPE)
1160 	hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1161       if (AGGREGATE_TYPE_P (t))
1162 	hstate.add_flag (TYPE_TYPELESS_STORAGE (t));
1163       hstate.commit_flag ();
1164       hstate.add_int (TYPE_PRECISION (t));
1165       hstate.add_int (TYPE_ALIGN (t));
1166       hstate.add_int (TYPE_EMPTY_P (t));
1167     }
1168 
1169   if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1170     hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1171 			strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1172 
1173   if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1174       /* We don't stream these when passing things to a different target.  */
1175       && !lto_stream_offload_p)
1176     hstate.add_hwi (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1177 
1178   if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1179     hstate.add_hwi (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1180 
1181   if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1182     hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1183 
1184   if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1185     hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1186 
1187   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1188     {
1189       if (code != IDENTIFIER_NODE)
1190 	visit (TREE_TYPE (t));
1191     }
1192 
1193   if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1194     {
1195       unsigned int count = vector_cst_encoded_nelts (t);
1196       for (unsigned int i = 0; i < count; ++i)
1197 	visit (VECTOR_CST_ENCODED_ELT (t, i));
1198     }
1199 
1200   if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
1201     for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
1202       visit (POLY_INT_CST_COEFF (t, i));
1203 
1204   if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1205     {
1206       visit (TREE_REALPART (t));
1207       visit (TREE_IMAGPART (t));
1208     }
1209 
1210   if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1211     {
1212       /* Drop names that were created for anonymous entities.  */
1213       if (DECL_NAME (t)
1214 	  && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1215 	  && anon_aggrname_p (DECL_NAME (t)))
1216 	;
1217       else
1218 	visit (DECL_NAME (t));
1219       if (DECL_FILE_SCOPE_P (t))
1220 	;
1221       else
1222 	visit (DECL_CONTEXT (t));
1223     }
1224 
1225   if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1226     {
1227       visit (DECL_SIZE (t));
1228       visit (DECL_SIZE_UNIT (t));
1229       visit (DECL_ATTRIBUTES (t));
1230       if ((code == VAR_DECL
1231 	   || code == PARM_DECL)
1232 	  && DECL_HAS_VALUE_EXPR_P (t))
1233 	visit (DECL_VALUE_EXPR (t));
1234       if (code == VAR_DECL
1235 	  && DECL_HAS_DEBUG_EXPR_P (t))
1236 	visit (DECL_DEBUG_EXPR (t));
1237       /* ???  Hash DECL_INITIAL as streamed.  Needs the output-block to
1238          be able to call get_symbol_initial_value.  */
1239     }
1240 
1241   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1242     {
1243       if (DECL_ASSEMBLER_NAME_SET_P (t))
1244 	visit (DECL_ASSEMBLER_NAME (t));
1245     }
1246 
1247   if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1248     {
1249       visit (DECL_FIELD_OFFSET (t));
1250       visit (DECL_BIT_FIELD_TYPE (t));
1251       visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1252       visit (DECL_FIELD_BIT_OFFSET (t));
1253     }
1254 
1255   if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1256     {
1257       visit (DECL_FUNCTION_PERSONALITY (t));
1258       visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1259       visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1260     }
1261 
1262   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1263     {
1264       visit (TYPE_SIZE (t));
1265       visit (TYPE_SIZE_UNIT (t));
1266       visit (TYPE_ATTRIBUTES (t));
1267       visit (TYPE_NAME (t));
1268       visit (TYPE_MAIN_VARIANT (t));
1269       if (TYPE_FILE_SCOPE_P (t))
1270 	;
1271       else
1272 	visit (TYPE_CONTEXT (t));
1273     }
1274 
1275   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1276     {
1277       if (code == ENUMERAL_TYPE)
1278 	visit (TYPE_VALUES (t));
1279       else if (code == ARRAY_TYPE)
1280 	visit (TYPE_DOMAIN (t));
1281       else if (RECORD_OR_UNION_TYPE_P (t))
1282 	for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1283 	  visit (f);
1284       else if (code == FUNCTION_TYPE
1285 	       || code == METHOD_TYPE)
1286 	visit (TYPE_ARG_TYPES (t));
1287       if (!POINTER_TYPE_P (t))
1288 	visit (TYPE_MIN_VALUE_RAW (t));
1289       visit (TYPE_MAX_VALUE_RAW (t));
1290     }
1291 
1292   if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1293     {
1294       visit (TREE_PURPOSE (t));
1295       visit (TREE_VALUE (t));
1296       visit (TREE_CHAIN (t));
1297     }
1298 
1299   if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1300     for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1301       visit (TREE_VEC_ELT (t, i));
1302 
1303   if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1304     {
1305       hstate.add_hwi (TREE_OPERAND_LENGTH (t));
1306       for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1307 	visit (TREE_OPERAND (t, i));
1308     }
1309 
1310   if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1311     {
1312       unsigned i;
1313       tree b;
1314       FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1315 	visit (b);
1316       visit (BINFO_OFFSET (t));
1317       visit (BINFO_VTABLE (t));
1318       /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1319 	 BINFO_BASE_ACCESSES and BINFO_VPTR_INDEX; these are used
1320 	 by C++ FE only.  */
1321     }
1322 
1323   if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1324     {
1325       unsigned i;
1326       tree index, value;
1327       hstate.add_hwi (CONSTRUCTOR_NELTS (t));
1328       FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1329 	{
1330 	  visit (index);
1331 	  visit (value);
1332 	}
1333     }
1334 
1335   if (code == OMP_CLAUSE)
1336     {
1337       int i;
1338       HOST_WIDE_INT val;
1339 
1340       hstate.add_hwi (OMP_CLAUSE_CODE (t));
1341       switch (OMP_CLAUSE_CODE (t))
1342 	{
1343 	case OMP_CLAUSE_DEFAULT:
1344 	  val = OMP_CLAUSE_DEFAULT_KIND (t);
1345 	  break;
1346 	case OMP_CLAUSE_SCHEDULE:
1347 	  val = OMP_CLAUSE_SCHEDULE_KIND (t);
1348 	  break;
1349 	case OMP_CLAUSE_DEPEND:
1350 	  val = OMP_CLAUSE_DEPEND_KIND (t);
1351 	  break;
1352 	case OMP_CLAUSE_MAP:
1353 	  val = OMP_CLAUSE_MAP_KIND (t);
1354 	  break;
1355 	case OMP_CLAUSE_PROC_BIND:
1356 	  val = OMP_CLAUSE_PROC_BIND_KIND (t);
1357 	  break;
1358 	case OMP_CLAUSE_REDUCTION:
1359 	case OMP_CLAUSE_TASK_REDUCTION:
1360 	case OMP_CLAUSE_IN_REDUCTION:
1361 	  val = OMP_CLAUSE_REDUCTION_CODE (t);
1362 	  break;
1363 	default:
1364 	  val = 0;
1365 	  break;
1366 	}
1367       hstate.add_hwi (val);
1368       for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1369 	visit (OMP_CLAUSE_OPERAND (t, i));
1370       visit (OMP_CLAUSE_CHAIN (t));
1371     }
1372 
1373   return hstate.end ();
1374 
1375 #undef visit
1376 }
1377 
1378 /* Compare two SCC entries by their hash value for qsorting them.  */
1379 
1380 int
1381 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1382 {
1383   const scc_entry *p1 = (const scc_entry *) p1_;
1384   const scc_entry *p2 = (const scc_entry *) p2_;
1385   if (p1->hash < p2->hash)
1386     return -1;
1387   else if (p1->hash > p2->hash)
1388     return 1;
1389   return 0;
1390 }
1391 
1392 /* Return a hash value for the SCC of SIZE entries starting at FIRST on the
1393    SCC stack.  THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST.  */
1394 
1395 hashval_t
1396 DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1397 	       bool ref_p, bool this_ref_p)
1398 {
1399   unsigned int last_classes = 0, iterations = 0;
1400 
1401   /* Compute hash values for the SCC members.  */
1402   for (unsigned i = 0; i < size; ++i)
1403     sccstack[first+i].hash
1404       = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1405 
1406   if (size == 1)
1407     return sccstack[first].hash;
1408 
1409   /* We aim to get unique hash for every tree within SCC and compute hash value
1410      of the whole SCC by combining all values together in a stable (entry-point
1411      independent) order.  This guarantees that the same SCC regions within
1412      different translation units will get the same hash values and therefore
1413      will be merged at WPA time.
1414 
1415      Often the hashes are already unique.  In that case we compute the SCC hash
1416      by combining individual hash values in an increasing order.
1417 
1418      If there are duplicates, we seek at least one tree with unique hash (and
1419      pick one with minimal hash and this property).  Then we obtain a stable
1420      order by DFS walk starting from this unique tree and then use the index
1421      within this order to make individual hash values unique.
1422 
1423      If there is no tree with unique hash, we iteratively propagate the hash
1424      values across the internal edges of SCC.  This usually quickly leads
1425      to unique hashes.  Consider, for example, an SCC containing two pointers
1426      that are identical except for the types they point to and assume that
1427      these types are also part of the SCC.  The propagation will add the
1428      points-to type information into their hash values.  */
1429   do
1430     {
1431       /* Sort the SCC so we can easily check for uniqueness.  */
1432       qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1433 
1434       unsigned int classes = 1;
1435       int firstunique = -1;
1436 
1437       /* Find the tree with lowest unique hash (if it exists) and compute
1438 	 the number of equivalence classes.  */
1439       if (sccstack[first].hash != sccstack[first+1].hash)
1440 	firstunique = 0;
1441       for (unsigned i = 1; i < size; ++i)
1442 	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1443 	  {
1444 	    classes++;
1445 	    if (firstunique == -1
1446 		&& (i == size - 1
1447 		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
1448 	      firstunique = i;
1449 	  }
1450 
1451       /* If we found a tree with unique hash, stop the iteration.  */
1452       if (firstunique != -1
1453 	  /* Also terminate if we run out of iterations or if the number of
1454 	     equivalence classes is no longer increasing.
1455 	     For example a cyclic list of trees that are all equivalent will
1456 	     never have a unique entry point; however, we do not build such SCCs
1457 	     in our IL.  */
1458 	  || classes <= last_classes || iterations > 16)
1459 	{
1460           hashval_t scc_hash;
1461 
1462 	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1463 	     starting from FIRSTUNIQUE to obtain a stable order.  */
1464 	  if (classes != size && firstunique != -1)
1465 	    {
1466 	      hash_map <tree, hashval_t> map(size*2);
1467 
1468 	      /* Store hash values into a map, so we can associate them with
1469 		 the reordered SCC.  */
1470 	      for (unsigned i = 0; i < size; ++i)
1471 		map.put (sccstack[first+i].t, sccstack[first+i].hash);
1472 
1473 	      DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1474 			 true);
1475 	      gcc_assert (again.sccstack.length () == size);
1476 
1477 	      memcpy (sccstack.address () + first,
1478 		      again.sccstack.address (),
1479 		      sizeof (scc_entry) * size);
1480 
1481 	      /* Update hash values of individual members by hashing in the
1482 		 index within the stable order.  This ensures uniqueness.
1483 		 Also compute the SCC hash by mixing in all hash values in
1484 		 the stable order we obtained.  */
1485 	      sccstack[first].hash = *map.get (sccstack[first].t);
1486 	      scc_hash = sccstack[first].hash;
1487 	      for (unsigned i = 1; i < size; ++i)
1488 		{
1489 		  sccstack[first+i].hash
1490 		    = iterative_hash_hashval_t (i,
1491 						*map.get (sccstack[first+i].t));
1492 		  scc_hash
1493 		    = iterative_hash_hashval_t (scc_hash,
1494 						sccstack[first+i].hash);
1495 		}
1496 	    }
1497 	  /* If we got a unique hash value for each tree, then the sort already
1498 	     ensured an entry-point-independent order.  Only compute the final
1499 	     SCC hash.
1500 
1501 	     If we failed to find a unique entry point, we go by the same
1502 	     route.  We will eventually introduce unwanted hash conflicts.  */
1503 	  else
1504 	    {
1505 	      scc_hash = sccstack[first].hash;
1506 	      for (unsigned i = 1; i < size; ++i)
1507 		scc_hash
1508 		  = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1509 
1510 	      /* We cannot fully guarantee that hash collisions will never make
1511 		 it impossible to find a unique entry point.  This however
1512 		 should be an extremely rare case.  ICE for now so possible
1513 		 issues are found and evaluated.  */
1514 	      gcc_checking_assert (classes == size);
1515 	    }
1516 
1517 	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1518 	     hash into the hash of each element.  */
1519 	  for (unsigned i = 0; i < size; ++i)
1520 	    sccstack[first+i].hash
1521 	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1522 	  return scc_hash;
1523 	}
1524 
1525       last_classes = classes;
1526       iterations++;
1527 
1528       /* We failed to identify the entry point; propagate hash values across
1529 	 the edges.  */
1530       hash_map <tree, hashval_t> map(size*2);
1531 
1532       for (unsigned i = 0; i < size; ++i)
1533 	map.put (sccstack[first+i].t, sccstack[first+i].hash);
1534 
1535       for (unsigned i = 0; i < size; i++)
1536 	sccstack[first+i].hash
1537 	  = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1538     }
1539   while (true);
1540 }
1541 
1542 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1543    already in the streamer cache.  Main routine called for
1544    each visit of EXPR.  */
1545 
1546 void
1547 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1548 		     tree expr, bool ref_p, bool this_ref_p)
1549 {
1550   /* Handle special cases.  */
1551   if (expr == NULL_TREE)
1552     return;
1553 
1554   /* Do not DFS walk into indexable trees.  */
1555   if (this_ref_p && tree_is_indexable (expr))
1556     return;
1557 
1558   /* Check if we already streamed EXPR.  */
1559   if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1560     return;
1561 
1562   worklist w;
1563   w.expr = expr;
1564   w.from_state = from_state;
1565   w.cstate = NULL;
1566   w.ref_p = ref_p;
1567   w.this_ref_p = this_ref_p;
1568   worklist_vec.safe_push (w);
1569 }
1570 
1571 
1572 /* Emit the physical representation of tree node EXPR to output block OB.
1573    If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1574    lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */
1575 
1576 void
1577 lto_output_tree (struct output_block *ob, tree expr,
1578 		 bool ref_p, bool this_ref_p)
1579 {
1580   unsigned ix;
1581   bool existed_p;
1582 
1583   if (expr == NULL_TREE)
1584     {
1585       streamer_write_record_start (ob, LTO_null);
1586       return;
1587     }
1588 
1589   if (this_ref_p && tree_is_indexable (expr))
1590     {
1591       lto_output_tree_ref (ob, expr);
1592       return;
1593     }
1594 
1595   existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1596   if (existed_p)
1597     {
1598       /* If a node has already been streamed out, make sure that
1599 	 we don't write it more than once.  Otherwise, the reader
1600 	 will instantiate two different nodes for the same object.  */
1601       streamer_write_record_start (ob, LTO_tree_pickle_reference);
1602       streamer_write_uhwi (ob, ix);
1603       streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1604 			   lto_tree_code_to_tag (TREE_CODE (expr)));
1605       lto_stats.num_pickle_refs_output++;
1606     }
1607   else
1608     {
1609       /* This is the first time we see EXPR, write all reachable
1610 	 trees to OB.  */
1611       static bool in_dfs_walk;
1612 
1613       /* Protect against recursion, which would mean a disconnect between
1614 	 the tree edges we walk in the DFS walk and the edges we
1615 	 stream out.  */
1616       gcc_assert (!in_dfs_walk);
1617 
1618       if (streamer_dump_file)
1619 	{
1620 	  print_node_brief (streamer_dump_file, "   Streaming SCC of ",
1621 			    expr, 4);
1622           fprintf (streamer_dump_file, "\n");
1623 	}
1624 
1625       /* Start the DFS walk.  */
1628       in_dfs_walk = true;
1629       DFS (ob, expr, ref_p, this_ref_p, false);
1630       in_dfs_walk = false;
1631 
1632       /* Finally append a reference to the tree we were writing.
1633 	 ???  If expr ended up as a singleton we could have
1634 	 inlined it here and avoided outputting a reference.  */
1635       existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1636       gcc_assert (existed_p);
1637       streamer_write_record_start (ob, LTO_tree_pickle_reference);
1638       streamer_write_uhwi (ob, ix);
1639       streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1640 			   lto_tree_code_to_tag (TREE_CODE (expr)));
1641       if (streamer_dump_file)
1642 	{
1643 	  print_node_brief (streamer_dump_file, "   Finished SCC of ",
1644 			    expr, 4);
1645           fprintf (streamer_dump_file, "\n\n");
1646 	}
1647       lto_stats.num_pickle_refs_output++;
1648     }
1649 }
1650 
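/* Editorial note (added for exposition, not part of the original sources):
   the records emitted by lto_output_tree above take one of three shapes on
   the main stream.  This is a sketch derived from the code, not an
   authoritative format description:

     LTO_null                                       -- EXPR was NULL_TREE
     <indexable reference>                          -- via lto_output_tree_ref
     LTO_tree_pickle_reference, uhwi IX, enum TAG   -- cache hit, or the
                                                        trailing reference
                                                        emitted after a
                                                        freshly streamed SCC  */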
1651 
1652 /* Output to OB a list of try/catch handlers starting with FIRST.  */
1653 
1654 static void
1655 output_eh_try_list (struct output_block *ob, eh_catch first)
1656 {
1657   eh_catch n;
1658 
1659   for (n = first; n; n = n->next_catch)
1660     {
1661       streamer_write_record_start (ob, LTO_eh_catch);
1662       stream_write_tree (ob, n->type_list, true);
1663       stream_write_tree (ob, n->filter_list, true);
1664       stream_write_tree (ob, n->label, true);
1665     }
1666 
1667   streamer_write_record_start (ob, LTO_null);
1668 }
1669 
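/* Illustration (editorial sketch derived from output_eh_try_list above):
   the handler list is a tag-terminated sequence of the form

     LTO_eh_catch, type_list, filter_list, label    (one group per handler)
     LTO_null                                       (terminator)

   where each operand is streamed with stream_write_tree.  */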
1670 
1671 /* Output EH region R to OB, or an LTO_null record if R is NULL.  The
1672    indices of R and of its related regions are streamed so that the
1673    reader can rebuild the region tree and detect EH region sharing.  */
1674 
1675 static void
1676 output_eh_region (struct output_block *ob, eh_region r)
1677 {
1678   enum LTO_tags tag;
1679 
1680   if (r == NULL)
1681     {
1682       streamer_write_record_start (ob, LTO_null);
1683       return;
1684     }
1685 
1686   if (r->type == ERT_CLEANUP)
1687     tag = LTO_ert_cleanup;
1688   else if (r->type == ERT_TRY)
1689     tag = LTO_ert_try;
1690   else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1691     tag = LTO_ert_allowed_exceptions;
1692   else if (r->type == ERT_MUST_NOT_THROW)
1693     tag = LTO_ert_must_not_throw;
1694   else
1695     gcc_unreachable ();
1696 
1697   streamer_write_record_start (ob, tag);
1698   streamer_write_hwi (ob, r->index);
1699 
1700   if (r->outer)
1701     streamer_write_hwi (ob, r->outer->index);
1702   else
1703     streamer_write_zero (ob);
1704 
1705   if (r->inner)
1706     streamer_write_hwi (ob, r->inner->index);
1707   else
1708     streamer_write_zero (ob);
1709 
1710   if (r->next_peer)
1711     streamer_write_hwi (ob, r->next_peer->index);
1712   else
1713     streamer_write_zero (ob);
1714 
1715   if (r->type == ERT_TRY)
1716     {
1717       output_eh_try_list (ob, r->u.eh_try.first_catch);
1718     }
1719   else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1720     {
1721       stream_write_tree (ob, r->u.allowed.type_list, true);
1722       stream_write_tree (ob, r->u.allowed.label, true);
1723       streamer_write_uhwi (ob, r->u.allowed.filter);
1724     }
1725   else if (r->type == ERT_MUST_NOT_THROW)
1726     {
1727       stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1728       bitpack_d bp = bitpack_create (ob->main_stream);
1729       stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1730       streamer_write_bitpack (&bp);
1731     }
1732 
1733   if (r->landing_pads)
1734     streamer_write_hwi (ob, r->landing_pads->index);
1735   else
1736     streamer_write_zero (ob);
1737 }
1738 
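/* Illustration (editorial sketch derived from output_eh_region above):
   a non-NULL region is emitted as

     tag   LTO_ert_cleanup / _try / _allowed_exceptions / _must_not_throw
     hwi   r->index
     hwi   outer index or 0
     hwi   inner index or 0
     hwi   next_peer index or 0
     ...   type-specific payload: the catch list for ERT_TRY, the allowed
           type_list/label/filter for ERT_ALLOWED_EXCEPTIONS, or the
           failure decl plus bitpacked location for ERT_MUST_NOT_THROW
     hwi   landing pad index or 0

   A NULL region is just an LTO_null record.  */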
1739 
1740 /* Output landing pad LP to OB.  */
1741 
1742 static void
1743 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1744 {
1745   if (lp == NULL)
1746     {
1747       streamer_write_record_start (ob, LTO_null);
1748       return;
1749     }
1750 
1751   streamer_write_record_start (ob, LTO_eh_landing_pad);
1752   streamer_write_hwi (ob, lp->index);
1753   if (lp->next_lp)
1754     streamer_write_hwi (ob, lp->next_lp->index);
1755   else
1756     streamer_write_zero (ob);
1757 
1758   if (lp->region)
1759     streamer_write_hwi (ob, lp->region->index);
1760   else
1761     streamer_write_zero (ob);
1762 
1763   stream_write_tree (ob, lp->post_landing_pad, true);
1764 }
1765 
1766 
1767 /* Output the existing eh_table to OB.  */
1768 
1769 static void
1770 output_eh_regions (struct output_block *ob, struct function *fn)
1771 {
1772   if (fn->eh && fn->eh->region_tree)
1773     {
1774       unsigned i;
1775       eh_region eh;
1776       eh_landing_pad lp;
1777       tree ttype;
1778 
1779       streamer_write_record_start (ob, LTO_eh_table);
1780 
1781       /* Emit the index of the root of the EH region tree.  */
1782       streamer_write_hwi (ob, fn->eh->region_tree->index);
1783 
1784       /* Emit all the EH regions in the region array.  */
1785       streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1786       FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1787 	output_eh_region (ob, eh);
1788 
1789       /* Emit all landing pads.  */
1790       streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1791       FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1792 	output_eh_lp (ob, lp);
1793 
1794       /* Emit all the runtime type data.  */
1795       streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1796       FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1797 	stream_write_tree (ob, ttype, true);
1798 
1799       /* Emit the table of action chains.  */
1800       if (targetm.arm_eabi_unwinder)
1801 	{
1802 	  tree t;
1803 	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1804 	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1805 	    stream_write_tree (ob, t, true);
1806 	}
1807       else
1808 	{
1809 	  uchar c;
1810 	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1811 	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1812 	    streamer_write_char_stream (ob->main_stream, c);
1813 	}
1814     }
1815 
1816   /* The LTO_null either terminates the record or indicates that there
1817      are no eh_records at all.  */
1818   streamer_write_record_start (ob, LTO_null);
1819 }
1820 
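/* Illustration (editorial sketch): when FN has an EH region tree, the table
   emitted above is laid out as

     LTO_eh_table
     hwi  index of the region tree root
     hwi  N, followed by N region records
     hwi  M, followed by M landing-pad records
     hwi  K, followed by K runtime ttype trees
     hwi  L, followed by the action chain (trees for the ARM EABI unwinder,
          raw bytes otherwise)
     LTO_null

   When there is no EH tree only the trailing LTO_null is written.  */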
1821 
1822 /* Output all of the active ssa names to the ssa_names stream.  */
1823 
1824 static void
1825 output_ssa_names (struct output_block *ob, struct function *fn)
1826 {
1827   unsigned int i, len;
1828 
1829   len = vec_safe_length (SSANAMES (fn));
1830   streamer_write_uhwi (ob, len);
1831 
1832   for (i = 1; i < len; i++)
1833     {
1834       tree ptr = (*SSANAMES (fn))[i];
1835 
1836       if (ptr == NULL_TREE
1837 	  || SSA_NAME_IN_FREE_LIST (ptr)
1838 	  || virtual_operand_p (ptr)
1839 	  /* Simply skip unreleased SSA names.  */
1840 	  || (! SSA_NAME_IS_DEFAULT_DEF (ptr)
1841 	      && (! SSA_NAME_DEF_STMT (ptr)
1842 		  || ! gimple_bb (SSA_NAME_DEF_STMT (ptr)))))
1843 	continue;
1844 
1845       streamer_write_uhwi (ob, i);
1846       streamer_write_char_stream (ob->main_stream,
1847 				  SSA_NAME_IS_DEFAULT_DEF (ptr));
1848       if (SSA_NAME_VAR (ptr))
1849 	stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1850       else
1851 	/* ???  This drops SSA_NAME_IDENTIFIER on the floor.  */
1852 	stream_write_tree (ob, TREE_TYPE (ptr), true);
1853     }
1854 
1855   streamer_write_zero (ob);
1856 }
1857 
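/* Illustration (editorial sketch): the SSA name stream written above is

     uhwi  len (vec_safe_length of SSANAMES (fn))
     per streamed name:  uhwi index, one byte default-def flag,
                         then SSA_NAME_VAR or TREE_TYPE as a tree
     0     (terminator written by streamer_write_zero)

   Names that are NULL, in the free list, virtual, or lack a def statement
   with a basic block are skipped, so the streamed indices may be sparse.  */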
1858 
1859 
1860 /* Output the cfg.  */
1861 
1862 static void
1863 output_cfg (struct output_block *ob, struct function *fn)
1864 {
1865   struct lto_output_stream *tmp_stream = ob->main_stream;
1866   basic_block bb;
1867 
1868   ob->main_stream = ob->cfg_stream;
1869 
1870   streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1871 		       profile_status_for_fn (fn));
1872 
1873   /* Output the number of the highest basic block.  */
1874   streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1875 
1876   FOR_ALL_BB_FN (bb, fn)
1877     {
1878       edge_iterator ei;
1879       edge e;
1880 
1881       streamer_write_hwi (ob, bb->index);
1882 
1883       /* Output the successors and the edge flags.  */
1884       streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1885       FOR_EACH_EDGE (e, ei, bb->succs)
1886 	{
1887 	  streamer_write_uhwi (ob, e->dest->index);
1888 	  e->probability.stream_out (ob);
1889 	  streamer_write_uhwi (ob, e->flags);
1890 	}
1891     }
1892 
1893   streamer_write_hwi (ob, -1);
1894 
1895   bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
1896   while (bb->next_bb)
1897     {
1898       streamer_write_hwi (ob, bb->next_bb->index);
1899       bb = bb->next_bb;
1900     }
1901 
1902   streamer_write_hwi (ob, -1);
1903 
1904   /* Output the number of loops.  */
1905   streamer_write_uhwi (ob, number_of_loops (fn));
1906 
1907   /* Output each loop, skipping the tree root which has number zero.  */
1908   for (unsigned i = 1; i < number_of_loops (fn); ++i)
1909     {
1910       struct loop *loop = get_loop (fn, i);
1911 
1912       /* Write the index of the loop header.  That's enough to rebuild
1913          the loop tree on the reader side.  Stream -1 for an unused
1914 	 loop entry.  */
1915       if (!loop)
1916 	{
1917 	  streamer_write_hwi (ob, -1);
1918 	  continue;
1919 	}
1920       else
1921 	streamer_write_hwi (ob, loop->header->index);
1922 
1923       /* Write everything copy_loop_info copies.  */
1924       streamer_write_enum (ob->main_stream,
1925 			   loop_estimation, EST_LAST, loop->estimate_state);
1926       streamer_write_hwi (ob, loop->any_upper_bound);
1927       if (loop->any_upper_bound)
1928 	streamer_write_widest_int (ob, loop->nb_iterations_upper_bound);
1929       streamer_write_hwi (ob, loop->any_likely_upper_bound);
1930       if (loop->any_likely_upper_bound)
1931 	streamer_write_widest_int (ob, loop->nb_iterations_likely_upper_bound);
1932       streamer_write_hwi (ob, loop->any_estimate);
1933       if (loop->any_estimate)
1934 	streamer_write_widest_int (ob, loop->nb_iterations_estimate);
1935 
1936       /* Write OMP SIMD related info.  */
1937       streamer_write_hwi (ob, loop->safelen);
1938       streamer_write_hwi (ob, loop->unroll);
1939       streamer_write_hwi (ob, loop->owned_clique);
1940       streamer_write_hwi (ob, loop->dont_vectorize);
1941       streamer_write_hwi (ob, loop->force_vectorize);
1942       stream_write_tree (ob, loop->simduid, true);
1943     }
1944 
1945   ob->main_stream = tmp_stream;
1946 }
1947 
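/* Illustration (editorial sketch, not part of the streamer): the CFG stream
   produced above consists of the profile status, the highest BB number, one
   successor list per basic block terminated by -1, the BB chain in next_bb
   order terminated by -1, and the loop records.  A reader of the per-block
   part would be shaped roughly like the following, assuming the reader-side
   calls (streamer_read_hwi/uhwi, profile_probability::stream_in) mirror the
   writer calls used above:

     while ((bb_index = streamer_read_hwi (ib)) != -1)
       {
	 unsigned n = streamer_read_uhwi (ib);
	 for (unsigned i = 0; i < n; i++)
	   {
	     unsigned dest = streamer_read_uhwi (ib);
	     profile_probability prob = profile_probability::stream_in (ib);
	     unsigned flags = streamer_read_uhwi (ib);
	     ... create the edge bb_index -> dest with PROB and FLAGS ...
	   }
       }  */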
1948 
1949 /* Create the header in the file using OB.  If the section type is for
1950    a function, FN is the decl for that function.  */
1951 
1952 void
1953 produce_asm (struct output_block *ob, tree fn)
1954 {
1955   enum lto_section_type section_type = ob->section_type;
1956   struct lto_function_header header;
1957   char *section_name;
1958 
1959   if (section_type == LTO_section_function_body)
1960     {
1961       const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1962       section_name = lto_get_section_name (section_type, name, NULL);
1963     }
1964   else
1965     section_name = lto_get_section_name (section_type, NULL, NULL);
1966 
1967   lto_begin_section (section_name, !flag_wpa);
1968   free (section_name);
1969 
1970   /* The entire header is computed here.  */
1971   memset (&header, 0, sizeof (struct lto_function_header));
1972 
1973   /* Write the header.  */
1974   header.major_version = LTO_major_version;
1975   header.minor_version = LTO_minor_version;
1976 
1977   if (section_type == LTO_section_function_body)
1978     header.cfg_size = ob->cfg_stream->total_size;
1979   header.main_size = ob->main_stream->total_size;
1980   header.string_size = ob->string_stream->total_size;
1981   lto_write_data (&header, sizeof header);
1982 
1983   /* Put all of the gimple and the string table out to the asm file as a
1984      block of text.  */
1985   if (section_type == LTO_section_function_body)
1986     lto_write_stream (ob->cfg_stream);
1987   lto_write_stream (ob->main_stream);
1988   lto_write_stream (ob->string_stream);
1989 
1990   lto_end_section ();
1991 }
1992 
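/* Illustration (editorial sketch): a section produced by produce_asm is

     struct lto_function_header   (major/minor version, cfg/main/string sizes)
     cfg stream                   (function-body sections only)
     main stream
     string stream

   The sizes recorded in the header let the reader locate each sub-stream.  */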
1993 
1994 /* Output the base body of struct function FN using output block OB.  */
1995 
1996 static void
1997 output_struct_function_base (struct output_block *ob, struct function *fn)
1998 {
1999   struct bitpack_d bp;
2000   unsigned i;
2001   tree t;
2002 
2003   /* Output the static chain and non-local goto save area.  */
2004   stream_write_tree (ob, fn->static_chain_decl, true);
2005   stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
2006 
2007   /* Output all the local variables in the function.  */
2008   streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
2009   FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
2010     stream_write_tree (ob, t, true);
2011 
2012   /* Output current IL state of the function.  */
2013   streamer_write_uhwi (ob, fn->curr_properties);
2014 
2015   /* Write all the attributes for FN.  */
2016   bp = bitpack_create (ob->main_stream);
2017   bp_pack_value (&bp, fn->is_thunk, 1);
2018   bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
2019   bp_pack_value (&bp, fn->returns_pcc_struct, 1);
2020   bp_pack_value (&bp, fn->returns_struct, 1);
2021   bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
2022   bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
2023   bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
2024   bp_pack_value (&bp, fn->after_inlining, 1);
2025   bp_pack_value (&bp, fn->stdarg, 1);
2026   bp_pack_value (&bp, fn->has_nonlocal_label, 1);
2027   bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
2028   bp_pack_value (&bp, fn->calls_alloca, 1);
2029   bp_pack_value (&bp, fn->calls_setjmp, 1);
2030   bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
2031   bp_pack_value (&bp, fn->has_simduid_loops, 1);
2032   bp_pack_value (&bp, fn->va_list_fpr_size, 8);
2033   bp_pack_value (&bp, fn->va_list_gpr_size, 8);
2034   bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
2035 
2036   /* Output the function start and end loci.  */
2037   stream_output_location (ob, &bp, fn->function_start_locus);
2038   stream_output_location (ob, &bp, fn->function_end_locus);
2039 
2040   /* Save the instance discriminator if present.  */
2041   int *instance_number_p = NULL;
2042   if (decl_to_instance_map)
2043     instance_number_p = decl_to_instance_map->get (fn->decl);
2044   bp_pack_value (&bp, !!instance_number_p, 1);
2045   if (instance_number_p)
2046     bp_pack_value (&bp, *instance_number_p, sizeof (int) * CHAR_BIT);
2047 
2048   streamer_write_bitpack (&bp);
2049 }
2050 
2051 
2052 /* Collect all leaf BLOCKs beyond ROOT into LEAFS.  */
2053 
2054 static void
2055 collect_block_tree_leafs (tree root, vec<tree> &leafs)
2056 {
2057   for (root = BLOCK_SUBBLOCKS (root); root; root = BLOCK_CHAIN (root))
2058     if (! BLOCK_SUBBLOCKS (root))
2059       leafs.safe_push (root);
2060     else
2061       collect_block_tree_leafs (root, leafs);
2062 }
2063 
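/* Illustration (editorial example): for a block tree

       ROOT
         B1
           B2   (leaf)
           B3   (leaf)
         B4     (leaf)

   collect_block_tree_leafs (ROOT, leafs) pushes B2, B3 and B4; blocks with
   subblocks, such as B1, are only recursed into.  */
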
2064 /* Perform the function body modifications on NODE that are needed for
2065    streaming to work.  */
2066 
2067 void
2068 lto_prepare_function_for_streaming (struct cgraph_node *node)
2069 {
2070   struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2071   basic_block bb;
2072 
2073   if (number_of_loops (fn))
2074     {
2075       push_cfun (fn);
2076       loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
2077       loop_optimizer_finalize ();
2078       pop_cfun ();
2079     }
2080   /* We will renumber the statements.  The code that does this uses
2081      the same ordering that we use for serializing them so we can use
2082      the same code on the other end and not have to write out the
2083      statement numbers.  We do not assign UIDs to PHIs here because
2084      virtual PHIs get re-computed on-the-fly which would make numbers
2085      inconsistent.  */
2086   set_gimple_stmt_max_uid (fn, 0);
2087   FOR_ALL_BB_FN (bb, fn)
2088     {
2089       for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2090 	   gsi_next (&gsi))
2091 	{
2092 	  gphi *stmt = gsi.phi ();
2093 
2094 	  /* Virtual PHIs are not going to be streamed.  */
2095 	  if (!virtual_operand_p (gimple_phi_result (stmt)))
2096 	    gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2097 	}
2098       for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2099 	   gsi_next (&gsi))
2100 	{
2101 	  gimple *stmt = gsi_stmt (gsi);
2102 	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2103 	}
2104     }
2105   /* To avoid keeping duplicate gimple IDs in the statements, renumber
2106      virtual phis now.  */
2107   FOR_ALL_BB_FN (bb, fn)
2108     {
2109       for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2110 	   gsi_next (&gsi))
2111 	{
2112 	  gphi *stmt = gsi.phi ();
2113 	  if (virtual_operand_p (gimple_phi_result (stmt)))
2114 	    gimple_set_uid (stmt, inc_gimple_stmt_max_uid (fn));
2115 	}
2116     }
2117 
2118 }
2119 
2120 /* Output the body of function NODE->DECL.  */
2121 
2122 static void
2123 output_function (struct cgraph_node *node)
2124 {
2125   tree function;
2126   struct function *fn;
2127   basic_block bb;
2128   struct output_block *ob;
2129 
2130   if (streamer_dump_file)
2131     fprintf (streamer_dump_file, "\nStreaming body of %s\n",
2132 	     node->name ());
2133 
2134   function = node->decl;
2135   fn = DECL_STRUCT_FUNCTION (function);
2136   ob = create_output_block (LTO_section_function_body);
2137 
2138   clear_line_info (ob);
2139   ob->symbol = node;
2140 
2141   gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2142 
2143   /* Make string 0 be a NULL string.  */
2144   streamer_write_char_stream (ob->string_stream, 0);
2145 
2146   streamer_write_record_start (ob, LTO_function);
2147 
2148   /* Output decls for parameters and args.  */
2149   stream_write_tree (ob, DECL_RESULT (function), true);
2150   streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2151 
2152   /* Output debug args if available. */
2153   vec<tree, va_gc> **debugargs = decl_debug_args_lookup (function);
2154   if (! debugargs)
2155     streamer_write_uhwi (ob, 0);
2156   else
2157     {
2158       streamer_write_uhwi (ob, (*debugargs)->length ());
2159       for (unsigned i = 0; i < (*debugargs)->length (); ++i)
2160 	stream_write_tree (ob, (**debugargs)[i], true);
2161     }
2162 
2163   /* Output DECL_INITIAL for the function, which contains the tree of
2164      lexical scopes.  */
2165   stream_write_tree (ob, DECL_INITIAL (function), true);
2166   /* As we do not recurse into BLOCK_SUBBLOCKS but only into
2167      BLOCK_SUPERCONTEXT, collect the block tree leaves and stream those.  */
2168   auto_vec<tree> block_tree_leafs;
2169   if (DECL_INITIAL (function))
2170     collect_block_tree_leafs (DECL_INITIAL (function), block_tree_leafs);
2171   streamer_write_uhwi (ob, block_tree_leafs.length ());
2172   for (unsigned i = 0; i < block_tree_leafs.length (); ++i)
2173     stream_write_tree (ob, block_tree_leafs[i], true);
2174 
2175   /* We also stream abstract functions, for which we stream only the
2176      pieces needed for debug info.  */
2177   if (gimple_has_body_p (function))
2178     {
2179       streamer_write_uhwi (ob, 1);
2180       output_struct_function_base (ob, fn);
2181 
2182       /* Output all the SSA names used in the function.  */
2183       output_ssa_names (ob, fn);
2184 
2185       /* Output any exception handling regions.  */
2186       output_eh_regions (ob, fn);
2187 
2188       /* Output the code for the function.  */
2189       FOR_ALL_BB_FN (bb, fn)
2190 	output_bb (ob, bb, fn);
2191 
2192       /* The terminator for this function.  */
2193       streamer_write_record_start (ob, LTO_null);
2194 
2195       output_cfg (ob, fn);
2196    }
2197   else
2198     streamer_write_uhwi (ob, 0);
2199 
2200   /* Create a section to hold the pickled output of this function.   */
2201   produce_asm (ob, function);
2202 
2203   destroy_output_block (ob);
2204   if (streamer_dump_file)
2205     fprintf (streamer_dump_file, "Finished streaming %s\n",
2206 	     node->name ());
2207 }
2208 
2209 /* Output the initializer of variable NODE->DECL.  */
2210 
2211 static void
2212 output_constructor (struct varpool_node *node)
2213 {
2214   tree var = node->decl;
2215   struct output_block *ob;
2216 
2217   if (streamer_dump_file)
2218     fprintf (streamer_dump_file, "\nStreaming constructor of %s\n",
2219 	     node->name ());
2220 
2221   ob = create_output_block (LTO_section_function_body);
2222 
2223   clear_line_info (ob);
2224   ob->symbol = node;
2225 
2226   /* Make string 0 be a NULL string.  */
2227   streamer_write_char_stream (ob->string_stream, 0);
2228 
2229   /* Output DECL_INITIAL for the variable, which holds the initializer
2230      to be streamed.  */
2231   stream_write_tree (ob, DECL_INITIAL (var), true);
2232 
2233   /* Create a section to hold the pickled output of this variable.  */
2234   produce_asm (ob, var);
2235 
2236   destroy_output_block (ob);
2237   if (streamer_dump_file)
2238     fprintf (streamer_dump_file, "Finished streaming %s\n",
2239 	     node->name ());
2240 }
2241 
2242 
2243 /* Emit toplevel asms.  */
2244 
2245 void
2246 lto_output_toplevel_asms (void)
2247 {
2248   struct output_block *ob;
2249   struct asm_node *can;
2250   char *section_name;
2251   struct lto_simple_header_with_strings header;
2252 
2253   if (!symtab->first_asm_symbol ())
2254     return;
2255 
2256   ob = create_output_block (LTO_section_asm);
2257 
2258   /* Make string 0 be a NULL string.  */
2259   streamer_write_char_stream (ob->string_stream, 0);
2260 
2261   for (can = symtab->first_asm_symbol (); can; can = can->next)
2262     {
2263       streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2264       streamer_write_hwi (ob, can->order);
2265     }
2266 
2267   streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2268 
2269   section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2270   lto_begin_section (section_name, !flag_wpa);
2271   free (section_name);
2272 
2273   /* The entire header stream is computed here.  */
2274   memset (&header, 0, sizeof (header));
2275 
2276   /* Write the header.  */
2277   header.major_version = LTO_major_version;
2278   header.minor_version = LTO_minor_version;
2279 
2280   header.main_size = ob->main_stream->total_size;
2281   header.string_size = ob->string_stream->total_size;
2282   lto_write_data (&header, sizeof header);
2283 
2284   /* Put all of the gimple and the string table out to the asm file as a
2285      block of text.  */
2286   lto_write_stream (ob->main_stream);
2287   lto_write_stream (ob->string_stream);
2288 
2289   lto_end_section ();
2290 
2291   destroy_output_block (ob);
2292 }
2293 
2294 
2295 /* Copy the function body or variable constructor of NODE without deserializing. */
2296 
2297 static void
2298 copy_function_or_variable (struct symtab_node *node)
2299 {
2300   tree function = node->decl;
2301   struct lto_file_decl_data *file_data = node->lto_file_data;
2302   const char *data;
2303   size_t len;
2304   const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2305   char *section_name =
2306     lto_get_section_name (LTO_section_function_body, name, NULL);
2307   size_t i, j;
2308   struct lto_in_decl_state *in_state;
2309   struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2310 
2311   if (streamer_dump_file)
2312     fprintf (streamer_dump_file, "Copying section for %s\n", name);
2313   lto_begin_section (section_name, false);
2314   free (section_name);
2315 
2316   /* We may have renamed the declaration, e.g., a static function.  */
2317   name = lto_get_decl_name_mapping (file_data, name);
2318 
2319   data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2320                                    name, &len);
2321   gcc_assert (data);
2322 
2323   /* Do a bit copy of the function body.  */
2324   lto_write_raw_data (data, len);
2325 
2326   /* Copy decls. */
2327   in_state =
2328     lto_get_function_in_decl_state (node->lto_file_data, function);
2329   gcc_assert (in_state);
2330   out_state->compressed = in_state->compressed;
2331 
2332   for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2333     {
2334       size_t n = vec_safe_length (in_state->streams[i]);
2335       vec<tree, va_gc> *trees = in_state->streams[i];
2336       struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2337 
2338       /* The out state must have the same indices as the in state.
2339 	 So just copy the vector.  All the encoders in the out state
2340 	 must be empty when we reach here.  */
2341       gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2342       encoder->trees.reserve_exact (n);
2343       for (j = 0; j < n; j++)
2344 	encoder->trees.safe_push ((*trees)[j]);
2345     }
2346 
2347   lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2348 			     data, len);
2349   lto_end_section ();
2350 }
2351 
2352 /* Wrap symbol references in *TP inside a type-preserving MEM_REF.  */
2353 
2354 static tree
2355 wrap_refs (tree *tp, int *ws, void *)
2356 {
2357   tree t = *tp;
2358   if (handled_component_p (t)
2359       && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2360       && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2361     {
2362       tree decl = TREE_OPERAND (t, 0);
2363       tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2364       TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2365 				    build1 (ADDR_EXPR, ptrtype, decl),
2366 				    build_int_cst (ptrtype, 0));
2367       TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2368       *ws = 0;
2369     }
2370   else if (TREE_CODE (t) == CONSTRUCTOR)
2371     ;
2372   else if (!EXPR_P (t))
2373     *ws = 0;
2374   return NULL_TREE;
2375 }
2376 
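/* Illustration (editorial example): for a public variable 'v' of type T,
   wrap_refs rewrites a handled component such as

     v.field

   into, roughly in dump syntax,

     MEM[(T *)&v].field

   i.e. the variable operand is replaced by a MEM_REF of T built from
   ADDR_EXPR of 'v' and a zero offset, preserving the access type
   independently of the declaration itself.  */
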
2377 /* Remove functions that are no longer used from offload_funcs, and mark the
2378    remaining ones with DECL_PRESERVE_P.  */
2379 
2380 static void
2381 prune_offload_funcs (void)
2382 {
2383   if (!offload_funcs)
2384     return;
2385 
2386   unsigned ix, ix2;
2387   tree *elem_ptr;
2388   VEC_ORDERED_REMOVE_IF (*offload_funcs, ix, ix2, elem_ptr,
2389 			 cgraph_node::get (*elem_ptr) == NULL);
2390 
2391   tree fn_decl;
2392   FOR_EACH_VEC_ELT (*offload_funcs, ix, fn_decl)
2393     DECL_PRESERVE_P (fn_decl) = 1;
2394 }
2395 
2396 /* Main entry point from the pass manager.  */
2397 
2398 void
2399 lto_output (void)
2400 {
2401   struct lto_out_decl_state *decl_state;
2402   bitmap output = NULL;
2403   int i, n_nodes;
2404   lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2405 
2406   prune_offload_funcs ();
2407 
2408   if (flag_checking)
2409     output = lto_bitmap_alloc ();
2410 
2411   /* Initialize the streamer.  */
2412   lto_streamer_init ();
2413 
2414   n_nodes = lto_symtab_encoder_size (encoder);
2415   /* Process only the functions with bodies.  */
2416   for (i = 0; i < n_nodes; i++)
2417     {
2418       symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2419       if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2420 	{
2421 	  if (lto_symtab_encoder_encode_body_p (encoder, node)
2422 	      && !node->alias)
2423 	    {
2424 	      if (flag_checking)
2425 		{
2426 		  gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2427 		  bitmap_set_bit (output, DECL_UID (node->decl));
2428 		}
2429 	      decl_state = lto_new_out_decl_state ();
2430 	      lto_push_out_decl_state (decl_state);
2431 	      if (gimple_has_body_p (node->decl)
2432 		  || (!flag_wpa
2433 		      && flag_incremental_link != INCREMENTAL_LINK_LTO)
2434 		  /* Thunks have no body but they may be synthesized
2435 		     at WPA time.  */
2436 		  || DECL_ARGUMENTS (node->decl))
2437 		output_function (node);
2438 	      else
2439 		copy_function_or_variable (node);
2440 	      gcc_assert (lto_get_out_decl_state () == decl_state);
2441 	      lto_pop_out_decl_state ();
2442 	      lto_record_function_out_decl_state (node->decl, decl_state);
2443 	    }
2444 	}
2445       else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2446 	{
2447 	  /* Wrap symbol references inside the ctor in a type
2448 	     preserving MEM_REF.  */
2449 	  tree ctor = DECL_INITIAL (node->decl);
2450 	  if (ctor && !in_lto_p)
2451 	    walk_tree (&ctor, wrap_refs, NULL, NULL);
2452 	  if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2453 	      && lto_symtab_encoder_encode_initializer_p (encoder, node)
2454 	      && !node->alias)
2455 	    {
2456 	      timevar_push (TV_IPA_LTO_CTORS_OUT);
2457 	      if (flag_checking)
2458 		{
2459 		  gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2460 		  bitmap_set_bit (output, DECL_UID (node->decl));
2461 		}
2462 	      decl_state = lto_new_out_decl_state ();
2463 	      lto_push_out_decl_state (decl_state);
2464 	      if (DECL_INITIAL (node->decl) != error_mark_node
2465 		  || (!flag_wpa
2466 		      && flag_incremental_link != INCREMENTAL_LINK_LTO))
2467 		output_constructor (node);
2468 	      else
2469 		copy_function_or_variable (node);
2470 	      gcc_assert (lto_get_out_decl_state () == decl_state);
2471 	      lto_pop_out_decl_state ();
2472 	      lto_record_function_out_decl_state (node->decl, decl_state);
2473 	      timevar_pop (TV_IPA_LTO_CTORS_OUT);
2474 	    }
2475 	}
2476     }
2477 
2478   /* Emit the callgraph after emitting function bodies.  This needs to
2479      be done now to make sure that all the statements in every function
2480      have been renumbered so that edges can be associated with call
2481      statements using the statement UIDs.  */
2482   output_symtab ();
2483 
2484   output_offload_tables ();
2485 
2486 #if CHECKING_P
2487   lto_bitmap_free (output);
2488 #endif
2489 }
2490 
2491 /* Write each node encoded by ENCODER to OB, as well as those reachable
2492    from it and required for correct representation of its semantics.
2493    Each node in ENCODER must be a global declaration or a type.  A node
2494    is written only once, even if it appears multiple times in the
2495    vector.  Certain transitively-reachable nodes, such as those
2496    representing expressions, may be duplicated, but such nodes
2497    must not appear in ENCODER itself.  */
2498 
2499 static void
2500 write_global_stream (struct output_block *ob,
2501 		     struct lto_tree_ref_encoder *encoder)
2502 {
2503   tree t;
2504   size_t index;
2505   const size_t size = lto_tree_ref_encoder_size (encoder);
2506 
2507   for (index = 0; index < size; index++)
2508     {
2509       t = lto_tree_ref_encoder_get_tree (encoder, index);
2510       if (streamer_dump_file)
2511 	{
2512           fprintf (streamer_dump_file, " %i:", (int)index);
2513 	  print_node_brief (streamer_dump_file, "", t, 4);
2514           fprintf (streamer_dump_file, "\n");
2515 	}
2516       if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2517 	stream_write_tree (ob, t, false);
2518     }
2519 }
2520 
2521 
2522 /* Write a sequence of indices into the globals vector corresponding
2523    to the trees in ENCODER.  These are used by the reader to map the
2524    indices used to refer to global entities within function bodies to
2525    their referents.  */
2526 
2527 static void
2528 write_global_references (struct output_block *ob,
2529  			 struct lto_tree_ref_encoder *encoder)
2530 {
2531   tree t;
2532   uint32_t index;
2533   const uint32_t size = lto_tree_ref_encoder_size (encoder);
2534 
2535   /* Write size and slot indexes as 32-bit unsigned numbers. */
2536   uint32_t *data = XNEWVEC (uint32_t, size + 1);
2537   data[0] = size;
2538 
2539   for (index = 0; index < size; index++)
2540     {
2541       unsigned slot_num;
2542 
2543       t = lto_tree_ref_encoder_get_tree (encoder, index);
2544       streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2545       gcc_assert (slot_num != (unsigned)-1);
2546       data[index + 1] = slot_num;
2547     }
2548 
2549   lto_write_data (data, sizeof (int32_t) * (size + 1));
2550   free (data);
2551 }
2552 
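/* Illustration (editorial sketch): write_global_references emits a flat
   array of 32-bit values,

     data[0]     = number of trees in the encoder
     data[i + 1] = writer-cache slot of tree i

   so three globals sitting in cache slots 7, 2 and 15 are written out
   as { 3, 7, 2, 15 }.  */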
2553 
2554 /* Write all the streams in an lto_out_decl_state STATE using
2555    output block OB.  */
2556 
2557 void
2558 lto_output_decl_state_streams (struct output_block *ob,
2559 			       struct lto_out_decl_state *state)
2560 {
2561   int i;
2562 
2563   for (i = 0;  i < LTO_N_DECL_STREAMS; i++)
2564     write_global_stream (ob, &state->streams[i]);
2565 }
2566 
2567 
2568 /* Write all the references in an lto_out_decl_state STATE using
2569    output block OB.  */
2570 
2571 void
2572 lto_output_decl_state_refs (struct output_block *ob,
2573 			    struct lto_out_decl_state *state)
2574 {
2575   unsigned i;
2576   unsigned ref;
2577   tree decl;
2578 
2579   /* Write the reference to the FUNCTION_DECL.  If there is no function,
2580      write a reference to void_type_node instead.  */
2581   decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2582   streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2583   gcc_assert (ref != (unsigned)-1);
2584   ref = ref * 2 + (state->compressed ? 1 : 0);
2585   lto_write_data (&ref, sizeof (uint32_t));
2586 
2587   for (i = 0;  i < LTO_N_DECL_STREAMS; i++)
2588     write_global_references (ob, &state->streams[i]);
2589 }
2590 
2591 
2592 /* Return the written size of STATE. */
2593 
2594 static size_t
2595 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2596 {
2597   int i;
2598   size_t size;
2599 
2600   size = sizeof (int32_t);	/* fn_ref. */
2601   for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2602     {
2603       size += sizeof (int32_t); /* vector size. */
2604       size += (lto_tree_ref_encoder_size (&state->streams[i])
2605 	       * sizeof (int32_t));
2606     }
2607   return size;
2608 }
2609 
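/* Illustration (editorial worked example): with decl streams holding
   s_0 .. s_{n-1} trees, the state occupies

     4                        bytes for the fn reference
     + sum_i (4 + 4 * s_i)    bytes for each stream's length and slot indices

   e.g. two streams holding 3 and 5 trees take 4 + (4 + 12) + (4 + 20)
   = 44 bytes.  */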
2610 
2611 /* Write symbol T to the LTO symbol table, using CACHE to find its slot.
2612    SEEN holds the names written so far; ALIAS is true if T is an alias.  */
2613 
2614 static void
2615 write_symbol (struct streamer_tree_cache_d *cache,
2616 	      tree t, hash_set<const char *> *seen, bool alias)
2617 {
2618   const char *name;
2619   enum gcc_plugin_symbol_kind kind;
2620   enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2621   unsigned slot_num;
2622   uint64_t size;
2623   const char *comdat;
2624   unsigned char c;
2625 
2626   gcc_assert (VAR_OR_FUNCTION_DECL_P (t));
2627 
2628   name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2629 
2630   /* This behaves like assemble_name_raw in varasm.c, performing the
2631      same name manipulations that ASM_OUTPUT_LABELREF does. */
2632   name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2633 
2634   if (seen->add (name))
2635     return;
2636 
2637   streamer_tree_cache_lookup (cache, t, &slot_num);
2638   gcc_assert (slot_num != (unsigned)-1);
2639 
2640   if (DECL_EXTERNAL (t))
2641     {
2642       if (DECL_WEAK (t))
2643 	kind = GCCPK_WEAKUNDEF;
2644       else
2645 	kind = GCCPK_UNDEF;
2646     }
2647   else
2648     {
2649       if (DECL_WEAK (t))
2650 	kind = GCCPK_WEAKDEF;
2651       else if (DECL_COMMON (t))
2652 	kind = GCCPK_COMMON;
2653       else
2654 	kind = GCCPK_DEF;
2655 
2656       /* When something is defined, it should have a node attached.  */
2657       gcc_assert (alias || !VAR_P (t) || varpool_node::get (t)->definition);
2658       gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2659 		  || (cgraph_node::get (t)
2660 		      && cgraph_node::get (t)->definition));
2661     }
2662 
2663   /* Imitate what default_elf_asm_output_external does.
2664      When a symbol is external, we need to output it with DEFAULT visibility
2665      when compiling with -fvisibility=default, but with HIDDEN visibility
2666      when the symbol has attribute ((visibility("hidden"))) specified.
2667      targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED and gets this
2668      right.  */
2669 
2670   if (DECL_EXTERNAL (t)
2671       && !targetm.binds_local_p (t))
2672     visibility = GCCPV_DEFAULT;
2673   else
2674     switch (DECL_VISIBILITY (t))
2675       {
2676       case VISIBILITY_DEFAULT:
2677 	visibility = GCCPV_DEFAULT;
2678 	break;
2679       case VISIBILITY_PROTECTED:
2680 	visibility = GCCPV_PROTECTED;
2681 	break;
2682       case VISIBILITY_HIDDEN:
2683 	visibility = GCCPV_HIDDEN;
2684 	break;
2685       case VISIBILITY_INTERNAL:
2686 	visibility = GCCPV_INTERNAL;
2687 	break;
2688       }
2689 
2690   if (kind == GCCPK_COMMON
2691       && DECL_SIZE_UNIT (t)
2692       && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2693     size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2694   else
2695     size = 0;
2696 
2697   if (DECL_ONE_ONLY (t))
2698     comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2699   else
2700     comdat = "";
2701 
2702   lto_write_data (name, strlen (name) + 1);
2703   lto_write_data (comdat, strlen (comdat) + 1);
2704   c = (unsigned char) kind;
2705   lto_write_data (&c, 1);
2706   c = (unsigned char) visibility;
2707   lto_write_data (&c, 1);
2708   lto_write_data (&size, 8);
2709   lto_write_data (&slot_num, 4);
2710 }
2711 
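/* Illustration (editorial sketch): each symbol table entry written above is

     name        NUL-terminated assembler name (after target mangling)
     comdat      NUL-terminated comdat group name, or an empty string
     kind        1 byte (gcc_plugin_symbol_kind)
     visibility  1 byte (gcc_plugin_symbol_visibility)
     size        8 bytes (nonzero only for commons with a constant size)
     slot        4 bytes (writer-cache slot number)

   Duplicate names are suppressed via the SEEN set.  */
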
2712 /* Write an IL symbol table to OB.  The symbols written are those in
2713    the symtab node encoder of OB's decl state.  */
2714 
2715 static void
2716 produce_symtab (struct output_block *ob)
2717 {
2718   struct streamer_tree_cache_d *cache = ob->writer_cache;
2719   char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2720   lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2721   lto_symtab_encoder_iterator lsei;
2722 
2723   lto_begin_section (section_name, false);
2724   free (section_name);
2725 
2726   hash_set<const char *> seen;
2727 
2728   /* Write the symbol table.
2729      First write everything defined and then all declarations.
2730      This is necessary to handle cases where we have duplicated symbols.  */
2731   for (lsei = lsei_start (encoder);
2732        !lsei_end_p (lsei); lsei_next (&lsei))
2733     {
2734       symtab_node *node = lsei_node (lsei);
2735 
2736       if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2737 	continue;
2738       write_symbol (cache, node->decl, &seen, false);
2739     }
2740   for (lsei = lsei_start (encoder);
2741        !lsei_end_p (lsei); lsei_next (&lsei))
2742     {
2743       symtab_node *node = lsei_node (lsei);
2744 
2745       if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2746 	continue;
2747       write_symbol (cache, node->decl, &seen, false);
2748     }
2749 
2750   lto_end_section ();
2751 }
2752 
2753 
2754 /* Init the streamer_mode_table for output, where we collect info on what
2755    machine_mode values have been streamed.  */
2756 void
2757 lto_output_init_mode_table (void)
2758 {
2759   memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2760 }
2761 
2762 
2763 /* Write the mode table.  */
2764 static void
2765 lto_write_mode_table (void)
2766 {
2767   struct output_block *ob;
2768   ob = create_output_block (LTO_section_mode_table);
2769   bitpack_d bp = bitpack_create (ob->main_stream);
2770 
2771   /* Ensure that for GET_MODE_INNER (m) != m we have
2772      also the inner mode marked.  */
2773   for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2774     if (streamer_mode_table[i])
2775       {
2776 	machine_mode m = (machine_mode) i;
2777 	machine_mode inner_m = GET_MODE_INNER (m);
2778 	if (inner_m != m)
2779 	  streamer_mode_table[(int) inner_m] = 1;
2780       }
2781   /* First stream modes that have GET_MODE_INNER (m) == m,
2782      so that we can refer to them afterwards.  */
2783   for (int pass = 0; pass < 2; pass++)
2784     for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2785       if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
2786 	{
2787 	  machine_mode m = (machine_mode) i;
2788 	  if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
2789 	    continue;
2790 	  bp_pack_value (&bp, m, 8);
2791 	  bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
2792 	  bp_pack_poly_value (&bp, GET_MODE_SIZE (m), 16);
2793 	  bp_pack_poly_value (&bp, GET_MODE_PRECISION (m), 16);
2794 	  bp_pack_value (&bp, GET_MODE_INNER (m), 8);
2795 	  bp_pack_poly_value (&bp, GET_MODE_NUNITS (m), 16);
2796 	  switch (GET_MODE_CLASS (m))
2797 	    {
2798 	    case MODE_FRACT:
2799 	    case MODE_UFRACT:
2800 	    case MODE_ACCUM:
2801 	    case MODE_UACCUM:
2802 	      bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
2803 	      bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
2804 	      break;
2805 	    case MODE_FLOAT:
2806 	    case MODE_DECIMAL_FLOAT:
2807 	      bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
2808 	      break;
2809 	    default:
2810 	      break;
2811 	    }
2812 	  bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
2813 	}
2814   bp_pack_value (&bp, VOIDmode, 8);
2815 
2816   streamer_write_bitpack (&bp);
2817 
2818   char *section_name
2819     = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
2820   lto_begin_section (section_name, !flag_wpa);
2821   free (section_name);
2822 
2823   /* The entire header stream is computed here.  */
2824   struct lto_simple_header_with_strings header;
2825   memset (&header, 0, sizeof (header));
2826 
2827   /* Write the header.  */
2828   header.major_version = LTO_major_version;
2829   header.minor_version = LTO_minor_version;
2830 
2831   header.main_size = ob->main_stream->total_size;
2832   header.string_size = ob->string_stream->total_size;
2833   lto_write_data (&header, sizeof header);
2834 
2835   /* Put all of the gimple and the string table out to the asm file as a
2836      block of text.  */
2837   lto_write_stream (ob->main_stream);
2838   lto_write_stream (ob->string_stream);
2839 
2840   lto_end_section ();
2841   destroy_output_block (ob);
2842 }
2843 
2844 
2845 /* This pass is run after all of the functions are serialized and all
2846    of the IPA passes have written their serialized forms.  This pass
2847    causes the vector of all of the global decls and types used from
2848    this file to be written into a section that can then be read back in
2849    to recover these on the other side.  */
2850 
2851 void
2852 produce_asm_for_decls (void)
2853 {
2854   struct lto_out_decl_state *out_state;
2855   struct lto_out_decl_state *fn_out_state;
2856   struct lto_decl_header header;
2857   char *section_name;
2858   struct output_block *ob;
2859   unsigned idx, num_fns;
2860   size_t decl_state_size;
2861   int32_t num_decl_states;
2862 
2863   ob = create_output_block (LTO_section_decls);
2864 
2865   memset (&header, 0, sizeof (struct lto_decl_header));
2866 
2867   section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2868   lto_begin_section (section_name, !flag_wpa);
2869   free (section_name);
2870 
2871   /* Make string 0 be a NULL string.  */
2872   streamer_write_char_stream (ob->string_stream, 0);
2873 
2874   gcc_assert (!alias_pairs);
2875 
2876   /* Get rid of the global decl state hash tables to save some memory.  */
2877   out_state = lto_get_out_decl_state ();
2878   for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2879     if (out_state->streams[i].tree_hash_table)
2880       {
2881 	delete out_state->streams[i].tree_hash_table;
2882 	out_state->streams[i].tree_hash_table = NULL;
2883       }
2884 
2885   /* Write the global symbols.  */
2886   if (streamer_dump_file)
2887     fprintf (streamer_dump_file, "Outputting global stream\n");
2888   lto_output_decl_state_streams (ob, out_state);
2889   num_fns = lto_function_decl_states.length ();
2890   for (idx = 0; idx < num_fns; idx++)
2891     {
2892       fn_out_state =
2893 	lto_function_decl_states[idx];
2894       if (streamer_dump_file)
2895         fprintf (streamer_dump_file, "Outputting stream for %s\n",
2896 		 IDENTIFIER_POINTER
2897 		    (DECL_ASSEMBLER_NAME (fn_out_state->fn_decl)));
2898       lto_output_decl_state_streams (ob, fn_out_state);
2899     }
2900 
2901   header.major_version = LTO_major_version;
2902   header.minor_version = LTO_minor_version;
2903 
2904   /* Currently not used.  This field would allow us to preallocate
2905      the globals vector, so that it need not be resized as it is extended.  */
2906   header.num_nodes = -1;
2907 
2908   /* Compute the total size of all decl out states. */
2909   decl_state_size = sizeof (int32_t);
2910   decl_state_size += lto_out_decl_state_written_size (out_state);
2911   for (idx = 0; idx < num_fns; idx++)
2912     {
2913       fn_out_state =
2914 	lto_function_decl_states[idx];
2915       decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2916     }
2917   header.decl_state_size = decl_state_size;
2918 
2919   header.main_size = ob->main_stream->total_size;
2920   header.string_size = ob->string_stream->total_size;
2921 
2922   lto_write_data (&header, sizeof header);
2923 
2924   /* Write the main out-decl state, followed by out-decl states of
2925      functions. */
2926   num_decl_states = num_fns + 1;
2927   lto_write_data (&num_decl_states, sizeof (num_decl_states));
2928   lto_output_decl_state_refs (ob, out_state);
2929   for (idx = 0; idx < num_fns; idx++)
2930     {
2931       fn_out_state = lto_function_decl_states[idx];
2932       lto_output_decl_state_refs (ob, fn_out_state);
2933     }
2934 
2935   lto_write_stream (ob->main_stream);
2936   lto_write_stream (ob->string_stream);
2937 
2938   lto_end_section ();
2939 
2940   /* Write the symbol table.  It is used by linker to determine dependencies
2941      and thus we can skip it for WPA.  */
2942   if (!flag_wpa)
2943     produce_symtab (ob);
2944 
2945   /* Write command line opts.  */
2946   lto_write_options ();
2947 
2948   /* Deallocate memory and clean up.  */
2949   for (idx = 0; idx < num_fns; idx++)
2950     {
2951       fn_out_state =
2952 	lto_function_decl_states[idx];
2953       lto_delete_out_decl_state (fn_out_state);
2954     }
2955   lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2956   lto_function_decl_states.release ();
2957   destroy_output_block (ob);
2958   if (lto_stream_offload_p)
2959     lto_write_mode_table ();
2960 }
2961