1 /* Write the GIMPLE representation to a file stream.
2 
3    Copyright (C) 2009-2016 Free Software Foundation, Inc.
4    Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5    Re-implemented by Diego Novillo <dnovillo@google.com>
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "tree-pass.h"
32 #include "ssa.h"
33 #include "gimple-streamer.h"
34 #include "alias.h"
35 #include "stor-layout.h"
36 #include "gimple-iterator.h"
37 #include "except.h"
38 #include "lto-symtab.h"
39 #include "cgraph.h"
40 #include "cfgloop.h"
41 #include "builtins.h"
42 #include "gomp-constants.h"
43 
44 
45 static void lto_write_tree (struct output_block*, tree, bool);
46 
47 /* Clear the line info stored in output block OB.  */
48 
49 static void
50 clear_line_info (struct output_block *ob)
51 {
52   ob->current_file = NULL;
53   ob->current_line = 0;
54   ob->current_col = 0;
55   ob->current_sysp = false;
56 }
57 
58 
59 /* Create the output block and return it.  SECTION_TYPE is
60    LTO_section_function_body or LTO_section_static_initializer.  */
61 
62 struct output_block *
63 create_output_block (enum lto_section_type section_type)
64 {
65   struct output_block *ob = XCNEW (struct output_block);
66 
67   ob->section_type = section_type;
68   ob->decl_state = lto_get_out_decl_state ();
69   ob->main_stream = XCNEW (struct lto_output_stream);
70   ob->string_stream = XCNEW (struct lto_output_stream);
71   ob->writer_cache = streamer_tree_cache_create (!flag_wpa, true, false);
72 
73   if (section_type == LTO_section_function_body)
74     ob->cfg_stream = XCNEW (struct lto_output_stream);
75 
76   clear_line_info (ob);
77 
78   ob->string_hash_table = new hash_table<string_slot_hasher> (37);
79   gcc_obstack_init (&ob->obstack);
80 
81   return ob;
82 }
83 
84 
85 /* Destroy the output block OB.  */
86 
87 void
88 destroy_output_block (struct output_block *ob)
89 {
90   enum lto_section_type section_type = ob->section_type;
91 
92   delete ob->string_hash_table;
93   ob->string_hash_table = NULL;
94 
95   free (ob->main_stream);
96   free (ob->string_stream);
97   if (section_type == LTO_section_function_body)
98     free (ob->cfg_stream);
99 
100   streamer_tree_cache_delete (ob->writer_cache);
101   obstack_free (&ob->obstack, NULL);
102 
103   free (ob);
104 }
105 
106 
107 /* Look up NODE in the type table and write the index for it to OB.  */
108 
109 static void
110 output_type_ref (struct output_block *ob, tree node)
111 {
112   streamer_write_record_start (ob, LTO_type_ref);
113   lto_output_type_ref_index (ob->decl_state, ob->main_stream, node);
114 }
115 
116 
117 /* Return true if tree node T is written to various tables.  For these
118    nodes, we sometimes want to write their physical representation
119    (via lto_output_tree), and sometimes we need to emit an index
120    reference into a table (via lto_output_tree_ref).  */
121 
122 static bool
123 tree_is_indexable (tree t)
124 {
125   /* Parameters and return values of functions of variably modified types
126      must go to global stream, because they may be used in the type
127      definition.  */
128   if ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
129       && DECL_CONTEXT (t))
130     return variably_modified_type_p (TREE_TYPE (DECL_CONTEXT (t)), NULL_TREE);
131   /* IMPORTED_DECL is put into BLOCK and thus it never can be shared.  */
132   else if (TREE_CODE (t) == IMPORTED_DECL)
133     return false;
134   else if (((TREE_CODE (t) == VAR_DECL && !TREE_STATIC (t))
135 	    || TREE_CODE (t) == TYPE_DECL
136 	    || TREE_CODE (t) == CONST_DECL
137 	    || TREE_CODE (t) == NAMELIST_DECL)
138 	   && decl_function_context (t))
139     return false;
140   else if (TREE_CODE (t) == DEBUG_EXPR_DECL)
141     return false;
142   /* Variably modified types need to be streamed alongside function
143      bodies because they can refer to local entities.  Together with
144      them we have to localize their members as well.
145      ???  In theory that includes non-FIELD_DECLs as well.  */
146   else if (TYPE_P (t)
147 	   && variably_modified_type_p (t, NULL_TREE))
148     return false;
149   else if (TREE_CODE (t) == FIELD_DECL
150 	   && variably_modified_type_p (DECL_CONTEXT (t), NULL_TREE))
151     return false;
152   else
153     return (TYPE_P (t) || DECL_P (t) || TREE_CODE (t) == SSA_NAME);
154 }
155 
156 
157 /* Output info about new location into bitpack BP.
158    After outputting bitpack, lto_output_location_data has
159    to be done to output actual data.  */
160 
161 void
162 lto_output_location (struct output_block *ob, struct bitpack_d *bp,
163 		     location_t loc)
164 {
165   expanded_location xloc;
166 
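  /* Only the locus is streamed; for an ad-hoc location this drops the
     associated data (such as the attached BLOCK).  */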
167   loc = LOCATION_LOCUS (loc);
168   bp_pack_int_in_range (bp, 0, RESERVED_LOCATION_COUNT,
169 		        loc < RESERVED_LOCATION_COUNT
170 			? loc : RESERVED_LOCATION_COUNT);
171   if (loc < RESERVED_LOCATION_COUNT)
172     return;
173 
174   xloc = expand_location (loc);
175 
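  /* Pack change bits for the file, line and column; only changed values
     (plus the system-header flag alongside a new file name) are streamed
     below.  */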
176   bp_pack_value (bp, ob->current_file != xloc.file, 1);
177   bp_pack_value (bp, ob->current_line != xloc.line, 1);
178   bp_pack_value (bp, ob->current_col != xloc.column, 1);
179 
180   if (ob->current_file != xloc.file)
181     {
182       bp_pack_string (ob, bp, xloc.file, true);
183       bp_pack_value (bp, xloc.sysp, 1);
184     }
185   ob->current_file = xloc.file;
186   ob->current_sysp = xloc.sysp;
187 
188   if (ob->current_line != xloc.line)
189     bp_pack_var_len_unsigned (bp, xloc.line);
190   ob->current_line = xloc.line;
191 
192   if (ob->current_col != xloc.column)
193     bp_pack_var_len_unsigned (bp, xloc.column);
194   ob->current_col = xloc.column;
195 }
196 
197 
198 /* If EXPR is an indexable tree node, output a reference to it to
199    output block OB.  Otherwise, output the physical representation of
200    EXPR to OB.  */
201 
202 static void
203 lto_output_tree_ref (struct output_block *ob, tree expr)
204 {
205   enum tree_code code;
206 
207   if (TYPE_P (expr))
208     {
209       output_type_ref (ob, expr);
210       return;
211     }
212 
213   code = TREE_CODE (expr);
214   switch (code)
215     {
216     case SSA_NAME:
217       streamer_write_record_start (ob, LTO_ssa_name_ref);
218       streamer_write_uhwi (ob, SSA_NAME_VERSION (expr));
219       break;
220 
221     case FIELD_DECL:
222       streamer_write_record_start (ob, LTO_field_decl_ref);
223       lto_output_field_decl_index (ob->decl_state, ob->main_stream, expr);
224       break;
225 
226     case FUNCTION_DECL:
227       streamer_write_record_start (ob, LTO_function_decl_ref);
228       lto_output_fn_decl_index (ob->decl_state, ob->main_stream, expr);
229       break;
230 
231     case VAR_DECL:
232     case DEBUG_EXPR_DECL:
233       gcc_assert (decl_function_context (expr) == NULL || TREE_STATIC (expr));
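      /* Fall through.  */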
234     case PARM_DECL:
235       streamer_write_record_start (ob, LTO_global_decl_ref);
236       lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
237       break;
238 
239     case CONST_DECL:
240       streamer_write_record_start (ob, LTO_const_decl_ref);
241       lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
242       break;
243 
244     case IMPORTED_DECL:
245       gcc_assert (decl_function_context (expr) == NULL);
246       streamer_write_record_start (ob, LTO_imported_decl_ref);
247       lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
248       break;
249 
250     case TYPE_DECL:
251       streamer_write_record_start (ob, LTO_type_decl_ref);
252       lto_output_type_decl_index (ob->decl_state, ob->main_stream, expr);
253       break;
254 
255     case NAMELIST_DECL:
256       streamer_write_record_start (ob, LTO_namelist_decl_ref);
257       lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
258       break;
259 
260     case NAMESPACE_DECL:
261       streamer_write_record_start (ob, LTO_namespace_decl_ref);
262       lto_output_namespace_decl_index (ob->decl_state, ob->main_stream, expr);
263       break;
264 
265     case LABEL_DECL:
266       streamer_write_record_start (ob, LTO_label_decl_ref);
267       lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
268       break;
269 
270     case RESULT_DECL:
271       streamer_write_record_start (ob, LTO_result_decl_ref);
272       lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
273       break;
274 
275     case TRANSLATION_UNIT_DECL:
276       streamer_write_record_start (ob, LTO_translation_unit_decl_ref);
277       lto_output_var_decl_index (ob->decl_state, ob->main_stream, expr);
278       break;
279 
280     default:
281       /* No other node is indexable, so it should have been handled by
282 	 lto_output_tree.  */
283       gcc_unreachable ();
284     }
285 }
286 
287 
288 /* Return true if EXPR is a tree node that can be written to disk.  */
289 
290 static inline bool
291 lto_is_streamable (tree expr)
292 {
293   enum tree_code code = TREE_CODE (expr);
294 
295   /* Notice that we reject SSA_NAMEs as well.  We only emit the SSA
296      name version in lto_output_tree_ref (see output_ssa_names).  */
297   return !is_lang_specific (expr)
298 	 && code != SSA_NAME
299 	 && code != CALL_EXPR
300 	 && code != LANG_TYPE
301 	 && code != MODIFY_EXPR
302 	 && code != INIT_EXPR
303 	 && code != TARGET_EXPR
304 	 && code != BIND_EXPR
305 	 && code != WITH_CLEANUP_EXPR
306 	 && code != STATEMENT_LIST
307 	 && (code == CASE_LABEL_EXPR
308 	     || code == DECL_EXPR
309 	     || TREE_CODE_CLASS (code) != tcc_statement);
310 }
311 
312 /* Very rough estimate of the streaming size of the initializer.  If we ignored
313    the presence of strings, we could simply count the number of non-indexable
314    tree nodes and the number of references to indexable nodes.  Strings however
315    may be very large and we do not want to dump them into the global stream.
316 
317    Count the size of the initializer until the budget in DATA is exhausted.  */
318 
319 static tree
320 subtract_estimated_size (tree *tp, int *ws, void *data)
321 {
322   long *sum = (long *)data;
323   if (tree_is_indexable (*tp))
324     {
325       /* Indexable tree is one reference to global stream.
326 	 Guess it may be about 4 bytes.  */
327       *sum -= 4;
328       *ws = 0;
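      /* Setting *WS to zero tells walk_tree not to descend into the
	 children; the single reference covers them.  */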
329     }
330   /* String table entry + base of tree node needs to be streamed.  */
331   if (TREE_CODE (*tp) == STRING_CST)
332     *sum -= TREE_STRING_LENGTH (*tp) + 8;
333   else
334     {
335       /* Identifiers are also variable length but should not appear
336 	 naked in a constructor.  */
337       gcc_checking_assert (TREE_CODE (*tp) != IDENTIFIER_NODE);
338       /* We do not really attempt to work out the size of the pickled tree,
339 	 as it is very variable.  Just make it bigger than a reference.  */
340       *sum -= 16;
341     }
342   if (*sum < 0)
343     return *tp;
344   return NULL_TREE;
345 }
346 
347 
348 /* For EXPR lookup and return what we want to stream to OB as DECL_INITIAL.  */
349 
350 static tree
351 get_symbol_initial_value (lto_symtab_encoder_t encoder, tree expr)
352 {
353   gcc_checking_assert (DECL_P (expr)
354 		       && TREE_CODE (expr) != FUNCTION_DECL
355 		       && TREE_CODE (expr) != TRANSLATION_UNIT_DECL);
356 
357   /* Handle DECL_INITIAL for symbols.  */
358   tree initial = DECL_INITIAL (expr);
359   if (TREE_CODE (expr) == VAR_DECL
360       && (TREE_STATIC (expr) || DECL_EXTERNAL (expr))
361       && !DECL_IN_CONSTANT_POOL (expr)
362       && initial)
363     {
364       varpool_node *vnode;
365       /* Extra section needs about 30 bytes; do not produce it for simple
366 	 scalar values.  */
367       if (!(vnode = varpool_node::get (expr))
368 	  || !lto_symtab_encoder_encode_initializer_p (encoder, vnode))
369         initial = error_mark_node;
370       if (initial != error_mark_node)
371 	{
372 	  long max_size = 30;
373 	  if (walk_tree (&initial, subtract_estimated_size, (void *)&max_size,
374 			 NULL))
375 	    initial = error_mark_node;
376 	}
377     }
378 
379   return initial;
380 }
381 
382 
383 /* Write a physical representation of tree node EXPR to output block
384    OB.  If REF_P is true, the leaves of EXPR are emitted as references
385    via lto_output_tree_ref.  EXPR has already been entered into the
386    writer cache by the caller.  */
387 
388 static void
389 lto_write_tree_1 (struct output_block *ob, tree expr, bool ref_p)
390 {
391   /* Pack all the non-pointer fields in EXPR into a bitpack and write
392      the resulting bitpack.  */
393   streamer_write_tree_bitfields (ob, expr);
394 
395   /* Write all the pointer fields in EXPR.  */
396   streamer_write_tree_body (ob, expr, ref_p);
397 
398   /* Write any LTO-specific data to OB.  */
399   if (DECL_P (expr)
400       && TREE_CODE (expr) != FUNCTION_DECL
401       && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
402     {
403       /* Handle DECL_INITIAL for symbols.  */
404       tree initial = get_symbol_initial_value
405 			 (ob->decl_state->symtab_node_encoder, expr);
406       stream_write_tree (ob, initial, ref_p);
407     }
408 }
409 
410 /* Write a physical representation of tree node EXPR to output block
411    OB.  If REF_P is true, the leaves of EXPR are emitted as references
412    via lto_output_tree_ref.  EXPR has already been entered into the
413    writer cache by the caller.  */
414 
415 static void
416 lto_write_tree (struct output_block *ob, tree expr, bool ref_p)
417 {
418   if (!lto_is_streamable (expr))
419     internal_error ("tree code %qs is not supported in LTO streams",
420 		    get_tree_code_name (TREE_CODE (expr)));
421 
422   /* Write the header, containing everything needed to materialize
423      EXPR on the reading side.  */
424   streamer_write_tree_header (ob, expr);
425 
426   lto_write_tree_1 (ob, expr, ref_p);
427 
428   /* Mark the end of EXPR.  */
429   streamer_write_zero (ob);
430 }
431 
432 /* Emit the physical representation of tree node EXPR to output block OB.
433    If THIS_REF_P is true, the leaves of EXPR are emitted as references via
434    lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */
435 
436 static void
437 lto_output_tree_1 (struct output_block *ob, tree expr, hashval_t hash,
438 		   bool ref_p, bool this_ref_p)
439 {
440   unsigned ix;
441 
442   gcc_checking_assert (expr != NULL_TREE
443 		       && !(this_ref_p && tree_is_indexable (expr)));
444 
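  /* Register EXPR in the writer cache before streaming its body; the reader
     inserts it at the matching point, keeping cache indices in sync and
     letting later back-references be emitted as pickle references.  */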
445   bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
446 					      expr, hash, &ix);
447   gcc_assert (!exists_p);
448   if (streamer_handle_as_builtin_p (expr))
449     {
450       /* MD and NORMAL builtins do not need to be written out
451 	 completely as they are always instantiated by the
452 	 compiler on startup.  The only builtins that need to
453 	 be written out are BUILT_IN_FRONTEND.  For all other
454 	 builtins, we simply write the class and code.  */
455       streamer_write_builtin (ob, expr);
456     }
457   else if (TREE_CODE (expr) == INTEGER_CST
458 	   && !TREE_OVERFLOW (expr))
459     {
460       /* Shared INTEGER_CST nodes are special because they need their
461 	 original type to be materialized by the reader (to implement
462 	 TYPE_CACHED_VALUES).  */
463       streamer_write_integer_cst (ob, expr, ref_p);
464     }
465   else
466     {
467       /* This is the first time we see EXPR, write its fields
468 	 to OB.  */
469       lto_write_tree (ob, expr, ref_p);
470     }
471 }
472 
473 class DFS
474 {
475 public:
476   DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
477        bool single_p);
478   ~DFS ();
479 
480   struct scc_entry
481   {
482     tree t;
483     hashval_t hash;
484   };
485   vec<scc_entry> sccstack;
486 
487 private:
488   struct sccs
489   {
490     unsigned int dfsnum;
491     unsigned int low;
492   };
493   struct worklist
494   {
495     tree expr;
496     sccs *from_state;
497     sccs *cstate;
498     bool ref_p;
499     bool this_ref_p;
500   };
501 
502   static int scc_entry_compare (const void *, const void *);
503 
504   void DFS_write_tree_body (struct output_block *ob,
505 			    tree expr, sccs *expr_state, bool ref_p);
506 
507   void DFS_write_tree (struct output_block *ob, sccs *from_state,
508 		       tree expr, bool ref_p, bool this_ref_p);
509 
510   hashval_t
511   hash_scc (struct output_block *ob, unsigned first, unsigned size,
512 	    bool ref_p, bool this_ref_p);
513 
514   hash_map<tree, sccs *> sccstate;
515   vec<worklist> worklist_vec;
516   struct obstack sccstate_obstack;
517 };
518 
519 /* Emit the physical representation of tree node EXPR to output block OB,
520    using depth-first search on the subgraph.  If THIS_REF_P is true, the
521    leaves of EXPR are emitted as references via lto_output_tree_ref.
522    REF_P is used for streaming siblings of EXPR.  If SINGLE_P is true,
523    this is for a rewalk of a single leaf SCC.  */
524 
525 DFS::DFS (struct output_block *ob, tree expr, bool ref_p, bool this_ref_p,
526 	  bool single_p)
527 {
528   unsigned int next_dfs_num = 1;
529   sccstack.create (0);
530   gcc_obstack_init (&sccstate_obstack);
531   worklist_vec = vNULL;
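  /* Seed the worklist with EXPR and run an iterative (non-recursive)
     Tarjan SCC walk over the tree graph.  */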
532   DFS_write_tree (ob, NULL, expr, ref_p, this_ref_p);
533   while (!worklist_vec.is_empty ())
534     {
535       worklist &w = worklist_vec.last ();
536       expr = w.expr;
537       sccs *from_state = w.from_state;
538       sccs *cstate = w.cstate;
539       ref_p = w.ref_p;
540       this_ref_p = w.this_ref_p;
541       if (cstate == NULL)
542 	{
543 	  sccs **slot = &sccstate.get_or_insert (expr);
544 	  cstate = *slot;
545 	  if (cstate)
546 	    {
547 	      gcc_checking_assert (from_state);
548 	      if (cstate->dfsnum < from_state->dfsnum)
549 		from_state->low = MIN (cstate->dfsnum, from_state->low);
550 	      worklist_vec.pop ();
551 	      continue;
552 	    }
553 
554 	  scc_entry e = { expr, 0 };
555 	  /* Not yet visited.  DFS recurse and push it onto the stack.  */
556 	  *slot = cstate = XOBNEW (&sccstate_obstack, struct sccs);
557 	  sccstack.safe_push (e);
558 	  cstate->dfsnum = next_dfs_num++;
559 	  cstate->low = cstate->dfsnum;
560 	  w.cstate = cstate;
561 
562 	  if (streamer_handle_as_builtin_p (expr))
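	    /* Builtins are streamed by class and code only; there are no
	       edges to follow.  */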
563 	    ;
564 	  else if (TREE_CODE (expr) == INTEGER_CST
565 		   && !TREE_OVERFLOW (expr))
566 	    DFS_write_tree (ob, cstate, TREE_TYPE (expr), ref_p, ref_p);
567 	  else
568 	    {
569 	      DFS_write_tree_body (ob, expr, cstate, ref_p);
570 
571 	      /* Walk any LTO-specific edges.  */
572 	      if (DECL_P (expr)
573 		  && TREE_CODE (expr) != FUNCTION_DECL
574 		  && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
575 		{
576 		  /* Handle DECL_INITIAL for symbols.  */
577 		  tree initial
578 		    = get_symbol_initial_value (ob->decl_state->symtab_node_encoder,
579 						expr);
580 		  DFS_write_tree (ob, cstate, initial, ref_p, ref_p);
581 		}
582 	    }
583 	  continue;
584 	}
585 
586       /* See if we found an SCC.  */
587       if (cstate->low == cstate->dfsnum)
588 	{
589 	  unsigned first, size;
590 	  tree x;
591 
592 	  /* If we are re-walking a single leaf SCC just pop it and let
593 	     the earlier worklist item access the sccstack.  */
594 	  if (single_p)
595 	    {
596 	      worklist_vec.pop ();
597 	      continue;
598 	    }
599 
600 	  /* Pop the SCC and compute its size.  */
601 	  first = sccstack.length ();
602 	  do
603 	    {
604 	      x = sccstack[--first].t;
605 	    }
606 	  while (x != expr);
607 	  size = sccstack.length () - first;
608 
609 	  /* No need to compute hashes for LTRANS units, we don't perform
610 	     any merging there.  */
611 	  hashval_t scc_hash = 0;
612 	  unsigned scc_entry_len = 0;
613 	  if (!flag_wpa)
614 	    {
615 	      scc_hash = hash_scc (ob, first, size, ref_p, this_ref_p);
616 
617 	      /* Put the entries with the least number of collisions first.  */
618 	      unsigned entry_start = 0;
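	      /* Initialize the candidate length above any possible run so
		 the first run of equal hashes always replaces it.  */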
619 	      scc_entry_len = size + 1;
620 	      for (unsigned i = 0; i < size;)
621 		{
622 		  unsigned from = i;
623 		  for (i = i + 1; i < size
624 		       && (sccstack[first + i].hash
625 			   == sccstack[first + from].hash); ++i)
626 		    ;
627 		  if (i - from < scc_entry_len)
628 		    {
629 		      scc_entry_len = i - from;
630 		      entry_start = from;
631 		    }
632 		}
633 	      for (unsigned i = 0; i < scc_entry_len; ++i)
634 		std::swap (sccstack[first + i],
635 			   sccstack[first + entry_start + i]);
636 
637 	      /* We already sorted SCC deterministically in hash_scc.  */
638 
639 	      /* Check that we have only one SCC.
640 		 Naturally we may have conflicts if the hash function is not
641 		 strong enough.  Let's see how far this gets.  */
642 	      gcc_checking_assert (scc_entry_len == 1);
643 	    }
644 
645 	  /* Write LTO_tree_scc.  */
646 	  streamer_write_record_start (ob, LTO_tree_scc);
647 	  streamer_write_uhwi (ob, size);
648 	  streamer_write_uhwi (ob, scc_hash);
649 
650 	  /* Write size-1 SCCs without wrapping them inside SCC bundles.
651 	     All INTEGER_CSTs need to be handled this way as we need
652 	     their type to materialize them.  Also builtins are handled
653 	     this way.
654 	     ???  We still wrap these in LTO_tree_scc so at the
655 	     input side we can properly identify the tree we want
656 	     to ultimately return.  */
657 	  if (size == 1)
658 	    lto_output_tree_1 (ob, expr, scc_hash, ref_p, this_ref_p);
659 	  else
660 	    {
661 	      /* Write the size of the SCC entry candidates.  */
662 	      streamer_write_uhwi (ob, scc_entry_len);
663 
664 	      /* Write all headers and populate the streamer cache.  */
665 	      for (unsigned i = 0; i < size; ++i)
666 		{
667 		  hashval_t hash = sccstack[first+i].hash;
668 		  tree t = sccstack[first+i].t;
669 		  bool exists_p = streamer_tree_cache_insert (ob->writer_cache,
670 							      t, hash, NULL);
671 		  gcc_assert (!exists_p);
672 
673 		  if (!lto_is_streamable (t))
674 		    internal_error ("tree code %qs is not supported "
675 				    "in LTO streams",
676 				    get_tree_code_name (TREE_CODE (t)));
677 
678 		  gcc_checking_assert (!streamer_handle_as_builtin_p (t));
679 
680 		  /* Write the header, containing everything needed to
681 		     materialize EXPR on the reading side.  */
682 		  streamer_write_tree_header (ob, t);
683 		}
684 
685 	      /* Write the bitpacks and tree references.  */
686 	      for (unsigned i = 0; i < size; ++i)
687 		{
688 		  lto_write_tree_1 (ob, sccstack[first+i].t, ref_p);
689 
690 		  /* Mark the end of the tree.  */
691 		  streamer_write_zero (ob);
692 		}
693 	    }
694 
695 	  /* Finally truncate the vector.  */
696 	  sccstack.truncate (first);
697 
698 	  if (from_state)
699 	    from_state->low = MIN (from_state->low, cstate->low);
700 	  worklist_vec.pop ();
701 	  continue;
702 	}
703 
704       gcc_checking_assert (from_state);
705       from_state->low = MIN (from_state->low, cstate->low);
706       if (cstate->dfsnum < from_state->dfsnum)
707 	from_state->low = MIN (cstate->dfsnum, from_state->low);
708       worklist_vec.pop ();
709     }
710   worklist_vec.release ();
711 }
712 
713 DFS::~DFS ()
714 {
715   sccstack.release ();
716   obstack_free (&sccstate_obstack, NULL);
717 }
718 
719 /* Handle the tree EXPR in the DFS walk with SCC state EXPR_STATE and
720    DFS recurse for all tree edges originating from it.  */
721 
722 void
723 DFS::DFS_write_tree_body (struct output_block *ob,
724 			  tree expr, sccs *expr_state, bool ref_p)
725 {
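/* Queue DEST on the DFS worklist as an edge originating from EXPR_STATE,
   keeping the REF_P streaming mode of the parent.  */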
726 #define DFS_follow_tree_edge(DEST) \
727   DFS_write_tree (ob, expr_state, DEST, ref_p, ref_p)
728 
729   enum tree_code code;
730 
731   code = TREE_CODE (expr);
732 
733   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
734     {
735       if (TREE_CODE (expr) != IDENTIFIER_NODE)
736 	DFS_follow_tree_edge (TREE_TYPE (expr));
737     }
738 
739   if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
740     {
741       for (unsigned i = 0; i < VECTOR_CST_NELTS (expr); ++i)
742 	DFS_follow_tree_edge (VECTOR_CST_ELT (expr, i));
743     }
744 
745   if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
746     {
747       DFS_follow_tree_edge (TREE_REALPART (expr));
748       DFS_follow_tree_edge (TREE_IMAGPART (expr));
749     }
750 
751   if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
752     {
753       /* Drop names that were created for anonymous entities.  */
754       if (DECL_NAME (expr)
755 	  && TREE_CODE (DECL_NAME (expr)) == IDENTIFIER_NODE
756 	  && anon_aggrname_p (DECL_NAME (expr)))
757 	;
758       else
759 	DFS_follow_tree_edge (DECL_NAME (expr));
760       DFS_follow_tree_edge (DECL_CONTEXT (expr));
761     }
762 
763   if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
764     {
765       DFS_follow_tree_edge (DECL_SIZE (expr));
766       DFS_follow_tree_edge (DECL_SIZE_UNIT (expr));
767 
768       /* Note, DECL_INITIAL is not handled here.  Since DECL_INITIAL needs
769 	 special handling in LTO, it must be handled by streamer hooks.  */
770 
771       DFS_follow_tree_edge (DECL_ATTRIBUTES (expr));
772 
773       /* Do not follow DECL_ABSTRACT_ORIGIN.  We cannot handle debug information
774 	 for early inlining so drop it on the floor instead of ICEing in
775 	 dwarf2out.c.
776 	 We however use DECL_ABSTRACT_ORIGIN == error_mark_node to mark
777 	 declarations which should be eliminated by decl merging. Be sure none
778 	 leaks to this point.  */
779       gcc_assert (DECL_ABSTRACT_ORIGIN (expr) != error_mark_node);
780 
781       if ((TREE_CODE (expr) == VAR_DECL
782 	   || TREE_CODE (expr) == PARM_DECL)
783 	  && DECL_HAS_VALUE_EXPR_P (expr))
784 	DFS_follow_tree_edge (DECL_VALUE_EXPR (expr));
785       if (TREE_CODE (expr) == VAR_DECL)
786 	DFS_follow_tree_edge (DECL_DEBUG_EXPR (expr));
787     }
788 
789   if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
790     {
791       if (TREE_CODE (expr) == TYPE_DECL)
792 	DFS_follow_tree_edge (DECL_ORIGINAL_TYPE (expr));
793     }
794 
795   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
796     {
797       /* Make sure we don't inadvertently set the assembler name.  */
798       if (DECL_ASSEMBLER_NAME_SET_P (expr))
799 	DFS_follow_tree_edge (DECL_ASSEMBLER_NAME (expr));
800     }
801 
802   if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
803     {
804       DFS_follow_tree_edge (DECL_FIELD_OFFSET (expr));
805       DFS_follow_tree_edge (DECL_BIT_FIELD_TYPE (expr));
806       DFS_follow_tree_edge (DECL_BIT_FIELD_REPRESENTATIVE (expr));
807       DFS_follow_tree_edge (DECL_FIELD_BIT_OFFSET (expr));
808       DFS_follow_tree_edge (DECL_FCONTEXT (expr));
809     }
810 
811   if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
812     {
813       DFS_follow_tree_edge (DECL_VINDEX (expr));
814       DFS_follow_tree_edge (DECL_FUNCTION_PERSONALITY (expr));
815       DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_TARGET (expr));
816       DFS_follow_tree_edge (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (expr));
817     }
818 
819   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
820     {
821       DFS_follow_tree_edge (TYPE_SIZE (expr));
822       DFS_follow_tree_edge (TYPE_SIZE_UNIT (expr));
823       DFS_follow_tree_edge (TYPE_ATTRIBUTES (expr));
824       DFS_follow_tree_edge (TYPE_NAME (expr));
825       /* Do not follow TYPE_POINTER_TO or TYPE_REFERENCE_TO.  They will be
826 	 reconstructed during fixup.  */
827       /* Do not follow TYPE_NEXT_VARIANT, we reconstruct the variant lists
828 	 during fixup.  */
829       DFS_follow_tree_edge (TYPE_MAIN_VARIANT (expr));
830       DFS_follow_tree_edge (TYPE_CONTEXT (expr));
831       /* TYPE_CANONICAL is re-computed during type merging, so no need
832 	 to follow it here.  */
833       DFS_follow_tree_edge (TYPE_STUB_DECL (expr));
834     }
835 
836   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
837     {
838       if (TREE_CODE (expr) == ENUMERAL_TYPE)
839 	DFS_follow_tree_edge (TYPE_VALUES (expr));
840       else if (TREE_CODE (expr) == ARRAY_TYPE)
841 	DFS_follow_tree_edge (TYPE_DOMAIN (expr));
842       else if (RECORD_OR_UNION_TYPE_P (expr))
843 	for (tree t = TYPE_FIELDS (expr); t; t = TREE_CHAIN (t))
844 	  DFS_follow_tree_edge (t);
845       else if (TREE_CODE (expr) == FUNCTION_TYPE
846 	       || TREE_CODE (expr) == METHOD_TYPE)
847 	DFS_follow_tree_edge (TYPE_ARG_TYPES (expr));
848 
849       if (!POINTER_TYPE_P (expr))
850 	DFS_follow_tree_edge (TYPE_MINVAL (expr));
851       DFS_follow_tree_edge (TYPE_MAXVAL (expr));
852       if (RECORD_OR_UNION_TYPE_P (expr))
853 	DFS_follow_tree_edge (TYPE_BINFO (expr));
854     }
855 
856   if (CODE_CONTAINS_STRUCT (code, TS_LIST))
857     {
858       DFS_follow_tree_edge (TREE_PURPOSE (expr));
859       DFS_follow_tree_edge (TREE_VALUE (expr));
860       DFS_follow_tree_edge (TREE_CHAIN (expr));
861     }
862 
863   if (CODE_CONTAINS_STRUCT (code, TS_VEC))
864     {
865       for (int i = 0; i < TREE_VEC_LENGTH (expr); i++)
866 	DFS_follow_tree_edge (TREE_VEC_ELT (expr, i));
867     }
868 
869   if (CODE_CONTAINS_STRUCT (code, TS_EXP))
870     {
871       for (int i = 0; i < TREE_OPERAND_LENGTH (expr); i++)
872 	DFS_follow_tree_edge (TREE_OPERAND (expr, i));
873       DFS_follow_tree_edge (TREE_BLOCK (expr));
874     }
875 
876   if (CODE_CONTAINS_STRUCT (code, TS_BLOCK))
877     {
878       for (tree t = BLOCK_VARS (expr); t; t = TREE_CHAIN (t))
879 	if (VAR_OR_FUNCTION_DECL_P (t)
880 	    && DECL_EXTERNAL (t))
881 	  /* We have to stream externals in the block chain as
882 	     non-references.  See also
883 	     tree-streamer-out.c:streamer_write_chain.  */
884 	  DFS_write_tree (ob, expr_state, t, ref_p, false);
885 	else
886 	  DFS_follow_tree_edge (t);
887 
888       DFS_follow_tree_edge (BLOCK_SUPERCONTEXT (expr));
889 
890       /* Follow BLOCK_ABSTRACT_ORIGIN for the limited cases we can
891 	 handle - those that represent inlined function scopes.
892 	 Drop the rest on the floor instead of ICEing
893 	 in dwarf2out.c.  */
894       if (inlined_function_outer_scope_p (expr))
895 	{
896 	  tree ultimate_origin = block_ultimate_origin (expr);
897 	  DFS_follow_tree_edge (ultimate_origin);
898 	}
899       /* Do not follow BLOCK_NONLOCALIZED_VARS.  We cannot handle debug
900 	 information for early inlined BLOCKs so drop it on the floor instead
901 	 of ICEing in dwarf2out.c.  */
902 
903       /* BLOCK_FRAGMENT_ORIGIN and BLOCK_FRAGMENT_CHAIN is not live at LTO
904 	 streaming time.  */
905 
906       /* Do not output BLOCK_SUBBLOCKS.  Instead on streaming-in this
907 	 list is re-constructed from BLOCK_SUPERCONTEXT.  */
908     }
909 
910   if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
911     {
912       unsigned i;
913       tree t;
914 
915       /* Note that the number of BINFO slots has already been emitted in
916 	 EXPR's header (see streamer_write_tree_header) because this length
917 	 is needed to build the empty BINFO node on the reader side.  */
918       FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
919 	DFS_follow_tree_edge (t);
920       DFS_follow_tree_edge (BINFO_OFFSET (expr));
921       DFS_follow_tree_edge (BINFO_VTABLE (expr));
922       DFS_follow_tree_edge (BINFO_VPTR_FIELD (expr));
923 
924       /* The number of BINFO_BASE_ACCESSES has already been emitted in
925 	 EXPR's bitfield section.  */
926       FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
927 	DFS_follow_tree_edge (t);
928 
929       /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
930 	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
931     }
932 
933   if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
934     {
935       unsigned i;
936       tree index, value;
937 
938       FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (expr), i, index, value)
939 	{
940 	  DFS_follow_tree_edge (index);
941 	  DFS_follow_tree_edge (value);
942 	}
943     }
944 
945   if (code == OMP_CLAUSE)
946     {
947       int i;
948       for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (expr)]; i++)
949 	DFS_follow_tree_edge (OMP_CLAUSE_OPERAND (expr, i));
950       DFS_follow_tree_edge (OMP_CLAUSE_CHAIN (expr));
951     }
952 
953 #undef DFS_follow_tree_edge
954 }
955 
956 /* Return a hash value for the tree T.
957    CACHE holds hash values of trees outside the current SCC.  MAP, if non-NULL,
958    may hold hash values of trees inside the current SCC.  */
959 
960 static hashval_t
961 hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map, tree t)
962 {
963   inchash::hash hstate;
964 
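/* Mix the hash of the tree referenced by SIBLING into HSTATE: use the
   writer cache for trees outside the current SCC, MAP (if given) for
   trees inside it, and a constant otherwise.  */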
965 #define visit(SIBLING) \
966   do { \
967     unsigned ix; \
968     if (!SIBLING) \
969       hstate.add_int (0); \
970     else if (streamer_tree_cache_lookup (cache, SIBLING, &ix)) \
971       hstate.add_int (streamer_tree_cache_get_hash (cache, ix)); \
972     else if (map) \
973       hstate.add_int (*map->get (SIBLING)); \
974     else \
975       hstate.add_int (1); \
976   } while (0)
977 
978   /* Hash TS_BASE.  */
979   enum tree_code code = TREE_CODE (t);
980   hstate.add_int (code);
981   if (!TYPE_P (t))
982     {
983       hstate.add_flag (TREE_SIDE_EFFECTS (t));
984       hstate.add_flag (TREE_CONSTANT (t));
985       hstate.add_flag (TREE_READONLY (t));
986       hstate.add_flag (TREE_PUBLIC (t));
987     }
988   hstate.add_flag (TREE_ADDRESSABLE (t));
989   hstate.add_flag (TREE_THIS_VOLATILE (t));
990   if (DECL_P (t))
991     hstate.add_flag (DECL_UNSIGNED (t));
992   else if (TYPE_P (t))
993     hstate.add_flag (TYPE_UNSIGNED (t));
994   if (TYPE_P (t))
995     hstate.add_flag (TYPE_ARTIFICIAL (t));
996   else
997     hstate.add_flag (TREE_NO_WARNING (t));
998   hstate.add_flag (TREE_NOTHROW (t));
999   hstate.add_flag (TREE_STATIC (t));
1000   hstate.add_flag (TREE_PROTECTED (t));
1001   hstate.add_flag (TREE_DEPRECATED (t));
1002   if (code != TREE_BINFO)
1003     hstate.add_flag (TREE_PRIVATE (t));
1004   if (TYPE_P (t))
1005     {
1006       hstate.add_flag (AGGREGATE_TYPE_P (t)
1007 		       ? TYPE_REVERSE_STORAGE_ORDER (t) : TYPE_SATURATING (t));
1008       hstate.add_flag (TYPE_ADDR_SPACE (t));
1009     }
1010   else if (code == SSA_NAME)
1011     hstate.add_flag (SSA_NAME_IS_DEFAULT_DEF (t));
1012   hstate.commit_flag ();
1013 
1014   if (CODE_CONTAINS_STRUCT (code, TS_INT_CST))
1015     {
1016       int i;
1017       hstate.add_wide_int (TREE_INT_CST_NUNITS (t));
1018       hstate.add_wide_int (TREE_INT_CST_EXT_NUNITS (t));
1019       for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
1020 	hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
1021     }
1022 
1023   if (CODE_CONTAINS_STRUCT (code, TS_REAL_CST))
1024     {
1025       REAL_VALUE_TYPE r = TREE_REAL_CST (t);
1026       hstate.add_flag (r.cl);
1027       hstate.add_flag (r.sign);
1028       hstate.add_flag (r.signalling);
1029       hstate.add_flag (r.canonical);
1030       hstate.commit_flag ();
1031       hstate.add_int (r.uexp);
1032       hstate.add (r.sig, sizeof (r.sig));
1033     }
1034 
1035   if (CODE_CONTAINS_STRUCT (code, TS_FIXED_CST))
1036     {
1037       FIXED_VALUE_TYPE f = TREE_FIXED_CST (t);
1038       hstate.add_int (f.mode);
1039       hstate.add_int (f.data.low);
1040       hstate.add_int (f.data.high);
1041     }
1042 
1043   if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1044     {
1045       hstate.add_wide_int (DECL_MODE (t));
1046       hstate.add_flag (DECL_NONLOCAL (t));
1047       hstate.add_flag (DECL_VIRTUAL_P (t));
1048       hstate.add_flag (DECL_IGNORED_P (t));
1049       hstate.add_flag (DECL_ABSTRACT_P (t));
1050       hstate.add_flag (DECL_ARTIFICIAL (t));
1051       hstate.add_flag (DECL_USER_ALIGN (t));
1052       hstate.add_flag (DECL_PRESERVE_P (t));
1053       hstate.add_flag (DECL_EXTERNAL (t));
1054       hstate.add_flag (DECL_GIMPLE_REG_P (t));
1055       hstate.commit_flag ();
1056       hstate.add_int (DECL_ALIGN (t));
1057       if (code == LABEL_DECL)
1058 	{
1059           hstate.add_int (EH_LANDING_PAD_NR (t));
1060 	  hstate.add_int (LABEL_DECL_UID (t));
1061 	}
1062       else if (code == FIELD_DECL)
1063 	{
1064 	  hstate.add_flag (DECL_PACKED (t));
1065 	  hstate.add_flag (DECL_NONADDRESSABLE_P (t));
1066 	  hstate.add_int (DECL_OFFSET_ALIGN (t));
1067 	}
1068       else if (code == VAR_DECL)
1069 	{
1070 	  hstate.add_flag (DECL_HAS_DEBUG_EXPR_P (t));
1071 	  hstate.add_flag (DECL_NONLOCAL_FRAME (t));
1072 	}
1073       if (code == RESULT_DECL
1074 	  || code == PARM_DECL
1075 	  || code == VAR_DECL)
1076 	{
1077 	  hstate.add_flag (DECL_BY_REFERENCE (t));
1078 	  if (code == VAR_DECL
1079 	      || code == PARM_DECL)
1080 	    hstate.add_flag (DECL_HAS_VALUE_EXPR_P (t));
1081 	}
1082       hstate.commit_flag ();
1083     }
1084 
1085   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WRTL))
1086     hstate.add_int (DECL_REGISTER (t));
1087 
1088   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1089     {
1090       hstate.add_flag (DECL_COMMON (t));
1091       hstate.add_flag (DECL_DLLIMPORT_P (t));
1092       hstate.add_flag (DECL_WEAK (t));
1093       hstate.add_flag (DECL_SEEN_IN_BIND_EXPR_P (t));
1094       hstate.add_flag (DECL_COMDAT (t));
1095       hstate.add_flag (DECL_VISIBILITY_SPECIFIED (t));
1096       hstate.add_int (DECL_VISIBILITY (t));
1097       if (code == VAR_DECL)
1098 	{
1099 	  /* DECL_IN_TEXT_SECTION is set during final asm output only.  */
1100 	  hstate.add_flag (DECL_HARD_REGISTER (t));
1101 	  hstate.add_flag (DECL_IN_CONSTANT_POOL (t));
1102 	}
1103       if (TREE_CODE (t) == FUNCTION_DECL)
1104         {
1105 	  hstate.add_flag (DECL_FINAL_P (t));
1106 	  hstate.add_flag (DECL_CXX_CONSTRUCTOR_P (t));
1107 	  hstate.add_flag (DECL_CXX_DESTRUCTOR_P (t));
1108 	}
1109       hstate.commit_flag ();
1110     }
1111 
1112   if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1113     {
1114       hstate.add_int (DECL_BUILT_IN_CLASS (t));
1115       hstate.add_flag (DECL_STATIC_CONSTRUCTOR (t));
1116       hstate.add_flag (DECL_STATIC_DESTRUCTOR (t));
1117       hstate.add_flag (DECL_UNINLINABLE (t));
1118       hstate.add_flag (DECL_POSSIBLY_INLINED (t));
1119       hstate.add_flag (DECL_IS_NOVOPS (t));
1120       hstate.add_flag (DECL_IS_RETURNS_TWICE (t));
1121       hstate.add_flag (DECL_IS_MALLOC (t));
1122       hstate.add_flag (DECL_IS_OPERATOR_NEW (t));
1123       hstate.add_flag (DECL_DECLARED_INLINE_P (t));
1124       hstate.add_flag (DECL_STATIC_CHAIN (t));
1125       hstate.add_flag (DECL_NO_INLINE_WARNING_P (t));
1126       hstate.add_flag (DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (t));
1127       hstate.add_flag (DECL_NO_LIMIT_STACK (t));
1128       hstate.add_flag (DECL_DISREGARD_INLINE_LIMITS (t));
1129       hstate.add_flag (DECL_PURE_P (t));
1130       hstate.add_flag (DECL_LOOPING_CONST_OR_PURE_P (t));
1131       hstate.commit_flag ();
1132       if (DECL_BUILT_IN_CLASS (t) != NOT_BUILT_IN)
1133 	hstate.add_int (DECL_FUNCTION_CODE (t));
1134     }
1135 
1136   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1137     {
1138       hstate.add_wide_int (TYPE_MODE (t));
1139       hstate.add_flag (TYPE_STRING_FLAG (t));
1140       /* TYPE_NO_FORCE_BLK is private to stor-layout and needs
1141 	 no streaming.  */
1142       hstate.add_flag (TYPE_NEEDS_CONSTRUCTING (t));
1143       hstate.add_flag (TYPE_PACKED (t));
1144       hstate.add_flag (TYPE_RESTRICT (t));
1145       hstate.add_flag (TYPE_USER_ALIGN (t));
1146       hstate.add_flag (TYPE_READONLY (t));
1147       if (RECORD_OR_UNION_TYPE_P (t))
1148 	{
1149 	  hstate.add_flag (TYPE_TRANSPARENT_AGGR (t));
1150 	  hstate.add_flag (TYPE_FINAL_P (t));
1151 	}
1152       else if (code == ARRAY_TYPE)
1153 	hstate.add_flag (TYPE_NONALIASED_COMPONENT (t));
1154       hstate.commit_flag ();
1155       hstate.add_int (TYPE_PRECISION (t));
1156       hstate.add_int (TYPE_ALIGN (t));
1157     }
1158 
1159   if (CODE_CONTAINS_STRUCT (code, TS_TRANSLATION_UNIT_DECL))
1160     hstate.add (TRANSLATION_UNIT_LANGUAGE (t),
1161 			strlen (TRANSLATION_UNIT_LANGUAGE (t)));
1162 
1163   if (CODE_CONTAINS_STRUCT (code, TS_TARGET_OPTION)
1164       /* We don't stream these when passing things to a different target.  */
1165       && !lto_stream_offload_p)
1166     hstate.add_wide_int (cl_target_option_hash (TREE_TARGET_OPTION (t)));
1167 
1168   if (CODE_CONTAINS_STRUCT (code, TS_OPTIMIZATION))
1169     hstate.add_wide_int (cl_optimization_hash (TREE_OPTIMIZATION (t)));
1170 
1171   if (CODE_CONTAINS_STRUCT (code, TS_IDENTIFIER))
1172     hstate.merge_hash (IDENTIFIER_HASH_VALUE (t));
1173 
1174   if (CODE_CONTAINS_STRUCT (code, TS_STRING))
1175     hstate.add (TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
1176 
1177   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
1178     {
1179       if (code != IDENTIFIER_NODE)
1180 	visit (TREE_TYPE (t));
1181     }
1182 
1183   if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
1184     for (unsigned i = 0; i < VECTOR_CST_NELTS (t); ++i)
1185       visit (VECTOR_CST_ELT (t, i));
1186 
1187   if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
1188     {
1189       visit (TREE_REALPART (t));
1190       visit (TREE_IMAGPART (t));
1191     }
1192 
1193   if (CODE_CONTAINS_STRUCT (code, TS_DECL_MINIMAL))
1194     {
1195       /* Drop names that were created for anonymous entities.  */
1196       if (DECL_NAME (t)
1197 	  && TREE_CODE (DECL_NAME (t)) == IDENTIFIER_NODE
1198 	  && anon_aggrname_p (DECL_NAME (t)))
1199 	;
1200       else
1201 	visit (DECL_NAME (t));
1202       if (DECL_FILE_SCOPE_P (t))
1203 	;
1204       else
1205 	visit (DECL_CONTEXT (t));
1206     }
1207 
1208   if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
1209     {
1210       visit (DECL_SIZE (t));
1211       visit (DECL_SIZE_UNIT (t));
1212       visit (DECL_ATTRIBUTES (t));
1213       if ((code == VAR_DECL
1214 	   || code == PARM_DECL)
1215 	  && DECL_HAS_VALUE_EXPR_P (t))
1216 	visit (DECL_VALUE_EXPR (t));
1217       if (code == VAR_DECL
1218 	  && DECL_HAS_DEBUG_EXPR_P (t))
1219 	visit (DECL_DEBUG_EXPR (t));
1220       /* ???  Hash DECL_INITIAL as streamed.  Needs the output-block to
1221          be able to call get_symbol_initial_value.  */
1222     }
1223 
1224   if (CODE_CONTAINS_STRUCT (code, TS_DECL_NON_COMMON))
1225     {
1226       if (code == TYPE_DECL)
1227 	visit (DECL_ORIGINAL_TYPE (t));
1228     }
1229 
1230   if (CODE_CONTAINS_STRUCT (code, TS_DECL_WITH_VIS))
1231     {
1232       if (DECL_ASSEMBLER_NAME_SET_P (t))
1233 	visit (DECL_ASSEMBLER_NAME (t));
1234     }
1235 
1236   if (CODE_CONTAINS_STRUCT (code, TS_FIELD_DECL))
1237     {
1238       visit (DECL_FIELD_OFFSET (t));
1239       visit (DECL_BIT_FIELD_TYPE (t));
1240       visit (DECL_BIT_FIELD_REPRESENTATIVE (t));
1241       visit (DECL_FIELD_BIT_OFFSET (t));
1242       visit (DECL_FCONTEXT (t));
1243     }
1244 
1245   if (CODE_CONTAINS_STRUCT (code, TS_FUNCTION_DECL))
1246     {
1247       visit (DECL_VINDEX (t));
1248       visit (DECL_FUNCTION_PERSONALITY (t));
1249       visit (DECL_FUNCTION_SPECIFIC_TARGET (t));
1250       visit (DECL_FUNCTION_SPECIFIC_OPTIMIZATION (t));
1251     }
1252 
1253   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_COMMON))
1254     {
1255       visit (TYPE_SIZE (t));
1256       visit (TYPE_SIZE_UNIT (t));
1257       visit (TYPE_ATTRIBUTES (t));
1258       visit (TYPE_NAME (t));
1259       visit (TYPE_MAIN_VARIANT (t));
1260       if (TYPE_FILE_SCOPE_P (t))
1261 	;
1262       else
1263 	visit (TYPE_CONTEXT (t));
1264       visit (TYPE_STUB_DECL (t));
1265     }
1266 
1267   if (CODE_CONTAINS_STRUCT (code, TS_TYPE_NON_COMMON))
1268     {
1269       if (code == ENUMERAL_TYPE)
1270 	visit (TYPE_VALUES (t));
1271       else if (code == ARRAY_TYPE)
1272 	visit (TYPE_DOMAIN (t));
1273       else if (RECORD_OR_UNION_TYPE_P (t))
1274 	for (tree f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
1275 	  visit (f);
1276       else if (code == FUNCTION_TYPE
1277 	       || code == METHOD_TYPE)
1278 	visit (TYPE_ARG_TYPES (t));
1279       if (!POINTER_TYPE_P (t))
1280 	visit (TYPE_MINVAL (t));
1281       visit (TYPE_MAXVAL (t));
1282       if (RECORD_OR_UNION_TYPE_P (t))
1283 	visit (TYPE_BINFO (t));
1284     }
1285 
1286   if (CODE_CONTAINS_STRUCT (code, TS_LIST))
1287     {
1288       visit (TREE_PURPOSE (t));
1289       visit (TREE_VALUE (t));
1290       visit (TREE_CHAIN (t));
1291     }
1292 
1293   if (CODE_CONTAINS_STRUCT (code, TS_VEC))
1294     for (int i = 0; i < TREE_VEC_LENGTH (t); ++i)
1295       visit (TREE_VEC_ELT (t, i));
1296 
1297   if (CODE_CONTAINS_STRUCT (code, TS_EXP))
1298     {
1299       hstate.add_wide_int (TREE_OPERAND_LENGTH (t));
1300       for (int i = 0; i < TREE_OPERAND_LENGTH (t); ++i)
1301 	visit (TREE_OPERAND (t, i));
1302     }
1303 
1304   if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
1305     {
1306       unsigned i;
1307       tree b;
1308       FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (t), i, b)
1309 	visit (b);
1310       visit (BINFO_OFFSET (t));
1311       visit (BINFO_VTABLE (t));
1312       visit (BINFO_VPTR_FIELD (t));
1313       FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (t), i, b)
1314 	visit (b);
1315       /* Do not walk BINFO_INHERITANCE_CHAIN, BINFO_SUBVTT_INDEX
1316 	 and BINFO_VPTR_INDEX; these are used by C++ FE only.  */
1317     }
1318 
1319   if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
1320     {
1321       unsigned i;
1322       tree index, value;
1323       hstate.add_wide_int (CONSTRUCTOR_NELTS (t));
1324       FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), i, index, value)
1325 	{
1326 	  visit (index);
1327 	  visit (value);
1328 	}
1329     }
1330 
1331   if (code == OMP_CLAUSE)
1332     {
1333       int i;
1334       HOST_WIDE_INT val;
1335 
1336       hstate.add_wide_int (OMP_CLAUSE_CODE (t));
1337       switch (OMP_CLAUSE_CODE (t))
1338 	{
1339 	case OMP_CLAUSE_DEFAULT:
1340 	  val = OMP_CLAUSE_DEFAULT_KIND (t);
1341 	  break;
1342 	case OMP_CLAUSE_SCHEDULE:
1343 	  val = OMP_CLAUSE_SCHEDULE_KIND (t);
1344 	  break;
1345 	case OMP_CLAUSE_DEPEND:
1346 	  val = OMP_CLAUSE_DEPEND_KIND (t);
1347 	  break;
1348 	case OMP_CLAUSE_MAP:
1349 	  val = OMP_CLAUSE_MAP_KIND (t);
1350 	  break;
1351 	case OMP_CLAUSE_PROC_BIND:
1352 	  val = OMP_CLAUSE_PROC_BIND_KIND (t);
1353 	  break;
1354 	case OMP_CLAUSE_REDUCTION:
1355 	  val = OMP_CLAUSE_REDUCTION_CODE (t);
1356 	  break;
1357 	default:
1358 	  val = 0;
1359 	  break;
1360 	}
1361       hstate.add_wide_int (val);
1362       for (i = 0; i < omp_clause_num_ops[OMP_CLAUSE_CODE (t)]; i++)
1363 	visit (OMP_CLAUSE_OPERAND (t, i));
1364       visit (OMP_CLAUSE_CHAIN (t));
1365     }
1366 
1367   return hstate.end ();
1368 
1369 #undef visit
1370 }
1371 
1372 /* Compare two SCC entries by their hash value for qsorting them.  */
1373 
1374 int
1375 DFS::scc_entry_compare (const void *p1_, const void *p2_)
1376 {
1377   const scc_entry *p1 = (const scc_entry *) p1_;
1378   const scc_entry *p2 = (const scc_entry *) p2_;
1379   if (p1->hash < p2->hash)
1380     return -1;
1381   else if (p1->hash > p2->hash)
1382     return 1;
1383   return 0;
1384 }
1385 
1386 /* Return a hash value for the SCC on the SCC stack from FIRST with SIZE.
1387    THIS_REF_P and REF_P are as passed to lto_output_tree for FIRST.  */
1388 
1389 hashval_t
1390 DFS::hash_scc (struct output_block *ob, unsigned first, unsigned size,
1391 	       bool ref_p, bool this_ref_p)
1392 {
1393   unsigned int last_classes = 0, iterations = 0;
1394 
1395   /* Compute hash values for the SCC members.  */
1396   for (unsigned i = 0; i < size; ++i)
1397     sccstack[first+i].hash
1398       = hash_tree (ob->writer_cache, NULL, sccstack[first+i].t);
1399 
1400   if (size == 1)
1401     return sccstack[first].hash;
1402 
1403   /* We aim to get unique hash for every tree within SCC and compute hash value
1404      of the whole SCC by combining all values together in a stable (entry-point
1405      independent) order.  This guarantees that the same SCC regions within
1406      different translation units will get the same hash values and therefore
1407      will be merged at WPA time.
1408 
1409      Often the hashes are already unique.  In that case we compute the SCC hash
1410      by combining individual hash values in an increasing order.
1411 
1412      If there are duplicates, we seek at least one tree with a unique hash
1413      (picking the one with the smallest such hash).  Then we obtain a stable
1414      order by a DFS walk starting from this unique tree and use the index
1415      within this order to make the individual hash values unique.
1416 
1417      If there is no tree with unique hash, we iteratively propagate the hash
1418      values across the internal edges of SCC.  This usually quickly leads
1419      to unique hashes.  Consider, for example, an SCC containing two pointers
1420      that are identical except for the types they point to and assume that
1421      these types are also part of the SCC.  The propagation will add the
1422      points-to type information into their hash values.  */
1423   do
1424     {
1425       /* Sort the SCC so we can easily check for uniqueness.  */
1426       qsort (&sccstack[first], size, sizeof (scc_entry), scc_entry_compare);
1427 
1428       unsigned int classes = 1;
1429       int firstunique = -1;
1430 
1431       /* Find the tree with lowest unique hash (if it exists) and compute
1432 	 the number of equivalence classes.  */
1433       if (sccstack[first].hash != sccstack[first+1].hash)
1434 	firstunique = 0;
1435       for (unsigned i = 1; i < size; ++i)
1436 	if (sccstack[first+i-1].hash != sccstack[first+i].hash)
1437 	  {
1438 	    classes++;
1439 	    if (firstunique == -1
1440 		&& (i == size - 1
1441 		    || sccstack[first+i+1].hash != sccstack[first+i].hash))
1442 	      firstunique = i;
1443 	  }
1444 
1445       /* If we found a tree with unique hash, stop the iteration.  */
1446       if (firstunique != -1
1447 	  /* Also terminate if we run out of iterations or if the number of
1448 	     equivalence classes is no longer increasing.
1449 	     For example a cyclic list of trees that are all equivalent will
1450 	     never have a unique entry point; we however do not build such SCCs
1451 	     in our IL.  */
1452 	  || classes <= last_classes || iterations > 16)
1453 	{
1454           hashval_t scc_hash;
1455 
1456 	  /* If some hashes are not unique (CLASSES != SIZE), use the DFS walk
1457 	     starting from FIRSTUNIQUE to obtain a stable order.  */
1458 	  if (classes != size && firstunique != -1)
1459 	    {
1460 	      hash_map <tree, hashval_t> map(size*2);
1461 
1462 	      /* Store hash values into a map, so we can associate them with
1463 		 the reordered SCC.  */
1464 	      for (unsigned i = 0; i < size; ++i)
1465 		map.put (sccstack[first+i].t, sccstack[first+i].hash);
1466 
1467 	      DFS again (ob, sccstack[first+firstunique].t, ref_p, this_ref_p,
1468 			 true);
1469 	      gcc_assert (again.sccstack.length () == size);
1470 
1471 	      memcpy (sccstack.address () + first,
1472 		      again.sccstack.address (),
1473 		      sizeof (scc_entry) * size);
1474 
1475 	      /* Update hash values of individual members by hashing in the
1476 		 index within the stable order.  This ensures uniqueness.
1477 		 Also compute the SCC hash by mixing in all hash values in
1478 		 the stable order we obtained.  */
1479 	      sccstack[first].hash = *map.get (sccstack[first].t);
1480 	      scc_hash = sccstack[first].hash;
1481 	      for (unsigned i = 1; i < size; ++i)
1482 		{
1483 		  sccstack[first+i].hash
1484 		    = iterative_hash_hashval_t (i,
1485 						*map.get (sccstack[first+i].t));
1486 		  scc_hash
1487 		    = iterative_hash_hashval_t (scc_hash,
1488 						sccstack[first+i].hash);
1489 		}
1490 	    }
1491 	  /* If we got a unique hash value for each tree, then sort already
1492 	     ensured entry-point independent order.  Only compute the final
1493 	     SCC hash.
1494 
1495 	     If we failed to find the unique entry point, we go by the same
1496 	     route.  We will eventually introduce unwanted hash conflicts.  */
1497 	  else
1498 	    {
1499 	      scc_hash = sccstack[first].hash;
1500 	      for (unsigned i = 1; i < size; ++i)
1501 		scc_hash
1502 		  = iterative_hash_hashval_t (scc_hash, sccstack[first+i].hash);
1503 
1504 	      /* We cannot 100% guarantee that hash collisions will never make
1505 		 it impossible to find a unique entry point.  This however
1506 		 should be an extremely rare case.  ICE for now so possible
1507 		 issues are found and evaluated.  */
1508 	      gcc_checking_assert (classes == size);
1509 	    }
1510 
1511 	  /* To avoid conflicts across SCCs, iteratively hash the whole SCC
1512 	     hash into the hash of each element.  */
1513 	  for (unsigned i = 0; i < size; ++i)
1514 	    sccstack[first+i].hash
1515 	      = iterative_hash_hashval_t (sccstack[first+i].hash, scc_hash);
1516 	  return scc_hash;
1517 	}
1518 
1519       last_classes = classes;
1520       iterations++;
1521 
1522       /* We failed to identify the entry point; propagate hash values across
1523 	 the edges.  */
1524       hash_map <tree, hashval_t> map(size*2);
1525 
1526       for (unsigned i = 0; i < size; ++i)
1527 	map.put (sccstack[first+i].t, sccstack[first+i].hash);
1528 
1529       for (unsigned i = 0; i < size; i++)
1530 	sccstack[first+i].hash
1531 	  = hash_tree (ob->writer_cache, &map, sccstack[first+i].t);
1532     }
1533   while (true);
1534 }
1535 
1536 /* DFS walk EXPR and stream SCCs of tree bodies if they are not
1537    already in the streamer cache.  Main routine called for
1538    each visit of EXPR.  */
1539 
1540 void
1541 DFS::DFS_write_tree (struct output_block *ob, sccs *from_state,
1542 		     tree expr, bool ref_p, bool this_ref_p)
1543 {
1544   /* Handle special cases.  */
1545   if (expr == NULL_TREE)
1546     return;
1547 
1548   /* Do not DFS walk into indexable trees.  */
1549   if (this_ref_p && tree_is_indexable (expr))
1550     return;
1551 
1552   /* Check if we already streamed EXPR.  */
1553   if (streamer_tree_cache_lookup (ob->writer_cache, expr, NULL))
1554     return;
1555 
1556   worklist w;
1557   w.expr = expr;
1558   w.from_state = from_state;
1559   w.cstate = NULL;
1560   w.ref_p = ref_p;
1561   w.this_ref_p = this_ref_p;
1562   worklist_vec.safe_push (w);
1563 }
1564 
1565 
1566 /* Emit the physical representation of tree node EXPR to output block OB.
1567    If THIS_REF_P is true, the leaves of EXPR are emitted as references via
1568    lto_output_tree_ref.  REF_P is used for streaming siblings of EXPR.  */
1569 
1570 void
1571 lto_output_tree (struct output_block *ob, tree expr,
1572 		 bool ref_p, bool this_ref_p)
1573 {
1574   unsigned ix;
1575   bool existed_p;
1576 
1577   if (expr == NULL_TREE)
1578     {
1579       streamer_write_record_start (ob, LTO_null);
1580       return;
1581     }
1582 
1583   if (this_ref_p && tree_is_indexable (expr))
1584     {
1585       lto_output_tree_ref (ob, expr);
1586       return;
1587     }
1588 
1589   existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1590   if (existed_p)
1591     {
1592       /* If a node has already been streamed out, make sure that
1593 	 we don't write it more than once.  Otherwise, the reader
1594 	 will instantiate two different nodes for the same object.  */
1595       streamer_write_record_start (ob, LTO_tree_pickle_reference);
1596       streamer_write_uhwi (ob, ix);
1597       streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1598 			   lto_tree_code_to_tag (TREE_CODE (expr)));
1599       lto_stats.num_pickle_refs_output++;
1600     }
1601   else
1602     {
1603       /* This is the first time we see EXPR, write all reachable
1604 	 trees to OB.  */
1605       static bool in_dfs_walk;
1606 
1607       /* Protect against recursion, which would mean a disconnect between
1608          the tree edges we walk in the DFS walk and the edges we
1609 	 stream out.  */
1610       gcc_assert (!in_dfs_walk);
1611 
1612       /* Start the DFS walk.  */
1615       in_dfs_walk = true;
1616       DFS (ob, expr, ref_p, this_ref_p, false);
1617       in_dfs_walk = false;
1618 
1619       /* Finally append a reference to the tree we were writing.
1620 	 ???  If expr ended up as a singleton we could have
1621 	 inlined it here and avoid outputting a reference.  */
1622       existed_p = streamer_tree_cache_lookup (ob->writer_cache, expr, &ix);
1623       gcc_assert (existed_p);
1624       streamer_write_record_start (ob, LTO_tree_pickle_reference);
1625       streamer_write_uhwi (ob, ix);
1626       streamer_write_enum (ob->main_stream, LTO_tags, LTO_NUM_TAGS,
1627 			   lto_tree_code_to_tag (TREE_CODE (expr)));
1628       lto_stats.num_pickle_refs_output++;
1629     }
1630 }
1631 
1632 
1633 /* Output to OB a list of try/catch handlers starting with FIRST.  */
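/* Roughly, the emitted layout is one LTO_eh_catch record per handler,
   each followed by its TYPE_LIST, FILTER_LIST and LABEL trees, with a
   trailing LTO_null terminating the list.  */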
1634 
1635 static void
1636 output_eh_try_list (struct output_block *ob, eh_catch first)
1637 {
1638   eh_catch n;
1639 
1640   for (n = first; n; n = n->next_catch)
1641     {
1642       streamer_write_record_start (ob, LTO_eh_catch);
1643       stream_write_tree (ob, n->type_list, true);
1644       stream_write_tree (ob, n->filter_list, true);
1645       stream_write_tree (ob, n->label, true);
1646     }
1647 
1648   streamer_write_record_start (ob, LTO_null);
1649 }
1650 
1651 
1652 /* Output EH region R to OB.  If R is NULL, only an LTO_null record is
1653    emitted.  Absent related regions (outer, inner, peer, landing pad)
1654    are encoded as index zero.  */
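/* A sketch of the record emitted for a non-NULL region:

     <LTO_ert_* tag> <index>
     <outer index or 0> <inner index or 0> <next-peer index or 0>
     <payload depending on the region type>
     <first landing-pad index or 0>  */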
1655 
1656 static void
1657 output_eh_region (struct output_block *ob, eh_region r)
1658 {
1659   enum LTO_tags tag;
1660 
1661   if (r == NULL)
1662     {
1663       streamer_write_record_start (ob, LTO_null);
1664       return;
1665     }
1666 
1667   if (r->type == ERT_CLEANUP)
1668     tag = LTO_ert_cleanup;
1669   else if (r->type == ERT_TRY)
1670     tag = LTO_ert_try;
1671   else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1672     tag = LTO_ert_allowed_exceptions;
1673   else if (r->type == ERT_MUST_NOT_THROW)
1674     tag = LTO_ert_must_not_throw;
1675   else
1676     gcc_unreachable ();
1677 
1678   streamer_write_record_start (ob, tag);
1679   streamer_write_hwi (ob, r->index);
1680 
1681   if (r->outer)
1682     streamer_write_hwi (ob, r->outer->index);
1683   else
1684     streamer_write_zero (ob);
1685 
1686   if (r->inner)
1687     streamer_write_hwi (ob, r->inner->index);
1688   else
1689     streamer_write_zero (ob);
1690 
1691   if (r->next_peer)
1692     streamer_write_hwi (ob, r->next_peer->index);
1693   else
1694     streamer_write_zero (ob);
1695 
1696   if (r->type == ERT_TRY)
1697     {
1698       output_eh_try_list (ob, r->u.eh_try.first_catch);
1699     }
1700   else if (r->type == ERT_ALLOWED_EXCEPTIONS)
1701     {
1702       stream_write_tree (ob, r->u.allowed.type_list, true);
1703       stream_write_tree (ob, r->u.allowed.label, true);
1704       streamer_write_uhwi (ob, r->u.allowed.filter);
1705     }
1706   else if (r->type == ERT_MUST_NOT_THROW)
1707     {
1708       stream_write_tree (ob, r->u.must_not_throw.failure_decl, true);
1709       bitpack_d bp = bitpack_create (ob->main_stream);
1710       stream_output_location (ob, &bp, r->u.must_not_throw.failure_loc);
1711       streamer_write_bitpack (&bp);
1712     }
1713 
1714   if (r->landing_pads)
1715     streamer_write_hwi (ob, r->landing_pads->index);
1716   else
1717     streamer_write_zero (ob);
1718 }
1719 
1720 
1721 /* Output landing pad LP to OB.  */
1722 
1723 static void
1724 output_eh_lp (struct output_block *ob, eh_landing_pad lp)
1725 {
1726   if (lp == NULL)
1727     {
1728       streamer_write_record_start (ob, LTO_null);
1729       return;
1730     }
1731 
1732   streamer_write_record_start (ob, LTO_eh_landing_pad);
1733   streamer_write_hwi (ob, lp->index);
1734   if (lp->next_lp)
1735     streamer_write_hwi (ob, lp->next_lp->index);
1736   else
1737     streamer_write_zero (ob);
1738 
1739   if (lp->region)
1740     streamer_write_hwi (ob, lp->region->index);
1741   else
1742     streamer_write_zero (ob);
1743 
1744   stream_write_tree (ob, lp->post_landing_pad, true);
1745 }
1746 
1747 
1748 /* Output the existing eh_table to OB.  */
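/* A sketch of the eh_table record emitted when the function has EH:

     LTO_eh_table <root region index>
     <number of regions> <region records>
     <number of landing pads> <landing-pad records>
     <number of runtime type entries> <trees>
     <action chain / exception specification data>
     LTO_null

   A function without EH gets only the final LTO_null.  */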
1749 
1750 static void
1751 output_eh_regions (struct output_block *ob, struct function *fn)
1752 {
1753   if (fn->eh && fn->eh->region_tree)
1754     {
1755       unsigned i;
1756       eh_region eh;
1757       eh_landing_pad lp;
1758       tree ttype;
1759 
1760       streamer_write_record_start (ob, LTO_eh_table);
1761 
1762       /* Emit the index of the root of the EH region tree.  */
1763       streamer_write_hwi (ob, fn->eh->region_tree->index);
1764 
1765       /* Emit all the EH regions in the region array.  */
1766       streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
1767       FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
1768 	output_eh_region (ob, eh);
1769 
1770       /* Emit all landing pads.  */
1771       streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
1772       FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
1773 	output_eh_lp (ob, lp);
1774 
1775       /* Emit all the runtime type data.  */
1776       streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
1777       FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
1778 	stream_write_tree (ob, ttype, true);
1779 
1780       /* Emit the table of action chains.  */
1781       if (targetm.arm_eabi_unwinder)
1782 	{
1783 	  tree t;
1784 	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
1785 	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
1786 	    stream_write_tree (ob, t, true);
1787 	}
1788       else
1789 	{
1790 	  uchar c;
1791 	  streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
1792 	  FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
1793 	    streamer_write_char_stream (ob->main_stream, c);
1794 	}
1795     }
1796 
1797   /* The LTO_null either terminates the record or indicates that there
1798      are no eh_records at all.  */
1799   streamer_write_record_start (ob, LTO_null);
1800 }
1801 
1802 
1803 /* Output all of the active ssa names to OB.  */
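/* Roughly, the emitted format is the length of the SSA name vector, then,
   for every name that is in use and non-virtual, its index, a byte holding
   SSA_NAME_IS_DEFAULT_DEF, and either SSA_NAME_VAR or the name's type; a
   trailing zero index terminates the list.  The loop below starts at 1
   because SSA name slot 0 is reserved.  */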
1804 
1805 static void
1806 output_ssa_names (struct output_block *ob, struct function *fn)
1807 {
1808   unsigned int i, len;
1809 
1810   len = vec_safe_length (SSANAMES (fn));
1811   streamer_write_uhwi (ob, len);
1812 
1813   for (i = 1; i < len; i++)
1814     {
1815       tree ptr = (*SSANAMES (fn))[i];
1816 
1817       if (ptr == NULL_TREE
1818 	  || SSA_NAME_IN_FREE_LIST (ptr)
1819 	  || virtual_operand_p (ptr))
1820 	continue;
1821 
1822       streamer_write_uhwi (ob, i);
1823       streamer_write_char_stream (ob->main_stream,
1824 				  SSA_NAME_IS_DEFAULT_DEF (ptr));
1825       if (SSA_NAME_VAR (ptr))
1826 	stream_write_tree (ob, SSA_NAME_VAR (ptr), true);
1827       else
1828 	/* ???  This drops SSA_NAME_IDENTIFIER on the floor.  */
1829 	stream_write_tree (ob, TREE_TYPE (ptr), true);
1830     }
1831 
1832   streamer_write_zero (ob);
1833 }
1834 
1835 
1836 /* Output a wide-int.  */
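/* Roughly: <precision> <len> <elt 0> ... <elt len-1>, the first two as
   unsigned HWIs and the elements as signed HWIs.  */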
1837 
1838 static void
1839 streamer_write_wi (struct output_block *ob,
1840 		   const widest_int &w)
1841 {
1842   int len = w.get_len ();
1843 
1844   streamer_write_uhwi (ob, w.get_precision ());
1845   streamer_write_uhwi (ob, len);
1846   for (int i = 0; i < len; i++)
1847     streamer_write_hwi (ob, w.elt (i));
1848 }
1849 
1850 
1851 /* Output the cfg.  */
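/* A sketch of what goes into the CFG stream:

     <profile status> <highest basic block number>
     for each basic block:
       <bb index> <number of successor edges>
       for each edge: <dest bb index> <probability> <count> <flags>
     -1
     <bb indices in next_bb order, starting after the entry block>  -1
     <number of loops>
     for each loop slot: -1 if unused, else <header bb index> followed
       by the fields that copy_loop_info copies.  */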
1852 
1853 static void
1854 output_cfg (struct output_block *ob, struct function *fn)
1855 {
1856   struct lto_output_stream *tmp_stream = ob->main_stream;
1857   basic_block bb;
1858 
1859   ob->main_stream = ob->cfg_stream;
1860 
1861   streamer_write_enum (ob->main_stream, profile_status_d, PROFILE_LAST,
1862 		       profile_status_for_fn (fn));
1863 
1864   /* Output the number of the highest basic block.  */
1865   streamer_write_uhwi (ob, last_basic_block_for_fn (fn));
1866 
1867   FOR_ALL_BB_FN (bb, fn)
1868     {
1869       edge_iterator ei;
1870       edge e;
1871 
1872       streamer_write_hwi (ob, bb->index);
1873 
1874       /* Output the successors and the edge flags.  */
1875       streamer_write_uhwi (ob, EDGE_COUNT (bb->succs));
1876       FOR_EACH_EDGE (e, ei, bb->succs)
1877 	{
1878 	  streamer_write_uhwi (ob, e->dest->index);
1879 	  streamer_write_hwi (ob, e->probability);
1880 	  streamer_write_gcov_count (ob, e->count);
1881 	  streamer_write_uhwi (ob, e->flags);
1882 	}
1883     }
1884 
1885   streamer_write_hwi (ob, -1);
1886 
1887   bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
1888   while (bb->next_bb)
1889     {
1890       streamer_write_hwi (ob, bb->next_bb->index);
1891       bb = bb->next_bb;
1892     }
1893 
1894   streamer_write_hwi (ob, -1);
1895 
1896   /* ???  The cfgloop interface is tied to cfun.  */
1897   gcc_assert (cfun == fn);
1898 
1899   /* Output the number of loops.  */
1900   streamer_write_uhwi (ob, number_of_loops (fn));
1901 
1902   /* Output each loop, skipping the tree root which has number zero.  */
1903   for (unsigned i = 1; i < number_of_loops (fn); ++i)
1904     {
1905       struct loop *loop = get_loop (fn, i);
1906 
1907       /* Write the index of the loop header.  That's enough to rebuild
1908          the loop tree on the reader side.  Stream -1 for an unused
1909 	 loop entry.  */
1910       if (!loop)
1911 	{
1912 	  streamer_write_hwi (ob, -1);
1913 	  continue;
1914 	}
1915       else
1916 	streamer_write_hwi (ob, loop->header->index);
1917 
1918       /* Write everything copy_loop_info copies.  */
1919       streamer_write_enum (ob->main_stream,
1920 			   loop_estimation, EST_LAST, loop->estimate_state);
1921       streamer_write_hwi (ob, loop->any_upper_bound);
1922       if (loop->any_upper_bound)
1923 	streamer_write_wi (ob, loop->nb_iterations_upper_bound);
1924       streamer_write_hwi (ob, loop->any_estimate);
1925       if (loop->any_estimate)
1926 	streamer_write_wi (ob, loop->nb_iterations_estimate);
1927 
1928       /* Write OMP SIMD related info.  */
1929       streamer_write_hwi (ob, loop->safelen);
1930       streamer_write_hwi (ob, loop->dont_vectorize);
1931       streamer_write_hwi (ob, loop->force_vectorize);
1932       stream_write_tree (ob, loop->simduid, true);
1933     }
1934 
1935   ob->main_stream = tmp_stream;
1936 }
1937 
1938 
1939 /* Write out the section header and the accumulated streams using OB.
1940    If the section type is for a function body, FN is that function's decl.  */
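/* The section produced here is laid out as

     struct lto_function_header
     [ CFG stream ]            (function-body sections only)
     main stream
     string stream

   with the sizes of the individual streams recorded in the header.  */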
1941 
1942 void
1943 produce_asm (struct output_block *ob, tree fn)
1944 {
1945   enum lto_section_type section_type = ob->section_type;
1946   struct lto_function_header header;
1947   char *section_name;
1948 
1949   if (section_type == LTO_section_function_body)
1950     {
1951       const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fn));
1952       section_name = lto_get_section_name (section_type, name, NULL);
1953     }
1954   else
1955     section_name = lto_get_section_name (section_type, NULL, NULL);
1956 
1957   lto_begin_section (section_name, !flag_wpa);
1958   free (section_name);
1959 
1960   /* The entire header stream is computed here.  */
1961   memset (&header, 0, sizeof (struct lto_function_header));
1962 
1963   /* Write the header.  */
1964   header.major_version = LTO_major_version;
1965   header.minor_version = LTO_minor_version;
1966 
1967   if (section_type == LTO_section_function_body)
1968     header.cfg_size = ob->cfg_stream->total_size;
1969   header.main_size = ob->main_stream->total_size;
1970   header.string_size = ob->string_stream->total_size;
1971   lto_write_data (&header, sizeof header);
1972 
1973   /* Put all of the gimple and the string table out to the asm file as
1974      a block of text.  */
1975   if (section_type == LTO_section_function_body)
1976     lto_write_stream (ob->cfg_stream);
1977   lto_write_stream (ob->main_stream);
1978   lto_write_stream (ob->string_stream);
1979 
1980   lto_end_section ();
1981 }
1982 
1983 
1984 /* Output the base body of struct function FN using output block OB.  */
1985 
1986 static void
1987 output_struct_function_base (struct output_block *ob, struct function *fn)
1988 {
1989   struct bitpack_d bp;
1990   unsigned i;
1991   tree t;
1992 
1993   /* Output the static chain and non-local goto save area.  */
1994   stream_write_tree (ob, fn->static_chain_decl, true);
1995   stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
1996 
1997   /* Output all the local variables in the function.  */
1998   streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
1999   FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
2000     stream_write_tree (ob, t, true);
2001 
2002   /* Output current IL state of the function.  */
2003   streamer_write_uhwi (ob, fn->curr_properties);
2004 
2005   /* Write all the attributes for FN.  */
2006   bp = bitpack_create (ob->main_stream);
2007   bp_pack_value (&bp, fn->is_thunk, 1);
2008   bp_pack_value (&bp, fn->has_local_explicit_reg_vars, 1);
2009   bp_pack_value (&bp, fn->returns_pcc_struct, 1);
2010   bp_pack_value (&bp, fn->returns_struct, 1);
2011   bp_pack_value (&bp, fn->can_throw_non_call_exceptions, 1);
2012   bp_pack_value (&bp, fn->can_delete_dead_exceptions, 1);
2013   bp_pack_value (&bp, fn->always_inline_functions_inlined, 1);
2014   bp_pack_value (&bp, fn->after_inlining, 1);
2015   bp_pack_value (&bp, fn->stdarg, 1);
2016   bp_pack_value (&bp, fn->has_nonlocal_label, 1);
2017   bp_pack_value (&bp, fn->has_forced_label_in_static, 1);
2018   bp_pack_value (&bp, fn->calls_alloca, 1);
2019   bp_pack_value (&bp, fn->calls_setjmp, 1);
2020   bp_pack_value (&bp, fn->has_force_vectorize_loops, 1);
2021   bp_pack_value (&bp, fn->has_simduid_loops, 1);
2022   bp_pack_value (&bp, fn->va_list_fpr_size, 8);
2023   bp_pack_value (&bp, fn->va_list_gpr_size, 8);
2024   bp_pack_value (&bp, fn->last_clique, sizeof (short) * 8);
2025 
2026   /* Output the function start and end loci.  */
2027   stream_output_location (ob, &bp, fn->function_start_locus);
2028   stream_output_location (ob, &bp, fn->function_end_locus);
2029 
2030   streamer_write_bitpack (&bp);
2031 }
2032 
2033 
2034 /* Output the body of function NODE->DECL.  */
2035 
2036 static void
2037 output_function (struct cgraph_node *node)
2038 {
2039   tree function;
2040   struct function *fn;
2041   basic_block bb;
2042   struct output_block *ob;
2043 
2044   function = node->decl;
2045   fn = DECL_STRUCT_FUNCTION (function);
2046   ob = create_output_block (LTO_section_function_body);
2047 
2048   clear_line_info (ob);
2049   ob->symbol = node;
2050 
2051   gcc_assert (current_function_decl == NULL_TREE && cfun == NULL);
2052 
2053   /* Set current_function_decl and cfun.  */
2054   push_cfun (fn);
2055 
2056   /* Make string 0 be a NULL string.  */
2057   streamer_write_char_stream (ob->string_stream, 0);
2058 
2059   streamer_write_record_start (ob, LTO_function);
2060 
2061   /* Output decls for parameters and args.  */
2062   stream_write_tree (ob, DECL_RESULT (function), true);
2063   streamer_write_chain (ob, DECL_ARGUMENTS (function), true);
2064 
2065   /* Output DECL_INITIAL for the function, which contains the tree of
2066      lexical scopes.  */
2067   stream_write_tree (ob, DECL_INITIAL (function), true);
2068 
2069   /* We also stream abstract functions, for which we only stream the
2070      parts needed for debug info.  */
2071   if (gimple_has_body_p (function))
2072     {
2073       streamer_write_uhwi (ob, 1);
2074       output_struct_function_base (ob, fn);
2075 
2076       /* Output all the SSA names used in the function.  */
2077       output_ssa_names (ob, fn);
2078 
2079       /* Output any exception handling regions.  */
2080       output_eh_regions (ob, fn);
2081 
2082 
2083       /* We will renumber the statements.  The code that does this uses
2084 	 the same ordering that we use for serializing them so we can use
2085 	 the same code on the other end and not have to write out the
2086 	 statement numbers.  We do not assign UIDs to PHIs here because
2087 	 virtual PHIs get re-computed on-the-fly which would make numbers
2088 	 inconsistent.  */
2089       set_gimple_stmt_max_uid (cfun, 0);
2090       FOR_ALL_BB_FN (bb, cfun)
2091 	{
2092 	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2093 	       gsi_next (&gsi))
2094 	    {
2095 	      gphi *stmt = gsi.phi ();
2096 
2097 	      /* Virtual PHIs are not going to be streamed.  */
2098 	      if (!virtual_operand_p (gimple_phi_result (stmt)))
2099 	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2100 	    }
2101 	  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
2102 	       gsi_next (&gsi))
2103 	    {
2104 	      gimple *stmt = gsi_stmt (gsi);
2105 	      gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2106 	    }
2107 	}
2108       /* To avoid keeping duplicate gimple IDs in the statements, renumber
2109 	 virtual phis now.  */
2110       FOR_ALL_BB_FN (bb, cfun)
2111 	{
2112 	  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
2113 	       gsi_next (&gsi))
2114 	    {
2115 	      gphi *stmt = gsi.phi ();
2116 	      if (virtual_operand_p (gimple_phi_result (stmt)))
2117 	        gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
2118 	    }
2119 	}
2120 
2121       /* Output the code for the function.  */
2122       FOR_ALL_BB_FN (bb, fn)
2123 	output_bb (ob, bb, fn);
2124 
2125       /* The terminator for this function.  */
2126       streamer_write_record_start (ob, LTO_null);
2127 
2128       output_cfg (ob, fn);
2129 
2130       pop_cfun ();
2131    }
2132   else
2133     streamer_write_uhwi (ob, 0);
2134 
2135   /* Create a section to hold the pickled output of this function.   */
2136   produce_asm (ob, function);
2137 
2138   destroy_output_block (ob);
2139 }
2140 
2141 /* Output the initializer of variable NODE->DECL.  */
2142 
2143 static void
2144 output_constructor (struct varpool_node *node)
2145 {
2146   tree var = node->decl;
2147   struct output_block *ob;
2148 
2149   ob = create_output_block (LTO_section_function_body);
2150 
2151   clear_line_info (ob);
2152   ob->symbol = node;
2153 
2154   /* Make string 0 be a NULL string.  */
2155   streamer_write_char_stream (ob->string_stream, 0);
2156 
2157   /* Output DECL_INITIAL for the variable, which holds the
2158      initializer.  */
2159   stream_write_tree (ob, DECL_INITIAL (var), true);
2160 
2161   /* Create a section to hold the pickled output of this variable.  */
2162   produce_asm (ob, var);
2163 
2164   destroy_output_block (ob);
2165 }
2166 
2167 
2168 /* Emit toplevel asms.  */
2169 
2170 void
2171 lto_output_toplevel_asms (void)
2172 {
2173   struct output_block *ob;
2174   struct asm_node *can;
2175   char *section_name;
2176   struct lto_simple_header_with_strings header;
2177 
2178   if (!symtab->first_asm_symbol ())
2179     return;
2180 
2181   ob = create_output_block (LTO_section_asm);
2182 
2183   /* Make string 0 be a NULL string.  */
2184   streamer_write_char_stream (ob->string_stream, 0);
2185 
2186   for (can = symtab->first_asm_symbol (); can; can = can->next)
2187     {
2188       streamer_write_string_cst (ob, ob->main_stream, can->asm_str);
2189       streamer_write_hwi (ob, can->order);
2190     }
2191 
2192   streamer_write_string_cst (ob, ob->main_stream, NULL_TREE);
2193 
2194   section_name = lto_get_section_name (LTO_section_asm, NULL, NULL);
2195   lto_begin_section (section_name, !flag_wpa);
2196   free (section_name);
2197 
2198   /* The entire header stream is computed here.  */
2199   memset (&header, 0, sizeof (header));
2200 
2201   /* Write the header.  */
2202   header.major_version = LTO_major_version;
2203   header.minor_version = LTO_minor_version;
2204 
2205   header.main_size = ob->main_stream->total_size;
2206   header.string_size = ob->string_stream->total_size;
2207   lto_write_data (&header, sizeof header);
2208 
2209   /* Put all of the gimple and the string table out to the asm file as
2210      a block of text.  */
2211   lto_write_stream (ob->main_stream);
2212   lto_write_stream (ob->string_stream);
2213 
2214   lto_end_section ();
2215 
2216   destroy_output_block (ob);
2217 }
2218 
2219 
2220 /* Copy the function body or variable constructor of NODE without deserializing. */
2221 
2222 static void
2223 copy_function_or_variable (struct symtab_node *node)
2224 {
2225   tree function = node->decl;
2226   struct lto_file_decl_data *file_data = node->lto_file_data;
2227   const char *data;
2228   size_t len;
2229   const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function));
2230   char *section_name =
2231     lto_get_section_name (LTO_section_function_body, name, NULL);
2232   size_t i, j;
2233   struct lto_in_decl_state *in_state;
2234   struct lto_out_decl_state *out_state = lto_get_out_decl_state ();
2235 
2236   lto_begin_section (section_name, false);
2237   free (section_name);
2238 
2239   /* We may have renamed the declaration, e.g., a static function.  */
2240   name = lto_get_decl_name_mapping (file_data, name);
2241 
2242   data = lto_get_raw_section_data (file_data, LTO_section_function_body,
2243                                    name, &len);
2244   gcc_assert (data);
2245 
2246   /* Do a bit copy of the function body.  */
2247   lto_write_raw_data (data, len);
2248 
2249   /* Copy decls. */
2250   in_state =
2251     lto_get_function_in_decl_state (node->lto_file_data, function);
2252   gcc_assert (in_state);
2253   out_state->compressed = in_state->compressed;
2254 
2255   for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2256     {
2257       size_t n = vec_safe_length (in_state->streams[i]);
2258       vec<tree, va_gc> *trees = in_state->streams[i];
2259       struct lto_tree_ref_encoder *encoder = &(out_state->streams[i]);
2260 
2261       /* The out state must have the same indices as the in state.
2262 	 So just copy the vector.  All the encoders in the in state
2263 	 must be empty when we reach here.  */
2264       gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
2265       encoder->trees.reserve_exact (n);
2266       for (j = 0; j < n; j++)
2267 	encoder->trees.safe_push ((*trees)[j]);
2268     }
2269 
2270   lto_free_raw_section_data (file_data, LTO_section_function_body, name,
2271 			     data, len);
2272   lto_end_section ();
2273 }
2274 
2275 /* Wrap symbol references in *TP inside a type-preserving MEM_REF.  */
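/* Conceptually, a reference such as PUB.F, with PUB a public variable of
   type T, is rewritten to MEM[(T *)&PUB].F; presumably this keeps the
   access type explicit even if the prevailing definition of PUB at link
   time has a different type.  */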
2276 
2277 static tree
2278 wrap_refs (tree *tp, int *ws, void *)
2279 {
2280   tree t = *tp;
2281   if (handled_component_p (t)
2282       && TREE_CODE (TREE_OPERAND (t, 0)) == VAR_DECL
2283       && TREE_PUBLIC (TREE_OPERAND (t, 0)))
2284     {
2285       tree decl = TREE_OPERAND (t, 0);
2286       tree ptrtype = build_pointer_type (TREE_TYPE (decl));
2287       TREE_OPERAND (t, 0) = build2 (MEM_REF, TREE_TYPE (decl),
2288 				    build1 (ADDR_EXPR, ptrtype, decl),
2289 				    build_int_cst (ptrtype, 0));
2290       TREE_THIS_VOLATILE (TREE_OPERAND (t, 0)) = TREE_THIS_VOLATILE (decl);
2291       *ws = 0;
2292     }
2293   else if (TREE_CODE (t) == CONSTRUCTOR)
2294     ;
2295   else if (!EXPR_P (t))
2296     *ws = 0;
2297   return NULL_TREE;
2298 }
2299 
2300 /* Main entry point from the pass manager.  */
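/* In outline: for every symbol in the symtab encoder whose body or
   initializer has to go out, push a fresh out-decl-state, then either
   stream the IL (output_function / output_constructor) or copy the
   already-streamed section verbatim (copy_function_or_variable), and
   record the state.  The symbol table and offload tables are emitted
   afterwards.  */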
2301 
2302 void
2303 lto_output (void)
2304 {
2305   struct lto_out_decl_state *decl_state;
2306   bitmap output = NULL;
2307   int i, n_nodes;
2308   lto_symtab_encoder_t encoder = lto_get_out_decl_state ()->symtab_node_encoder;
2309 
2310   if (flag_checking)
2311     output = lto_bitmap_alloc ();
2312 
2313   /* Initialize the streamer.  */
2314   lto_streamer_init ();
2315 
2316   n_nodes = lto_symtab_encoder_size (encoder);
2317   /* Process only the functions with bodies.  */
2318   for (i = 0; i < n_nodes; i++)
2319     {
2320       symtab_node *snode = lto_symtab_encoder_deref (encoder, i);
2321       if (cgraph_node *node = dyn_cast <cgraph_node *> (snode))
2322 	{
2323 	  if (lto_symtab_encoder_encode_body_p (encoder, node)
2324 	      && !node->alias
2325 	      && (!node->thunk.thunk_p || !node->thunk.add_pointer_bounds_args))
2326 	    {
2327 	      if (flag_checking)
2328 		{
2329 		  gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2330 		  bitmap_set_bit (output, DECL_UID (node->decl));
2331 		}
2332 	      decl_state = lto_new_out_decl_state ();
2333 	      lto_push_out_decl_state (decl_state);
2334 	      if (gimple_has_body_p (node->decl) || !flag_wpa
2335 		  /* Thunks have no body but they may be synthesized
2336 		     at WPA time.  */
2337 		  || DECL_ARGUMENTS (node->decl))
2338 		output_function (node);
2339 	      else
2340 		copy_function_or_variable (node);
2341 	      gcc_assert (lto_get_out_decl_state () == decl_state);
2342 	      lto_pop_out_decl_state ();
2343 	      lto_record_function_out_decl_state (node->decl, decl_state);
2344 	    }
2345 	}
2346       else if (varpool_node *node = dyn_cast <varpool_node *> (snode))
2347 	{
2348 	  /* Wrap symbol references inside the ctor in a type
2349 	     preserving MEM_REF.  */
2350 	  tree ctor = DECL_INITIAL (node->decl);
2351 	  if (ctor && !in_lto_p)
2352 	    walk_tree (&ctor, wrap_refs, NULL, NULL);
2353 	  if (get_symbol_initial_value (encoder, node->decl) == error_mark_node
2354 	      && lto_symtab_encoder_encode_initializer_p (encoder, node)
2355 	      && !node->alias)
2356 	    {
2357 	      timevar_push (TV_IPA_LTO_CTORS_OUT);
2358 	      if (flag_checking)
2359 		{
2360 		  gcc_assert (!bitmap_bit_p (output, DECL_UID (node->decl)));
2361 		  bitmap_set_bit (output, DECL_UID (node->decl));
2362 		}
2363 	      decl_state = lto_new_out_decl_state ();
2364 	      lto_push_out_decl_state (decl_state);
2365 	      if (DECL_INITIAL (node->decl) != error_mark_node
2366 		  || !flag_wpa)
2367 		output_constructor (node);
2368 	      else
2369 		copy_function_or_variable (node);
2370 	      gcc_assert (lto_get_out_decl_state () == decl_state);
2371 	      lto_pop_out_decl_state ();
2372 	      lto_record_function_out_decl_state (node->decl, decl_state);
2373 	      timevar_pop (TV_IPA_LTO_CTORS_OUT);
2374 	    }
2375 	}
2376     }
2377 
2378   /* Emit the callgraph after emitting function bodies.  This needs to
2379      be done now to make sure that all the statements in every function
2380      have been renumbered so that edges can be associated with call
2381      statements using the statement UIDs.  */
2382   output_symtab ();
2383 
2384   output_offload_tables ();
2385 
2386   if (flag_checking)
2387     lto_bitmap_free (output);
2389 }
2390 
2391 /* Write each node encoded in ENCODER to OB, as well as those reachable
2392    from it and required for correct representation of its semantics.
2393    Each node in ENCODER must be a global declaration or a type.  A node
2394    is written only once, even if it appears multiple times in the
2395    vector.  Certain transitively-reachable nodes, such as those
2396    representing expressions, may be duplicated, but such nodes
2397    must not appear in ENCODER itself.  */
2398 
2399 static void
2400 write_global_stream (struct output_block *ob,
2401 		     struct lto_tree_ref_encoder *encoder)
2402 {
2403   tree t;
2404   size_t index;
2405   const size_t size = lto_tree_ref_encoder_size (encoder);
2406 
2407   for (index = 0; index < size; index++)
2408     {
2409       t = lto_tree_ref_encoder_get_tree (encoder, index);
2410       if (!streamer_tree_cache_lookup (ob->writer_cache, t, NULL))
2411 	stream_write_tree (ob, t, false);
2412     }
2413 }
2414 
2415 
2416 /* Write a sequence of indices into the globals vector corresponding
2417    to the trees in ENCODER.  These are used by the reader to map the
2418    indices used to refer to global entities within function bodies to
2419    their referents.  */
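/* The data written is simply SIZE followed by SIZE cache slot numbers,
   all as 32-bit unsigned integers.  */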
2420 
2421 static void
2422 write_global_references (struct output_block *ob,
2423  			 struct lto_tree_ref_encoder *encoder)
2424 {
2425   tree t;
2426   uint32_t index;
2427   const uint32_t size = lto_tree_ref_encoder_size (encoder);
2428 
2429   /* Write size and slot indexes as 32-bit unsigned numbers. */
2430   uint32_t *data = XNEWVEC (uint32_t, size + 1);
2431   data[0] = size;
2432 
2433   for (index = 0; index < size; index++)
2434     {
2435       unsigned slot_num;
2436 
2437       t = lto_tree_ref_encoder_get_tree (encoder, index);
2438       streamer_tree_cache_lookup (ob->writer_cache, t, &slot_num);
2439       gcc_assert (slot_num != (unsigned)-1);
2440       data[index + 1] = slot_num;
2441     }
2442 
2443   lto_write_data (data, sizeof (uint32_t) * (size + 1));
2444   free (data);
2445 }
2446 
2447 
2448 /* Write all the streams in an lto_out_decl_state STATE using
2449    output block OB.  */
2450 
2451 void
2452 lto_output_decl_state_streams (struct output_block *ob,
2453 			       struct lto_out_decl_state *state)
2454 {
2455   int i;
2456 
2457   for (i = 0;  i < LTO_N_DECL_STREAMS; i++)
2458     write_global_stream (ob, &state->streams[i]);
2459 }
2460 
2461 
2462 /* Write all the references in an lto_out_decl_state STATE using
2463    output block OB.  */
2464 
2465 void
2466 lto_output_decl_state_refs (struct output_block *ob,
2467 			    struct lto_out_decl_state *state)
2468 {
2469   unsigned i;
2470   unsigned ref;
2471   tree decl;
2472 
2473   /* Write reference to FUNCTION_DECL.  If there is no function,
2474      write reference to void_type_node.  */
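  /* The reference is encoded in a single 32-bit word: the cache slot of
     DECL shifted left by one, with STATE->compressed in the low bit.  */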
2475   decl = (state->fn_decl) ? state->fn_decl : void_type_node;
2476   streamer_tree_cache_lookup (ob->writer_cache, decl, &ref);
2477   gcc_assert (ref != (unsigned)-1);
2478   ref = ref * 2 + (state->compressed ? 1 : 0);
2479   lto_write_data (&ref, sizeof (uint32_t));
2480 
2481   for (i = 0;  i < LTO_N_DECL_STREAMS; i++)
2482     write_global_references (ob, &state->streams[i]);
2483 }
2484 
2485 
2486 /* Return the written size of STATE. */
2487 
2488 static size_t
2489 lto_out_decl_state_written_size (struct lto_out_decl_state *state)
2490 {
2491   int i;
2492   size_t size;
2493 
2494   size = sizeof (int32_t);	/* fn_ref. */
2495   for (i = 0; i < LTO_N_DECL_STREAMS; i++)
2496     {
2497       size += sizeof (int32_t); /* vector size. */
2498       size += (lto_tree_ref_encoder_size (&state->streams[i])
2499 	       * sizeof (int32_t));
2500     }
2501   return size;
2502 }
2503 
2504 
2505 /* Write symbol T into the symbol table section, using CACHE to look up
2506    its slot.  SEEN holds the assembler names written so far.  */
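/* Each entry written here is laid out as

     <NUL-terminated assembler name> <NUL-terminated comdat group>
     <1-byte kind> <1-byte visibility> <8-byte size> <4-byte slot number>

   which is the format the LTO linker plugin expects to parse.  */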
2507 
2508 static void
2509 write_symbol (struct streamer_tree_cache_d *cache,
2510 	      tree t, hash_set<const char *> *seen, bool alias)
2511 {
2512   const char *name;
2513   enum gcc_plugin_symbol_kind kind;
2514   enum gcc_plugin_symbol_visibility visibility = GCCPV_DEFAULT;
2515   unsigned slot_num;
2516   uint64_t size;
2517   const char *comdat;
2518   unsigned char c;
2519 
2520   gcc_checking_assert (TREE_PUBLIC (t)
2521 		       && !is_builtin_fn (t)
2522 		       && !DECL_ABSTRACT_P (t)
2523 		       && (!VAR_P (t) || !DECL_HARD_REGISTER (t)));
2524 
2525   gcc_assert (TREE_CODE (t) == VAR_DECL
2526 	      || TREE_CODE (t) == FUNCTION_DECL);
2527 
2528   name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (t));
2529 
2530   /* This behaves like assemble_name_raw in varasm.c, performing the
2531      same name manipulations that ASM_OUTPUT_LABELREF does. */
2532   name = IDENTIFIER_POINTER ((*targetm.asm_out.mangle_assembler_name) (name));
2533 
2534   if (seen->add (name))
2535     return;
2536 
2537   streamer_tree_cache_lookup (cache, t, &slot_num);
2538   gcc_assert (slot_num != (unsigned)-1);
2539 
2540   if (DECL_EXTERNAL (t))
2541     {
2542       if (DECL_WEAK (t))
2543 	kind = GCCPK_WEAKUNDEF;
2544       else
2545 	kind = GCCPK_UNDEF;
2546     }
2547   else
2548     {
2549       if (DECL_WEAK (t))
2550 	kind = GCCPK_WEAKDEF;
2551       else if (DECL_COMMON (t))
2552 	kind = GCCPK_COMMON;
2553       else
2554 	kind = GCCPK_DEF;
2555 
2556       /* When something is defined, it should have a node attached.  */
2557       gcc_assert (alias || TREE_CODE (t) != VAR_DECL
2558 		  || varpool_node::get (t)->definition);
2559       gcc_assert (alias || TREE_CODE (t) != FUNCTION_DECL
2560 		  || (cgraph_node::get (t)
2561 		      && cgraph_node::get (t)->definition));
2562     }
2563 
2564   /* Imitate what default_elf_asm_output_external does.
2565      When a symbol is external, we need to output it with DEFAULT
2566      visibility when compiling with -fvisibility=default, but with HIDDEN
2567      visibility when the symbol has attribute (visibility("hidden"))
2568      specified.  targetm.binds_local_p checks DECL_VISIBILITY_SPECIFIED
2569      and gets this right.  */
2570 
2571   if (DECL_EXTERNAL (t)
2572       && !targetm.binds_local_p (t))
2573     visibility = GCCPV_DEFAULT;
2574   else
2575     switch (DECL_VISIBILITY (t))
2576       {
2577       case VISIBILITY_DEFAULT:
2578 	visibility = GCCPV_DEFAULT;
2579 	break;
2580       case VISIBILITY_PROTECTED:
2581 	visibility = GCCPV_PROTECTED;
2582 	break;
2583       case VISIBILITY_HIDDEN:
2584 	visibility = GCCPV_HIDDEN;
2585 	break;
2586       case VISIBILITY_INTERNAL:
2587 	visibility = GCCPV_INTERNAL;
2588 	break;
2589       }
2590 
2591   if (kind == GCCPK_COMMON
2592       && DECL_SIZE_UNIT (t)
2593       && TREE_CODE (DECL_SIZE_UNIT (t)) == INTEGER_CST)
2594     size = TREE_INT_CST_LOW (DECL_SIZE_UNIT (t));
2595   else
2596     size = 0;
2597 
2598   if (DECL_ONE_ONLY (t))
2599     comdat = IDENTIFIER_POINTER (decl_comdat_group_id (t));
2600   else
2601     comdat = "";
2602 
2603   lto_write_data (name, strlen (name) + 1);
2604   lto_write_data (comdat, strlen (comdat) + 1);
2605   c = (unsigned char) kind;
2606   lto_write_data (&c, 1);
2607   c = (unsigned char) visibility;
2608   lto_write_data (&c, 1);
2609   lto_write_data (&size, 8);
2610   lto_write_data (&slot_num, 4);
2611 }
2612 
2613 /* Write an IL symbol table to OB, covering the symbols in OB's
2614    symtab encoder.  */
2615 
2616 static void
2617 produce_symtab (struct output_block *ob)
2618 {
2619   struct streamer_tree_cache_d *cache = ob->writer_cache;
2620   char *section_name = lto_get_section_name (LTO_section_symtab, NULL, NULL);
2621   lto_symtab_encoder_t encoder = ob->decl_state->symtab_node_encoder;
2622   lto_symtab_encoder_iterator lsei;
2623 
2624   lto_begin_section (section_name, false);
2625   free (section_name);
2626 
2627   hash_set<const char *> seen;
2628 
2629   /* Write the symbol table.
2630      First write everything defined and then all declarations.
2631      This is necessary to handle cases where we have duplicated symbols.  */
2632   for (lsei = lsei_start (encoder);
2633        !lsei_end_p (lsei); lsei_next (&lsei))
2634     {
2635       symtab_node *node = lsei_node (lsei);
2636 
2637       if (DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2638 	continue;
2639       write_symbol (cache, node->decl, &seen, false);
2640     }
2641   for (lsei = lsei_start (encoder);
2642        !lsei_end_p (lsei); lsei_next (&lsei))
2643     {
2644       symtab_node *node = lsei_node (lsei);
2645 
2646       if (!DECL_EXTERNAL (node->decl) || !node->output_to_lto_symbol_table_p ())
2647 	continue;
2648       write_symbol (cache, node->decl, &seen, false);
2649     }
2650 
2651   lto_end_section ();
2652 }
2653 
2654 
2655 /* Init the streamer_mode_table for output, where we collect info on what
2656    machine_mode values have been streamed.  */
2657 void
2658 lto_output_init_mode_table (void)
2659 {
2660   memset (streamer_mode_table, '\0', MAX_MACHINE_MODE);
2661 }
2662 
2663 
2664 /* Write the mode table.  */
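/* A sketch of the section: the main stream holds a bitpacked sequence of
   mode descriptions -- mode value, class, size, precision, inner mode,
   nunits, IBIT/FBIT or the real format name where applicable, and the
   mode name -- written in two passes so that inner modes come first, and
   terminated by VOIDmode.  The section itself is the usual simple header
   followed by the main and string streams.  */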
2665 static void
2666 lto_write_mode_table (void)
2667 {
2668   struct output_block *ob;
2669   ob = create_output_block (LTO_section_mode_table);
2670   bitpack_d bp = bitpack_create (ob->main_stream);
2671 
2672   /* Ensure that whenever GET_MODE_INNER (m) != m the inner mode is
2673      marked as well.  */
2674   for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2675     if (streamer_mode_table[i])
2676       {
2677 	machine_mode m = (machine_mode) i;
2678 	if (GET_MODE_INNER (m) != m)
2679 	  streamer_mode_table[(int) GET_MODE_INNER (m)] = 1;
2680       }
2681   /* First stream modes that have GET_MODE_INNER (m) == m,
2682      so that we can refer to them afterwards.  */
2683   for (int pass = 0; pass < 2; pass++)
2684     for (int i = 0; i < (int) MAX_MACHINE_MODE; i++)
2685       if (streamer_mode_table[i] && i != (int) VOIDmode && i != (int) BLKmode)
2686 	{
2687 	  machine_mode m = (machine_mode) i;
2688 	  if ((GET_MODE_INNER (m) == m) ^ (pass == 0))
2689 	    continue;
2690 	  bp_pack_value (&bp, m, 8);
2691 	  bp_pack_enum (&bp, mode_class, MAX_MODE_CLASS, GET_MODE_CLASS (m));
2692 	  bp_pack_value (&bp, GET_MODE_SIZE (m), 8);
2693 	  bp_pack_value (&bp, GET_MODE_PRECISION (m), 16);
2694 	  bp_pack_value (&bp, GET_MODE_INNER (m), 8);
2695 	  bp_pack_value (&bp, GET_MODE_NUNITS (m), 8);
2696 	  switch (GET_MODE_CLASS (m))
2697 	    {
2698 	    case MODE_FRACT:
2699 	    case MODE_UFRACT:
2700 	    case MODE_ACCUM:
2701 	    case MODE_UACCUM:
2702 	      bp_pack_value (&bp, GET_MODE_IBIT (m), 8);
2703 	      bp_pack_value (&bp, GET_MODE_FBIT (m), 8);
2704 	      break;
2705 	    case MODE_FLOAT:
2706 	    case MODE_DECIMAL_FLOAT:
2707 	      bp_pack_string (ob, &bp, REAL_MODE_FORMAT (m)->name, true);
2708 	      break;
2709 	    default:
2710 	      break;
2711 	    }
2712 	  bp_pack_string (ob, &bp, GET_MODE_NAME (m), true);
2713 	}
2714   bp_pack_value (&bp, VOIDmode, 8);
2715 
2716   streamer_write_bitpack (&bp);
2717 
2718   char *section_name
2719     = lto_get_section_name (LTO_section_mode_table, NULL, NULL);
2720   lto_begin_section (section_name, !flag_wpa);
2721   free (section_name);
2722 
2723   /* The entire header stream is computed here.  */
2724   struct lto_simple_header_with_strings header;
2725   memset (&header, 0, sizeof (header));
2726 
2727   /* Write the header.  */
2728   header.major_version = LTO_major_version;
2729   header.minor_version = LTO_minor_version;
2730 
2731   header.main_size = ob->main_stream->total_size;
2732   header.string_size = ob->string_stream->total_size;
2733   lto_write_data (&header, sizeof header);
2734 
2735   /* Put all of the gimple and the string table out to the asm file as
2736      a block of text.  */
2737   lto_write_stream (ob->main_stream);
2738   lto_write_stream (ob->string_stream);
2739 
2740   lto_end_section ();
2741   destroy_output_block (ob);
2742 }
2743 
2744 
2745 /* This pass is run after all of the functions are serialized and all
2746    of the IPA passes have written their serialized forms.  This pass
2747    causes the vector of all of the global decls and types used from
2748    this file to be written into a section that can then be read back
2749    in to recover these on the other side.  */
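/* A sketch of the LTO_section_decls layout produced below:

     struct lto_decl_header
     <number of decl states, int32>
     <references for the global out-decl-state>
     <references for each per-function out-decl-state>
     main stream              (the pickled global trees)
     string stream  */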
2750 
2751 void
2752 produce_asm_for_decls (void)
2753 {
2754   struct lto_out_decl_state *out_state;
2755   struct lto_out_decl_state *fn_out_state;
2756   struct lto_decl_header header;
2757   char *section_name;
2758   struct output_block *ob;
2759   unsigned idx, num_fns;
2760   size_t decl_state_size;
2761   int32_t num_decl_states;
2762 
2763   ob = create_output_block (LTO_section_decls);
2764 
2765   memset (&header, 0, sizeof (struct lto_decl_header));
2766 
2767   section_name = lto_get_section_name (LTO_section_decls, NULL, NULL);
2768   lto_begin_section (section_name, !flag_wpa);
2769   free (section_name);
2770 
2771   /* Make string 0 be a NULL string.  */
2772   streamer_write_char_stream (ob->string_stream, 0);
2773 
2774   gcc_assert (!alias_pairs);
2775 
2776   /* Get rid of the global decl state hash tables to save some memory.  */
2777   out_state = lto_get_out_decl_state ();
2778   for (int i = 0; i < LTO_N_DECL_STREAMS; i++)
2779     if (out_state->streams[i].tree_hash_table)
2780       {
2781 	delete out_state->streams[i].tree_hash_table;
2782 	out_state->streams[i].tree_hash_table = NULL;
2783       }
2784 
2785   /* Write the global symbols.  */
2786   lto_output_decl_state_streams (ob, out_state);
2787   num_fns = lto_function_decl_states.length ();
2788   for (idx = 0; idx < num_fns; idx++)
2789     {
2790       fn_out_state =
2791 	lto_function_decl_states[idx];
2792       lto_output_decl_state_streams (ob, fn_out_state);
2793     }
2794 
2795   header.major_version = LTO_major_version;
2796   header.minor_version = LTO_minor_version;
2797 
2798   /* Currently not used.  This field would allow us to preallocate
2799      the globals vector, so that it need not be resized as it is extended.  */
2800   header.num_nodes = -1;
2801 
2802   /* Compute the total size of all decl out states. */
2803   decl_state_size = sizeof (int32_t);
2804   decl_state_size += lto_out_decl_state_written_size (out_state);
2805   for (idx = 0; idx < num_fns; idx++)
2806     {
2807       fn_out_state =
2808 	lto_function_decl_states[idx];
2809       decl_state_size += lto_out_decl_state_written_size (fn_out_state);
2810     }
2811   header.decl_state_size = decl_state_size;
2812 
2813   header.main_size = ob->main_stream->total_size;
2814   header.string_size = ob->string_stream->total_size;
2815 
2816   lto_write_data (&header, sizeof header);
2817 
2818   /* Write the main out-decl state, followed by out-decl states of
2819      functions. */
2820   num_decl_states = num_fns + 1;
2821   lto_write_data (&num_decl_states, sizeof (num_decl_states));
2822   lto_output_decl_state_refs (ob, out_state);
2823   for (idx = 0; idx < num_fns; idx++)
2824     {
2825       fn_out_state = lto_function_decl_states[idx];
2826       lto_output_decl_state_refs (ob, fn_out_state);
2827     }
2828 
2829   lto_write_stream (ob->main_stream);
2830   lto_write_stream (ob->string_stream);
2831 
2832   lto_end_section ();
2833 
2834   /* Write the symbol table.  It is used by linker to determine dependencies
2835      and thus we can skip it for WPA.  */
2836   if (!flag_wpa)
2837     produce_symtab (ob);
2838 
2839   /* Write command line opts.  */
2840   lto_write_options ();
2841 
2842   /* Deallocate memory and clean up.  */
2843   for (idx = 0; idx < num_fns; idx++)
2844     {
2845       fn_out_state =
2846 	lto_function_decl_states[idx];
2847       lto_delete_out_decl_state (fn_out_state);
2848     }
2849   lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
2850   lto_function_decl_states.release ();
2851   destroy_output_block (ob);
2852   if (lto_stream_offload_p)
2853     lto_write_mode_table ();
2854 }
2855