1 /* Read the GIMPLE representation from a file stream.
2 
3    Copyright (C) 2009-2018 Free Software Foundation, Inc.
4    Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5    Re-implemented by Diego Novillo <dnovillo@google.com>
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "cfghooks.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "gimple-streamer.h"
35 #include "toplev.h"
36 #include "gimple-iterator.h"
37 #include "tree-cfg.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "tree-ssa.h"
41 #include "except.h"
42 #include "cgraph.h"
43 #include "cfgloop.h"
44 #include "debug.h"
45 
46 
47 struct freeing_string_slot_hasher : string_slot_hasher
48 {
49   static inline void remove (value_type *);
50 };
51 
52 inline void
53 freeing_string_slot_hasher::remove (value_type *v)
54 {
55   free (v);
56 }
57 
58 /* The table to hold the file names.  */
59 static hash_table<freeing_string_slot_hasher> *file_name_hash_table;
60 
61 
/* Check that tag ACTUAL has one of the given values.  NTAGS is the
   number of valid tag values to check.  */
64 
65 void
66 lto_tag_check_set (enum LTO_tags actual, int ntags, ...)
67 {
68   va_list ap;
69   int i;
70 
71   va_start (ap, ntags);
72   for (i = 0; i < ntags; i++)
73     if ((unsigned) actual == va_arg (ap, unsigned))
74       {
75 	va_end (ap);
76 	return;
77       }
78 
79   va_end (ap);
80   internal_error ("bytecode stream: unexpected tag %s", lto_tag_name (actual));
81 }
82 
83 
/* Read LENGTH bytes from input block IB into ADDR.  */
85 
86 void
87 lto_input_data_block (struct lto_input_block *ib, void *addr, size_t length)
88 {
89   size_t i;
90   unsigned char *const buffer = (unsigned char *) addr;
91 
92   for (i = 0; i < length; i++)
93     buffer[i] = streamer_read_uchar (ib);
94 }
95 
96 
97 /* Lookup STRING in file_name_hash_table.  If found, return the existing
98    string, otherwise insert STRING as the canonical version.  */
99 
100 static const char *
101 canon_file_name (const char *string)
102 {
103   string_slot **slot;
104   struct string_slot s_slot;
105   size_t len = strlen (string);
106 
107   s_slot.s = string;
108   s_slot.len = len;
109 
110   slot = file_name_hash_table->find_slot (&s_slot, INSERT);
111   if (*slot == NULL)
112     {
113       char *saved_string;
114       struct string_slot *new_slot;
115 
116       saved_string = (char *) xmalloc (len + 1);
117       new_slot = XCNEW (struct string_slot);
118       memcpy (saved_string, string, len + 1);
119       new_slot->s = saved_string;
120       new_slot->len = len;
121       *slot = new_slot;
122       return saved_string;
123     }
124   else
125     {
126       struct string_slot *old_slot = *slot;
127       return old_slot->s;
128     }
129 }
130 
131 /* Pointer to currently alive instance of lto_location_cache.  */
132 
133 lto_location_cache *lto_location_cache::current_cache;
134 
/* Sort locations in source order.  Entries from the file and line of
   the last application sort first.  */
136 
137 int
138 lto_location_cache::cmp_loc (const void *pa, const void *pb)
139 {
140   const cached_location *a = ((const cached_location *)pa);
141   const cached_location *b = ((const cached_location *)pb);
142   const char *current_file = current_cache->current_file;
143   int current_line = current_cache->current_line;
144 
145   if (a->file == current_file && b->file != current_file)
146     return -1;
147   if (a->file != current_file && b->file == current_file)
148     return 1;
149   if (a->file == current_file && b->file == current_file)
150     {
151       if (a->line == current_line && b->line != current_line)
152 	return -1;
153       if (a->line != current_line && b->line == current_line)
154 	return 1;
155     }
156   if (a->file != b->file)
157     return strcmp (a->file, b->file);
158   if (a->sysp != b->sysp)
159     return a->sysp ? 1 : -1;
160   if (a->line != b->line)
161     return a->line - b->line;
162   return a->col - b->col;
163 }
164 
165 /* Apply all changes in location cache.  Add locations into linemap and patch
166    trees.  */
167 
168 bool
169 lto_location_cache::apply_location_cache ()
170 {
171   static const char *prev_file;
172   if (!loc_cache.length ())
173     return false;
174   if (loc_cache.length () > 1)
175     loc_cache.qsort (cmp_loc);
176 
177   for (unsigned int i = 0; i < loc_cache.length (); i++)
178     {
179       struct cached_location loc = loc_cache[i];
180 
181       if (current_file != loc.file)
182 	linemap_add (line_table, prev_file ? LC_RENAME : LC_ENTER,
183 		     loc.sysp, loc.file, loc.line);
184       else if (current_line != loc.line)
185 	{
186 	  int max = loc.col;
187 
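	  /* Find the widest column used by cached locations on this
	     line so that linemap_line_start reserves enough column
	     bits for all of them.  */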
188 	  for (unsigned int j = i + 1; j < loc_cache.length (); j++)
189 	    if (loc.file != loc_cache[j].file
190 		|| loc.line != loc_cache[j].line)
191 	      break;
192 	    else if (max < loc_cache[j].col)
193 	      max = loc_cache[j].col;
194 	  linemap_line_start (line_table, loc.line, max + 1);
195 	}
196       gcc_assert (*loc.loc == BUILTINS_LOCATION + 1);
197       if (current_file == loc.file && current_line == loc.line
198 	  && current_col == loc.col)
199 	*loc.loc = current_loc;
200       else
201         current_loc = *loc.loc = linemap_position_for_column (line_table,
202 							      loc.col);
203       current_line = loc.line;
204       prev_file = current_file = loc.file;
205       current_col = loc.col;
206     }
207   loc_cache.truncate (0);
208   accepted_length = 0;
209   return true;
210 }
211 
/* Tree merging did not succeed; mark all changes in the cache as accepted.  */
213 
214 void
215 lto_location_cache::accept_location_cache ()
216 {
217   gcc_assert (current_cache == this);
218   accepted_length = loc_cache.length ();
219 }
220 
/* Tree merging did succeed; throw away recent changes.  */
222 
223 void
224 lto_location_cache::revert_location_cache ()
225 {
226   loc_cache.truncate (accepted_length);
227 }
228 
/* Read a location bitpack from bit pack BP and either update *LOC directly
   or add it to the location cache.
   It is necessary to call apply_location_cache to get *LOC updated.  */
232 
233 void
234 lto_location_cache::input_location (location_t *loc, struct bitpack_d *bp,
235 				    struct data_in *data_in)
236 {
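  /* Cache the most recently streamed file, line and column.  The writer
     streams only the components that changed since the previous
     location, as indicated by the change bits below.  */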
237   static const char *stream_file;
238   static int stream_line;
239   static int stream_col;
240   static bool stream_sysp;
241   bool file_change, line_change, column_change;
242 
243   gcc_assert (current_cache == this);
244 
245   *loc = bp_unpack_int_in_range (bp, "location", 0, RESERVED_LOCATION_COUNT);
246 
247   if (*loc < RESERVED_LOCATION_COUNT)
248     return;
249 
250   /* Keep value RESERVED_LOCATION_COUNT in *loc as linemap lookups will
251      ICE on it.  */
252 
253   file_change = bp_unpack_value (bp, 1);
254   line_change = bp_unpack_value (bp, 1);
255   column_change = bp_unpack_value (bp, 1);
256 
257   if (file_change)
258     {
259       stream_file = canon_file_name (bp_unpack_string (data_in, bp));
260       stream_sysp = bp_unpack_value (bp, 1);
261     }
262 
263   if (line_change)
264     stream_line = bp_unpack_var_len_unsigned (bp);
265 
266   if (column_change)
267     stream_col = bp_unpack_var_len_unsigned (bp);
268 
  /* This optimization saves location cache operations during gimple
     streaming.  */
271 
272   if (current_file == stream_file && current_line == stream_line
273       && current_col == stream_col && current_sysp == stream_sysp)
274     {
275       *loc = current_loc;
276       return;
277     }
278 
279   struct cached_location entry
280     = {stream_file, loc, stream_line, stream_col, stream_sysp};
281   loc_cache.safe_push (entry);
282 }
283 
/* Read a location bitpack from bit pack BP and either update *LOC directly
   or add it to the location cache.
   It is necessary to call apply_location_cache to get *LOC updated.  */
287 
288 void
289 lto_input_location (location_t *loc, struct bitpack_d *bp,
290 		    struct data_in *data_in)
291 {
292   data_in->location_cache.input_location (loc, bp, data_in);
293 }
294 
295 /* Read location and return it instead of going through location caching.
296    This should be used only when the resulting location is not going to be
297    discarded.  */
298 
299 location_t
300 stream_input_location_now (struct bitpack_d *bp, struct data_in *data_in)
301 {
302   location_t loc;
303   stream_input_location (&loc, bp, data_in);
304   data_in->location_cache.apply_location_cache ();
305   return loc;
306 }
307 
/* Read a reference to a tree node from DATA_IN using input block IB.
   TAG is the expected node that should be found in IB.  If TAG belongs
   to one of the indexable trees, expect to read a reference index to
   be looked up in one of the symbol tables, otherwise read the physical
   representation of the tree using stream_read_tree.  FN is the
   function scope for the read tree.  */
314 
315 tree
316 lto_input_tree_ref (struct lto_input_block *ib, struct data_in *data_in,
317 		    struct function *fn, enum LTO_tags tag)
318 {
319   unsigned HOST_WIDE_INT ix_u;
320   tree result = NULL_TREE;
321 
322   lto_tag_check_range (tag, LTO_field_decl_ref, LTO_namelist_decl_ref);
323 
324   switch (tag)
325     {
326     case LTO_type_ref:
327       ix_u = streamer_read_uhwi (ib);
328       result = lto_file_decl_data_get_type (data_in->file_data, ix_u);
329       break;
330 
331     case LTO_ssa_name_ref:
332       ix_u = streamer_read_uhwi (ib);
333       result = (*SSANAMES (fn))[ix_u];
334       break;
335 
336     case LTO_field_decl_ref:
337       ix_u = streamer_read_uhwi (ib);
338       result = lto_file_decl_data_get_field_decl (data_in->file_data, ix_u);
339       break;
340 
341     case LTO_function_decl_ref:
342       ix_u = streamer_read_uhwi (ib);
343       result = lto_file_decl_data_get_fn_decl (data_in->file_data, ix_u);
344       break;
345 
346     case LTO_type_decl_ref:
347       ix_u = streamer_read_uhwi (ib);
348       result = lto_file_decl_data_get_type_decl (data_in->file_data, ix_u);
349       break;
350 
351     case LTO_namespace_decl_ref:
352       ix_u = streamer_read_uhwi (ib);
353       result = lto_file_decl_data_get_namespace_decl (data_in->file_data, ix_u);
354       break;
355 
356     case LTO_global_decl_ref:
357     case LTO_result_decl_ref:
358     case LTO_const_decl_ref:
359     case LTO_imported_decl_ref:
360     case LTO_label_decl_ref:
361     case LTO_translation_unit_decl_ref:
362     case LTO_namelist_decl_ref:
363       ix_u = streamer_read_uhwi (ib);
364       result = lto_file_decl_data_get_var_decl (data_in->file_data, ix_u);
365       break;
366 
367     default:
368       gcc_unreachable ();
369     }
370 
371   gcc_assert (result);
372 
373   return result;
374 }
375 
376 
/* Read and return a doubly-linked list of catch handlers from input
   block IB, using descriptors in DATA_IN.  */
379 
380 static struct eh_catch_d *
381 lto_input_eh_catch_list (struct lto_input_block *ib, struct data_in *data_in,
382 			 eh_catch *last_p)
383 {
384   eh_catch first;
385   enum LTO_tags tag;
386 
387   *last_p = first = NULL;
388   tag = streamer_read_record_start (ib);
389   while (tag)
390     {
391       tree list;
392       eh_catch n;
393 
394       lto_tag_check_range (tag, LTO_eh_catch, LTO_eh_catch);
395 
396       /* Read the catch node.  */
397       n = ggc_cleared_alloc<eh_catch_d> ();
398       n->type_list = stream_read_tree (ib, data_in);
399       n->filter_list = stream_read_tree (ib, data_in);
400       n->label = stream_read_tree (ib, data_in);
401 
402       /* Register all the types in N->FILTER_LIST.  */
403       for (list = n->filter_list; list; list = TREE_CHAIN (list))
404 	add_type_for_runtime (TREE_VALUE (list));
405 
406       /* Chain N to the end of the list.  */
407       if (*last_p)
408 	(*last_p)->next_catch = n;
409       n->prev_catch = *last_p;
410       *last_p = n;
411 
412       /* Set the head of the list the first time through the loop.  */
413       if (first == NULL)
414 	first = n;
415 
416       tag = streamer_read_record_start (ib);
417     }
418 
419   return first;
420 }
421 
422 
423 /* Read and return EH region IX from input block IB, using descriptors
424    in DATA_IN.  */
425 
426 static eh_region
427 input_eh_region (struct lto_input_block *ib, struct data_in *data_in, int ix)
428 {
429   enum LTO_tags tag;
430   eh_region r;
431 
432   /* Read the region header.  */
433   tag = streamer_read_record_start (ib);
434   if (tag == LTO_null)
435     return NULL;
436 
437   r = ggc_cleared_alloc<eh_region_d> ();
438   r->index = streamer_read_hwi (ib);
439 
440   gcc_assert (r->index == ix);
441 
442   /* Read all the region pointers as region numbers.  We'll fix up
443      the pointers once the whole array has been read.  */
444   r->outer = (eh_region) (intptr_t) streamer_read_hwi (ib);
445   r->inner = (eh_region) (intptr_t) streamer_read_hwi (ib);
446   r->next_peer = (eh_region) (intptr_t) streamer_read_hwi (ib);
447 
448   switch (tag)
449     {
450       case LTO_ert_cleanup:
451 	r->type = ERT_CLEANUP;
452 	break;
453 
454       case LTO_ert_try:
455 	{
456 	  struct eh_catch_d *last_catch;
457 	  r->type = ERT_TRY;
458 	  r->u.eh_try.first_catch = lto_input_eh_catch_list (ib, data_in,
459 							     &last_catch);
460 	  r->u.eh_try.last_catch = last_catch;
461 	  break;
462 	}
463 
464       case LTO_ert_allowed_exceptions:
465 	{
466 	  tree l;
467 
468 	  r->type = ERT_ALLOWED_EXCEPTIONS;
469 	  r->u.allowed.type_list = stream_read_tree (ib, data_in);
470 	  r->u.allowed.label = stream_read_tree (ib, data_in);
471 	  r->u.allowed.filter = streamer_read_uhwi (ib);
472 
473 	  for (l = r->u.allowed.type_list; l ; l = TREE_CHAIN (l))
474 	    add_type_for_runtime (TREE_VALUE (l));
475 	}
476 	break;
477 
478       case LTO_ert_must_not_throw:
479 	{
480 	  r->type = ERT_MUST_NOT_THROW;
481 	  r->u.must_not_throw.failure_decl = stream_read_tree (ib, data_in);
482 	  bitpack_d bp = streamer_read_bitpack (ib);
483 	  r->u.must_not_throw.failure_loc
484 	   = stream_input_location_now (&bp, data_in);
485 	}
486 	break;
487 
488       default:
489 	gcc_unreachable ();
490     }
491 
492   r->landing_pads = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
493 
494   return r;
495 }
496 
497 
498 /* Read and return EH landing pad IX from input block IB, using descriptors
499    in DATA_IN.  */
500 
501 static eh_landing_pad
502 input_eh_lp (struct lto_input_block *ib, struct data_in *data_in, int ix)
503 {
504   enum LTO_tags tag;
505   eh_landing_pad lp;
506 
507   /* Read the landing pad header.  */
508   tag = streamer_read_record_start (ib);
509   if (tag == LTO_null)
510     return NULL;
511 
512   lto_tag_check_range (tag, LTO_eh_landing_pad, LTO_eh_landing_pad);
513 
514   lp = ggc_cleared_alloc<eh_landing_pad_d> ();
515   lp->index = streamer_read_hwi (ib);
516   gcc_assert (lp->index == ix);
517   lp->next_lp = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
518   lp->region = (eh_region) (intptr_t) streamer_read_hwi (ib);
519   lp->post_landing_pad = stream_read_tree (ib, data_in);
520 
521   return lp;
522 }
523 
524 
525 /* After reading the EH regions, pointers to peer and children regions
526    are region numbers.  This converts all these region numbers into
527    real pointers into the rematerialized regions for FN.  ROOT_REGION
528    is the region number for the root EH region in FN.  */
529 
530 static void
531 fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region)
532 {
533   unsigned i;
534   vec<eh_region, va_gc> *eh_array = fn->eh->region_array;
535   vec<eh_landing_pad, va_gc> *lp_array = fn->eh->lp_array;
536   eh_region r;
537   eh_landing_pad lp;
538 
539   gcc_assert (eh_array && lp_array);
540 
541   gcc_assert (root_region >= 0);
542   fn->eh->region_tree = (*eh_array)[root_region];
543 
544 #define FIXUP_EH_REGION(r) (r) = (*eh_array)[(HOST_WIDE_INT) (intptr_t) (r)]
545 #define FIXUP_EH_LP(p) (p) = (*lp_array)[(HOST_WIDE_INT) (intptr_t) (p)]
546 
547   /* Convert all the index numbers stored in pointer fields into
548      pointers to the corresponding slots in the EH region array.  */
549   FOR_EACH_VEC_ELT (*eh_array, i, r)
550     {
551       /* The array may contain NULL regions.  */
552       if (r == NULL)
553 	continue;
554 
555       gcc_assert (i == (unsigned) r->index);
556       FIXUP_EH_REGION (r->outer);
557       FIXUP_EH_REGION (r->inner);
558       FIXUP_EH_REGION (r->next_peer);
559       FIXUP_EH_LP (r->landing_pads);
560     }
561 
562   /* Convert all the index numbers stored in pointer fields into
563      pointers to the corresponding slots in the EH landing pad array.  */
564   FOR_EACH_VEC_ELT (*lp_array, i, lp)
565     {
566       /* The array may contain NULL landing pads.  */
567       if (lp == NULL)
568 	continue;
569 
570       gcc_assert (i == (unsigned) lp->index);
571       FIXUP_EH_LP (lp->next_lp);
572       FIXUP_EH_REGION (lp->region);
573     }
574 
575 #undef FIXUP_EH_REGION
576 #undef FIXUP_EH_LP
577 }
578 
579 
580 /* Initialize EH support.  */
581 
582 void
583 lto_init_eh (void)
584 {
585   static bool eh_initialized_p = false;
586 
587   if (eh_initialized_p)
588     return;
589 
590   /* Contrary to most other FEs, we only initialize EH support when at
591      least one of the files in the set contains exception regions in
592      it.  Since this happens much later than the call to init_eh in
593      lang_dependent_init, we have to set flag_exceptions and call
594      init_eh again to initialize the EH tables.  */
595   flag_exceptions = 1;
596   init_eh ();
597 
598   eh_initialized_p = true;
599 }
600 
601 
602 /* Read the exception table for FN from IB using the data descriptors
603    in DATA_IN.  */
604 
605 static void
606 input_eh_regions (struct lto_input_block *ib, struct data_in *data_in,
607 		  struct function *fn)
608 {
609   HOST_WIDE_INT i, root_region, len;
610   enum LTO_tags tag;
611 
612   tag = streamer_read_record_start (ib);
613   if (tag == LTO_null)
614     return;
615 
616   lto_tag_check_range (tag, LTO_eh_table, LTO_eh_table);
617 
618   /* If the file contains EH regions, then it was compiled with
619      -fexceptions.  In that case, initialize the backend EH
620      machinery.  */
621   lto_init_eh ();
622 
623   gcc_assert (fn->eh);
624 
625   root_region = streamer_read_hwi (ib);
626   gcc_assert (root_region == (int) root_region);
627 
628   /* Read the EH region array.  */
629   len = streamer_read_hwi (ib);
630   gcc_assert (len == (int) len);
631   if (len > 0)
632     {
633       vec_safe_grow_cleared (fn->eh->region_array, len);
634       for (i = 0; i < len; i++)
635 	{
636 	  eh_region r = input_eh_region (ib, data_in, i);
637 	  (*fn->eh->region_array)[i] = r;
638 	}
639     }
640 
641   /* Read the landing pads.  */
642   len = streamer_read_hwi (ib);
643   gcc_assert (len == (int) len);
644   if (len > 0)
645     {
646       vec_safe_grow_cleared (fn->eh->lp_array, len);
647       for (i = 0; i < len; i++)
648 	{
649 	  eh_landing_pad lp = input_eh_lp (ib, data_in, i);
650 	  (*fn->eh->lp_array)[i] = lp;
651 	}
652     }
653 
654   /* Read the runtime type data.  */
655   len = streamer_read_hwi (ib);
656   gcc_assert (len == (int) len);
657   if (len > 0)
658     {
659       vec_safe_grow_cleared (fn->eh->ttype_data, len);
660       for (i = 0; i < len; i++)
661 	{
662 	  tree ttype = stream_read_tree (ib, data_in);
663 	  (*fn->eh->ttype_data)[i] = ttype;
664 	}
665     }
666 
667   /* Read the table of action chains.  */
668   len = streamer_read_hwi (ib);
669   gcc_assert (len == (int) len);
670   if (len > 0)
671     {
672       if (targetm.arm_eabi_unwinder)
673 	{
674 	  vec_safe_grow_cleared (fn->eh->ehspec_data.arm_eabi, len);
675 	  for (i = 0; i < len; i++)
676 	    {
677 	      tree t = stream_read_tree (ib, data_in);
678 	      (*fn->eh->ehspec_data.arm_eabi)[i] = t;
679 	    }
680 	}
681       else
682 	{
683 	  vec_safe_grow_cleared (fn->eh->ehspec_data.other, len);
684 	  for (i = 0; i < len; i++)
685 	    {
686 	      uchar c = streamer_read_uchar (ib);
687 	      (*fn->eh->ehspec_data.other)[i] = c;
688 	    }
689 	}
690     }
691 
692   /* Reconstruct the EH region tree by fixing up the peer/children
693      pointers.  */
694   fixup_eh_region_pointers (fn, root_region);
695 
696   tag = streamer_read_record_start (ib);
697   lto_tag_check_range (tag, LTO_null, LTO_null);
698 }
699 
700 
701 /* Make a new basic block with index INDEX in function FN.  */
702 
703 static basic_block
704 make_new_block (struct function *fn, unsigned int index)
705 {
706   basic_block bb = alloc_block ();
707   bb->index = index;
708   SET_BASIC_BLOCK_FOR_FN (fn, index, bb);
709   n_basic_blocks_for_fn (fn)++;
710   return bb;
711 }
712 
713 
714 /* Read the CFG for function FN from input block IB.  */
715 
716 static void
717 input_cfg (struct lto_input_block *ib, struct data_in *data_in,
718 	   struct function *fn)
719 {
720   unsigned int bb_count;
721   basic_block p_bb;
722   unsigned int i;
723   int index;
724 
725   init_empty_tree_cfg_for_function (fn);
726   init_ssa_operands (fn);
727 
728   profile_status_for_fn (fn) = streamer_read_enum (ib, profile_status_d,
729 						   PROFILE_LAST);
730 
731   bb_count = streamer_read_uhwi (ib);
732 
733   last_basic_block_for_fn (fn) = bb_count;
734   if (bb_count > basic_block_info_for_fn (fn)->length ())
735     vec_safe_grow_cleared (basic_block_info_for_fn (fn), bb_count);
736 
737   if (bb_count > label_to_block_map_for_fn (fn)->length ())
738     vec_safe_grow_cleared (label_to_block_map_for_fn (fn), bb_count);
739 
740   index = streamer_read_hwi (ib);
741   while (index != -1)
742     {
743       basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
744       unsigned int edge_count;
745 
746       if (bb == NULL)
747 	bb = make_new_block (fn, index);
748 
749       edge_count = streamer_read_uhwi (ib);
750 
751       /* Connect up the CFG.  */
752       for (i = 0; i < edge_count; i++)
753 	{
754 	  unsigned int dest_index;
755 	  unsigned int edge_flags;
756 	  basic_block dest;
757 	  profile_probability probability;
758 	  edge e;
759 
760 	  dest_index = streamer_read_uhwi (ib);
761 	  probability = profile_probability::stream_in (ib);
762 	  edge_flags = streamer_read_uhwi (ib);
763 
764 	  dest = BASIC_BLOCK_FOR_FN (fn, dest_index);
765 
766 	  if (dest == NULL)
767 	    dest = make_new_block (fn, dest_index);
768 
769 	  e = make_edge (bb, dest, edge_flags);
770 	  e->probability = probability;
771 	}
772 
773       index = streamer_read_hwi (ib);
774     }
775 
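  /* Re-link the basic blocks into a chain in the order they were
     streamed, starting from the entry block.  */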
776   p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
777   index = streamer_read_hwi (ib);
778   while (index != -1)
779     {
780       basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
781       bb->prev_bb = p_bb;
782       p_bb->next_bb = bb;
783       p_bb = bb;
784       index = streamer_read_hwi (ib);
785     }
786 
787   /* ???  The cfgloop interface is tied to cfun.  */
788   gcc_assert (cfun == fn);
789 
790   /* Input the loop tree.  */
791   unsigned n_loops = streamer_read_uhwi (ib);
792   if (n_loops == 0)
793     return;
794 
795   struct loops *loops = ggc_cleared_alloc<struct loops> ();
796   init_loops_structure (fn, loops, n_loops);
797   set_loops_for_fn (fn, loops);
798 
799   /* Input each loop and associate it with its loop header so
800      flow_loops_find can rebuild the loop tree.  */
801   for (unsigned i = 1; i < n_loops; ++i)
802     {
803       int header_index = streamer_read_hwi (ib);
804       if (header_index == -1)
805 	{
806 	  loops->larray->quick_push (NULL);
807 	  continue;
808 	}
809 
810       struct loop *loop = alloc_loop ();
811       loop->header = BASIC_BLOCK_FOR_FN (fn, header_index);
812       loop->header->loop_father = loop;
813 
814       /* Read everything copy_loop_info copies.  */
815       loop->estimate_state = streamer_read_enum (ib, loop_estimation, EST_LAST);
816       loop->any_upper_bound = streamer_read_hwi (ib);
817       if (loop->any_upper_bound)
818 	loop->nb_iterations_upper_bound = streamer_read_widest_int (ib);
819       loop->any_likely_upper_bound = streamer_read_hwi (ib);
820       if (loop->any_likely_upper_bound)
821 	loop->nb_iterations_likely_upper_bound = streamer_read_widest_int (ib);
822       loop->any_estimate = streamer_read_hwi (ib);
823       if (loop->any_estimate)
824 	loop->nb_iterations_estimate = streamer_read_widest_int (ib);
825 
826       /* Read OMP SIMD related info.  */
827       loop->safelen = streamer_read_hwi (ib);
828       loop->unroll = streamer_read_hwi (ib);
829       loop->dont_vectorize = streamer_read_hwi (ib);
830       loop->force_vectorize = streamer_read_hwi (ib);
831       loop->simduid = stream_read_tree (ib, data_in);
832 
833       place_new_loop (fn, loop);
834 
835       /* flow_loops_find doesn't like loops not in the tree, hook them
836          all as siblings of the tree root temporarily.  */
837       flow_loop_tree_node_add (loops->tree_root, loop);
838     }
839 
840   /* Rebuild the loop tree.  */
841   flow_loops_find (loops);
842 }
843 
844 
845 /* Read the SSA names array for function FN from DATA_IN using input
846    block IB.  */
847 
848 static void
849 input_ssa_names (struct lto_input_block *ib, struct data_in *data_in,
850 		 struct function *fn)
851 {
852   unsigned int i, size;
853 
854   size = streamer_read_uhwi (ib);
855   init_ssanames (fn, size);
856 
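  /* Each SSA name is streamed as its version number followed by a
     default-definition flag and the tree it is based on; a zero version
     terminates the list.  */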
857   i = streamer_read_uhwi (ib);
858   while (i)
859     {
860       tree ssa_name, name;
861       bool is_default_def;
862 
863       /* Skip over the elements that had been freed.  */
864       while (SSANAMES (fn)->length () < i)
865 	SSANAMES (fn)->quick_push (NULL_TREE);
866 
867       is_default_def = (streamer_read_uchar (ib) != 0);
868       name = stream_read_tree (ib, data_in);
869       ssa_name = make_ssa_name_fn (fn, name, NULL);
870 
871       if (is_default_def)
872 	{
873 	  set_ssa_default_def (cfun, SSA_NAME_VAR (ssa_name), ssa_name);
874 	  SSA_NAME_DEF_STMT (ssa_name) = gimple_build_nop ();
875 	}
876 
877       i = streamer_read_uhwi (ib);
878     }
879 }
880 
881 
/* Go through all the edges of NODE and fix up their call_stmt pointers
   so they point into the STMTS array.  */
884 
885 static void
886 fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
887 			 struct function *fn)
888 {
889 #define STMT_UID_NOT_IN_RANGE(uid) \
890   (gimple_stmt_max_uid (fn) < uid || uid == 0)
891 
892   struct cgraph_edge *cedge;
893   struct ipa_ref *ref = NULL;
894   unsigned int i;
895 
896   for (cedge = node->callees; cedge; cedge = cedge->next_callee)
897     {
898       if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
899         fatal_error (input_location,
900 		     "Cgraph edge statement index out of range");
901       cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
902       if (!cedge->call_stmt)
903         fatal_error (input_location,
904 		     "Cgraph edge statement index not found");
905     }
906   for (cedge = node->indirect_calls; cedge; cedge = cedge->next_callee)
907     {
908       if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
909         fatal_error (input_location,
910 		     "Cgraph edge statement index out of range");
911       cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
912       if (!cedge->call_stmt)
913         fatal_error (input_location, "Cgraph edge statement index not found");
914     }
915   for (i = 0; node->iterate_reference (i, ref); i++)
916     if (ref->lto_stmt_uid)
917       {
918 	if (STMT_UID_NOT_IN_RANGE (ref->lto_stmt_uid))
919 	  fatal_error (input_location,
920 		       "Reference statement index out of range");
921 	ref->stmt = stmts[ref->lto_stmt_uid - 1];
922 	if (!ref->stmt)
923 	  fatal_error (input_location, "Reference statement index not found");
924       }
925 }
926 
927 
928 /* Fixup call_stmt pointers in NODE and all clones.  */
929 
930 static void
931 fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
932 {
933   struct cgraph_node *node;
934   struct function *fn;
935 
936   while (orig->clone_of)
937     orig = orig->clone_of;
938   fn = DECL_STRUCT_FUNCTION (orig->decl);
939 
940   if (!orig->thunk.thunk_p)
941     fixup_call_stmt_edges_1 (orig, stmts, fn);
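  /* Walk the clone tree of ORIG in preorder and fix up the edges of
     every non-thunk clone as well; their edges refer to statements in
     ORIG's body.  */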
942   if (orig->clones)
943     for (node = orig->clones; node != orig;)
944       {
945 	if (!node->thunk.thunk_p)
946 	  fixup_call_stmt_edges_1 (node, stmts, fn);
947 	if (node->clones)
948 	  node = node->clones;
949 	else if (node->next_sibling_clone)
950 	  node = node->next_sibling_clone;
951 	else
952 	  {
953 	    while (node != orig && !node->next_sibling_clone)
954 	      node = node->clone_of;
955 	    if (node != orig)
956 	      node = node->next_sibling_clone;
957 	  }
958       }
959 }
960 
961 
962 /* Input the base body of struct function FN from DATA_IN
963    using input block IB.  */
964 
965 static void
966 input_struct_function_base (struct function *fn, struct data_in *data_in,
967                             struct lto_input_block *ib)
968 {
969   struct bitpack_d bp;
970   int len;
971 
972   /* Read the static chain and non-local goto save area.  */
973   fn->static_chain_decl = stream_read_tree (ib, data_in);
974   fn->nonlocal_goto_save_area = stream_read_tree (ib, data_in);
975 
976   /* Read all the local symbols.  */
977   len = streamer_read_hwi (ib);
978   if (len > 0)
979     {
980       int i;
981       vec_safe_grow_cleared (fn->local_decls, len);
982       for (i = 0; i < len; i++)
983 	{
984 	  tree t = stream_read_tree (ib, data_in);
985 	  (*fn->local_decls)[i] = t;
986 	}
987     }
988 
989   /* Input the current IL state of the function.  */
990   fn->curr_properties = streamer_read_uhwi (ib);
991 
992   /* Read all the attributes for FN.  */
993   bp = streamer_read_bitpack (ib);
994   fn->is_thunk = bp_unpack_value (&bp, 1);
995   fn->has_local_explicit_reg_vars = bp_unpack_value (&bp, 1);
996   fn->returns_pcc_struct = bp_unpack_value (&bp, 1);
997   fn->returns_struct = bp_unpack_value (&bp, 1);
998   fn->can_throw_non_call_exceptions = bp_unpack_value (&bp, 1);
999   fn->can_delete_dead_exceptions = bp_unpack_value (&bp, 1);
1000   fn->always_inline_functions_inlined = bp_unpack_value (&bp, 1);
1001   fn->after_inlining = bp_unpack_value (&bp, 1);
1002   fn->stdarg = bp_unpack_value (&bp, 1);
1003   fn->has_nonlocal_label = bp_unpack_value (&bp, 1);
1004   fn->has_forced_label_in_static = bp_unpack_value (&bp, 1);
1005   fn->calls_alloca = bp_unpack_value (&bp, 1);
1006   fn->calls_setjmp = bp_unpack_value (&bp, 1);
1007   fn->has_force_vectorize_loops = bp_unpack_value (&bp, 1);
1008   fn->has_simduid_loops = bp_unpack_value (&bp, 1);
1009   fn->va_list_fpr_size = bp_unpack_value (&bp, 8);
1010   fn->va_list_gpr_size = bp_unpack_value (&bp, 8);
1011   fn->last_clique = bp_unpack_value (&bp, sizeof (short) * 8);
1012 
1013   /* Input the function start and end loci.  */
1014   fn->function_start_locus = stream_input_location_now (&bp, data_in);
1015   fn->function_end_locus = stream_input_location_now (&bp, data_in);
1016 }
1017 
1018 
1019 /* Read the body of function FN_DECL from DATA_IN using input block IB.  */
1020 
1021 static void
1022 input_function (tree fn_decl, struct data_in *data_in,
1023 		struct lto_input_block *ib, struct lto_input_block *ib_cfg)
1024 {
1025   struct function *fn;
1026   enum LTO_tags tag;
1027   gimple **stmts;
1028   basic_block bb;
1029   struct cgraph_node *node;
1030 
1031   tag = streamer_read_record_start (ib);
1032   lto_tag_check (tag, LTO_function);
1033 
  /* Read decls for the return value and the arguments.  */
1035   DECL_RESULT (fn_decl) = stream_read_tree (ib, data_in);
1036   DECL_ARGUMENTS (fn_decl) = streamer_read_chain (ib, data_in);
1037 
1038   /* Read debug args if available.  */
1039   unsigned n_debugargs = streamer_read_uhwi (ib);
1040   if (n_debugargs)
1041     {
1042       vec<tree, va_gc> **debugargs = decl_debug_args_insert (fn_decl);
1043       vec_safe_grow (*debugargs, n_debugargs);
1044       for (unsigned i = 0; i < n_debugargs; ++i)
1045 	(**debugargs)[i] = stream_read_tree (ib, data_in);
1046     }
1047 
1048   /* Read the tree of lexical scopes for the function.  */
1049   DECL_INITIAL (fn_decl) = stream_read_tree (ib, data_in);
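  /* Read the leaf BLOCKs of the scope tree that were streamed
     separately.  The return values are deliberately ignored; reading
     them is enough to materialize the nodes in the reader cache.  */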
1050   unsigned block_leaf_count = streamer_read_uhwi (ib);
1051   while (block_leaf_count--)
1052     stream_read_tree (ib, data_in);
1053 
1054   if (!streamer_read_uhwi (ib))
1055     return;
1056 
1057   push_struct_function (fn_decl);
1058   fn = DECL_STRUCT_FUNCTION (fn_decl);
1059   init_tree_ssa (fn);
1060   /* We input IL in SSA form.  */
1061   cfun->gimple_df->in_ssa_p = true;
1062 
1063   gimple_register_cfg_hooks ();
1064 
1065   node = cgraph_node::get (fn_decl);
1066   if (!node)
1067     node = cgraph_node::create (fn_decl);
1068   input_struct_function_base (fn, data_in, ib);
1069   input_cfg (ib_cfg, data_in, fn);
1070 
1071   /* Read all the SSA names.  */
1072   input_ssa_names (ib, data_in, fn);
1073 
1074   /* Read the exception handling regions in the function.  */
1075   input_eh_regions (ib, data_in, fn);
1076 
1077   gcc_assert (DECL_INITIAL (fn_decl));
1078   DECL_SAVED_TREE (fn_decl) = NULL_TREE;
1079 
1080   /* Read all the basic blocks.  */
1081   tag = streamer_read_record_start (ib);
1082   while (tag)
1083     {
1084       input_bb (ib, tag, data_in, fn,
1085 		node->count_materialization_scale);
1086       tag = streamer_read_record_start (ib);
1087     }
1088 
1089   /* Fix up the call statements that are mentioned in the callgraph
1090      edges.  */
1091   set_gimple_stmt_max_uid (cfun, 0);
1092   FOR_ALL_BB_FN (bb, cfun)
1093     {
1094       gimple_stmt_iterator gsi;
1095       for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1096 	{
1097 	  gimple *stmt = gsi_stmt (gsi);
1098 	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1099 	}
1100       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1101 	{
1102 	  gimple *stmt = gsi_stmt (gsi);
1103 	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1104 	}
1105     }
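  /* Build an array mapping statement UIDs back to statements so the
     callgraph edges, which record statement UIDs, can be re-pointed at
     the newly materialized statements.  */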
1106   stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
1107   FOR_ALL_BB_FN (bb, cfun)
1108     {
1109       gimple_stmt_iterator bsi = gsi_start_phis (bb);
1110       while (!gsi_end_p (bsi))
1111 	{
1112 	  gimple *stmt = gsi_stmt (bsi);
1113 	  gsi_next (&bsi);
1114 	  stmts[gimple_uid (stmt)] = stmt;
1115 	}
1116       bsi = gsi_start_bb (bb);
1117       while (!gsi_end_p (bsi))
1118 	{
1119 	  gimple *stmt = gsi_stmt (bsi);
1120 	  bool remove = false;
1121 	  /* If we're recompiling LTO objects with debug stmts but
1122 	     we're not supposed to have debug stmts, remove them now.
1123 	     We can't remove them earlier because this would cause uid
1124 	     mismatches in fixups, but we can do it at this point, as
1125 	     long as debug stmts don't require fixups.
	     Similarly remove all IFN_*SAN_* internal calls.  */
1127 	  if (!flag_wpa)
1128 	    {
1129 	      if (is_gimple_debug (stmt)
1130 		  && (gimple_debug_nonbind_marker_p (stmt)
1131 		      ? !MAY_HAVE_DEBUG_MARKER_STMTS
1132 		      : !MAY_HAVE_DEBUG_BIND_STMTS))
1133 		remove = true;
1134 	      if (is_gimple_call (stmt)
1135 		  && gimple_call_internal_p (stmt))
1136 		{
1137 		  bool replace = false;
1138 		  switch (gimple_call_internal_fn (stmt))
1139 		    {
1140 		    case IFN_UBSAN_NULL:
1141 		      if ((flag_sanitize
1142 			  & (SANITIZE_NULL | SANITIZE_ALIGNMENT)) == 0)
1143 			replace = true;
1144 		      break;
1145 		    case IFN_UBSAN_BOUNDS:
1146 		      if ((flag_sanitize & SANITIZE_BOUNDS) == 0)
1147 			replace = true;
1148 		      break;
1149 		    case IFN_UBSAN_VPTR:
1150 		      if ((flag_sanitize & SANITIZE_VPTR) == 0)
1151 			replace = true;
1152 		      break;
1153 		    case IFN_UBSAN_OBJECT_SIZE:
1154 		      if ((flag_sanitize & SANITIZE_OBJECT_SIZE) == 0)
1155 			replace = true;
1156 		      break;
1157 		    case IFN_UBSAN_PTR:
1158 		      if ((flag_sanitize & SANITIZE_POINTER_OVERFLOW) == 0)
1159 			replace = true;
1160 		      break;
1161 		    case IFN_ASAN_MARK:
1162 		      if ((flag_sanitize & SANITIZE_ADDRESS) == 0)
1163 			replace = true;
1164 		      break;
1165 		    case IFN_TSAN_FUNC_EXIT:
1166 		      if ((flag_sanitize & SANITIZE_THREAD) == 0)
1167 			replace = true;
1168 		      break;
1169 		    default:
1170 		      break;
1171 		    }
1172 		  if (replace)
1173 		    {
1174 		      gimple_call_set_internal_fn (as_a <gcall *> (stmt),
1175 						   IFN_NOP);
1176 		      update_stmt (stmt);
1177 		    }
1178 		}
1179 	    }
1180 	  if (remove)
1181 	    {
1182 	      gimple_stmt_iterator gsi = bsi;
1183 	      gsi_next (&bsi);
1184 	      unlink_stmt_vdef (stmt);
1185 	      release_defs (stmt);
1186 	      gsi_remove (&gsi, true);
1187 	    }
1188 	  else
1189 	    {
1190 	      gsi_next (&bsi);
1191 	      stmts[gimple_uid (stmt)] = stmt;
1192 
1193 	      /* Remember that the input function has begin stmt
1194 		 markers, so that we know to expect them when emitting
1195 		 debug info.  */
1196 	      if (!cfun->debug_nonbind_markers
1197 		  && gimple_debug_nonbind_marker_p (stmt))
1198 		cfun->debug_nonbind_markers = true;
1199 	    }
1200 	}
1201     }
1202 
1203   /* Set the gimple body to the statement sequence in the entry
1204      basic block.  FIXME lto, this is fairly hacky.  The existence
1205      of a gimple body is used by the cgraph routines, but we should
1206      really use the presence of the CFG.  */
1207   {
1208     edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
1209     gimple_set_body (fn_decl, bb_seq (ei_edge (ei)->dest));
1210   }
1211 
1212   update_max_bb_count ();
1213   fixup_call_stmt_edges (node, stmts);
1214   execute_all_ipa_stmt_fixups (node, stmts);
1215 
1216   update_ssa (TODO_update_ssa_only_virtuals);
1217   free_dominance_info (CDI_DOMINATORS);
1218   free_dominance_info (CDI_POST_DOMINATORS);
1219   free (stmts);
1220   pop_cfun ();
1221 }
1222 
/* Read the initializer of variable VAR from DATA_IN using input block IB.  */
1224 
1225 static void
1226 input_constructor (tree var, struct data_in *data_in,
1227 		   struct lto_input_block *ib)
1228 {
1229   DECL_INITIAL (var) = stream_read_tree (ib, data_in);
1230 }
1231 
1232 
/* Read the body from DATA for symbol NODE and fill it in.
   FILE_DATA holds the global decls and types.  SECTION_TYPE is either
   LTO_section_function_body or LTO_section_static_initializer.  If
   SECTION_TYPE is LTO_section_function_body, NODE must be the cgraph
   node of the function whose body is being read.  */
1238 
1239 static void
1240 lto_read_body_or_constructor (struct lto_file_decl_data *file_data, struct symtab_node *node,
1241 			      const char *data, enum lto_section_type section_type)
1242 {
1243   const struct lto_function_header *header;
1244   struct data_in *data_in;
1245   int cfg_offset;
1246   int main_offset;
1247   int string_offset;
1248   tree fn_decl = node->decl;
1249 
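  /* A function body section consists of the header, the CFG block, the
     main body block and the string table; constructor sections carry no
     CFG block.  */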
1250   header = (const struct lto_function_header *) data;
1251   if (TREE_CODE (node->decl) == FUNCTION_DECL)
1252     {
1253       cfg_offset = sizeof (struct lto_function_header);
1254       main_offset = cfg_offset + header->cfg_size;
1255       string_offset = main_offset + header->main_size;
1256     }
1257   else
1258     {
1259       main_offset = sizeof (struct lto_function_header);
1260       string_offset = main_offset + header->main_size;
1261     }
1262 
1263   data_in = lto_data_in_create (file_data, data + string_offset,
1264 			      header->string_size, vNULL);
1265 
1266   if (section_type == LTO_section_function_body)
1267     {
1268       struct lto_in_decl_state *decl_state;
1269       unsigned from;
1270 
1271       gcc_checking_assert (node);
1272 
1273       /* Use the function's decl state. */
1274       decl_state = lto_get_function_in_decl_state (file_data, fn_decl);
1275       gcc_assert (decl_state);
1276       file_data->current_decl_state = decl_state;
1277 
1278 
1279       /* Set up the struct function.  */
1280       from = data_in->reader_cache->nodes.length ();
1281       lto_input_block ib_main (data + main_offset, header->main_size,
1282 			       file_data->mode_table);
1283       if (TREE_CODE (node->decl) == FUNCTION_DECL)
1284 	{
1285 	  lto_input_block ib_cfg (data + cfg_offset, header->cfg_size,
1286 				  file_data->mode_table);
1287 	  input_function (fn_decl, data_in, &ib_main, &ib_cfg);
1288 	}
1289       else
1290         input_constructor (fn_decl, data_in, &ib_main);
1291       data_in->location_cache.apply_location_cache ();
1292       /* And fixup types we streamed locally.  */
1293 	{
1294 	  struct streamer_tree_cache_d *cache = data_in->reader_cache;
1295 	  unsigned len = cache->nodes.length ();
1296 	  unsigned i;
1297 	  for (i = len; i-- > from;)
1298 	    {
1299 	      tree t = streamer_tree_cache_get_tree (cache, i);
1300 	      if (t == NULL_TREE)
1301 		continue;
1302 
1303 	      if (TYPE_P (t))
1304 		{
1305 		  gcc_assert (TYPE_CANONICAL (t) == NULL_TREE);
1306 		  if (type_with_alias_set_p (t)
1307 		      && canonical_type_used_p (t))
1308 		    TYPE_CANONICAL (t) = TYPE_MAIN_VARIANT (t);
1309 		  if (TYPE_MAIN_VARIANT (t) != t)
1310 		    {
1311 		      gcc_assert (TYPE_NEXT_VARIANT (t) == NULL_TREE);
1312 		      TYPE_NEXT_VARIANT (t)
1313 			= TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t));
1314 		      TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t)) = t;
1315 		    }
1316 		}
1317 	    }
1318 	}
1319 
      /* Restore decl state.  */
1321       file_data->current_decl_state = file_data->global_decl_state;
1322     }
1323 
1324   lto_data_in_delete (data_in);
1325 }
1326 
1327 
1328 /* Read the body of NODE using DATA.  FILE_DATA holds the global
1329    decls and types.  */
1330 
1331 void
1332 lto_input_function_body (struct lto_file_decl_data *file_data,
1333 			 struct cgraph_node *node, const char *data)
1334 {
1335   lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1336 }
1337 
/* Read the constructor (static initializer) of variable NODE using DATA.
   FILE_DATA holds the global decls and types.  */
1340 
1341 void
1342 lto_input_variable_constructor (struct lto_file_decl_data *file_data,
1343 				struct varpool_node *node, const char *data)
1344 {
1345   lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1346 }
1347 
1348 
/* Queue of accumulated decl -> DIE mappings.  Like locations, these are
   only applied to prevailing tree nodes during tree merging.  */
1351 vec<dref_entry> dref_queue;
1352 
1353 /* Read the physical representation of a tree node EXPR from
1354    input block IB using the per-file context in DATA_IN.  */
1355 
1356 static void
1357 lto_read_tree_1 (struct lto_input_block *ib, struct data_in *data_in, tree expr)
1358 {
1359   /* Read all the bitfield values in EXPR.  Note that for LTO, we
1360      only write language-independent bitfields, so no more unpacking is
1361      needed.  */
1362   streamer_read_tree_bitfields (ib, data_in, expr);
1363 
1364   /* Read all the pointer fields in EXPR.  */
1365   streamer_read_tree_body (ib, data_in, expr);
1366 
1367   /* Read any LTO-specific data not read by the tree streamer.  */
1368   if (DECL_P (expr)
1369       && TREE_CODE (expr) != FUNCTION_DECL
1370       && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1371     DECL_INITIAL (expr) = stream_read_tree (ib, data_in);
1372 
1373   /* Stream references to early generated DIEs.  Keep in sync with the
1374      trees handled in dwarf2out_register_external_die.  */
1375   if ((DECL_P (expr)
1376        && TREE_CODE (expr) != FIELD_DECL
1377        && TREE_CODE (expr) != DEBUG_EXPR_DECL
1378        && TREE_CODE (expr) != TYPE_DECL)
1379       || TREE_CODE (expr) == BLOCK)
1380     {
1381       const char *str = streamer_read_string (data_in, ib);
1382       if (str)
1383 	{
1384 	  unsigned HOST_WIDE_INT off = streamer_read_uhwi (ib);
1385 	  dref_entry e = { expr, str, off };
1386 	  dref_queue.safe_push (e);
1387 	}
1388     }
1389 }
1390 
1391 /* Read the physical representation of a tree node with tag TAG from
1392    input block IB using the per-file context in DATA_IN.  */
1393 
1394 static tree
1395 lto_read_tree (struct lto_input_block *ib, struct data_in *data_in,
1396 	       enum LTO_tags tag, hashval_t hash)
1397 {
1398   /* Instantiate a new tree node.  */
1399   tree result = streamer_alloc_tree (ib, data_in, tag);
1400 
1401   /* Enter RESULT in the reader cache.  This will make RESULT
1402      available so that circular references in the rest of the tree
1403      structure can be resolved in subsequent calls to stream_read_tree.  */
1404   streamer_tree_cache_append (data_in->reader_cache, result, hash);
1405 
1406   lto_read_tree_1 (ib, data_in, result);
1407 
1408   /* end_marker = */ streamer_read_uchar (ib);
1409 
1410   return result;
1411 }
1412 
1413 
/* Populate the reader cache with trees materialized from the SCC
   that follows in the IB, DATA_IN stream.  Return the SCC's hash and
   store its size and entry length in *LEN and *ENTRY_LEN.  */
1416 
1417 hashval_t
1418 lto_input_scc (struct lto_input_block *ib, struct data_in *data_in,
1419 	       unsigned *len, unsigned *entry_len)
1420 {
1421   /* A blob of unnamed tree nodes, fill the cache from it and
1422      recurse.  */
1423   unsigned size = streamer_read_uhwi (ib);
1424   hashval_t scc_hash = streamer_read_uhwi (ib);
1425   unsigned scc_entry_len = 1;
1426 
1427   if (size == 1)
1428     {
1429       enum LTO_tags tag = streamer_read_record_start (ib);
1430       lto_input_tree_1 (ib, data_in, tag, scc_hash);
1431     }
1432   else
1433     {
1434       unsigned int first = data_in->reader_cache->nodes.length ();
1435       tree result;
1436 
1437       scc_entry_len = streamer_read_uhwi (ib);
1438 
1439       /* Materialize size trees by reading their headers.  */
1440       for (unsigned i = 0; i < size; ++i)
1441 	{
1442 	  enum LTO_tags tag = streamer_read_record_start (ib);
1443 	  if (tag == LTO_null
1444 	      || (tag >= LTO_field_decl_ref && tag <= LTO_global_decl_ref)
1445 	      || tag == LTO_tree_pickle_reference
1446 	      || tag == LTO_integer_cst
1447 	      || tag == LTO_tree_scc)
1448 	    gcc_unreachable ();
1449 
1450 	  result = streamer_alloc_tree (ib, data_in, tag);
1451 	  streamer_tree_cache_append (data_in->reader_cache, result, 0);
1452 	}
1453 
1454       /* Read the tree bitpacks and references.  */
1455       for (unsigned i = 0; i < size; ++i)
1456 	{
1457 	  result = streamer_tree_cache_get_tree (data_in->reader_cache,
1458 						 first + i);
1459 	  lto_read_tree_1 (ib, data_in, result);
1460 	  /* end_marker = */ streamer_read_uchar (ib);
1461 	}
1462     }
1463 
1464   *len = size;
1465   *entry_len = scc_entry_len;
1466   return scc_hash;
1467 }
1468 
1469 
1470 /* Read a tree from input block IB using the per-file context in
1471    DATA_IN.  This context is used, for example, to resolve references
1472    to previously read nodes.  */
1473 
1474 tree
1475 lto_input_tree_1 (struct lto_input_block *ib, struct data_in *data_in,
1476 		  enum LTO_tags tag, hashval_t hash)
1477 {
1478   tree result;
1479 
1480   gcc_assert ((unsigned) tag < (unsigned) LTO_NUM_TAGS);
1481 
1482   if (tag == LTO_null)
1483     result = NULL_TREE;
1484   else if (tag >= LTO_field_decl_ref && tag <= LTO_namelist_decl_ref)
1485     {
1486       /* If TAG is a reference to an indexable tree, the next value
1487 	 in IB is the index into the table where we expect to find
1488 	 that tree.  */
1489       result = lto_input_tree_ref (ib, data_in, cfun, tag);
1490     }
1491   else if (tag == LTO_tree_pickle_reference)
1492     {
1493       /* If TAG is a reference to a previously read tree, look it up in
1494 	 the reader cache.  */
1495       result = streamer_get_pickled_tree (ib, data_in);
1496     }
1497   else if (tag == LTO_integer_cst)
1498     {
1499       /* For shared integer constants in singletons we can use the
1500          existing tree integer constant merging code.  */
1501       tree type = stream_read_tree (ib, data_in);
1502       unsigned HOST_WIDE_INT len = streamer_read_uhwi (ib);
1503       unsigned HOST_WIDE_INT i;
1504       HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];
1505 
1506       for (i = 0; i < len; i++)
1507 	a[i] = streamer_read_hwi (ib);
1508       gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
1509       result = wide_int_to_tree (type, wide_int::from_array
1510 				 (a, len, TYPE_PRECISION (type)));
1511       streamer_tree_cache_append (data_in->reader_cache, result, hash);
1512     }
1513   else if (tag == LTO_tree_scc)
1514     gcc_unreachable ();
1515   else
1516     {
1517       /* Otherwise, materialize a new node from IB.  */
1518       result = lto_read_tree (ib, data_in, tag, hash);
1519     }
1520 
1521   return result;
1522 }
1523 
1524 tree
1525 lto_input_tree (struct lto_input_block *ib, struct data_in *data_in)
1526 {
1527   enum LTO_tags tag;
1528 
1529   /* Input and skip SCCs.  */
1530   while ((tag = streamer_read_record_start (ib)) == LTO_tree_scc)
1531     {
1532       unsigned len, entry_len;
1533       lto_input_scc (ib, data_in, &len, &entry_len);
1534 
1535       /* Register DECLs with the debuginfo machinery.  */
1536       while (!dref_queue.is_empty ())
1537 	{
1538 	  dref_entry e = dref_queue.pop ();
1539 	  debug_hooks->register_external_die (e.decl, e.sym, e.off);
1540 	}
1541     }
1542   return lto_input_tree_1 (ib, data_in, tag, 0);
1543 }
1544 
1545 
1546 /* Input toplevel asms.  */
1547 
1548 void
1549 lto_input_toplevel_asms (struct lto_file_decl_data *file_data, int order_base)
1550 {
1551   size_t len;
1552   const char *data = lto_get_section_data (file_data, LTO_section_asm,
1553 					   NULL, &len);
1554   const struct lto_simple_header_with_strings *header
1555     = (const struct lto_simple_header_with_strings *) data;
1556   int string_offset;
1557   struct data_in *data_in;
1558   tree str;
1559 
1560   if (! data)
1561     return;
1562 
1563   string_offset = sizeof (*header) + header->main_size;
1564 
1565   lto_input_block ib (data + sizeof (*header), header->main_size,
1566 		      file_data->mode_table);
1567 
1568   data_in = lto_data_in_create (file_data, data + string_offset,
1569 			      header->string_size, vNULL);
1570 
1571   while ((str = streamer_read_string_cst (data_in, &ib)))
1572     {
1573       asm_node *node = symtab->finalize_toplevel_asm (str);
1574       node->order = streamer_read_hwi (&ib) + order_base;
1575       if (node->order >= symtab->order)
1576 	symtab->order = node->order + 1;
1577     }
1578 
1579   lto_data_in_delete (data_in);
1580 
1581   lto_free_section_data (file_data, LTO_section_asm, NULL, data, len);
1582 }
1583 
1584 
1585 /* Input mode table.  */
1586 
1587 void
1588 lto_input_mode_table (struct lto_file_decl_data *file_data)
1589 {
1590   size_t len;
1591   const char *data = lto_get_section_data (file_data, LTO_section_mode_table,
1592 					   NULL, &len);
1593   if (! data)
1594     {
1595       internal_error ("cannot read LTO mode table from %s",
1596 		      file_data->file_name);
1597       return;
1598     }
1599 
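  /* The mode table maps machine mode numbers as streamed by the writer
     onto the modes known to this compiler; it is indexed by the
     streamed mode number.  */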
1600   unsigned char *table = ggc_cleared_vec_alloc<unsigned char> (1 << 8);
1601   file_data->mode_table = table;
1602   const struct lto_simple_header_with_strings *header
1603     = (const struct lto_simple_header_with_strings *) data;
1604   int string_offset;
1605   struct data_in *data_in;
1606   string_offset = sizeof (*header) + header->main_size;
1607 
1608   lto_input_block ib (data + sizeof (*header), header->main_size, NULL);
1609   data_in = lto_data_in_create (file_data, data + string_offset,
1610 				header->string_size, vNULL);
1611   bitpack_d bp = streamer_read_bitpack (&ib);
1612 
1613   table[VOIDmode] = VOIDmode;
1614   table[BLKmode] = BLKmode;
1615   unsigned int m;
1616   while ((m = bp_unpack_value (&bp, 8)) != VOIDmode)
1617     {
1618       enum mode_class mclass
1619 	= bp_unpack_enum (&bp, mode_class, MAX_MODE_CLASS);
1620       poly_uint16 size = bp_unpack_poly_value (&bp, 16);
1621       poly_uint16 prec = bp_unpack_poly_value (&bp, 16);
1622       machine_mode inner = (machine_mode) bp_unpack_value (&bp, 8);
1623       poly_uint16 nunits = bp_unpack_poly_value (&bp, 16);
1624       unsigned int ibit = 0, fbit = 0;
1625       unsigned int real_fmt_len = 0;
1626       const char *real_fmt_name = NULL;
1627       switch (mclass)
1628 	{
1629 	case MODE_FRACT:
1630 	case MODE_UFRACT:
1631 	case MODE_ACCUM:
1632 	case MODE_UACCUM:
1633 	  ibit = bp_unpack_value (&bp, 8);
1634 	  fbit = bp_unpack_value (&bp, 8);
1635 	  break;
1636 	case MODE_FLOAT:
1637 	case MODE_DECIMAL_FLOAT:
1638 	  real_fmt_name = bp_unpack_indexed_string (data_in, &bp,
1639 						    &real_fmt_len);
1640 	  break;
1641 	default:
1642 	  break;
1643 	}
      /* First search only the modes from GET_CLASS_NARROWEST_MODE upwards;
	 if no match is found, fall back to scanning all modes.  */
1646       int pass;
1647       for (pass = 0; pass < 2; pass++)
1648 	for (machine_mode mr = pass ? VOIDmode
1649 				    : GET_CLASS_NARROWEST_MODE (mclass);
1650 	     pass ? mr < MAX_MACHINE_MODE : mr != VOIDmode;
1651 	     pass ? mr = (machine_mode) (mr + 1)
1652 		  : mr = GET_MODE_WIDER_MODE (mr).else_void ())
1653 	  if (GET_MODE_CLASS (mr) != mclass
1654 	      || maybe_ne (GET_MODE_SIZE (mr), size)
1655 	      || maybe_ne (GET_MODE_PRECISION (mr), prec)
1656 	      || (inner == m
1657 		  ? GET_MODE_INNER (mr) != mr
1658 		  : GET_MODE_INNER (mr) != table[(int) inner])
1659 	      || GET_MODE_IBIT (mr) != ibit
1660 	      || GET_MODE_FBIT (mr) != fbit
1661 	      || maybe_ne (GET_MODE_NUNITS (mr), nunits))
1662 	    continue;
1663 	  else if ((mclass == MODE_FLOAT || mclass == MODE_DECIMAL_FLOAT)
1664 		   && strcmp (REAL_MODE_FORMAT (mr)->name, real_fmt_name) != 0)
1665 	    continue;
1666 	  else
1667 	    {
1668 	      table[m] = mr;
1669 	      pass = 2;
1670 	      break;
1671 	    }
1672       unsigned int mname_len;
1673       const char *mname = bp_unpack_indexed_string (data_in, &bp, &mname_len);
1674       if (pass == 2)
1675 	{
1676 	  switch (mclass)
1677 	    {
1678 	    case MODE_VECTOR_BOOL:
1679 	    case MODE_VECTOR_INT:
1680 	    case MODE_VECTOR_FLOAT:
1681 	    case MODE_VECTOR_FRACT:
1682 	    case MODE_VECTOR_UFRACT:
1683 	    case MODE_VECTOR_ACCUM:
1684 	    case MODE_VECTOR_UACCUM:
1685 	      /* For unsupported vector modes just use BLKmode,
1686 		 if the scalar mode is supported.  */
1687 	      if (table[(int) inner] != VOIDmode)
1688 		{
1689 		  table[m] = BLKmode;
1690 		  break;
1691 		}
1692 	      /* FALLTHRU */
1693 	    default:
1694 	      fatal_error (UNKNOWN_LOCATION, "unsupported mode %s\n", mname);
1695 	      break;
1696 	    }
1697 	}
1698     }
1699   lto_data_in_delete (data_in);
1700 
1701   lto_free_section_data (file_data, LTO_section_mode_table, NULL, data, len);
1702 }
1703 
1704 
1705 /* Initialization for the LTO reader.  */
1706 
1707 void
1708 lto_reader_init (void)
1709 {
1710   lto_streamer_init ();
1711   file_name_hash_table
1712     = new hash_table<freeing_string_slot_hasher> (37);
1713 }
1714 
1715 
1716 /* Create a new data_in object for FILE_DATA. STRINGS is the string
1717    table to use with LEN strings.  RESOLUTIONS is the vector of linker
1718    resolutions (NULL if not using a linker plugin).  */
1719 
1720 struct data_in *
1721 lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
1722 		    unsigned len,
1723 		    vec<ld_plugin_symbol_resolution_t> resolutions)
1724 {
1725   struct data_in *data_in = new (struct data_in);
1726   data_in->file_data = file_data;
1727   data_in->strings = strings;
1728   data_in->strings_len = len;
1729   data_in->globals_resolution = resolutions;
1730   data_in->reader_cache = streamer_tree_cache_create (false, false, true);
1731   return data_in;
1732 }
1733 
1734 
1735 /* Remove DATA_IN.  */
1736 
1737 void
1738 lto_data_in_delete (struct data_in *data_in)
1739 {
1740   data_in->globals_resolution.release ();
1741   streamer_tree_cache_delete (data_in->reader_cache);
1742   delete data_in;
1743 }
1744