1 /* Read the GIMPLE representation from a file stream.
2 
3    Copyright (C) 2009-2016 Free Software Foundation, Inc.
4    Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5    Re-implemented by Diego Novillo <dnovillo@google.com>
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "cfghooks.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "gimple-streamer.h"
35 #include "toplev.h"
36 #include "gimple-iterator.h"
37 #include "tree-cfg.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "tree-ssa.h"
41 #include "except.h"
42 #include "cgraph.h"
43 #include "cfgloop.h"
44 
45 
46 struct freeing_string_slot_hasher : string_slot_hasher
47 {
48   static inline void remove (value_type *);
49 };
50 
51 inline void
52 freeing_string_slot_hasher::remove (value_type *v)
53 {
54   free (v);
55 }
56 
57 /* The table to hold the file names.  */
58 static hash_table<freeing_string_slot_hasher> *file_name_hash_table;
59 
60 
61 /* Check that tag ACTUAL has one of the given values.  NTAGS is the
62    number of valid tag values to check.  */
63 
64 void
65 lto_tag_check_set (enum LTO_tags actual, int ntags, ...)
66 {
67   va_list ap;
68   int i;
69 
70   va_start (ap, ntags);
71   for (i = 0; i < ntags; i++)
72     if ((unsigned) actual == va_arg (ap, unsigned))
73       {
74 	va_end (ap);
75 	return;
76       }
77 
78   va_end (ap);
79   internal_error ("bytecode stream: unexpected tag %s", lto_tag_name (actual));
80 }
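
/* For example (an illustrative use only, mirroring checks made elsewhere
   in this file), a reader expecting either a cleanup or a try region
   header could verify the tag with
     lto_tag_check_set (tag, 2, LTO_ert_cleanup, LTO_ert_try);
   which returns silently on a match and reports an internal error
   otherwise.  */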
81 
82 
83 /* Read LENGTH bytes from input block IB into ADDR.  */
84 
85 void
86 lto_input_data_block (struct lto_input_block *ib, void *addr, size_t length)
87 {
88   size_t i;
89   unsigned char *const buffer = (unsigned char *const) addr;
90 
91   for (i = 0; i < length; i++)
92     buffer[i] = streamer_read_uchar (ib);
93 }
94 
95 
96 /* Lookup STRING in file_name_hash_table.  If found, return the existing
97    string, otherwise insert STRING as the canonical version.  */
98 
99 static const char *
100 canon_file_name (const char *string)
101 {
102   string_slot **slot;
103   struct string_slot s_slot;
104   size_t len = strlen (string);
105 
106   s_slot.s = string;
107   s_slot.len = len;
108 
109   slot = file_name_hash_table->find_slot (&s_slot, INSERT);
110   if (*slot == NULL)
111     {
112       char *saved_string;
113       struct string_slot *new_slot;
114 
115       saved_string = (char *) xmalloc (len + 1);
116       new_slot = XCNEW (struct string_slot);
117       memcpy (saved_string, string, len + 1);
118       new_slot->s = saved_string;
119       new_slot->len = len;
120       *slot = new_slot;
121       return saved_string;
122     }
123   else
124     {
125       struct string_slot *old_slot = *slot;
126       return old_slot->s;
127     }
128 }
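
/* Note: because canon_file_name always returns the same pointer for a
   given file name, the location cache below can compare file names by
   pointer identity (see cmp_loc and input_location) rather than calling
   strcmp for every streamed location.  */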
129 
130 /* Pointer to currently alive instance of lto_location_cache.  */
131 
132 lto_location_cache *lto_location_cache::current_cache;
133 
134 /* Sort locations in source order. Start with file from last application.  */
135 
136 int
137 lto_location_cache::cmp_loc (const void *pa, const void *pb)
138 {
139   const cached_location *a = ((const cached_location *)pa);
140   const cached_location *b = ((const cached_location *)pb);
141   const char *current_file = current_cache->current_file;
142   int current_line = current_cache->current_line;
143 
144   if (a->file == current_file && b->file != current_file)
145     return -1;
146   if (a->file != current_file && b->file == current_file)
147     return 1;
148   if (a->file == current_file && b->file == current_file)
149     {
150       if (a->line == current_line && b->line != current_line)
151 	return -1;
152       if (a->line != current_line && b->line == current_line)
153 	return 1;
154     }
155   if (a->file != b->file)
156     return strcmp (a->file, b->file);
157   if (a->sysp != b->sysp)
158     return a->sysp ? 1 : -1;
159   if (a->line != b->line)
160     return a->line - b->line;
161   return a->col - b->col;
162 }
163 
164 /* Apply all changes in location cache.  Add locations into linemap and patch
165    trees.  */
166 
167 bool
168 lto_location_cache::apply_location_cache ()
169 {
170   static const char *prev_file;
171   if (!loc_cache.length ())
172     return false;
173   if (loc_cache.length () > 1)
174     loc_cache.qsort (cmp_loc);
175 
176   for (unsigned int i = 0; i < loc_cache.length (); i++)
177     {
178       struct cached_location loc = loc_cache[i];
179 
180       if (current_file != loc.file)
181 	linemap_add (line_table, prev_file ? LC_RENAME : LC_ENTER,
182 		     loc.sysp, loc.file, loc.line);
183       else if (current_line != loc.line)
184 	{
185 	  int max = loc.col;
186 
187 	  for (unsigned int j = i + 1; j < loc_cache.length (); j++)
188 	    if (loc.file != loc_cache[j].file
189 		|| loc.line != loc_cache[j].line)
190 	      break;
191 	    else if (max < loc_cache[j].col)
192 	      max = loc_cache[j].col;
193 	  linemap_line_start (line_table, loc.line, max + 1);
194 	}
195       gcc_assert (*loc.loc == BUILTINS_LOCATION + 1);
196       if (current_file == loc.file && current_line == loc.line
197 	  && current_col == loc.col)
198 	*loc.loc = current_loc;
199       else
200         current_loc = *loc.loc = linemap_position_for_column (line_table,
201 							      loc.col);
202       current_line = loc.line;
203       prev_file = current_file = loc.file;
204       current_col = loc.col;
205     }
206   loc_cache.truncate (0);
207   accepted_length = 0;
208   return true;
209 }
210 
211 /* Tree merging did not succeed; mark all changes in the cache as accepted.  */
212 
213 void
214 lto_location_cache::accept_location_cache ()
215 {
216   gcc_assert (current_cache == this);
217   accepted_length = loc_cache.length ();
218 }
219 
220 /* Tree merging did succeed; throw away recent changes.  */
221 
222 void
223 lto_location_cache::revert_location_cache ()
224 {
225   loc_cache.truncate (accepted_length);
226 }
227 
228 /* Read a location bitpack from input block IB and either update *LOC directly
229    or add it to the location cache.
230    It is necessary to call apply_location_cache to get *LOC updated.  */
231 
232 void
233 lto_location_cache::input_location (location_t *loc, struct bitpack_d *bp,
234 				    struct data_in *data_in)
235 {
236   static const char *stream_file;
237   static int stream_line;
238   static int stream_col;
239   static bool stream_sysp;
240   bool file_change, line_change, column_change;
241 
242   gcc_assert (current_cache == this);
243 
244   *loc = bp_unpack_int_in_range (bp, "location", 0, RESERVED_LOCATION_COUNT);
245 
246   if (*loc < RESERVED_LOCATION_COUNT)
247     return;
248 
249   /* Keep value RESERVED_LOCATION_COUNT in *loc as linemap lookups will
250      ICE on it.  */
251 
252   file_change = bp_unpack_value (bp, 1);
253   line_change = bp_unpack_value (bp, 1);
254   column_change = bp_unpack_value (bp, 1);
255 
256   if (file_change)
257     {
258       stream_file = canon_file_name (bp_unpack_string (data_in, bp));
259       stream_sysp = bp_unpack_value (bp, 1);
260     }
261 
262   if (line_change)
263     stream_line = bp_unpack_var_len_unsigned (bp);
264 
265   if (column_change)
266     stream_col = bp_unpack_var_len_unsigned (bp);
267 
268   /* This optimization saves location cache operations during gimple
269      streaming.  */
270 
271   if (current_file == stream_file && current_line == stream_line
272       && current_col == stream_col && current_sysp == stream_sysp)
273     {
274       *loc = current_loc;
275       return;
276     }
277 
278   struct cached_location entry
279     = {stream_file, loc, stream_line, stream_col, stream_sysp};
280   loc_cache.safe_push (entry);
281 }
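
/* An informal sketch of the location bitpack consumed above (not a
   normative format description):

     value in [0, RESERVED_LOCATION_COUNT]   reserved location or marker
     file_change:1  line_change:1  column_change:1
     [ file name string, sysp:1 ]            only if file_change
     [ line ]                                only if line_change
     [ column ]                              only if column_change  */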
282 
283 /* Read a location bitpack from input block IB and either update *LOC directly
284    or add it to the location cache.
285    It is necessary to call apply_location_cache to get *LOC updated.  */
286 
287 void
288 lto_input_location (location_t *loc, struct bitpack_d *bp,
289 		    struct data_in *data_in)
290 {
291   data_in->location_cache.input_location (loc, bp, data_in);
292 }
293 
294 /* Read location and return it instead of going through location caching.
295    This should be used only when the resulting location is not going to be
296    discarded.  */
297 
298 location_t
299 stream_input_location_now (struct bitpack_d *bp, struct data_in *data_in)
300 {
301   location_t loc;
302   stream_input_location (&loc, bp, data_in);
303   data_in->location_cache.apply_location_cache ();
304   return loc;
305 }
306 
307 /* Read a reference to a tree node from DATA_IN using input block IB.
308    TAG is the expected node that should be found in IB.  If TAG belongs
309    to one of the indexable trees, expect to read a reference index to
310    be looked up in one of the symbol tables, otherwise read the physical
311    representation of the tree using stream_read_tree.  FN is the
312    function scope for the read tree.  */
313 
314 tree
315 lto_input_tree_ref (struct lto_input_block *ib, struct data_in *data_in,
316 		    struct function *fn, enum LTO_tags tag)
317 {
318   unsigned HOST_WIDE_INT ix_u;
319   tree result = NULL_TREE;
320 
321   lto_tag_check_range (tag, LTO_field_decl_ref, LTO_namelist_decl_ref);
322 
323   switch (tag)
324     {
325     case LTO_type_ref:
326       ix_u = streamer_read_uhwi (ib);
327       result = lto_file_decl_data_get_type (data_in->file_data, ix_u);
328       break;
329 
330     case LTO_ssa_name_ref:
331       ix_u = streamer_read_uhwi (ib);
332       result = (*SSANAMES (fn))[ix_u];
333       break;
334 
335     case LTO_field_decl_ref:
336       ix_u = streamer_read_uhwi (ib);
337       result = lto_file_decl_data_get_field_decl (data_in->file_data, ix_u);
338       break;
339 
340     case LTO_function_decl_ref:
341       ix_u = streamer_read_uhwi (ib);
342       result = lto_file_decl_data_get_fn_decl (data_in->file_data, ix_u);
343       break;
344 
345     case LTO_type_decl_ref:
346       ix_u = streamer_read_uhwi (ib);
347       result = lto_file_decl_data_get_type_decl (data_in->file_data, ix_u);
348       break;
349 
350     case LTO_namespace_decl_ref:
351       ix_u = streamer_read_uhwi (ib);
352       result = lto_file_decl_data_get_namespace_decl (data_in->file_data, ix_u);
353       break;
354 
355     case LTO_global_decl_ref:
356     case LTO_result_decl_ref:
357     case LTO_const_decl_ref:
358     case LTO_imported_decl_ref:
359     case LTO_label_decl_ref:
360     case LTO_translation_unit_decl_ref:
361     case LTO_namelist_decl_ref:
362       ix_u = streamer_read_uhwi (ib);
363       result = lto_file_decl_data_get_var_decl (data_in->file_data, ix_u);
364       break;
365 
366     default:
367       gcc_unreachable ();
368     }
369 
370   gcc_assert (result);
371 
372   return result;
373 }
374 
375 
376 /* Read and return a doubly-linked list of catch handlers from input
377    block IB, using descriptors in DATA_IN.  */
378 
379 static struct eh_catch_d *
380 lto_input_eh_catch_list (struct lto_input_block *ib, struct data_in *data_in,
381 			 eh_catch *last_p)
382 {
383   eh_catch first;
384   enum LTO_tags tag;
385 
386   *last_p = first = NULL;
387   tag = streamer_read_record_start (ib);
388   while (tag)
389     {
390       tree list;
391       eh_catch n;
392 
393       lto_tag_check_range (tag, LTO_eh_catch, LTO_eh_catch);
394 
395       /* Read the catch node.  */
396       n = ggc_cleared_alloc<eh_catch_d> ();
397       n->type_list = stream_read_tree (ib, data_in);
398       n->filter_list = stream_read_tree (ib, data_in);
399       n->label = stream_read_tree (ib, data_in);
400 
401       /* Register all the types in N->FILTER_LIST.  */
402       for (list = n->filter_list; list; list = TREE_CHAIN (list))
403 	add_type_for_runtime (TREE_VALUE (list));
404 
405       /* Chain N to the end of the list.  */
406       if (*last_p)
407 	(*last_p)->next_catch = n;
408       n->prev_catch = *last_p;
409       *last_p = n;
410 
411       /* Set the head of the list the first time through the loop.  */
412       if (first == NULL)
413 	first = n;
414 
415       tag = streamer_read_record_start (ib);
416     }
417 
418   return first;
419 }
420 
421 
422 /* Read and return EH region IX from input block IB, using descriptors
423    in DATA_IN.  */
424 
425 static eh_region
426 input_eh_region (struct lto_input_block *ib, struct data_in *data_in, int ix)
427 {
428   enum LTO_tags tag;
429   eh_region r;
430 
431   /* Read the region header.  */
432   tag = streamer_read_record_start (ib);
433   if (tag == LTO_null)
434     return NULL;
435 
436   r = ggc_cleared_alloc<eh_region_d> ();
437   r->index = streamer_read_hwi (ib);
438 
439   gcc_assert (r->index == ix);
440 
441   /* Read all the region pointers as region numbers.  We'll fix up
442      the pointers once the whole array has been read.  */
443   r->outer = (eh_region) (intptr_t) streamer_read_hwi (ib);
444   r->inner = (eh_region) (intptr_t) streamer_read_hwi (ib);
445   r->next_peer = (eh_region) (intptr_t) streamer_read_hwi (ib);
446 
447   switch (tag)
448     {
449       case LTO_ert_cleanup:
450 	r->type = ERT_CLEANUP;
451 	break;
452 
453       case LTO_ert_try:
454 	{
455 	  struct eh_catch_d *last_catch;
456 	  r->type = ERT_TRY;
457 	  r->u.eh_try.first_catch = lto_input_eh_catch_list (ib, data_in,
458 							     &last_catch);
459 	  r->u.eh_try.last_catch = last_catch;
460 	  break;
461 	}
462 
463       case LTO_ert_allowed_exceptions:
464 	{
465 	  tree l;
466 
467 	  r->type = ERT_ALLOWED_EXCEPTIONS;
468 	  r->u.allowed.type_list = stream_read_tree (ib, data_in);
469 	  r->u.allowed.label = stream_read_tree (ib, data_in);
470 	  r->u.allowed.filter = streamer_read_uhwi (ib);
471 
472 	  for (l = r->u.allowed.type_list; l ; l = TREE_CHAIN (l))
473 	    add_type_for_runtime (TREE_VALUE (l));
474 	}
475 	break;
476 
477       case LTO_ert_must_not_throw:
478 	{
479 	  r->type = ERT_MUST_NOT_THROW;
480 	  r->u.must_not_throw.failure_decl = stream_read_tree (ib, data_in);
481 	  bitpack_d bp = streamer_read_bitpack (ib);
482 	  r->u.must_not_throw.failure_loc
483 	   = stream_input_location_now (&bp, data_in);
484 	}
485 	break;
486 
487       default:
488 	gcc_unreachable ();
489     }
490 
491   r->landing_pads = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
492 
493   return r;
494 }
495 
496 
497 /* Read and return EH landing pad IX from input block IB, using descriptors
498    in DATA_IN.  */
499 
500 static eh_landing_pad
501 input_eh_lp (struct lto_input_block *ib, struct data_in *data_in, int ix)
502 {
503   enum LTO_tags tag;
504   eh_landing_pad lp;
505 
506   /* Read the landing pad header.  */
507   tag = streamer_read_record_start (ib);
508   if (tag == LTO_null)
509     return NULL;
510 
511   lto_tag_check_range (tag, LTO_eh_landing_pad, LTO_eh_landing_pad);
512 
513   lp = ggc_cleared_alloc<eh_landing_pad_d> ();
514   lp->index = streamer_read_hwi (ib);
515   gcc_assert (lp->index == ix);
516   lp->next_lp = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
517   lp->region = (eh_region) (intptr_t) streamer_read_hwi (ib);
518   lp->post_landing_pad = stream_read_tree (ib, data_in);
519 
520   return lp;
521 }
522 
523 
524 /* After reading the EH regions, pointers to peer and children regions
525    are region numbers.  This converts all these region numbers into
526    real pointers into the rematerialized regions for FN.  ROOT_REGION
527    is the region number for the root EH region in FN.  */
528 
529 static void
530 fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region)
531 {
532   unsigned i;
533   vec<eh_region, va_gc> *eh_array = fn->eh->region_array;
534   vec<eh_landing_pad, va_gc> *lp_array = fn->eh->lp_array;
535   eh_region r;
536   eh_landing_pad lp;
537 
538   gcc_assert (eh_array && lp_array);
539 
540   gcc_assert (root_region >= 0);
541   fn->eh->region_tree = (*eh_array)[root_region];
542 
543 #define FIXUP_EH_REGION(r) (r) = (*eh_array)[(HOST_WIDE_INT) (intptr_t) (r)]
544 #define FIXUP_EH_LP(p) (p) = (*lp_array)[(HOST_WIDE_INT) (intptr_t) (p)]
545 
546   /* Convert all the index numbers stored in pointer fields into
547      pointers to the corresponding slots in the EH region array.  */
548   FOR_EACH_VEC_ELT (*eh_array, i, r)
549     {
550       /* The array may contain NULL regions.  */
551       if (r == NULL)
552 	continue;
553 
554       gcc_assert (i == (unsigned) r->index);
555       FIXUP_EH_REGION (r->outer);
556       FIXUP_EH_REGION (r->inner);
557       FIXUP_EH_REGION (r->next_peer);
558       FIXUP_EH_LP (r->landing_pads);
559     }
560 
561   /* Convert all the index numbers stored in pointer fields into
562      pointers to the corresponding slots in the EH landing pad array.  */
563   FOR_EACH_VEC_ELT (*lp_array, i, lp)
564     {
565       /* The array may contain NULL landing pads.  */
566       if (lp == NULL)
567 	continue;
568 
569       gcc_assert (i == (unsigned) lp->index);
570       FIXUP_EH_LP (lp->next_lp);
571       FIXUP_EH_REGION (lp->region);
572     }
573 
574 #undef FIXUP_EH_REGION
575 #undef FIXUP_EH_LP
576 }
577 
578 
579 /* Initialize EH support.  */
580 
581 void
582 lto_init_eh (void)
583 {
584   static bool eh_initialized_p = false;
585 
586   if (eh_initialized_p)
587     return;
588 
589   /* Contrary to most other FEs, we only initialize EH support when at
590      least one of the files in the set contains exception regions in
591      it.  Since this happens much later than the call to init_eh in
592      lang_dependent_init, we have to set flag_exceptions and call
593      init_eh again to initialize the EH tables.  */
594   flag_exceptions = 1;
595   init_eh ();
596 
597   eh_initialized_p = true;
598 }
599 
600 
601 /* Read the exception table for FN from IB using the data descriptors
602    in DATA_IN.  */
603 
604 static void
605 input_eh_regions (struct lto_input_block *ib, struct data_in *data_in,
606 		  struct function *fn)
607 {
608   HOST_WIDE_INT i, root_region, len;
609   enum LTO_tags tag;
610 
611   tag = streamer_read_record_start (ib);
612   if (tag == LTO_null)
613     return;
614 
615   lto_tag_check_range (tag, LTO_eh_table, LTO_eh_table);
616 
617   /* If the file contains EH regions, then it was compiled with
618      -fexceptions.  In that case, initialize the backend EH
619      machinery.  */
620   lto_init_eh ();
621 
622   gcc_assert (fn->eh);
623 
624   root_region = streamer_read_hwi (ib);
625   gcc_assert (root_region == (int) root_region);
626 
627   /* Read the EH region array.  */
628   len = streamer_read_hwi (ib);
629   gcc_assert (len == (int) len);
630   if (len > 0)
631     {
632       vec_safe_grow_cleared (fn->eh->region_array, len);
633       for (i = 0; i < len; i++)
634 	{
635 	  eh_region r = input_eh_region (ib, data_in, i);
636 	  (*fn->eh->region_array)[i] = r;
637 	}
638     }
639 
640   /* Read the landing pads.  */
641   len = streamer_read_hwi (ib);
642   gcc_assert (len == (int) len);
643   if (len > 0)
644     {
645       vec_safe_grow_cleared (fn->eh->lp_array, len);
646       for (i = 0; i < len; i++)
647 	{
648 	  eh_landing_pad lp = input_eh_lp (ib, data_in, i);
649 	  (*fn->eh->lp_array)[i] = lp;
650 	}
651     }
652 
653   /* Read the runtime type data.  */
654   len = streamer_read_hwi (ib);
655   gcc_assert (len == (int) len);
656   if (len > 0)
657     {
658       vec_safe_grow_cleared (fn->eh->ttype_data, len);
659       for (i = 0; i < len; i++)
660 	{
661 	  tree ttype = stream_read_tree (ib, data_in);
662 	  (*fn->eh->ttype_data)[i] = ttype;
663 	}
664     }
665 
666   /* Read the table of action chains.  */
667   len = streamer_read_hwi (ib);
668   gcc_assert (len == (int) len);
669   if (len > 0)
670     {
671       if (targetm.arm_eabi_unwinder)
672 	{
673 	  vec_safe_grow_cleared (fn->eh->ehspec_data.arm_eabi, len);
674 	  for (i = 0; i < len; i++)
675 	    {
676 	      tree t = stream_read_tree (ib, data_in);
677 	      (*fn->eh->ehspec_data.arm_eabi)[i] = t;
678 	    }
679 	}
680       else
681 	{
682 	  vec_safe_grow_cleared (fn->eh->ehspec_data.other, len);
683 	  for (i = 0; i < len; i++)
684 	    {
685 	      uchar c = streamer_read_uchar (ib);
686 	      (*fn->eh->ehspec_data.other)[i] = c;
687 	    }
688 	}
689     }
690 
691   /* Reconstruct the EH region tree by fixing up the peer/children
692      pointers.  */
693   fixup_eh_region_pointers (fn, root_region);
694 
695   tag = streamer_read_record_start (ib);
696   lto_tag_check_range (tag, LTO_null, LTO_null);
697 }
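
/* Informally, the EH stream consumed above looks like (illustrative
   sketch): the LTO_eh_table tag, the root region index, the region
   array (length followed by that many region records), the landing pad
   array, the runtime type table, the action-chain table and a
   terminating LTO_null tag.  */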
698 
699 
700 /* Make a new basic block with index INDEX in function FN.  */
701 
702 static basic_block
703 make_new_block (struct function *fn, unsigned int index)
704 {
705   basic_block bb = alloc_block ();
706   bb->index = index;
707   SET_BASIC_BLOCK_FOR_FN (fn, index, bb);
708   n_basic_blocks_for_fn (fn)++;
709   return bb;
710 }
711 
712 
713 /* Read a wide-int.  */
714 
715 static widest_int
716 streamer_read_wi (struct lto_input_block *ib)
717 {
718   HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];
719   int i;
720   int prec ATTRIBUTE_UNUSED = streamer_read_uhwi (ib);
721   int len = streamer_read_uhwi (ib);
722   for (i = 0; i < len; i++)
723     a[i] = streamer_read_hwi (ib);
724   return widest_int::from_array (a, len);
725 }
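
/* The encoding consumed above is simply (illustrative): the precision
   (currently ignored on input), the number of significant
   HOST_WIDE_INT elements, and then that many elements in order.  */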
726 
727 
728 /* Read the CFG for function FN from input block IB.  */
729 
730 static void
731 input_cfg (struct lto_input_block *ib, struct data_in *data_in,
732 	   struct function *fn,
733 	   int count_materialization_scale)
734 {
735   unsigned int bb_count;
736   basic_block p_bb;
737   unsigned int i;
738   int index;
739 
740   init_empty_tree_cfg_for_function (fn);
741   init_ssa_operands (fn);
742 
743   profile_status_for_fn (fn) = streamer_read_enum (ib, profile_status_d,
744 						   PROFILE_LAST);
745 
746   bb_count = streamer_read_uhwi (ib);
747 
748   last_basic_block_for_fn (fn) = bb_count;
749   if (bb_count > basic_block_info_for_fn (fn)->length ())
750     vec_safe_grow_cleared (basic_block_info_for_fn (fn), bb_count);
751 
752   if (bb_count > label_to_block_map_for_fn (fn)->length ())
753     vec_safe_grow_cleared (label_to_block_map_for_fn (fn), bb_count);
754 
755   index = streamer_read_hwi (ib);
756   while (index != -1)
757     {
758       basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
759       unsigned int edge_count;
760 
761       if (bb == NULL)
762 	bb = make_new_block (fn, index);
763 
764       edge_count = streamer_read_uhwi (ib);
765 
766       /* Connect up the CFG.  */
767       for (i = 0; i < edge_count; i++)
768 	{
769 	  unsigned int dest_index;
770 	  unsigned int edge_flags;
771 	  basic_block dest;
772 	  int probability;
773 	  gcov_type count;
774 	  edge e;
775 
776 	  dest_index = streamer_read_uhwi (ib);
777 	  probability = (int) streamer_read_hwi (ib);
778 	  count = apply_scale ((gcov_type) streamer_read_gcov_count (ib),
779                                count_materialization_scale);
780 	  edge_flags = streamer_read_uhwi (ib);
781 
782 	  dest = BASIC_BLOCK_FOR_FN (fn, dest_index);
783 
784 	  if (dest == NULL)
785 	    dest = make_new_block (fn, dest_index);
786 
787 	  e = make_edge (bb, dest, edge_flags);
788 	  e->probability = probability;
789 	  e->count = count;
790 	}
791 
792       index = streamer_read_hwi (ib);
793     }
794 
795   p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
796   index = streamer_read_hwi (ib);
797   while (index != -1)
798     {
799       basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
800       bb->prev_bb = p_bb;
801       p_bb->next_bb = bb;
802       p_bb = bb;
803       index = streamer_read_hwi (ib);
804     }
805 
806   /* ???  The cfgloop interface is tied to cfun.  */
807   gcc_assert (cfun == fn);
808 
809   /* Input the loop tree.  */
810   unsigned n_loops = streamer_read_uhwi (ib);
811   if (n_loops == 0)
812     return;
813 
814   struct loops *loops = ggc_cleared_alloc<struct loops> ();
815   init_loops_structure (fn, loops, n_loops);
816   set_loops_for_fn (fn, loops);
817 
818   /* Input each loop and associate it with its loop header so
819      flow_loops_find can rebuild the loop tree.  */
820   for (unsigned i = 1; i < n_loops; ++i)
821     {
822       int header_index = streamer_read_hwi (ib);
823       if (header_index == -1)
824 	{
825 	  loops->larray->quick_push (NULL);
826 	  continue;
827 	}
828 
829       struct loop *loop = alloc_loop ();
830       loop->header = BASIC_BLOCK_FOR_FN (fn, header_index);
831       loop->header->loop_father = loop;
832 
833       /* Read everything copy_loop_info copies.  */
834       loop->estimate_state = streamer_read_enum (ib, loop_estimation, EST_LAST);
835       loop->any_upper_bound = streamer_read_hwi (ib);
836       if (loop->any_upper_bound)
837 	loop->nb_iterations_upper_bound = streamer_read_wi (ib);
838       loop->any_estimate = streamer_read_hwi (ib);
839       if (loop->any_estimate)
840 	loop->nb_iterations_estimate = streamer_read_wi (ib);
841 
842       /* Read OMP SIMD related info.  */
843       loop->safelen = streamer_read_hwi (ib);
844       loop->dont_vectorize = streamer_read_hwi (ib);
845       loop->force_vectorize = streamer_read_hwi (ib);
846       loop->simduid = stream_read_tree (ib, data_in);
847 
848       place_new_loop (fn, loop);
849 
850       /* flow_loops_find doesn't like loops not in the tree, hook them
851          all as siblings of the tree root temporarily.  */
852       flow_loop_tree_node_add (loops->tree_root, loop);
853     }
854 
855   /* Rebuild the loop tree.  */
856   flow_loops_find (loops);
857 }
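
/* As a rough summary of what input_cfg consumes (an illustrative
   sketch; the matching output_cfg writer is authoritative): the
   profile status, the basic block count, a -1 terminated list of
   blocks each followed by its outgoing edges (destination index,
   probability, count, flags), a -1 terminated list describing the
   next_bb chain, and finally the loop tree as a loop count plus
   per-loop header index and copy_loop_info data.  */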
858 
859 
860 /* Read the SSA names array for function FN from DATA_IN using input
861    block IB.  */
862 
863 static void
864 input_ssa_names (struct lto_input_block *ib, struct data_in *data_in,
865 		 struct function *fn)
866 {
867   unsigned int i, size;
868 
869   size = streamer_read_uhwi (ib);
870   init_ssanames (fn, size);
871 
872   i = streamer_read_uhwi (ib);
873   while (i)
874     {
875       tree ssa_name, name;
876       bool is_default_def;
877 
878       /* Skip over the elements that had been freed.  */
879       while (SSANAMES (fn)->length () < i)
880 	SSANAMES (fn)->quick_push (NULL_TREE);
881 
882       is_default_def = (streamer_read_uchar (ib) != 0);
883       name = stream_read_tree (ib, data_in);
884       ssa_name = make_ssa_name_fn (fn, name, gimple_build_nop ());
885 
886       if (is_default_def)
887 	set_ssa_default_def (cfun, SSA_NAME_VAR (ssa_name), ssa_name);
888 
889       i = streamer_read_uhwi (ib);
890     }
891 }
892 
893 
894 /* Go through all NODE edges and fixup call_stmt pointers
895    so they point to STMTS.  */
896 
897 static void
898 fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
899 			 struct function *fn)
900 {
901 #define STMT_UID_NOT_IN_RANGE(uid) \
902   (gimple_stmt_max_uid (fn) < uid || uid == 0)
903 
904   struct cgraph_edge *cedge;
905   struct ipa_ref *ref = NULL;
906   unsigned int i;
907 
908   for (cedge = node->callees; cedge; cedge = cedge->next_callee)
909     {
910       if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
911         fatal_error (input_location,
912 		     "Cgraph edge statement index out of range");
913       cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
914       if (!cedge->call_stmt)
915         fatal_error (input_location,
916 		     "Cgraph edge statement index not found");
917     }
918   for (cedge = node->indirect_calls; cedge; cedge = cedge->next_callee)
919     {
920       if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
921         fatal_error (input_location,
922 		     "Cgraph edge statement index out of range");
923       cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
924       if (!cedge->call_stmt)
925         fatal_error (input_location, "Cgraph edge statement index not found");
926     }
927   for (i = 0; node->iterate_reference (i, ref); i++)
928     if (ref->lto_stmt_uid)
929       {
930 	if (STMT_UID_NOT_IN_RANGE (ref->lto_stmt_uid))
931 	  fatal_error (input_location,
932 		       "Reference statement index out of range");
933 	ref->stmt = stmts[ref->lto_stmt_uid - 1];
934 	if (!ref->stmt)
935 	  fatal_error (input_location, "Reference statement index not found");
936       }
937 }
938 
939 
940 /* Fixup call_stmt pointers in NODE and all clones.  */
941 
942 static void
943 fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
944 {
945   struct cgraph_node *node;
946   struct function *fn;
947 
948   while (orig->clone_of)
949     orig = orig->clone_of;
950   fn = DECL_STRUCT_FUNCTION (orig->decl);
951 
952   if (!orig->thunk.thunk_p)
953     fixup_call_stmt_edges_1 (orig, stmts, fn);
954   if (orig->clones)
955     for (node = orig->clones; node != orig;)
956       {
957 	fixup_call_stmt_edges_1 (node, stmts, fn);
958 	if (node->clones)
959 	  node = node->clones;
960 	else if (node->next_sibling_clone)
961 	  node = node->next_sibling_clone;
962 	else
963 	  {
964 	    while (node != orig && !node->next_sibling_clone)
965 	      node = node->clone_of;
966 	    if (node != orig)
967 	      node = node->next_sibling_clone;
968 	  }
969       }
970 }
971 
972 
973 /* Input the base body of struct function FN from DATA_IN
974    using input block IB.  */
975 
976 static void
977 input_struct_function_base (struct function *fn, struct data_in *data_in,
978                             struct lto_input_block *ib)
979 {
980   struct bitpack_d bp;
981   int len;
982 
983   /* Read the static chain and non-local goto save area.  */
984   fn->static_chain_decl = stream_read_tree (ib, data_in);
985   fn->nonlocal_goto_save_area = stream_read_tree (ib, data_in);
986 
987   /* Read all the local symbols.  */
988   len = streamer_read_hwi (ib);
989   if (len > 0)
990     {
991       int i;
992       vec_safe_grow_cleared (fn->local_decls, len);
993       for (i = 0; i < len; i++)
994 	{
995 	  tree t = stream_read_tree (ib, data_in);
996 	  (*fn->local_decls)[i] = t;
997 	}
998     }
999 
1000   /* Input the current IL state of the function.  */
1001   fn->curr_properties = streamer_read_uhwi (ib);
1002 
1003   /* Read all the attributes for FN.  */
1004   bp = streamer_read_bitpack (ib);
1005   fn->is_thunk = bp_unpack_value (&bp, 1);
1006   fn->has_local_explicit_reg_vars = bp_unpack_value (&bp, 1);
1007   fn->returns_pcc_struct = bp_unpack_value (&bp, 1);
1008   fn->returns_struct = bp_unpack_value (&bp, 1);
1009   fn->can_throw_non_call_exceptions = bp_unpack_value (&bp, 1);
1010   fn->can_delete_dead_exceptions = bp_unpack_value (&bp, 1);
1011   fn->always_inline_functions_inlined = bp_unpack_value (&bp, 1);
1012   fn->after_inlining = bp_unpack_value (&bp, 1);
1013   fn->stdarg = bp_unpack_value (&bp, 1);
1014   fn->has_nonlocal_label = bp_unpack_value (&bp, 1);
1015   fn->has_forced_label_in_static = bp_unpack_value (&bp, 1);
1016   fn->calls_alloca = bp_unpack_value (&bp, 1);
1017   fn->calls_setjmp = bp_unpack_value (&bp, 1);
1018   fn->has_force_vectorize_loops = bp_unpack_value (&bp, 1);
1019   fn->has_simduid_loops = bp_unpack_value (&bp, 1);
1020   fn->va_list_fpr_size = bp_unpack_value (&bp, 8);
1021   fn->va_list_gpr_size = bp_unpack_value (&bp, 8);
1022   fn->last_clique = bp_unpack_value (&bp, sizeof (short) * 8);
1023 
1024   /* Input the function start and end loci.  */
1025   fn->function_start_locus = stream_input_location_now (&bp, data_in);
1026   fn->function_end_locus = stream_input_location_now (&bp, data_in);
1027 }
1028 
1029 
1030 /* Read the body of function FN_DECL from DATA_IN using input block IB.  */
1031 
1032 static void
1033 input_function (tree fn_decl, struct data_in *data_in,
1034 		struct lto_input_block *ib, struct lto_input_block *ib_cfg)
1035 {
1036   struct function *fn;
1037   enum LTO_tags tag;
1038   gimple **stmts;
1039   basic_block bb;
1040   struct cgraph_node *node;
1041 
1042   tag = streamer_read_record_start (ib);
1043   lto_tag_check (tag, LTO_function);
1044 
1045   /* Read decls for parameters and args.  */
1046   DECL_RESULT (fn_decl) = stream_read_tree (ib, data_in);
1047   DECL_ARGUMENTS (fn_decl) = streamer_read_chain (ib, data_in);
1048 
1049   /* Read the tree of lexical scopes for the function.  */
1050   DECL_INITIAL (fn_decl) = stream_read_tree (ib, data_in);
1051 
1052   if (!streamer_read_uhwi (ib))
1053     return;
1054 
1055   push_struct_function (fn_decl);
1056   fn = DECL_STRUCT_FUNCTION (fn_decl);
1057   init_tree_ssa (fn);
1058   /* We input IL in SSA form.  */
1059   cfun->gimple_df->in_ssa_p = true;
1060 
1061   gimple_register_cfg_hooks ();
1062 
1063   node = cgraph_node::get (fn_decl);
1064   if (!node)
1065     node = cgraph_node::create (fn_decl);
1066   input_struct_function_base (fn, data_in, ib);
1067   input_cfg (ib_cfg, data_in, fn, node->count_materialization_scale);
1068 
1069   /* Read all the SSA names.  */
1070   input_ssa_names (ib, data_in, fn);
1071 
1072   /* Read the exception handling regions in the function.  */
1073   input_eh_regions (ib, data_in, fn);
1074 
1075   gcc_assert (DECL_INITIAL (fn_decl));
1076   DECL_SAVED_TREE (fn_decl) = NULL_TREE;
1077 
1078   /* Read all the basic blocks.  */
1079   tag = streamer_read_record_start (ib);
1080   while (tag)
1081     {
1082       input_bb (ib, tag, data_in, fn,
1083 		node->count_materialization_scale);
1084       tag = streamer_read_record_start (ib);
1085     }
1086 
1087   /* Fix up the call statements that are mentioned in the callgraph
1088      edges.  */
1089   set_gimple_stmt_max_uid (cfun, 0);
1090   FOR_ALL_BB_FN (bb, cfun)
1091     {
1092       gimple_stmt_iterator gsi;
1093       for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1094 	{
1095 	  gimple *stmt = gsi_stmt (gsi);
1096 	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1097 	}
1098       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1099 	{
1100 	  gimple *stmt = gsi_stmt (gsi);
1101 	  gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1102 	}
1103     }
1104   stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
1105   FOR_ALL_BB_FN (bb, cfun)
1106     {
1107       gimple_stmt_iterator bsi = gsi_start_phis (bb);
1108       while (!gsi_end_p (bsi))
1109 	{
1110 	  gimple *stmt = gsi_stmt (bsi);
1111 	  gsi_next (&bsi);
1112 	  stmts[gimple_uid (stmt)] = stmt;
1113 	}
1114       bsi = gsi_start_bb (bb);
1115       while (!gsi_end_p (bsi))
1116 	{
1117 	  gimple *stmt = gsi_stmt (bsi);
1118 	  /* If we're recompiling LTO objects with debug stmts but
1119 	     we're not supposed to have debug stmts, remove them now.
1120 	     We can't remove them earlier because this would cause uid
1121 	     mismatches in fixups, but we can do it at this point, as
1122 	     long as debug stmts don't require fixups.  */
1123 	  if (!MAY_HAVE_DEBUG_STMTS && !flag_wpa && is_gimple_debug (stmt))
1124 	    {
1125 	      gimple_stmt_iterator gsi = bsi;
1126 	      gsi_next (&bsi);
1127 	      gsi_remove (&gsi, true);
1128 	    }
1129 	  else
1130 	    {
1131 	      gsi_next (&bsi);
1132 	      stmts[gimple_uid (stmt)] = stmt;
1133 	    }
1134 	}
1135     }
1136 
1137   /* Set the gimple body to the statement sequence in the entry
1138      basic block.  FIXME lto, this is fairly hacky.  The existence
1139      of a gimple body is used by the cgraph routines, but we should
1140      really use the presence of the CFG.  */
1141   {
1142     edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
1143     gimple_set_body (fn_decl, bb_seq (ei_edge (ei)->dest));
1144   }
1145 
1146   fixup_call_stmt_edges (node, stmts);
1147   execute_all_ipa_stmt_fixups (node, stmts);
1148 
1149   update_ssa (TODO_update_ssa_only_virtuals);
1150   free_dominance_info (CDI_DOMINATORS);
1151   free_dominance_info (CDI_POST_DOMINATORS);
1152   free (stmts);
1153   pop_cfun ();
1154 }
1155 
1156 /* Read the initializer of variable VAR from DATA_IN using input block IB.  */
1157 
1158 static void
1159 input_constructor (tree var, struct data_in *data_in,
1160 		   struct lto_input_block *ib)
1161 {
1162   DECL_INITIAL (var) = stream_read_tree (ib, data_in);
1163 }
1164 
1165 
1166 /* Read the body from DATA for function NODE and fill it in.
1167    FILE_DATA holds the global decls and types.  SECTION_TYPE is either
1168    LTO_section_function_body or LTO_section_static_initializer.  If
1169    SECTION_TYPE is LTO_section_function_body, NODE must be the symtab
1170    node for that function.  */
1171 
1172 static void
1173 lto_read_body_or_constructor (struct lto_file_decl_data *file_data, struct symtab_node *node,
1174 			      const char *data, enum lto_section_type section_type)
1175 {
1176   const struct lto_function_header *header;
1177   struct data_in *data_in;
1178   int cfg_offset;
1179   int main_offset;
1180   int string_offset;
1181   tree fn_decl = node->decl;
1182 
1183   header = (const struct lto_function_header *) data;
1184   if (TREE_CODE (node->decl) == FUNCTION_DECL)
1185     {
1186       cfg_offset = sizeof (struct lto_function_header);
1187       main_offset = cfg_offset + header->cfg_size;
1188       string_offset = main_offset + header->main_size;
1189     }
1190   else
1191     {
1192       main_offset = sizeof (struct lto_function_header);
1193       string_offset = main_offset + header->main_size;
1194     }
1195 
1196   data_in = lto_data_in_create (file_data, data + string_offset,
1197 			      header->string_size, vNULL);
1198 
1199   if (section_type == LTO_section_function_body)
1200     {
1201       struct lto_in_decl_state *decl_state;
1202       unsigned from;
1203 
1204       gcc_checking_assert (node);
1205 
1206       /* Use the function's decl state. */
1207       decl_state = lto_get_function_in_decl_state (file_data, fn_decl);
1208       gcc_assert (decl_state);
1209       file_data->current_decl_state = decl_state;
1210 
1211 
1212       /* Set up the struct function.  */
1213       from = data_in->reader_cache->nodes.length ();
1214       lto_input_block ib_main (data + main_offset, header->main_size,
1215 			       file_data->mode_table);
1216       if (TREE_CODE (node->decl) == FUNCTION_DECL)
1217 	{
1218 	  lto_input_block ib_cfg (data + cfg_offset, header->cfg_size,
1219 				  file_data->mode_table);
1220 	  input_function (fn_decl, data_in, &ib_main, &ib_cfg);
1221 	}
1222       else
1223         input_constructor (fn_decl, data_in, &ib_main);
1224       data_in->location_cache.apply_location_cache ();
1225       /* And fixup types we streamed locally.  */
1226 	{
1227 	  struct streamer_tree_cache_d *cache = data_in->reader_cache;
1228 	  unsigned len = cache->nodes.length ();
1229 	  unsigned i;
1230 	  for (i = len; i-- > from;)
1231 	    {
1232 	      tree t = streamer_tree_cache_get_tree (cache, i);
1233 	      if (t == NULL_TREE)
1234 		continue;
1235 
1236 	      if (TYPE_P (t))
1237 		{
1238 		  gcc_assert (TYPE_CANONICAL (t) == NULL_TREE);
1239 		  if (type_with_alias_set_p (t)
1240 		      && canonical_type_used_p (t))
1241 		    TYPE_CANONICAL (t) = TYPE_MAIN_VARIANT (t);
1242 		  if (TYPE_MAIN_VARIANT (t) != t)
1243 		    {
1244 		      gcc_assert (TYPE_NEXT_VARIANT (t) == NULL_TREE);
1245 		      TYPE_NEXT_VARIANT (t)
1246 			= TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t));
1247 		      TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t)) = t;
1248 		    }
1249 		}
1250 	    }
1251 	}
1252 
1253       /* Restore decl state */
1254       file_data->current_decl_state = file_data->global_decl_state;
1255     }
1256 
1257   lto_data_in_delete (data_in);
1258 }
1259 
1260 
1261 /* Read the body of NODE using DATA.  FILE_DATA holds the global
1262    decls and types.  */
1263 
1264 void
1265 lto_input_function_body (struct lto_file_decl_data *file_data,
1266 			 struct cgraph_node *node, const char *data)
1267 {
1268   lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1269 }
1270 
1271 /* Read the constructor of variable NODE using DATA.  FILE_DATA holds the global
1272    decls and types.  */
1273 
1274 void
1275 lto_input_variable_constructor (struct lto_file_decl_data *file_data,
1276 				struct varpool_node *node, const char *data)
1277 {
1278   lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1279 }
1280 
1281 
1282 /* Read the physical representation of a tree node EXPR from
1283    input block IB using the per-file context in DATA_IN.  */
1284 
1285 static void
1286 lto_read_tree_1 (struct lto_input_block *ib, struct data_in *data_in, tree expr)
1287 {
1288   /* Read all the bitfield values in EXPR.  Note that for LTO, we
1289      only write language-independent bitfields, so no more unpacking is
1290      needed.  */
1291   streamer_read_tree_bitfields (ib, data_in, expr);
1292 
1293   /* Read all the pointer fields in EXPR.  */
1294   streamer_read_tree_body (ib, data_in, expr);
1295 
1296   /* Read any LTO-specific data not read by the tree streamer.  */
1297   if (DECL_P (expr)
1298       && TREE_CODE (expr) != FUNCTION_DECL
1299       && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1300     DECL_INITIAL (expr) = stream_read_tree (ib, data_in);
1301 
1302   /* We should never try to instantiate an MD or NORMAL builtin here.  */
1303   if (TREE_CODE (expr) == FUNCTION_DECL)
1304     gcc_assert (!streamer_handle_as_builtin_p (expr));
1305 
1306 #ifdef LTO_STREAMER_DEBUG
1307   /* Remove the mapping to RESULT's original address set by
1308      streamer_alloc_tree.  */
1309   lto_orig_address_remove (expr);
1310 #endif
1311 }
1312 
1313 /* Read the physical representation of a tree node with tag TAG from
1314    input block IB using the per-file context in DATA_IN.  */
1315 
1316 static tree
1317 lto_read_tree (struct lto_input_block *ib, struct data_in *data_in,
1318 	       enum LTO_tags tag, hashval_t hash)
1319 {
1320   /* Instantiate a new tree node.  */
1321   tree result = streamer_alloc_tree (ib, data_in, tag);
1322 
1323   /* Enter RESULT in the reader cache.  This will make RESULT
1324      available so that circular references in the rest of the tree
1325      structure can be resolved in subsequent calls to stream_read_tree.  */
1326   streamer_tree_cache_append (data_in->reader_cache, result, hash);
1327 
1328   lto_read_tree_1 (ib, data_in, result);
1329 
1330   /* end_marker = */ streamer_read_uchar (ib);
1331 
1332   return result;
1333 }
1334 
1335 
1336 /* Populate the reader cache with trees materialized from the SCC
1337    following in the IB, DATA_IN stream.  */
1338 
1339 hashval_t
1340 lto_input_scc (struct lto_input_block *ib, struct data_in *data_in,
1341 	       unsigned *len, unsigned *entry_len)
1342 {
1343   /* A blob of unnamed tree nodes, fill the cache from it and
1344      recurse.  */
1345   unsigned size = streamer_read_uhwi (ib);
1346   hashval_t scc_hash = streamer_read_uhwi (ib);
1347   unsigned scc_entry_len = 1;
1348 
1349   if (size == 1)
1350     {
1351       enum LTO_tags tag = streamer_read_record_start (ib);
1352       lto_input_tree_1 (ib, data_in, tag, scc_hash);
1353     }
1354   else
1355     {
1356       unsigned int first = data_in->reader_cache->nodes.length ();
1357       tree result;
1358 
1359       scc_entry_len = streamer_read_uhwi (ib);
1360 
1361       /* Materialize size trees by reading their headers.  */
1362       for (unsigned i = 0; i < size; ++i)
1363 	{
1364 	  enum LTO_tags tag = streamer_read_record_start (ib);
1365 	  if (tag == LTO_null
1366 	      || (tag >= LTO_field_decl_ref && tag <= LTO_global_decl_ref)
1367 	      || tag == LTO_tree_pickle_reference
1368 	      || tag == LTO_builtin_decl
1369 	      || tag == LTO_integer_cst
1370 	      || tag == LTO_tree_scc)
1371 	    gcc_unreachable ();
1372 
1373 	  result = streamer_alloc_tree (ib, data_in, tag);
1374 	  streamer_tree_cache_append (data_in->reader_cache, result, 0);
1375 	}
1376 
1377       /* Read the tree bitpacks and references.  */
1378       for (unsigned i = 0; i < size; ++i)
1379 	{
1380 	  result = streamer_tree_cache_get_tree (data_in->reader_cache,
1381 						 first + i);
1382 	  lto_read_tree_1 (ib, data_in, result);
1383 	  /* end_marker = */ streamer_read_uchar (ib);
1384 	}
1385     }
1386 
1387   *len = size;
1388   *entry_len = scc_entry_len;
1389   return scc_hash;
1390 }
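
/* Informally, an LTO_tree_scc record as consumed above is (illustrative
   sketch): the SCC size, the SCC hash, and, when the size is greater
   than one, the SCC entry length followed by SIZE tree headers and then
   SIZE tree bodies (bitfields, pointer fields and an end marker each).
   A size of one reads the single entry in place.  */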
1391 
1392 
1393 /* Read a tree from input block IB using the per-file context in
1394    DATA_IN.  This context is used, for example, to resolve references
1395    to previously read nodes.  */
1396 
1397 tree
1398 lto_input_tree_1 (struct lto_input_block *ib, struct data_in *data_in,
1399 		  enum LTO_tags tag, hashval_t hash)
1400 {
1401   tree result;
1402 
1403   gcc_assert ((unsigned) tag < (unsigned) LTO_NUM_TAGS);
1404 
1405   if (tag == LTO_null)
1406     result = NULL_TREE;
1407   else if (tag >= LTO_field_decl_ref && tag <= LTO_namelist_decl_ref)
1408     {
1409       /* If TAG is a reference to an indexable tree, the next value
1410 	 in IB is the index into the table where we expect to find
1411 	 that tree.  */
1412       result = lto_input_tree_ref (ib, data_in, cfun, tag);
1413     }
1414   else if (tag == LTO_tree_pickle_reference)
1415     {
1416       /* If TAG is a reference to a previously read tree, look it up in
1417 	 the reader cache.  */
1418       result = streamer_get_pickled_tree (ib, data_in);
1419     }
1420   else if (tag == LTO_builtin_decl)
1421     {
1422       /* If we are going to read a built-in function, all we need is
1423 	 the code and class.  */
1424       result = streamer_get_builtin_tree (ib, data_in);
1425     }
1426   else if (tag == LTO_integer_cst)
1427     {
1428       /* For shared integer constants in singletons we can use the
1429          existing tree integer constant merging code.  */
1430       tree type = stream_read_tree (ib, data_in);
1431       unsigned HOST_WIDE_INT len = streamer_read_uhwi (ib);
1432       unsigned HOST_WIDE_INT i;
1433       HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];
1434 
1435       for (i = 0; i < len; i++)
1436 	a[i] = streamer_read_hwi (ib);
1437       gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
1438       result = wide_int_to_tree (type, wide_int::from_array
1439 				 (a, len, TYPE_PRECISION (type)));
1440       streamer_tree_cache_append (data_in->reader_cache, result, hash);
1441     }
1442   else if (tag == LTO_tree_scc)
1443     gcc_unreachable ();
1444   else
1445     {
1446       /* Otherwise, materialize a new node from IB.  */
1447       result = lto_read_tree (ib, data_in, tag, hash);
1448     }
1449 
1450   return result;
1451 }
1452 
1453 tree
1454 lto_input_tree (struct lto_input_block *ib, struct data_in *data_in)
1455 {
1456   enum LTO_tags tag;
1457 
1458   /* Input and skip SCCs.  */
1459   while ((tag = streamer_read_record_start (ib)) == LTO_tree_scc)
1460     {
1461       unsigned len, entry_len;
1462       lto_input_scc (ib, data_in, &len, &entry_len);
1463     }
1464   return lto_input_tree_1 (ib, data_in, tag, 0);
1465 }
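
/* Note that lto_input_tree skips over any leading LTO_tree_scc records:
   reading an SCC populates the reader cache as a side effect, and the
   record that follows the last SCC is the tree actually being
   requested.  */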
1466 
1467 
1468 /* Input toplevel asms.  */
1469 
1470 void
1471 lto_input_toplevel_asms (struct lto_file_decl_data *file_data, int order_base)
1472 {
1473   size_t len;
1474   const char *data = lto_get_section_data (file_data, LTO_section_asm,
1475 					   NULL, &len);
1476   const struct lto_simple_header_with_strings *header
1477     = (const struct lto_simple_header_with_strings *) data;
1478   int string_offset;
1479   struct data_in *data_in;
1480   tree str;
1481 
1482   if (! data)
1483     return;
1484 
1485   string_offset = sizeof (*header) + header->main_size;
1486 
1487   lto_input_block ib (data + sizeof (*header), header->main_size,
1488 		      file_data->mode_table);
1489 
1490   data_in = lto_data_in_create (file_data, data + string_offset,
1491 			      header->string_size, vNULL);
1492 
1493   while ((str = streamer_read_string_cst (data_in, &ib)))
1494     {
1495       asm_node *node = symtab->finalize_toplevel_asm (str);
1496       node->order = streamer_read_hwi (&ib) + order_base;
1497       if (node->order >= symtab->order)
1498 	symtab->order = node->order + 1;
1499     }
1500 
1501   lto_data_in_delete (data_in);
1502 
1503   lto_free_section_data (file_data, LTO_section_asm, NULL, data, len);
1504 }
1505 
1506 
1507 /* Input mode table.  */
1508 
1509 void
1510 lto_input_mode_table (struct lto_file_decl_data *file_data)
1511 {
1512   size_t len;
1513   const char *data = lto_get_section_data (file_data, LTO_section_mode_table,
1514 					   NULL, &len);
1515   if (! data)
1516     {
1517       internal_error ("cannot read LTO mode table from %s",
1518 		      file_data->file_name);
1519       return;
1520     }
1521 
1522   unsigned char *table = ggc_cleared_vec_alloc<unsigned char> (1 << 8);
1523   file_data->mode_table = table;
1524   const struct lto_simple_header_with_strings *header
1525     = (const struct lto_simple_header_with_strings *) data;
1526   int string_offset;
1527   struct data_in *data_in;
1528   string_offset = sizeof (*header) + header->main_size;
1529 
1530   lto_input_block ib (data + sizeof (*header), header->main_size, NULL);
1531   data_in = lto_data_in_create (file_data, data + string_offset,
1532 				header->string_size, vNULL);
1533   bitpack_d bp = streamer_read_bitpack (&ib);
1534 
1535   table[VOIDmode] = VOIDmode;
1536   table[BLKmode] = BLKmode;
1537   unsigned int m;
1538   while ((m = bp_unpack_value (&bp, 8)) != VOIDmode)
1539     {
1540       enum mode_class mclass
1541 	= bp_unpack_enum (&bp, mode_class, MAX_MODE_CLASS);
1542       unsigned int size = bp_unpack_value (&bp, 8);
1543       unsigned int prec = bp_unpack_value (&bp, 16);
1544       machine_mode inner = (machine_mode) bp_unpack_value (&bp, 8);
1545       unsigned int nunits = bp_unpack_value (&bp, 8);
1546       unsigned int ibit = 0, fbit = 0;
1547       unsigned int real_fmt_len = 0;
1548       const char *real_fmt_name = NULL;
1549       switch (mclass)
1550 	{
1551 	case MODE_FRACT:
1552 	case MODE_UFRACT:
1553 	case MODE_ACCUM:
1554 	case MODE_UACCUM:
1555 	  ibit = bp_unpack_value (&bp, 8);
1556 	  fbit = bp_unpack_value (&bp, 8);
1557 	  break;
1558 	case MODE_FLOAT:
1559 	case MODE_DECIMAL_FLOAT:
1560 	  real_fmt_name = bp_unpack_indexed_string (data_in, &bp,
1561 						    &real_fmt_len);
1562 	  break;
1563 	default:
1564 	  break;
1565 	}
1566       /* First search just the modes from GET_CLASS_NARROWEST_MODE upwards;
1567 	 if not found, fall back to scanning all modes.  */
1568       int pass;
1569       for (pass = 0; pass < 2; pass++)
1570 	for (machine_mode mr = pass ? VOIDmode
1571 				    : GET_CLASS_NARROWEST_MODE (mclass);
1572 	     pass ? mr < MAX_MACHINE_MODE : mr != VOIDmode;
1573 	     pass ? mr = (machine_mode) (mr + 1)
1574 		  : mr = GET_MODE_WIDER_MODE (mr))
1575 	  if (GET_MODE_CLASS (mr) != mclass
1576 	      || GET_MODE_SIZE (mr) != size
1577 	      || GET_MODE_PRECISION (mr) != prec
1578 	      || (inner == m
1579 		  ? GET_MODE_INNER (mr) != mr
1580 		  : GET_MODE_INNER (mr) != table[(int) inner])
1581 	      || GET_MODE_IBIT (mr) != ibit
1582 	      || GET_MODE_FBIT (mr) != fbit
1583 	      || GET_MODE_NUNITS (mr) != nunits)
1584 	    continue;
1585 	  else if ((mclass == MODE_FLOAT || mclass == MODE_DECIMAL_FLOAT)
1586 		   && strcmp (REAL_MODE_FORMAT (mr)->name, real_fmt_name) != 0)
1587 	    continue;
1588 	  else
1589 	    {
1590 	      table[m] = mr;
1591 	      pass = 2;
1592 	      break;
1593 	    }
1594       unsigned int mname_len;
1595       const char *mname = bp_unpack_indexed_string (data_in, &bp, &mname_len);
1596       if (pass == 2)
1597 	{
1598 	  switch (mclass)
1599 	    {
1600 	    case MODE_VECTOR_INT:
1601 	    case MODE_VECTOR_FLOAT:
1602 	    case MODE_VECTOR_FRACT:
1603 	    case MODE_VECTOR_UFRACT:
1604 	    case MODE_VECTOR_ACCUM:
1605 	    case MODE_VECTOR_UACCUM:
1606 	      /* For unsupported vector modes just use BLKmode,
1607 		 if the scalar mode is supported.  */
1608 	      if (table[(int) inner] != VOIDmode)
1609 		{
1610 		  table[m] = BLKmode;
1611 		  break;
1612 		}
1613 	      /* FALLTHRU */
1614 	    default:
1615 	      fatal_error (UNKNOWN_LOCATION, "unsupported mode %s\n", mname);
1616 	      break;
1617 	    }
1618 	}
1619     }
1620   lto_data_in_delete (data_in);
1621 
1622   lto_free_section_data (file_data, LTO_section_mode_table, NULL, data, len);
1623 }
1624 
1625 
1626 /* Initialization for the LTO reader.  */
1627 
1628 void
1629 lto_reader_init (void)
1630 {
1631   lto_streamer_init ();
1632   file_name_hash_table
1633     = new hash_table<freeing_string_slot_hasher> (37);
1634 }
1635 
1636 
1637 /* Create a new data_in object for FILE_DATA. STRINGS is the string
1638    table to use with LEN strings.  RESOLUTIONS is the vector of linker
1639    resolutions (NULL if not using a linker plugin).  */
1640 
1641 struct data_in *
1642 lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
1643 		    unsigned len,
1644 		    vec<ld_plugin_symbol_resolution_t> resolutions)
1645 {
1646   struct data_in *data_in = new (struct data_in);
1647   data_in->file_data = file_data;
1648   data_in->strings = strings;
1649   data_in->strings_len = len;
1650   data_in->globals_resolution = resolutions;
1651   data_in->reader_cache = streamer_tree_cache_create (false, false, true);
1652   return data_in;
1653 }
1654 
1655 
1656 /* Remove DATA_IN.  */
1657 
1658 void
1659 lto_data_in_delete (struct data_in *data_in)
1660 {
1661   data_in->globals_resolution.release ();
1662   streamer_tree_cache_delete (data_in->reader_cache);
1663   delete data_in;
1664 }
1665