1 /* Read the GIMPLE representation from a file stream.
2
3 Copyright (C) 2009-2022 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5 Re-implemented by Diego Novillo <dnovillo@google.com>
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "cfghooks.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "gimple-streamer.h"
35 #include "toplev.h"
36 #include "gimple-iterator.h"
37 #include "tree-cfg.h"
38 #include "tree-into-ssa.h"
39 #include "tree-dfa.h"
40 #include "tree-ssa.h"
41 #include "except.h"
42 #include "cgraph.h"
43 #include "cfgloop.h"
44 #include "debug.h"
45 #include "alloc-pool.h"
46 #include "toplev.h"
47
48 /* Allocator used to hold string slot entries for line map streaming. */
49 static struct object_allocator<struct string_slot> *string_slot_allocator;
50
51 /* The table to hold the file names. */
52 static hash_table<string_slot_hasher> *file_name_hash_table;
53
54 /* The table to hold the relative pathname prefixes. */
55
56 /* This obstack holds file names used in locators. Line map data structures
57 point into it, so it needs to stay allocated for as long as the line maps
58 exist. */
59 static struct obstack file_name_obstack;
60
61 /* Map a pair of nul terminated strings where the first one can be
62 pointer compared, but the second can't, to another string. */
63 struct string_pair_map
64 {
65 const char *str1;
66 const char *str2;
67 const char *str3;
68 hashval_t hash;
69 bool prefix;
70 };
71
72 /* Allocator used to hold string pair map entries for line map streaming. */
73 static struct object_allocator<struct string_pair_map>
74 *string_pair_map_allocator;
75
76 struct string_pair_map_hasher : nofree_ptr_hash <string_pair_map>
77 {
78 static inline hashval_t hash (const string_pair_map *);
79 static inline bool equal (const string_pair_map *, const string_pair_map *);
80 };
81
82 inline hashval_t
83 string_pair_map_hasher::hash (const string_pair_map *spm)
84 {
85 return spm->hash;
86 }
87
88 inline bool
89 string_pair_map_hasher::equal (const string_pair_map *spm1,
90 const string_pair_map *spm2)
91 {
92 return (spm1->hash == spm2->hash
93 && spm1->str1 == spm2->str1
94 && spm1->prefix == spm2->prefix
95 && strcmp (spm1->str2, spm2->str2) == 0);
96 }
97
98 /* The table to hold the pairs of pathnames and corresponding
99 resulting pathname. Used for both mapping of get_src_pwd ()
100 and recorded source working directory to relative path prefix
101 from current working directory to the recorded one, and for
102 mapping of that relative path prefix and some relative path
103 to those concatenated. */
104 static hash_table<string_pair_map_hasher> *path_name_pair_hash_table;
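/* For example, canon_relative_path_prefix maps the pair (get_src_pwd (),
   recorded source directory) to a relative prefix such as "../../baz/qux/",
   and canon_relative_file_name then maps (that prefix, "file.c") to the
   concatenated "../../baz/qux/file.c". */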
105
106
107 /* Check that tag ACTUAL has one of the given values. NTAGS is the
108 number of valid tag values to check. */
109
110 void
111 lto_tag_check_set (enum LTO_tags actual, int ntags, ...)
112 {
113 va_list ap;
114 int i;
115
116 va_start (ap, ntags);
117 for (i = 0; i < ntags; i++)
118 if ((unsigned) actual == va_arg (ap, unsigned))
119 {
120 va_end (ap);
121 return;
122 }
123
124 va_end (ap);
125 internal_error ("bytecode stream: unexpected tag %s", lto_tag_name (actual));
126 }
127
128
129 /* Read LENGTH bytes from input block IB to ADDR. */
130
131 void
132 lto_input_data_block (class lto_input_block *ib, void *addr, size_t length)
133 {
134 size_t i;
135 unsigned char *const buffer = (unsigned char *) addr;
136
137 for (i = 0; i < length; i++)
138 buffer[i] = streamer_read_uchar (ib);
139 }
140
141 /* Compute the relative path to get to DATA_WD (absolute directory name)
142 from CWD (another absolute directory name). E.g. for
143 DATA_WD of "/tmp/foo/bar" and CWD of "/tmp/baz/qux" return
144 "../../foo/bar". Returned string should be freed by the caller.
145 Return NULL if absolute file name needs to be used. */
146
147 static char *
148 relative_path_prefix (const char *data_wd, const char *cwd)
149 {
150 const char *d = data_wd;
151 const char *c = cwd;
152 #ifdef HAVE_DOS_BASED_FILE_SYSTEM
153 if (d[1] == ':')
154 {
155 if (!IS_DIR_SEPARATOR (d[2]))
156 return NULL;
157 if (c[0] == d[0] && c[1] == ':' && IS_DIR_SEPARATOR (c[2]))
158 {
159 c += 3;
160 d += 3;
161 }
162 else
163 return NULL;
164 }
165 else if (c[1] == ':')
166 return NULL;
167 #endif
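/* First skip the directory components common to both paths, then count
   how many components remain in CWD (each contributes one "../") and
   finally append what remains of DATA_WD. */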
168 do
169 {
170 while (IS_DIR_SEPARATOR (*d))
171 d++;
172 while (IS_DIR_SEPARATOR (*c))
173 c++;
174 size_t i;
175 for (i = 0; c[i] && !IS_DIR_SEPARATOR (c[i]) && c[i] == d[i]; i++)
176 ;
177 if ((c[i] == '\0' || IS_DIR_SEPARATOR (c[i]))
178 && (d[i] == '\0' || IS_DIR_SEPARATOR (d[i])))
179 {
180 c += i;
181 d += i;
182 if (*c == '\0' || *d == '\0')
183 break;
184 }
185 else
186 break;
187 }
188 while (1);
189 size_t num_up = 0;
190 do
191 {
192 while (IS_DIR_SEPARATOR (*c))
193 c++;
194 if (*c == '\0')
195 break;
196 num_up++;
197 while (*c && !IS_DIR_SEPARATOR (*c))
198 c++;
199 }
200 while (1);
201 while (IS_DIR_SEPARATOR (*d))
202 d++;
203 size_t len = strlen (d);
204 if (len == 0 && num_up == 0)
205 return xstrdup (".");
206 char *ret = XNEWVEC (char, num_up * 3 + len + 1);
207 char *p = ret;
208 for (; num_up; num_up--)
209 {
210 const char dir_up[3] = { '.', '.', DIR_SEPARATOR };
211 memcpy (p, dir_up, 3);
212 p += 3;
213 }
214 memcpy (p, d, len + 1);
215 return ret;
216 }
217
218 /* Look up DATA_WD in hash table of relative prefixes. If found,
219 return relative path from CWD to DATA_WD from the hash table,
220 otherwise create it. */
221
222 static const char *
223 canon_relative_path_prefix (const char *data_wd, const char *cwd)
224 {
225 if (!IS_ABSOLUTE_PATH (data_wd) || !IS_ABSOLUTE_PATH (cwd))
226 return NULL;
227
228 if (!path_name_pair_hash_table)
229 {
230 path_name_pair_hash_table
231 = new hash_table<string_pair_map_hasher> (37);
232 string_pair_map_allocator
233 = new object_allocator <struct string_pair_map>
234 ("line map string pair map hash");
235 }
236
237 inchash::hash h;
238 h.add_ptr (cwd);
239 h.merge_hash (htab_hash_string (data_wd));
240 h.add_int (true);
241
242 string_pair_map s_slot;
243 s_slot.str1 = cwd;
244 s_slot.str2 = data_wd;
245 s_slot.str3 = NULL;
246 s_slot.hash = h.end ();
247 s_slot.prefix = true;
248
249 string_pair_map **slot
250 = path_name_pair_hash_table->find_slot (&s_slot, INSERT);
251 if (*slot == NULL)
252 {
253 /* Compute relative path from cwd directory to data_wd directory.
254 E.g. if cwd is /tmp/foo/bar and data_wd is /tmp/baz/qux ,
255 it will return ../../baz/qux . */
256 char *relative_path = relative_path_prefix (data_wd, cwd);
257 const char *relative = relative_path ? relative_path : data_wd;
258 size_t relative_len = strlen (relative);
259 gcc_assert (relative_len);
260
261 size_t data_wd_len = strlen (data_wd);
262 bool add_separator = false;
263 if (!IS_DIR_SEPARATOR (relative[relative_len - 1]))
264 add_separator = true;
265
266 size_t len = relative_len + 1 + data_wd_len + 1 + add_separator;
267
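/* SAVED_STRING holds two nul-terminated strings back to back: a copy of
   DATA_WD (the lookup key kept in str2) followed by the relative prefix,
   with a trailing directory separator appended if it was missing (kept
   in str3). */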
268 char *saved_string = XOBNEWVEC (&file_name_obstack, char, len);
269 struct string_pair_map *new_slot
270 = string_pair_map_allocator->allocate ();
271 memcpy (saved_string, data_wd, data_wd_len + 1);
272 memcpy (saved_string + data_wd_len + 1, relative, relative_len);
273 if (add_separator)
274 saved_string[len - 2] = DIR_SEPARATOR;
275 saved_string[len - 1] = '\0';
276 new_slot->str1 = cwd;
277 new_slot->str2 = saved_string;
278 new_slot->str3 = saved_string + data_wd_len + 1;
279 if (relative_len == 1 && relative[0] == '.')
280 new_slot->str3 = NULL;
281 new_slot->hash = s_slot.hash;
282 new_slot->prefix = true;
283 *slot = new_slot;
284 free (relative_path);
285 return new_slot->str3;
286 }
287 else
288 {
289 string_pair_map *old_slot = *slot;
290 return old_slot->str3;
291 }
292 }
293
294 /* Look up the pair of RELATIVE_PREFIX and STRING strings in a hash table.
295 If found, return the concatenation of those from the hash table,
296 otherwise concatenate them. */
297
298 static const char *
299 canon_relative_file_name (const char *relative_prefix, const char *string)
300 {
301 inchash::hash h;
302 h.add_ptr (relative_prefix);
303 h.merge_hash (htab_hash_string (string));
304
305 string_pair_map s_slot;
306 s_slot.str1 = relative_prefix;
307 s_slot.str2 = string;
308 s_slot.str3 = NULL;
309 s_slot.hash = h.end ();
310 s_slot.prefix = false;
311
312 string_pair_map **slot
313 = path_name_pair_hash_table->find_slot (&s_slot, INSERT);
314 if (*slot == NULL)
315 {
316 size_t relative_prefix_len = strlen (relative_prefix);
317 size_t string_len = strlen (string);
318 size_t len = relative_prefix_len + string_len + 1;
319
320 char *saved_string = XOBNEWVEC (&file_name_obstack, char, len);
321 struct string_pair_map *new_slot
322 = string_pair_map_allocator->allocate ();
323 memcpy (saved_string, relative_prefix, relative_prefix_len);
324 memcpy (saved_string + relative_prefix_len, string, string_len + 1);
325 new_slot->str1 = relative_prefix;
326 new_slot->str2 = saved_string + relative_prefix_len;
327 new_slot->str3 = saved_string;
328 new_slot->hash = s_slot.hash;
329 new_slot->prefix = false;
330 *slot = new_slot;
331 return new_slot->str3;
332 }
333 else
334 {
335 string_pair_map *old_slot = *slot;
336 return old_slot->str3;
337 }
338 }
339
340 /* Lookup STRING in file_name_hash_table. If found, return the existing
341 string, otherwise insert STRING as the canonical version.
342 If STRING is a relative pathname and RELATIVE_PREFIX is non-NULL, use
343 canon_relative_file_name instead. */
344
345 static const char *
346 canon_file_name (const char *relative_prefix, const char *string)
347 {
348 if (relative_prefix && !IS_ABSOLUTE_PATH (string))
349 return canon_relative_file_name (relative_prefix, string);
350
351 string_slot **slot;
352 struct string_slot s_slot;
353 size_t len = strlen (string);
354
355 s_slot.s = string;
356 s_slot.len = len;
357
358 slot = file_name_hash_table->find_slot (&s_slot, INSERT);
359 if (*slot == NULL)
360 {
361 char *saved_string;
362 struct string_slot *new_slot;
363
364 saved_string = XOBNEWVEC (&file_name_obstack, char, len + 1);
365 new_slot = string_slot_allocator->allocate ();
366 memcpy (saved_string, string, len + 1);
367 new_slot->s = saved_string;
368 new_slot->len = len;
369 *slot = new_slot;
370 return saved_string;
371 }
372 else
373 {
374 struct string_slot *old_slot = *slot;
375 return old_slot->s;
376 }
377 }
378
379 /* Pointer to currently alive instance of lto_location_cache. */
380
381 lto_location_cache *lto_location_cache::current_cache;
382
383 /* Sort locations in source order. Entries for the file used by the last application sort first. */
384
385 int
386 lto_location_cache::cmp_loc (const void *pa, const void *pb)
387 {
388 const cached_location *a = ((const cached_location *)pa);
389 const cached_location *b = ((const cached_location *)pb);
390 const char *current_file = current_cache->current_file;
391 int current_line = current_cache->current_line;
392
393 if (a->file == current_file && b->file != current_file)
394 return -1;
395 if (a->file != current_file && b->file == current_file)
396 return 1;
397 if (a->file == current_file && b->file == current_file)
398 {
399 if (a->line == current_line && b->line != current_line)
400 return -1;
401 if (a->line != current_line && b->line == current_line)
402 return 1;
403 }
404 if (a->file != b->file)
405 return strcmp (a->file, b->file);
406 if (a->sysp != b->sysp)
407 return a->sysp ? 1 : -1;
408 if (a->line != b->line)
409 return a->line - b->line;
410 if (a->col != b->col)
411 return a->col - b->col;
412 if ((a->block == NULL_TREE) != (b->block == NULL_TREE))
413 return a->block ? 1 : -1;
414 if (a->block)
415 {
416 if (BLOCK_NUMBER (a->block) < BLOCK_NUMBER (b->block))
417 return -1;
418 if (BLOCK_NUMBER (a->block) > BLOCK_NUMBER (b->block))
419 return 1;
420 }
421 return 0;
422 }
423
424 /* Apply all changes in location cache. Add locations into linemap and patch
425 trees. */
426
427 bool
428 lto_location_cache::apply_location_cache ()
429 {
430 static const char *prev_file;
431 if (!loc_cache.length ())
432 return false;
433 if (loc_cache.length () > 1)
434 loc_cache.qsort (cmp_loc);
435
436 for (unsigned int i = 0; i < loc_cache.length (); i++)
437 {
438 struct cached_location loc = loc_cache[i];
439
440 if (current_file != loc.file)
441 linemap_add (line_table, prev_file ? LC_RENAME : LC_ENTER,
442 loc.sysp, loc.file, loc.line);
443 else if (current_line != loc.line)
444 {
445 int max = loc.col;
446
447 for (unsigned int j = i + 1; j < loc_cache.length (); j++)
448 if (loc.file != loc_cache[j].file
449 || loc.line != loc_cache[j].line)
450 break;
451 else if (max < loc_cache[j].col)
452 max = loc_cache[j].col;
453 linemap_line_start (line_table, loc.line, max + 1);
454 }
455 gcc_assert (*loc.loc == BUILTINS_LOCATION + 1);
456 if (current_file != loc.file
457 || current_line != loc.line
458 || current_col != loc.col)
459 {
460 current_loc = linemap_position_for_column (line_table, loc.col);
461 if (loc.block)
462 current_loc = set_block (current_loc, loc.block);
463 }
464 else if (current_block != loc.block)
465 {
466 if (loc.block)
467 current_loc = set_block (current_loc, loc.block);
468 else
469 current_loc = LOCATION_LOCUS (current_loc);
470 }
471 *loc.loc = current_loc;
472 current_line = loc.line;
473 prev_file = current_file = loc.file;
474 current_col = loc.col;
475 current_block = loc.block;
476 }
477 loc_cache.truncate (0);
478 accepted_length = 0;
479 return true;
480 }
481
482 /* Tree merging did not succeed; mark all changes in the cache as accepted. */
483
484 void
485 lto_location_cache::accept_location_cache ()
486 {
487 gcc_assert (current_cache == this);
488 accepted_length = loc_cache.length ();
489 }
490
491 /* Tree merging did succeed; throw away recent changes. */
492
493 void
494 lto_location_cache::revert_location_cache ()
495 {
496 loc_cache.truncate (accepted_length);
497 }
498
499 /* Read a location bitpack from bit pack BP and either update *LOC directly
500 or add it to the location cache. If IB is non-NULL, stream in a block
501 afterwards.
502 It is necessary to call apply_location_cache to get *LOC updated. */
503
504 void
505 lto_location_cache::input_location_and_block (location_t *loc,
506 struct bitpack_d *bp,
507 class lto_input_block *ib,
508 class data_in *data_in)
509 {
510 static const char *stream_file;
511 static int stream_line;
512 static int stream_col;
513 static bool stream_sysp;
514 static tree stream_block;
515 static const char *stream_relative_path_prefix;
516
517 gcc_assert (current_cache == this);
518
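/* Values below RESERVED_LOCATION_COUNT are the reserved locations
   themselves; RESERVED_LOCATION_COUNT means an ordinary location in the
   current file and RESERVED_LOCATION_COUNT + 1 additionally signals a
   file change, with the details following in the bitpack. */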
519 *loc = bp_unpack_int_in_range (bp, "location", 0,
520 RESERVED_LOCATION_COUNT + 1);
521
522 if (*loc < RESERVED_LOCATION_COUNT)
523 {
524 if (ib)
525 {
526 bool block_change = bp_unpack_value (bp, 1);
527 if (block_change)
528 stream_block = stream_read_tree (ib, data_in);
529 if (stream_block)
530 *loc = set_block (*loc, stream_block);
531 }
532 return;
533 }
534
535 bool file_change = (*loc == RESERVED_LOCATION_COUNT + 1);
536 /* Keep value RESERVED_LOCATION_COUNT in *loc as linemap lookups will
537 ICE on it. */
538 *loc = RESERVED_LOCATION_COUNT;
539 bool line_change = bp_unpack_value (bp, 1);
540 bool column_change = bp_unpack_value (bp, 1);
541
542 if (file_change)
543 {
544 bool pwd_change = bp_unpack_value (bp, 1);
545 if (pwd_change)
546 {
547 const char *pwd = bp_unpack_string (data_in, bp);
548 const char *src_pwd = get_src_pwd ();
549 if (strcmp (pwd, src_pwd) == 0)
550 stream_relative_path_prefix = NULL;
551 else
552 stream_relative_path_prefix
553 = canon_relative_path_prefix (pwd, src_pwd);
554 }
555 stream_file = canon_file_name (stream_relative_path_prefix,
556 bp_unpack_string (data_in, bp));
557 stream_sysp = bp_unpack_value (bp, 1);
558 }
559
560 if (line_change)
561 stream_line = bp_unpack_var_len_unsigned (bp);
562
563 if (column_change)
564 stream_col = bp_unpack_var_len_unsigned (bp);
565
566 tree block = NULL_TREE;
567 if (ib)
568 {
569 bool block_change = bp_unpack_value (bp, 1);
570 if (block_change)
571 stream_block = stream_read_tree (ib, data_in);
572 block = stream_block;
573 }
574
575 /* This optimization saves location cache operations during gimple
576 streaming. */
577
578 if (current_file == stream_file
579 && current_line == stream_line
580 && current_col == stream_col
581 && current_sysp == stream_sysp)
582 {
583 if (current_block == block)
584 *loc = current_loc;
585 else if (block)
586 *loc = set_block (current_loc, block);
587 else
588 *loc = LOCATION_LOCUS (current_loc);
589 return;
590 }
591
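/* Defer the update: cache the location and let apply_location_cache
   allocate the location_t and store it through LOC later. */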
592 struct cached_location entry
593 = {stream_file, loc, stream_line, stream_col, stream_sysp, block};
594 loc_cache.safe_push (entry);
595 }
596
597 /* Read a location bitpack from bit pack BP and either update *LOC directly
598 or add it to the location cache.
599 It is necessary to call apply_location_cache to get *LOC updated. */
600
601 void
602 lto_location_cache::input_location (location_t *loc, struct bitpack_d *bp,
603 class data_in *data_in)
604 {
605 return input_location_and_block (loc, bp, NULL, data_in);
606 }
607
608 /* Read a location bitpack from bit pack BP and either update *LOC directly
609 or add it to the location cache.
610 It is necessary to call apply_location_cache to get *LOC updated. */
611
612 void
613 lto_input_location (location_t *loc, struct bitpack_d *bp,
614 class data_in *data_in)
615 {
616 data_in->location_cache.input_location (loc, bp, data_in);
617 }
618
619 /* Read a reference to a tree node from DATA_IN using input block IB.
620 TAG is the expected node that should be found in IB. If TAG belongs
621 to one of the indexable trees, expect to read a reference index to
622 be looked up in one of the symbol tables, otherwise read the physical
623 representation of the tree using stream_read_tree. FN is the
624 function scope for the read tree. */
625
626 tree
627 lto_input_tree_ref (class lto_input_block *ib, class data_in *data_in,
628 struct function *fn, enum LTO_tags tag)
629 {
630 unsigned HOST_WIDE_INT ix_u;
631 tree result = NULL_TREE;
632
633 if (tag == LTO_ssa_name_ref)
634 {
635 ix_u = streamer_read_uhwi (ib);
636 result = (*SSANAMES (fn))[ix_u];
637 }
638 else
639 {
640 gcc_checking_assert (tag == LTO_global_stream_ref);
641 ix_u = streamer_read_uhwi (ib);
642 result = (*data_in->file_data->current_decl_state
643 ->streams[LTO_DECL_STREAM])[ix_u];
644 }
645
646 gcc_assert (result);
647
648 return result;
649 }
650
651 /* Read VAR_DECL reference to DATA from IB. */
652
653 tree
654 lto_input_var_decl_ref (lto_input_block *ib, lto_file_decl_data *file_data)
655 {
656 unsigned int ix_u = streamer_read_uhwi (ib);
657 tree result = (*file_data->current_decl_state
658 ->streams[LTO_DECL_STREAM])[ix_u];
659 gcc_assert (TREE_CODE (result) == VAR_DECL);
660 return result;
661 }
662
663 /* Read FUNCTION_DECL reference to DATA from IB. */
664
665 tree
666 lto_input_fn_decl_ref (lto_input_block *ib, lto_file_decl_data *file_data)
667 {
668 unsigned int ix_u = streamer_read_uhwi (ib);
669 tree result = (*file_data->current_decl_state
670 ->streams[LTO_DECL_STREAM])[ix_u];
671 gcc_assert (TREE_CODE (result) == FUNCTION_DECL);
672 return result;
673 }
674
675
676 /* Read and return a double-linked list of catch handlers from input
677 block IB, using descriptors in DATA_IN. */
678
679 static struct eh_catch_d *
680 lto_input_eh_catch_list (class lto_input_block *ib, class data_in *data_in,
681 eh_catch *last_p)
682 {
683 eh_catch first;
684 enum LTO_tags tag;
685
686 *last_p = first = NULL;
687 tag = streamer_read_record_start (ib);
688 while (tag)
689 {
690 tree list;
691 eh_catch n;
692
693 lto_tag_check_range (tag, LTO_eh_catch, LTO_eh_catch);
694
695 /* Read the catch node. */
696 n = ggc_cleared_alloc<eh_catch_d> ();
697 n->type_list = stream_read_tree (ib, data_in);
698 n->filter_list = stream_read_tree (ib, data_in);
699 n->label = stream_read_tree (ib, data_in);
700
701 /* Register all the types in N->FILTER_LIST. */
702 for (list = n->filter_list; list; list = TREE_CHAIN (list))
703 add_type_for_runtime (TREE_VALUE (list));
704
705 /* Chain N to the end of the list. */
706 if (*last_p)
707 (*last_p)->next_catch = n;
708 n->prev_catch = *last_p;
709 *last_p = n;
710
711 /* Set the head of the list the first time through the loop. */
712 if (first == NULL)
713 first = n;
714
715 tag = streamer_read_record_start (ib);
716 }
717
718 return first;
719 }
720
721
722 /* Read and return EH region IX from input block IB, using descriptors
723 in DATA_IN. */
724
725 static eh_region
726 input_eh_region (class lto_input_block *ib, class data_in *data_in, int ix)
727 {
728 enum LTO_tags tag;
729 eh_region r;
730
731 /* Read the region header. */
732 tag = streamer_read_record_start (ib);
733 if (tag == LTO_null)
734 return NULL;
735
736 r = ggc_cleared_alloc<eh_region_d> ();
737 r->index = streamer_read_hwi (ib);
738
739 gcc_assert (r->index == ix);
740
741 /* Read all the region pointers as region numbers. We'll fix up
742 the pointers once the whole array has been read. */
743 r->outer = (eh_region) (intptr_t) streamer_read_hwi (ib);
744 r->inner = (eh_region) (intptr_t) streamer_read_hwi (ib);
745 r->next_peer = (eh_region) (intptr_t) streamer_read_hwi (ib);
746
747 switch (tag)
748 {
749 case LTO_ert_cleanup:
750 r->type = ERT_CLEANUP;
751 break;
752
753 case LTO_ert_try:
754 {
755 struct eh_catch_d *last_catch;
756 r->type = ERT_TRY;
757 r->u.eh_try.first_catch = lto_input_eh_catch_list (ib, data_in,
758 &last_catch);
759 r->u.eh_try.last_catch = last_catch;
760 break;
761 }
762
763 case LTO_ert_allowed_exceptions:
764 {
765 tree l;
766
767 r->type = ERT_ALLOWED_EXCEPTIONS;
768 r->u.allowed.type_list = stream_read_tree (ib, data_in);
769 r->u.allowed.label = stream_read_tree (ib, data_in);
770 r->u.allowed.filter = streamer_read_uhwi (ib);
771
772 for (l = r->u.allowed.type_list; l ; l = TREE_CHAIN (l))
773 add_type_for_runtime (TREE_VALUE (l));
774 }
775 break;
776
777 case LTO_ert_must_not_throw:
778 {
779 r->type = ERT_MUST_NOT_THROW;
780 r->u.must_not_throw.failure_decl = stream_read_tree (ib, data_in);
781 bitpack_d bp = streamer_read_bitpack (ib);
782 stream_input_location (&r->u.must_not_throw.failure_loc,
783 &bp, data_in);
784 }
785 break;
786
787 default:
788 gcc_unreachable ();
789 }
790
791 r->landing_pads = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
792
793 return r;
794 }
795
796
797 /* Read and return EH landing pad IX from input block IB, using descriptors
798 in DATA_IN. */
799
800 static eh_landing_pad
801 input_eh_lp (class lto_input_block *ib, class data_in *data_in, int ix)
802 {
803 enum LTO_tags tag;
804 eh_landing_pad lp;
805
806 /* Read the landing pad header. */
807 tag = streamer_read_record_start (ib);
808 if (tag == LTO_null)
809 return NULL;
810
811 lto_tag_check_range (tag, LTO_eh_landing_pad, LTO_eh_landing_pad);
812
813 lp = ggc_cleared_alloc<eh_landing_pad_d> ();
814 lp->index = streamer_read_hwi (ib);
815 gcc_assert (lp->index == ix);
816 lp->next_lp = (eh_landing_pad) (intptr_t) streamer_read_hwi (ib);
817 lp->region = (eh_region) (intptr_t) streamer_read_hwi (ib);
818 lp->post_landing_pad = stream_read_tree (ib, data_in);
819
820 return lp;
821 }
822
823
824 /* After reading the EH regions, pointers to peer and children regions
825 are region numbers. This converts all these region numbers into
826 real pointers into the rematerialized regions for FN. ROOT_REGION
827 is the region number for the root EH region in FN. */
828
829 static void
830 fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region)
831 {
832 unsigned i;
833 vec<eh_region, va_gc> *eh_array = fn->eh->region_array;
834 vec<eh_landing_pad, va_gc> *lp_array = fn->eh->lp_array;
835 eh_region r;
836 eh_landing_pad lp;
837
838 gcc_assert (eh_array && lp_array);
839
840 gcc_assert (root_region >= 0);
841 fn->eh->region_tree = (*eh_array)[root_region];
842
843 #define FIXUP_EH_REGION(r) (r) = (*eh_array)[(HOST_WIDE_INT) (intptr_t) (r)]
844 #define FIXUP_EH_LP(p) (p) = (*lp_array)[(HOST_WIDE_INT) (intptr_t) (p)]
845
846 /* Convert all the index numbers stored in pointer fields into
847 pointers to the corresponding slots in the EH region array. */
848 FOR_EACH_VEC_ELT (*eh_array, i, r)
849 {
850 /* The array may contain NULL regions. */
851 if (r == NULL)
852 continue;
853
854 gcc_assert (i == (unsigned) r->index);
855 FIXUP_EH_REGION (r->outer);
856 FIXUP_EH_REGION (r->inner);
857 FIXUP_EH_REGION (r->next_peer);
858 FIXUP_EH_LP (r->landing_pads);
859 }
860
861 /* Convert all the index numbers stored in pointer fields into
862 pointers to the corresponding slots in the EH landing pad array. */
863 FOR_EACH_VEC_ELT (*lp_array, i, lp)
864 {
865 /* The array may contain NULL landing pads. */
866 if (lp == NULL)
867 continue;
868
869 gcc_assert (i == (unsigned) lp->index);
870 FIXUP_EH_LP (lp->next_lp);
871 FIXUP_EH_REGION (lp->region);
872 }
873
874 #undef FIXUP_EH_REGION
875 #undef FIXUP_EH_LP
876 }
877
878
879 /* Initialize EH support. */
880
881 void
882 lto_init_eh (void)
883 {
884 static bool eh_initialized_p = false;
885
886 if (eh_initialized_p)
887 return;
888
889 /* Contrary to most other FEs, we only initialize EH support when at
890 least one of the files in the set contains exception regions in
891 it. Since this happens much later than the call to init_eh in
892 lang_dependent_init, we have to set flag_exceptions and call
893 init_eh again to initialize the EH tables. */
894 flag_exceptions = 1;
895 init_eh ();
896
897 eh_initialized_p = true;
898 }
899
900
901 /* Read the exception table for FN from IB using the data descriptors
902 in DATA_IN. */
903
904 static void
905 input_eh_regions (class lto_input_block *ib, class data_in *data_in,
906 struct function *fn)
907 {
908 HOST_WIDE_INT i, root_region, len;
909 enum LTO_tags tag;
910
911 tag = streamer_read_record_start (ib);
912 if (tag == LTO_null)
913 return;
914
915 lto_tag_check_range (tag, LTO_eh_table, LTO_eh_table);
916
917 gcc_assert (fn->eh);
918
919 root_region = streamer_read_hwi (ib);
920 gcc_assert (root_region == (int) root_region);
921
922 /* Read the EH region array. */
923 len = streamer_read_hwi (ib);
924 gcc_assert (len == (int) len);
925 if (len > 0)
926 {
927 vec_safe_grow_cleared (fn->eh->region_array, len, true);
928 for (i = 0; i < len; i++)
929 {
930 eh_region r = input_eh_region (ib, data_in, i);
931 (*fn->eh->region_array)[i] = r;
932 }
933 }
934
935 /* Read the landing pads. */
936 len = streamer_read_hwi (ib);
937 gcc_assert (len == (int) len);
938 if (len > 0)
939 {
940 vec_safe_grow_cleared (fn->eh->lp_array, len, true);
941 for (i = 0; i < len; i++)
942 {
943 eh_landing_pad lp = input_eh_lp (ib, data_in, i);
944 (*fn->eh->lp_array)[i] = lp;
945 }
946 }
947
948 /* Read the runtime type data. */
949 len = streamer_read_hwi (ib);
950 gcc_assert (len == (int) len);
951 if (len > 0)
952 {
953 vec_safe_grow_cleared (fn->eh->ttype_data, len, true);
954 for (i = 0; i < len; i++)
955 {
956 tree ttype = stream_read_tree (ib, data_in);
957 (*fn->eh->ttype_data)[i] = ttype;
958 }
959 }
960
961 /* Read the table of action chains. */
962 len = streamer_read_hwi (ib);
963 gcc_assert (len == (int) len);
964 if (len > 0)
965 {
966 if (targetm.arm_eabi_unwinder)
967 {
968 vec_safe_grow_cleared (fn->eh->ehspec_data.arm_eabi, len, true);
969 for (i = 0; i < len; i++)
970 {
971 tree t = stream_read_tree (ib, data_in);
972 (*fn->eh->ehspec_data.arm_eabi)[i] = t;
973 }
974 }
975 else
976 {
977 vec_safe_grow_cleared (fn->eh->ehspec_data.other, len, true);
978 for (i = 0; i < len; i++)
979 {
980 uchar c = streamer_read_uchar (ib);
981 (*fn->eh->ehspec_data.other)[i] = c;
982 }
983 }
984 }
985
986 /* Reconstruct the EH region tree by fixing up the peer/children
987 pointers. */
988 fixup_eh_region_pointers (fn, root_region);
989
990 tag = streamer_read_record_start (ib);
991 lto_tag_check_range (tag, LTO_null, LTO_null);
992 }
993
994
995 /* Make a new basic block with index INDEX in function FN. */
996
997 static basic_block
998 make_new_block (struct function *fn, unsigned int index)
999 {
1000 basic_block bb = alloc_block ();
1001 bb->index = index;
1002 SET_BASIC_BLOCK_FOR_FN (fn, index, bb);
1003 n_basic_blocks_for_fn (fn)++;
1004 return bb;
1005 }
1006
1007
1008 /* Read the CFG for function FN from input block IB. */
1009
1010 static void
1011 input_cfg (class lto_input_block *ib, class data_in *data_in,
1012 struct function *fn)
1013 {
1014 unsigned int bb_count;
1015 basic_block p_bb;
1016 unsigned int i;
1017 int index;
1018
1019 init_empty_tree_cfg_for_function (fn);
1020
1021 profile_status_for_fn (fn) = streamer_read_enum (ib, profile_status_d,
1022 PROFILE_LAST);
1023
1024 bb_count = streamer_read_uhwi (ib);
1025
1026 last_basic_block_for_fn (fn) = bb_count;
1027 if (bb_count > basic_block_info_for_fn (fn)->length ())
1028 vec_safe_grow_cleared (basic_block_info_for_fn (fn), bb_count, true);
1029
1030 if (bb_count > label_to_block_map_for_fn (fn)->length ())
1031 vec_safe_grow_cleared (label_to_block_map_for_fn (fn), bb_count, true);
1032
1033 index = streamer_read_hwi (ib);
1034 while (index != -1)
1035 {
1036 basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
1037 unsigned int edge_count;
1038
1039 if (bb == NULL)
1040 bb = make_new_block (fn, index);
1041
1042 edge_count = streamer_read_uhwi (ib);
1043
1044 /* Connect up the CFG. */
1045 for (i = 0; i < edge_count; i++)
1046 {
1047 bitpack_d bp = streamer_read_bitpack (ib);
1048 unsigned int dest_index = bp_unpack_var_len_unsigned (&bp);
1049 unsigned int edge_flags = bp_unpack_var_len_unsigned (&bp);
1050 basic_block dest = BASIC_BLOCK_FOR_FN (fn, dest_index);
1051
1052 if (dest == NULL)
1053 dest = make_new_block (fn, dest_index);
1054
1055 edge e = make_edge (bb, dest, edge_flags);
1056 data_in->location_cache.input_location_and_block (&e->goto_locus,
1057 &bp, ib, data_in);
1058 e->probability = profile_probability::stream_in (ib);
1059
1060 }
1061
1062 index = streamer_read_hwi (ib);
1063 }
1064
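/* Re-create the prev_bb/next_bb chain: block indices follow in chain
   order, terminated by -1, starting from the entry block. */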
1065 p_bb = ENTRY_BLOCK_PTR_FOR_FN (fn);
1066 index = streamer_read_hwi (ib);
1067 while (index != -1)
1068 {
1069 basic_block bb = BASIC_BLOCK_FOR_FN (fn, index);
1070 bb->prev_bb = p_bb;
1071 p_bb->next_bb = bb;
1072 p_bb = bb;
1073 index = streamer_read_hwi (ib);
1074 }
1075
1076 /* ??? The cfgloop interface is tied to cfun. */
1077 gcc_assert (cfun == fn);
1078
1079 /* Input the loop tree. */
1080 unsigned n_loops = streamer_read_uhwi (ib);
1081 if (n_loops == 0)
1082 return;
1083
1084 struct loops *loops = ggc_cleared_alloc<struct loops> ();
1085 init_loops_structure (fn, loops, n_loops);
1086 set_loops_for_fn (fn, loops);
1087
1088 /* Input each loop and associate it with its loop header so
1089 flow_loops_find can rebuild the loop tree. */
1090 for (unsigned i = 1; i < n_loops; ++i)
1091 {
1092 int header_index = streamer_read_hwi (ib);
1093 if (header_index == -1)
1094 {
1095 loops->larray->quick_push (NULL);
1096 continue;
1097 }
1098
1099 class loop *loop = alloc_loop ();
1100 loop->header = BASIC_BLOCK_FOR_FN (fn, header_index);
1101 loop->header->loop_father = loop;
1102
1103 /* Read everything copy_loop_info copies. */
1104 loop->estimate_state = streamer_read_enum (ib, loop_estimation, EST_LAST);
1105 loop->any_upper_bound = streamer_read_hwi (ib);
1106 if (loop->any_upper_bound)
1107 loop->nb_iterations_upper_bound = streamer_read_widest_int (ib);
1108 loop->any_likely_upper_bound = streamer_read_hwi (ib);
1109 if (loop->any_likely_upper_bound)
1110 loop->nb_iterations_likely_upper_bound = streamer_read_widest_int (ib);
1111 loop->any_estimate = streamer_read_hwi (ib);
1112 if (loop->any_estimate)
1113 loop->nb_iterations_estimate = streamer_read_widest_int (ib);
1114
1115 /* Read OMP SIMD related info. */
1116 loop->safelen = streamer_read_hwi (ib);
1117 loop->unroll = streamer_read_hwi (ib);
1118 loop->owned_clique = streamer_read_hwi (ib);
1119 loop->dont_vectorize = streamer_read_hwi (ib);
1120 loop->force_vectorize = streamer_read_hwi (ib);
1121 loop->finite_p = streamer_read_hwi (ib);
1122 loop->simduid = stream_read_tree (ib, data_in);
1123
1124 place_new_loop (fn, loop);
1125
1126 /* flow_loops_find doesn't like loops not in the tree, hook them
1127 all as siblings of the tree root temporarily. */
1128 flow_loop_tree_node_add (loops->tree_root, loop);
1129 }
1130
1131 /* Rebuild the loop tree. */
1132 flow_loops_find (loops);
1133 }
1134
1135
1136 /* Read the SSA names array for function FN from DATA_IN using input
1137 block IB. */
1138
1139 static void
1140 input_ssa_names (class lto_input_block *ib, class data_in *data_in,
1141 struct function *fn)
1142 {
1143 unsigned int i, size;
1144
1145 size = streamer_read_uhwi (ib);
1146 init_tree_ssa (fn, size);
1147 cfun->gimple_df->in_ssa_p = true;
1148 init_ssa_operands (fn);
1149
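/* SSA names are streamed as (index, default-def flag, underlying decl)
   records terminated by a zero index; indices that do not appear
   correspond to names that had been freed. */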
1150 i = streamer_read_uhwi (ib);
1151 while (i)
1152 {
1153 tree ssa_name, name;
1154 bool is_default_def;
1155
1156 /* Skip over the elements that had been freed. */
1157 while (SSANAMES (fn)->length () < i)
1158 SSANAMES (fn)->quick_push (NULL_TREE);
1159
1160 is_default_def = (streamer_read_uchar (ib) != 0);
1161 name = stream_read_tree (ib, data_in);
1162 ssa_name = make_ssa_name_fn (fn, name, NULL);
1163
1164 if (is_default_def)
1165 {
1166 set_ssa_default_def (cfun, SSA_NAME_VAR (ssa_name), ssa_name);
1167 SSA_NAME_DEF_STMT (ssa_name) = gimple_build_nop ();
1168 }
1169
1170 i = streamer_read_uhwi (ib);
1171 }
1172 }
1173
1174
1175 /* Go through all NODE edges and fixup call_stmt pointers
1176 so they point to STMTS. */
1177
1178 static void
1179 fixup_call_stmt_edges_1 (struct cgraph_node *node, gimple **stmts,
1180 struct function *fn)
1181 {
1182 #define STMT_UID_NOT_IN_RANGE(uid) \
1183 (gimple_stmt_max_uid (fn) < uid || uid == 0)
1184
1185 struct cgraph_edge *cedge;
1186 struct ipa_ref *ref = NULL;
1187 unsigned int i;
1188
1189 for (cedge = node->callees; cedge; cedge = cedge->next_callee)
1190 {
1191 if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
1192 fatal_error (input_location,
1193 "Cgraph edge statement index out of range");
1194 cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
1195 cedge->lto_stmt_uid = 0;
1196 if (!cedge->call_stmt)
1197 fatal_error (input_location,
1198 "Cgraph edge statement index not found");
1199 }
1200 for (cedge = node->indirect_calls; cedge; cedge = cedge->next_callee)
1201 {
1202 if (STMT_UID_NOT_IN_RANGE (cedge->lto_stmt_uid))
1203 fatal_error (input_location,
1204 "Cgraph edge statement index out of range");
1205 cedge->call_stmt = as_a <gcall *> (stmts[cedge->lto_stmt_uid - 1]);
1206 cedge->lto_stmt_uid = 0;
1207 if (!cedge->call_stmt)
1208 fatal_error (input_location, "Cgraph edge statement index not found");
1209 }
1210 for (i = 0; node->iterate_reference (i, ref); i++)
1211 if (ref->lto_stmt_uid)
1212 {
1213 if (STMT_UID_NOT_IN_RANGE (ref->lto_stmt_uid))
1214 fatal_error (input_location,
1215 "Reference statement index out of range");
1216 ref->stmt = stmts[ref->lto_stmt_uid - 1];
1217 ref->lto_stmt_uid = 0;
1218 if (!ref->stmt)
1219 fatal_error (input_location, "Reference statement index not found");
1220 }
1221 }
1222
1223
1224 /* Fixup call_stmt pointers in NODE and all clones. */
1225
1226 static void
1227 fixup_call_stmt_edges (struct cgraph_node *orig, gimple **stmts)
1228 {
1229 struct cgraph_node *node;
1230 struct function *fn;
1231
1232 while (orig->clone_of)
1233 orig = orig->clone_of;
1234 fn = DECL_STRUCT_FUNCTION (orig->decl);
1235
1236 if (!orig->thunk)
1237 fixup_call_stmt_edges_1 (orig, stmts, fn);
1238 if (orig->clones)
1239 for (node = orig->clones; node != orig;)
1240 {
1241 if (!node->thunk)
1242 fixup_call_stmt_edges_1 (node, stmts, fn);
1243 if (node->clones)
1244 node = node->clones;
1245 else if (node->next_sibling_clone)
1246 node = node->next_sibling_clone;
1247 else
1248 {
1249 while (node != orig && !node->next_sibling_clone)
1250 node = node->clone_of;
1251 if (node != orig)
1252 node = node->next_sibling_clone;
1253 }
1254 }
1255 }
1256
1257
1258 /* Input the base body of struct function FN from DATA_IN
1259 using input block IB. */
1260
1261 static void
1262 input_struct_function_base (struct function *fn, class data_in *data_in,
1263 class lto_input_block *ib)
1264 {
1265 struct bitpack_d bp;
1266 int len;
1267
1268 /* Read the static chain and non-local goto save area. */
1269 fn->static_chain_decl = stream_read_tree (ib, data_in);
1270 fn->nonlocal_goto_save_area = stream_read_tree (ib, data_in);
1271
1272 /* Read all the local symbols. */
1273 len = streamer_read_hwi (ib);
1274 if (len > 0)
1275 {
1276 int i;
1277 vec_safe_grow_cleared (fn->local_decls, len, true);
1278 for (i = 0; i < len; i++)
1279 {
1280 tree t = stream_read_tree (ib, data_in);
1281 (*fn->local_decls)[i] = t;
1282 }
1283 }
1284
1285 /* Input the current IL state of the function. */
1286 fn->curr_properties = streamer_read_uhwi (ib);
1287
1288 /* Read all the attributes for FN. */
1289 bp = streamer_read_bitpack (ib);
1290 fn->is_thunk = bp_unpack_value (&bp, 1);
1291 fn->has_local_explicit_reg_vars = bp_unpack_value (&bp, 1);
1292 fn->returns_pcc_struct = bp_unpack_value (&bp, 1);
1293 fn->returns_struct = bp_unpack_value (&bp, 1);
1294 fn->can_throw_non_call_exceptions = bp_unpack_value (&bp, 1);
1295 fn->can_delete_dead_exceptions = bp_unpack_value (&bp, 1);
1296 fn->always_inline_functions_inlined = bp_unpack_value (&bp, 1);
1297 fn->after_inlining = bp_unpack_value (&bp, 1);
1298 fn->stdarg = bp_unpack_value (&bp, 1);
1299 fn->has_nonlocal_label = bp_unpack_value (&bp, 1);
1300 fn->has_forced_label_in_static = bp_unpack_value (&bp, 1);
1301 fn->calls_alloca = bp_unpack_value (&bp, 1);
1302 fn->calls_setjmp = bp_unpack_value (&bp, 1);
1303 fn->calls_eh_return = bp_unpack_value (&bp, 1);
1304 fn->has_force_vectorize_loops = bp_unpack_value (&bp, 1);
1305 fn->has_simduid_loops = bp_unpack_value (&bp, 1);
1306 fn->va_list_fpr_size = bp_unpack_value (&bp, 8);
1307 fn->va_list_gpr_size = bp_unpack_value (&bp, 8);
1308 fn->last_clique = bp_unpack_value (&bp, sizeof (short) * 8);
1309
1310 /* Input the function start and end loci. */
1311 stream_input_location (&fn->function_start_locus, &bp, data_in);
1312 stream_input_location (&fn->function_end_locus, &bp, data_in);
1313
1314 /* Restore the instance discriminators if present. */
1315 int instance_number = bp_unpack_value (&bp, 1);
1316 if (instance_number)
1317 {
1318 instance_number = bp_unpack_value (&bp, sizeof (int) * CHAR_BIT);
1319 maybe_create_decl_to_instance_map ()->put (fn->decl, instance_number);
1320 }
1321 }
1322
1323 /* Read a chain of tree nodes from input block IB. DATA_IN contains
1324 tables and descriptors for the file being read. */
1325
1326 static tree
1327 streamer_read_chain (class lto_input_block *ib, class data_in *data_in)
1328 {
1329 tree first, prev, curr;
1330
1331 /* The chain is written as NULL terminated list of trees. */
1332 first = prev = NULL_TREE;
1333 do
1334 {
1335 curr = stream_read_tree (ib, data_in);
1336 if (prev)
1337 TREE_CHAIN (prev) = curr;
1338 else
1339 first = curr;
1340
1341 prev = curr;
1342 }
1343 while (curr);
1344
1345 return first;
1346 }
1347
1348 /* Read the body of function FN_DECL from DATA_IN using input block IB. */
1349
1350 static void
1351 input_function (tree fn_decl, class data_in *data_in,
1352 class lto_input_block *ib, class lto_input_block *ib_cfg,
1353 cgraph_node *node)
1354 {
1355 struct function *fn;
1356 enum LTO_tags tag;
1357 gimple **stmts;
1358 basic_block bb;
1359
1360 tag = streamer_read_record_start (ib);
1361 lto_tag_check (tag, LTO_function);
1362
1363 /* Read the decls for the function result and the arguments. */
1364 DECL_RESULT (fn_decl) = stream_read_tree (ib, data_in);
1365 DECL_ARGUMENTS (fn_decl) = streamer_read_chain (ib, data_in);
1366
1367 /* Read debug args if available. */
1368 unsigned n_debugargs = streamer_read_uhwi (ib);
1369 if (n_debugargs)
1370 {
1371 vec<tree, va_gc> **debugargs = decl_debug_args_insert (fn_decl);
1372 vec_safe_grow (*debugargs, n_debugargs, true);
1373 for (unsigned i = 0; i < n_debugargs; ++i)
1374 (**debugargs)[i] = stream_read_tree (ib, data_in);
1375 }
1376
1377 /* Read the tree of lexical scopes for the function. */
1378 DECL_INITIAL (fn_decl) = stream_read_tree (ib, data_in);
1379 unsigned block_leaf_count = streamer_read_uhwi (ib);
1380 while (block_leaf_count--)
1381 stream_read_tree (ib, data_in);
1382
1383 if (!streamer_read_uhwi (ib))
1384 return;
1385
1386 push_struct_function (fn_decl);
1387 fn = DECL_STRUCT_FUNCTION (fn_decl);
1388
1389 gimple_register_cfg_hooks ();
1390
1391 input_struct_function_base (fn, data_in, ib);
1392 input_cfg (ib_cfg, data_in, fn);
1393
1394 /* Read all the SSA names. */
1395 input_ssa_names (ib, data_in, fn);
1396
1397 /* Read the exception handling regions in the function. */
1398 input_eh_regions (ib, data_in, fn);
1399
1400 gcc_assert (DECL_INITIAL (fn_decl));
1401 DECL_SAVED_TREE (fn_decl) = NULL_TREE;
1402
1403 /* Read all the basic blocks. */
1404 tag = streamer_read_record_start (ib);
1405 while (tag)
1406 {
1407 input_bb (ib, tag, data_in, fn,
1408 node->count_materialization_scale);
1409 tag = streamer_read_record_start (ib);
1410 }
1411
1412 /* Finalize gimple_location/gimple_block of stmts and phis. */
1413 data_in->location_cache.apply_location_cache ();
1414
1415 /* Fix up the call statements that are mentioned in the callgraph
1416 edges. */
1417 set_gimple_stmt_max_uid (cfun, 0);
1418 FOR_ALL_BB_FN (bb, cfun)
1419 {
1420 gimple_stmt_iterator gsi;
1421 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1422 {
1423 gimple *stmt = gsi_stmt (gsi);
1424 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1425 }
1426 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1427 {
1428 gimple *stmt = gsi_stmt (gsi);
1429 gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
1430 }
1431 }
1432 stmts = (gimple **) xcalloc (gimple_stmt_max_uid (fn), sizeof (gimple *));
1433 FOR_ALL_BB_FN (bb, cfun)
1434 {
1435 gimple_stmt_iterator bsi = gsi_start_phis (bb);
1436 while (!gsi_end_p (bsi))
1437 {
1438 gimple *stmt = gsi_stmt (bsi);
1439 gsi_next (&bsi);
1440 stmts[gimple_uid (stmt)] = stmt;
1441 }
1442 bsi = gsi_start_bb (bb);
1443 while (!gsi_end_p (bsi))
1444 {
1445 gimple *stmt = gsi_stmt (bsi);
1446 bool remove = false;
1447 /* If we're recompiling LTO objects with debug stmts but
1448 we're not supposed to have debug stmts, remove them now.
1449 We can't remove them earlier because this would cause uid
1450 mismatches in fixups, but we can do it at this point, as
1451 long as debug stmts don't require fixups.
1452 Similarly remove all IFN_*SAN_* internal calls. */
1453 if (!flag_wpa)
1454 {
1455 if (is_gimple_debug (stmt)
1456 && (gimple_debug_nonbind_marker_p (stmt)
1457 ? !MAY_HAVE_DEBUG_MARKER_STMTS
1458 : !MAY_HAVE_DEBUG_BIND_STMTS))
1459 remove = true;
1460 /* In case the linemap overflows, locations can be dropped
1461 to zero. Thus do not keep nonsensical inline entry markers
1462 we'd later ICE on. */
1463 tree block;
1464 if (gimple_debug_inline_entry_p (stmt)
1465 && (((block = gimple_block (stmt))
1466 && !inlined_function_outer_scope_p (block))
1467 || !debug_inline_points))
1468 remove = true;
1469 if (is_gimple_call (stmt)
1470 && gimple_call_internal_p (stmt))
1471 {
1472 bool replace = false;
1473 switch (gimple_call_internal_fn (stmt))
1474 {
1475 case IFN_UBSAN_NULL:
1476 if ((flag_sanitize
1477 & (SANITIZE_NULL | SANITIZE_ALIGNMENT)) == 0)
1478 replace = true;
1479 break;
1480 case IFN_UBSAN_BOUNDS:
1481 if ((flag_sanitize & SANITIZE_BOUNDS) == 0)
1482 replace = true;
1483 break;
1484 case IFN_UBSAN_VPTR:
1485 if ((flag_sanitize & SANITIZE_VPTR) == 0)
1486 replace = true;
1487 break;
1488 case IFN_UBSAN_OBJECT_SIZE:
1489 if ((flag_sanitize & SANITIZE_OBJECT_SIZE) == 0)
1490 replace = true;
1491 break;
1492 case IFN_UBSAN_PTR:
1493 if ((flag_sanitize & SANITIZE_POINTER_OVERFLOW) == 0)
1494 replace = true;
1495 break;
1496 case IFN_ASAN_MARK:
1497 if ((flag_sanitize & SANITIZE_ADDRESS) == 0)
1498 replace = true;
1499 break;
1500 case IFN_TSAN_FUNC_EXIT:
1501 if ((flag_sanitize & SANITIZE_THREAD) == 0)
1502 replace = true;
1503 break;
1504 default:
1505 break;
1506 }
1507 if (replace)
1508 {
1509 gimple_call_set_internal_fn (as_a <gcall *> (stmt),
1510 IFN_NOP);
1511 update_stmt (stmt);
1512 }
1513 }
1514 }
1515 if (remove)
1516 {
1517 gimple_stmt_iterator gsi = bsi;
1518 gsi_next (&bsi);
1519 unlink_stmt_vdef (stmt);
1520 release_defs (stmt);
1521 gsi_remove (&gsi, true);
1522 }
1523 else
1524 {
1525 gsi_next (&bsi);
1526 stmts[gimple_uid (stmt)] = stmt;
1527
1528 /* Remember that the input function has begin stmt
1529 markers, so that we know to expect them when emitting
1530 debug info. */
1531 if (!cfun->debug_nonbind_markers
1532 && gimple_debug_nonbind_marker_p (stmt))
1533 cfun->debug_nonbind_markers = true;
1534 }
1535 }
1536 }
1537
1538 /* Set the gimple body to the statement sequence in the entry
1539 basic block. FIXME lto, this is fairly hacky. The existence
1540 of a gimple body is used by the cgraph routines, but we should
1541 really use the presence of the CFG. */
1542 {
1543 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
1544 gimple_set_body (fn_decl, bb_seq (ei_edge (ei)->dest));
1545 }
1546
1547 update_max_bb_count ();
1548 fixup_call_stmt_edges (node, stmts);
1549 execute_all_ipa_stmt_fixups (node, stmts);
1550
1551 free_dominance_info (CDI_DOMINATORS);
1552 free_dominance_info (CDI_POST_DOMINATORS);
1553 free (stmts);
1554 pop_cfun ();
1555 }
1556
1557 /* Read the initializer of variable VAR from DATA_IN using input block IB. */
1558
1559 static void
1560 input_constructor (tree var, class data_in *data_in,
1561 class lto_input_block *ib)
1562 {
1563 DECL_INITIAL (var) = stream_read_tree (ib, data_in);
1564 }
1565
1566
1567 /* Read the body from DATA for function NODE and fill it in.
1568 FILE_DATA are the global decls and types. SECTION_TYPE is either
1569 LTO_section_function_body or LTO_section_static_initializer. If
1570 section type is LTO_section_function_body, FN must be the decl for
1571 that function. */
1572
1573 static void
1574 lto_read_body_or_constructor (struct lto_file_decl_data *file_data, struct symtab_node *node,
1575 const char *data, enum lto_section_type section_type)
1576 {
1577 const struct lto_function_header *header;
1578 class data_in *data_in;
1579 int cfg_offset;
1580 int main_offset;
1581 int string_offset;
1582 tree fn_decl = node->decl;
1583
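/* The section layout is an lto_function_header followed by the CFG
   stream (functions only), the main stream and the string table. */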
1584 header = (const struct lto_function_header *) data;
1585 if (TREE_CODE (node->decl) == FUNCTION_DECL)
1586 {
1587 cfg_offset = sizeof (struct lto_function_header);
1588 main_offset = cfg_offset + header->cfg_size;
1589 string_offset = main_offset + header->main_size;
1590 }
1591 else
1592 {
1593 main_offset = sizeof (struct lto_function_header);
1594 string_offset = main_offset + header->main_size;
1595 }
1596
1597 data_in = lto_data_in_create (file_data, data + string_offset,
1598 header->string_size, vNULL);
1599
1600 if (section_type == LTO_section_function_body)
1601 {
1602 struct lto_in_decl_state *decl_state;
1603 unsigned from;
1604
1605 gcc_checking_assert (node);
1606
1607 /* Use the function's decl state. */
1608 decl_state = lto_get_function_in_decl_state (file_data, fn_decl);
1609 gcc_assert (decl_state);
1610 file_data->current_decl_state = decl_state;
1611
1612
1613 /* Set up the struct function. */
1614 from = data_in->reader_cache->nodes.length ();
1615 lto_input_block ib_main (data + main_offset, header->main_size,
1616 file_data->mode_table);
1617 if (TREE_CODE (node->decl) == FUNCTION_DECL)
1618 {
1619 lto_input_block ib_cfg (data + cfg_offset, header->cfg_size,
1620 file_data->mode_table);
1621 input_function (fn_decl, data_in, &ib_main, &ib_cfg,
1622 dyn_cast <cgraph_node *>(node));
1623 }
1624 else
1625 input_constructor (fn_decl, data_in, &ib_main);
1626 data_in->location_cache.apply_location_cache ();
1627 /* And fixup types we streamed locally. */
1628 {
1629 struct streamer_tree_cache_d *cache = data_in->reader_cache;
1630 unsigned len = cache->nodes.length ();
1631 unsigned i;
1632 for (i = len; i-- > from;)
1633 {
1634 tree t = streamer_tree_cache_get_tree (cache, i);
1635 if (t == NULL_TREE)
1636 continue;
1637
1638 if (TYPE_P (t))
1639 {
1640 gcc_assert (TYPE_CANONICAL (t) == NULL_TREE);
1641 if (type_with_alias_set_p (t)
1642 && canonical_type_used_p (t))
1643 TYPE_CANONICAL (t) = TYPE_MAIN_VARIANT (t);
1644 if (TYPE_MAIN_VARIANT (t) != t)
1645 {
1646 gcc_assert (TYPE_NEXT_VARIANT (t) == NULL_TREE);
1647 TYPE_NEXT_VARIANT (t)
1648 = TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t));
1649 TYPE_NEXT_VARIANT (TYPE_MAIN_VARIANT (t)) = t;
1650 }
1651 }
1652 }
1653 }
1654
1655 /* Restore decl state */
1656 file_data->current_decl_state = file_data->global_decl_state;
1657 }
1658
1659 lto_data_in_delete (data_in);
1660 }
1661
1662
1663 /* Read the body of NODE using DATA. FILE_DATA holds the global
1664 decls and types. */
1665
1666 void
1667 lto_input_function_body (struct lto_file_decl_data *file_data,
1668 struct cgraph_node *node, const char *data)
1669 {
1670 lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1671 }
1672
1673 /* Read the constructor (initializer) of variable NODE using DATA.
1674 FILE_DATA holds the global decls and types. */
1675
1676 void
1677 lto_input_variable_constructor (struct lto_file_decl_data *file_data,
1678 struct varpool_node *node, const char *data)
1679 {
1680 lto_read_body_or_constructor (file_data, node, data, LTO_section_function_body);
1681 }
1682
1683
1684 /* Queue of accumulated decl -> DIE mappings. Like locations, these
1685 are only applied to prevailing tree nodes during tree merging. */
1686 vec<dref_entry> dref_queue;
1687
1688 /* Read the physical representation of a tree node EXPR from
1689 input block IB using the per-file context in DATA_IN. */
1690
1691 static void
1692 lto_read_tree_1 (class lto_input_block *ib, class data_in *data_in, tree expr)
1693 {
1694 /* Read all the bitfield values in EXPR. Note that for LTO, we
1695 only write language-independent bitfields, so no more unpacking is
1696 needed. */
1697 streamer_read_tree_bitfields (ib, data_in, expr);
1698
1699 /* Read all the pointer fields in EXPR. */
1700 streamer_read_tree_body (ib, data_in, expr);
1701
1702 /* Read any LTO-specific data not read by the tree streamer. Do not use
1703 stream_read_tree here since that flushes the dref_queue in the midst of
1704 SCC reading. */
1705 if (DECL_P (expr)
1706 && TREE_CODE (expr) != FUNCTION_DECL
1707 && TREE_CODE (expr) != TRANSLATION_UNIT_DECL)
1708 DECL_INITIAL (expr)
1709 = lto_input_tree_1 (ib, data_in, streamer_read_record_start (ib), 0);
1710
1711 /* Stream references to early generated DIEs. Keep in sync with the
1712 trees handled in dwarf2out_register_external_die. */
1713 if ((DECL_P (expr)
1714 && TREE_CODE (expr) != FIELD_DECL
1715 && TREE_CODE (expr) != DEBUG_EXPR_DECL
1716 && TREE_CODE (expr) != TYPE_DECL)
1717 || TREE_CODE (expr) == BLOCK)
1718 {
1719 const char *str = streamer_read_string (data_in, ib);
1720 if (str)
1721 {
1722 unsigned HOST_WIDE_INT off = streamer_read_uhwi (ib);
1723 dref_entry e = { expr, str, off };
1724 dref_queue.safe_push (e);
1725 }
1726 }
1727 }
1728
1729 /* Read the physical representation of a tree node with tag TAG from
1730 input block IB using the per-file context in DATA_IN. */
1731
1732 static tree
1733 lto_read_tree (class lto_input_block *ib, class data_in *data_in,
1734 enum LTO_tags tag, hashval_t hash)
1735 {
1736 /* Instantiate a new tree node. */
1737 tree result = streamer_alloc_tree (ib, data_in, tag);
1738
1739 /* Enter RESULT in the reader cache. This will make RESULT
1740 available so that circular references in the rest of the tree
1741 structure can be resolved in subsequent calls to stream_read_tree. */
1742 streamer_tree_cache_append (data_in->reader_cache, result, hash);
1743
1744 lto_read_tree_1 (ib, data_in, result);
1745
1746 return result;
1747 }
1748
1749
1750 /* Populate the reader cache with trees materialized from the SCC
1751 following in the IB, DATA_IN stream.
1752 If SHARED_SCC is true we input LTO_tree_scc. */
1753
1754 hashval_t
1755 lto_input_scc (class lto_input_block *ib, class data_in *data_in,
1756 unsigned *len, unsigned *entry_len, bool shared_scc)
1757 {
1758 unsigned size = streamer_read_uhwi (ib);
1759 hashval_t scc_hash = 0;
1760 unsigned scc_entry_len = 1;
1761
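/* For a shared SCC the streamed value has a flag in its low bit saying
   whether an explicit entry length follows, and the number of trees in
   the remaining bits; the SCC hash follows. */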
1762 if (shared_scc)
1763 {
1764 if (size & 1)
1765 scc_entry_len = streamer_read_uhwi (ib);
1766 size /= 2;
1767 scc_hash = streamer_read_uhwi (ib);
1768 }
1769
1770 if (size == 1)
1771 {
1772 enum LTO_tags tag = streamer_read_record_start (ib);
1773 lto_input_tree_1 (ib, data_in, tag, scc_hash);
1774 }
1775 else
1776 {
1777 unsigned int first = data_in->reader_cache->nodes.length ();
1778 tree result;
1779
1780 /* Materialize SIZE trees by reading their headers. */
1781 for (unsigned i = 0; i < size; ++i)
1782 {
1783 enum LTO_tags tag = streamer_read_record_start (ib);
1784 if (tag == LTO_null
1785 || tag == LTO_global_stream_ref
1786 || tag == LTO_tree_pickle_reference
1787 || tag == LTO_integer_cst
1788 || tag == LTO_tree_scc
1789 || tag == LTO_trees)
1790 gcc_unreachable ();
1791
1792 result = streamer_alloc_tree (ib, data_in, tag);
1793 streamer_tree_cache_append (data_in->reader_cache, result, 0);
1794 }
1795
1796 /* Read the tree bitpacks and references. */
1797 for (unsigned i = 0; i < size; ++i)
1798 {
1799 result = streamer_tree_cache_get_tree (data_in->reader_cache,
1800 first + i);
1801 lto_read_tree_1 (ib, data_in, result);
1802 }
1803 }
1804
1805 *len = size;
1806 *entry_len = scc_entry_len;
1807 return scc_hash;
1808 }
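
/* For illustration only, assuming the writer in lto-streamer-out.cc mirrors
   this reader: a shared SCC of three trees with entry length two and hash H
   starts with the uhwi 3 * 2 + 1 = 7 (the low bit signals that an explicit
   entry length follows), then the uhwi 2, then the uhwi H, followed by the
   three tree headers and then the three tree bodies.  A non-shared LTO_trees
   block carries only the plain uhwi count before the headers and bodies.  */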

/* Read reference to tree from IB and DATA_IN.
   This is used for streaming tree bodies where we know that
   the tree is already in cache or is indexable and
   must be matched with stream_write_tree_ref.  */

tree
stream_read_tree_ref (lto_input_block *ib, data_in *data_in)
{
  int ix = streamer_read_hwi (ib);
  if (!ix)
    return NULL_TREE;
  if (ix > 0)
    return streamer_tree_cache_get_tree (data_in->reader_cache, ix - 1);

  ix = -ix - 1;
  int id = ix & 1;
  ix /= 2;

  tree ret;
  if (!id)
    ret = (*data_in->file_data->current_decl_state
           ->streams[LTO_DECL_STREAM])[ix];
  else
    ret = (*SSANAMES (cfun))[ix];
  return ret;
}
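
/* Worked example of the encoding decoded above (the counterpart of
   stream_write_tree_ref): 0 means NULL_TREE and a positive value IX refers
   to reader-cache slot IX - 1.  A negative value packs an index and a
   stream selector: -6 becomes IX = 5, whose low bit 1 selects the SSA name
   array and whose remaining bits give index 2, i.e. (*SSANAMES (cfun))[2];
   -5 becomes IX = 4, low bit 0, so entry 2 of the LTO_DECL_STREAM of the
   current decl state.  */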

/* Read a tree from input block IB using the per-file context in
   DATA_IN.  This context is used, for example, to resolve references
   to previously read nodes.  */

tree
lto_input_tree_1 (class lto_input_block *ib, class data_in *data_in,
                  enum LTO_tags tag, hashval_t hash)
{
  tree result;

  gcc_assert ((unsigned) tag < (unsigned) LTO_NUM_TAGS);

  if (tag == LTO_null)
    result = NULL_TREE;
  else if (tag == LTO_global_stream_ref || tag == LTO_ssa_name_ref)
    {
      /* If TAG is a reference to an indexable tree, the next value
         in IB is the index into the table where we expect to find
         that tree.  */
      result = lto_input_tree_ref (ib, data_in, cfun, tag);
    }
  else if (tag == LTO_tree_pickle_reference)
    {
      /* If TAG is a reference to a previously read tree, look it up in
         the reader cache.  */
      result = streamer_get_pickled_tree (ib, data_in);
    }
  else if (tag == LTO_integer_cst)
    {
      /* For shared integer constants in singletons we can use the
         existing tree integer constant merging code.  */
      tree type = stream_read_tree_ref (ib, data_in);
      unsigned HOST_WIDE_INT len = streamer_read_uhwi (ib);
      unsigned HOST_WIDE_INT i;
      HOST_WIDE_INT a[WIDE_INT_MAX_ELTS];

      for (i = 0; i < len; i++)
        a[i] = streamer_read_hwi (ib);
      gcc_assert (TYPE_PRECISION (type) <= MAX_BITSIZE_MODE_ANY_INT);
      result = wide_int_to_tree (type, wide_int::from_array
                                 (a, len, TYPE_PRECISION (type)));
      streamer_tree_cache_append (data_in->reader_cache, result, hash);
    }
  else if (tag == LTO_tree_scc || tag == LTO_trees)
    gcc_unreachable ();
  else
    {
      /* Otherwise, materialize a new node from IB.  */
      result = lto_read_tree (ib, data_in, tag, hash);
    }

  return result;
}
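
/* For illustration only: an LTO_integer_cst record consists of a
   stream_read_tree_ref for the type, a uhwi element count, and that many
   hwi values holding the wide_int representation (a small value needs only
   one element even for a wide type).  wide_int_to_tree then returns the
   canonical, possibly pre-existing, INTEGER_CST for that type and value,
   which is how identical constants end up shared.  */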

tree
lto_input_tree (class lto_input_block *ib, class data_in *data_in)
{
  enum LTO_tags tag;

  /* Input pickled trees needed to stream in the reference.  */
  while ((tag = streamer_read_record_start (ib)) == LTO_trees)
    {
      unsigned len, entry_len;
      lto_input_scc (ib, data_in, &len, &entry_len, false);

      /* Register DECLs with the debuginfo machinery.  */
      while (!dref_queue.is_empty ())
        {
          dref_entry e = dref_queue.pop ();
          debug_hooks->register_external_die (e.decl, e.sym, e.off);
        }
    }
  tree t = lto_input_tree_1 (ib, data_in, tag, 0);

  if (!dref_queue.is_empty ())
    {
      dref_entry e = dref_queue.pop ();
      debug_hooks->register_external_die (e.decl, e.sym, e.off);
      gcc_checking_assert (dref_queue.is_empty ());
    }
  return t;
}
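
/* Putting the pieces together: at this point in the stream a single
   stream_write_tree on the output side may have produced zero or more
   LTO_trees blocks (materialized into the reader cache by the loop above),
   followed by one record for the tree itself, which is then frequently just
   an LTO_tree_pickle_reference into the cache entries created moments
   before.  */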


/* Input toplevel asms.  */

void
lto_input_toplevel_asms (struct lto_file_decl_data *file_data, int order_base)
{
  size_t len;
  const char *data
    = lto_get_summary_section_data (file_data, LTO_section_asm, &len);
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  class data_in *data_in;
  tree str;

  if (! data)
    return;

  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size,
                      file_data->mode_table);

  data_in = lto_data_in_create (file_data, data + string_offset,
                                header->string_size, vNULL);

  while ((str = streamer_read_string_cst (data_in, &ib)))
    {
      asm_node *node = symtab->finalize_toplevel_asm (str);
      node->order = streamer_read_hwi (&ib) + order_base;
      if (node->order >= symtab->order)
        symtab->order = node->order + 1;
    }

  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_asm, NULL, data, len);
}
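
/* For illustration only: like other simple sections with strings, the asm
   section is laid out as

     [lto_simple_header_with_strings]
     [main data, header->main_size bytes]
     [string table, header->string_size bytes]

   which is why the input block above starts at data + sizeof (*header) and
   the string table at data + sizeof (*header) + header->main_size.  */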


/* Input mode table.  */

void
lto_input_mode_table (struct lto_file_decl_data *file_data)
{
  size_t len;
  const char *data
    = lto_get_summary_section_data (file_data, LTO_section_mode_table, &len);
  if (! data)
    internal_error ("cannot read LTO mode table from %s",
                    file_data->file_name);

  unsigned char *table = ggc_cleared_vec_alloc<unsigned char> (1 << 8);
  file_data->mode_table = table;
  const struct lto_simple_header_with_strings *header
    = (const struct lto_simple_header_with_strings *) data;
  int string_offset;
  class data_in *data_in;
  string_offset = sizeof (*header) + header->main_size;

  lto_input_block ib (data + sizeof (*header), header->main_size, NULL);
  data_in = lto_data_in_create (file_data, data + string_offset,
                                header->string_size, vNULL);
  bitpack_d bp = streamer_read_bitpack (&ib);

  table[VOIDmode] = VOIDmode;
  table[BLKmode] = BLKmode;
  unsigned int m;
  while ((m = bp_unpack_value (&bp, 8)) != VOIDmode)
    {
      enum mode_class mclass
        = bp_unpack_enum (&bp, mode_class, MAX_MODE_CLASS);
      poly_uint16 size = bp_unpack_poly_value (&bp, 16);
      poly_uint16 prec = bp_unpack_poly_value (&bp, 16);
      machine_mode inner = (machine_mode) bp_unpack_value (&bp, 8);
      poly_uint16 nunits = bp_unpack_poly_value (&bp, 16);
      unsigned int ibit = 0, fbit = 0;
      unsigned int real_fmt_len = 0;
      const char *real_fmt_name = NULL;
      switch (mclass)
        {
        case MODE_FRACT:
        case MODE_UFRACT:
        case MODE_ACCUM:
        case MODE_UACCUM:
          ibit = bp_unpack_value (&bp, 8);
          fbit = bp_unpack_value (&bp, 8);
          break;
        case MODE_FLOAT:
        case MODE_DECIMAL_FLOAT:
          real_fmt_name = bp_unpack_indexed_string (data_in, &bp,
                                                    &real_fmt_len);
          break;
        default:
          break;
        }
      /* First search just the GET_CLASS_NARROWEST_MODE to wider modes;
         if not found, fall back to all modes.  */
      int pass;
      for (pass = 0; pass < 2; pass++)
        for (machine_mode mr = pass ? VOIDmode
                                    : GET_CLASS_NARROWEST_MODE (mclass);
             pass ? mr < MAX_MACHINE_MODE : mr != VOIDmode;
             pass ? mr = (machine_mode) (mr + 1)
                  : mr = GET_MODE_WIDER_MODE (mr).else_void ())
          if (GET_MODE_CLASS (mr) != mclass
              || maybe_ne (GET_MODE_SIZE (mr), size)
              || maybe_ne (GET_MODE_PRECISION (mr), prec)
              || (inner == m
                  ? GET_MODE_INNER (mr) != mr
                  : GET_MODE_INNER (mr) != table[(int) inner])
              || GET_MODE_IBIT (mr) != ibit
              || GET_MODE_FBIT (mr) != fbit
              || maybe_ne (GET_MODE_NUNITS (mr), nunits))
            continue;
          else if ((mclass == MODE_FLOAT || mclass == MODE_DECIMAL_FLOAT)
                   && strcmp (REAL_MODE_FORMAT (mr)->name, real_fmt_name) != 0)
            continue;
          else
            {
              table[m] = mr;
              pass = 2;
              break;
            }
      unsigned int mname_len;
      const char *mname = bp_unpack_indexed_string (data_in, &bp, &mname_len);
      if (pass == 2)
        {
          switch (mclass)
            {
            case MODE_VECTOR_BOOL:
            case MODE_VECTOR_INT:
            case MODE_VECTOR_FLOAT:
            case MODE_VECTOR_FRACT:
            case MODE_VECTOR_UFRACT:
            case MODE_VECTOR_ACCUM:
            case MODE_VECTOR_UACCUM:
              /* For unsupported vector modes just use BLKmode,
                 if the scalar mode is supported.  */
              if (table[(int) inner] != VOIDmode)
                {
                  table[m] = BLKmode;
                  break;
                }
              /* FALLTHRU */
            default:
              /* This is only used for offloading-target compilations and
                 is a user-facing error.  Give a better error message for
                 the common modes; see also mode-classes.def.  */
              if (mclass == MODE_FLOAT)
                fatal_error (UNKNOWN_LOCATION,
                             "%s - %u-bit-precision floating-point numbers "
                             "unsupported (mode %qs)", TARGET_MACHINE,
                             prec.to_constant (), mname);
              else if (mclass == MODE_DECIMAL_FLOAT)
                fatal_error (UNKNOWN_LOCATION,
                             "%s - %u-bit-precision decimal floating-point "
                             "numbers unsupported (mode %qs)", TARGET_MACHINE,
                             prec.to_constant (), mname);
              else if (mclass == MODE_COMPLEX_FLOAT)
                fatal_error (UNKNOWN_LOCATION,
                             "%s - %u-bit-precision complex floating-point "
                             "numbers unsupported (mode %qs)", TARGET_MACHINE,
                             prec.to_constant (), mname);
              else if (mclass == MODE_INT)
                fatal_error (UNKNOWN_LOCATION,
                             "%s - %u-bit integer numbers unsupported (mode "
                             "%qs)", TARGET_MACHINE, prec.to_constant (), mname);
              else
                fatal_error (UNKNOWN_LOCATION, "%s - unsupported mode %qs",
                             TARGET_MACHINE, mname);
              break;
            }
        }
    }
  lto_data_in_delete (data_in);

  lto_free_section_data (file_data, LTO_section_mode_table, NULL, data, len);
}
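
/* For illustration only: the table built above maps the mode numbers found
   in the stream (for offloading, the host compiler's modes) to this
   compiler's own machine_mode values.  The stream's equivalent of SImode,
   for example, resolves to whatever local mode has the same class, size,
   precision, inner mode and component count (plus fixed-point bits or real
   format where relevant), and vector modes the target lacks degrade to
   BLKmode as long as their scalar element mode exists.  */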


/* Initialization for the LTO reader.  */

void
lto_reader_init (void)
{
  lto_streamer_init ();
  file_name_hash_table
    = new hash_table<string_slot_hasher> (37);
  string_slot_allocator = new object_allocator <struct string_slot>
                                ("line map file name hash");
  gcc_obstack_init (&file_name_obstack);
}

/* Free the hash tables and allocators used to stream in location file
   names.  */

void
lto_free_file_name_hash (void)
{
  delete file_name_hash_table;
  file_name_hash_table = NULL;
  delete string_slot_allocator;
  string_slot_allocator = NULL;
  delete path_name_pair_hash_table;
  path_name_pair_hash_table = NULL;
  delete string_pair_map_allocator;
  string_pair_map_allocator = NULL;
  /* file_name_obstack must stay allocated since it is referred to by
     the line map table.  */
}


/* Create a new data_in object for FILE_DATA.  STRINGS is the string
   table to use with LEN strings.  RESOLUTIONS is the vector of linker
   resolutions (NULL if not using a linker plugin).  */

class data_in *
lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
                    unsigned len,
                    vec<ld_plugin_symbol_resolution_t> resolutions)
{
  class data_in *data_in = new (class data_in);
  data_in->file_data = file_data;
  data_in->strings = strings;
  data_in->strings_len = len;
  data_in->globals_resolution = resolutions;
  data_in->reader_cache = streamer_tree_cache_create (false, false, true);
  return data_in;
}


/* Remove DATA_IN.  */

void
lto_data_in_delete (class data_in *data_in)
{
  data_in->globals_resolution.release ();
  streamer_tree_cache_delete (data_in->reader_cache);
  delete data_in;
}
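
/* Typical usage, modeled on lto_input_toplevel_asms above (a sketch, not a
   new API):

     class data_in *data_in
       = lto_data_in_create (file_data, data + string_offset,
                             header->string_size, vNULL);
     ... read via the streamer routines with an lto_input_block and DATA_IN ...
     lto_data_in_delete (data_in);

   Note that lto_data_in_delete releases the resolutions vector passed to
   lto_data_in_create, so callers must not release that vector themselves.  */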
2160