1 /* Miscellaneous utilities for tree streaming.  Things that are used
2    in both input and output are here.
3 
4    Copyright (C) 2011-2018 Free Software Foundation, Inc.
5    Contributed by Diego Novillo <dnovillo@google.com>
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "tree-streamer.h"
30 #include "cgraph.h"
31 
/* Table indexed by machine_mode, used for two different purposes.
   During streaming out we record a non-zero value here for every mode
   that was streamed out.
   During streaming in, we translate the on-disk mode using this
   table.  For normal LTO it is set to the identity mapping; for
   ACCEL_COMPILER it is remapped depending on the streamed mode_table
   content.  */
unsigned char streamer_mode_table[1 << 8];
39 
40 /* Check that all the TS_* structures handled by the streamer_write_* and
41    streamer_read_* routines are exactly ALL the structures defined in
42    treestruct.def.  */
43 
44 void
streamer_check_handled_ts_structures(void)45 streamer_check_handled_ts_structures (void)
46 {
47   bool handled_p[LAST_TS_ENUM];
48   unsigned i;
49 
50   memset (&handled_p, 0, sizeof (handled_p));
51 
52   /* These are the TS_* structures that are either handled or
53      explicitly ignored by the streamer routines.  */
54   handled_p[TS_BASE] = true;
55   handled_p[TS_TYPED] = true;
56   handled_p[TS_COMMON] = true;
57   handled_p[TS_INT_CST] = true;
58   handled_p[TS_POLY_INT_CST] = true;
59   handled_p[TS_REAL_CST] = true;
60   handled_p[TS_FIXED_CST] = true;
61   handled_p[TS_VECTOR] = true;
62   handled_p[TS_STRING] = true;
63   handled_p[TS_COMPLEX] = true;
64   handled_p[TS_IDENTIFIER] = true;
65   handled_p[TS_DECL_MINIMAL] = true;
66   handled_p[TS_DECL_COMMON] = true;
67   handled_p[TS_DECL_WRTL] = true;
68   handled_p[TS_DECL_NON_COMMON] = true;
69   handled_p[TS_DECL_WITH_VIS] = true;
70   handled_p[TS_FIELD_DECL] = true;
71   handled_p[TS_VAR_DECL] = true;
72   handled_p[TS_PARM_DECL] = true;
73   handled_p[TS_LABEL_DECL] = true;
74   handled_p[TS_RESULT_DECL] = true;
75   handled_p[TS_CONST_DECL] = true;
76   handled_p[TS_TYPE_DECL] = true;
77   handled_p[TS_FUNCTION_DECL] = true;
78   handled_p[TS_TYPE_COMMON] = true;
79   handled_p[TS_TYPE_WITH_LANG_SPECIFIC] = true;
80   handled_p[TS_TYPE_NON_COMMON] = true;
81   handled_p[TS_LIST] = true;
82   handled_p[TS_VEC] = true;
83   handled_p[TS_EXP] = true;
84   handled_p[TS_SSA_NAME] = true;
85   handled_p[TS_BLOCK] = true;
86   handled_p[TS_BINFO] = true;
87   handled_p[TS_STATEMENT_LIST] = true;
88   handled_p[TS_CONSTRUCTOR] = true;
89   handled_p[TS_OMP_CLAUSE] = true;
90   handled_p[TS_OPTIMIZATION] = true;
91   handled_p[TS_TARGET_OPTION] = true;
92   handled_p[TS_TRANSLATION_UNIT_DECL] = true;
93 
94   /* Anything not marked above will trigger the following assertion.
95      If this assertion triggers, it means that there is a new TS_*
96      structure that should be handled by the streamer.  */
97   for (i = 0; i < LAST_TS_ENUM; i++)
98     gcc_assert (handled_p[i]);
99 }
100 
101 
102 /* Helper for streamer_tree_cache_insert_1.  Add T to CACHE->NODES at
103    slot IX.  */
104 
105 static void
streamer_tree_cache_add_to_node_array(struct streamer_tree_cache_d * cache,unsigned ix,tree t,hashval_t hash)106 streamer_tree_cache_add_to_node_array (struct streamer_tree_cache_d *cache,
107 				       unsigned ix, tree t, hashval_t hash)
108 {
109   /* We're either replacing an old element or appending consecutively.  */
110   if (cache->nodes.exists ())
111     {
112       if (cache->nodes.length () == ix)
113 	cache->nodes.safe_push (t);
114       else
115 	cache->nodes[ix] = t;
116     }
117   if (cache->hashes.exists ())
118     {
119       if (cache->hashes.length () == ix)
120 	cache->hashes.safe_push (hash);
121       else
122 	cache->hashes[ix] = hash;
123     }
124 }
125 
126 
127 /* Helper for streamer_tree_cache_insert and streamer_tree_cache_insert_at.
128    CACHE, T, and IX_P are as in streamer_tree_cache_insert.
129 
130    If INSERT_AT_NEXT_SLOT_P is true, T is inserted at the next available
131    slot in the cache.  Otherwise, T is inserted at the position indicated
132    in *IX_P.
133 
134    If T already existed in CACHE, return true.  Otherwise,
135    return false.  */
136 
137 static bool
streamer_tree_cache_insert_1(struct streamer_tree_cache_d * cache,tree t,hashval_t hash,unsigned * ix_p,bool insert_at_next_slot_p)138 streamer_tree_cache_insert_1 (struct streamer_tree_cache_d *cache,
139 			      tree t, hashval_t hash, unsigned *ix_p,
140 			      bool insert_at_next_slot_p)
141 {
142   bool existed_p;
143 
144   gcc_assert (t);
145 
146   unsigned int &ix = cache->node_map->get_or_insert (t, &existed_p);
147   if (!existed_p)
148     {
149       /* Determine the next slot to use in the cache.  */
150       if (insert_at_next_slot_p)
151 	ix = cache->next_idx++;
152       else
153 	ix = *ix_p;
154 
155       streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
156     }
157   else
158     {
159       if (!insert_at_next_slot_p && ix != *ix_p)
160 	{
161 	  /* If the caller wants to insert T at a specific slot
162 	     location, and ENTRY->TO does not match *IX_P, add T to
163 	     the requested location slot.  */
164 	  ix = *ix_p;
165 	  streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
166 	}
167     }
168 
169   if (ix_p)
170     *ix_p = ix;
171 
172   return existed_p;
173 }
174 
175 
176 /* Insert tree node T in CACHE.  If T already existed in the cache
177    return true.  Otherwise, return false.
178 
179    If IX_P is non-null, update it with the index into the cache where
180    T has been stored.  */
181 
182 bool
streamer_tree_cache_insert(struct streamer_tree_cache_d * cache,tree t,hashval_t hash,unsigned * ix_p)183 streamer_tree_cache_insert (struct streamer_tree_cache_d *cache, tree t,
184 			    hashval_t hash, unsigned *ix_p)
185 {
186   return streamer_tree_cache_insert_1 (cache, t, hash, ix_p, true);
187 }
188 
189 
190 /* Replace the tree node with T in CACHE at slot IX.  */
191 
192 void
streamer_tree_cache_replace_tree(struct streamer_tree_cache_d * cache,tree t,unsigned ix)193 streamer_tree_cache_replace_tree (struct streamer_tree_cache_d *cache,
194 				  tree t, unsigned ix)
195 {
196   hashval_t hash = 0;
197   if (cache->hashes.exists ())
198     hash = streamer_tree_cache_get_hash (cache, ix);
199   if (!cache->node_map)
200     streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
201   else
202     streamer_tree_cache_insert_1 (cache, t, hash, &ix, false);
203 }
204 
205 
206 /* Appends tree node T to CACHE, even if T already existed in it.  */
207 
208 void
streamer_tree_cache_append(struct streamer_tree_cache_d * cache,tree t,hashval_t hash)209 streamer_tree_cache_append (struct streamer_tree_cache_d *cache,
210 			    tree t, hashval_t hash)
211 {
212   unsigned ix = cache->next_idx++;
213   if (!cache->node_map)
214     streamer_tree_cache_add_to_node_array (cache, ix, t, hash);
215   else
216     streamer_tree_cache_insert_1 (cache, t, hash, &ix, false);
217 }
218 
219 /* Return true if tree node T exists in CACHE, otherwise false.  If IX_P is
220    not NULL, write to *IX_P the index into the cache where T is stored
221    ((unsigned)-1 if T is not found).  */
222 
223 bool
streamer_tree_cache_lookup(struct streamer_tree_cache_d * cache,tree t,unsigned * ix_p)224 streamer_tree_cache_lookup (struct streamer_tree_cache_d *cache, tree t,
225 			    unsigned *ix_p)
226 {
227   unsigned *slot;
228   bool retval;
229   unsigned ix;
230 
231   gcc_assert (t);
232 
233   slot = cache->node_map->get (t);
234   if (slot == NULL)
235     {
236       retval = false;
237       ix = -1;
238     }
239   else
240     {
241       retval = true;
242       ix = *slot;
243     }
244 
245   if (ix_p)
246     *ix_p = ix;
247 
248   return retval;
249 }
250 
251 
252 /* Verify that NODE is in CACHE.  */
253 
254 static void
verify_common_node_recorded(struct streamer_tree_cache_d * cache,tree node)255 verify_common_node_recorded (struct streamer_tree_cache_d *cache, tree node)
256 {
257   /* Restrict this to flag_checking only because in general violating it is
258      harmless plus we never know what happens on all targets/frontend/flag(!)
259      combinations.  */
260   if (!flag_checking)
261     return;
262 
263   if (cache->node_map)
264     gcc_assert (streamer_tree_cache_lookup (cache, node, NULL));
265   else
266     {
267       bool found = false;
268       gcc_assert (cache->nodes.exists ());
269       /* Linear search...  */
270       for (unsigned i = 0; !found && i < cache->nodes.length (); ++i)
271 	if (cache->nodes[i] == node)
272 	  found = true;
273       gcc_assert (found);
274     }
275 }
276 
277 
/* Record NODE in CACHE as a preloaded common node, recursing into the
   trees some codes refer to.  The recorded sequence must be identical
   across all frontend and lto1 invocations so that cache indices agree
   (see the hash comment below).  */

static void
record_common_node (struct streamer_tree_cache_d *cache, tree node)
{
  /* If we recursively end up at nodes we do not want to preload simply don't.
     ???  We'd want to verify that this doesn't happen, or alternatively
     do not recurse at all.  */
  if (node == char_type_node)
    return;

  /* These are frontend dependent and must never be preloaded;
     preload_common_nodes skips them at the top level, so reaching one
     here would mean a recursion leaked them in.  */
  gcc_checking_assert (node != boolean_type_node
		       && node != boolean_true_node
		       && node != boolean_false_node);

  /* We have to make sure to fill exactly the same number of
     elements for all frontends.  That can include NULL trees.
     As our hash table can't deal with zero entries we'll simply stream
     a random other tree.  A NULL tree never will be looked up so it
     doesn't matter which tree we replace it with, just to be sure
     use error_mark_node.  */
  if (!node)
    node = error_mark_node;

  /* ???  FIXME, devise a better hash value.  But the hash needs to be equal
     for all frontend and lto1 invocations.  So just use the position
     in the cache as hash value.  */
  streamer_tree_cache_append (cache, node, cache->nodes.length ());

  switch (TREE_CODE (node))
    {
    case ERROR_MARK:
    case FIELD_DECL:
    case FIXED_POINT_TYPE:
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case INTEGER_TYPE:
    case POINTER_BOUNDS_TYPE:
    case REAL_TYPE:
    case TREE_LIST:
    case VOID_CST:
    case VOID_TYPE:
      /* No recursive trees.  */
      break;
    case ARRAY_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Preload the pointed-to/element type right after NODE itself.  */
      record_common_node (cache, TREE_TYPE (node));
      break;
    case COMPLEX_TYPE:
      /* Verify that a complex type's component type (node_type) has been
	 handled already (and we thus don't need to recurse here).  */
      verify_common_node_recorded (cache, TREE_TYPE (node));
      break;
    case RECORD_TYPE:
      /* The FIELD_DECLs of structures should be shared, so that every
	 COMPONENT_REF uses the same tree node when referencing a field.
	 Pointer equality between FIELD_DECLs is used by the alias
	 machinery to compute overlapping component references (see
	 nonoverlapping_component_refs_p and
	 nonoverlapping_component_refs_of_decl_p).  */
      for (tree f = TYPE_FIELDS (node); f; f = TREE_CHAIN (f))
	record_common_node (cache, f);
      break;
    default:
      /* Unexpected tree code.  */
      gcc_unreachable ();
    }
}
347 
348 
349 /* Preload common nodes into CACHE and make sure they are merged
350    properly according to the gimple type table.  */
351 
352 static void
preload_common_nodes(struct streamer_tree_cache_d * cache)353 preload_common_nodes (struct streamer_tree_cache_d *cache)
354 {
355   unsigned i;
356 
357   for (i = 0; i < itk_none; i++)
358     /* Skip itk_char.  char_type_node is dependent on -f[un]signed-char.  */
359     if (i != itk_char)
360       record_common_node (cache, integer_types[i]);
361 
362   for (i = 0; i < stk_type_kind_last; i++)
363     record_common_node (cache, sizetype_tab[i]);
364 
365   for (i = 0; i < TI_MAX; i++)
366     /* Skip boolean type and constants, they are frontend dependent.  */
367     if (i != TI_BOOLEAN_TYPE
368 	&& i != TI_BOOLEAN_FALSE
369 	&& i != TI_BOOLEAN_TRUE
370 	/* MAIN_IDENTIFIER is not always initialized by Fortran FE.  */
371 	&& i != TI_MAIN_IDENTIFIER
372 	/* PID_TYPE is initialized only by C family front-ends.  */
373 	&& i != TI_PID_TYPE
374 	/* Skip optimization and target option nodes; they depend on flags.  */
375 	&& i != TI_OPTIMIZATION_DEFAULT
376 	&& i != TI_OPTIMIZATION_CURRENT
377 	&& i != TI_TARGET_OPTION_DEFAULT
378 	&& i != TI_TARGET_OPTION_CURRENT
379 	&& i != TI_CURRENT_TARGET_PRAGMA
380 	&& i != TI_CURRENT_OPTIMIZE_PRAGMA
381 	/* Skip va_list* related nodes if offloading.  For native LTO
382 	   we want them to be merged for the stdarg pass, for offloading
383 	   they might not be identical between host and offloading target.  */
384 	&& (!lto_stream_offload_p
385 	    || (i != TI_VA_LIST_TYPE
386 		&& i != TI_VA_LIST_GPR_COUNTER_FIELD
387 		&& i != TI_VA_LIST_FPR_COUNTER_FIELD)))
388       record_common_node (cache, global_trees[i]);
389 }
390 
391 
392 /* Create a cache of pickled nodes.  */
393 
394 struct streamer_tree_cache_d *
streamer_tree_cache_create(bool with_hashes,bool with_map,bool with_vec)395 streamer_tree_cache_create (bool with_hashes, bool with_map, bool with_vec)
396 {
397   struct streamer_tree_cache_d *cache;
398 
399   cache = XCNEW (struct streamer_tree_cache_d);
400 
401   if (with_map)
402     cache->node_map = new hash_map<tree, unsigned> (251);
403   cache->next_idx = 0;
404   if (with_vec)
405     cache->nodes.create (165);
406   if (with_hashes)
407     cache->hashes.create (165);
408 
409   /* Load all the well-known tree nodes that are always created by
410      the compiler on startup.  This prevents writing them out
411      unnecessarily.  */
412   preload_common_nodes (cache);
413 
414   return cache;
415 }
416 
417 
418 /* Delete the streamer cache C.  */
419 
420 void
streamer_tree_cache_delete(struct streamer_tree_cache_d * c)421 streamer_tree_cache_delete (struct streamer_tree_cache_d *c)
422 {
423   if (c == NULL)
424     return;
425 
426   delete c->node_map;
427   c->node_map = NULL;
428   c->nodes.release ();
429   c->hashes.release ();
430   free (c);
431 }
432