/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2020, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have received a copy of the GNU General   *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "attribs.h"
#include "varasm.h"
#include "toplev.h"
#include "output.h"
#include "debug.h"
#include "convert.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "tree-dump.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "nlists.h"
#include "snames.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater than or equal to 64 bits, or 0 if this
   alignment is not specifically capped.  */
int double_scalar_alignment;

/* True if floating-point arithmetic may use wider intermediate results.  */
bool fp_arith_may_widen = true;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_stack_protect_attribute (tree *, tree, tree, int, bool *);
static tree handle_no_stack_protector_attribute (tree *, tree, tree, int, bool *);
static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
static tree handle_noicf_attribute (tree *, tree, tree, int, bool *);
static tree handle_noipa_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_flatten_attribute (tree *, tree, tree, int, bool *);
static tree handle_used_attribute (tree *, tree, tree, int, bool *);
static tree handle_cold_attribute (tree *, tree, tree, int, bool *);
static tree handle_hot_attribute (tree *, tree, tree, int, bool *);
static tree handle_target_attribute (tree *, tree, tree, int, bool *);
static tree handle_target_clones_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

static const struct attribute_spec::exclusions attr_cold_hot_exclusions[] =
{
  { "cold", true,  true,  true  },
  { "hot" , true,  true,  true  },
  { NULL  , false, false, false }
};

static const struct attribute_spec::exclusions attr_stack_protect_exclusions[] =
{
  { "stack_protect", true, false, false },
  { "no_stack_protector", true, false, false },
  { NULL, false, false, false },
};
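
/* A hedged illustration of the exclusion tables above (not part of the
   original code): with these entries, the generic attribute machinery is
   expected to diagnose a declaration that combines both attributes of a
   pair and ignore the later one, e.g.:

     int f (void) __attribute__ ((cold, hot));   -- "hot" is rejected

   The checking itself is done by decl_attributes in attribs.c, not by
   this file.  */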

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging in a lot of the common C front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
       affects_type_identity, handler, exclude } */
  { "const",        0, 0,  true,  false, false, false,
    handle_const_attribute, NULL },
  { "nothrow",      0, 0,  true,  false, false, false,
    handle_nothrow_attribute, NULL },
  { "pure",         0, 0,  true,  false, false, false,
    handle_pure_attribute, NULL },
  { "no vops",      0, 0,  true,  false, false, false,
    handle_novops_attribute, NULL },
  { "nonnull",      0, -1, false, true,  true,  false,
    handle_nonnull_attribute, NULL },
  { "sentinel",     0, 1,  false, true,  true,  false,
    handle_sentinel_attribute, NULL },
  { "noreturn",     0, 0,  true,  false, false, false,
    handle_noreturn_attribute, NULL },
  { "stack_protect",0, 0, true,  false, false, false,
    handle_stack_protect_attribute,
    attr_stack_protect_exclusions },
  { "no_stack_protector",0, 0, true,  false, false, false,
    handle_no_stack_protector_attribute,
    attr_stack_protect_exclusions },
  { "noinline",     0, 0,  true,  false, false, false,
    handle_noinline_attribute, NULL },
  { "noclone",      0, 0,  true,  false, false, false,
    handle_noclone_attribute, NULL },
  { "no_icf",       0, 0,  true,  false, false, false,
    handle_noicf_attribute, NULL },
  { "noipa",        0, 0,  true,  false, false, false,
    handle_noipa_attribute, NULL },
  { "leaf",         0, 0,  true,  false, false, false,
    handle_leaf_attribute, NULL },
  { "always_inline",0, 0,  true,  false, false, false,
    handle_always_inline_attribute, NULL },
  { "malloc",       0, 0,  true,  false, false, false,
    handle_malloc_attribute, NULL },
  { "type generic", 0, 0,  false, true,  true,  false,
    handle_type_generic_attribute, NULL },

  { "flatten",      0, 0,  true,  false, false, false,
    handle_flatten_attribute, NULL },
  { "used",         0, 0,  true,  false, false, false,
    handle_used_attribute, NULL },
  { "cold",         0, 0,  true,  false, false, false,
    handle_cold_attribute, attr_cold_hot_exclusions },
  { "hot",          0, 0,  true,  false, false, false,
    handle_hot_attribute, attr_cold_hot_exclusions },
  { "target",       1, -1, true,  false, false, false,
    handle_target_attribute, NULL },
  { "target_clones",1, -1, true,  false, false, false,
    handle_target_clones_attribute, NULL },

  { "vector_size",  1, 1,  false, true,  false, false,
    handle_vector_size_attribute, NULL },
  { "vector_type",  0, 0,  false, true,  false, false,
    handle_vector_type_attribute, NULL },
  { "may_alias",    0, 0,  false, true,  false, false,
    NULL, NULL },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",       3, 3,  false, true,  true,  false,
    fake_attribute_handler, NULL },
  { "format_arg",   1, 1,  false, true,  true,  false,
    fake_attribute_handler, NULL },

  { NULL,           0, 0,  false, false, false, false,
    NULL, NULL }
};

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY)	\
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL)	\
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY)	\
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY)	\
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL)	\
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY)	\
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
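
/* A minimal usage sketch of these associations (illustrative only, not part
   of the original code), through the public wrappers defined below, where
   GNAT_ENTITY is a valid entity node and GNU_DECL a ..._DECL node:

     save_gnu_tree (gnat_entity, gnu_decl, false);
     if (present_gnu_tree (gnat_entity))
       gnu_decl = get_gnu_tree (gnat_entity);
     save_gnu_tree (gnat_entity, NULL_TREE, false);  -- reset association  */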

/* This variable keeps a table of types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) vec<tree, va_gc> *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) vec<tree, va_gc> *builtin_decls;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

/* A hash table of packable types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */

struct GTY((for_user)) packable_type_hash
{
  hashval_t hash;
  tree type;
};

struct packable_type_hasher : ggc_cache_ptr_hash<packable_type_hash>
{
  static inline hashval_t hash (packable_type_hash *t) { return t->hash; }
  static bool equal (packable_type_hash *a, packable_type_hash *b);

  static int
  keep_cache_entry (packable_type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};

static GTY ((cache)) hash_table<packable_type_hasher> *packable_type_hash_table;

/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */

struct GTY((for_user)) pad_type_hash
{
  hashval_t hash;
  tree type;
};

struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
{
  static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
  static bool equal (pad_type_hash *a, pad_type_hash *b);

  static int
  keep_cache_entry (pad_type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};

static GTY ((cache)) hash_table<pad_type_hasher> *pad_type_hash_table;

static tree merge_sizes (tree, tree, tree, bool, bool);
static tree fold_bit_position (const_tree);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);

/* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
   of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes.  */
struct deferred_decl_context_node
{
  /* The ..._DECL node to work on.  */
  tree decl;

  /* The corresponding entity's Scope.  */
  Entity_Id gnat_scope;

  /* The value of force_global when DECL was pushed.  */
  int force_global;

  /* The list of ..._TYPE nodes to propagate the context to.  */
  vec<tree> types;

  /* The next queue item.  */
  struct deferred_decl_context_node *next;
};

static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;

/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  */
static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);

/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */
static void add_deferred_type_context (struct deferred_decl_context_node *n,
				       tree type);

/* Initialize data structures of the utils.c module.  */

void
init_gnat_utils (void)
{
  /* Initialize the association of GNAT nodes to GCC trees.  */
  associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the association of GNAT nodes to GCC trees as dummies.  */
  dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the hash table of packable types.  */
  packable_type_hash_table = hash_table<packable_type_hasher>::create_ggc (512);

  /* Initialize the hash table of padded types.  */
  pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
}

/* Destroy data structures of the utils.c module.  */

void
destroy_gnat_utils (void)
{
  /* Destroy the association of GNAT nodes to GCC trees.  */
  ggc_free (associate_gnat_to_gnu);
  associate_gnat_to_gnu = NULL;

  /* Destroy the association of GNAT nodes to GCC trees as dummies.  */
  ggc_free (dummy_node_table);
  dummy_node_table = NULL;

  /* Destroy the hash table of packable types.  */
  packable_type_hash_table->empty ();
  packable_type_hash_table = NULL;

  /* Destroy the hash table of padded types.  */
  pad_type_hash_table->empty ();
  pad_type_hash_table = NULL;
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Associate GNU_DECL, a GCC
   tree node, with GNAT_ENTITY.  If GNU_DECL is not a ..._DECL node, abort.
   If NO_CHECK is true, the latter check is suppressed.

   If GNU_DECL is zero, reset a previous association.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  If that is not the case, this usually
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
		&& (PRESENT_GNU_TREE (gnat_entity)
		    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Return the GCC tree node
   that was associated with it.  If there is no such tree node, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}

/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
  tree gnu_type, debug_type;

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_equiv))
    gnat_equiv = gnat_type;

  /* If there is already a dummy type, use it; else make a new one.  */
  if (PRESENT_DUMMY_NODE (gnat_equiv))
    return GET_DUMMY_NODE (gnat_equiv);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_equiv)
			? tree_code_for_record_type (gnat_equiv)
			: ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_equiv))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;
  if (Has_Discriminants (gnat_equiv))
    decl_attributes (&gnu_type,
		     tree_cons (get_identifier ("may_alias"), NULL_TREE,
				NULL_TREE),
		     ATTR_FLAG_TYPE_IN_PLACE);

  SET_DUMMY_NODE (gnat_equiv, gnu_type);

  /* Create a debug type so that debuggers only see an unspecified type.  */
  if (Needs_Debug_Info (gnat_type))
    {
      debug_type = make_node (LANG_TYPE);
      TYPE_NAME (debug_type) = TYPE_NAME (gnu_type);
      TYPE_ARTIFICIAL (debug_type) = TYPE_ARTIFICIAL (gnu_type);
      SET_TYPE_DEBUG_TYPE (gnu_type, debug_type);
    }

  return gnu_type;
}

/* Return the dummy type that was made for GNAT_TYPE, if any.  */

tree
get_dummy_type (Entity_Id gnat_type)
{
  return GET_DUMMY_NODE (gnat_type);
}

/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
			     gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
			      gnu_fat_type, NULL_TREE, NULL_TREE, 0, 1);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
			 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 1);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_REFERENCE_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}

/* Return true if we are in the global binding level.  */

bool
global_bindings_p (void)
{
  return force_global || !current_function_decl;
}

/* Enter a new binding level.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel = ggc_alloc<gnat_binding_level> ();

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = NULL_TREE;
  BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}

/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}

/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK, don't do anything.  Otherwise, if
     there are no variables, free the block and merge its subblocks into
     those of its parent block; otherwise, add it to the list of its
     parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (!BLOCK_VARS (block))
    {
      BLOCK_SUBBLOCKS (level->chain->block)
	= block_chainon (BLOCK_SUBBLOCKS (block),
			 BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

/* Exit a binding level and discard the associated BLOCK.  */

void
gnat_zaplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_CHAIN (block) = free_block_chain;
  free_block_chain = block;

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

/* Set the context of TYPE and its parallel types (if any) to CONTEXT.  */

static void
gnat_set_type_context (tree type, tree context)
{
  tree decl = TYPE_STUB_DECL (type);

  TYPE_CONTEXT (type) = context;

  while (decl && DECL_PARALLEL_TYPE (decl))
    {
      tree parallel_type = DECL_PARALLEL_TYPE (decl);

      /* Give a context to the parallel types and their stub decl, if any.
	 Some parallel types seem to be present in multiple parallel type
	 chains, so don't mess with their context if they already have one.  */
      if (!TYPE_CONTEXT (parallel_type))
	{
	  if (TYPE_STUB_DECL (parallel_type))
	    DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
	  TYPE_CONTEXT (parallel_type) = context;
	}

      decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
    }
}

/* Return the innermost scope, starting at GNAT_NODE, that we are interested
   in for debug info, or Empty if there is no such scope.  If not NULL, set
   IS_SUBPROGRAM to whether the returned entity is a subprogram.  */

Entity_Id
get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
{
  Entity_Id gnat_entity;

  if (is_subprogram)
    *is_subprogram = false;

  if (Nkind (gnat_node) == N_Defining_Identifier
      || Nkind (gnat_node) == N_Defining_Operator_Symbol)
    gnat_entity = Scope (gnat_node);
  else
    return Empty;

  while (Present (gnat_entity))
    {
      switch (Ekind (gnat_entity))
	{
	case E_Function:
	case E_Procedure:
	  if (Present (Protected_Body_Subprogram (gnat_entity)))
	    gnat_entity = Protected_Body_Subprogram (gnat_entity);

	  /* If the scope is a subprogram, then just rely on
	     current_function_decl, so that we don't have to defer
	     anything.  This is needed because other places rely on the
	     validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes.  */
	  if (is_subprogram)
	    *is_subprogram = true;
	  return gnat_entity;

	case E_Record_Type:
	case E_Record_Subtype:
	  return gnat_entity;

	default:
	  /* By default, we are not interested in this particular scope: go to
	     the outer one.  */
	  break;
	}

      gnat_entity = Scope (gnat_entity);
    }

  return Empty;
}

/* If N is NULL, set TYPE's context to CONTEXT.  Defer this to the processing
   of N otherwise.  */

static void
defer_or_set_type_context (tree type, tree context,
			   struct deferred_decl_context_node *n)
{
  if (n)
    add_deferred_type_context (n, type);
  else
    gnat_set_type_context (type, context);
}

/* Return global_context, but create it first if need be.  */

static tree
get_global_context (void)
{
  if (!global_context)
    {
      global_context
	= build_translation_unit_decl (get_identifier (main_input_filename));
      debug_hooks->register_main_translation_unit (global_context);
    }

  return global_context;
}

/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  tree context = NULL_TREE;
  struct deferred_decl_context_node *deferred_decl_context = NULL;

  /* If explicitly asked to make DECL global or if it's an imported nested
     object, short-circuit the regular Scope-based context computation.  */
  if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
    {
      /* Rely on the GNAT scope, or fall back to the current_function_decl if
	 the GNAT scope reached the global scope, if it reached a subprogram
	 or the declaration is a subprogram or a variable (for them we skip
	 intermediate context types because the subprogram body elaboration
	 machinery and the inliner both expect a subprogram context).

	 Falling back to current_function_decl is necessary for implicit
	 subprograms created by gigi, such as the elaboration subprograms.  */
      bool context_is_subprogram = false;
      const Entity_Id gnat_scope
        = get_debug_scope (gnat_node, &context_is_subprogram);

      if (Present (gnat_scope)
	  && !context_is_subprogram
	  && TREE_CODE (decl) != FUNCTION_DECL
	  && TREE_CODE (decl) != VAR_DECL)
	/* Always assume the scope has not been elaborated, thus defer the
	   context propagation to the time its elaboration will be
	   available.  */
	deferred_decl_context
	  = add_deferred_decl_context (decl, gnat_scope, force_global);

      /* External declarations (when force_global > 0) may not be in a
	 local context.  */
      else if (current_function_decl && force_global == 0)
	context = current_function_decl;
    }

  /* If either we are forced to be in global mode or if both the GNAT scope and
     the current_function_decl did not help in determining the context, use the
     global scope.  */
  if (!deferred_decl_context && !context)
    context = get_global_context ();

  /* Functions imported in another function are not really nested.
     For really nested functions mark them initially as needing
     a static chain for uses of that flag before unnesting;
     lower_nested_functions will then recompute it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && !TREE_PUBLIC (decl)
      && context
      && (TREE_CODE (context) == FUNCTION_DECL
	  || decl_function_context (context)))
    DECL_STATIC_CHAIN (decl) = 1;

  if (!deferred_decl_context)
    DECL_CONTEXT (decl) = context;

  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node) && !renaming_from_instantiation_p (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
        && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      /* External declarations must go to the binding level they belong to.
	 This makes the corresponding imported entities available in the
	 debugger at the proper time.  */
      if (DECL_EXTERNAL (decl)
	  && TREE_CODE (decl) == FUNCTION_DECL
	  && fndecl_built_in_p (decl))
	vec_safe_push (builtin_decls, decl);
      else if (global_bindings_p ())
	vec_safe_push (global_decls, decl);
      else
	{
	  DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
	  BLOCK_VARS (current_binding_level->block) = decl;
	}
    }

  /* For the declaration of a type, set its name either if it isn't already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same node, except when the names are
     both derived from source names.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      /* Array and pointer types aren't tagged types in the C sense so we need
	 to generate a typedef in DWARF for them and make sure it is preserved,
	 unless the type is artificial.  */
      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
	  && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
	      || DECL_ARTIFICIAL (decl)))
	;
      /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
	 generate the typedef in DWARF.  Also do that for fat pointer types
	 because, even though they are tagged types in the C sense, they are
	 still XUP types attached to the base array type at this point.  */
      else if (!DECL_ARTIFICIAL (decl)
	       && (TREE_CODE (t) == ARRAY_TYPE
		   || TREE_CODE (t) == POINTER_TYPE
		   || TYPE_IS_FAT_POINTER_P (t)))
	{
	  tree tt = build_variant_type_copy (t);
	  TYPE_NAME (tt) = decl;
	  defer_or_set_type_context (tt,
				     DECL_CONTEXT (decl),
				     deferred_decl_context);
	  TREE_TYPE (decl) = tt;
	  if (TYPE_NAME (t)
	      && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
	      && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
	    DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
	  else
	    DECL_ORIGINAL_TYPE (decl) = t;
	  /* Array types need to have a name so that they can be related to
	     their GNAT encodings.  */
	  if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
	    TYPE_NAME (t) = DECL_NAME (decl);
	  /* Remark the canonical fat pointer type as artificial.  */
	  if (TYPE_IS_FAT_POINTER_P (t))
	    TYPE_ARTIFICIAL (t) = 1;
	  t = NULL_TREE;
	}
      else if (TYPE_NAME (t)
	       && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
	       && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
	;
      else
	t = NULL_TREE;

      /* Propagate the name to all the variants; this is needed for the type
	 qualifiers machinery to work properly (see check_qualified_type).
	 Also propagate the context to them.  Note that it will be propagated
	 to all parallel types too thanks to gnat_set_type_context.  */
      if (t)
	for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
	  /* ??? Because of the previous kludge, we can have variants of fat
	     pointer types with different names.  */
	  if (!(TYPE_IS_FAT_POINTER_P (t)
		&& TYPE_NAME (t)
		&& TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	    {
	      TYPE_NAME (t) = decl;
	      defer_or_set_type_context (t,
					 DECL_CONTEXT (decl),
					 deferred_decl_context);
	    }
    }
}

/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  GNAT_NODE is used for the position of the
   associated TYPE_DECL.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
		    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

                    <--------- pos ---------->
                {  +------------+-------------+-----------------+
      record  =>{  |############|     ...     | field (type)    |
                {  +------------+-------------+-----------------+
		   |<-- room -->|<- voffset ->|<---- size ----->|
		   o            o
		   |            |
		   record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
			   fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
			   size_int ((align / BITS_PER_UNIT) - 1));
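
  /* Worked example (illustrative values only, not from the original code):
     for ALIGN == 32 bits, i.e. a 4-byte boundary, and vblock_addr == 0x1005,
     we get -0x1005 & 3 == 3, so voffset == 3 and the field starts at 0x1008,
     the next 4-byte boundary past vblock_addr.  */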

  /* POS = (ROOM + VOFFSET) * BITS_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
		    convert (bitsizetype,
			     size_binop (PLUS_EXPR, room_st, voffset_st)),
                    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
			     pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  SET_TYPE_ALIGN (record_type, base_align);
  TYPE_USER_ALIGN (record_type) = 1;

  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
                  size_binop (MULT_EXPR, convert (bitsizetype, size),
                              bitsize_unit_node),
		  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
		  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}

/* Return true iff the packable types are equivalent.  */

bool
packable_type_hasher::equal (packable_type_hash *t1, packable_type_hash *t2)
{
  tree type1, type2;

  if (t1->hash != t2->hash)
    return 0;

  type1 = t1->type;
  type2 = t2->type;

  /* We consider that packable types are equivalent if they have the same
     name, size, alignment, RM size and storage order.  Taking the mode into
     account is redundant since it is determined by the others.  */
  return
    TYPE_NAME (type1) == TYPE_NAME (type2)
    && TYPE_SIZE (type1) == TYPE_SIZE (type2)
    && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
    && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
    && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
}

/* Compute the hash value for the packable TYPE.  */

static hashval_t
hash_packable_type (tree type)
{
  hashval_t hashcode;

  hashcode = iterative_hash_expr (TYPE_NAME (type), 0);
  hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
  hashcode
    = iterative_hash_hashval_t (TYPE_REVERSE_STORAGE_ORDER (type), hashcode);

  return hashcode;
}

/* Look up the packable TYPE in the hash table and return its canonical version
   if it exists; otherwise, insert it into the hash table.  */

static tree
canonicalize_packable_type (tree type)
{
  const hashval_t hashcode = hash_packable_type (type);
  struct packable_type_hash in, *h, **slot;

  in.hash = hashcode;
  in.type = type;
  slot = packable_type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  h = *slot;
  if (!h)
    {
      h = ggc_alloc<packable_type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *slot = h;
    }

  return h->type;
}

/* TYPE is an ARRAY_TYPE that is being used as the type of a field in a packed
   record.  See if we can rewrite it as a type that has non-BLKmode, which we
   can pack tighter in the packed record.  If so, return the new type; if not,
   return the original type.  */

static tree
make_packable_array_type (tree type)
{
  const unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  unsigned int new_align;

  /* No point in doing anything if the size is either zero or too large for an
     integral mode, or if the type already has non-BLKmode.  */
  if (size == 0 || size > MAX_FIXED_MODE_SIZE || TYPE_MODE (type) != BLKmode)
    return type;

  /* Punt if the component type is an aggregate type for now.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (type)))
    return type;

  tree new_type = copy_type (type);

  new_size = ceil_pow2 (size);
  new_align = MIN (new_size, BIGGEST_ALIGNMENT);
  SET_TYPE_ALIGN (new_type, new_align);

  TYPE_SIZE (new_type) = bitsize_int (new_size);
  TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);

  SET_TYPE_MODE (new_type, mode_for_size (new_size, MODE_INT, 1).else_blk ());

  return new_type;
}
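
/* For instance (an illustrative case, not from the original sources): a
   3-byte array of bytes comes in with BLKmode and a 24-bit size; ceil_pow2
   rounds the size up to 32 bits, so the copy can be given a 32-bit integral
   mode (SImode on typical targets) and be packed like a scalar.  */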

/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the type of a field in a packed record if IN_RECORD is true, or as
   the component type of a packed array if IN_RECORD is false.  See if we
   can rewrite it either as a type that has non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type with at most
   MAX_ALIGN alignment if the value is non-zero.  If so, return the new
   type; if not, return the original type.  */

tree
make_packable_type (tree type, bool in_record, unsigned int max_align)
{
  const unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  const unsigned int align = TYPE_ALIGN (type);
  unsigned HOST_WIDE_INT new_size;
  unsigned int new_align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  tree new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_PACKED (new_type) = 1;
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      new_size = ceil_pow2 (size);
      new_align = MIN (new_size, BIGGEST_ALIGNMENT);
      SET_TYPE_ALIGN (new_type, new_align);
    }
  else
    {
      tree ada_size = TYPE_ADA_SIZE (type);

      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type) || !tree_fits_uhwi_p (ada_size))
	return type;

      /* Round the RM size up to a unit boundary to get the minimal size
	 for a BLKmode record.  Give up if it's already the size and we
	 don't need to lower the alignment.  */
      new_size = tree_to_uhwi (ada_size);
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
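      /* E.g. with BITS_PER_UNIT == 8, an RM size of 29 bits rounds up to
	 32 bits, i.e. a 4-byte BLKmode record (illustrative values).  */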
      if (new_size == size && (max_align == 0 || align <= max_align))
	return type;

      new_align = MIN (new_size & -new_size, BIGGEST_ALIGNMENT);
      if (max_align > 0 && new_align > max_align)
	new_align = max_align;
      SET_TYPE_ALIGN (new_type, MIN (align, new_align));
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  tree new_field_list = NULL_TREE;
  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      tree new_field_type = TREE_TYPE (field);
      tree new_field, new_field_size;

      if (AGGREGATE_TYPE_P (new_field_type)
	  && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
	{
	  if (RECORD_OR_UNION_TYPE_P (new_field_type)
	      && !TYPE_FAT_POINTER_P (new_field_type))
	    new_field_type
	      = make_packable_type (new_field_type, true, max_align);
	  else if (in_record
		   && max_align > 0
		   && max_align < BITS_PER_UNIT
		   && TREE_CODE (new_field_type) == ARRAY_TYPE)
	    new_field_type = make_packable_array_type (new_field_type);
	}

      /* However, for the last field in a not already packed record type
	 that is of an aggregate type, we need to use the RM size in the
	 packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (field)
	  && !TYPE_PACKED (type)
	  && RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
	  && TYPE_ADA_SIZE (new_field_type))
	new_field_size = TYPE_ADA_SIZE (new_field_type);
      else
	{
	  new_field_size = DECL_SIZE (field);

	  /* Make sure not to use too small a type for the size.  */
	  if (TYPE_MODE (new_field_type) == BLKmode)
	    new_field_type = TREE_TYPE (field);
	}

      /* This is a layout with full representation, alignment and size clauses
	 so we simply pass 0 as PACKED like gnat_to_gnu_field in this case.  */
      new_field
	= create_field_decl (DECL_NAME (field), new_field_type, new_type,
			     new_field_size, bit_position (field), 0,
			     !DECL_NONADDRESSABLE_P (field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
	DECL_QUALIFIER (new_field) = DECL_QUALIFIER (field);

      DECL_CHAIN (new_field) = new_field_list;
      new_field_list = new_field;
    }

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  finish_record_type (new_type, nreverse (new_field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    SET_TYPE_DEBUG_TYPE (new_type, TYPE_DEBUG_TYPE (type));
  else if (TYPE_STUB_DECL (type))
    SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
			    DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* Try harder to get a packable type if necessary, for example in case
     the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
		   mode_for_size_tree (TYPE_SIZE (new_type),
				       MODE_INT, 1).else_blk ());

  /* If neither mode nor size nor alignment shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size && max_align == 0)
    return type;

  /* If the packable type is named, we canonicalize it by means of the hash
     table.  This is consistent with the language semantics and ensures that
     gigi and the middle-end have a common view of these packable types.  */
  return
    TYPE_NAME (new_type) ? canonicalize_packable_type (new_type) : new_type;
}

/* Return true if TYPE has an unsigned representation.  This needs to be used
   when the representation of types whose precision is not equal to their size
   is manipulated based on the RM size.  */

static inline bool
type_unsigned_for_rm (tree type)
{
  /* This is the common case.  */
  if (TYPE_UNSIGNED (type))
    return true;

  /* See the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.  */
  if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
      && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
    return true;

  return false;
}
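
/* For instance (illustrative only): a subtype with range 0 .. 100 may have
   a signed base type and thus !TYPE_UNSIGNED, yet its constant non-negative
   lower bound makes its RM representation unsigned, so the function above
   returns true for it.  */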

/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.  */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case BOOLEAN_TYPE:
      /* Do not mess with boolean types that have foreign convention.  */
      if (TYPE_PRECISION (type) == 1 && TYPE_SIZE (type) == size_tree)
	break;

      /* ... fall through ... */

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
		  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
	size = 1;

      /* Only do something if the type is not a bit-packed array type and does
	 not already have the proper size and the size is not too large.  */
      if (BIT_PACKED_ARRAY_TYPE_P (type)
	  || (TYPE_PRECISION (type) == size && biased_p == for_biased)
	  || size > (Enable_128bit_Types ? 128 : LONG_LONG_TYPE_SIZE))
	break;

      biased_p |= for_biased;

      /* The type should be an unsigned type if the original type is unsigned
	 or if the lower bound is constant and non-negative or if the type is
	 biased, see E_Signed_Integer_Subtype case of gnat_to_gnu_entity.  */
      if (type_unsigned_for_rm (type) || biased_p)
	new_type = make_unsigned_type (size);
      else
	new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
      /* Copy the name to show that it's essentially the same type and
	 not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
	 may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
	{
	  scalar_int_mode p_mode;
	  if (!int_mode_for_size (size, 0).exists (&p_mode)
	      || !targetm.valid_pointer_mode (p_mode))
	    p_mode = ptr_mode;
	  return
	    build_pointer_type_for_mode
	      (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
	       p_mode, 0);
	}
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
	 may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
	return
	  build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  return type;
}
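
/* Two illustrative applications of make_type_from_size (hedged, not taken
   from an actual call site): given a 32-bit INTEGER_TYPE and a SIZE_TREE
   of 8, it returns a fresh 8-bit integral type that inherits the name and
   bounds of the original and has an RM size of 8; given a fat pointer and
   a size of exactly one pointer, it returns the corresponding thin
   pointer, and the POINTER_TYPE case performs the reverse widening.  */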

/* Return true iff the padded types are equivalent.  */

bool
pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
{
  tree type1, type2;

  if (t1->hash != t2->hash)
    return 0;

  type1 = t1->type;
  type2 = t2->type;

  /* We consider that padded types are equivalent if they pad the same type
     and have the same size, alignment, RM size and storage order.  Taking the
     mode into account is redundant since it is determined by the others.  */
  return
    TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
    && TYPE_SIZE (type1) == TYPE_SIZE (type2)
    && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
    && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
    && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
}

/* Compute the hash value for the padded TYPE.  */

static hashval_t
hash_pad_type (tree type)
{
  hashval_t hashcode;

  hashcode
    = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
  hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
  hashcode
    = iterative_hash_hashval_t (TYPE_REVERSE_STORAGE_ORDER (type), hashcode);

  return hashcode;
}

/* Look up the padded TYPE in the hash table and return its canonical version
   if it exists; otherwise, insert it into the hash table.  */

static tree
canonicalize_pad_type (tree type)
{
  const hashval_t hashcode = hash_pad_type (type);
  struct pad_type_hash in, *h, **slot;

  in.hash = hashcode;
  in.type = type;
  slot = pad_type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  h = *slot;
  if (!h)
    {
      h = ggc_alloc<pad_type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *slot = h;
    }

  return h->type;
}

/* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
   if needed.  We have already verified that SIZE and ALIGN are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array.  DEFINITION is true if this type is being defined.  SET_RM_SIZE
   is true if the RM size of the resulting type is to be set to SIZE too; in
   this case, the padded type is canonicalized before being returned.  */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
		Entity_Id gnat_entity, bool is_component_type,
		bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
	   || operand_equal_p (round_up (size, orig_align), orig_size, 0))
	  && (align == 0 || align == orig_align))
	return type;

      if (!size)
	size = orig_size;
      if (align == 0)
	align = orig_align;

      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
	  || (TREE_CODE (orig_size) == INTEGER_CST
	      && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  SET_TYPE_ALIGN (record, align ? align : orig_align);
  TYPE_SIZE (record) = size ? size : orig_size;
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
	       size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
			   bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align > 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
	  || (TREE_CODE (size) == INTEGER_CST
	      && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true, align);
      if (TYPE_MODE (packable_type) != BLKmode
	  && compare_tree_int (TYPE_SIZE (packable_type), align) <= 0)
        type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
			     bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* We will output additional debug info manually below.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
	 it by means of the hash table.  This is consistent with the language
	 semantics and ensures that gigi and the middle-end have a common view
	 of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record)))
	{
	  tree canonical = canonicalize_pad_type (record);
	  if (canonical != record)
	    {
	      record = canonical;
	      goto built;
	    }
	}
    }

  /* Make the inner type the debug type of the padded type.  */
  if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    SET_TYPE_DEBUG_TYPE (record, maybe_debug_type (type));

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	   && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree name = TYPE_IDENTIFIER (record);
      tree size_unit = TYPE_SIZE_UNIT (record);

      /* A variable that holds the size is required even with no encoding since
	 it will be referenced by debugging information attributes.  At global
	 level, we need a single variable across all translation units.  */
      if (size
	  && TREE_CODE (size) != INTEGER_CST
	  && (definition || global_bindings_p ()))
	{
	  /* Whether or not gnat_entity comes from source, this XVZ variable
	     is a compilation artifact.  */
	  size_unit
	    = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
			      size_unit, true, global_bindings_p (),
			      !definition && global_bindings_p (), false,
			      false, true, true, NULL, gnat_entity);
	  TYPE_SIZE_UNIT (record) = size_unit;
	}

      /* There is no need to show what we are a subtype of when outputting as
	 few encodings as possible: regular debugging information makes this
	 redundant.  */
      if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
	{
	  tree marker = make_node (RECORD_TYPE);
	  tree orig_name = TYPE_IDENTIFIER (type);

	  TYPE_NAME (marker) = concat_name (name, "XVS");
	  finish_record_type (marker,
			      create_field_decl (orig_name,
						 build_reference_type (type),
						 marker, NULL_TREE, NULL_TREE,
						 0, 0),
			      0, true);
	  TYPE_SIZE_UNIT (marker) = size_unit;

	  add_parallel_type (record, marker);
	}
    }

built:
  /* If a simple size was explicitly given, maybe issue a warning.  */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity))
    return record;

  /* But don't do it if we are just annotating types and the type is tagged or
     concurrent, since these types aren't fully laid out in this mode.  */
  if (type_annotate_only)
    {
      Entity_Id gnat_type
	= is_component_type
	  ? Component_Type (gnat_entity) : Etype (gnat_entity);

      if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
	return record;
    }

  /* Take the original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified, but only for aggregate types.  */
  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align && AGGREGATE_TYPE_P (type))
    orig_size = round_up (orig_size, align);

  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
	   && TREE_CODE (orig_size) == INTEGER_CST
	   && (TREE_OVERFLOW (size)
	       || TREE_OVERFLOW (orig_size)
	       || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node;

      /* For a packed array, post the message on the original array type.  */
      if (Is_Packed_Array_Impl_Type (gnat_entity))
	gnat_entity = Original_Array_Type (gnat_entity);

      if ((Ekind (gnat_entity) == E_Component
	   || Ekind (gnat_entity) == E_Discriminant)
	  && Present (Component_Clause (gnat_entity)))
	gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Has_Size_Clause (gnat_entity))
	gnat_error_node = Expression (Size_Clause (gnat_entity));
      else if (Has_Object_Size_Clause (gnat_entity))
	gnat_error_node = Expression (Object_Size_Clause (gnat_entity));
      else
	gnat_error_node = Empty;

      /* Generate message only for entities that come from source, since
	 if we have an entity created by expansion, the message will be
	 generated for some other corresponding source entity.  */
      if (Comes_From_Source (gnat_entity))
	{
	  if (is_component_type)
	    post_error_ne_tree ("component of& padded{ by ^ bits}?",
				gnat_entity, gnat_entity,
				size_diffop (size, orig_size));
	  else if (Present (gnat_error_node))
	    post_error_ne_tree ("{^ }bits of & unused?",
				gnat_error_node, gnat_entity,
				size_diffop (size, orig_size));
	}
    }

  return record;
}
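
/* A hedged sketch of a typical use of maybe_pad_type from gigi (the exact
   call sites and arguments vary): for an Ada record type R subject to
   "for R'Size use 64;", the translation would run along the lines of

     gnu_type = maybe_pad_type (gnu_type, bitsize_int (64), 0, gnat_entity,
				false, true, true);

   wrapping the original type in a PAD record that contains a single field
   "F" of the original size and, if the requested size leaves bits unused,
   posting the "bits of & unused" warning on the size clause.  */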

/* Return true if padded TYPE was built with an RM size.  */

bool
pad_type_has_rm_size (tree type)
{
  /* This is required for the lookup.  */
  if (!TREE_CONSTANT (TYPE_SIZE (type)))
    return false;

  const hashval_t hashcode = hash_pad_type (type);
  struct pad_type_hash in, *h;

  in.hash = hashcode;
  in.type = type;
  h = pad_type_hash_table->find_with_hash (&in, hashcode);

  /* The types built with an RM size are the canonicalized ones.  */
  return h && h->type == type;
}

/* Return a copy of the padded TYPE but with reverse storage order.  */

tree
set_reverse_storage_order_on_pad_type (tree type)
{
  if (flag_checking)
    {
      /* If the inner type is not scalar then the function does nothing.  */
      tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
      gcc_assert (!AGGREGATE_TYPE_P (inner_type)
		  && !VECTOR_TYPE_P (inner_type));
    }

  /* This is required for the canonicalization.  */
  gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));

  tree field = copy_node (TYPE_FIELDS (type));
  type = copy_type (type);
  DECL_CONTEXT (field) = type;
  TYPE_FIELDS (type) = field;
  TYPE_REVERSE_STORAGE_ORDER (type) = 1;
  return canonicalize_pad_type (type);
}

/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
   If this is a multi-dimensional array type, do this recursively.

   OP may be
   - ALIAS_SET_COPY:     the new set is made a copy of the old one.
   - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
   - ALIAS_SET_SUBSET:   the new set is made a subset of the old one.  */

void
relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
{
  /* Remove any padding from GNU_OLD_TYPE.  It doesn't matter in the case
     of a one-dimensional array, since the padding has the same alias set
     as the field type, but if it's a multi-dimensional array, we need to
     see the inner types.  */
  while (TREE_CODE (gnu_old_type) == RECORD_TYPE
	 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
	     || TYPE_PADDING_P (gnu_old_type)))
    gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));

  /* Unconstrained array types are deemed incomplete and would thus be given
     alias set 0.  Retrieve the underlying array type.  */
  if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_old_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
  if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_new_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));

  if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
      && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
    relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);

  switch (op)
    {
    case ALIAS_SET_COPY:
      /* The alias set shouldn't be copied between array types with different
	 aliasing settings because this can break the aliasing relationship
	 between the array type and its element type.  */
      if (flag_checking || flag_strict_aliasing)
	gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
		      && TREE_CODE (gnu_old_type) == ARRAY_TYPE
		      && TYPE_NONALIASED_COMPONENT (gnu_new_type)
			 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));

      TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
      break;

    case ALIAS_SET_SUBSET:
    case ALIAS_SET_SUPERSET:
      {
	alias_set_type old_set = get_alias_set (gnu_old_type);
	alias_set_type new_set = get_alias_set (gnu_new_type);

	/* Do nothing if the alias sets conflict.  This ensures that we
	   never call record_alias_subset several times for the same pair
	   or at all for alias set 0.  */
	if (!alias_sets_conflict_p (old_set, new_set))
	  {
	    if (op == ALIAS_SET_SUBSET)
	      record_alias_subset (old_set, new_set);
	    else
	      record_alias_subset (new_set, old_set);
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }

  record_component_aliases (gnu_new_type);
}
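
/* By way of illustration (the variable names are hypothetical): when a
   derived array type is built from a parent array type, the two can be
   made to alias each other with

     relate_alias_sets (gnu_derived_type, gnu_parent_type, ALIAS_SET_COPY);

   whereas ALIAS_SET_SUBSET and ALIAS_SET_SUPERSET only record a one-way
   inclusion between the two sets via record_alias_subset.  */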

/* Record TYPE as a builtin type for Ada.  NAME is the name of the type.
   ARTIFICIAL_P is true if the type was generated by the compiler.  */

void
record_builtin_type (const char *name, tree type, bool artificial_p)
{
  tree type_decl = build_decl (input_location,
			       TYPE_DECL, get_identifier (name), type);
  DECL_ARTIFICIAL (type_decl) = artificial_p;
  TYPE_ARTIFICIAL (type) = artificial_p;
  gnat_pushdecl (type_decl, Empty);

  if (debug_hooks->type_decl)
    debug_hooks->type_decl (type_decl, false);
}

/* Finish constructing the character type CHAR_TYPE.

  In Ada character types are enumeration types and, as a consequence, are
  represented in the front-end by integral types holding the positions of
  the enumeration values as defined by the language, which means that the
  integral types are unsigned.

  Unfortunately the signedness of 'char' in C is implementation-defined
  and GCC even has the option -f[un]signed-char to toggle it at run time.
  Since GNAT's philosophy is to be compatible with C by default, to wit
  Interfaces.C.char is defined as a mere copy of Character, we may need
  to declare character types as signed types in GENERIC and generate the
  necessary adjustments to make them behave as unsigned types.

  The overall strategy is as follows: if 'char' is unsigned, do nothing;
  if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
  character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
  types.  The idea is to ensure that the bit pattern contained in the
  Esize'd objects is not changed, even though the numerical value will
  be interpreted differently depending on the signedness.  */

void
finish_character_type (tree char_type)
{
  if (TYPE_UNSIGNED (char_type))
    return;

  /* Make a copy of a generic unsigned version since we'll modify it.  */
  tree unsigned_char_type
    = (char_type == char_type_node
       ? unsigned_char_type_node
       : copy_type (gnat_unsigned_type_for (char_type)));

  /* Create an unsigned version of the type and set it as debug type.  */
  TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
  TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
  TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
  SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);

  /* If this is a subtype, make the debug type a subtype of the debug type
     of the base type and convert literal RM bounds to unsigned.  */
  if (TREE_TYPE (char_type))
    {
      tree base_unsigned_char_type = TYPE_DEBUG_TYPE (TREE_TYPE (char_type));
      tree min_value = TYPE_RM_MIN_VALUE (char_type);
      tree max_value = TYPE_RM_MAX_VALUE (char_type);

      if (TREE_CODE (min_value) == INTEGER_CST)
	min_value = fold_convert (base_unsigned_char_type, min_value);
      if (TREE_CODE (max_value) == INTEGER_CST)
	max_value = fold_convert (base_unsigned_char_type, max_value);

      TREE_TYPE (unsigned_char_type) = base_unsigned_char_type;
      SET_TYPE_RM_MIN_VALUE (unsigned_char_type, min_value);
      SET_TYPE_RM_MAX_VALUE (unsigned_char_type, max_value);
    }

  /* Adjust the RM bounds of the original type to unsigned; that's especially
     important for base types, whose RM bounds are implicit in this case.  */
  SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
  SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
}
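
/* Illustration of the strategy (assuming a signed 8-bit 'char'): the Ada
   value Character'Val (200) is stored as the bit pattern 16#C8#, which the
   signed GENERIC type reads back as -56.  Keeping the type signed thus
   preserves the bit pattern for C compatibility, while the unsigned RM
   bounds set above still describe the full 0 .. 255 position range.  */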

/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record type as a fat pointer type.  */

void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register.  */
  if (STRICT_ALIGNMENT)
    SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));

  /* Show what it really is.  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
}

/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record or union type.  If REP_LEVEL is zero, this
   record has no representation clause and so will be entirely laid out here.
   If REP_LEVEL is one, this record has a representation clause and has been
   laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
   this record is derived from a parent record and thus inherits its layout;
   only make a pass on the fields to finalize them.  DEBUG_INFO_P is true if
   additional debug info needs to be output for this type.  */

void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  const enum tree_code orig_code = TREE_CODE (record_type);
  const bool had_size = TYPE_SIZE (record_type) != NULL_TREE;
  const bool had_size_unit = TYPE_SIZE_UNIT (record_type) != NULL_TREE;
  const bool had_align = TYPE_ALIGN (record_type) > 0;
  /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
     out just like a UNION_TYPE, since the size will be fixed.  */
  const enum tree_code code
    = (orig_code == QUAL_UNION_TYPE && rep_level > 0 && had_size
       ? UNION_TYPE : orig_code);
  tree name = TYPE_IDENTIFIER (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      if (TYPE_ALIGN (record_type) < BITS_PER_UNIT)
	SET_TYPE_ALIGN (record_type, BITS_PER_UNIT);

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = NULL_TREE;

      /* Ensure we use the traditional GCC layout for bitfields when we need
	 to pack the record type or have a representation clause.  The other
	 possible layout (Microsoft C compiler), if available, would prevent
	 efficient packing in almost all cases.  */
#ifdef TARGET_MS_BITFIELD_LAYOUT
      if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
	decl_attributes (&record_type,
			 tree_cons (get_identifier ("gcc_struct"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
#endif

      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      if (RECORD_OR_UNION_TYPE_P (type)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      const bool variant_part = (TREE_CODE (type) == QUAL_UNION_TYPE);

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  const unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size and
		 maximum alignment, if any.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align)
		       && (!TYPE_MAX_ALIGN (record_type)
			   || TYPE_MAX_ALIGN (record_type) >= align))
		{
		  SET_TYPE_ALIGN (record_type, align);
		  SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is
	     required.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* Clear DECL_BIT_FIELD_TYPE for a variant part at offset 0, it's simply
	 not supported by the DECL_BIT_FIELD_REPRESENTATIVE machinery because
	 the variant part is always the last field in the list.  */
      if (variant_part && integer_zerop (pos))
	DECL_BIT_FIELD_TYPE (field) = NULL_TREE;

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	SET_TYPE_ALIGN (record_type,
			MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));

      switch (code)
	{
	case UNION_TYPE:
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, because we might be processing
	     the REP part of a record with a variant part for which the
	     variant part has a rep clause but not the fixed part, in
	     which case this REP part may contain overlapping fields
	     and thus needs to be treated like a union type above, so
	     use a MAX in that case.  Also, if this field is a variant
	     part, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size, rep_level > 0,
			   variant_part);
	  size
	    = merge_sizes (size, pos, this_size, rep_level > 0, variant_part);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  /* We need to set the regular sizes if REP_LEVEL is one.  */
  if (rep_level == 1)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      tree size_unit = had_size_unit
		       ? TYPE_SIZE_UNIT (record_type)
		       : convert (sizetype,
				  size_binop (CEIL_DIV_EXPR, size,
					      bitsize_unit_node));
      const unsigned int align = TYPE_ALIGN (record_type);

      TYPE_SIZE (record_type) = variable_size (round_up (size, align));
      TYPE_SIZE_UNIT (record_type)
	= variable_size (round_up (size_unit, align / BITS_PER_UNIT));
    }

  /* We need to set the Ada size if REP_LEVEL is zero or one.  */
  if (rep_level < 2)
    {
      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);
    }

  /* We need to set the mode if REP_LEVEL is one or two.  */
  if (rep_level > 0)
    {
      compute_record_mode (record_type);
      finish_bitfield_layout (record_type);
    }

  /* Reset the TYPE_MAX_ALIGN field since it's private to gigi.  */
  TYPE_MAX_ALIGN (record_type) = 0;

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
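
/* A hedged sketch of the REP_LEVEL protocol using the helpers defined in
   this file (the field names are made up): to build a two-field record
   from scratch, gigi does roughly

     tree t = make_node (RECORD_TYPE);
     tree f1 = create_field_decl (get_identifier ("A"), integer_type_node,
				  t, NULL_TREE, NULL_TREE, 0, 0);
     tree f2 = create_field_decl (get_identifier ("B"), integer_type_node,
				  t, NULL_TREE, NULL_TREE, 0, 0);
     DECL_CHAIN (f1) = f2;
     finish_record_type (t, f1, 0, true);

   so that layout_type assigns the field positions; a record whose fields
   were already positioned by a rep clause would pass REP_LEVEL 1 to only
   set the sizes, alignment and mode, and a derived record inheriting its
   parent's layout would pass 2.  */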

/* Append PARALLEL_TYPE on the chain of parallel types of TYPE.  If
   PARALLEL_TYPE has no context and its computation is not deferred yet, also
   propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
   moment TYPE will get a context.  */

void
add_parallel_type (tree type, tree parallel_type)
{
  tree decl = TYPE_STUB_DECL (type);

  while (DECL_PARALLEL_TYPE (decl))
    decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));

  SET_DECL_PARALLEL_TYPE (decl, parallel_type);

  /* If PARALLEL_TYPE already has a context, we are done.  */
  if (TYPE_CONTEXT (parallel_type))
    return;

  /* Otherwise, try to get one from TYPE's context.  If so, simply propagate
     it to PARALLEL_TYPE.  */
  if (TYPE_CONTEXT (type))
    gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));

  /* Otherwise TYPE has no context yet.  We know it will have one thanks to
     gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
     so we have nothing to do in this case.  */
}

/* Return true if TYPE has a parallel type.  */

static bool
has_parallel_type (tree type)
{
  tree decl = TYPE_STUB_DECL (type);

  return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
}

/* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
   associated with it.  It need not be invoked directly in most cases as
   finish_record_type takes care of doing so.  */

void
rest_of_record_type_compilation (tree record_type)
{
  bool var_size = false;
  tree field;

  /* If this is a padded type, the bulk of the debug info has already been
     generated for the field's type.  */
  if (TYPE_IS_PADDING_P (record_type))
    return;

  /* If the type already has a parallel type (XVS type), then we're done.  */
  if (has_parallel_type (record_type))
    return;

  for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
	 whether or not the record does.  For example, if we have a union,
	 it may be that all fields, rounded up to the alignment, have the
	 same size, in which case we'll use that size.  But the debug
	 output routines (except Dwarf2) won't be able to output the fields,
	 so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
	  /* If a field has a non-constant qualifier, the record will have
	     variable size too.  */
	  || (TREE_CODE (record_type) == QUAL_UNION_TYPE
	      && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
	{
	  var_size = true;
	  break;
	}
    }

  /* If this record type is of variable size, make a parallel record type that
     will tell the debugger how the former is laid out (see exp_dbug.ads).  */
  if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
    {
      tree new_record_type
	= make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
		     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
      tree last_pos = bitsize_zero_node;

      new_name
	= concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
				  ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
      TYPE_STUB_DECL (new_record_type)
	= create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
	= DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
	= size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      /* Now scan all the fields, replacing each field with a new field
	 corresponding to the new encoding.  */
      for (tree old_field = TYPE_FIELDS (record_type);
	   old_field;
	   old_field = DECL_CHAIN (old_field))
	{
	  tree field_type = TREE_TYPE (old_field);
	  tree field_name = DECL_NAME (old_field);
	  tree curpos = fold_bit_position (old_field);
	  tree pos, new_field;
	  bool var = false;
	  unsigned int align = 0;

	  /* See how the position was modified from the last position.

	     There are two basic cases we support: a value was added
	     to the last position or the last position was rounded to
	     a boundary and then something was added.  Check for the
	     first case first.  If not, see if there is any evidence
	     of rounding.  If so, round the last position and retry.

	     If this is a union, the position can be taken as zero.  */
	  if (TREE_CODE (new_record_type) == UNION_TYPE)
	    pos = bitsize_zero_node;
	  else
	    pos = compute_related_constant (curpos, last_pos);

	  if (pos)
	    ;
	  else if (TREE_CODE (curpos) == MULT_EXPR
		   && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
	    {
	      tree offset = TREE_OPERAND (curpos, 0);
	      align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align = scale_by_factor_of (offset, align);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  else if (TREE_CODE (curpos) == PLUS_EXPR
		   && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
		   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
		   && tree_fits_uhwi_p
		      (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
	    {
	      tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
	      unsigned HOST_WIDE_INT addend
		= tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align
		= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
	      align = scale_by_factor_of (offset, align);
	      align = MIN (align, addend & -addend);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  else
	    {
	      align = DECL_ALIGN (old_field);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }

	  /* See if this type is variable-sized and make a pointer type
	     and indicate the indirection if so.  Beware that the debug
	     back-end may adjust the position computed above according
	     to the alignment of the field type, i.e. the pointer type
	     in this case, if we don't preventively counter that.  */
	  if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
	    {
	      field_type = copy_type (build_pointer_type (field_type));
	      SET_TYPE_ALIGN (field_type, BITS_PER_UNIT);
	      var = true;

	      /* ??? Kludge to work around a bug in Workbench's debugger.  */
	      if (align == 0)
		{
		  align = DECL_ALIGN (old_field);
		  last_pos = round_up (last_pos, align);
		  pos = compute_related_constant (curpos, last_pos);
		}
	    }

	  /* If we can't compute a position, set it to zero.

	     ??? We really should abort here, but it's too much work
	     to get this correct for all cases.  */
	  if (!pos)
	    pos = bitsize_zero_node;

	  /* Make a new field name, if necessary.  */
	  if (var || align != 0)
	    {
	      char suffix[16];

	      if (align != 0)
		sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
			 align / BITS_PER_UNIT);
	      else
		strcpy (suffix, "XVL");

	      field_name = concat_name (field_name, suffix);
	    }

	  new_field
	    = create_field_decl (field_name, field_type, new_record_type,
				 DECL_SIZE (old_field), pos, 0, 0);
	  /* The specified position is not the actual position of the field
	     but the gap with the previous field, so the computation of the
	     bit-field status may be incorrect.  We adjust it manually to
	     avoid generating useless attributes for the field in DWARF.  */
	  if (DECL_SIZE (old_field) == TYPE_SIZE (field_type)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    {
	      DECL_BIT_FIELD (new_field) = 0;
	      DECL_BIT_FIELD_TYPE (new_field) = NULL_TREE;
	    }
	  DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
	  TYPE_FIELDS (new_record_type) = new_field;

	  /* If old_field is a QUAL_UNION_TYPE, take its size as being
	     zero.  The only time it's not the last field of the record
	     is when there are other components at fixed positions after
	     it (meaning there was a rep clause for every field) and we
	     want to be able to encode them.  */
	  last_pos = size_binop (PLUS_EXPR, curpos,
				 (TREE_CODE (TREE_TYPE (old_field))
				  == QUAL_UNION_TYPE)
				 ? bitsize_zero_node
				 : DECL_SIZE (old_field));
	}

      TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));

      add_parallel_type (record_type, new_record_type);
    }
}

/* Utility function of above to merge LAST_SIZE, the previous size of a record,
   with FIRST_BIT and SIZE that describe a field.  If MAX is true, we take the
   MAX of the end position of this field with LAST_SIZE.  In all other cases,
   we use FIRST_BIT plus SIZE.  SPECIAL is true if it's for a QUAL_UNION_TYPE,
   in which case we must look for COND_EXPRs and replace a value of zero with
   the old size.  Return an expression for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool max, bool special)
{
  tree type = TREE_TYPE (last_size);
  tree new_size;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      new_size = size_binop (PLUS_EXPR, first_bit, size);
      if (max)
	new_size = size_binop (MAX_EXPR, last_size, new_size);
    }

  else
    new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
			    integer_zerop (TREE_OPERAND (size, 1))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 1),
						       max, special),
			    integer_zerop (TREE_OPERAND (size, 2))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 2),
						       max, special));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through SUBSTITUTE_IN_EXPR) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
    new_size = TREE_OPERAND (new_size, 0);

  return new_size;
}
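
/* Worked example (illustrative): merging a record whose size so far is 32
   with a field at FIRST_BIT 32 of SIZE 16 yields 32 + 16 = 48; with MAX
   true, the result is MAX (32, 48) = 48 too, the two only differing when
   fields overlap.  For a variant part whose size is COND_EXPR (q, 16, 0),
   SPECIAL makes the function recurse on each arm and replace the zero arm
   with the previous size, giving COND_EXPR (q, 48, 32).  */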

/* Convert the size expression EXPR to TYPE and fold the result.  */

static tree
fold_convert_size (tree type, tree expr)
{
  /* We assume that size expressions do not wrap around.  */
  if (TREE_CODE (expr) == MULT_EXPR || TREE_CODE (expr) == PLUS_EXPR)
    return size_binop (TREE_CODE (expr),
		       fold_convert_size (type, TREE_OPERAND (expr, 0)),
		       fold_convert_size (type, TREE_OPERAND (expr, 1)));

  return fold_convert (type, expr);
}

/* Return the bit position of FIELD, in bits from the start of the record,
   and fold it as much as possible.  This is a tree of type bitsizetype.  */

static tree
fold_bit_position (const_tree field)
{
  tree offset = fold_convert_size (bitsizetype, DECL_FIELD_OFFSET (field));
  return size_binop (PLUS_EXPR, DECL_FIELD_BIT_OFFSET (field),
		     size_binop (MULT_EXPR, offset, bitsize_unit_node));
}
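
/* In other words, the position computed above is

     DECL_FIELD_BIT_OFFSET (field) + DECL_FIELD_OFFSET (field) * BITS_PER_UNIT

   so, for example, a field at byte offset 6 and bit offset 4 sits at bit
   6 * 8 + 4 = 52 on a target with 8-bit storage units.  */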

/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
   related by the addition of a constant.  Return that constant if so.  */

static tree
compute_related_constant (tree op0, tree op1)
{
  tree factor, op0_var, op1_var, op0_cst, op1_cst, result;

  if (TREE_CODE (op0) == MULT_EXPR
      && TREE_CODE (op1) == MULT_EXPR
      && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
      && TREE_OPERAND (op1, 1) == TREE_OPERAND (op0, 1))
    {
      factor = TREE_OPERAND (op0, 1);
      op0 = TREE_OPERAND (op0, 0);
      op1 = TREE_OPERAND (op1, 0);
    }
  else
    factor = NULL_TREE;

  op0_cst = split_plus (op0, &op0_var);
  op1_cst = split_plus (op1, &op1_var);
  result = size_binop (MINUS_EXPR, op0_cst, op1_cst);

  if (operand_equal_p (op0_var, op1_var, 0))
    return factor ? size_binop (MULT_EXPR, factor, result) : result;

  return NULL_TREE;
}
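
/* For instance (illustrative values): with OP0 = X + 40 and OP1 = X + 8,
   the split_plus calls yield the constants 40 and 8 with the same variable
   part X, so the function returns 32; with OP0 = (X + 5) * 8 and OP1
   = (X + 1) * 8, the common factor 8 is stripped first and the result is
   4 * 8 = 32.  If the variable parts differ, NULL_TREE is returned.  */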

/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   bitsizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip conversions in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  in = remove_conversions (in, false);

  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      if (lhs_var == TREE_OPERAND (in, 0)
	  && rhs_var == TREE_OPERAND (in, 1))
	return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
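
/* For example (illustrative): splitting X + 3 - 1 sets *PVAR to X and
   returns the constant 2, whereas splitting X * 4 leaves *PVAR as the
   whole expression and returns bitsize_zero_node, since only sums and
   differences are decomposed.  */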

/* Return a copy of TYPE but safe to modify in any way.  */

tree
copy_type (tree type)
{
  tree new_type = copy_node (type);

  /* Unshare the language-specific data.  */
  if (TYPE_LANG_SPECIFIC (type))
    {
      TYPE_LANG_SPECIFIC (new_type) = NULL;
      SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
    }

  /* And the contents of the language-specific slot if needed.  */
  if ((INTEGRAL_TYPE_P (type) || SCALAR_FLOAT_TYPE_P (type))
      && TYPE_RM_VALUES (type))
    {
      TYPE_RM_VALUES (new_type) = NULL_TREE;
      SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
    }

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);

  TYPE_POINTER_TO (new_type) = NULL_TREE;
  TYPE_REFERENCE_TO (new_type) = NULL_TREE;
  TYPE_MAIN_VARIANT (new_type) = new_type;
  TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
  TYPE_CANONICAL (new_type) = new_type;

  return new_type;
}

/* Return a subtype of sizetype with range MIN to MAX and whose
   TYPE_INDEX_TYPE is INDEX.  GNAT_NODE is used for the position
   of the associated TYPE_DECL.  */

tree
create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
{
  /* First build a type for the desired range.  */
  tree type = build_nonshared_range_type (sizetype, min, max);

  /* Then set the index type.  */
  SET_TYPE_INDEX_TYPE (type, index);
  create_type_decl (NULL_TREE, type, true, false, gnat_node);

  return type;
}

/* Return a subtype of TYPE with range MIN to MAX.  If TYPE is NULL,
   sizetype is used.  */

tree
create_range_type (tree type, tree min, tree max)
{
  tree range_type;

  if (!type)
    type = sizetype;

  /* First build a type with the base range.  */
  range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
						 TYPE_MAX_VALUE (type));

  /* Then set the actual range.  */
  SET_TYPE_RM_MIN_VALUE (range_type, min);
  SET_TYPE_RM_MAX_VALUE (range_type, max);

  return range_type;
}
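
/* A small usage sketch (hypothetical variables): the GENERIC type for an
   Ada subtype "range 1 .. 10" of a 32-bit integer type gnu_base could be
   built as

     tree gnu_subtype
       = create_range_type (gnu_base, build_int_cst (gnu_base, 1),
			    build_int_cst (gnu_base, 10));

   keeping the base range of gnu_base for overflow purposes while the RM
   bounds record the 1 .. 10 constraint.  */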

/* Return an extra subtype of TYPE with range MIN to MAX.  */

tree
create_extra_subtype (tree type, tree min, tree max)
{
  const bool uns = TYPE_UNSIGNED (type);
  const unsigned prec = TYPE_PRECISION (type);
  tree subtype = uns ? make_unsigned_type (prec) : make_signed_type (prec);

  TREE_TYPE (subtype) = type;
  TYPE_EXTRA_SUBTYPE_P (subtype) = 1;

  SET_TYPE_RM_MIN_VALUE (subtype, min);
  SET_TYPE_RM_MAX_VALUE (subtype, max);

  return subtype;
}

/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
   NAME gives the name of the type to be used in the declaration.  */

tree
create_type_stub_decl (tree name, tree type)
{
  tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
  DECL_ARTIFICIAL (type_decl) = 1;
  TYPE_ARTIFICIAL (type) = 1;
  return type_decl;
}

/* Return a TYPE_DECL node for TYPE.  NAME gives the name of the type to be
   used in the declaration.  ARTIFICIAL_P is true if the declaration was
   generated by the compiler.  DEBUG_INFO_P is true if we need to write
   debug information about this type.  GNAT_NODE is used for the position
   of the decl.  */

tree
create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
		  Node_Id gnat_node)
{
  enum tree_code code = TREE_CODE (type);
  bool is_named
    = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
  tree type_decl;

  /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
  gcc_assert (!TYPE_IS_DUMMY_P (type));

  /* If the type hasn't been named yet, we're naming it; preserve an existing
     TYPE_STUB_DECL that has been attached to it for some purpose.  */
  if (!is_named && TYPE_STUB_DECL (type))
    {
      type_decl = TYPE_STUB_DECL (type);
      DECL_NAME (type_decl) = name;
    }
  else
    type_decl = build_decl (input_location, TYPE_DECL, name, type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;
  TYPE_ARTIFICIAL (type) = artificial_p;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (type_decl, gnat_node);

  /* If we're naming the type, equate the TYPE_STUB_DECL to the name.  This
     causes the name to be also viewed as a "tag" by the debug back-end, with
     the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
     types in DWARF.

     Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
     from multiple contexts, and "type_decl" references a copy of it: in such
     a case, do not mess with TYPE_STUB_DECL: we do not want to re-use the
     TYPE_DECL with the mechanism above.  */
2717   if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
2718     TYPE_STUB_DECL (type) = type_decl;
2719 
2720   /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2721      back-end doesn't support, and for others if we don't need to.  */
2722   if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2723     DECL_IGNORED_P (type_decl) = 1;
2724 
2725   return type_decl;
2726 }
2727 
2728 /* Return a VAR_DECL or CONST_DECL node.
2729 
2730    NAME gives the name of the variable.  ASM_NAME is its assembler name
2731    (if provided).  TYPE is its data type (a GCC ..._TYPE node).  INIT is
2732    the GCC tree for an optional initial expression; NULL_TREE if none.
2733 
2734    CONST_FLAG is true if this variable is constant, in which case we might
2735    return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2736 
2737    PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2738    definition to be made visible outside of the current compilation unit, for
2739    instance variable definitions in a package specification.
2740 
2741    EXTERN_FLAG is true when processing an external variable declaration (as
2742    opposed to a definition: no storage is to be allocated for the variable).
2743 
2744    STATIC_FLAG is only relevant when not at top level and indicates whether
2745    to always allocate storage to the variable.
2746 
2747    VOLATILE_FLAG is true if this variable is declared as volatile.
2748 
2749    ARTIFICIAL_P is true if the variable was generated by the compiler.
2750 
2751    DEBUG_INFO_P is true if we need to write debug information for it.
2752 
2753    ATTR_LIST is the list of attributes to be attached to the variable.
2754 
2755    GNAT_NODE is used for the position of the decl.  */
2756 
2757 tree
create_var_decl(tree name,tree asm_name,tree type,tree init,bool const_flag,bool public_flag,bool extern_flag,bool static_flag,bool volatile_flag,bool artificial_p,bool debug_info_p,struct attrib * attr_list,Node_Id gnat_node,bool const_decl_allowed_p)2758 create_var_decl (tree name, tree asm_name, tree type, tree init,
2759 		 bool const_flag, bool public_flag, bool extern_flag,
2760 		 bool static_flag, bool volatile_flag, bool artificial_p,
2761 		 bool debug_info_p, struct attrib *attr_list,
2762 		 Node_Id gnat_node, bool const_decl_allowed_p)
2763 {
2764   /* Whether the object has static storage duration, either explicitly or by
2765      virtue of being declared at the global level.  */
2766   const bool static_storage = static_flag || global_bindings_p ();
2767 
2768   /* Whether the initializer is constant: for an external object or an object
2769      with static storage duration, we check that the initializer is a valid
2770      constant expression for initializing a static variable; otherwise, we
2771      only check that it is constant.  */
2772   const bool init_const
2773     = (init
2774        && gnat_types_compatible_p (type, TREE_TYPE (init))
2775        && (extern_flag || static_storage
2776 	   ? initializer_constant_valid_p (init, TREE_TYPE (init))
2777 	     != NULL_TREE
2778 	   : TREE_CONSTANT (init)));
2779 
  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to avoid the need for elaboration
     code when an identifier for which such a DECL is made is in turn used
     as an initializer.  We used to rely on CONST_DECL vs VAR_DECL for this,
     but extra constraints apply to this choice (see below) and they are not
     relevant to the distinction we wish to make.  */
  const bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl (input_location,
		  (constant_p
		   && const_decl_allowed_p
		   && !AGGREGATE_TYPE_P (type) ? CONST_DECL : VAR_DECL),
		  name, type);

  /* Detect constants created by the front-end to hold 'reference to function
     calls for stabilization purposes.  This is needed for renaming.  */
  if (const_flag && init && POINTER_TYPE_P (type))
    {
      tree inner = init;
      if (TREE_CODE (inner) == COMPOUND_EXPR)
	inner = TREE_OPERAND (inner, 1);
      inner = remove_conversions (inner, true);
      if (TREE_CODE (inner) == ADDR_EXPR
	  && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
	       && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
	      || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
		  && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
	DECL_RETURN_VALUE_P (var_decl) = 1;
    }

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && init && !TREE_CONSTANT (init)))
    init = NULL_TREE;

  /* At the global level, a non-constant initializer generates elaboration
     statements.  Check that such statements are allowed, that is to say,
     not violating a No_Elaboration_Code restriction.  */
  if (init && !init_const && global_bindings_p ())
    Check_Elaboration_Code_Allowed (gnat_node);

  /* Attach the initializer, if any.  */
  DECL_INITIAL (var_decl) = init;

  /* Directly set some flags.  */
  DECL_ARTIFICIAL (var_decl) = artificial_p;
  DECL_EXTERNAL (var_decl) = extern_flag;

  TREE_CONSTANT (var_decl) = constant_p;
  TREE_READONLY (var_decl) = const_flag;

  /* The object is public if it is external or if it is declared public
     and has static storage duration.  */
  TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);

  /* We need to allocate static storage for an object with static storage
     duration if it isn't external.  */
  TREE_STATIC (var_decl) = !extern_flag && static_storage;

  TREE_SIDE_EFFECTS (var_decl)
    = TREE_THIS_VOLATILE (var_decl)
    = TYPE_VOLATILE (type) | volatile_flag;

  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && TREE_PUBLIC (var_decl)
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;

  /* Do not emit debug info if not requested, or for an external constant whose
     initializer is not absolute because this would require a global relocation
     in a read-only section which runs afoul of the PE-COFF run-time relocation
     mechanism.  */
  if (!debug_info_p
      || (extern_flag
	  && constant_p
	  && init
	  && initializer_constant_valid_p (init, TREE_TYPE (init))
	     != null_pointer_node))
    DECL_IGNORED_P (var_decl) = 1;

  /* ??? Some attributes cannot be applied to CONST_DECLs.  */
  if (TREE_CODE (var_decl) == VAR_DECL)
    process_attributes (&var_decl, &attr_list, true, gnat_node);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
    {
      /* Let the target mangle the name if this isn't a verbatim asm.  */
      if (*IDENTIFIER_POINTER (asm_name) != '*')
	asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);

      SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
    }

  return var_decl;
}
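
/* A minimal usage sketch (illustrative only, not called anywhere): for a
   scalar constant with a static initializer, such as

     tree t = create_var_decl (get_identifier ("C"), NULL_TREE,
			       integer_type_node, integer_one_node,
			       true, false, false, false, false,
			       true, true, NULL, Empty, true);

   CONSTANT_P and CONST_DECL_ALLOWED_P both hold and the type is not an
   aggregate, so T is a CONST_DECL; with an array type instead, a
   TREE_READONLY VAR_DECL would be built.  */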

/* Return true if TYPE, an aggregate type, contains (or is) an array.
   If SELF_REFERENTIAL is true, then an additional requirement on the
   array is that it be self-referential.  */

bool
aggregate_type_contains_array_p (tree type, bool self_referential)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (AGGREGATE_TYPE_P (TREE_TYPE (field))
	      && aggregate_type_contains_array_p (TREE_TYPE (field),
						  self_referential))
	    return true;
	return false;
      }

    case ARRAY_TYPE:
      return self_referential ? type_contains_placeholder_p (type) : true;

    default:
      gcc_unreachable ();
    }
}

/* Return true if TYPE is a type with variable size or a padding type with a
   field of variable size or a record that has a field with such a type.  */

static bool
type_has_variable_size (tree type)
{
  tree field;

  if (!TREE_CONSTANT (TYPE_SIZE (type)))
    return true;

  if (TYPE_IS_PADDING_P (type)
      && !TREE_CONSTANT (DECL_SIZE (TYPE_FIELDS (type))))
    return true;

  if (!RECORD_OR_UNION_TYPE_P (type))
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (type_has_variable_size (TREE_TYPE (field)))
      return true;

  return false;
}

/* Return a FIELD_DECL node.  NAME is the field's name, TYPE is its type and
   RECORD_TYPE is the type of the enclosing record.  If SIZE is nonzero, it
   is the specified size of the field.  If POS is nonzero, it is the bit
   position.  PACKED is 1 if the enclosing record is packed, -1 if it has
   Component_Alignment of Storage_Unit.  If ADDRESSABLE is nonzero, it
   means we are allowed to take the address of the field; if it is negative,
   we should not make a bitfield, which is used by make_aligning_type.  */

tree
create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
		   int packed, int addressable)
{
  tree field_decl = build_decl (input_location, FIELD_DECL, name, type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (type);

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (type);
      if (TYPE_MODE (type) == BLKmode)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, then make a bitfield when the size
     is specified for two reasons: first, when it differs from the natural
     size; second, when the alignment is insufficient.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
      && (packed
	  || !tree_int_cst_equal (size, TYPE_SIZE (type))
	  || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
	  || (TYPE_ALIGN (record_type)
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
	{
	  if (TYPE_ALIGN (record_type)
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
	    SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
	  else
	    SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
	}
    }

  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* If FIELD_TYPE has BLKmode, we must ensure this is aligned to at least
     a byte boundary since GCC cannot handle less aligned BLKmode bitfields.
     Likewise if it has a variable size and no specified position because
     variable-sized objects need to be aligned to at least a byte boundary.
     Likewise for an aggregate without specified position that contains an
     array because, in this case, slices of variable length of this array
     must be handled by GCC and have variable size.  */
  if (packed && (TYPE_MODE (type) == BLKmode
		 || (!pos && type_has_variable_size (type))
		 || (!pos
		     && AGGREGATE_TYPE_P (type)
		     && aggregate_type_contains_array_p (type, false))))
    SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);

  /* Bump the alignment if need be, either for bitfield/packing purposes or
     to satisfy the type requirements if no such considerations apply.  When
     we get the alignment from the type, indicate if this is from an explicit
     user request, which prevents stor-layout from lowering it later on.  */
  else
    {
      const unsigned int field_align
	= DECL_BIT_FIELD (field_decl)
	  ? 1
	  : packed
	    ? BITS_PER_UNIT
	    : 0;

      if (field_align > DECL_ALIGN (field_decl))
	SET_DECL_ALIGN (field_decl, field_align);
      else if (!field_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
	{
	  SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
	  DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
	}
    }

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (tree_fits_uhwi_p (pos))
	known_align = tree_to_uhwi (pos) & -tree_to_uhwi (pos);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     tree_fits_uhwi_p (pos)
			     ? BIGGEST_ALIGNMENT : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!addressable && !type_for_nonaliased_component_p (type))
    addressable = 1;

  /* Note that there is a trade-off in making a field nonaddressable because
     this will cause type-based alias analysis to use the same alias set for
     accesses to the field as for accesses to the whole record: while doing
     so will make it more likely to disambiguate accesses to other objects
     and accesses to the field, it will make it less likely to disambiguate
     accesses to the other fields of the record and accesses to the field.
     If the record is fully static, then the trade-off is irrelevant since
     the fields of the record can always be disambiguated by their offsets
     but, if the record is dynamic, then it can become problematic.  */
  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
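
/* Illustration: in a record subject to pragma Pack, a Standard.Boolean
   component typically reaches this point with PACKED == 1 and no SIZE, so
   SIZE is set to its RM size of 1 bit; since that differs from the natural
   size of 8 bits, DECL_BIT_FIELD is set.  Conversely, make_aligning_type
   passes a negative ADDRESSABLE precisely to keep the bitfield machinery
   above from triggering.  */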

/* Return a PARM_DECL node with NAME and TYPE.  */

tree
create_param_decl (tree name, tree type)
{
  tree param_decl = build_decl (input_location, PARM_DECL, name, type);

  /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
     can lead to various ABI violations.  */
  if (targetm.calls.promote_prototypes (NULL_TREE)
      && INTEGRAL_TYPE_P (type)
      && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
    {
      /* We have to be careful about biased types here.  Make a subtype
	 of integer_type_node with the proper biasing.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  tree subtype
	    = make_unsigned_type (TYPE_PRECISION (integer_type_node));
	  TREE_TYPE (subtype) = integer_type_node;
	  TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
	  SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
	  SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
	  type = subtype;
	}
      else
	type = integer_type_node;
    }

  DECL_ARG_TYPE (param_decl) = type;
  return param_decl;
}
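
/* Illustration (hypothetical subtype): assume a formal of a biased 8-bit
   type with range 100 .. 227 on a target where promote_prototypes holds.
   DECL_ARG_TYPE is then set to a biased subtype of integer_type_node with
   the same RM bounds rather than to plain int, so the bias of 100 is still
   applied when the promoted parameter is read.  */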

/* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
   a TYPE.  If IN_PLACE is true, the tree pointed to by NODE should not be
   changed.  GNAT_NODE is used for the position of error messages.  */

void
process_attributes (tree *node, struct attrib **attr_list, bool in_place,
		    Node_Id gnat_node)
{
  struct attrib *attr;

  for (attr = *attr_list; attr; attr = attr->next)
    switch (attr->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	Sloc_to_locus (Sloc (gnat_node), &input_location);
	decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
			 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
	break;

      case ATTR_LINK_ALIAS:
	if (!DECL_EXTERNAL (*node))
	  {
	    TREE_STATIC (*node) = 1;
	    assemble_alias (*node, attr->name);
	  }
	break;

      case ATTR_WEAK_EXTERNAL:
	if (SUPPORTS_WEAK)
	  declare_weak (*node);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr->error_point);
	break;

      case ATTR_LINK_SECTION:
	if (targetm_common.have_named_sections)
	  {
	    set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
	    DECL_COMMON (*node) = 0;
	  }
	else
	  post_error ("?section attributes are not supported for this target",
		      attr->error_point);
	break;

      case ATTR_LINK_CONSTRUCTOR:
	DECL_STATIC_CONSTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;
	break;

      case ATTR_LINK_DESTRUCTOR:
	DECL_STATIC_DESTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;
	break;

      case ATTR_THREAD_LOCAL_STORAGE:
	set_decl_tls_model (*node, decl_default_tls_model (*node));
	DECL_COMMON (*node) = 0;
	break;
      }

  *attr_list = NULL;
}
/* Return true if VALUE is known to be a multiple of FACTOR, which must be
   a power of 2.  */

bool
value_factor_p (tree value, unsigned HOST_WIDE_INT factor)
{
  gcc_checking_assert (pow2p_hwi (factor));

  if (tree_fits_uhwi_p (value))
    return (tree_to_uhwi (value) & (factor - 1)) == 0;

  if (TREE_CODE (value) == MULT_EXPR)
    return (value_factor_p (TREE_OPERAND (value, 0), factor)
	    || value_factor_p (TREE_OPERAND (value, 1), factor));

  return false;
}
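
/* Examples: value_factor_p (bitsize_int (24), 8) is true because
   24 & 7 == 0; for an expression X * 16, the MULT_EXPR case accepts
   factor 8 because the constant operand is itself a multiple of 8; for
   anything else, e.g. a bare VAR_DECL, the function conservatively
   returns false.  */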

/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  */

static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
{
  struct deferred_decl_context_node *new_node;

  new_node
    = (struct deferred_decl_context_node *) xmalloc (sizeof (*new_node));
  new_node->decl = decl;
  new_node->gnat_scope = gnat_scope;
  new_node->force_global = force_global;
  new_node->types.create (1);
  new_node->next = deferred_decl_context_queue;
  deferred_decl_context_queue = new_node;
  return new_node;
}

/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */

static void
add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
{
  n->types.safe_push (type);
}

/* Get the GENERIC node corresponding to GNAT_SCOPE, if available.  Return
   NULL_TREE if it is not available.  */

static tree
compute_deferred_decl_context (Entity_Id gnat_scope)
{
  tree context;

  if (present_gnu_tree (gnat_scope))
    context = get_gnu_tree (gnat_scope);
  else
    return NULL_TREE;

  if (TREE_CODE (context) == TYPE_DECL)
    {
      tree context_type = TREE_TYPE (context);

      /* Skip dummy types: only the final ones can appear in the context
	 chain.  */
      if (TYPE_DUMMY_P (context_type))
	return NULL_TREE;

      /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
	 chain.  */
      else
	context = context_type;
    }

  return context;
}

/* Try to process all deferred nodes in the queue.  Keep in the queue the ones
   that cannot be processed yet, remove the other ones.  If FORCE is true,
   force the processing for all nodes, use the global context when nodes don't
   have a GNU translation.  */

void
process_deferred_decl_context (bool force)
{
  struct deferred_decl_context_node **it = &deferred_decl_context_queue;
  struct deferred_decl_context_node *node;

  while (*it)
    {
      bool processed = false;
      tree context = NULL_TREE;
      Entity_Id gnat_scope;

      node = *it;

      /* If FORCE, get the innermost elaborated scope.  Otherwise, just try to
	 get the first scope.  */
      gnat_scope = node->gnat_scope;
      while (Present (gnat_scope))
	{
	  context = compute_deferred_decl_context (gnat_scope);
	  if (!force || context)
	    break;
	  gnat_scope = get_debug_scope (gnat_scope, NULL);
	}

      /* Imported declarations must not be in a local context (i.e. not inside
	 a function).  */
      if (context && node->force_global > 0)
	{
	  tree ctx = context;

	  while (ctx)
	    {
	      gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
	      ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
	    }
	}

      /* If FORCE, we want to get rid of all nodes in the queue: in case there
	 was no elaborated scope, use the global context.  */
      if (force && !context)
	context = get_global_context ();

      if (context)
	{
	  tree t;
	  int i;

	  DECL_CONTEXT (node->decl) = context;

	  /* Propagate it to the TYPE_CONTEXT attributes of the requested
	     ..._TYPE nodes.  */
	  FOR_EACH_VEC_ELT (node->types, i, t)
	    {
	      gnat_set_type_context (t, context);
	    }
	  processed = true;
	}
      /* If this node has been successfully processed, remove it from the
	 queue.  Then move to the next node.  */
      if (processed)
	{
	  *it = node->next;
	  node->types.release ();
	  free (node);
	}
      else
	it = &node->next;
    }
}

/* Return VALUE scaled by the biggest power-of-2 factor of EXPR.  */

static unsigned int
scale_by_factor_of (tree expr, unsigned int value)
{
  unsigned HOST_WIDE_INT addend = 0;
  unsigned HOST_WIDE_INT factor = 1;

  /* Peel conversions around EXPR and try to extract bodies from function
     calls: it is possible to get the scale factor from size functions.  */
  expr = remove_conversions (expr, true);
  if (TREE_CODE (expr) == CALL_EXPR)
    expr = maybe_inline_call_in_expr (expr);

  /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
     multiple of the scale factor we are looking for.  */
  if (TREE_CODE (expr) == PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
      && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
    {
      addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
      expr = TREE_OPERAND (expr, 0);
    }

  /* An expression which is a bitwise AND with a mask has a power-of-2 factor
     corresponding to the number of trailing zeros of the mask.  */
  if (TREE_CODE (expr) == BIT_AND_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
      unsigned int i = 0;

      while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
	{
	  mask >>= 1;
	  factor *= 2;
	  i++;
	}
    }

  /* If the addend is not a multiple of the factor we found, give up.  In
     theory we could find a smaller common factor but it's useless for our
     needs.  This situation arises when dealing with a field F1 with no
     alignment requirement but that is following a field F2 with such
     requirements.  As long as we have F2's offset, we don't need alignment
     information to compute F1's.  */
  if (addend % factor != 0)
    factor = 1;

  return factor * value;
}
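
/* Worked example: for EXPR of the form (X & -8) + 24 and VALUE == 1, the
   addend is 24 and the mask -8 has three trailing zero bits, so FACTOR is
   8; 24 is a multiple of 8, hence the result is 8 * 1 == 8.  With 20 as
   the addend instead, 20 % 8 != 0 and the factor would degrade to 1.  */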

/* Return a LABEL_DECL with NAME.  GNAT_NODE is used for the position of
   the decl.  */

tree
create_label_decl (tree name, Node_Id gnat_node)
{
  tree label_decl
    = build_decl (input_location, LABEL_DECL, name, void_type_node);

  SET_DECL_MODE (label_decl, VOIDmode);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (label_decl, gnat_node);

  return label_decl;
}

/* Return a FUNCTION_DECL node.  NAME is the name of the subprogram, ASM_NAME
   its assembler name, TYPE its type (a FUNCTION_TYPE or METHOD_TYPE node),
   PARAM_DECL_LIST the list of its parameters (a list of PARM_DECL nodes
   chained through the DECL_CHAIN field).

   INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.

   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
   definition to be made visible outside of the current compilation unit.

   EXTERN_FLAG is true when processing an external subprogram declaration.

   ARTIFICIAL_P is true if the subprogram was generated by the compiler.

   DEBUG_INFO_P is true if we need to write debug information for it.

   DEFINITION is true if the subprogram is to be considered as a definition.

   ATTR_LIST is the list of attributes to be attached to the subprogram.

   GNAT_NODE is used for the position of the decl.  */

tree
create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
		     enum inline_status_t inline_status, bool public_flag,
		     bool extern_flag, bool artificial_p, bool debug_info_p,
		     bool definition, struct attrib *attr_list,
		     Node_Id gnat_node)
{
  tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;

  DECL_ARTIFICIAL (subprog_decl) = artificial_p;
  DECL_EXTERNAL (subprog_decl) = extern_flag;
  DECL_FUNCTION_IS_DEF (subprog_decl) = definition;
  DECL_IGNORED_P (subprog_decl) = !debug_info_p;
  TREE_PUBLIC (subprog_decl) = public_flag;

  switch (inline_status)
    {
    case is_suppressed:
      DECL_UNINLINABLE (subprog_decl) = 1;
      break;

    case is_default:
      break;

    case is_required:
      if (Back_End_Inlining)
	{
	  decl_attributes (&subprog_decl,
			   tree_cons (get_identifier ("always_inline"),
				      NULL_TREE, NULL_TREE),
			   ATTR_FLAG_TYPE_IN_PLACE);

	  /* Inline_Always guarantees that every direct call is inlined and
	     that there is no indirect reference to the subprogram, so the
	     instance in the original package (as well as its clones in the
	     client packages created for inter-unit inlining) can be made
	     private, which causes the out-of-line body to be eliminated.  */
	  TREE_PUBLIC (subprog_decl) = 0;
	}

      /* ... fall through ... */

    case is_prescribed:
      DECL_DISREGARD_INLINE_LIMITS (subprog_decl) = 1;

      /* ... fall through ... */

    case is_requested:
      DECL_DECLARED_INLINE_P (subprog_decl) = 1;
      if (!Debug_Generated_Code)
	DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
      break;

    default:
      gcc_unreachable ();
    }

  process_attributes (&subprog_decl, &attr_list, true, gnat_node);

  /* Once everything is processed, finish the subprogram declaration.  */
  finish_subprog_decl (subprog_decl, asm_name, type);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}

/* Given a subprogram declaration DECL, its assembler name and its type,
   finish constructing the subprogram declaration from ASM_NAME and TYPE.  */

void
finish_subprog_decl (tree decl, tree asm_name, tree type)
{
  /* DECL_ARGUMENTS is set by the caller, but not its context.  */
  for (tree param_decl = DECL_ARGUMENTS (decl);
       param_decl;
       param_decl = DECL_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = decl;

  tree result_decl
    = build_decl (DECL_SOURCE_LOCATION (decl), RESULT_DECL, NULL_TREE,
		  TREE_TYPE (type));

  DECL_ARTIFICIAL (result_decl) = 1;
  DECL_IGNORED_P (result_decl) = 1;
  DECL_CONTEXT (result_decl) = decl;
  DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
  DECL_RESULT (decl) = result_decl;

  /* Propagate the "pure" property.  */
  DECL_PURE_P (decl) = TYPE_RESTRICT (type);

  /* Propagate the "noreturn" property.  */
  TREE_THIS_VOLATILE (decl) = TYPE_VOLATILE (type);

  if (asm_name)
    {
      /* Let the target mangle the name if this isn't a verbatim asm.  */
      if (*IDENTIFIER_POINTER (asm_name) != '*')
	asm_name = targetm.mangle_decl_assembler_name (decl, asm_name);

      SET_DECL_ASSEMBLER_NAME (decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
	 designate the DECL_NAME of the 'main' entry point, in turn expected
	 to be declared as the "main" function literally by default.  Ada
	 program entry points are typically declared with a different name
	 within the binder generated file, exported as 'main' to satisfy the
	 system expectations.  Force main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
	DECL_NAME (decl) = main_identifier_node;
    }
}

/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (tree subprog_decl)
{
  announce_function (subprog_decl);

  /* This function is being defined.  */
  TREE_STATIC (subprog_decl) = 1;

  /* The failure of this assertion will likely come from a wrong context for
     the subprogram body, e.g. another procedure for a procedure declared at
     library level.  */
  gcc_assert (current_function_decl == decl_function_context (subprog_decl));

  current_function_decl = subprog_decl;

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  gnat_pushlevel ();
}

/* Finish translating the current subprogram and set its BODY.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Attach the BLOCK for this level to the function and pop the level.  */
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* The body should be a BIND_EXPR whose BLOCK is the top-level one.  */
  if (TREE_CODE (body) == BIND_EXPR)
    {
      BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
      DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
    }

  DECL_SAVED_TREE (fndecl) = body;

  current_function_decl = decl_function_context (fndecl);
}

/* Wrap up compilation of SUBPROG_DECL, a subprogram body.  */

void
rest_of_subprog_body_compilation (tree subprog_decl)
{
  /* We cannot track the location of errors past this point.  */
  Current_Error_Node = Empty;

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)
    return;

  /* Dump functions before gimplification.  */
  dump_function (TDI_original, subprog_decl);

  if (!decl_function_context (subprog_decl))
    cgraph_node::finalize_function (subprog_decl, false);
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_node::get_create (subprog_decl);
}

tree
gnat_builtin_function (tree decl)
{
  gnat_pushdecl (decl, Empty);
  return decl;
}

/* Return an integer type with the number of bits of precision given by
   PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
   it is a signed type.  */

tree
gnat_type_for_size (unsigned precision, int unsignedp)
{
  tree t;
  char type_name[20];

  if (precision <= 2 * MAX_BITS_PER_WORD
      && signed_and_unsigned_types[precision][unsignedp])
    return signed_and_unsigned_types[precision][unsignedp];

  if (unsignedp)
    t = make_unsigned_type (precision);
  else
    t = make_signed_type (precision);
  TYPE_ARTIFICIAL (t) = 1;

  if (precision <= 2 * MAX_BITS_PER_WORD)
    signed_and_unsigned_types[precision][unsignedp] = t;

  if (!TYPE_NAME (t))
    {
      sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
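
/* Example: gnat_type_for_size (8, 1) builds an 8-bit unsigned type named
   UNSIGNED_8 on first use and returns the node cached in
   signed_and_unsigned_types on subsequent calls.  */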

/* Likewise for floating-point types.  */

static tree
float_type_for_precision (int precision, machine_mode mode)
{
  tree t;
  char type_name[20];

  if (float_types[(int) mode])
    return float_types[(int) mode];

  float_types[(int) mode] = t = make_node (REAL_TYPE);
  TYPE_PRECISION (t) = precision;
  layout_type (t);

  gcc_assert (TYPE_MODE (t) == mode);
  if (!TYPE_NAME (t))
    {
      sprintf (type_name, "FLOAT_%d", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}

/* Return a data type that has machine mode MODE.  UNSIGNEDP selects
   an unsigned type; otherwise a signed type is returned.  */

tree
gnat_type_for_mode (machine_mode mode, int unsignedp)
{
  if (mode == BLKmode)
    return NULL_TREE;

  if (mode == VOIDmode)
    return void_type_node;

  if (COMPLEX_MODE_P (mode))
    return NULL_TREE;

  scalar_float_mode float_mode;
  if (is_a <scalar_float_mode> (mode, &float_mode))
    return float_type_for_precision (GET_MODE_PRECISION (float_mode),
				     float_mode);

  scalar_int_mode int_mode;
  if (is_a <scalar_int_mode> (mode, &int_mode))
    return gnat_type_for_size (GET_MODE_BITSIZE (int_mode), unsignedp);

  if (VECTOR_MODE_P (mode))
    {
      machine_mode inner_mode = GET_MODE_INNER (mode);
      tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
      if (inner_type)
	return build_vector_type_for_mode (inner_type, mode);
    }

  return NULL_TREE;
}

/* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
   signedness being specified by UNSIGNEDP.  */

tree
gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
{
  if (type_node == char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;

  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);

  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_type (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node)
	   && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
	   && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_type (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}

/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
   transparently converted to each other.  */

int
gnat_types_compatible_p (tree t1, tree t2)
{
  enum tree_code code;

  /* This is the default criterion.  */
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
    return 1;

  /* We only check structural equivalence here.  */
  if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
    return 0;

  /* Vector types are also compatible if they have the same number of subparts
     and the same form of (scalar) element type.  */
  if (code == VECTOR_TYPE
      && known_eq (TYPE_VECTOR_SUBPARTS (t1), TYPE_VECTOR_SUBPARTS (t2))
      && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
      && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
    return 1;

  /* Array types are also compatible if they are constrained and have the same
     domain(s), the same component type and the same scalar storage order.  */
  if (code == ARRAY_TYPE
      && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
	  || (TYPE_DOMAIN (t1)
	      && TYPE_DOMAIN (t2)
	      && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
	      && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
      && (TREE_TYPE (t1) == TREE_TYPE (t2)
	  || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
	      && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
      && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
    return 1;

  return 0;
}
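
/* Example: two distinct array type nodes built for "String (1 .. 3)" in
   different contexts are compatible here provided their domains have equal
   bounds, their component types agree and they use the same scalar storage
   order, even though neither is a variant of the other.  */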

/* Return true if EXPR is a useless type conversion.  */

bool
gnat_useless_type_conversion (tree expr)
{
  if (CONVERT_EXPR_P (expr)
      || TREE_CODE (expr) == VIEW_CONVERT_EXPR
      || TREE_CODE (expr) == NON_LVALUE_EXPR)
    return gnat_types_compatible_p (TREE_TYPE (expr),
				    TREE_TYPE (TREE_OPERAND (expr, 0)));

  return false;
}

/* Return true if T, a {FUNCTION,METHOD}_TYPE, has the specified flags.  */

bool
fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
		     bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
{
  return TYPE_CI_CO_LIST (t) == cico_list
	 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
	 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
	 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
}

/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.

   Note that the expression may have already been gimplified, in which case
   COND_EXPRs have VOID_TYPE and no operands, and this must be handled.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);
  tree op0, op1, op2;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      return exp;

    case tcc_exceptional:
      gcc_assert (code == SSA_NAME);
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
	{
	  tree t, *argarray;
	  int n, i;

	  t = maybe_inline_call_in_expr (exp);
	  if (t)
	    return max_size (t, max_p);

	  n = call_expr_nargs (exp);
	  gcc_assert (n > 0);
	  argarray = XALLOCAVEC (tree, n);
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (CONTAINS_PLACEHOLDER_P (exp))
	{
	  tree base_type = get_base_type (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  tree val
	    = fold_convert (base_type,
			    max_p
			    ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));

	  /* Walk down the extra subtypes to get more restrictive bounds.  */
	  while (TYPE_IS_EXTRA_SUBTYPE_P (type))
	    {
	      type = TREE_TYPE (type);
	      if (max_p)
		val = fold_build2 (MIN_EXPR, base_type, val,
				   fold_convert (base_type,
						 TYPE_MAX_VALUE (type)));
	      else
		val = fold_build2 (MAX_EXPR, base_type, val,
				   fold_convert (base_type,
						 TYPE_MIN_VALUE (type)));
	    }

	  return fold_convert (type, max_size (val, max_p));
	}

      return exp;

    case tcc_comparison:
      return build_int_cst (type, max_p ? 1 : 0);

    case tcc_unary:
      op0 = TREE_OPERAND (exp, 0);

      if (code == NON_LVALUE_EXPR)
	return max_size (op0, max_p);

      if (VOID_TYPE_P (TREE_TYPE (op0)))
	return max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type);

      op0 = max_size (op0, code == NEGATE_EXPR ? !max_p : max_p);

      if (op0 == TREE_OPERAND (exp, 0))
	return exp;

      return fold_build1 (code, type, op0);

    case tcc_binary:
      op0 = TREE_OPERAND (exp, 0);
      op1 = TREE_OPERAND (exp, 1);

      /* If we have a multiply-add with a "negative" value in an unsigned
	 type, do a multiply-subtract with the negated value, in order to
	 avoid creating a spurious overflow below.  */
      if (code == PLUS_EXPR
	  && TREE_CODE (op0) == MULT_EXPR
	  && TYPE_UNSIGNED (type)
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (op0, 1))
	  && tree_int_cst_sign_bit (TREE_OPERAND (op0, 1)))
	{
	  tree tmp = op1;
	  op1 = build2 (MULT_EXPR, type, TREE_OPERAND (op0, 0),
			fold_build1 (NEGATE_EXPR, type,
				     TREE_OPERAND (op0, 1)));
	  op0 = tmp;
	  code = MINUS_EXPR;
	}

      op0 = max_size (op0, max_p);
      op1 = max_size (op1, code == MINUS_EXPR ? !max_p : max_p);

      if ((code == MINUS_EXPR || code == PLUS_EXPR))
	{
	  /* If the op0 has overflowed and the op1 is a variable,
	     propagate the overflow by returning the op0.  */
	  if (TREE_CODE (op0) == INTEGER_CST
	      && TREE_OVERFLOW (op0)
	      && TREE_CODE (op1) != INTEGER_CST)
	    return op0;

	  /* If we have a "negative" value in an unsigned type, do the
	     opposite operation on the negated value, in order to avoid
	     creating a spurious overflow below.  */
	  if (TYPE_UNSIGNED (type)
	      && TREE_CODE (op1) == INTEGER_CST
	      && !TREE_OVERFLOW (op1)
	      && tree_int_cst_sign_bit (op1))
	    {
	      op1 = fold_build1 (NEGATE_EXPR, type, op1);
	      code = (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR);
	    }
	}

      if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	return exp;

      /* We need to detect overflows so we call size_binop here.  */
      return size_binop (code, op0, op1);

    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == SAVE_EXPR)
	    return exp;

	  op0 = max_size (TREE_OPERAND (exp, 0),
			  code == TRUTH_NOT_EXPR ? !max_p : max_p);

	  if (op0 == TREE_OPERAND (exp, 0))
	    return exp;

	  return fold_build1 (code, type, op0);

	case 2:
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  op0 = max_size (TREE_OPERAND (exp, 0), max_p);
	  op1 = max_size (TREE_OPERAND (exp, 1), max_p);

	  if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	    return exp;

	  return fold_build2 (code, type, op0, op1);

	case 3:
	  if (code == COND_EXPR)
	    {
	      op0 = TREE_OPERAND (exp, 0);
	      op1 = TREE_OPERAND (exp, 1);
	      op2 = TREE_OPERAND (exp, 2);

	      if (!op1 || !op2)
		return exp;

	      op1 = max_size (op1, max_p);
	      op2 = max_size (op2, max_p);

	      /* If we have the MAX of a "negative" value in an unsigned type
		 and zero for a length expression, just return zero.  */
	      if (max_p
		  && TREE_CODE (op0) == LE_EXPR
		  && TYPE_UNSIGNED (type)
		  && TREE_CODE (op1) == INTEGER_CST
		  && !TREE_OVERFLOW (op1)
		  && tree_int_cst_sign_bit (op1)
		  && integer_zerop (op2))
		return op2;

	      return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type, op1, op2);
	    }
	  break;

	default:
	  break;
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
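
/* Worked example (hypothetical discriminated record): for a size
   expression 8 * D, where D is a reference to a discriminant of subtype
   Natural range 0 .. 100, the tcc_reference case replaces the
   PLACEHOLDER_EXPR by 100 (for MAX_P) or 0 (for !MAX_P), so the result
   folds to 800 or 0 respectively.  */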

/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  vec<constructor_elt, va_gc> *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  while (TREE_CODE (array_type) == RECORD_TYPE
	 && (TYPE_PADDING_P (array_type)
	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down
     the field list of the template instead of the type chain because
     this array might be an Ada array of arrays and we can't tell where
     the nested arrays stop being the underlying object.  */
  for (field = TYPE_FIELDS (template_type);
       field;
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the
	 template, but this will only give us a maximum range.  */
      if (bound_list)
	{
	  bounds = TREE_VALUE (bound_list);
	  bound_list = TREE_CHAIN (bound_list);
	}
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	{
	  bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
	  array_type = TREE_TYPE (array_type);
	}
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from EXPR.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
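
/* Illustration: for a one-dimensional array with bounds 1 .. N, the
   CONSTRUCTOR built above pairs the template's two fields (typically named
   LB0 and UB0) with 1 and N respectively, any PLACEHOLDER_EXPR in N having
   been substituted from EXPR.  */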

/* Return true if TYPE is suitable for the element type of a vector.  */

static bool
type_for_vector_element_p (tree type)
{
  machine_mode mode;

  if (!INTEGRAL_TYPE_P (type)
      && !SCALAR_FLOAT_TYPE_P (type)
      && !FIXED_POINT_TYPE_P (type))
    return false;

  mode = TYPE_MODE (type);
  if (GET_MODE_CLASS (mode) != MODE_INT
      && !SCALAR_FLOAT_MODE_P (mode)
      && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
    return false;

  return true;
}

/* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
   this is not possible.  If ATTRIBUTE is non-zero, we are processing the
   attribute declaration and want to issue error messages on failure.  */

static tree
build_vector_type_for_size (tree inner_type, tree size, tree attribute)
{
  unsigned HOST_WIDE_INT size_int, inner_size_int;
  int nunits;

  /* Silently punt on variable sizes.  We can't make vector types for them,
     need to ignore them on front-end generated subtypes of unconstrained
     base types, and this attribute is for binding implementors, not end
     users, so we should never get there from legitimate explicit uses.  */
  if (!tree_fits_uhwi_p (size))
    return NULL_TREE;
  size_int = tree_to_uhwi (size);

  if (!type_for_vector_element_p (inner_type))
    {
      if (attribute)
	error ("invalid element type for attribute %qs",
	       IDENTIFIER_POINTER (attribute));
      return NULL_TREE;
    }
  inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));

  if (size_int % inner_size_int)
    {
      if (attribute)
	error ("vector size not an integral multiple of component size");
      return NULL_TREE;
    }

  if (size_int == 0)
    {
      if (attribute)
	error ("zero vector size");
      return NULL_TREE;
    }

  nunits = size_int / inner_size_int;
  if (nunits & (nunits - 1))
    {
      if (attribute)
	error ("number of components of vector not a power of two");
      return NULL_TREE;
    }

  return build_vector_type (inner_type, nunits);
}
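
/* Example: with INNER_TYPE a 32-bit float and SIZE equal to 16 bytes, the
   checks above yield 4 units, a power of two, so a 4-element vector type
   (e.g. one with mode V4SF on x86) is returned; a SIZE of 12 bytes would
   be rejected because 3 components are not a power of two.  */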

/* Return a vector type whose representative array type is ARRAY_TYPE, or
   NULL_TREE if this is not possible.  If ATTRIBUTE is non-zero, we are
   processing the attribute and want to issue error messages on failure.  */

static tree
build_vector_type_for_array (tree array_type, tree attribute)
{
  tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
						 TYPE_SIZE_UNIT (array_type),
						 attribute);
  if (!vector_type)
    return NULL_TREE;

  TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
  return vector_type;
}

/* Build a type to be used to represent an aliased object whose nominal type
   is an unconstrained array.  This consists of a RECORD_TYPE containing a
   field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
   If ARRAY_TYPE is that of an unconstrained array, this is used to represent
   an arbitrary unconstrained object.  Use NAME as the name of the record.
   DEBUG_INFO_P is true if we need to write debug information for the type.  */

tree
build_unc_object_type (tree template_type, tree object_type, tree name,
		       bool debug_info_p)
{
  tree type = make_node (RECORD_TYPE);
  tree template_field
    = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
			 NULL_TREE, NULL_TREE, 0, 1);
  tree array_field
    = create_field_decl (get_identifier ("ARRAY"), object_type, type,
			 NULL_TREE, NULL_TREE, 0, 1);

  TYPE_NAME (type) = name;
  TYPE_CONTAINS_TEMPLATE_P (type) = 1;
  DECL_CHAIN (template_field) = array_field;
  finish_record_type (type, template_field, 0, true);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, type, true, debug_info_p, Empty);

  return type;
}
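
/* For instance, for an aliased object whose nominal type is String, the
   record built above is conceptually equivalent to the C structure

     struct {
       bounds_type BOUNDS;   // the template recording the (low, high) bounds
       char ARRAY[];         // the array data proper
     };

   where "bounds_type" stands for the hypothetical TEMPLATE_TYPE; the
   template is thus laid out just in front of the array data.  */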

/* Same as build_unc_object_type, but taking a thin or fat pointer type
   instead of a template type.  */

tree
build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
				tree name, bool debug_info_p)
{
  tree template_type;

  gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));

  template_type
    = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
       ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
       : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));

  return
    build_unc_object_type (template_type, object_type, name, debug_info_p);
}

/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers into the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If the pointer or reference already points to the new type, there
	 is nothing to do.  This can happen because update_pointer_to can
	 be invoked multiple times on the same pair of types because of the
	 type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
	  || (ref && TREE_TYPE (ref) == new_type))
	return;

      /* Chain PTR and its variants at the end.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
	{
	  while (TYPE_NEXT_PTR_TO (new_ptr))
	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
	}
      else
	TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    TREE_TYPE (t) = new_type;
	    if (TYPE_NULL_BOUNDS (t))
	      TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
	    TYPE_CANONICAL (t) = TYPE_CANONICAL (TYPE_POINTER_TO (new_type));
	  }

      /* Chain REF and its variants at the end.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
	{
	  while (TYPE_NEXT_REF_TO (new_ref))
	    new_ref = TYPE_NEXT_REF_TO (new_ref);
	  TYPE_NEXT_REF_TO (new_ref) = ref;
	}
      else
	TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    TREE_TYPE (t) = new_type;
	    TYPE_CANONICAL (t) = TYPE_CANONICAL (TYPE_REFERENCE_TO (new_type));
	  }

      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, there is nothing to do.  This
	 can happen since update_pointer_to can be invoked multiple times
	 on the same pair of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
	return;

      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      update_pointer_to
	(TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
			 TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }
}
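
/* For instance, when an access type designates a private type, a pointer
   to a dummy node is created first; once the full view is elaborated, the
   routine above rechains that pointer onto the final type and retypes it
   in place, so none of the expressions already built against the dummy
   node need to be rebuilt.  */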

/* Convert EXPR, a pointer to a constrained array, into a pointer to an
   unconstrained one.  This involves making or finding a template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_addr;
  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, 2);

  /* If EXPR is null, make a fat pointer that contains a null pointer to the
     array (compare_fat_pointers ensures that this is the full discriminant)
     and a valid pointer to the bounds.  This latter property is necessary
     since the compiler can hoist the load of the bounds done through it.  */
  if (integer_zerop (expr))
    {
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      tree null_bounds, t;

      if (TYPE_NULL_BOUNDS (ptr_template_type))
	null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
      else
	{
	  /* The template type can still be dummy at this point so we build an
	     empty constructor.  The middle-end will fill it in with zeros.  */
	  t = build_constructor (template_type, NULL);
	  TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
	  null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
	  SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
	}

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      fold_convert (p_array_type, null_pointer_node));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
      t = build_constructor (type, v);
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
      TREE_CONSTANT (t) = 0;
      TREE_STATIC (t) = 1;

      return t;
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree field = TYPE_FIELDS (TREE_TYPE (etype));

      expr = gnat_protect_expr (expr);

      /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
	 the thin pointer value has been shifted so we shift it back to get
	 the template address.  */
      if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
	{
	  template_addr
	    = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
			       fold_build1 (NEGATE_EXPR, sizetype,
					    byte_position
					    (DECL_CHAIN (field))));
	  template_addr
	    = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
			    template_addr);
	}

      /* Otherwise we explicitly take the address of the fields.  */
      else
	{
	  expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
	  template_addr
	    = build_unary_op (ADDR_EXPR, NULL_TREE,
			      build_component_ref (expr, field, false));
	  expr = build_unary_op (ADDR_EXPR, NULL_TREE,
				 build_component_ref (expr, DECL_CHAIN (field),
						      false));
	}
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_addr
      = build_unary_op (ADDR_EXPR, NULL_TREE,
			build_template (template_type, TREE_TYPE (etype),
					expr));

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
  return gnat_build_constructor (type, v);
}
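
/* For instance, if EXPR is the address of a hypothetical 10-element
   constrained array, the fat pointer built above is a pair whose first
   field points to the array data and whose second field points to a
   template recording the (1, 10) bounds computed by build_template.  */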

/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */

tree
convert (tree type, tree expr)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both input and output have padding and are of variable size, do this
     as an unchecked conversion.  Likewise if one is a mere variant of the
     other, so we avoid a pointless unpad/repad sequence.  */
  else if (code == RECORD_TYPE && ecode == RECORD_TYPE
	   && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
	   && (!TREE_CONSTANT (TYPE_SIZE (type))
	       || !TREE_CONSTANT (TYPE_SIZE (etype))
	       || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
	       || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
		  == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
    ;

  /* If the output type has padding, convert to the inner type and make a
     constructor to build the record, unless a variable size is involved.  */
  else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
    {
      /* If we previously converted from another type and our type is
	 of variable size, remove the conversion to avoid the need for
	 variable-sized temporaries.  Likewise for a conversion between
	 original and packable version.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && (!TREE_CONSTANT (TYPE_SIZE (type))
	      || (ecode == RECORD_TYPE
		  && TYPE_NAME (etype)
		     == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
	expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from EXPR, convert the original
	 object if we have variable size in order to avoid the need for some
	 variable-sized temporaries.  Likewise if the padding is a variant
	 of the other, so we avoid a pointless unpad/repad sequence.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && (!TREE_CONSTANT (TYPE_SIZE (type))
	      || TYPE_MAIN_VARIANT (type)
		 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
	      || (ecode == RECORD_TYPE
		  && TYPE_NAME (etype)
		     == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
	return convert (type, TREE_OPERAND (expr, 0));

      /* If the inner type is of self-referential size and the expression type
	 is a record, do this as an unchecked conversion unless both types are
	 essentially the same.  */
      if (ecode == RECORD_TYPE
	  && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type)))
	  && TYPE_MAIN_VARIANT (etype)
	     != TYPE_MAIN_VARIANT (TREE_TYPE (TYPE_FIELDS (type))))
	return unchecked_convert (type, expr, false);

      /* If we are converting between array types with variable size, do the
	 final conversion as an unchecked conversion, again to avoid the need
	 for some variable-sized temporaries.  If valid, this conversion is
	 very likely purely technical and without real effects.  */
      if (ecode == ARRAY_TYPE
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
	  && !TREE_CONSTANT (TYPE_SIZE (etype))
	  && !TREE_CONSTANT (TYPE_SIZE (type)))
	return unchecked_convert (type,
				  convert (TREE_TYPE (TYPE_FIELDS (type)),
					   expr),
				  false);

      tree t = convert (TREE_TYPE (TYPE_FIELDS (type)), expr);

      /* If converting to the inner type has already created a CONSTRUCTOR with
         the right size, then reuse it instead of creating another one.  This
         can happen for the padding type built to overalign local variables.  */
      if (TREE_CODE (t) == VIEW_CONVERT_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 0)) == CONSTRUCTOR
	  && TREE_CONSTANT (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0))))
	  && tree_int_cst_equal (TYPE_SIZE (type),
				 TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0)))))
	return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (t, 0));

      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, 1);
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), t);
      return gnat_build_constructor (type, v);
    }

  /* If the input type has padding, remove it and convert to the output type.
     The conditions are ordered to ensure that the output type is not a
     padding type here, as it is not clear whether the conversion would
     always be correct if that were to happen.  */
  else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
    {
      tree unpadded;

      /* If we have just converted to this padded type, just get the
	 inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR)
	unpadded = CONSTRUCTOR_ELT (expr, 0)->value;

      /* Otherwise, build an explicit component reference.  */
      else
	unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);

      return convert (type, unpadded);
    }

  /* If the input is a biased type, convert first to the base type and add
     the bias.  Note that the bias must go through a full conversion to the
     base type, lest it be itself a biased value; this happens for subtypes
     of biased types.  */
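  /* For instance, given a hypothetical subtype

       type Biased is range 100 .. 115;
       for Biased'Size use 4;

     the 4-bit stored value 3 denotes 103, so converting it to another type
     requires adding back the bias, i.e. the lower bound 100, in the base
     type first.  */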
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
				       fold_convert (TREE_TYPE (etype), expr),
				       convert (TREE_TYPE (etype),
						TYPE_MIN_VALUE (etype))));

  /* If the input is a justified modular type, we need to extract the actual
     object before converting it to any other type, with the exceptions of an
     [unconstrained] array or a mere type variant.  It is useful to avoid the
     extraction and conversion in these cases because it could end up
     replacing a VAR_DECL by a constructor and we might be about to take
     the address of the result.  */
  if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
      && code != ARRAY_TYPE
      && code != UNCONSTRAINED_ARRAY_TYPE
      && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
    return
      convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template.  */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, 2);

      /* If the source already has a template, get a reference to the
	 associated array only, as we are going to rebuild a template
	 for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      build_template (TREE_TYPE (TYPE_FIELDS (type)),
					      obj_type, NULL_TREE));
      if (expr)
	CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
				convert (obj_type, expr));
      return gnat_build_constructor (type, v);
    }

  /* There are some cases of expressions that we process specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
	 conversion in gnat_expand_expr.  NULL_EXPR does not represent an
	 actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
      /* If we are converting a STRING_CST to another constrained array type,
	 just make a new one in the proper type.  */
      if (code == ecode
	  && !(TREE_CONSTANT (TYPE_SIZE (etype))
	       && !TREE_CONSTANT (TYPE_SIZE (type))))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}
      break;

    case VECTOR_CST:
      /* If we are converting a VECTOR_CST to a mere type variant, just make
	 a new one in the proper type.  */
      if (code == ecode && gnat_types_compatible_p (type, etype))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}
      break;

    case CONSTRUCTOR:
      /* If we are converting a CONSTRUCTOR to a mere type variant, or to
	 another padding type around the same type, just make a new one in
	 the proper type.  */
      if (code == ecode
	  && (gnat_types_compatible_p (type, etype)
	      || (code == RECORD_TYPE
		  && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
		  && TREE_TYPE (TYPE_FIELDS (type))
		     == TREE_TYPE (TYPE_FIELDS (etype)))))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
	  return expr;
	}

      /* Likewise for a conversion between original and packable version, or
	 conversion between types of the same size and with the same list of
	 fields, but we have to work harder to preserve type consistency.  */
      if (code == ecode
	  && code == RECORD_TYPE
	  && (TYPE_NAME (type) == TYPE_NAME (etype)
	      || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
	{
	  vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
	  unsigned HOST_WIDE_INT len = vec_safe_length (e);
	  vec<constructor_elt, va_gc> *v;
	  vec_alloc (v, len);
	  tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
	  unsigned HOST_WIDE_INT idx;
	  tree index, value;

	  /* Whether we need to clear TREE_CONSTANT et al. on the output
	     constructor when we convert in place.  */
	  bool clear_constant = false;

	  FOR_EACH_CONSTRUCTOR_ELT (e, idx, index, value)
	    {
	      /* Skip the missing fields in the CONSTRUCTOR.  */
	      while (efield && field && !SAME_FIELD_P (efield, index))
		{
		  efield = DECL_CHAIN (efield);
		  field = DECL_CHAIN (field);
		}
	      /* The field must be the same.  */
	      if (!(efield && field && SAME_FIELD_P (efield, field)))
		break;
	      constructor_elt elt
		= {field, convert (TREE_TYPE (field), value)};
	      v->quick_push (elt);

	      /* If packing has made this field a bitfield and the input
		 value couldn't be emitted statically any more, we need to
		 clear TREE_CONSTANT on our output.  */
	      if (!clear_constant
		  && TREE_CONSTANT (expr)
		  && !CONSTRUCTOR_BITFIELD_P (efield)
		  && CONSTRUCTOR_BITFIELD_P (field)
		  && !initializer_constant_valid_for_bitfield_p (value))
		clear_constant = true;

	      efield = DECL_CHAIN (efield);
	      field = DECL_CHAIN (field);
	    }

	  /* If we have been able to match and convert all the input fields
	     to their output type, convert in place now.  We'll fall back to
	     a view conversion downstream otherwise.  */
	  if (idx == len)
	    {
	      expr = copy_node (expr);
	      TREE_TYPE (expr) = type;
	      CONSTRUCTOR_ELTS (expr) = v;
	      if (clear_constant)
		TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
	      return expr;
	    }
	}

      /* Likewise for a conversion between array type and vector type with a
         compatible representative array.  */
      else if (code == VECTOR_TYPE
	       && ecode == ARRAY_TYPE
	       && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
					   etype))
	{
	  vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
	  unsigned HOST_WIDE_INT len = vec_safe_length (e);
	  vec<constructor_elt, va_gc> *v;
	  unsigned HOST_WIDE_INT ix;
	  tree value;

	  /* Build a VECTOR_CST from a *constant* array constructor.  */
	  if (TREE_CONSTANT (expr))
	    {
	      bool constant_p = true;

	      /* Iterate through elements and check if all constructor
		 elements are *_CSTs.  */
	      FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
		if (!CONSTANT_CLASS_P (value))
		  {
		    constant_p = false;
		    break;
		  }

	      if (constant_p)
		return build_vector_from_ctor (type,
					       CONSTRUCTOR_ELTS (expr));
	    }

	  /* Otherwise, build a regular vector constructor.  */
	  vec_alloc (v, len);
	  FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
	    {
	      constructor_elt elt = {NULL_TREE, value};
	      v->quick_push (elt);
	    }
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  CONSTRUCTOR_ELTS (expr) = v;
	  return expr;
	}
      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* First retrieve the underlying array.  */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      {
	/* GCC 4.x is very sensitive to type consistency overall, and view
	   conversions thus are very frequent.  Even though just "convert"ing
	   the inner operand to the output type is fine in most cases, it
	   might expose unexpected input/output type mismatches in special
	   circumstances so we avoid such recursive calls when we can.  */
	tree op0 = TREE_OPERAND (expr, 0);

	/* If we are converting back to the original type, we can just
	   lift the input conversion.  This is a common occurrence with
	   switches back-and-forth amongst type variants.  */
	if (type == TREE_TYPE (op0))
	  return op0;

	/* Otherwise, if we're converting between two aggregate or vector
	   types, we might be allowed to substitute the VIEW_CONVERT_EXPR
	   target type in place or to just convert the inner expression.  */
	if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
	    || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
	  {
	    /* If we are converting between mere variants, we can just
	       substitute the VIEW_CONVERT_EXPR in place.  */
	    if (gnat_types_compatible_p (type, etype))
	      return build1 (VIEW_CONVERT_EXPR, type, op0);

	    /* Otherwise, we may just bypass the input view conversion unless
	       one of the types is a fat pointer, which is handled by
	       specialized code below that relies on exact type matching.  */
	    else if (!TYPE_IS_FAT_POINTER_P (type)
		     && !TYPE_IS_FAT_POINTER_P (etype))
	      return convert (type, op0);
	  }

	break;
      }

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* If we are converting between two aggregate or vector types that are mere
     variants, just make a VIEW_CONVERT_EXPR.  Likewise when we are converting
     to a vector type from its representative array type.  */
  else if ((code == ecode
	    && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
	    && gnat_types_compatible_p (type, etype))
	   || (code == VECTOR_TYPE
	       && ecode == ARRAY_TYPE
	       && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
					   etype)))
    return build1 (VIEW_CONVERT_EXPR, type, expr);

  /* If we are converting between tagged types, try to upcast properly.
     But don't do it if we are just annotating types since tagged types
     aren't fully laid out in this mode.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type)
	   && !type_annotate_only)
    {
      tree child_etype = etype;
      do {
	tree field = TYPE_FIELDS (child_etype);
	if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
	  return build_component_ref (expr, field, false);
	child_etype = TREE_TYPE (field);
      } while (TREE_CODE (child_etype) == RECORD_TYPE);
    }

  /* If we are converting from a smaller form of record type back to it, just
     make a VIEW_CONVERT_EXPR.  But first pad the expression to have the same
     size on both sides.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && smaller_form_type_p (etype, type))
    {
      expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
				      false, false, true),
		      expr);
      return build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* In all other cases of related types, make a NOP_EXPR.  */
  else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return fold_convert (type, expr);

  switch (code)
    {
    case VOID_TYPE:
      return fold_build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
	  && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
	      || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
	return unchecked_convert (type, expr, false);

      /* If the output is a biased type, convert first to the base type and
	 subtract the bias.  Note that the bias itself must go through a full
	 conversion to the base type, lest it be itself a biased value; this
	 happens for subtypes of biased types.  */
      if (TYPE_BIASED_REPRESENTATION_P (type))
	return fold_convert (type,
			     fold_build2 (MINUS_EXPR, TREE_TYPE (type),
					  convert (TREE_TYPE (type), expr),
					  convert (TREE_TYPE (type),
						   TYPE_MIN_VALUE (type))));

      /* If we are converting an additive expression to an integer type
	 with lower precision, be wary of the optimization that can be
	 applied by convert_to_integer.  There are 2 problematic cases:
	   - if the first operand was originally of a biased type,
	     because we could be recursively called to convert it
	     to an intermediate type and thus rematerialize the
	     additive operator endlessly,
	   - if the expression contains a placeholder, because an
	     intermediate conversion that changes the sign could
	     be inserted and thus introduce an artificial overflow
	     at compile time when the placeholder is substituted.  */
      if (ecode == INTEGER_TYPE
	  && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
	  && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
	{
	  tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);

	  if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
	       && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
	      || CONTAINS_PLACEHOLDER_P (expr))
	    return fold_convert (type, expr);
	}

      /* ... fall through ... */

    case ENUMERAL_TYPE:
      return fold (convert_to_integer (type, expr));

    case BOOLEAN_TYPE:
      /* Do not use convert_to_integer with boolean types.  */
      return fold_convert_loc (EXPR_LOCATION (expr), type, expr);

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two thin pointers, adjust if needed to account
	 for differing offsets from the base pointer, depending on whether
	 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type.  */
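      /* For instance, if the object record is conceptually

	   struct {
	     bounds_type BOUNDS;   // template at offset 0
	     char ARRAY[];         // data at some offset POS
	   };

	 a thin pointer with a TYPE_UNCONSTRAINED_ARRAY attached has been
	 shifted to designate ARRAY rather than offset 0, so converting
	 between shifted and unshifted thin pointers must add or subtract
	 the byte offset POS computed below.  */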
      if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
	{
	  tree etype_pos
	    = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
	      ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
	      : size_zero_node;
	  tree type_pos
	    = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
	      ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
	      : size_zero_node;
	  tree byte_diff = size_diffop (type_pos, etype_pos);

	  expr = build1 (NOP_EXPR, type, expr);
	  if (integer_zerop (byte_diff))
	    return expr;

	  return build_binary_op (POINTER_PLUS_EXPR, type, expr,
				  fold_convert (sizetype, byte_diff));
	}

      /* If converting fat pointer to normal or thin pointer, get the pointer
	 to the array and then convert it.  */
      if (TYPE_IS_FAT_POINTER_P (etype))
	expr = build_component_ref (expr, TYPE_FIELDS (etype), false);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      /* Do a normal conversion between scalar and justified modular type.  */
      if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
	{
	  vec<constructor_elt, va_gc> *v;
	  vec_alloc (v, 1);

	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
				  convert (TREE_TYPE (TYPE_FIELDS (type)),
					   expr));
	  return gnat_build_constructor (type, v);
	}

      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, false);

    case ARRAY_TYPE:
      /* Do a normal conversion between unconstrained and constrained array
	 type, assuming the latter is a constrained version of the former.  */
      if (TREE_CODE (expr) == INDIRECT_REF
	  && ecode == ARRAY_TYPE
	  && TREE_TYPE (etype) == TREE_TYPE (type))
	{
	  tree ptr_type = build_pointer_type (type);
	  tree t = build_unary_op (INDIRECT_REF, NULL_TREE,
				   fold_convert (ptr_type,
						 TREE_OPERAND (expr, 0)));
	  TREE_READONLY (t) = TREE_READONLY (expr);
	  TREE_THIS_NOTRAP (t) = TREE_THIS_NOTRAP (expr);
	  return t;
	}

      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, false);

    case UNION_TYPE:
      /* This is either a conversion between a tagged type and some subtype,
	 which we must mark as a UNION_TYPE because of overlapping fields,
	 or a conversion of an Unchecked_Union type.  */
      return unchecked_convert (type, expr, false);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If the input is a VECTOR_TYPE, convert to the representative
	 array type first.  */
      if (ecode == VECTOR_TYPE)
	{
	  expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
	  etype = TREE_TYPE (expr);
	  ecode = TREE_CODE (etype);
	}

      /* If EXPR is a constrained array, take its address, convert it to a
	 fat pointer, and then dereference it.  Likewise if EXPR is a
	 record containing both a template and a constrained array.
	 Note that a record representing a justified modular type
	 always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
	  || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
	return
	  build_unary_op
	    (INDIRECT_REF, NULL_TREE,
	     convert_to_fat_pointer (TREE_TYPE (type),
				     build_unary_op (ADDR_EXPR,
						     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
	 array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
	return
	  build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR,
						   NULL_TREE, expr)));
      else
	gcc_unreachable ();

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gcc_unreachable ();
    }
}

/* Create an expression whose value is that of EXPR converted to the common
   index type, which is sizetype.  EXPR is supposed to be in the base type
   of the GNAT index type.  Calling it is equivalent to doing

     convert (sizetype, expr)

   but we try to distribute the type conversion with the knowledge that EXPR
   cannot overflow in its type.  This is a best-effort approach and we fall
   back to the above expression as soon as difficulties are encountered.

   This is necessary to overcome issues that arise when the GNAT base index
   type and the GCC common index type (sizetype) don't have the same size,
   which is quite frequent on 64-bit architectures.  In this case, and if
   the GNAT base index type is signed but the iteration type of the loop has
   been forced to unsigned, the loop scalar evolution engine cannot compute
   a simple evolution for the general induction variables associated with the
   array indices, because it will preserve the wrap-around semantics in the
   unsigned type of their "inner" part.  As a result, many loop optimizations
   are blocked.

   The solution is to use a special (basic) induction variable that is at
   least as large as sizetype, and to express the aforementioned general
   induction variables in terms of this induction variable, eliminating
   the problematic intermediate truncation to the GNAT base index type.
   This is possible as long as the original expression doesn't overflow
   and if the middle-end hasn't introduced artificial overflows in the
   course of the various simplifications it can make to the expression.  */

tree
convert_to_index_type (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  tree type = TREE_TYPE (expr);

  /* If the type is unsigned, overflow is allowed so we cannot be sure that
     EXPR doesn't overflow.  Keep it simple if optimization is disabled.  */
  if (TYPE_UNSIGNED (type) || !optimize || optimize_debug)
    return convert (sizetype, expr);

  switch (code)
    {
    case VAR_DECL:
      /* The main effect of the function: replace a loop parameter with its
	 associated special induction variable.  */
      if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
	expr = DECL_INDUCTION_VAR (expr);
      break;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
	/* Bail out as soon as we suspect some sort of type frobbing.  */
	if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
	    || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
	  break;
      }

      /* ... fall through ... */

    case NON_LVALUE_EXPR:
      return fold_build1 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return fold_build2 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COMPOUND_EXPR:
      return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COND_EXPR:
      return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)),
			  convert_to_index_type (TREE_OPERAND (expr, 2)));

    default:
      break;
    }

  return convert (sizetype, expr);
}
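
/* For instance, for a hypothetical loop

     for J in 1 .. N loop
       A (J + 1) := A (J);
     end loop;

   the index computation J + 1 is rewritten by the routine above as IV + 1
   directly in sizetype, where IV is the special induction variable created
   for J, instead of being done in the narrower GNAT index type and then
   widened, which would hide the evolution of the index from the loop
   optimizer.  */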

/* Remove all conversions that are done in EXP.  This includes converting
   from a padded type or to a justified modular type.  If TRUE_ADDRESS
   is true, always return the address of the containing object even if
   the address is not bit-aligned.  */

tree
remove_conversions (tree exp, bool true_address)
{
  switch (TREE_CODE (exp))
    {
    case CONSTRUCTOR:
      if (true_address
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
	  && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
	return
	  remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
      break;

    case COMPONENT_REF:
      if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
	return remove_conversions (TREE_OPERAND (exp, 0), true_address);
      break;

    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return remove_conversions (TREE_OPERAND (exp, 0), true_address);

    default:
      break;
    }

  return exp;
}

/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If it has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE (type))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  const bool read_only = TREE_READONLY (exp);
	  const bool no_trap = TREE_THIS_NOTRAP (exp);

	  exp = TREE_OPERAND (exp, 0);
	  type = TREE_TYPE (exp);

	  if (TREE_CODE (exp) == COND_EXPR)
	    {
	      tree op1
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 1),
						       TYPE_FIELDS (type),
						       false));
	      tree op2
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 2),
						       TYPE_FIELDS (type),
						       false));

	      exp = build3 (COND_EXPR,
			    TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
			    TREE_OPERAND (exp, 0), op1, op2);
	    }
	  else
	    {
	      exp = build_unary_op (INDIRECT_REF, NULL_TREE,
				    build_component_ref (exp,
							 TYPE_FIELDS (type),
							 false));
	      TREE_READONLY (exp) = read_only;
	      TREE_THIS_NOTRAP (exp) = no_trap;
	    }
	}

      else if (code == NULL_EXPR)
	exp = build1 (NULL_EXPR,
		      TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
		      TREE_OPERAND (exp, 0));
      break;

    case RECORD_TYPE:
      /* If this is a padded type and it contains a template, convert to the
	 unpadded type first.  */
      if (TYPE_PADDING_P (type)
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
	  && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
	{
	  exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	  code = TREE_CODE (exp);
	  type = TREE_TYPE (exp);
	}

      if (TYPE_CONTAINS_TEMPLATE_P (type))
	{
	  /* If the array initializer is a box, return NULL_TREE.  */
	  if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
	    return NULL_TREE;

	  exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
				     false);

	  /* If the array is padded, remove the padding.  */
	  exp = maybe_padded_object (exp);
	}
      break;

    default:
      break;
    }

  return exp;
}
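
/* For instance, for an UNCONSTRAINED_ARRAY_REF the routine above returns a
   dereference of the pointer-to-array field of the underlying fat pointer,
   and for a record with a template as built by build_unc_object_type it
   returns a reference to the ARRAY field, with any padding removed.  */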

/* Return true if EXPR is an expression that can be folded as an operand
   of a VIEW_CONVERT_EXPR.  See ada-tree.h for a complete rationale.  */

static bool
can_fold_for_view_convert_p (tree expr)
{
  tree t1, t2;

  /* The folder will fold NOP_EXPRs between integral types with the same
     precision (in the middle-end's sense).  We cannot allow it if the
     types don't have the same precision in the Ada sense as well.  */
  if (TREE_CODE (expr) != NOP_EXPR)
    return true;

  t1 = TREE_TYPE (expr);
  t2 = TREE_TYPE (TREE_OPERAND (expr, 0));

  /* Defer to the folder for non-integral conversions.  */
  if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
    return true;

  /* Only fold conversions that preserve both precisions.  */
  if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
      && operand_equal_p (rm_size (t1), rm_size (t2), 0))
    return true;

  return false;
}
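
/* For instance, a NOP_EXPR between two hypothetical 8-bit integral types
   whose RM sizes are 5 and 8 bits respectively has equal precisions in
   the middle-end's sense, so the folder would strip it; but it is not a
   no-op in the Ada sense, so the function above refuses to let it be
   folded.  */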

/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is true, truncation operations should be suppressed.

   Special care is required with (source or target) integral types whose
   precision is not equal to their size, to make sure we fetch or assign
   the value bits whose location might depend on the endianness, e.g.

     Rmsize : constant := 8;
     subtype Int is Integer range 0 .. 2 ** Rmsize - 1;

     type Bit_Array is array (1 .. Rmsize) of Boolean;
     pragma Pack (Bit_Array);

     function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);

     Value : Int := 2#1000_0001#;
     Vbits : Bit_Array := To_Bit_Array (Value);

   we expect the 8 bits at Vbits'Address to always contain Value, while
   their original location depends on the endianness, at Value'Address
   on a little-endian architecture but not on a big-endian one.

   One pitfall is that we cannot use TYPE_UNSIGNED directly to decide how
   the bits between the precision and the size are filled, because of the
   trick used in the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.
   So we use the special predicate type_unsigned_for_rm above.  */

tree
unchecked_convert (tree type, tree expr, bool notrunc_p)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);
  const bool ebiased
    = (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype));
  const bool biased
    = (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type));
  const bool ereverse
    = (AGGREGATE_TYPE_P (etype) && TYPE_REVERSE_STORAGE_ORDER (etype));
  const bool reverse
    = (AGGREGATE_TYPE_P (type) && TYPE_REVERSE_STORAGE_ORDER (type));
  tree tem;
  int c = 0;

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral or regular pointer, then just do a normal
     conversion.  Likewise for a conversion to an unconstrained array.  */
  if (((INTEGRAL_TYPE_P (type)
	|| (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
	|| (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
       && (INTEGRAL_TYPE_P (etype)
	   || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
	   || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
      || code == UNCONSTRAINED_ARRAY_TYPE)
    {
      if (ebiased)
	{
	  tree ntype = copy_type (etype);
	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (NOP_EXPR, ntype, expr);
	}

      if (biased)
	{
	  tree rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	  expr = convert (rtype, expr);
	  expr = build1 (NOP_EXPR, type, expr);
	}
      else
	expr = convert (type, expr);
    }

  /* If we are converting to an integral type whose precision is not equal
     to its size, first unchecked convert to a record type that contains a
     field of the given precision.  Then extract the result from the field.

     There is a subtlety if the source type is an aggregate type with reverse
     storage order because its representation is not contiguous in the native
     storage order, i.e. a direct unchecked conversion to an integral type
     with N bits of precision cannot read the first N bits of the aggregate
     type.  To overcome it, we do an unchecked conversion to an integral type
     with reverse storage order and return the resulting value.  This also
     ensures that the result of the unchecked conversion doesn't depend on
     the endianness of the target machine, but only on the storage order of
     the aggregate type.

     Finally, for the sake of consistency, we do the unchecked conversion
     to an integral type with reverse storage order as soon as the source
     type is an aggregate type with reverse storage order, even if there
     are no considerations of precision or size involved.  Ultimately, we
     further extend this processing to any scalar type.  */
  else if ((INTEGRAL_TYPE_P (type)
	    && TYPE_RM_SIZE (type)
	    && ((c = tree_int_cst_compare (TYPE_RM_SIZE (type),
					   TYPE_SIZE (type))) < 0
		|| ereverse))
	   || (SCALAR_FLOAT_TYPE_P (type) && ereverse))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field_type, field;

      TYPE_REVERSE_STORAGE_ORDER (rec_type) = ereverse;

      if (c < 0)
	{
	  const unsigned HOST_WIDE_INT prec
	    = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
	  if (type_unsigned_for_rm (type))
	    field_type = make_unsigned_type (prec);
	  else
	    field_type = make_signed_type (prec);
	  SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
	}
      else
	field_type = type;

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
				 NULL_TREE, bitsize_zero_node, c < 0, 0);

      finish_record_type (rec_type, field, 1, false);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, field, false);
      expr = fold_build1 (NOP_EXPR, type, expr);
    }

  /* Similarly if we are converting from an integral type whose precision is
     not equal to its size, first copy into a field of the given precision
     and unchecked convert the record type.

     The same considerations as above apply if the target type is an aggregate
     type with reverse storage order and we also proceed similarly.  */
  else if ((INTEGRAL_TYPE_P (etype)
	    && TYPE_RM_SIZE (etype)
	    && ((c = tree_int_cst_compare (TYPE_RM_SIZE (etype),
					   TYPE_SIZE (etype))) < 0
		|| reverse))
	   || (SCALAR_FLOAT_TYPE_P (etype) && reverse))
    {
      tree rec_type = make_node (RECORD_TYPE);
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, 1);
      tree field_type, field;

      TYPE_REVERSE_STORAGE_ORDER (rec_type) = reverse;

      if (c < 0)
	{
	  const unsigned HOST_WIDE_INT prec
	    = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
	  if (type_unsigned_for_rm (etype))
	    field_type = make_unsigned_type (prec);
	  else
	    field_type = make_signed_type (prec);
	  SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
	}
      else
	field_type = etype;

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
				 NULL_TREE, bitsize_zero_node, c < 0, 0);

      finish_record_type (rec_type, field, 1, false);

      expr = fold_build1 (NOP_EXPR, field_type, expr);
      CONSTRUCTOR_APPEND_ELT (v, field, expr);
      expr = gnat_build_constructor (rec_type, v);
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting from a scalar type to a type with a different size,
     we need to pad to have the same size on both sides.

     ??? We cannot do it unconditionally because unchecked conversions are
     used liberally by the front-end to implement interface thunks:

       type ada__tags__addr_ptr is access system.address;
       S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
       return p___size__4 (p__object!(S191s.all));

     so we need to skip dereferences.  */
  else if (!INDIRECT_REF_P (expr)
	   && !AGGREGATE_TYPE_P (etype)
	   && ecode != UNCONSTRAINED_ARRAY_TYPE
	   && TREE_CONSTANT (TYPE_SIZE (type))
	   && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
    {
      if (c < 0)
	{
	  expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
					  false, false, true),
			  expr);
	  expr = unchecked_convert (type, expr, notrunc_p);
	}
      else
	{
	  tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
					  false, false, true);
	  expr = unchecked_convert (rec_type, expr, notrunc_p);
	  expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
	}
    }

  /* Likewise if we are converting from a scalar type to a type with self-
     referential size.  We use the max size to do the padding in this case.  */
  else if (!INDIRECT_REF_P (expr)
	   && !AGGREGATE_TYPE_P (etype)
	   && ecode != UNCONSTRAINED_ARRAY_TYPE
	   && CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type)))
    {
      tree new_size = max_size (TYPE_SIZE (type), true);
      c = tree_int_cst_compare (TYPE_SIZE (etype), new_size);
      if (c < 0)
	{
	  expr = convert (maybe_pad_type (etype, new_size, 0, Empty,
					  false, false, true),
			  expr);
	  expr = unchecked_convert (type, expr, notrunc_p);
	}
      else
	{
	  tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
					  false, false, true);
	  expr = unchecked_convert (rec_type, expr, notrunc_p);
	  expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
	}
    }

  /* We have a special case when we are converting between two unconstrained
     array types.  In that case, take the address, convert the fat pointer
     types, and dereference.  */
  else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));

  /* Another special case is when we are converting to a vector type from its
     representative array type; this is a regular conversion.  */
  else if (code == VECTOR_TYPE
	   && ecode == ARRAY_TYPE
	   && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
				       etype))
    expr = convert (type, expr);

  /* And, if the array type is not the representative, we try to build an
     intermediate vector type of which the array type is the representative
     and to do the unchecked conversion between the vector types, in order
     to enable further simplifications in the middle-end.  */
  else if (code == VECTOR_TYPE
	   && ecode == ARRAY_TYPE
	   && (tem = build_vector_type_for_array (etype, NULL_TREE)))
    {
      expr = convert (tem, expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting a CONSTRUCTOR to a more aligned aggregate type, bump
     the alignment of the CONSTRUCTOR to speed up the copy operation.  But do
     not do it for a conversion between original and packable version to avoid
     an infinite recursion.  */
  else if (TREE_CODE (expr) == CONSTRUCTOR
	   && AGGREGATE_TYPE_P (type)
	   && TYPE_NAME (type) != TYPE_NAME (etype)
	   && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
    {
      expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
				      Empty, false, false, true),
		      expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting a CONSTRUCTOR to a larger aggregate type, bump the
     size of the CONSTRUCTOR to make sure there are enough allocated bytes.
     But do not do it for a conversion between original and packable version
     to avoid an infinite recursion.  */
  else if (TREE_CODE (expr) == CONSTRUCTOR
	   && AGGREGATE_TYPE_P (type)
	   && TYPE_NAME (type) != TYPE_NAME (etype)
	   && TREE_CONSTANT (TYPE_SIZE (type))
	   && (!TREE_CONSTANT (TYPE_SIZE (etype))
	       || tree_int_cst_lt (TYPE_SIZE (etype), TYPE_SIZE (type))))
    {
      expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0,
				      Empty, false, false, true),
		      expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression.  */
  else
    {
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      if (can_fold_for_view_convert_p (expr))
	expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
      else
	expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is a non-biased integral type whose precision is not equal
     to its size, sign- or zero-extend the result.  But we need not do this
     if the input is also an integral type and both are unsigned or both are
     signed and have the same precision.  */
  tree type_rm_size;
  if (!notrunc_p
      && !biased
      && INTEGRAL_TYPE_P (type)
      && (type_rm_size = TYPE_RM_SIZE (type))
      && tree_int_cst_compare (type_rm_size, TYPE_SIZE (type)) < 0
      && !(INTEGRAL_TYPE_P (etype)
	   && type_unsigned_for_rm (type) == type_unsigned_for_rm (etype)
	   && (type_unsigned_for_rm (type)
	       || tree_int_cst_compare (type_rm_size,
					TYPE_RM_SIZE (etype)
					? TYPE_RM_SIZE (etype)
					: TYPE_SIZE (etype)) == 0)))
    {
      if (integer_zerop (type_rm_size))
	expr = build_int_cst (type, 0);
      else
	{
	  tree base_type
	    = gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
				  type_unsigned_for_rm (type));
	  tree shift_expr
	    = convert (base_type,
		       size_binop (MINUS_EXPR,
				   TYPE_SIZE (type), type_rm_size));
	  expr
	    = convert (type,
		       build_binary_op (RSHIFT_EXPR, base_type,
					build_binary_op (LSHIFT_EXPR, base_type,
							 convert (base_type,
								  expr),
							 shift_expr),
					shift_expr));
	}
    }
5682   /* An unchecked conversion should never raise Constraint_Error.  The code
5683      below assumes that GCC's conversion routines overflow the same way that
5684      the underlying hardware does.  This is probably true.  In the rare case
5685      when it is false, we can rely on the fact that such conversions are
5686      erroneous anyway.  */
5687   if (TREE_CODE (expr) == INTEGER_CST)
5688     TREE_OVERFLOW (expr) = 0;
5689 
5690   /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
5691      mark the expression as no longer constant.  */
5692   if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5693       && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5694 			   OEP_ONLY_CONST))
5695     TREE_CONSTANT (expr) = 0;
5696 
5697   return expr;
5698 }
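
/* Hedged illustration (an assumed C analogue, not code from this unit): the
   LSHIFT/RSHIFT pair built above amounts to the following idiom for
   extending an RM_SIZE-bit value kept in a wider container, assuming an
   arithmetic right shift on signed types, which is what the generated
   RSHIFT_EXPR performs on a signed base type.  */
#if 0
static int
sign_extend_rm (int value, int rm_size)
{
  /* shift = TYPE_SIZE - TYPE_RM_SIZE, here with a 32-bit container.  */
  const int shift = (int) sizeof (int) * 8 - rm_size;
  /* Shift left as unsigned to discard the padding bits without undefined
     behavior, then shift right as signed to replicate the sign bit.  */
  return (int) ((unsigned int) value << shift) >> shift;
}
#endif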
5699 
5700 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5701    the latter being a record type as predicated by Is_Record_Type.  */
5702 
5703 enum tree_code
5704 tree_code_for_record_type (Entity_Id gnat_type)
5705 {
5706   Node_Id component_list, component;
5707 
5708   /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5709      fields are all in the variant part.  Otherwise, return RECORD_TYPE.  */
5710   if (!Is_Unchecked_Union (gnat_type))
5711     return RECORD_TYPE;
5712 
5713   gnat_type = Implementation_Base_Type (gnat_type);
5714   component_list
5715     = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5716 
5717   for (component = First_Non_Pragma (Component_Items (component_list));
5718        Present (component);
5719        component = Next_Non_Pragma (component))
5720     if (Ekind (Defining_Entity (component)) == E_Component)
5721       return RECORD_TYPE;
5722 
5723   return UNION_TYPE;
5724 }
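
/* Hedged illustration (hypothetical Ada type, not from this unit): an
   Unchecked_Union whose non-discriminant fields all live in the variant
   part gets UNION_TYPE, i.e. the layout of a plain C union:

     type Rec (Kind : Boolean := True) is record
        case Kind is
           when True  => I : Integer;
           when False => F : Float;
        end case;
     end record;
     pragma Unchecked_Union (Rec);  */
#if 0
union rec
{
  int   i;  /* variant selected when Kind = True */
  float f;  /* variant selected when Kind = False */
};
#endif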
5725 
5726 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5727    size is equal to 64 bits, or an array of such a type.  Set ALIGN_CLAUSE
5728    according to the presence of an alignment clause on the type or, if it
5729    is an array, on the component type.  */
5730 
5731 bool
5732 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5733 {
5734   gnat_type = Underlying_Type (gnat_type);
5735 
5736   *align_clause = Present (Alignment_Clause (gnat_type));
5737 
5738   if (Is_Array_Type (gnat_type))
5739     {
5740       gnat_type = Underlying_Type (Component_Type (gnat_type));
5741       if (Present (Alignment_Clause (gnat_type)))
5742 	*align_clause = true;
5743     }
5744 
5745   if (!Is_Floating_Point_Type (gnat_type))
5746     return false;
5747 
5748   if (UI_To_Int (Esize (gnat_type)) != 64)
5749     return false;
5750 
5751   return true;
5752 }
5753 
5754 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5755    size is greater or equal to 64 bits, or an array of such a type.  Set
5756    ALIGN_CLAUSE according to the presence of an alignment clause on the
5757    type or, if it is an array, on the component type.  */
5758 
5759 bool
5760 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5761 {
5762   gnat_type = Underlying_Type (gnat_type);
5763 
5764   *align_clause = Present (Alignment_Clause (gnat_type));
5765 
5766   if (Is_Array_Type (gnat_type))
5767     {
5768       gnat_type = Underlying_Type (Component_Type (gnat_type));
5769       if (Present (Alignment_Clause (gnat_type)))
5770 	*align_clause = true;
5771     }
5772 
5773   if (!Is_Scalar_Type (gnat_type))
5774     return false;
5775 
5776   if (UI_To_Int (Esize (gnat_type)) < 64)
5777     return false;
5778 
5779   return true;
5780 }
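
/* Hedged usage sketch (hypothetical caller; gnat_entity and align are
   assumed names): these predicates let a caller cap the alignment of
   64-bit "double" objects to the target-specified limit unless an
   explicit alignment clause overrides it.  */
#if 0
bool align_clause;
if (double_scalar_alignment > 0
    && is_double_scalar_or_array (gnat_entity, &align_clause)
    && !align_clause
    && align > (unsigned int) double_scalar_alignment)
  align = double_scalar_alignment;
#endif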
5781 
5782 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5783    component of an aggregate type.  */
5784 
5785 bool
5786 type_for_nonaliased_component_p (tree gnu_type)
5787 {
5788   /* If the type is passed by reference, we may have pointers to the
5789      component so it cannot be made non-aliased. */
5790   if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5791     return false;
5792 
5793   /* We used to say that any component of aggregate type is aliased
5794      because the front-end may take 'Reference of it.  The front-end
5795      has been enhanced in the meantime so as to use a renaming instead
5796      in most cases, but the back-end can probably take the address of
5797      such a component too so we go for the conservative stance.
5798 
5799      For instance, we might need the address of any array type, even
5800      if normally passed by copy, to construct a fat pointer if the
5801      component is used as an actual for an unconstrained formal.
5802 
5803      Likewise for record types: even if a specific record subtype is
5804      passed by copy, the parent type might be passed by ref (e.g. if
5805      it's of variable size) and we might take the address of a child
5806      component to pass to a parent formal.  We have no way to check
5807      for such conditions here.  */
5808   if (AGGREGATE_TYPE_P (gnu_type))
5809     return false;
5810 
5811   return true;
5812 }
5813 
5814 /* Return true if TYPE is a smaller form of ORIG_TYPE.  */
5815 
5816 bool
5817 smaller_form_type_p (tree type, tree orig_type)
5818 {
5819   tree size, osize;
5820 
5821   /* We're not interested in variants here.  */
5822   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5823     return false;
5824 
5825   /* Like a variant, a packable version keeps the original TYPE_NAME.  */
5826   if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5827     return false;
5828 
5829   size = TYPE_SIZE (type);
5830   osize = TYPE_SIZE (orig_type);
5831 
5832   if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5833     return false;
5834 
5835   return tree_int_cst_lt (size, osize) != 0;
5836 }
5837 
5838 /* Return whether EXPR, which is the renamed object in an object renaming
5839    declaration, can be materialized as a reference (with a REFERENCE_TYPE).
5840    This should be synchronized with Exp_Dbug.Debug_Renaming_Declaration.  */
5841 
5842 bool
5843 can_materialize_object_renaming_p (Node_Id expr)
5844 {
5845   while (true)
5846     {
5847       expr = Original_Node (expr);
5848 
5849       switch (Nkind (expr))
5850 	{
5851 	case N_Identifier:
5852 	case N_Expanded_Name:
5853 	  if (!Present (Renamed_Object (Entity (expr))))
5854 	    return true;
5855 	  expr = Renamed_Object (Entity (expr));
5856 	  break;
5857 
5858 	case N_Selected_Component:
5859 	  {
5860 	    if (Is_Packed (Underlying_Type (Etype (Prefix (expr)))))
5861 	      return false;
5862 
5863 	    const Uint bitpos
5864 	      = Normalized_First_Bit (Entity (Selector_Name (expr)));
5865 	    if (bitpos != UI_No_Uint && bitpos != Uint_0)
5866 	      return false;
5867 
5868 	    expr = Prefix (expr);
5869 	    break;
5870 	  }
5871 
5872 	case N_Indexed_Component:
5873 	case N_Slice:
5874 	  {
5875 	    const Entity_Id t = Underlying_Type (Etype (Prefix (expr)));
5876 
5877 	    if (Is_Array_Type (t) && Present (Packed_Array_Impl_Type (t)))
5878 	      return false;
5879 
5880 	    expr = Prefix (expr);
5881 	    break;
5882 	  }
5883 
5884 	case N_Explicit_Dereference:
5885 	  expr = Prefix (expr);
5886 	  break;
5887 
5888 	default:
5889 	  return true;
5890 	};
5891     }
5892 }
5893 
5894 /* Perform final processing on global declarations.  */
5895 
5896 static GTY (()) tree dummy_global;
5897 
5898 void
5899 gnat_write_global_declarations (void)
5900 {
5901   unsigned int i;
5902   tree iter;
5903 
5904   /* If we have declared types as used at the global level, insert them in
5905      the global hash table.  We use a dummy variable for this purpose, but
5906      we need to build it unconditionally to avoid -fcompare-debug issues.  */
5907   if (first_global_object_name)
5908     {
5909       struct varpool_node *node;
5910       char *label;
5911 
5912       ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, ULONG_MAX);
5913       dummy_global
5914 	= build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5915 		      void_type_node);
5916       DECL_HARD_REGISTER (dummy_global) = 1;
5917       TREE_STATIC (dummy_global) = 1;
5918       node = varpool_node::get_create (dummy_global);
5919       node->definition = 1;
5920       node->force_output = 1;
5921 
5922       if (types_used_by_cur_var_decl)
5923 	while (!types_used_by_cur_var_decl->is_empty ())
5924 	  {
5925 	    tree t = types_used_by_cur_var_decl->pop ();
5926 	    types_used_by_var_decl_insert (t, dummy_global);
5927 	  }
5928     }
5929 
5930   /* First output the integral global variables, so that they can be referenced
5931      as bounds by the global dynamic types.  Skip external variables, unless we
5932      really need to emit debug info for them, e.g. imported variables.  */
5933   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5934     if (TREE_CODE (iter) == VAR_DECL
5935 	&& INTEGRAL_TYPE_P (TREE_TYPE (iter))
5936 	&& (!DECL_EXTERNAL (iter) || !DECL_IGNORED_P (iter)))
5937       rest_of_decl_compilation (iter, true, 0);
5938 
5939   /* Now output debug information for the global type declarations.  This
5940      ensures that global types whose compilation hasn't been finalized yet,
5941      for example pointers to Taft amendment types, have their compilation
5942      finalized in the right context.  */
5943   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5944     if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5945       debug_hooks->type_decl (iter, false);
5946 
5947   /* Then output the other global variables.  We need to do that after the
5948      information for global types is emitted so that they are finalized.  */
5949   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5950     if (TREE_CODE (iter) == VAR_DECL
5951 	&& !INTEGRAL_TYPE_P (TREE_TYPE (iter))
5952 	&& (!DECL_EXTERNAL (iter) || !DECL_IGNORED_P (iter)))
5953       rest_of_decl_compilation (iter, true, 0);
5954 
5955   /* Output debug information for the global constants.  */
5956   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5957     if (TREE_CODE (iter) == CONST_DECL && !DECL_IGNORED_P (iter))
5958       debug_hooks->early_global_decl (iter);
5959 
5960   /* Output it for the imported functions.  */
5961   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5962     if (TREE_CODE (iter) == FUNCTION_DECL
5963 	&& DECL_EXTERNAL (iter)
5964 	&& DECL_INITIAL (iter) == NULL
5965 	&& !DECL_IGNORED_P (iter)
5966 	&& DECL_FUNCTION_IS_DEF (iter))
5967       debug_hooks->early_global_decl (iter);
5968 
5969   /* Output it for the imported modules/declarations.  In GNAT, these only
5970      materialize subprograms.  */
5971   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5972    if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
5973      debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
5974 					   DECL_CONTEXT (iter), false, false);
5975 }
5976 
5977 /* ************************************************************************
5978  * *                           GCC builtins support                       *
5979  * ************************************************************************ */
5980 
5981 /* The general scheme is fairly simple:
5982 
5983    For each builtin function/type to be declared, gnat_install_builtins calls
5984    internal facilities which eventually get to gnat_pushdecl, which in turn
5985    tracks the builtin function decls thus declared in the 'builtin_decls'
5986    global data structure.  When an Intrinsic subprogram declaration is
5987    processed, we search this data structure to retrieve the associated
5988    BUILT_IN DECL node.  */
5989 
5990 /* Search the chain of currently available builtin declarations for a node
5991    corresponding to function NAME (an IDENTIFIER_NODE).  Return the first node
5992    found, if any, or NULL_TREE otherwise.  */
5993 tree
5994 builtin_decl_for (tree name)
5995 {
5996   unsigned i;
5997   tree decl;
5998 
5999   FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
6000     if (DECL_NAME (decl) == name)
6001       return decl;
6002 
6003   return NULL_TREE;
6004 }
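
/* Hedged usage sketch (hypothetical caller): when an Intrinsic subprogram
   is processed, its link name can be looked up among the declared builtins
   along these lines.  */
#if 0
tree decl = builtin_decl_for (get_identifier ("__builtin_memcpy"));
if (decl != NULL_TREE)
  {
    /* Bind the Ada subprogram to the BUILT_IN decl.  */
  }
#endif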
6005 
6006 /* The code below eventually exposes gnat_install_builtins, which declares
6007    the builtin types and functions we might need, either internally or as
6008    user accessible facilities.
6009 
6010    ??? This is a first implementation shot, still in rough shape.  It is
6011    heavily inspired by the "C" family implementation, with chunks copied
6012    verbatim from there.
6013 
6014    Two obvious improvement candidates are:
6015    o Use a more efficient name/decl mapping scheme
6016    o Devise a middle-end infrastructure to avoid having to copy
6017      pieces between front-ends.  */
6018 
6019 /* ----------------------------------------------------------------------- *
6020  *                         BUILTIN ELEMENTARY TYPES                        *
6021  * ----------------------------------------------------------------------- */
6022 
6023 /* Standard data types to be used in builtin argument declarations.  */
6024 
6025 enum c_tree_index
6026 {
6027     CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
6028     CTI_STRING_TYPE,
6029     CTI_CONST_STRING_TYPE,
6030 
6031     CTI_MAX
6032 };
6033 
6034 static tree c_global_trees[CTI_MAX];
6035 
6036 #define signed_size_type_node	c_global_trees[CTI_SIGNED_SIZE_TYPE]
6037 #define string_type_node	c_global_trees[CTI_STRING_TYPE]
6038 #define const_string_type_node	c_global_trees[CTI_CONST_STRING_TYPE]
6039 
6040 /* ??? In addition to some attribute handlers, we currently don't support a
6041    (small) number of builtin-types, which in turn inhibits support for a
6042    number of builtin functions.  */
6043 #define wint_type_node    void_type_node
6044 #define intmax_type_node  void_type_node
6045 #define uintmax_type_node void_type_node
6046 
6047 /* Used to help initialize the builtin-types.def table.  When a type of
6048    the correct size doesn't exist, use error_mark_node instead of NULL.
6049    The latter results in segfaults even when a decl using the type doesn't
6050    get invoked.  */
6051 
6052 static tree
6053 builtin_type_for_size (int size, bool unsignedp)
6054 {
6055   tree type = gnat_type_for_size (size, unsignedp);
6056   return type ? type : error_mark_node;
6057 }
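
/* Hedged sketch: request a 32-bit unsigned type; on a target lacking one,
   error_mark_node comes back and the decls built from it are silently
   dropped instead of crashing the compiler.  */
#if 0
tree u32 = builtin_type_for_size (32, true);
#endif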
6058 
6059 /* Build/push the elementary type decls that builtin functions/types
6060    will need.  */
6061 
6062 static void
6063 install_builtin_elementary_types (void)
6064 {
6065   signed_size_type_node = gnat_signed_type_for (size_type_node);
6066   pid_type_node = integer_type_node;
6067 
6068   string_type_node = build_pointer_type (char_type_node);
6069   const_string_type_node
6070     = build_pointer_type (build_qualified_type
6071 			  (char_type_node, TYPE_QUAL_CONST));
6072 }
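
/* Hedged C equivalents (for illustration only) of the type nodes built
   above.  */
#if 0
typedef char *gnat_string_type;              /* string_type_node */
typedef const char *gnat_const_string_type;  /* const_string_type_node */
#endif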
6073 
6074 /* ----------------------------------------------------------------------- *
6075  *                          BUILTIN FUNCTION TYPES                         *
6076  * ----------------------------------------------------------------------- */
6077 
6078 /* Now, builtin function types per se.  */
6079 
6080 enum c_builtin_type
6081 {
6082 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
6083 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
6084 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
6085 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
6086 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
6087 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
6088 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
6089 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6090 			    ARG6) NAME,
6091 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6092 			    ARG6, ARG7) NAME,
6093 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6094 			    ARG6, ARG7, ARG8) NAME,
6095 #define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6096 			    ARG6, ARG7, ARG8, ARG9) NAME,
6097 #define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6098 			     ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
6099 #define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6100 			     ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
6101 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
6102 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
6103 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
6104 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
6105 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
6106 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6107 				NAME,
6108 #define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6109 				ARG6) NAME,
6110 #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6111 				ARG6, ARG7) NAME,
6112 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
6113 #include "builtin-types.def"
6114 #include "ada-builtin-types.def"
6115 #undef DEF_PRIMITIVE_TYPE
6116 #undef DEF_FUNCTION_TYPE_0
6117 #undef DEF_FUNCTION_TYPE_1
6118 #undef DEF_FUNCTION_TYPE_2
6119 #undef DEF_FUNCTION_TYPE_3
6120 #undef DEF_FUNCTION_TYPE_4
6121 #undef DEF_FUNCTION_TYPE_5
6122 #undef DEF_FUNCTION_TYPE_6
6123 #undef DEF_FUNCTION_TYPE_7
6124 #undef DEF_FUNCTION_TYPE_8
6125 #undef DEF_FUNCTION_TYPE_9
6126 #undef DEF_FUNCTION_TYPE_10
6127 #undef DEF_FUNCTION_TYPE_11
6128 #undef DEF_FUNCTION_TYPE_VAR_0
6129 #undef DEF_FUNCTION_TYPE_VAR_1
6130 #undef DEF_FUNCTION_TYPE_VAR_2
6131 #undef DEF_FUNCTION_TYPE_VAR_3
6132 #undef DEF_FUNCTION_TYPE_VAR_4
6133 #undef DEF_FUNCTION_TYPE_VAR_5
6134 #undef DEF_FUNCTION_TYPE_VAR_6
6135 #undef DEF_FUNCTION_TYPE_VAR_7
6136 #undef DEF_POINTER_TYPE
6137   BT_LAST
6138 };
6139 
6140 typedef enum c_builtin_type builtin_type;
6141 
6142 /* A temporary array used in communication with def_fn_type.  */
6143 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
6144 
6145 /* A helper function for install_builtin_function_types.  Build function type
6146    for DEF with return type RET and N arguments.  If VAR is true, then the
6147    function should be variadic after those N arguments.
6148 
6149    Takes special care not to ICE if any of the types involved are
6150    error_mark_node, which indicates that said type is not in fact available
6151    (see builtin_type_for_size), in which case the function type as a whole
6152    is set to error_mark_node.  */
6153 
6154 static void
6155 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
6156 {
6157   tree t;
6158   tree *args = XALLOCAVEC (tree, n);
6159   va_list list;
6160   int i;
6161 
6162   va_start (list, n);
6163   for (i = 0; i < n; ++i)
6164     {
6165       builtin_type a = (builtin_type) va_arg (list, int);
6166       t = builtin_types[a];
6167       if (t == error_mark_node)
6168 	goto egress;
6169       args[i] = t;
6170     }
6171 
6172   t = builtin_types[ret];
6173   if (t == error_mark_node)
6174     goto egress;
6175   if (var)
6176     t = build_varargs_function_type_array (t, n, args);
6177   else
6178     t = build_function_type_array (t, n, args);
6179 
6180  egress:
6181   builtin_types[def] = t;
6182   va_end (list);
6183 }
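
/* Hedged illustration: the builtin-types.def entry for malloc-like
   builtins, DEF_FUNCTION_TYPE_1 (BT_FN_PTR_SIZE, BT_PTR, BT_SIZE),
   expands to a call along these lines, building "void * (size_t)".  */
#if 0
def_fn_type (BT_FN_PTR_SIZE, BT_PTR, false, 1, BT_SIZE);
#endif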
6184 
6185 /* Build the builtin function types and install them in the builtin_types
6186    array for later use in builtin function decls.  */
6187 
6188 static void
6189 install_builtin_function_types (void)
6190 {
6191   tree va_list_ref_type_node;
6192   tree va_list_arg_type_node;
6193 
6194   if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
6195     {
6196       va_list_arg_type_node = va_list_ref_type_node =
6197 	build_pointer_type (TREE_TYPE (va_list_type_node));
6198     }
6199   else
6200     {
6201       va_list_arg_type_node = va_list_type_node;
6202       va_list_ref_type_node = build_reference_type (va_list_type_node);
6203     }
6204 
6205 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
6206   builtin_types[ENUM] = VALUE;
6207 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
6208   def_fn_type (ENUM, RETURN, 0, 0);
6209 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
6210   def_fn_type (ENUM, RETURN, 0, 1, ARG1);
6211 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
6212   def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
6213 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6214   def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
6215 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6216   def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
6217 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5)	\
6218   def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6219 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6220 			    ARG6)					\
6221   def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
6222 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6223 			    ARG6, ARG7)					\
6224   def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
6225 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6226 			    ARG6, ARG7, ARG8)				\
6227   def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6,	\
6228 	       ARG7, ARG8);
6229 #define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6230 			    ARG6, ARG7, ARG8, ARG9)			\
6231   def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6,	\
6232 	       ARG7, ARG8, ARG9);
6233 #define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
6234 			     ARG6, ARG7, ARG8, ARG9, ARG10)		\
6235   def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6,	\
6236 	       ARG7, ARG8, ARG9, ARG10);
6237 #define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
6238 			     ARG6, ARG7, ARG8, ARG9, ARG10, ARG11)	\
6239   def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6,	\
6240 	       ARG7, ARG8, ARG9, ARG10, ARG11);
6241 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
6242   def_fn_type (ENUM, RETURN, 1, 0);
6243 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
6244   def_fn_type (ENUM, RETURN, 1, 1, ARG1);
6245 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
6246   def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
6247 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6248   def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
6249 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6250   def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
6251 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6252   def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6253 #define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6254 				ARG6)				\
6255   def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
6256 #define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6257 				ARG6, ARG7)				\
6258   def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
6259 #define DEF_POINTER_TYPE(ENUM, TYPE) \
6260   builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
6261 
6262 #include "builtin-types.def"
6263 #include "ada-builtin-types.def"
6264 
6265 #undef DEF_PRIMITIVE_TYPE
6266 #undef DEF_FUNCTION_TYPE_0
6267 #undef DEF_FUNCTION_TYPE_1
6268 #undef DEF_FUNCTION_TYPE_2
6269 #undef DEF_FUNCTION_TYPE_3
6270 #undef DEF_FUNCTION_TYPE_4
6271 #undef DEF_FUNCTION_TYPE_5
6272 #undef DEF_FUNCTION_TYPE_6
6273 #undef DEF_FUNCTION_TYPE_7
6274 #undef DEF_FUNCTION_TYPE_8
6275 #undef DEF_FUNCTION_TYPE_9
6276 #undef DEF_FUNCTION_TYPE_10
6277 #undef DEF_FUNCTION_TYPE_11
6278 #undef DEF_FUNCTION_TYPE_VAR_0
6279 #undef DEF_FUNCTION_TYPE_VAR_1
6280 #undef DEF_FUNCTION_TYPE_VAR_2
6281 #undef DEF_FUNCTION_TYPE_VAR_3
6282 #undef DEF_FUNCTION_TYPE_VAR_4
6283 #undef DEF_FUNCTION_TYPE_VAR_5
6284 #undef DEF_FUNCTION_TYPE_VAR_6
6285 #undef DEF_FUNCTION_TYPE_VAR_7
6286 #undef DEF_POINTER_TYPE
6287   builtin_types[(int) BT_LAST] = NULL_TREE;
6288 }
6289 
6290 /* ----------------------------------------------------------------------- *
6291  *                            BUILTIN ATTRIBUTES                           *
6292  * ----------------------------------------------------------------------- */
6293 
6294 enum built_in_attribute
6295 {
6296 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
6297 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
6298 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
6299 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
6300 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
6301 #include "builtin-attrs.def"
6302 #undef DEF_ATTR_NULL_TREE
6303 #undef DEF_ATTR_INT
6304 #undef DEF_ATTR_STRING
6305 #undef DEF_ATTR_IDENT
6306 #undef DEF_ATTR_TREE_LIST
6307   ATTR_LAST
6308 };
6309 
6310 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
6311 
6312 static void
6313 install_builtin_attributes (void)
6314 {
6315   /* Fill in the built_in_attributes array.  */
6316 #define DEF_ATTR_NULL_TREE(ENUM)				\
6317   built_in_attributes[(int) ENUM] = NULL_TREE;
6318 #define DEF_ATTR_INT(ENUM, VALUE)				\
6319   built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
6320 #define DEF_ATTR_STRING(ENUM, VALUE)				\
6321   built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
6322 #define DEF_ATTR_IDENT(ENUM, STRING)				\
6323   built_in_attributes[(int) ENUM] = get_identifier (STRING);
6324 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
6325   built_in_attributes[(int) ENUM]			\
6326     = tree_cons (built_in_attributes[(int) PURPOSE],	\
6327 		 built_in_attributes[(int) VALUE],	\
6328 		 built_in_attributes[(int) CHAIN]);
6329 #include "builtin-attrs.def"
6330 #undef DEF_ATTR_NULL_TREE
6331 #undef DEF_ATTR_INT
6332 #undef DEF_ATTR_STRING
6333 #undef DEF_ATTR_IDENT
6334 #undef DEF_ATTR_TREE_LIST
6335 }
6336 
6337 /* Handle a "const" attribute; arguments as in
6338    struct attribute_spec.handler.  */
6339 
6340 static tree
6341 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
6342 			tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6343 			bool *no_add_attrs)
6344 {
6345   if (TREE_CODE (*node) == FUNCTION_DECL)
6346     TREE_READONLY (*node) = 1;
6347   else
6348     *no_add_attrs = true;
6349 
6350   return NULL_TREE;
6351 }
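
/* Hedged example (hypothetical declaration) of what this handler applies
   to: a "const" function reads no global memory, so TREE_READONLY is set
   on its decl.  */
#if 0
extern int my_popcount (unsigned int x) __attribute__ ((const));
#endif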
6352 
6353 /* Handle a "nothrow" attribute; arguments as in
6354    struct attribute_spec.handler.  */
6355 
6356 static tree
6357 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
6358 			  tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6359 			  bool *no_add_attrs)
6360 {
6361   if (TREE_CODE (*node) == FUNCTION_DECL)
6362     TREE_NOTHROW (*node) = 1;
6363   else
6364     *no_add_attrs = true;
6365 
6366   return NULL_TREE;
6367 }
6368 
6369 /* Handle a "pure" attribute; arguments as in
6370    struct attribute_spec.handler.  */
6371 
6372 static tree
6373 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6374 		       int ARG_UNUSED (flags), bool *no_add_attrs)
6375 {
6376   if (TREE_CODE (*node) == FUNCTION_DECL)
6377     DECL_PURE_P (*node) = 1;
6378   /* TODO: support types.  */
6379   else
6380     {
6381       warning (OPT_Wattributes, "%qs attribute ignored",
6382 	       IDENTIFIER_POINTER (name));
6383       *no_add_attrs = true;
6384     }
6385 
6386   return NULL_TREE;
6387 }
6388 
6389 /* Handle a "no vops" attribute; arguments as in
6390    struct attribute_spec.handler.  */
6391 
6392 static tree
6393 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
6394 			 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6395 			 bool *ARG_UNUSED (no_add_attrs))
6396 {
6397   gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
6398   DECL_IS_NOVOPS (*node) = 1;
6399   return NULL_TREE;
6400 }
6401 
6402 /* Helper for nonnull attribute handling; fetch the operand number
6403    from the attribute argument list.  */
6404 
6405 static bool
6406 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6407 {
6408   /* Verify the arg number is a constant.  */
6409   if (!tree_fits_uhwi_p (arg_num_expr))
6410     return false;
6411 
6412   *valp = TREE_INT_CST_LOW (arg_num_expr);
6413   return true;
6414 }
6415 
6416 /* Handle the "nonnull" attribute.  */
6417 static tree
6418 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6419 			  tree args, int ARG_UNUSED (flags),
6420 			  bool *no_add_attrs)
6421 {
6422   tree type = *node;
6423   unsigned HOST_WIDE_INT attr_arg_num;
6424 
6425   /* If no arguments are specified, all pointer arguments should be
6426      non-null.  Verify a full prototype is given so that the arguments
6427      will have the correct types when we actually check them later.
6428      Avoid diagnosing type-generic built-ins since those have no
6429      prototype.  */
6430   if (!args)
6431     {
6432       if (!prototype_p (type)
6433 	  && (!TYPE_ATTRIBUTES (type)
6434 	      || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type))))
6435 	{
6436 	  error ("%qs attribute without arguments on a non-prototype",
6437 		 "nonnull");
6438 	  *no_add_attrs = true;
6439 	}
6440       return NULL_TREE;
6441     }
6442 
6443   /* Argument list specified.  Verify that each argument number references
6444      a pointer argument.  */
6445   for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
6446     {
6447       unsigned HOST_WIDE_INT arg_num = 0, ck_num;
6448 
6449       if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6450 	{
6451 	  error ("%qs argument has invalid operand number (argument %lu)",
6452 		 "nonnull", (unsigned long) attr_arg_num);
6453 	  *no_add_attrs = true;
6454 	  return NULL_TREE;
6455 	}
6456 
6457       if (prototype_p (type))
6458 	{
6459 	  function_args_iterator iter;
6460 	  tree argument;
6461 
6462 	  function_args_iter_init (&iter, type);
6463 	  for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
6464 	    {
6465 	      argument = function_args_iter_cond (&iter);
6466 	      if (!argument || ck_num == arg_num)
6467 		break;
6468 	    }
6469 
6470 	  if (!argument
6471 	      || TREE_CODE (argument) == VOID_TYPE)
6472 	    {
6473 	      error ("%qs argument with out-of-range operand number "
6474 		     "(argument %lu, operand %lu)", "nonnull",
6475 		     (unsigned long) attr_arg_num, (unsigned long) arg_num);
6476 	      *no_add_attrs = true;
6477 	      return NULL_TREE;
6478 	    }
6479 
6480 	  if (TREE_CODE (argument) != POINTER_TYPE)
6481 	    {
6482 	      error ("%qs argument references non-pointer operand "
6483 		     "(argument %lu, operand %lu)", "nonnull",
6484 		   (unsigned long) attr_arg_num, (unsigned long) arg_num);
6485 	      *no_add_attrs = true;
6486 	      return NULL_TREE;
6487 	    }
6488 	}
6489     }
6490 
6491   return NULL_TREE;
6492 }
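
/* Hedged example (hypothetical declaration): explicit operand numbers 1
   and 2 must reference pointer arguments, as verified above.  */
#if 0
extern void *my_memcpy (void *dst, const void *src, __SIZE_TYPE__ n)
  __attribute__ ((nonnull (1, 2)));
#endif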
6493 
6494 /* Handle a "sentinel" attribute.  */
6495 
6496 static tree
6497 handle_sentinel_attribute (tree *node, tree name, tree args,
6498 			   int ARG_UNUSED (flags), bool *no_add_attrs)
6499 {
6500   if (!prototype_p (*node))
6501     {
6502       warning (OPT_Wattributes,
6503 	       "%qs attribute requires prototypes with named arguments",
6504 	       IDENTIFIER_POINTER (name));
6505       *no_add_attrs = true;
6506     }
6507   else
6508     {
6509       if (!stdarg_p (*node))
6510         {
6511 	  warning (OPT_Wattributes,
6512 		   "%qs attribute only applies to variadic functions",
6513 		   IDENTIFIER_POINTER (name));
6514 	  *no_add_attrs = true;
6515 	}
6516     }
6517 
6518   if (args)
6519     {
6520       tree position = TREE_VALUE (args);
6521 
6522       if (TREE_CODE (position) != INTEGER_CST)
6523         {
6524 	  warning (0, "requested position is not an integer constant");
6525 	  *no_add_attrs = true;
6526 	}
6527       else
6528         {
6529 	  if (tree_int_cst_lt (position, integer_zero_node))
6530 	    {
6531 	      warning (0, "requested position is less than zero");
6532 	      *no_add_attrs = true;
6533 	    }
6534 	}
6535     }
6536 
6537   return NULL_TREE;
6538 }
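
/* Hedged example (hypothetical declaration): a variadic function whose
   argument list must end with a null pointer; the optional position
   argument, 0 here, denotes the last actual argument.  */
#if 0
extern int my_execl (const char *path, const char *arg, ...)
  __attribute__ ((sentinel (0)));
#endif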
6539 
6540 /* Handle a "noreturn" attribute; arguments as in
6541    struct attribute_spec.handler.  */
6542 
6543 static tree
6544 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6545 			   int ARG_UNUSED (flags), bool *no_add_attrs)
6546 {
6547   tree type = TREE_TYPE (*node);
6548 
6549   /* See FIXME comment in c_common_attribute_table.  */
6550   if (TREE_CODE (*node) == FUNCTION_DECL)
6551     TREE_THIS_VOLATILE (*node) = 1;
6552   else if (TREE_CODE (type) == POINTER_TYPE
6553 	   && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6554     TREE_TYPE (*node)
6555       = build_pointer_type
6556 	(change_qualified_type (TREE_TYPE (type), TYPE_QUAL_VOLATILE));
6557   else
6558     {
6559       warning (OPT_Wattributes, "%qs attribute ignored",
6560 	       IDENTIFIER_POINTER (name));
6561       *no_add_attrs = true;
6562     }
6563 
6564   return NULL_TREE;
6565 }
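
/* Hedged example (hypothetical declaration): a noreturn function decl is
   marked volatile above; a pointer to a noreturn function type instead
   gets a volatile-qualified pointed-to type.  */
#if 0
extern void my_fatal (const char *msg) __attribute__ ((noreturn));
#endif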
6566 
6567 /* Handle a "stack_protect" attribute; arguments as in
6568    struct attribute_spec.handler.  */
6569 
6570 static tree
6571 handle_stack_protect_attribute (tree *node, tree name, tree, int,
6572 				bool *no_add_attrs)
6573 {
6574   if (TREE_CODE (*node) != FUNCTION_DECL)
6575     {
6576       warning (OPT_Wattributes, "%qE attribute ignored", name);
6577       *no_add_attrs = true;
6578     }
6579 
6580   return NULL_TREE;
6581 }
6582 
6583 /* Handle a "no_stack_protector" attribute; arguments as in
6584    struct attribute_spec.handler.  */
6585 
6586 static tree
6587 handle_no_stack_protector_attribute (tree *node, tree name, tree, int,
6588 				   bool *no_add_attrs)
6589 {
6590   if (TREE_CODE (*node) != FUNCTION_DECL)
6591     {
6592       warning (OPT_Wattributes, "%qE attribute ignored", name);
6593       *no_add_attrs = true;
6594     }
6595 
6596   return NULL_TREE;
6597 }
6598 
6599 
6600 /* Handle a "noinline" attribute; arguments as in
6601    struct attribute_spec.handler.  */
6602 
6603 static tree
6604 handle_noinline_attribute (tree *node, tree name,
6605 			   tree ARG_UNUSED (args),
6606 			   int ARG_UNUSED (flags), bool *no_add_attrs)
6607 {
6608   if (TREE_CODE (*node) == FUNCTION_DECL)
6609     {
6610       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6611 	{
6612 	  warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6613 		   "with attribute %qs", name, "always_inline");
6614 	  *no_add_attrs = true;
6615 	}
6616       else
6617 	DECL_UNINLINABLE (*node) = 1;
6618     }
6619   else
6620     {
6621       warning (OPT_Wattributes, "%qE attribute ignored", name);
6622       *no_add_attrs = true;
6623     }
6624 
6625   return NULL_TREE;
6626 }
6627 
6628 /* Handle a "noclone" attribute; arguments as in
6629    struct attribute_spec.handler.  */
6630 
6631 static tree
6632 handle_noclone_attribute (tree *node, tree name,
6633 			  tree ARG_UNUSED (args),
6634 			  int ARG_UNUSED (flags), bool *no_add_attrs)
6635 {
6636   if (TREE_CODE (*node) != FUNCTION_DECL)
6637     {
6638       warning (OPT_Wattributes, "%qE attribute ignored", name);
6639       *no_add_attrs = true;
6640     }
6641 
6642   return NULL_TREE;
6643 }
6644 
6645 /* Handle a "no_icf" attribute; arguments as in
6646    struct attribute_spec.handler.  */
6647 
6648 static tree
6649 handle_noicf_attribute (tree *node, tree name,
6650 			tree ARG_UNUSED (args),
6651 			int ARG_UNUSED (flags), bool *no_add_attrs)
6652 {
6653   if (TREE_CODE (*node) != FUNCTION_DECL)
6654     {
6655       warning (OPT_Wattributes, "%qE attribute ignored", name);
6656       *no_add_attrs = true;
6657     }
6658 
6659   return NULL_TREE;
6660 }
6661 
6662 /* Handle a "noipa" attribute; arguments as in
6663    struct attribute_spec.handler.  */
6664 
6665 static tree
6666 handle_noipa_attribute (tree *node, tree name, tree, int, bool *no_add_attrs)
6667 {
6668   if (TREE_CODE (*node) != FUNCTION_DECL)
6669     {
6670       warning (OPT_Wattributes, "%qE attribute ignored", name);
6671       *no_add_attrs = true;
6672     }
6673 
6674   return NULL_TREE;
6675 }
6676 
6677 /* Handle a "leaf" attribute; arguments as in
6678    struct attribute_spec.handler.  */
6679 
6680 static tree
6681 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6682 		       int ARG_UNUSED (flags), bool *no_add_attrs)
6683 {
6684   if (TREE_CODE (*node) != FUNCTION_DECL)
6685     {
6686       warning (OPT_Wattributes, "%qE attribute ignored", name);
6687       *no_add_attrs = true;
6688     }
6689   if (!TREE_PUBLIC (*node))
6690     {
6691       warning (OPT_Wattributes, "%qE attribute has no effect", name);
6692       *no_add_attrs = true;
6693     }
6694 
6695   return NULL_TREE;
6696 }
6697 
6698 /* Handle a "always_inline" attribute; arguments as in
6699    struct attribute_spec.handler.  */
6700 
6701 static tree
6702 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6703 				int ARG_UNUSED (flags), bool *no_add_attrs)
6704 {
6705   if (TREE_CODE (*node) == FUNCTION_DECL)
6706     {
6707       /* Set the attribute and mark it for disregarding inline limits.  */
6708       DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
6709     }
6710   else
6711     {
6712       warning (OPT_Wattributes, "%qE attribute ignored", name);
6713       *no_add_attrs = true;
6714     }
6715 
6716   return NULL_TREE;
6717 }
6718 
6719 /* Handle a "malloc" attribute; arguments as in
6720    struct attribute_spec.handler.  */
6721 
6722 static tree
6723 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6724 			 int ARG_UNUSED (flags), bool *no_add_attrs)
6725 {
6726   if (TREE_CODE (*node) == FUNCTION_DECL
6727       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6728     DECL_IS_MALLOC (*node) = 1;
6729   else
6730     {
6731       warning (OPT_Wattributes, "%qs attribute ignored",
6732 	       IDENTIFIER_POINTER (name));
6733       *no_add_attrs = true;
6734     }
6735 
6736   return NULL_TREE;
6737 }
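
/* Hedged example (hypothetical declaration): the handler insists on a
   pointer return type before setting DECL_IS_MALLOC.  */
#if 0
extern void *my_arena_alloc (__SIZE_TYPE__ size) __attribute__ ((malloc));
#endif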
6738 
6739 /* Fake handler for attributes we don't properly support.  */
6740 
6741 tree
6742 fake_attribute_handler (tree * ARG_UNUSED (node),
6743 			tree ARG_UNUSED (name),
6744 			tree ARG_UNUSED (args),
6745 			int  ARG_UNUSED (flags),
6746 			bool * ARG_UNUSED (no_add_attrs))
6747 {
6748   return NULL_TREE;
6749 }
6750 
6751 /* Handle a "type_generic" attribute.  */
6752 
6753 static tree
6754 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6755 			       tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6756 			       bool * ARG_UNUSED (no_add_attrs))
6757 {
6758   /* Ensure we have a function type.  */
6759   gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
6760 
6761   /* Ensure we have a variadic function.  */
6762   gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6763 
6764   return NULL_TREE;
6765 }
6766 
6767 /* Handle a "flatten" attribute; arguments as in
6768    struct attribute_spec.handler.  */
6769 
6770 static tree
6771 handle_flatten_attribute (tree *node, tree name,
6772 			  tree args ATTRIBUTE_UNUSED,
6773 			  int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
6774 {
6775   if (TREE_CODE (*node) == FUNCTION_DECL)
6776     /* Do nothing else, just set the attribute.  We'll get at
6777        it later with lookup_attribute.  */
6778     ;
6779   else
6780     {
6781       warning (OPT_Wattributes, "%qE attribute ignored", name);
6782       *no_add_attrs = true;
6783     }
6784 
6785   return NULL_TREE;
6786 }
6787 
6788 /* Handle a "used" attribute; arguments as in
6789    struct attribute_spec.handler.  */
6790 
6791 static tree
6792 handle_used_attribute (tree *pnode, tree name, tree ARG_UNUSED (args),
6793 		       int ARG_UNUSED (flags), bool *no_add_attrs)
6794 {
6795   tree node = *pnode;
6796 
6797   if (TREE_CODE (node) == FUNCTION_DECL
6798       || (VAR_P (node) && TREE_STATIC (node))
6799       || (TREE_CODE (node) == TYPE_DECL))
6800     {
6801       TREE_USED (node) = 1;
6802       DECL_PRESERVE_P (node) = 1;
6803       if (VAR_P (node))
6804 	DECL_READ_P (node) = 1;
6805     }
6806   else
6807     {
6808       warning (OPT_Wattributes, "%qE attribute ignored", name);
6809       *no_add_attrs = true;
6810     }
6811 
6812   return NULL_TREE;
6813 }
6814 
6815 /* Handle a "cold" attribute; arguments as in
6816    struct attribute_spec.handler.  */
6817 
6818 static tree
6819 handle_cold_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6820 		       int ARG_UNUSED (flags), bool *no_add_attrs)
6821 {
6822   if (TREE_CODE (*node) == FUNCTION_DECL
6823       || TREE_CODE (*node) == LABEL_DECL)
6824     {
6825       /* Attribute cold processing is done later with lookup_attribute.  */
6826     }
6827   else
6828     {
6829       warning (OPT_Wattributes, "%qE attribute ignored", name);
6830       *no_add_attrs = true;
6831     }
6832 
6833   return NULL_TREE;
6834 }
6835 
6836 /* Handle a "hot" attribute; arguments as in
6837    struct attribute_spec.handler.  */
6838 
6839 static tree
6840 handle_hot_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6841 		      int ARG_UNUSED (flags), bool *no_add_attrs)
6842 {
6843   if (TREE_CODE (*node) == FUNCTION_DECL
6844       || TREE_CODE (*node) == LABEL_DECL)
6845     {
6846       /* Attribute hot processing is done later with lookup_attribute.  */
6847     }
6848   else
6849     {
6850       warning (OPT_Wattributes, "%qE attribute ignored", name);
6851       *no_add_attrs = true;
6852     }
6853 
6854   return NULL_TREE;
6855 }
6856 
6857 /* Handle a "target" attribute.  */
6858 
6859 static tree
6860 handle_target_attribute (tree *node, tree name, tree args, int flags,
6861 			 bool *no_add_attrs)
6862 {
6863   /* Ensure we have a function decl.  */
6864   if (TREE_CODE (*node) != FUNCTION_DECL)
6865     {
6866       warning (OPT_Wattributes, "%qE attribute ignored", name);
6867       *no_add_attrs = true;
6868     }
6869   else if (lookup_attribute ("target_clones", DECL_ATTRIBUTES (*node)))
6870     {
6871       warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6872 		   "with %qs attribute", name, "target_clones");
6873       *no_add_attrs = true;
6874     }
6875   else if (!targetm.target_option.valid_attribute_p (*node, name, args, flags))
6876     *no_add_attrs = true;
6877 
6878   /* Check that there's no empty string in values of the attribute.  */
6879   for (tree t = args; t != NULL_TREE; t = TREE_CHAIN (t))
6880     {
6881       tree value = TREE_VALUE (t);
6882       if (TREE_CODE (value) == STRING_CST
6883 	  && TREE_STRING_LENGTH (value) == 1
6884 	  && TREE_STRING_POINTER (value)[0] == '\0')
6885 	{
6886 	  warning (OPT_Wattributes, "empty string in attribute %<target%>");
6887 	  *no_add_attrs = true;
6888 	}
6889     }
6890 
6891   return NULL_TREE;
6892 }
6893 
6894 /* Handle a "target_clones" attribute.  */
6895 
6896 static tree
6897 handle_target_clones_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6898 			  int ARG_UNUSED (flags), bool *no_add_attrs)
6899 {
6900   /* Ensure we have a function decl.  */
6901   if (TREE_CODE (*node) == FUNCTION_DECL)
6902     {
6903       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6904 	{
6905 	  warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6906 		   "with %qs attribute", name, "always_inline");
6907 	  *no_add_attrs = true;
6908 	}
6909       else if (lookup_attribute ("target", DECL_ATTRIBUTES (*node)))
6910 	{
6911 	  warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6912 		   "with %qs attribute", name, "target");
6913 	  *no_add_attrs = true;
6914 	}
6915       else
6916 	/* Do not inline functions with multiple clone targets.  */
6917 	DECL_UNINLINABLE (*node) = 1;
6918     }
6919   else
6920     {
6921       warning (OPT_Wattributes, "%qE attribute ignored", name);
6922       *no_add_attrs = true;
6923     }
6924   return NULL_TREE;
6925 }
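
/* Hedged example (hypothetical definition): one body compiled for several
   targets; the handler above also marks such a function uninlinable.  */
#if 0
__attribute__ ((target_clones ("avx2", "default")))
static int my_add (int a, int b) { return a + b; }
#endif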
6926 
6927 /* Handle a "vector_size" attribute; arguments as in
6928    struct attribute_spec.handler.  */
6929 
6930 static tree
6931 handle_vector_size_attribute (tree *node, tree name, tree args,
6932 			      int ARG_UNUSED (flags), bool *no_add_attrs)
6933 {
6934   tree type = *node;
6935   tree vector_type;
6936 
6937   *no_add_attrs = true;
6938 
6939   /* We need to provide for vector pointers, vector arrays, and
6940      functions returning vectors.  For example:
6941 
6942        __attribute__((vector_size(16))) short *foo;
6943 
6944      In this case, the mode is SI, but the type being modified is
6945      HI, so we need to look further.  */
6946   while (POINTER_TYPE_P (type)
6947 	 || TREE_CODE (type) == FUNCTION_TYPE
6948 	 || TREE_CODE (type) == ARRAY_TYPE)
6949     type = TREE_TYPE (type);
6950 
6951   vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6952   if (!vector_type)
6953     return NULL_TREE;
6954 
6955   /* Build back pointers if needed.  */
6956   *node = reconstruct_complex_type (*node, vector_type);
6957 
6958   return NULL_TREE;
6959 }
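
/* Hedged example: a 16-byte vector of 4 ints.  For the "short *" case
   quoted in the comment above, the attribute applies to the pointed-to
   type, hence the stripping loop.  */
#if 0
typedef int v4si __attribute__ ((vector_size (16)));
#endif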
6960 
6961 /* Handle a "vector_type" attribute; arguments as in
6962    struct attribute_spec.handler.  */
6963 
6964 static tree
6965 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6966 			      int ARG_UNUSED (flags), bool *no_add_attrs)
6967 {
6968   tree type = *node;
6969   tree vector_type;
6970 
6971   *no_add_attrs = true;
6972 
6973   if (TREE_CODE (type) != ARRAY_TYPE)
6974     {
6975       error ("attribute %qs applies to array types only",
6976 	     IDENTIFIER_POINTER (name));
6977       return NULL_TREE;
6978     }
6979 
6980   vector_type = build_vector_type_for_array (type, name);
6981   if (!vector_type)
6982     return NULL_TREE;
6983 
6984   TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6985   *node = vector_type;
6986 
6987   return NULL_TREE;
6988 }
6989 
6990 /* ----------------------------------------------------------------------- *
6991  *                              BUILTIN FUNCTIONS                          *
6992  * ----------------------------------------------------------------------- */
6993 
6994 /* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
6995    names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
6996    if nonansi_p and flag_no_nonansi_builtin.  */
6997 
6998 static void
6999 def_builtin_1 (enum built_in_function fncode,
7000 	       const char *name,
7001 	       enum built_in_class fnclass,
7002 	       tree fntype, tree libtype,
7003 	       bool both_p, bool fallback_p,
7004 	       bool nonansi_p ATTRIBUTE_UNUSED,
7005 	       tree fnattrs, bool implicit_p)
7006 {
7007   tree decl;
7008   const char *libname;
7009 
7010   /* Preserve an already installed decl.  It most likely was set up in advance
7011      (e.g. as part of the internal builtins) for specific reasons.  */
7012   if (builtin_decl_explicit (fncode))
7013     return;
7014 
7015   if (fntype == error_mark_node)
7016     return;
7017 
7018   gcc_assert ((!both_p && !fallback_p)
7019 	      || !strncmp (name, "__builtin_",
7020 			   strlen ("__builtin_")));
7021 
7022   libname = name + strlen ("__builtin_");
7023   decl = add_builtin_function (name, fntype, fncode, fnclass,
7024 			       (fallback_p ? libname : NULL),
7025 			       fnattrs);
7026   if (both_p)
7027     /* ??? This is normally further controlled by command-line options
7028        like -fno-builtin, but we don't have them for Ada.  */
7029     add_builtin_function (libname, libtype, fncode, fnclass,
7030 			  NULL, fnattrs);
7031 
7032   set_builtin_decl (fncode, decl, implicit_p);
7033 }
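
/* Hedged illustration (attributes elided with NULL_TREE): a builtins.def
   entry for memcpy would reach this worker roughly as follows and, since
   both_p is true, declare both "__builtin_memcpy" and plain "memcpy".  */
#if 0
def_builtin_1 (BUILT_IN_MEMCPY, "__builtin_memcpy", BUILT_IN_NORMAL,
	       builtin_types[(int) BT_FN_PTR_PTR_CONST_PTR_SIZE],
	       builtin_types[(int) BT_FN_PTR_PTR_CONST_PTR_SIZE],
	       true, true, false, NULL_TREE, true);
#endif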
7034 
7035 static int flag_isoc94 = 0;
7036 static int flag_isoc99 = 0;
7037 static int flag_isoc11 = 0;
7038 static int flag_isoc2x = 0;
7039 
7040 /* Install what the common builtins.def offers plus our local additions.
7041 
7042    Note that ada-builtins.def is included first so that locally redefined
7043    built-in functions take precedence over the commonly defined ones.  */
7044 
7045 static void
7046 install_builtin_functions (void)
7047 {
7048 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
7049 		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
7050   if (NAME && COND)							\
7051     def_builtin_1 (ENUM, NAME, CLASS,                                   \
7052                    builtin_types[(int) TYPE],                           \
7053                    builtin_types[(int) LIBTYPE],                        \
7054                    BOTH_P, FALLBACK_P, NONANSI_P,                       \
7055                    built_in_attributes[(int) ATTRS], IMPLICIT);
7056 #define DEF_ADA_BUILTIN(ENUM, NAME, TYPE, ATTRS)		\
7057   DEF_BUILTIN (ENUM, "__builtin_" NAME, BUILT_IN_FRONTEND, TYPE, BT_LAST, \
7058 	       false, false, false, ATTRS, true, true)
7059 #include "ada-builtins.def"
7060 #include "builtins.def"
7061 }
7062 
7063 /* ----------------------------------------------------------------------- *
7064  *                              BUILTIN FUNCTIONS                          *
7065  * ----------------------------------------------------------------------- */
7066 
7067 /* Install the builtin functions we might need.  */
7068 
7069 void
7070 gnat_install_builtins (void)
7071 {
7072   install_builtin_elementary_types ();
7073   install_builtin_function_types ();
7074   install_builtin_attributes ();
7075 
7076   /* Install builtins used by generic middle-end pieces first.  Some of these
7077      know about internal specificities and control attributes accordingly, for
7078      instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
7079      the generic definition from builtins.def.  */
7080   build_common_builtin_nodes ();
7081 
7082   /* Now, install the target specific builtins, such as the AltiVec family on
7083      ppc, and the common set as exposed by builtins.def.  */
7084   targetm.init_builtins ();
7085   install_builtin_functions ();
7086 }
7087 
7088 #include "gt-ada-utils.h"
7089 #include "gtype-ada.h"
7090