/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2020, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have received a copy of the GNU General   *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "attribs.h"
#include "varasm.h"
#include "toplev.h"
#include "output.h"
#include "debug.h"
#include "convert.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "tree-dump.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "nlists.h"
#include "snames.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* True if floating-point arithmetic may use wider intermediate results.  */
bool fp_arith_may_widen = true;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_stack_protect_attribute (tree *, tree, tree, int, bool *);
static tree handle_no_stack_protector_attribute (tree *, tree, tree, int, bool *);
static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
static tree handle_noicf_attribute (tree *, tree, tree, int, bool *);
static tree handle_noipa_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_flatten_attribute (tree *, tree, tree, int, bool *);
static tree handle_used_attribute (tree *, tree, tree, int, bool *);
static tree handle_cold_attribute (tree *, tree, tree, int, bool *);
static tree handle_hot_attribute (tree *, tree, tree, int, bool *);
static tree handle_target_attribute (tree *, tree, tree, int, bool *);
static tree handle_target_clones_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

static const struct attribute_spec::exclusions attr_cold_hot_exclusions[] =
{
  { "cold", true,  true,  true  },
  { "hot" , true,  true,  true  },
  { NULL  , false, false, false }
};

static const struct attribute_spec::exclusions attr_stack_protect_exclusions[] =
{
  { "stack_protect", true, false, false },
  { "no_stack_protector", true, false, false },
  { NULL, false, false, false },
};

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging in a lot of the common C front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
       affects_type_identity, handler, exclude } */
  { "const",        0, 0,  true,  false, false, false,
    handle_const_attribute, NULL },
  { "nothrow",      0, 0,  true,  false, false, false,
    handle_nothrow_attribute, NULL },
  { "pure",         0, 0,  true,  false, false, false,
    handle_pure_attribute, NULL },
  { "no vops",      0, 0,  true,  false, false, false,
    handle_novops_attribute, NULL },
  { "nonnull",      0, -1, false, true,  true,  false,
    handle_nonnull_attribute, NULL },
  { "sentinel",     0, 1,  false, true,  true,  false,
    handle_sentinel_attribute, NULL },
  { "noreturn",     0, 0,  true,  false, false, false,
    handle_noreturn_attribute, NULL },
  { "stack_protect",0, 0, true,  false, false, false,
    handle_stack_protect_attribute,
    attr_stack_protect_exclusions },
  { "no_stack_protector",0, 0, true,  false, false, false,
    handle_no_stack_protector_attribute,
    attr_stack_protect_exclusions },
  { "noinline",     0, 0,  true,  false, false, false,
    handle_noinline_attribute, NULL },
  { "noclone",      0, 0,  true,  false, false, false,
    handle_noclone_attribute, NULL },
  { "no_icf",       0, 0,  true,  false, false, false,
    handle_noicf_attribute, NULL },
  { "noipa",        0, 0,  true,  false, false, false,
    handle_noipa_attribute, NULL },
  { "leaf",         0, 0,  true,  false, false, false,
    handle_leaf_attribute, NULL },
  { "always_inline",0, 0,  true,  false, false, false,
    handle_always_inline_attribute, NULL },
  { "malloc",       0, 0,  true,  false, false, false,
    handle_malloc_attribute, NULL },
  { "type generic", 0, 0,  false, true,  true,  false,
    handle_type_generic_attribute, NULL },

  { "flatten",      0, 0,  true,  false, false, false,
    handle_flatten_attribute, NULL },
  { "used",         0, 0,  true,  false, false, false,
    handle_used_attribute, NULL },
  { "cold",         0, 0,  true,  false, false, false,
    handle_cold_attribute, attr_cold_hot_exclusions },
  { "hot",          0, 0,  true,  false, false, false,
    handle_hot_attribute, attr_cold_hot_exclusions },
  { "target",       1, -1, true,  false, false, false,
    handle_target_attribute, NULL },
  { "target_clones",1, -1, true,  false, false, false,
    handle_target_clones_attribute, NULL },

  { "vector_size",  1, 1,  false, true,  false, false,
    handle_vector_size_attribute, NULL },
  { "vector_type",  0, 0,  false, true,  false, false,
    handle_vector_type_attribute, NULL },
  { "may_alias",    0, 0,  false, true,  false, false,
    NULL, NULL },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",       3, 3,  false, true,  true,  false,
    fake_attribute_handler, NULL },
  { "format_arg",   1, 1,  false, true,  true,  false,
    fake_attribute_handler, NULL },

  { NULL,           0, 0,  false, false, false, false,
    NULL, NULL }
};

/* Associates a GNAT tree node with a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See the
   documentation of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY)	\
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL)	\
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY)	\
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity with a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY)	\
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL)	\
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY)	\
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
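
/* A hypothetical illustration of how the accessors above expand, for
   exposition only: both tables are indexed by the offset of the node
   from First_Node_Id, so e.g.

     SET_GNU_TREE (gnat_entity, gnu_decl);

   is just the array store

     associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;  */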

/* This variable keeps a table of types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) vec<tree, va_gc> *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) vec<tree, va_gc> *builtin_decls;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

/* A hash table of packable types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */

struct GTY((for_user)) packable_type_hash
{
  hashval_t hash;
  tree type;
};

struct packable_type_hasher : ggc_cache_ptr_hash<packable_type_hash>
{
  static inline hashval_t hash (packable_type_hash *t) { return t->hash; }
  static bool equal (packable_type_hash *a, packable_type_hash *b);

  static int
  keep_cache_entry (packable_type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};

static GTY ((cache)) hash_table<packable_type_hasher> *packable_type_hash_table;

/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */

struct GTY((for_user)) pad_type_hash
{
  hashval_t hash;
  tree type;
};

struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
{
  static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
  static bool equal (pad_type_hash *a, pad_type_hash *b);

  static int
  keep_cache_entry (pad_type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};

static GTY ((cache)) hash_table<pad_type_hasher> *pad_type_hash_table;

static tree merge_sizes (tree, tree, tree, bool, bool);
static tree fold_bit_position (const_tree);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);

/* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
   of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes.  */
struct deferred_decl_context_node
{
  /* The ..._DECL node to work on.  */
  tree decl;

  /* The corresponding entity's Scope.  */
  Entity_Id gnat_scope;

  /* The value of force_global when DECL was pushed.  */
  int force_global;

  /* The list of ..._TYPE nodes to propagate the context to.  */
  vec<tree> types;

  /* The next queue item.  */
  struct deferred_decl_context_node *next;
};

static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;

/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  */
static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);

/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */
static void add_deferred_type_context (struct deferred_decl_context_node *n,
				       tree type);

/* Initialize data structures of the utils.c module.  */

void
init_gnat_utils (void)
{
  /* Initialize the association of GNAT nodes to GCC trees.  */
  associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the association of GNAT nodes to GCC trees as dummies.  */
  dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the hash table of packable types.  */
  packable_type_hash_table = hash_table<packable_type_hasher>::create_ggc (512);

  /* Initialize the hash table of padded types.  */
  pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
}

/* Destroy data structures of the utils.c module.  */

void
destroy_gnat_utils (void)
{
  /* Destroy the association of GNAT nodes to GCC trees.  */
  ggc_free (associate_gnat_to_gnu);
  associate_gnat_to_gnu = NULL;

  /* Destroy the association of GNAT nodes to GCC trees as dummies.  */
  ggc_free (dummy_node_table);
  dummy_node_table = NULL;

  /* Destroy the hash table of packable types.  */
  packable_type_hash_table->empty ();
  packable_type_hash_table = NULL;

  /* Destroy the hash table of padded types.  */
  pad_type_hash_table->empty ();
  pad_type_hash_table = NULL;
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Associate GNU_DECL, a GCC
   tree node, with GNAT_ENTITY.  If GNU_DECL is not a ..._DECL node, abort.
   If NO_CHECK is true, the latter check is suppressed.

   If GNU_DECL is zero, reset a previous association.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  If that is not the case, this usually
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
		&& (PRESENT_GNU_TREE (gnat_entity)
		    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Return the GCC tree node
   that was associated with it.  If there is no such tree node, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}
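
/* A hypothetical sketch of how the three accessors above are typically
   paired, for illustration only (the computation of GNU_DECL is elided):

     if (present_gnu_tree (gnat_entity))
       gnu_decl = get_gnu_tree (gnat_entity);
     else
       {
	 gnu_decl = ...;
	 save_gnu_tree (gnat_entity, gnu_decl, false);
       }  */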

/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
  tree gnu_type, debug_type;

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_equiv))
    gnat_equiv = gnat_type;

  /* If there is already a dummy type, use that one.  Else make one.  */
  if (PRESENT_DUMMY_NODE (gnat_equiv))
    return GET_DUMMY_NODE (gnat_equiv);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_equiv)
			? tree_code_for_record_type (gnat_equiv)
			: ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_equiv))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;
  if (Has_Discriminants (gnat_equiv))
    decl_attributes (&gnu_type,
		     tree_cons (get_identifier ("may_alias"), NULL_TREE,
				NULL_TREE),
		     ATTR_FLAG_TYPE_IN_PLACE);

  SET_DUMMY_NODE (gnat_equiv, gnu_type);

  /* Create a debug type so that debuggers only see an unspecified type.  */
  if (Needs_Debug_Info (gnat_type))
    {
      debug_type = make_node (LANG_TYPE);
      TYPE_NAME (debug_type) = TYPE_NAME (gnu_type);
      TYPE_ARTIFICIAL (debug_type) = TYPE_ARTIFICIAL (gnu_type);
      SET_TYPE_DEBUG_TYPE (gnu_type, debug_type);
    }

  return gnu_type;
}

/* Return the dummy type that was made for GNAT_TYPE, if any.  */

tree
get_dummy_type (Entity_Id gnat_type)
{
  return GET_DUMMY_NODE (gnat_type);
}

/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
			     gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
			      gnu_fat_type, NULL_TREE, NULL_TREE, 0, 1);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
			 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 1);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_REFERENCE_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}

/* Return true if we are in the global binding level.  */

bool
global_bindings_p (void)
{
  return force_global || !current_function_decl;
}

/* Enter a new binding level.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel = ggc_alloc<gnat_binding_level> ();

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = NULL_TREE;
  BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}

/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}

/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK, don't do anything.  Otherwise, if
     there are no variables, free the block and merge its subblocks into
     those of its parent block; otherwise, add it to the list of its
     parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (!BLOCK_VARS (block))
    {
      BLOCK_SUBBLOCKS (level->chain->block)
	= block_chainon (BLOCK_SUBBLOCKS (block),
			 BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

/* Exit a binding level and discard the associated BLOCK.  */

void
gnat_zaplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_CHAIN (block) = free_block_chain;
  free_block_chain = block;

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
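
/* An illustrative sketch of how the binding level primitives above are
   meant to be paired around a scope, assuming FNDECL is the enclosing
   FUNCTION_DECL (exposition only, not additional code):

     gnat_pushlevel ();
     ... push declarations with gnat_pushdecl ...
     set_current_block_context (fndecl);
     gnat_poplevel ();  */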

/* Set the context of TYPE and its parallel types (if any) to CONTEXT.  */

static void
gnat_set_type_context (tree type, tree context)
{
  tree decl = TYPE_STUB_DECL (type);

  TYPE_CONTEXT (type) = context;

  while (decl && DECL_PARALLEL_TYPE (decl))
    {
      tree parallel_type = DECL_PARALLEL_TYPE (decl);

      /* Give a context to the parallel types and their stub decl, if any.
	 Some parallel types seem to be present in multiple parallel type
	 chains, so don't mess with their context if they already have one.  */
      if (!TYPE_CONTEXT (parallel_type))
	{
	  if (TYPE_STUB_DECL (parallel_type))
	    DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
	  TYPE_CONTEXT (parallel_type) = context;
	}

      decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
    }
}

/* Return the innermost scope, starting at GNAT_NODE, that we are interested
   in for debug info purposes, or Empty if there is no such scope.  If not
   NULL, set IS_SUBPROGRAM to whether the returned entity is a subprogram.  */

Entity_Id
get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
{
  Entity_Id gnat_entity;

  if (is_subprogram)
    *is_subprogram = false;

  if (Nkind (gnat_node) == N_Defining_Identifier
      || Nkind (gnat_node) == N_Defining_Operator_Symbol)
    gnat_entity = Scope (gnat_node);
  else
    return Empty;

  while (Present (gnat_entity))
    {
      switch (Ekind (gnat_entity))
	{
	case E_Function:
	case E_Procedure:
	  if (Present (Protected_Body_Subprogram (gnat_entity)))
	    gnat_entity = Protected_Body_Subprogram (gnat_entity);

	  /* If the scope is a subprogram, then just rely on
	     current_function_decl, so that we don't have to defer
	     anything.  This is needed because other places rely on the
	     validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes.  */
	  if (is_subprogram)
	    *is_subprogram = true;
	  return gnat_entity;

	case E_Record_Type:
	case E_Record_Subtype:
	  return gnat_entity;

	default:
	  /* By default, we are not interested in this particular scope: go to
	     the outer one.  */
	  break;
	}

      gnat_entity = Scope (gnat_entity);
    }

  return Empty;
}

/* If N is NULL, set TYPE's context to CONTEXT.  Defer this to the processing
   of N otherwise.  */

static void
defer_or_set_type_context (tree type, tree context,
			   struct deferred_decl_context_node *n)
{
  if (n)
    add_deferred_type_context (n, type);
  else
    gnat_set_type_context (type, context);
}

/* Return global_context, but create it first if need be.  */

static tree
get_global_context (void)
{
  if (!global_context)
    {
      global_context
	= build_translation_unit_decl (get_identifier (main_input_filename));
      debug_hooks->register_main_translation_unit (global_context);
    }

  return global_context;
}

/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  tree context = NULL_TREE;
  struct deferred_decl_context_node *deferred_decl_context = NULL;

  /* If explicitly asked to make DECL global or if it's an imported nested
     object, short-circuit the regular Scope-based context computation.  */
  if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
    {
      /* Rely on the GNAT scope, or fall back to the current_function_decl if
	 the GNAT scope reached the global scope, if it reached a subprogram
	 or the declaration is a subprogram or a variable (for them we skip
	 intermediate context types because the subprogram body elaboration
	 machinery and the inliner both expect a subprogram context).

	 Falling back to current_function_decl is necessary for implicit
	 subprograms created by gigi, such as the elaboration subprograms.  */
      bool context_is_subprogram = false;
      const Entity_Id gnat_scope
        = get_debug_scope (gnat_node, &context_is_subprogram);

      if (Present (gnat_scope)
	  && !context_is_subprogram
	  && TREE_CODE (decl) != FUNCTION_DECL
	  && TREE_CODE (decl) != VAR_DECL)
	/* Always assume the scope has not been elaborated, thus defer the
	   context propagation to the time its elaboration will be
	   available.  */
	deferred_decl_context
	  = add_deferred_decl_context (decl, gnat_scope, force_global);

      /* External declarations (when force_global > 0) may not be in a
	 local context.  */
      else if (current_function_decl && force_global == 0)
	context = current_function_decl;
    }

  /* If either we are forced to be in global mode or if both the GNAT scope and
     the current_function_decl did not help in determining the context, use the
     global scope.  */
  if (!deferred_decl_context && !context)
    context = get_global_context ();

  /* Functions imported in another function are not really nested.
     For really nested functions mark them initially as needing
     a static chain for uses of that flag before unnesting;
     lower_nested_functions will then recompute it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && !TREE_PUBLIC (decl)
      && context
      && (TREE_CODE (context) == FUNCTION_DECL
	  || decl_function_context (context)))
    DECL_STATIC_CHAIN (decl) = 1;

  if (!deferred_decl_context)
    DECL_CONTEXT (decl) = context;

  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node) && !renaming_from_instantiation_p (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
        && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      /* External declarations must go to the binding level they belong to.
	 This ensures that the corresponding imported entities are available
	 in the debugger at the proper time.  */
      if (DECL_EXTERNAL (decl)
	  && TREE_CODE (decl) == FUNCTION_DECL
	  && fndecl_built_in_p (decl))
	vec_safe_push (builtin_decls, decl);
      else if (global_bindings_p ())
	vec_safe_push (global_decls, decl);
      else
	{
	  DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
	  BLOCK_VARS (current_binding_level->block) = decl;
	}
    }

  /* For the declaration of a type, set its name either if it isn't already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same node, except when the names are
     both derived from source names.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      /* Array and pointer types aren't tagged types in the C sense so we need
	 to generate a typedef in DWARF for them and make sure it is preserved,
	 unless the type is artificial.  */
      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
	  && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
	      || DECL_ARTIFICIAL (decl)))
	;
      /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
	 generate the typedef in DWARF.  Also do that for fat pointer types
	 because, even though they are tagged types in the C sense, they are
	 still XUP types attached to the base array type at this point.  */
      else if (!DECL_ARTIFICIAL (decl)
	       && (TREE_CODE (t) == ARRAY_TYPE
		   || TREE_CODE (t) == POINTER_TYPE
		   || TYPE_IS_FAT_POINTER_P (t)))
	{
	  tree tt = build_variant_type_copy (t);
	  TYPE_NAME (tt) = decl;
	  defer_or_set_type_context (tt,
				     DECL_CONTEXT (decl),
				     deferred_decl_context);
	  TREE_TYPE (decl) = tt;
	  if (TYPE_NAME (t)
	      && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
	      && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
	    DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
	  else
	    DECL_ORIGINAL_TYPE (decl) = t;
	  /* Array types need to have a name so that they can be related to
	     their GNAT encodings.  */
	  if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
	    TYPE_NAME (t) = DECL_NAME (decl);
	  /* Remark the canonical fat pointer type as artificial.  */
	  if (TYPE_IS_FAT_POINTER_P (t))
	    TYPE_ARTIFICIAL (t) = 1;
	  t = NULL_TREE;
	}
      else if (TYPE_NAME (t)
	       && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
	       && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
	;
      else
	t = NULL_TREE;

      /* Propagate the name to all the variants, this is needed for the type
	 qualifiers machinery to work properly (see check_qualified_type).
	 Also propagate the context to them.  Note that it will be propagated
	 to all parallel types too thanks to gnat_set_type_context.  */
      if (t)
	for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
	  /* ??? Because of the previous kludge, we can have variants of fat
	     pointer types with different names.  */
	  if (!(TYPE_IS_FAT_POINTER_P (t)
		&& TYPE_NAME (t)
		&& TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	    {
	      TYPE_NAME (t) = decl;
	      defer_or_set_type_context (t,
					 DECL_CONTEXT (decl),
					 deferred_decl_context);
	    }
    }
}

/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  GNAT_NODE is used for the position of the
   associated TYPE_DECL.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
		    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

                    <--------- pos ---------->
                {  +------------+-------------+-----------------+
      record  =>{  |############|     ...     | field (type)    |
                {  +------------+-------------+-----------------+
		   |<-- room -->|<- voffset ->|<---- size ----->|
		   o            o
		   |            |
		   record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
			   fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
			   size_int ((align / BITS_PER_UNIT) - 1));
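
  /* For instance, assuming BITS_PER_UNIT == 8 and ALIGN == 64, a
     VBLOCK_ADDR of 0x1003 gives -0x1003 & 7 == 5, i.e. a VOFFSET of
     5 bytes, and 0x1003 + 5 == 0x1008 is indeed the next 8-byte
     boundary (illustrative values only).  */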

  /* POS = (ROOM + VOFFSET) * BITS_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
		    convert (bitsizetype,
			     size_binop (PLUS_EXPR, room_st, voffset_st)),
                    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
			     pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  SET_TYPE_ALIGN (record_type, base_align);
  TYPE_USER_ALIGN (record_type) = 1;

  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
                  size_binop (MULT_EXPR, convert (bitsizetype, size),
                              bitsize_unit_node),
		  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
		  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}

/* Return true iff the packable types are equivalent.  */

bool
packable_type_hasher::equal (packable_type_hash *t1, packable_type_hash *t2)
{
  tree type1, type2;

  if (t1->hash != t2->hash)
    return 0;

  type1 = t1->type;
  type2 = t2->type;

  /* We consider that packable types are equivalent if they have the same name,
     size, alignment, RM size and storage order.  Taking the mode into account
     is redundant since it is determined by the others.  */
  return
    TYPE_NAME (type1) == TYPE_NAME (type2)
    && TYPE_SIZE (type1) == TYPE_SIZE (type2)
    && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
    && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
    && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
}

/* Compute the hash value for the packable TYPE.  */

static hashval_t
hash_packable_type (tree type)
{
  hashval_t hashcode;

  hashcode = iterative_hash_expr (TYPE_NAME (type), 0);
  hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
  hashcode
    = iterative_hash_hashval_t (TYPE_REVERSE_STORAGE_ORDER (type), hashcode);

  return hashcode;
}

/* Look up the packable TYPE in the hash table and return its canonical version
   if it exists; otherwise, insert it into the hash table.  */

static tree
canonicalize_packable_type (tree type)
{
  const hashval_t hashcode = hash_packable_type (type);
  struct packable_type_hash in, *h, **slot;

  in.hash = hashcode;
  in.type = type;
  slot = packable_type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  h = *slot;
  if (!h)
    {
      h = ggc_alloc<packable_type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *slot = h;
    }

  return h->type;
}

/* TYPE is an ARRAY_TYPE that is being used as the type of a field in a packed
   record.  See if we can rewrite it as a type that has non-BLKmode, which we
   can pack tighter in the packed record.  If so, return the new type; if not,
   return the original type.  */

static tree
make_packable_array_type (tree type)
{
  const unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  unsigned int new_align;

  /* No point in doing anything if the size is either zero or too large for an
     integral mode, or if the type already has non-BLKmode.  */
  if (size == 0 || size > MAX_FIXED_MODE_SIZE || TYPE_MODE (type) != BLKmode)
    return type;

  /* Punt if the component type is an aggregate type for now.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (type)))
    return type;

  tree new_type = copy_type (type);

  new_size = ceil_pow2 (size);
  new_align = MIN (new_size, BIGGEST_ALIGNMENT);
  SET_TYPE_ALIGN (new_type, new_align);

  TYPE_SIZE (new_type) = bitsize_int (new_size);
  TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);

  SET_TYPE_MODE (new_type, mode_for_size (new_size, MODE_INT, 1).else_blk ());

  return new_type;
}
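
/* As an illustration of the above, with the usual BITS_PER_UNIT == 8, a
   BLKmode array of 3 characters (size 24) would be rewritten with
   new_size == 32 and a 32-bit alignment, making an integral mode (SImode)
   available for it; the values are for exposition only.  */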

/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the type of a field in a packed record if IN_RECORD is true, or as
   the component type of a packed array if IN_RECORD is false.  See if we
   can rewrite it either as a type that has non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type with at most
   MAX_ALIGN alignment if the value is non-zero.  If so, return the new
   type; if not, return the original type.  */

tree
make_packable_type (tree type, bool in_record, unsigned int max_align)
{
  const unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  const unsigned int align = TYPE_ALIGN (type);
  unsigned HOST_WIDE_INT new_size;
  unsigned int new_align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  tree new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_PACKED (new_type) = 1;
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      new_size = ceil_pow2 (size);
      new_align = MIN (new_size, BIGGEST_ALIGNMENT);
      SET_TYPE_ALIGN (new_type, new_align);
    }
  else
    {
      tree ada_size = TYPE_ADA_SIZE (type);

      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type) || !tree_fits_uhwi_p (ada_size))
	return type;

      /* Round the RM size up to a unit boundary to get the minimal size
	 for a BLKmode record.  Give up if it's already the size and we
	 don't need to lower the alignment.  */
      new_size = tree_to_uhwi (ada_size);
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
      if (new_size == size && (max_align == 0 || align <= max_align))
	return type;

      new_align = MIN (new_size & -new_size, BIGGEST_ALIGNMENT);
      if (max_align > 0 && new_align > max_align)
	new_align = max_align;
      SET_TYPE_ALIGN (new_type, MIN (align, new_align));
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  tree new_field_list = NULL_TREE;
  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      tree new_field_type = TREE_TYPE (field);
      tree new_field, new_field_size;

      if (AGGREGATE_TYPE_P (new_field_type)
	  && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
	{
	  if (RECORD_OR_UNION_TYPE_P (new_field_type)
	      && !TYPE_FAT_POINTER_P (new_field_type))
	    new_field_type
	      = make_packable_type (new_field_type, true, max_align);
	  else if (in_record
		   && max_align > 0
		   && max_align < BITS_PER_UNIT
		   && TREE_CODE (new_field_type) == ARRAY_TYPE)
	    new_field_type = make_packable_array_type (new_field_type);
	}

      /* However, for the last field in a not already packed record type
	 that is of an aggregate type, we need to use the RM size in the
	 packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (field)
	  && !TYPE_PACKED (type)
	  && RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
	  && TYPE_ADA_SIZE (new_field_type))
	new_field_size = TYPE_ADA_SIZE (new_field_type);
      else
	{
	  new_field_size = DECL_SIZE (field);

	  /* Make sure not to use too small a type for the size.  */
	  if (TYPE_MODE (new_field_type) == BLKmode)
	    new_field_type = TREE_TYPE (field);
	}

      /* This is a layout with full representation, alignment and size clauses
	 so we simply pass 0 as PACKED like gnat_to_gnu_field in this case.  */
      new_field
	= create_field_decl (DECL_NAME (field), new_field_type, new_type,
			     new_field_size, bit_position (field), 0,
			     !DECL_NONADDRESSABLE_P (field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
	DECL_QUALIFIER (new_field) = DECL_QUALIFIER (field);

      DECL_CHAIN (new_field) = new_field_list;
      new_field_list = new_field;
    }

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  finish_record_type (new_type, nreverse (new_field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    SET_TYPE_DEBUG_TYPE (new_type, TYPE_DEBUG_TYPE (type));
  else if (TYPE_STUB_DECL (type))
    SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
			    DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* Try harder to get a packable type if necessary, for example in case
     the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
		   mode_for_size_tree (TYPE_SIZE (new_type),
				       MODE_INT, 1).else_blk ());

  /* If neither mode nor size nor alignment shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size && max_align == 0)
    return type;

  /* If the packable type is named, we canonicalize it by means of the hash
     table.  This is consistent with the language semantics and ensures that
     gigi and the middle-end have a common view of these packable types.  */
  return
    TYPE_NAME (new_type) ? canonicalize_packable_type (new_type) : new_type;
}

/* Return true if TYPE has an unsigned representation.  This needs to be used
   when the representation of types whose precision is not equal to their size
   is manipulated based on the RM size.  */

static inline bool
type_unsigned_for_rm (tree type)
{
  /* This is the common case.  */
  if (TYPE_UNSIGNED (type))
    return true;

  /* See the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.  */
  if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
      && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
    return true;

  return false;
}
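
/* For example, a subtype with a constant non-negative lower bound, say
   range 0 .. 100, may well be represented with a signed base type, yet
   the predicate above treats it as unsigned for RM size purposes.  */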
1321 
1322 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
1323    If TYPE is the best type, return it.  Otherwise, make a new type.  We
1324    only support new integral and pointer types.  FOR_BIASED is true if
1325    we are making a biased type.  */
1326 
1327 tree
make_type_from_size(tree type,tree size_tree,bool for_biased)1328 make_type_from_size (tree type, tree size_tree, bool for_biased)
1329 {
1330   unsigned HOST_WIDE_INT size;
1331   bool biased_p;
1332   tree new_type;
1333 
1334   /* If size indicates an error, just return TYPE to avoid propagating
1335      the error.  Likewise if it's too large to represent.  */
1336   if (!size_tree || !tree_fits_uhwi_p (size_tree))
1337     return type;
1338 
1339   size = tree_to_uhwi (size_tree);
1340 
1341   switch (TREE_CODE (type))
1342     {
1343     case BOOLEAN_TYPE:
1344       /* Do not mess with boolean types that have foreign convention.  */
1345       if (TYPE_PRECISION (type) == 1 && TYPE_SIZE (type) == size_tree)
1346 	break;
1347 
1348       /* ... fall through ... */
1349 
1350     case INTEGER_TYPE:
1351     case ENUMERAL_TYPE:
1352       biased_p = (TREE_CODE (type) == INTEGER_TYPE
1353 		  && TYPE_BIASED_REPRESENTATION_P (type));
1354 
1355       /* Integer types with precision 0 are forbidden.  */
1356       if (size == 0)
1357 	size = 1;
1358 
1359       /* Only do something if the type is not a bit-packed array type and does
1360 	 not already have the proper size and the size is not too large.  */
1361       if (BIT_PACKED_ARRAY_TYPE_P (type)
1362 	  || (TYPE_PRECISION (type) == size && biased_p == for_biased)
1363 	  || size > (Enable_128bit_Types ? 128 : LONG_LONG_TYPE_SIZE))
1364 	break;
1365 
1366       biased_p |= for_biased;
1367 
1368       /* The type should be an unsigned type if the original type is unsigned
1369 	 or if the lower bound is constant and non-negative or if the type is
1370 	 biased, see E_Signed_Integer_Subtype case of gnat_to_gnu_entity.  */
1371       if (type_unsigned_for_rm (type) || biased_p)
1372 	new_type = make_unsigned_type (size);
1373       else
1374 	new_type = make_signed_type (size);
1375       TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
1376       SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
1377       SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
1378       /* Copy the name to show that it's essentially the same type and
1379 	 not a subrange type.  */
1380       TYPE_NAME (new_type) = TYPE_NAME (type);
1381       TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
1382       SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
1383       return new_type;
1384 
1385     case RECORD_TYPE:
1386       /* Do something if this is a fat pointer, in which case we
1387 	 may need to return the thin pointer.  */
1388       if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
1389 	{
1390 	  scalar_int_mode p_mode;
1391 	  if (!int_mode_for_size (size, 0).exists (&p_mode)
1392 	      || !targetm.valid_pointer_mode (p_mode))
1393 	    p_mode = ptr_mode;
1394 	  return
1395 	    build_pointer_type_for_mode
1396 	      (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
1397 	       p_mode, 0);
1398 	}
1399       break;
1400 
1401     case POINTER_TYPE:
1402       /* Only do something if this is a thin pointer, in which case we
1403 	 may need to return the fat pointer.  */
1404       if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
1405 	return
1406 	  build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
1407       break;
1408 
1409     default:
1410       break;
1411     }
1412 
1413   return type;
1414 }
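/* Usage sketch (assumed caller and values, for illustration only): for a
   type subject to a 10-bit size clause, gigi would do something like

     tree gnu_small = make_type_from_size (gnu_type, bitsize_int (10), false);

   and, for an integral type, get back a 10-bit signed or unsigned type with
   TYPE_RM_SIZE set to 10; unsupported cases simply return GNU_TYPE.  */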
1415 
1416 /* Return true iff the padded types are equivalent.  */
1417 
1418 bool
1419 pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
1420 {
1421   tree type1, type2;
1422 
1423   if (t1->hash != t2->hash)
1424     return false;
1425 
1426   type1 = t1->type;
1427   type2 = t2->type;
1428 
1429   /* We consider that padded types are equivalent if they pad the same type
1430      and have the same size, alignment, RM size and storage order.  Taking the
1431      mode into account is redundant since it is determined by the others.  */
1432   return
1433     TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1434     && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1435     && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1436     && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
1437     && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
1438 }
1439 
1440 /* Compute the hash value for the padded TYPE.  */
1441 
1442 static hashval_t
1443 hash_pad_type (tree type)
1444 {
1445   hashval_t hashcode;
1446 
1447   hashcode
1448     = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
1449   hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
1450   hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
1451   hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
1452   hashcode
1453     = iterative_hash_hashval_t (TYPE_REVERSE_STORAGE_ORDER (type), hashcode);
1454 
1455   return hashcode;
1456 }
1457 
1458 /* Look up the padded TYPE in the hash table and return its canonical version
1459    if it exists; otherwise, insert it into the hash table.  */
1460 
1461 static tree
1462 canonicalize_pad_type (tree type)
1463 {
1464   const hashval_t hashcode = hash_pad_type (type);
1465   struct pad_type_hash in, *h, **slot;
1466 
1467   in.hash = hashcode;
1468   in.type = type;
1469   slot = pad_type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
1470   h = *slot;
1471   if (!h)
1472     {
1473       h = ggc_alloc<pad_type_hash> ();
1474       h->hash = hashcode;
1475       h->type = type;
1476       *slot = h;
1477     }
1478 
1479   return h->type;
1480 }
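/* Note (added for illustration): this is classic hash consing.  Two padded
   types built independently but padding the same type with the same size,
   alignment, RM size and storage order hash identically (hash_pad_type) and
   compare equal (pad_type_hasher::equal), so the second call returns the
   type created by the first and the duplicate is discarded.  */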
1481 
1482 /* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
1483    if needed.  We have already verified that SIZE and ALIGN are large enough.
1484    GNAT_ENTITY is used to name the resulting record and to issue a warning.
1485    IS_COMPONENT_TYPE is true if this is being done for the component type of
1486    an array.  DEFINITION is true if this type is being defined.  SET_RM_SIZE
1487    is true if the RM size of the resulting type is to be set to SIZE too; in
1488    this case, the padded type is canonicalized before being returned.  */
1489 
1490 tree
1491 maybe_pad_type (tree type, tree size, unsigned int align,
1492 		Entity_Id gnat_entity, bool is_component_type,
1493 		bool definition, bool set_rm_size)
1494 {
1495   tree orig_size = TYPE_SIZE (type);
1496   unsigned int orig_align = TYPE_ALIGN (type);
1497   tree record, field;
1498 
1499   /* If TYPE is a padded type, see if it agrees with any size and alignment
1500      we were given.  If so, return the original type.  Otherwise, strip
1501      off the padding, since we will either be returning the inner type
1502      or repadding it.  If no size or alignment is specified, use that of
1503      the original padded type.  */
1504   if (TYPE_IS_PADDING_P (type))
1505     {
1506       if ((!size
1507 	   || operand_equal_p (round_up (size, orig_align), orig_size, 0))
1508 	  && (align == 0 || align == orig_align))
1509 	return type;
1510 
1511       if (!size)
1512 	size = orig_size;
1513       if (align == 0)
1514 	align = orig_align;
1515 
1516       type = TREE_TYPE (TYPE_FIELDS (type));
1517       orig_size = TYPE_SIZE (type);
1518       orig_align = TYPE_ALIGN (type);
1519     }
1520 
1521   /* If the size is either not being changed or is being made smaller (which
1522      is not done here and is only valid for bitfields anyway), show the size
1523      isn't changing.  Likewise, clear the alignment if it isn't being
1524      changed.  Then return if we aren't doing anything.  */
1525   if (size
1526       && (operand_equal_p (size, orig_size, 0)
1527 	  || (TREE_CODE (orig_size) == INTEGER_CST
1528 	      && tree_int_cst_lt (size, orig_size))))
1529     size = NULL_TREE;
1530 
1531   if (align == orig_align)
1532     align = 0;
1533 
1534   if (align == 0 && !size)
1535     return type;
1536 
1537   /* We used to modify the record in place in some cases, but that could
1538      generate incorrect debugging information.  So make a new record
1539      type and name.  */
1540   record = make_node (RECORD_TYPE);
1541   TYPE_PADDING_P (record) = 1;
1542 
1543   if (Present (gnat_entity))
1544     TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");
1545 
1546   SET_TYPE_ALIGN (record, align ? align : orig_align);
1547   TYPE_SIZE (record) = size ? size : orig_size;
1548   TYPE_SIZE_UNIT (record)
1549     = convert (sizetype,
1550 	       size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
1551 			   bitsize_unit_node));
1552 
1553   /* If we are changing the alignment and the input type is a record with
1554      BLKmode and a small constant size, try to make a form that has an
1555      integral mode.  This might allow the padding record to also have an
1556      integral mode, which will be much more efficient.  There is no point
1557      in doing so if a size is specified unless it is also a small constant
1558      size and it is incorrect to do so if we cannot guarantee that the mode
1559      will be naturally aligned since the field must always be addressable.
1560 
1561      ??? This might not always be a win when done for a stand-alone object:
1562      since the nominal and the effective type of the object will now have
1563      different modes, a VIEW_CONVERT_EXPR will be required for converting
1564      between them and it might be hard to overcome afterwards, including
1565      at the RTL level when the stand-alone object is accessed as a whole.  */
1566   if (align > 0
1567       && RECORD_OR_UNION_TYPE_P (type)
1568       && TYPE_MODE (type) == BLKmode
1569       && !TYPE_BY_REFERENCE_P (type)
1570       && TREE_CODE (orig_size) == INTEGER_CST
1571       && !TREE_OVERFLOW (orig_size)
1572       && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
1573       && (!size
1574 	  || (TREE_CODE (size) == INTEGER_CST
1575 	      && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
1576     {
1577       tree packable_type = make_packable_type (type, true, align);
1578       if (TYPE_MODE (packable_type) != BLKmode
1579 	  && compare_tree_int (TYPE_SIZE (packable_type), align) <= 0)
1580         type = packable_type;
1581     }
1582 
1583   /* Now create the field with the original size.  */
1584   field = create_field_decl (get_identifier ("F"), type, record, orig_size,
1585 			     bitsize_zero_node, 0, 1);
1586   DECL_INTERNAL_P (field) = 1;
1587 
1588   /* We will output additional debug info manually below.  */
1589   finish_record_type (record, field, 1, false);
1590 
1591   /* Set the RM size if requested.  */
1592   if (set_rm_size)
1593     {
1594       SET_TYPE_ADA_SIZE (record, size ? size : orig_size);
1595 
1596       /* If the padded type is complete and has constant size, we canonicalize
1597 	 it by means of the hash table.  This is consistent with the language
1598 	 semantics and ensures that gigi and the middle-end have a common view
1599 	 of these padded types.  */
1600       if (TREE_CONSTANT (TYPE_SIZE (record)))
1601 	{
1602 	  tree canonical = canonicalize_pad_type (record);
1603 	  if (canonical != record)
1604 	    {
1605 	      record = canonical;
1606 	      goto built;
1607 	    }
1608 	}
1609     }
1610 
1611   /* Make the inner type the debug type of the padded type.  */
1612   if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
1613     SET_TYPE_DEBUG_TYPE (record, maybe_debug_type (type));
1614 
1615   /* If debugging information is being written for the input type, write
1616      a record that shows what we are a subtype of and also make a variable
1617      that indicates our size, if still variable.  */
1618   if (TREE_CODE (orig_size) != INTEGER_CST
1619       && TYPE_NAME (record)
1620       && TYPE_NAME (type)
1621       && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1622 	   && DECL_IGNORED_P (TYPE_NAME (type))))
1623     {
1624       tree name = TYPE_IDENTIFIER (record);
1625       tree size_unit = TYPE_SIZE_UNIT (record);
1626 
1627       /* A variable that holds the size is required even with no encoding since
1628 	 it will be referenced by debugging information attributes.  At global
1629 	 level, we need a single variable across all translation units.  */
1630       if (size
1631 	  && TREE_CODE (size) != INTEGER_CST
1632 	  && (definition || global_bindings_p ()))
1633 	{
1634 	  /* Whether or not gnat_entity comes from source, this XVZ variable
1635 	     is a compilation artifact.  */
1636 	  size_unit
1637 	    = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1638 			      size_unit, true, global_bindings_p (),
1639 			      !definition && global_bindings_p (), false,
1640 			      false, true, true, NULL, gnat_entity);
1641 	  TYPE_SIZE_UNIT (record) = size_unit;
1642 	}
1643 
1644       /* There is no need to show what we are a subtype of when outputting as
1645 	 few encodings as possible: regular debugging information makes this
1646 	 redundant.  */
1647       if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
1648 	{
1649 	  tree marker = make_node (RECORD_TYPE);
1650 	  tree orig_name = TYPE_IDENTIFIER (type);
1651 
1652 	  TYPE_NAME (marker) = concat_name (name, "XVS");
1653 	  finish_record_type (marker,
1654 			      create_field_decl (orig_name,
1655 						 build_reference_type (type),
1656 						 marker, NULL_TREE, NULL_TREE,
1657 						 0, 0),
1658 			      0, true);
1659 	  TYPE_SIZE_UNIT (marker) = size_unit;
1660 
1661 	  add_parallel_type (record, marker);
1662 	}
1663     }
1664 
1665 built:
1666   /* If a simple size was explicitly given, maybe issue a warning.  */
1667   if (!size
1668       || TREE_CODE (size) == COND_EXPR
1669       || TREE_CODE (size) == MAX_EXPR
1670       || No (gnat_entity))
1671     return record;
1672 
1673   /* But don't do it if we are just annotating types and the type is tagged or
1674      concurrent, since these types aren't fully laid out in this mode.  */
1675   if (type_annotate_only)
1676     {
1677       Entity_Id gnat_type
1678 	= is_component_type
1679 	  ? Component_Type (gnat_entity) : Etype (gnat_entity);
1680 
1681       if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
1682 	return record;
1683     }
1684 
1685   /* Take the original size as the maximum size of the input if there was an
1686      unconstrained record involved and round it up to the specified alignment,
1687      if one was specified, but only for aggregate types.  */
1688   if (CONTAINS_PLACEHOLDER_P (orig_size))
1689     orig_size = max_size (orig_size, true);
1690 
1691   if (align && AGGREGATE_TYPE_P (type))
1692     orig_size = round_up (orig_size, align);
1693 
1694   if (!operand_equal_p (size, orig_size, 0)
1695       && !(TREE_CODE (size) == INTEGER_CST
1696 	   && TREE_CODE (orig_size) == INTEGER_CST
1697 	   && (TREE_OVERFLOW (size)
1698 	       || TREE_OVERFLOW (orig_size)
1699 	       || tree_int_cst_lt (size, orig_size))))
1700     {
1701       Node_Id gnat_error_node;
1702 
1703       /* For a packed array, post the message on the original array type.  */
1704       if (Is_Packed_Array_Impl_Type (gnat_entity))
1705 	gnat_entity = Original_Array_Type (gnat_entity);
1706 
1707       if ((Ekind (gnat_entity) == E_Component
1708 	   || Ekind (gnat_entity) == E_Discriminant)
1709 	  && Present (Component_Clause (gnat_entity)))
1710 	gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1711       else if (Has_Size_Clause (gnat_entity))
1712 	gnat_error_node = Expression (Size_Clause (gnat_entity));
1713       else if (Has_Object_Size_Clause (gnat_entity))
1714 	gnat_error_node = Expression (Object_Size_Clause (gnat_entity));
1715       else
1716 	gnat_error_node = Empty;
1717 
1718       /* Generate message only for entities that come from source, since
1719 	 if we have an entity created by expansion, the message will be
1720 	 generated for some other corresponding source entity.  */
1721       if (Comes_From_Source (gnat_entity))
1722 	{
1723 	  if (is_component_type)
1724 	    post_error_ne_tree ("component of& padded{ by ^ bits}?",
1725 				gnat_entity, gnat_entity,
1726 				size_diffop (size, orig_size));
1727 	  else if (Present (gnat_error_node))
1728 	    post_error_ne_tree ("{^ }bits of & unused?",
1729 				gnat_error_node, gnat_entity,
1730 				size_diffop (size, orig_size));
1731 	}
1732     }
1733 
1734   return record;
1735 }
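/* Usage sketch (assumed values, for illustration only): to make a 24-bit
   record type occupy 32 bits, a caller would issue something like

     gnu_type = maybe_pad_type (gnu_type, bitsize_int (32), 0, gnat_entity,
				false, true, true);

   and obtain a PAD record with a single field F of the original type at bit
   position 0, TYPE_SIZE and RM size of 32 and, if the entity comes from
   source and has a Size clause, a "bits of & unused" warning for the 8
   padding bits.  */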
1736 
1737 /* Return true if padded TYPE was built with an RM size.  */
1738 
1739 bool
1740 pad_type_has_rm_size (tree type)
1741 {
1742   /* This is required for the lookup.  */
1743   if (!TREE_CONSTANT (TYPE_SIZE (type)))
1744     return false;
1745 
1746   const hashval_t hashcode = hash_pad_type (type);
1747   struct pad_type_hash in, *h;
1748 
1749   in.hash = hashcode;
1750   in.type = type;
1751   h = pad_type_hash_table->find_with_hash (&in, hashcode);
1752 
1753   /* The types built with an RM size are the canonicalized ones.  */
1754   return h && h->type == type;
1755 }
1756 
1757 /* Return a copy of the padded TYPE but with reverse storage order.  */
1758 
1759 tree
1760 set_reverse_storage_order_on_pad_type (tree type)
1761 {
1762   if (flag_checking)
1763     {
1764       /* If the inner type is not scalar then the function does nothing.  */
1765       tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
1766       gcc_assert (!AGGREGATE_TYPE_P (inner_type)
1767 		  && !VECTOR_TYPE_P (inner_type));
1768     }
1769 
1770   /* This is required for the canonicalization.  */
1771   gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));
1772 
1773   tree field = copy_node (TYPE_FIELDS (type));
1774   type = copy_type (type);
1775   DECL_CONTEXT (field) = type;
1776   TYPE_FIELDS (type) = field;
1777   TYPE_REVERSE_STORAGE_ORDER (type) = 1;
1778   return canonicalize_pad_type (type);
1779 }
1780 
1781 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1782    If this is a multi-dimensional array type, do this recursively.
1783 
1784    OP may be
1785    - ALIAS_SET_COPY:     the new set is made a copy of the old one.
1786    - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1787    - ALIAS_SET_SUBSET:   the new set is made a subset of the old one.  */
1788 
1789 void
1790 relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1791 {
1792   /* Remove any padding from GNU_OLD_TYPE.  It doesn't matter in the case
1793      of a one-dimensional array, since the padding has the same alias set
1794      as the field type, but if it's a multi-dimensional array, we need to
1795      see the inner types.  */
1796   while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1797 	 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1798 	     || TYPE_PADDING_P (gnu_old_type)))
1799     gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1800 
1801   /* Unconstrained array types are deemed incomplete and would thus be given
1802      alias set 0.  Retrieve the underlying array type.  */
1803   if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1804     gnu_old_type
1805       = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1806   if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1807     gnu_new_type
1808       = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1809 
1810   if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1811       && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1812       && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1813     relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1814 
1815   switch (op)
1816     {
1817     case ALIAS_SET_COPY:
1818       /* The alias set shouldn't be copied between array types with different
1819 	 aliasing settings because this can break the aliasing relationship
1820 	 between the array type and its element type.  */
1821       if (flag_checking || flag_strict_aliasing)
1822 	gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1823 		      && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1824 		      && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1825 			 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1826 
1827       TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1828       break;
1829 
1830     case ALIAS_SET_SUBSET:
1831     case ALIAS_SET_SUPERSET:
1832       {
1833 	alias_set_type old_set = get_alias_set (gnu_old_type);
1834 	alias_set_type new_set = get_alias_set (gnu_new_type);
1835 
1836 	/* Do nothing if the alias sets conflict.  This ensures that we
1837 	   never call record_alias_subset several times for the same pair
1838 	   or at all for alias set 0.  */
1839 	if (!alias_sets_conflict_p (old_set, new_set))
1840 	  {
1841 	    if (op == ALIAS_SET_SUBSET)
1842 	      record_alias_subset (old_set, new_set);
1843 	    else
1844 	      record_alias_subset (new_set, old_set);
1845 	  }
1846       }
1847       break;
1848 
1849     default:
1850       gcc_unreachable ();
1851     }
1852 
1853   record_component_aliases (gnu_new_type);
1854 }
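/* Note (added for illustration): ALIAS_SET_COPY makes GNU_NEW_TYPE share
   GNU_OLD_TYPE's alias set outright, so accesses through either type
   conflict symmetrically.  ALIAS_SET_SUBSET records the new set as a subset
   of the old one via record_alias_subset, so accesses through the old type
   conflict with accesses through the new one; ALIAS_SET_SUPERSET records
   the converse relationship.  */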
1855 
1856 /* Record TYPE as a builtin type for Ada.  NAME is the name of the type.
1857    ARTIFICIAL_P is true if the type was generated by the compiler.  */
1858 
1859 void
1860 record_builtin_type (const char *name, tree type, bool artificial_p)
1861 {
1862   tree type_decl = build_decl (input_location,
1863 			       TYPE_DECL, get_identifier (name), type);
1864   DECL_ARTIFICIAL (type_decl) = artificial_p;
1865   TYPE_ARTIFICIAL (type) = artificial_p;
1866   gnat_pushdecl (type_decl, Empty);
1867 
1868   if (debug_hooks->type_decl)
1869     debug_hooks->type_decl (type_decl, false);
1870 }
1871 
1872 /* Finish constructing the character type CHAR_TYPE.
1873 
1874   In Ada character types are enumeration types and, as a consequence, are
1875   represented in the front-end by integral types holding the positions of
1876   the enumeration values as defined by the language, which means that the
1877   integral types are unsigned.
1878 
1879   Unfortunately the signedness of 'char' in C is implementation-defined
1880   and GCC even has the option -f[un]signed-char to toggle it at run time.
1881   Since GNAT's philosophy is to be compatible with C by default, to wit
1882   Interfaces.C.char is defined as a mere copy of Character, we may need
1883   to declare character types as signed types in GENERIC and generate the
1884   necessary adjustments to make them behave as unsigned types.
1885 
1886   The overall strategy is as follows: if 'char' is unsigned, do nothing;
1887   if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
1888   character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
1889   types.  The idea is to ensure that the bit pattern contained in the
1890   Esize'd objects is not changed, even though the numerical value will
1891   be interpreted differently depending on the signedness.  */
1892 
1893 void
1894 finish_character_type (tree char_type)
1895 {
1896   if (TYPE_UNSIGNED (char_type))
1897     return;
1898 
1899   /* Make a copy of a generic unsigned version since we'll modify it.  */
1900   tree unsigned_char_type
1901     = (char_type == char_type_node
1902        ? unsigned_char_type_node
1903        : copy_type (gnat_unsigned_type_for (char_type)));
1904 
1905   /* Create an unsigned version of the type and set it as debug type.  */
1906   TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
1907   TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
1908   TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
1909   SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);
1910 
1911   /* If this is a subtype, make the debug type a subtype of the debug type
1912      of the base type and convert literal RM bounds to unsigned.  */
1913   if (TREE_TYPE (char_type))
1914     {
1915       tree base_unsigned_char_type = TYPE_DEBUG_TYPE (TREE_TYPE (char_type));
1916       tree min_value = TYPE_RM_MIN_VALUE (char_type);
1917       tree max_value = TYPE_RM_MAX_VALUE (char_type);
1918 
1919       if (TREE_CODE (min_value) == INTEGER_CST)
1920 	min_value = fold_convert (base_unsigned_char_type, min_value);
1921       if (TREE_CODE (max_value) == INTEGER_CST)
1922 	max_value = fold_convert (base_unsigned_char_type, max_value);
1923 
1924       TREE_TYPE (unsigned_char_type) = base_unsigned_char_type;
1925       SET_TYPE_RM_MIN_VALUE (unsigned_char_type, min_value);
1926       SET_TYPE_RM_MAX_VALUE (unsigned_char_type, max_value);
1927     }
1928 
1929   /* Adjust the RM bounds of the original type to unsigned; that's especially
1930      important for types since they are implicit in this case.  */
1931   SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
1932   SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
1933 }
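/* Example (added for illustration): with -fsigned-char, Character is
   declared as a signed 8-bit type in GENERIC; its debug type is the
   unsigned variant and its RM bounds become 0 .. 255.  The bit pattern of
   8-bit objects is thus preserved, while operations based on the RM range
   see the unsigned position values mandated by the language.  */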
1934 
1935 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1936    finish constructing the record type as a fat pointer type.  */
1937 
1938 void
1939 finish_fat_pointer_type (tree record_type, tree field_list)
1940 {
1941   /* Make sure we can put it into a register.  */
1942   if (STRICT_ALIGNMENT)
1943     SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));
1944 
1945   /* Show what it really is.  */
1946   TYPE_FAT_POINTER_P (record_type) = 1;
1947 
1948   /* Do not emit debug info for it since the types of its fields may still be
1949      incomplete at this point.  */
1950   finish_record_type (record_type, field_list, 0, false);
1951 
1952   /* Force type_contains_placeholder_p to return true on it.  Although the
1953      PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1954      type but the representation of the unconstrained array.  */
1955   TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1956 }
1957 
1958 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1959    finish constructing the record or union type.  If REP_LEVEL is zero, this
1960    record has no representation clause and so will be entirely laid out here.
1961    If REP_LEVEL is one, this record has a representation clause and has been
1962    laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
1963    this record is derived from a parent record and thus inherits its layout;
1964    only make a pass on the fields to finalize them.  DEBUG_INFO_P is true if
1965    additional debug info needs to be output for this type.  */
1966 
1967 void
1968 finish_record_type (tree record_type, tree field_list, int rep_level,
1969 		    bool debug_info_p)
1970 {
1971   const enum tree_code orig_code = TREE_CODE (record_type);
1972   const bool had_size = TYPE_SIZE (record_type) != NULL_TREE;
1973   const bool had_size_unit = TYPE_SIZE_UNIT (record_type) != NULL_TREE;
1974   const bool had_align = TYPE_ALIGN (record_type) > 0;
1975   /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1976      out just like a UNION_TYPE, since the size will be fixed.  */
1977   const enum tree_code code
1978     = (orig_code == QUAL_UNION_TYPE && rep_level > 0 && had_size
1979        ? UNION_TYPE : orig_code);
1980   tree name = TYPE_IDENTIFIER (record_type);
1981   tree ada_size = bitsize_zero_node;
1982   tree size = bitsize_zero_node;
1983   tree field;
1984 
1985   TYPE_FIELDS (record_type) = field_list;
1986 
1987   /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
1988      generate debug info and have a parallel type.  */
1989   TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1990 
1991   /* Globally initialize the record first.  If this is a rep'ed record,
1992      that just means some initializations; otherwise, layout the record.  */
1993   if (rep_level > 0)
1994     {
1995       if (TYPE_ALIGN (record_type) < BITS_PER_UNIT)
1996 	SET_TYPE_ALIGN (record_type, BITS_PER_UNIT);
1997 
1998       if (!had_size)
1999 	TYPE_SIZE (record_type) = bitsize_zero_node;
2000 
2001       if (!had_size_unit)
2002 	TYPE_SIZE_UNIT (record_type) = size_zero_node;
2003     }
2004   else
2005     {
2006       /* Ensure there isn't a size already set.  There can be one in an error
2007 	 case where there is a rep clause but all fields have errors and
2008 	 no longer have a position.  */
2009       TYPE_SIZE (record_type) = NULL_TREE;
2010 
2011       /* Ensure we use the traditional GCC layout for bitfields when we need
2012 	 to pack the record type or have a representation clause.  The other
2013 	 possible layout (Microsoft C compiler), if available, would prevent
2014 	 efficient packing in almost all cases.  */
2015 #ifdef TARGET_MS_BITFIELD_LAYOUT
2016       if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
2017 	decl_attributes (&record_type,
2018 			 tree_cons (get_identifier ("gcc_struct"),
2019 				    NULL_TREE, NULL_TREE),
2020 			 ATTR_FLAG_TYPE_IN_PLACE);
2021 #endif
2022 
2023       layout_type (record_type);
2024     }
2025 
2026   /* At this point, the position and size of each field is known.  It was
2027      either set before entry by a rep clause, or by laying out the type above.
2028 
2029      We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
2030      to compute the Ada size; the GCC size and alignment (for rep'ed records
2031      that are not padding types); and the mode (for rep'ed records).  We also
2032      clear the DECL_BIT_FIELD indication for the cases we know have not been
2033      handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */
2034 
2035   if (code == QUAL_UNION_TYPE)
2036     field_list = nreverse (field_list);
2037 
2038   for (field = field_list; field; field = DECL_CHAIN (field))
2039     {
2040       tree type = TREE_TYPE (field);
2041       tree pos = bit_position (field);
2042       tree this_size = DECL_SIZE (field);
2043       tree this_ada_size;
2044 
2045       if (RECORD_OR_UNION_TYPE_P (type)
2046 	  && !TYPE_FAT_POINTER_P (type)
2047 	  && !TYPE_CONTAINS_TEMPLATE_P (type)
2048 	  && TYPE_ADA_SIZE (type))
2049 	this_ada_size = TYPE_ADA_SIZE (type);
2050       else
2051 	this_ada_size = this_size;
2052 
2053       const bool variant_part = (TREE_CODE (type) == QUAL_UNION_TYPE);
2054 
2055       /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
2056       if (DECL_BIT_FIELD (field)
2057 	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
2058 	{
2059 	  const unsigned int align = TYPE_ALIGN (type);
2060 
2061 	  /* In the general case, type alignment is required.  */
2062 	  if (value_factor_p (pos, align))
2063 	    {
2064 	      /* The enclosing record type must be sufficiently aligned.
2065 		 Otherwise, if no alignment was specified for it and it
2066 		 has been laid out already, bump its alignment to the
2067 		 desired one if this is compatible with its size and
2068 		 maximum alignment, if any.  */
2069 	      if (TYPE_ALIGN (record_type) >= align)
2070 		{
2071 		  SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
2072 		  DECL_BIT_FIELD (field) = 0;
2073 		}
2074 	      else if (!had_align
2075 		       && rep_level == 0
2076 		       && value_factor_p (TYPE_SIZE (record_type), align)
2077 		       && (!TYPE_MAX_ALIGN (record_type)
2078 			   || TYPE_MAX_ALIGN (record_type) >= align))
2079 		{
2080 		  SET_TYPE_ALIGN (record_type, align);
2081 		  SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
2082 		  DECL_BIT_FIELD (field) = 0;
2083 		}
2084 	    }
2085 
2086 	  /* In the non-strict alignment case, only byte alignment is required.  */
2087 	  if (!STRICT_ALIGNMENT
2088 	      && DECL_BIT_FIELD (field)
2089 	      && value_factor_p (pos, BITS_PER_UNIT))
2090 	    DECL_BIT_FIELD (field) = 0;
2091 	}
2092 
2093       /* Clear DECL_BIT_FIELD_TYPE for a variant part at offset 0, it's simply
2094 	 not supported by the DECL_BIT_FIELD_REPRESENTATIVE machinery because
2095 	 the variant part is always the last field in the list.  */
2096       if (variant_part && integer_zerop (pos))
2097 	DECL_BIT_FIELD_TYPE (field) = NULL_TREE;
2098 
2099       /* If we still have DECL_BIT_FIELD set at this point, we know that the
2100 	 field is technically not addressable.  Except that it can actually
2101 	 be addressed if it is BLKmode and happens to be properly aligned.  */
2102       if (DECL_BIT_FIELD (field)
2103 	  && !(DECL_MODE (field) == BLKmode
2104 	       && value_factor_p (pos, BITS_PER_UNIT)))
2105 	DECL_NONADDRESSABLE_P (field) = 1;
2106 
2107       /* A type must be as aligned as its most aligned field that is not
2108 	 a bit-field.  But this is already enforced by layout_type.  */
2109       if (rep_level > 0 && !DECL_BIT_FIELD (field))
2110 	SET_TYPE_ALIGN (record_type,
2111 			MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));
2112 
2113       switch (code)
2114 	{
2115 	case UNION_TYPE:
2116 	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
2117 	  size = size_binop (MAX_EXPR, size, this_size);
2118 	  break;
2119 
2120 	case QUAL_UNION_TYPE:
2121 	  ada_size
2122 	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
2123 			   this_ada_size, ada_size);
2124 	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
2125 			      this_size, size);
2126 	  break;
2127 
2128 	case RECORD_TYPE:
2129 	  /* Since we know here that all fields are sorted in order of
2130 	     increasing bit position, the size of the record is one
2131 	     higher than the ending bit of the last field processed
2132 	     unless we have a rep clause, because we might be processing
2133 	     the REP part of a record with a variant part for which the
2134 	     variant part has a rep clause but not the fixed part, in
2135 	     which case this REP part may contain overlapping fields
2136 	     and thus needs to be treated like a union type above, so
2137 	     use a MAX in that case.  Also, if this field is a variant
2138 	     part, we need to take into account the previous size in
2139 	     the case of empty variants.  */
2140 	  ada_size
2141 	    = merge_sizes (ada_size, pos, this_ada_size, rep_level > 0,
2142 			   variant_part);
2143 	  size
2144 	    = merge_sizes (size, pos, this_size, rep_level > 0, variant_part);
2145 	  break;
2146 
2147 	default:
2148 	  gcc_unreachable ();
2149 	}
2150     }
2151 
2152   if (code == QUAL_UNION_TYPE)
2153     nreverse (field_list);
2154 
2155   /* We need to set the regular sizes if REP_LEVEL is one.  */
2156   if (rep_level == 1)
2157     {
2158       /* If this is a padding record, we never want to make the size smaller
2159 	 than what was specified in it, if any.  */
2160       if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
2161 	size = TYPE_SIZE (record_type);
2162 
2163       tree size_unit = had_size_unit
2164 		       ? TYPE_SIZE_UNIT (record_type)
2165 		       : convert (sizetype,
2166 				  size_binop (CEIL_DIV_EXPR, size,
2167 					      bitsize_unit_node));
2168       const unsigned int align = TYPE_ALIGN (record_type);
2169 
2170       TYPE_SIZE (record_type) = variable_size (round_up (size, align));
2171       TYPE_SIZE_UNIT (record_type)
2172 	= variable_size (round_up (size_unit, align / BITS_PER_UNIT));
2173     }
2174 
2175   /* We need to set the Ada size if REP_LEVEL is zero or one.  */
2176   if (rep_level < 2)
2177     {
2178       /* Now set any of the values we've just computed that apply.  */
2179       if (!TYPE_FAT_POINTER_P (record_type)
2180 	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
2181 	SET_TYPE_ADA_SIZE (record_type, ada_size);
2182     }
2183 
2184   /* We need to set the mode if REP_LEVEL is one or two.  */
2185   if (rep_level > 0)
2186     {
2187       compute_record_mode (record_type);
2188       finish_bitfield_layout (record_type);
2189     }
2190 
2191   /* Reset the TYPE_MAX_ALIGN field since it's private to gigi.  */
2192   TYPE_MAX_ALIGN (record_type) = 0;
2193 
2194   if (debug_info_p)
2195     rest_of_record_type_compilation (record_type);
2196 }
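/* Example (added for illustration): for a variant part laid out as a
   QUAL_UNION_TYPE with fields F1 and F2, the size is accumulated as nested
   conditionals on the discriminant, conceptually

     size = DECL_QUALIFIER (F1) ? <size of F1> : <size of F2>

   whereas a plain UNION_TYPE simply takes the MAX of its field sizes.  */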
2197 
2198 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE.  If
2199    PARALLEL_TYPE has no context and its computation is not deferred yet, also
2200    propagate TYPE's context to PARALLEL_TYPE or defer its propagation to the
2201    moment TYPE gets a context.  */
2202 
2203 void
2204 add_parallel_type (tree type, tree parallel_type)
2205 {
2206   tree decl = TYPE_STUB_DECL (type);
2207 
2208   while (DECL_PARALLEL_TYPE (decl))
2209     decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
2210 
2211   SET_DECL_PARALLEL_TYPE (decl, parallel_type);
2212 
2213   /* If PARALLEL_TYPE already has a context, we are done.  */
2214   if (TYPE_CONTEXT (parallel_type))
2215     return;
2216 
2217   /* Otherwise, try to get one from TYPE's context.  If so, simply propagate
2218      it to PARALLEL_TYPE.  */
2219   if (TYPE_CONTEXT (type))
2220     gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
2221 
2222   /* Otherwise TYPE has no context yet.  We know it will have one thanks to
2223      gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
2224      so we have nothing to do in this case.  */
2225 }
2226 
2227 /* Return true if TYPE has a parallel type.  */
2228 
2229 static bool
2230 has_parallel_type (tree type)
2231 {
2232   tree decl = TYPE_STUB_DECL (type);
2233 
2234   return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
2235 }
2236 
2237 /* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
2238    associated with it.  It need not be invoked directly in most cases as
2239    finish_record_type takes care of doing so.  */
2240 
2241 void
2242 rest_of_record_type_compilation (tree record_type)
2243 {
2244   bool var_size = false;
2245   tree field;
2246 
2247   /* If this is a padded type, the bulk of the debug info has already been
2248      generated for the field's type.  */
2249   if (TYPE_IS_PADDING_P (record_type))
2250     return;
2251 
2252   /* If the type already has a parallel type (XVS type), then we're done.  */
2253   if (has_parallel_type (record_type))
2254     return;
2255 
2256   for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
2257     {
2258       /* We need to make an XVE/XVU record if any field has variable size,
2259 	 whether or not the record does.  For example, if we have a union,
2260 	 it may be that all fields, rounded up to the alignment, have the
2261 	 same size, in which case we'll use that size.  But the debug
2262 	 output routines (except Dwarf2) won't be able to output the fields,
2263 	 so we need to make the special record.  */
2264       if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
2265 	  /* If a field has a non-constant qualifier, the record will have
2266 	     variable size too.  */
2267 	  || (TREE_CODE (record_type) == QUAL_UNION_TYPE
2268 	      && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
2269 	{
2270 	  var_size = true;
2271 	  break;
2272 	}
2273     }
2274 
2275   /* If this record type is of variable size, make a parallel record type that
2276      will tell the debugger how the former is laid out (see exp_dbug.ads).  */
2277   if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
2278     {
2279       tree new_record_type
2280 	= make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
2281 		     ? UNION_TYPE : TREE_CODE (record_type));
2282       tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
2283       tree last_pos = bitsize_zero_node;
2284 
2285       new_name
2286 	= concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
2287 				  ? "XVU" : "XVE");
2288       TYPE_NAME (new_record_type) = new_name;
2289       SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
2290       TYPE_STUB_DECL (new_record_type)
2291 	= create_type_stub_decl (new_name, new_record_type);
2292       DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
2293 	= DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
2294       gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
2295       TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
2296       TYPE_SIZE_UNIT (new_record_type)
2297 	= size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
2298 
2299       /* Now scan all the fields, replacing each field with a new field
2300 	 corresponding to the new encoding.  */
2301       for (tree old_field = TYPE_FIELDS (record_type);
2302 	   old_field;
2303 	   old_field = DECL_CHAIN (old_field))
2304 	{
2305 	  tree field_type = TREE_TYPE (old_field);
2306 	  tree field_name = DECL_NAME (old_field);
2307 	  tree curpos = fold_bit_position (old_field);
2308 	  tree pos, new_field;
2309 	  bool var = false;
2310 	  unsigned int align = 0;
2311 
2312 	  /* See how the position was modified from the last position.
2313 
2314 	     There are two basic cases we support: a value was added
2315 	     to the last position or the last position was rounded to
2316 	     a boundary and then something was added.  Check for the
2317 	     first case first.  If not, see if there is any evidence
2318 	     of rounding.  If so, round the last position and retry.
2319 
2320 	     If this is a union, the position can be taken as zero.  */
2321 	  if (TREE_CODE (new_record_type) == UNION_TYPE)
2322 	    pos = bitsize_zero_node;
2323 	  else
2324 	    pos = compute_related_constant (curpos, last_pos);
2325 
2326 	  if (pos)
2327 	    ;
2328 	  else if (TREE_CODE (curpos) == MULT_EXPR
2329 		   && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
2330 	    {
2331 	      tree offset = TREE_OPERAND (curpos, 0);
2332 	      align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2333 	      align = scale_by_factor_of (offset, align);
2334 	      last_pos = round_up (last_pos, align);
2335 	      pos = compute_related_constant (curpos, last_pos);
2336 	    }
2337 	  else if (TREE_CODE (curpos) == PLUS_EXPR
2338 		   && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
2339 		   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
2340 		   && tree_fits_uhwi_p
2341 		      (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
2342 	    {
2343 	      tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
2344 	      unsigned HOST_WIDE_INT addend
2345 	        = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2346 	      align
2347 		= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
2348 	      align = scale_by_factor_of (offset, align);
2349 	      align = MIN (align, addend & -addend);
2350 	      last_pos = round_up (last_pos, align);
2351 	      pos = compute_related_constant (curpos, last_pos);
2352 	    }
2353 	  else
2354 	    {
2355 	      align = DECL_ALIGN (old_field);
2356 	      last_pos = round_up (last_pos, align);
2357 	      pos = compute_related_constant (curpos, last_pos);
2358 	    }
2359 
2360 	  /* See if this type is variable-sized and make a pointer type
2361 	     and indicate the indirection if so.  Beware that the debug
2362 	     back-end may adjust the position computed above according
2363 	     to the alignment of the field type, i.e. the pointer type
2364 	     in this case, if we don't preventively counter that.  */
2365 	  if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
2366 	    {
2367 	      field_type = copy_type (build_pointer_type (field_type));
2368 	      SET_TYPE_ALIGN (field_type, BITS_PER_UNIT);
2369 	      var = true;
2370 
2371 	      /* ??? Kludge to work around a bug in Workbench's debugger.  */
2372 	      if (align == 0)
2373 		{
2374 		  align = DECL_ALIGN (old_field);
2375 		  last_pos = round_up (last_pos, align);
2376 		  pos = compute_related_constant (curpos, last_pos);
2377 		}
2378 	    }
2379 
2380 	  /* If we can't compute a position, set it to zero.
2381 
2382 	     ??? We really should abort here, but it's too much work
2383 	     to get this correct for all cases.  */
2384 	  if (!pos)
2385 	    pos = bitsize_zero_node;
2386 
2387 	  /* Make a new field name, if necessary.  */
2388 	  if (var || align != 0)
2389 	    {
2390 	      char suffix[16];
2391 
2392 	      if (align != 0)
2393 		sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
2394 			 align / BITS_PER_UNIT);
2395 	      else
2396 		strcpy (suffix, "XVL");
2397 
2398 	      field_name = concat_name (field_name, suffix);
2399 	    }
2400 
2401 	  new_field
2402 	    = create_field_decl (field_name, field_type, new_record_type,
2403 				 DECL_SIZE (old_field), pos, 0, 0);
2404 	  /* The specified position is not the actual position of the field
2405 	     but the gap with the previous field, so the computation of the
2406 	     bit-field status may be incorrect.  We adjust it manually to
2407 	     avoid generating useless attributes for the field in DWARF.  */
2408 	  if (DECL_SIZE (old_field) == TYPE_SIZE (field_type)
2409 	      && value_factor_p (pos, BITS_PER_UNIT))
2410 	    {
2411 	      DECL_BIT_FIELD (new_field) = 0;
2412 	      DECL_BIT_FIELD_TYPE (new_field) = NULL_TREE;
2413 	    }
2414 	  DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
2415 	  TYPE_FIELDS (new_record_type) = new_field;
2416 
2417 	  /* If old_field is a QUAL_UNION_TYPE, take its size as being
2418 	     zero.  The only time it's not the last field of the record
2419 	     is when there are other components at fixed positions after
2420 	     it (meaning there was a rep clause for every field) and we
2421 	     want to be able to encode them.  */
2422 	  last_pos = size_binop (PLUS_EXPR, curpos,
2423 				 (TREE_CODE (TREE_TYPE (old_field))
2424 				  == QUAL_UNION_TYPE)
2425 				 ? bitsize_zero_node
2426 				 : DECL_SIZE (old_field));
2427 	}
2428 
2429       TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
2430 
2431       add_parallel_type (record_type, new_record_type);
2432     }
2433 }
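/* Example (added for illustration): given a record whose field DATA has
   dynamic size, the parallel type is named <record>XVE (XVU for a variant
   record), DATA is replaced by a pointer field named DATAXVL, and fields
   realigned to an N-byte boundary get an XVAn suffix, following the
   encodings documented in exp_dbug.ads.  */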
2434 
2435 /* Utility function of above to merge LAST_SIZE, the previous size of a record
2436    with FIRST_BIT and SIZE that describe a field.  If MAX is true, we take the
2437    MAX of the end position of this field with LAST_SIZE.  In all other cases,
2438    we use FIRST_BIT plus SIZE.  SPECIAL is true if it's for a QUAL_UNION_TYPE,
2439    in which case we must look for COND_EXPRs and replace a value of zero with
2440    the old size.  Return an expression for the size.  */
2441 
2442 static tree
2443 merge_sizes (tree last_size, tree first_bit, tree size, bool max, bool special)
2444 {
2445   tree type = TREE_TYPE (last_size);
2446   tree new_size;
2447 
2448   if (!special || TREE_CODE (size) != COND_EXPR)
2449     {
2450       new_size = size_binop (PLUS_EXPR, first_bit, size);
2451       if (max)
2452 	new_size = size_binop (MAX_EXPR, last_size, new_size);
2453     }
2454 
2455   else
2456     new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2457 			    integer_zerop (TREE_OPERAND (size, 1))
2458 			    ? last_size : merge_sizes (last_size, first_bit,
2459 						       TREE_OPERAND (size, 1),
2460 						       max, special),
2461 			    integer_zerop (TREE_OPERAND (size, 2))
2462 			    ? last_size : merge_sizes (last_size, first_bit,
2463 						       TREE_OPERAND (size, 2),
2464 						       max, special));
2465 
2466   /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
2467      when fed through SUBSTITUTE_IN_EXPR) into thinking that a constant
2468      size is not constant.  */
2469   while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2470     new_size = TREE_OPERAND (new_size, 0);
2471 
2472   return new_size;
2473 }
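/* Worked example (added for illustration): a field of size 24 starting at
   bit 8 of a record whose size so far is 8 yields
   merge_sizes (8, 8, 24, false, false) = 8 + 24 = 32 bits; with MAX true
   (rep'ed record), possibly overlapping fields contribute the MAX of their
   ending bit and the previous size instead.  */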
2474 
2475 /* Convert the size expression EXPR to TYPE and fold the result.  */
2476 
2477 static tree
2478 fold_convert_size (tree type, tree expr)
2479 {
2480   /* We assume that size expressions do not wrap around.  */
2481   if (TREE_CODE (expr) == MULT_EXPR || TREE_CODE (expr) == PLUS_EXPR)
2482     return size_binop (TREE_CODE (expr),
2483 		       fold_convert_size (type, TREE_OPERAND (expr, 0)),
2484 		       fold_convert_size (type, TREE_OPERAND (expr, 1)));
2485 
2486   return fold_convert (type, expr);
2487 }
2488 
2489 /* Return the bit position of FIELD, in bits from the start of the record,
2490    and fold it as much as possible.  This is a tree of type bitsizetype.  */
2491 
2492 static tree
2493 fold_bit_position (const_tree field)
2494 {
2495   tree offset = fold_convert_size (bitsizetype, DECL_FIELD_OFFSET (field));
2496   return size_binop (PLUS_EXPR, DECL_FIELD_BIT_OFFSET (field),
2497  		     size_binop (MULT_EXPR, offset, bitsize_unit_node));
2498 }
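/* Worked example (added for illustration): for a field with a
   DECL_FIELD_OFFSET of 2 bytes and a DECL_FIELD_BIT_OFFSET of 3, the result
   is 3 + 2 * 8 = 19 bits from the start of the record, folded to an
   INTEGER_CST of bitsizetype because both operands are constant.  */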
2499 
2500 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2501    related by the addition of a constant.  Return that constant if so.  */
2502 
2503 static tree
2504 compute_related_constant (tree op0, tree op1)
2505 {
2506   tree factor, op0_var, op1_var, op0_cst, op1_cst, result;
2507 
2508   if (TREE_CODE (op0) == MULT_EXPR
2509       && TREE_CODE (op1) == MULT_EXPR
2510       && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
2511       && TREE_OPERAND (op1, 1) == TREE_OPERAND (op0, 1))
2512     {
2513       factor = TREE_OPERAND (op0, 1);
2514       op0 = TREE_OPERAND (op0, 0);
2515       op1 = TREE_OPERAND (op1, 0);
2516     }
2517   else
2518     factor = NULL_TREE;
2519 
2520   op0_cst = split_plus (op0, &op0_var);
2521   op1_cst = split_plus (op1, &op1_var);
2522   result = size_binop (MINUS_EXPR, op0_cst, op1_cst);
2523 
2524   if (operand_equal_p (op0_var, op1_var, 0))
2525     return factor ? size_binop (MULT_EXPR, factor, result) : result;
2526 
2527   return NULL_TREE;
2528 }
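/* Worked example (added for illustration): if OP0 is `N + 16' and OP1 is
   `N + 8', split_plus yields the common variable part N with constant parts
   16 and 8, so the function returns 8.  If the variable parts differ, e.g.
   `N + 16' vs `M + 8', NULL_TREE is returned.  */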
2529 
2530 /* Utility function of above to split a tree OP which may be a sum, into a
2531    constant part, which is returned, and a variable part, which is stored
2532    in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
2533    bitsizetype.  */
2534 
2535 static tree
2536 split_plus (tree in, tree *pvar)
2537 {
2538   /* Strip conversions in order to ease the tree traversal and maximize the
2539      potential for constant or plus/minus discovery.  We need to be careful
2540      to always return and set *pvar to bitsizetype trees, but it's worth
2541      the effort.  */
2542   in = remove_conversions (in, false);
2543 
2544   *pvar = convert (bitsizetype, in);
2545 
2546   if (TREE_CODE (in) == INTEGER_CST)
2547     {
2548       *pvar = bitsize_zero_node;
2549       return convert (bitsizetype, in);
2550     }
2551   else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2552     {
2553       tree lhs_var, rhs_var;
2554       tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2555       tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2556 
2557       if (lhs_var == TREE_OPERAND (in, 0)
2558 	  && rhs_var == TREE_OPERAND (in, 1))
2559 	return bitsize_zero_node;
2560 
2561       *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2562       return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2563     }
2564   else
2565     return bitsize_zero_node;
2566 }
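/* Worked example (added for illustration): for IN = `(A + 8) - B', the
   constant part 8 is returned and *PVAR is set to `A - B'; for a plain
   INTEGER_CST, the whole value is returned and *PVAR is set to
   bitsize_zero_node.  */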
2567 
2568 /* Return a copy of TYPE but safe to modify in any way.  */
2569 
2570 tree
2571 copy_type (tree type)
2572 {
2573   tree new_type = copy_node (type);
2574 
2575   /* Unshare the language-specific data.  */
2576   if (TYPE_LANG_SPECIFIC (type))
2577     {
2578       TYPE_LANG_SPECIFIC (new_type) = NULL;
2579       SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2580     }
2581 
2582   /* And the contents of the language-specific slot if needed.  */
2583   if ((INTEGRAL_TYPE_P (type) || SCALAR_FLOAT_TYPE_P (type))
2584       && TYPE_RM_VALUES (type))
2585     {
2586       TYPE_RM_VALUES (new_type) = NULL_TREE;
2587       SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2588       SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2589       SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2590     }
2591 
2592   /* copy_node clears this field instead of copying it, because it is
2593      aliased with TREE_CHAIN.  */
2594   TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2595 
2596   TYPE_POINTER_TO (new_type) = NULL_TREE;
2597   TYPE_REFERENCE_TO (new_type) = NULL_TREE;
2598   TYPE_MAIN_VARIANT (new_type) = new_type;
2599   TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
2600   TYPE_CANONICAL (new_type) = new_type;
2601 
2602   return new_type;
2603 }
2604 
2605 /* Return a subtype of sizetype with range MIN to MAX and whose
2606    TYPE_INDEX_TYPE is INDEX.  GNAT_NODE is used for the position
2607    of the associated TYPE_DECL.  */
2608 
2609 tree
2610 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2611 {
2612   /* First build a type for the desired range.  */
2613   tree type = build_nonshared_range_type (sizetype, min, max);
2614 
2615   /* Then set the index type.  */
2616   SET_TYPE_INDEX_TYPE (type, index);
2617   create_type_decl (NULL_TREE, type, true, false, gnat_node);
2618 
2619   return type;
2620 }
2621 
2622 /* Return a subtype of TYPE with range MIN to MAX.  If TYPE is NULL,
2623    sizetype is used.  */
2624 
2625 tree
2626 create_range_type (tree type, tree min, tree max)
2627 {
2628   tree range_type;
2629 
2630   if (!type)
2631     type = sizetype;
2632 
2633   /* First build a type with the base range.  */
2634   range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2635 						 TYPE_MAX_VALUE (type));
2636 
2637   /* Then set the actual range.  */
2638   SET_TYPE_RM_MIN_VALUE (range_type, min);
2639   SET_TYPE_RM_MAX_VALUE (range_type, max);
2640 
2641   return range_type;
2642 }
2643 
2644 /* Return an extra subtype of TYPE with range MIN to MAX.  */
2645 
2646 tree
2647 create_extra_subtype (tree type, tree min, tree max)
2648 {
2649   const bool uns = TYPE_UNSIGNED (type);
2650   const unsigned prec = TYPE_PRECISION (type);
2651   tree subtype = uns ? make_unsigned_type (prec) : make_signed_type (prec);
2652 
2653   TREE_TYPE (subtype) = type;
2654   TYPE_EXTRA_SUBTYPE_P (subtype) = 1;
2655 
2656   SET_TYPE_RM_MIN_VALUE (subtype, min);
2657   SET_TYPE_RM_MAX_VALUE (subtype, max);
2658 
2659   return subtype;
2660 }
2661 
2662 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2663    NAME gives the name of the type to be used in the declaration.  */
2664 
2665 tree
2666 create_type_stub_decl (tree name, tree type)
2667 {
2668   tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
2669   DECL_ARTIFICIAL (type_decl) = 1;
2670   TYPE_ARTIFICIAL (type) = 1;
2671   return type_decl;
2672 }
2673 
2674 /* Return a TYPE_DECL node for TYPE.  NAME gives the name of the type to be
2675    used in the declaration.  ARTIFICIAL_P is true if the declaration was
2676    generated by the compiler.  DEBUG_INFO_P is true if we need to write
2677    debug information about this type.  GNAT_NODE is used for the position
2678    of the decl.  */
2679 
2680 tree
2681 create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
2682 		  Node_Id gnat_node)
2683 {
2684   enum tree_code code = TREE_CODE (type);
2685   bool is_named
2686     = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2687   tree type_decl;
2688 
2689   /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
2690   gcc_assert (!TYPE_IS_DUMMY_P (type));
2691 
2692   /* If the type hasn't been named yet, we're naming it; preserve an existing
2693      TYPE_STUB_DECL that has been attached to it for some purpose.  */
2694   if (!is_named && TYPE_STUB_DECL (type))
2695     {
2696       type_decl = TYPE_STUB_DECL (type);
2697       DECL_NAME (type_decl) = name;
2698     }
2699   else
2700     type_decl = build_decl (input_location, TYPE_DECL, name, type);
2701 
2702   DECL_ARTIFICIAL (type_decl) = artificial_p;
2703   TYPE_ARTIFICIAL (type) = artificial_p;
2704 
2705   /* Add this decl to the current binding level.  */
2706   gnat_pushdecl (type_decl, gnat_node);
2707 
2708   /* If we're naming the type, equate the TYPE_STUB_DECL to the name.  This
2709      causes the name to be also viewed as a "tag" by the debug back-end, with
2710      the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2711      types in DWARF.
2712 
2713      Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2714      from multiple contexts, and "type_decl" references a copy of it: in such a
2715      case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2716      with the mechanism above.  */
2717   if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
2718     TYPE_STUB_DECL (type) = type_decl;
2719 
2720   /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2721      back-end doesn't support, and for others if we don't need to.  */
2722   if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2723     DECL_IGNORED_P (type_decl) = 1;
2724 
2725   return type_decl;
2726 }
2727 
2728 /* Return a VAR_DECL or CONST_DECL node.
2729 
2730    NAME gives the name of the variable.  ASM_NAME is its assembler name
2731    (if provided).  TYPE is its data type (a GCC ..._TYPE node).  INIT is
2732    the GCC tree for an optional initial expression; NULL_TREE if none.
2733 
2734    CONST_FLAG is true if this variable is constant, in which case we might
2735    return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2736 
2737    PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2738    definition to be made visible outside of the current compilation unit, for
2739    instance variable definitions in a package specification.
2740 
2741    EXTERN_FLAG is true when processing an external variable declaration (as
2742    opposed to a definition: no storage is to be allocated for the variable).
2743 
2744    STATIC_FLAG is only relevant when not at top level and indicates whether
2745    to always allocate storage to the variable.
2746 
2747    VOLATILE_FLAG is true if this variable is declared as volatile.
2748 
2749    ARTIFICIAL_P is true if the variable was generated by the compiler.
2750 
2751    DEBUG_INFO_P is true if we need to write debug information for it.
2752 
2753    ATTR_LIST is the list of attributes to be attached to the variable.
2754 
2755    GNAT_NODE is used for the position of the decl.  */
2756 
2757 tree
create_var_decl(tree name,tree asm_name,tree type,tree init,bool const_flag,bool public_flag,bool extern_flag,bool static_flag,bool volatile_flag,bool artificial_p,bool debug_info_p,struct attrib * attr_list,Node_Id gnat_node,bool const_decl_allowed_p)2758 create_var_decl (tree name, tree asm_name, tree type, tree init,
2759 		 bool const_flag, bool public_flag, bool extern_flag,
2760 		 bool static_flag, bool volatile_flag, bool artificial_p,
2761 		 bool debug_info_p, struct attrib *attr_list,
2762 		 Node_Id gnat_node, bool const_decl_allowed_p)
2763 {
2764   /* Whether the object has static storage duration, either explicitly or by
2765      virtue of being declared at the global level.  */
2766   const bool static_storage = static_flag || global_bindings_p ();
2767 
2768   /* Whether the initializer is constant: for an external object or an object
2769      with static storage duration, we check that the initializer is a valid
2770      constant expression for initializing a static variable; otherwise, we
2771      only check that it is constant.  */
2772   const bool init_const
2773     = (init
2774        && gnat_types_compatible_p (type, TREE_TYPE (init))
2775        && (extern_flag || static_storage
2776 	   ? initializer_constant_valid_p (init, TREE_TYPE (init))
2777 	     != NULL_TREE
2778 	   : TREE_CONSTANT (init)));
2779 
2780   /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2781      case the initializer may be used in lieu of the DECL node (as done in
2782      Identifier_to_gnu).  This is useful to prevent the need of elaboration
2783      code when an identifier for which such a DECL is made is in turn used
2784      as an initializer.  We used to rely on CONST_DECL vs VAR_DECL for this,
2785      but extra constraints apply to this choice (see below) and they are not
2786      relevant to the distinction we wish to make.  */
2787   const bool constant_p = const_flag && init_const;
2788 
2789   /* The actual DECL node.  CONST_DECL was initially intended for enumerals
2790      and may be used for scalars in general but not for aggregates.  */
2791   tree var_decl
2792     = build_decl (input_location,
2793 		  (constant_p
2794 		   && const_decl_allowed_p
2795 		   && !AGGREGATE_TYPE_P (type) ? CONST_DECL : VAR_DECL),
2796 		  name, type);
2797 
2798   /* Detect constants created by the front-end to hold 'reference to function
2799      calls for stabilization purposes.  This is needed for renaming.  */
2800   if (const_flag && init && POINTER_TYPE_P (type))
2801     {
2802       tree inner = init;
2803       if (TREE_CODE (inner) == COMPOUND_EXPR)
2804 	inner = TREE_OPERAND (inner, 1);
2805       inner = remove_conversions (inner, true);
2806       if (TREE_CODE (inner) == ADDR_EXPR
2807 	  && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
2808 	       && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
2809 	      || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
2810 		  && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
2811 	DECL_RETURN_VALUE_P (var_decl) = 1;
2812     }
2813 
2814   /* If this is external, throw away any initializations (they will be done
2815      elsewhere) unless this is a constant for which we would like to remain
2816      able to get the initializer.  If we are defining a global here, leave a
2817      constant initialization and save any variable elaborations for the
2818      elaboration routine.  If we are just annotating types, throw away the
2819      initialization if it isn't a constant.  */
2820   if ((extern_flag && !constant_p)
2821       || (type_annotate_only && init && !TREE_CONSTANT (init)))
2822     init = NULL_TREE;
2823 
2824   /* At the global level, a non-constant initializer generates elaboration
2825      statements.  Check that such statements are allowed, that is to say,
2826      not violating a No_Elaboration_Code restriction.  */
2827   if (init && !init_const && global_bindings_p ())
2828     Check_Elaboration_Code_Allowed (gnat_node);
2829 
2830   /* Attach the initializer, if any.  */
2831   DECL_INITIAL (var_decl) = init;
2832 
2833   /* Directly set some flags.  */
2834   DECL_ARTIFICIAL (var_decl) = artificial_p;
2835   DECL_EXTERNAL (var_decl) = extern_flag;
2836 
2837   TREE_CONSTANT (var_decl) = constant_p;
2838   TREE_READONLY (var_decl) = const_flag;
2839 
2840   /* The object is public if it is external or if it is declared public
2841      and has static storage duration.  */
2842   TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2843 
2844   /* We need to allocate static storage for an object with static storage
2845      duration if it isn't external.  */
2846   TREE_STATIC (var_decl) = !extern_flag && static_storage;
2847 
2848   TREE_SIDE_EFFECTS (var_decl)
2849     = TREE_THIS_VOLATILE (var_decl)
2850     = TYPE_VOLATILE (type) | volatile_flag;
2851 
2852   if (TREE_SIDE_EFFECTS (var_decl))
2853     TREE_ADDRESSABLE (var_decl) = 1;
2854 
2855   /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2856      try to fiddle with DECL_COMMON.  However, on platforms that don't
2857      support global BSS sections, uninitialized global variables would
2858      go in DATA instead, thus increasing the size of the executable.  */
2859   if (!flag_no_common
2860       && TREE_CODE (var_decl) == VAR_DECL
2861       && TREE_PUBLIC (var_decl)
2862       && !have_global_bss_p ())
2863     DECL_COMMON (var_decl) = 1;
2864 
2865   /* Do not emit debug info if not requested, or for an external constant whose
2866      initializer is not absolute because this would require a global relocation
2867      in a read-only section which runs afoul of the PE-COFF run-time relocation
2868      mechanism.  */
2869   if (!debug_info_p
2870       || (extern_flag
2871 	  && constant_p
2872 	  && init
2873 	  && initializer_constant_valid_p (init, TREE_TYPE (init))
2874 	     != null_pointer_node))
2875     DECL_IGNORED_P (var_decl) = 1;
2876 
2877   /* ??? Some attributes cannot be applied to CONST_DECLs.  */
2878   if (TREE_CODE (var_decl) == VAR_DECL)
2879     process_attributes (&var_decl, &attr_list, true, gnat_node);
2880 
2881   /* Add this decl to the current binding level.  */
2882   gnat_pushdecl (var_decl, gnat_node);
2883 
2884   if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
2885     {
2886       /* Let the target mangle the name if this isn't a verbatim asm.  */
2887       if (*IDENTIFIER_POINTER (asm_name) != '*')
2888 	asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);
2889 
2890       SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2891     }
2892 
2893   return var_decl;
2894 }
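
/* Hedged, illustrative distillation of the decision above (this helper is
   not part of the compiler proper and is not called anywhere): the DECL
   code produced by create_var_decl depends only on the constness of the
   object and of its initializer, on CONST_DECL_ALLOWED_P and on whether
   the type is an aggregate.  */

static enum tree_code ATTRIBUTE_UNUSED
example_object_decl_code (bool const_flag, bool init_const,
			  bool const_decl_allowed_p, bool aggregate_p)
{
  /* Mirrors the constant_p computation and the build_decl call above.  */
  const bool constant_p = const_flag && init_const;
  return (constant_p && const_decl_allowed_p && !aggregate_p)
	 ? CONST_DECL : VAR_DECL;
}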

/* Return true if TYPE, an aggregate type, contains (or is) an array.
   If SELF_REFERENTIAL is true, then an additional requirement on the
   array is that it be self-referential.  */

bool
aggregate_type_contains_array_p (tree type, bool self_referential)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (AGGREGATE_TYPE_P (TREE_TYPE (field))
	      && aggregate_type_contains_array_p (TREE_TYPE (field),
						  self_referential))
	    return true;
	return false;
      }

    case ARRAY_TYPE:
      return self_referential ? type_contains_placeholder_p (type) : true;

    default:
      gcc_unreachable ();
    }
}

/* Return true if TYPE is a type with variable size or a padding type with a
   field of variable size or a record that has a field with such a type.  */

static bool
type_has_variable_size (tree type)
{
  tree field;

  if (!TREE_CONSTANT (TYPE_SIZE (type)))
    return true;

  if (TYPE_IS_PADDING_P (type)
      && !TREE_CONSTANT (DECL_SIZE (TYPE_FIELDS (type))))
    return true;

  if (!RECORD_OR_UNION_TYPE_P (type))
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (type_has_variable_size (TREE_TYPE (field)))
      return true;

  return false;
}

/* Return a FIELD_DECL node.  NAME is the field's name, TYPE is its type and
   RECORD_TYPE is the type of the enclosing record.  If SIZE is nonzero, it
   is the specified size of the field.  If POS is nonzero, it is the bit
   position.  PACKED is 1 if the enclosing record is packed, -1 if it has
   Component_Alignment of Storage_Unit.  If ADDRESSABLE is nonzero, it
   means we are allowed to take the address of the field; if it is negative,
   we should not make a bitfield, which is used by make_aligning_type.  */

tree
create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
		   int packed, int addressable)
{
  tree field_decl = build_decl (input_location, FIELD_DECL, name, type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (type);

  /* If a size is specified, use it.  Otherwise, if the record type is packed,
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (type);
      if (TYPE_MODE (type) == BLKmode)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* If ADDRESSABLE allows it, make the field a bitfield when a size is
     specified, for two reasons: first, when it differs from the natural
     size; second, when the alignment is insufficient.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
      && (packed
	  || !tree_int_cst_equal (size, TYPE_SIZE (type))
	  || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
	  || (TYPE_ALIGN (record_type)
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
	{
	  if (TYPE_ALIGN (record_type)
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
	    SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
	  else
	    SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
	}
    }

  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* If the field's type has BLKmode, we must ensure the field is aligned to
     at least a byte boundary since GCC cannot handle less aligned BLKmode
     bitfields.  Likewise if it has a variable size and no specified position,
     because variable-sized objects need to be aligned to at least a byte
     boundary.  Likewise for an aggregate without specified position that
     contains an array because, in this case, slices of variable length of
     this array must be handled by GCC and have variable size.  */
  if (packed && (TYPE_MODE (type) == BLKmode
		 || (!pos && type_has_variable_size (type))
		 || (!pos
		     && AGGREGATE_TYPE_P (type)
		     && aggregate_type_contains_array_p (type, false))))
    SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);

  /* Bump the alignment if need be, either for bitfield/packing purposes or
     to satisfy the type requirements if no such considerations apply.  When
     we get the alignment from the type, indicate if this is from an explicit
     user request, which prevents stor-layout from lowering it later on.  */
  else
    {
      const unsigned int field_align
	= DECL_BIT_FIELD (field_decl)
	  ? 1
	  : packed
	    ? BITS_PER_UNIT
	    : 0;

      if (field_align > DECL_ALIGN (field_decl))
	SET_DECL_ALIGN (field_decl, field_align);
      else if (!field_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
	{
	  SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
	  DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
	}
    }

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (tree_fits_uhwi_p (pos))
	known_align = tree_to_uhwi (pos) & -tree_to_uhwi (pos);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     tree_fits_uhwi_p (pos)
			     ? BIGGEST_ALIGNMENT : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!addressable && !type_for_nonaliased_component_p (type))
    addressable = 1;

  /* Note that there is a trade-off in making a field nonaddressable because
     this will cause type-based alias analysis to use the same alias set for
     accesses to the field as for accesses to the whole record: while doing
     so will make it more likely to disambiguate accesses to other objects
     and accesses to the field, it will make it less likely to disambiguate
     accesses to the other fields of the record and accesses to the field.
     If the record is fully static, then the trade-off is irrelevant since
     the fields of the record can always be disambiguated by their offsets
     but, if the record is dynamic, then it can become problematic.  */
  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
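
/* Hedged illustration (not used by gigi) of the position-based alignment
   computed for layout_decl above: the alignment guaranteed by a constant
   bit position is its lowest-order set bit, extracted with the usual
   two's-complement idiom.  For instance, a field laid out at bit 24 is
   known to be aligned on an 8-bit boundary, since 24 & -24 == 8.  */

static unsigned int ATTRIBUTE_UNUSED
example_known_align_from_pos (unsigned HOST_WIDE_INT pos)
{
  /* A result of 0 (for POS == 0) means "infinite" alignment, with the
     same convention as in create_field_decl.  */
  return (unsigned int) (pos & -pos);
}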

/* Return a PARM_DECL node with NAME and TYPE.  */

tree
create_param_decl (tree name, tree type)
{
  tree param_decl = build_decl (input_location, PARM_DECL, name, type);

  /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
     can lead to various ABI violations.  */
  if (targetm.calls.promote_prototypes (NULL_TREE)
      && INTEGRAL_TYPE_P (type)
      && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
    {
      /* We have to be careful about biased types here.  Make a subtype
	 of integer_type_node with the proper biasing.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  tree subtype
	    = make_unsigned_type (TYPE_PRECISION (integer_type_node));
	  TREE_TYPE (subtype) = integer_type_node;
	  TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
	  SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
	  SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
	  type = subtype;
	}
      else
	type = integer_type_node;
    }

  DECL_ARG_TYPE (param_decl) = type;
  return param_decl;
}
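
/* For illustration (hedged sketch, not called anywhere): the exact
   promotion test used above.  When it holds, the PARM_DECL keeps its
   nominal TREE_TYPE but its DECL_ARG_TYPE is widened, mirroring C's
   default argument promotions.  */

static bool ATTRIBUTE_UNUSED
example_param_needs_promotion (tree type)
{
  return targetm.calls.promote_prototypes (NULL_TREE)
	 && INTEGRAL_TYPE_P (type)
	 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node);
}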

/* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
   a TYPE.  If IN_PLACE is true, the tree pointed to by NODE should not be
   changed.  GNAT_NODE is used for the position of error messages.  */

void
process_attributes (tree *node, struct attrib **attr_list, bool in_place,
		    Node_Id gnat_node)
{
  struct attrib *attr;

  for (attr = *attr_list; attr; attr = attr->next)
    switch (attr->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	Sloc_to_locus (Sloc (gnat_node), &input_location);
	decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
			 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
	break;

      case ATTR_LINK_ALIAS:
	if (!DECL_EXTERNAL (*node))
	  {
	    TREE_STATIC (*node) = 1;
	    assemble_alias (*node, attr->name);
	  }
	break;

      case ATTR_WEAK_EXTERNAL:
	if (SUPPORTS_WEAK)
	  declare_weak (*node);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr->error_point);
	break;

      case ATTR_LINK_SECTION:
	if (targetm_common.have_named_sections)
	  {
	    set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
	    DECL_COMMON (*node) = 0;
	  }
	else
	  post_error ("?section attributes are not supported for this target",
		      attr->error_point);
	break;

      case ATTR_LINK_CONSTRUCTOR:
	DECL_STATIC_CONSTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;
	break;

      case ATTR_LINK_DESTRUCTOR:
	DECL_STATIC_DESTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;
	break;

      case ATTR_THREAD_LOCAL_STORAGE:
	set_decl_tls_model (*node, decl_default_tls_model (*node));
	DECL_COMMON (*node) = 0;
	break;
      }

  *attr_list = NULL;
}
/* Return true if VALUE is known to be a multiple of FACTOR, which must be
   a power of 2.  */

bool
value_factor_p (tree value, unsigned HOST_WIDE_INT factor)
{
  gcc_checking_assert (pow2p_hwi (factor));

  if (tree_fits_uhwi_p (value))
    return (tree_to_uhwi (value) & (factor - 1)) == 0;

  if (TREE_CODE (value) == MULT_EXPR)
    return (value_factor_p (TREE_OPERAND (value, 0), factor)
	    || value_factor_p (TREE_OPERAND (value, 1), factor));

  return false;
}
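
/* Hedged usage sketch: with constants built via bitsize_int, a literal
   multiple of the (power-of-2) factor is accepted and a non-multiple is
   rejected; for a MULT_EXPR it is enough that one operand be a multiple.  */

static void ATTRIBUTE_UNUSED
example_value_factor_p (void)
{
  gcc_checking_assert (value_factor_p (bitsize_int (96), 32));
  gcc_checking_assert (!value_factor_p (bitsize_int (20), 8));
}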

/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  */

static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
{
  struct deferred_decl_context_node *new_node;

  new_node
    = (struct deferred_decl_context_node *) xmalloc (sizeof (*new_node));
  new_node->decl = decl;
  new_node->gnat_scope = gnat_scope;
  new_node->force_global = force_global;
  new_node->types.create (1);
  new_node->next = deferred_decl_context_queue;
  deferred_decl_context_queue = new_node;
  return new_node;
}

/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */

static void
add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
{
  n->types.safe_push (type);
}

/* Get the GENERIC node corresponding to GNAT_SCOPE, if available.  Return
   NULL_TREE if it is not available.  */

static tree
compute_deferred_decl_context (Entity_Id gnat_scope)
{
  tree context;

  if (present_gnu_tree (gnat_scope))
    context = get_gnu_tree (gnat_scope);
  else
    return NULL_TREE;

  if (TREE_CODE (context) == TYPE_DECL)
    {
      tree context_type = TREE_TYPE (context);

      /* Skip dummy types: only the final ones can appear in the context
	 chain.  */
      if (TYPE_DUMMY_P (context_type))
	return NULL_TREE;

      /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
	 chain.  */
      else
	context = context_type;
    }

  return context;
}

/* Try to process all deferred nodes in the queue.  Keep in the queue the ones
   that cannot be processed yet, remove the other ones.  If FORCE is true,
   force the processing for all nodes, use the global context when nodes don't
   have a GNU translation.  */

void
process_deferred_decl_context (bool force)
{
  struct deferred_decl_context_node **it = &deferred_decl_context_queue;
  struct deferred_decl_context_node *node;

  while (*it)
    {
      bool processed = false;
      tree context = NULL_TREE;
      Entity_Id gnat_scope;

      node = *it;

      /* If FORCE, get the innermost elaborated scope.  Otherwise, just try to
	 get the first scope.  */
      gnat_scope = node->gnat_scope;
      while (Present (gnat_scope))
	{
	  context = compute_deferred_decl_context (gnat_scope);
	  if (!force || context)
	    break;
	  gnat_scope = get_debug_scope (gnat_scope, NULL);
	}

      /* Imported declarations must not be in a local context (i.e. not inside
	 a function).  */
      if (context && node->force_global > 0)
	{
	  tree ctx = context;

	  while (ctx)
	    {
	      gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
	      ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
	    }
	}

      /* If FORCE, we want to get rid of all nodes in the queue: in case there
	 was no elaborated scope, use the global context.  */
      if (force && !context)
	context = get_global_context ();

      if (context)
	{
	  tree t;
	  int i;

	  DECL_CONTEXT (node->decl) = context;

	  /* Propagate it to the TYPE_CONTEXT attributes of the requested
	     ..._TYPE nodes.  */
	  FOR_EACH_VEC_ELT (node->types, i, t)
	    {
	      gnat_set_type_context (t, context);
	    }
	  processed = true;
	}

      /* If this node has been successfully processed, remove it from the
	 queue.  Then move to the next node.  */
      if (processed)
	{
	  *it = node->next;
	  node->types.release ();
	  free (node);
	}
      else
	it = &node->next;
    }
}

/* Return VALUE scaled by the biggest power-of-2 factor of EXPR.  */

static unsigned int
scale_by_factor_of (tree expr, unsigned int value)
{
  unsigned HOST_WIDE_INT addend = 0;
  unsigned HOST_WIDE_INT factor = 1;

  /* Peel conversions around EXPR and try to extract bodies from function
     calls: it is possible to get the scale factor from size functions.  */
  expr = remove_conversions (expr, true);
  if (TREE_CODE (expr) == CALL_EXPR)
    expr = maybe_inline_call_in_expr (expr);

  /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
     multiple of the scale factor we are looking for.  */
  if (TREE_CODE (expr) == PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
      && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
    {
      addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
      expr = TREE_OPERAND (expr, 0);
    }

  /* An expression which is a bitwise AND with a mask has a power-of-2 factor
     corresponding to the number of trailing zeros of the mask.  */
  if (TREE_CODE (expr) == BIT_AND_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
      unsigned int i = 0;

      while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
	{
	  mask >>= 1;
	  factor *= 2;
	  i++;
	}
    }

  /* If the addend is not a multiple of the factor we found, give up.  In
     theory we could find a smaller common factor but it's useless for our
     needs.  This situation arises when dealing with a field F1 with no
     alignment requirement but that is following a field F2 with such
     requirements.  As long as we have F2's offset, we don't need alignment
     information to compute F1's.  */
  if (addend % factor != 0)
    factor = 1;

  return factor * value;
}
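
/* Worked example for the code above (hedged sketch, not called anywhere):
   an offset of the form (X & -8) + 16 yields FACTOR = 8 from the three
   trailing zeros of the mask, and the addend 16 is a multiple of 8, so a
   VALUE of 1 scales to 8; with an addend of 12 the factor would fall back
   to 1.  The helper below replays the trailing-zero extraction on a plain
   mask.  */

static unsigned HOST_WIDE_INT ATTRIBUTE_UNUSED
example_factor_of_mask (unsigned HOST_WIDE_INT mask)
{
  unsigned HOST_WIDE_INT factor = 1;
  unsigned int i = 0;

  /* Same loop as in scale_by_factor_of: each trailing zero of the mask
     doubles the power-of-2 factor.  */
  while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
    {
      mask >>= 1;
      factor *= 2;
      i++;
    }

  return factor;
}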

/* Return a LABEL_DECL with NAME.  GNAT_NODE is used for the position of
   the decl.  */

tree
create_label_decl (tree name, Node_Id gnat_node)
{
  tree label_decl
    = build_decl (input_location, LABEL_DECL, name, void_type_node);

  SET_DECL_MODE (label_decl, VOIDmode);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (label_decl, gnat_node);

  return label_decl;
}

/* Return a FUNCTION_DECL node.  NAME is the name of the subprogram, ASM_NAME
   its assembler name, TYPE its type (a FUNCTION_TYPE or METHOD_TYPE node),
   PARAM_DECL_LIST the list of its parameters (a list of PARM_DECL nodes
   chained through the DECL_CHAIN field).

   INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.

   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
   definition to be made visible outside of the current compilation unit.

   EXTERN_FLAG is true when processing an external subprogram declaration.

   ARTIFICIAL_P is true if the subprogram was generated by the compiler.

   DEBUG_INFO_P is true if we need to write debug information for it.

   DEFINITION is true if the subprogram is to be considered as a definition.

   ATTR_LIST is the list of attributes to be attached to the subprogram.

   GNAT_NODE is used for the position of the decl.  */

tree
create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
		     enum inline_status_t inline_status, bool public_flag,
		     bool extern_flag, bool artificial_p, bool debug_info_p,
		     bool definition, struct attrib *attr_list,
		     Node_Id gnat_node)
{
  tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;

  DECL_ARTIFICIAL (subprog_decl) = artificial_p;
  DECL_EXTERNAL (subprog_decl) = extern_flag;
  DECL_FUNCTION_IS_DEF (subprog_decl) = definition;
  DECL_IGNORED_P (subprog_decl) = !debug_info_p;
  TREE_PUBLIC (subprog_decl) = public_flag;

  switch (inline_status)
    {
    case is_suppressed:
      DECL_UNINLINABLE (subprog_decl) = 1;
      break;

    case is_default:
      break;

    case is_required:
      if (Back_End_Inlining)
	{
	  decl_attributes (&subprog_decl,
			   tree_cons (get_identifier ("always_inline"),
				      NULL_TREE, NULL_TREE),
			   ATTR_FLAG_TYPE_IN_PLACE);

	  /* Inline_Always guarantees that every direct call is inlined and
	     that there is no indirect reference to the subprogram, so the
	     instance in the original package (as well as its clones in the
	     client packages created for inter-unit inlining) can be made
	     private, which causes the out-of-line body to be eliminated.  */
	  TREE_PUBLIC (subprog_decl) = 0;
	}

      /* ... fall through ... */

    case is_prescribed:
      DECL_DISREGARD_INLINE_LIMITS (subprog_decl) = 1;

      /* ... fall through ... */

    case is_requested:
      DECL_DECLARED_INLINE_P (subprog_decl) = 1;
      if (!Debug_Generated_Code)
	DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
      break;

    default:
      gcc_unreachable ();
    }

  process_attributes (&subprog_decl, &attr_list, true, gnat_node);

  /* Once everything is processed, finish the subprogram declaration.  */
  finish_subprog_decl (subprog_decl, asm_name, type);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}

/* Given a subprogram declaration DECL, its assembler name and its type,
   finish constructing the subprogram declaration from ASM_NAME and TYPE.  */

void
finish_subprog_decl (tree decl, tree asm_name, tree type)
{
  /* DECL_ARGUMENTS is set by the caller, but not its context.  */
  for (tree param_decl = DECL_ARGUMENTS (decl);
       param_decl;
       param_decl = DECL_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = decl;

  tree result_decl
    = build_decl (DECL_SOURCE_LOCATION (decl), RESULT_DECL, NULL_TREE,
		  TREE_TYPE (type));

  DECL_ARTIFICIAL (result_decl) = 1;
  DECL_IGNORED_P (result_decl) = 1;
  DECL_CONTEXT (result_decl) = decl;
  DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
  DECL_RESULT (decl) = result_decl;

  /* Propagate the "pure" property.  */
  DECL_PURE_P (decl) = TYPE_RESTRICT (type);

  /* Propagate the "noreturn" property.  */
  TREE_THIS_VOLATILE (decl) = TYPE_VOLATILE (type);

  if (asm_name)
    {
      /* Let the target mangle the name if this isn't a verbatim asm.  */
      if (*IDENTIFIER_POINTER (asm_name) != '*')
	asm_name = targetm.mangle_decl_assembler_name (decl, asm_name);

      SET_DECL_ASSEMBLER_NAME (decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
	 designate the DECL_NAME of the 'main' entry point, in turn expected
	 to be declared as the "main" function literally by default.  Ada
	 program entry points are typically declared with a different name
	 within the binder generated file, exported as 'main' to satisfy the
	 system expectations.  Force main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
	DECL_NAME (decl) = main_identifier_node;
    }
}

/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (tree subprog_decl)
{
  announce_function (subprog_decl);

  /* This function is being defined.  */
  TREE_STATIC (subprog_decl) = 1;

  /* The failure of this assertion will likely come from a wrong context for
     the subprogram body, e.g. another procedure for a procedure declared at
     library level.  */
  gcc_assert (current_function_decl == decl_function_context (subprog_decl));

  current_function_decl = subprog_decl;

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  gnat_pushlevel ();
}

/* Finish translating the current subprogram and set its BODY.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Attach the BLOCK for this level to the function and pop the level.  */
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* The body should be a BIND_EXPR whose BLOCK is the top-level one.  */
  if (TREE_CODE (body) == BIND_EXPR)
    {
      BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
      DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
    }

  DECL_SAVED_TREE (fndecl) = body;

  current_function_decl = decl_function_context (fndecl);
}

/* Wrap up compilation of SUBPROG_DECL, a subprogram body.  */

void
rest_of_subprog_body_compilation (tree subprog_decl)
{
  /* We cannot track the location of errors past this point.  */
  Current_Error_Node = Empty;

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)
    return;

  /* Dump functions before gimplification.  */
  dump_function (TDI_original, subprog_decl);

  if (!decl_function_context (subprog_decl))
    cgraph_node::finalize_function (subprog_decl, false);
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_node::get_create (subprog_decl);
}

/* Record DECL, a builtin function, in the current binding level and
   return it.  */

tree
gnat_builtin_function (tree decl)
{
  gnat_pushdecl (decl, Empty);
  return decl;
}

/* Return an integer type with the number of bits of precision given by
   PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
   it is a signed type.  */

tree
gnat_type_for_size (unsigned precision, int unsignedp)
{
  tree t;
  char type_name[20];

  if (precision <= 2 * MAX_BITS_PER_WORD
      && signed_and_unsigned_types[precision][unsignedp])
    return signed_and_unsigned_types[precision][unsignedp];

  if (unsignedp)
    t = make_unsigned_type (precision);
  else
    t = make_signed_type (precision);
  TYPE_ARTIFICIAL (t) = 1;

  if (precision <= 2 * MAX_BITS_PER_WORD)
    signed_and_unsigned_types[precision][unsignedp] = t;

  if (!TYPE_NAME (t))
    {
      sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
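
/* Hedged usage sketch: the result is cached per precision and signedness,
   so repeated queries return the same node, and artificial names such as
   "UNSIGNED_8" are only given to types that do not have one already.  */

static void ATTRIBUTE_UNUSED
example_gnat_type_for_size (void)
{
  tree u8 = gnat_type_for_size (8, 1);

  gcc_checking_assert (u8 == gnat_type_for_size (8, 1));
  gcc_checking_assert (TYPE_PRECISION (u8) == 8 && TYPE_UNSIGNED (u8));
}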

/* Likewise for floating-point types.  */

static tree
float_type_for_precision (int precision, machine_mode mode)
{
  tree t;
  char type_name[20];

  if (float_types[(int) mode])
    return float_types[(int) mode];

  float_types[(int) mode] = t = make_node (REAL_TYPE);
  TYPE_PRECISION (t) = precision;
  layout_type (t);

  gcc_assert (TYPE_MODE (t) == mode);
  if (!TYPE_NAME (t))
    {
      sprintf (type_name, "FLOAT_%d", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}

/* Return a data type that has machine mode MODE.  UNSIGNEDP selects
   an unsigned type; otherwise a signed type is returned.  */

tree
gnat_type_for_mode (machine_mode mode, int unsignedp)
{
  if (mode == BLKmode)
    return NULL_TREE;

  if (mode == VOIDmode)
    return void_type_node;

  if (COMPLEX_MODE_P (mode))
    return NULL_TREE;

  scalar_float_mode float_mode;
  if (is_a <scalar_float_mode> (mode, &float_mode))
    return float_type_for_precision (GET_MODE_PRECISION (float_mode),
				     float_mode);

  scalar_int_mode int_mode;
  if (is_a <scalar_int_mode> (mode, &int_mode))
    return gnat_type_for_size (GET_MODE_BITSIZE (int_mode), unsignedp);

  if (VECTOR_MODE_P (mode))
    {
      machine_mode inner_mode = GET_MODE_INNER (mode);
      tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
      if (inner_type)
	return build_vector_type_for_mode (inner_type, mode);
    }

  return NULL_TREE;
}

/* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
   signedness being specified by UNSIGNEDP.  */

tree
gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
{
  if (type_node == char_type_node)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;

  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);

  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_type (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node)
	   && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
	   && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_type (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}

/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
   transparently converted to each other.  */

int
gnat_types_compatible_p (tree t1, tree t2)
{
  enum tree_code code;

  /* This is the default criterion.  */
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
    return 1;

  /* We only check structural equivalence here.  */
  if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
    return 0;

  /* Vector types are also compatible if they have the same number of subparts
     and the same form of (scalar) element type.  */
  if (code == VECTOR_TYPE
      && known_eq (TYPE_VECTOR_SUBPARTS (t1), TYPE_VECTOR_SUBPARTS (t2))
      && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
      && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
    return 1;

  /* Array types are also compatible if they are constrained and have the same
     domain(s), the same component type and the same scalar storage order.  */
  if (code == ARRAY_TYPE
      && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
	  || (TYPE_DOMAIN (t1)
	      && TYPE_DOMAIN (t2)
	      && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
	      && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
      && (TREE_TYPE (t1) == TREE_TYPE (t2)
	  || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
	      && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
      && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
    return 1;

  return 0;
}
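
/* Hedged usage sketch for the array case: two constrained array types with
   equal integer bounds, the same component type and the same scalar storage
   order are deemed compatible, whether or not they happen to be the same
   node after hash-consing.  */

static void ATTRIBUTE_UNUSED
example_array_compatibility (void)
{
  tree a1 = build_array_type (integer_type_node,
			      build_index_type (size_int (9)));
  tree a2 = build_array_type (integer_type_node,
			      build_index_type (size_int (9)));

  gcc_checking_assert (gnat_types_compatible_p (a1, a2));
}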

/* Return true if EXPR is a useless type conversion.  */

bool
gnat_useless_type_conversion (tree expr)
{
  if (CONVERT_EXPR_P (expr)
      || TREE_CODE (expr) == VIEW_CONVERT_EXPR
      || TREE_CODE (expr) == NON_LVALUE_EXPR)
    return gnat_types_compatible_p (TREE_TYPE (expr),
				    TREE_TYPE (TREE_OPERAND (expr, 0)));

  return false;
}

/* Return true if T, a {FUNCTION,METHOD}_TYPE, has the specified flags.  */

bool
fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
		     bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
{
  return TYPE_CI_CO_LIST (t) == cico_list
	 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
	 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
	 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
}

/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.

   Note that the expression may have already been gimplified, in which case
   COND_EXPRs have VOID_TYPE and no operands, and this must be handled.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);
  tree op0, op1, op2;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      return exp;

    case tcc_exceptional:
      gcc_assert (code == SSA_NAME);
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
	{
	  tree t, *argarray;
	  int n, i;

	  t = maybe_inline_call_in_expr (exp);
	  if (t)
	    return max_size (t, max_p);

	  n = call_expr_nargs (exp);
	  gcc_assert (n > 0);
	  argarray = XALLOCAVEC (tree, n);
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (CONTAINS_PLACEHOLDER_P (exp))
	{
	  tree base_type = get_base_type (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  tree val
	    = fold_convert (base_type,
			    max_p
			    ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));

	  /* Walk down the extra subtypes to get more restrictive bounds.  */
	  while (TYPE_IS_EXTRA_SUBTYPE_P (type))
	    {
	      type = TREE_TYPE (type);
	      if (max_p)
		val = fold_build2 (MIN_EXPR, base_type, val,
				   fold_convert (base_type,
						 TYPE_MAX_VALUE (type)));
	      else
		val = fold_build2 (MAX_EXPR, base_type, val,
				   fold_convert (base_type,
						 TYPE_MIN_VALUE (type)));
	    }

	  return fold_convert (type, max_size (val, max_p));
	}

      return exp;

    case tcc_comparison:
      return build_int_cst (type, max_p ? 1 : 0);

    case tcc_unary:
      op0 = TREE_OPERAND (exp, 0);

      if (code == NON_LVALUE_EXPR)
	return max_size (op0, max_p);

      if (VOID_TYPE_P (TREE_TYPE (op0)))
	return max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type);

      op0 = max_size (op0, code == NEGATE_EXPR ? !max_p : max_p);

      if (op0 == TREE_OPERAND (exp, 0))
	return exp;

      return fold_build1 (code, type, op0);

    case tcc_binary:
      op0 = TREE_OPERAND (exp, 0);
      op1 = TREE_OPERAND (exp, 1);

      /* If we have a multiply-add with a "negative" value in an unsigned
	 type, do a multiply-subtract with the negated value, in order to
	 avoid creating a spurious overflow below.  */
      if (code == PLUS_EXPR
	  && TREE_CODE (op0) == MULT_EXPR
	  && TYPE_UNSIGNED (type)
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (op0, 1))
	  && tree_int_cst_sign_bit (TREE_OPERAND (op0, 1)))
	{
	  tree tmp = op1;
	  op1 = build2 (MULT_EXPR, type, TREE_OPERAND (op0, 0),
			fold_build1 (NEGATE_EXPR, type,
				     TREE_OPERAND (op0, 1)));
	  op0 = tmp;
	  code = MINUS_EXPR;
	}

      op0 = max_size (op0, max_p);
      op1 = max_size (op1, code == MINUS_EXPR ? !max_p : max_p);

      if (code == MINUS_EXPR || code == PLUS_EXPR)
	{
	  /* If the op0 has overflowed and the op1 is a variable,
	     propagate the overflow by returning the op0.  */
	  if (TREE_CODE (op0) == INTEGER_CST
	      && TREE_OVERFLOW (op0)
	      && TREE_CODE (op1) != INTEGER_CST)
	    return op0;

	  /* If we have a "negative" value in an unsigned type, do the
	     opposite operation on the negated value, in order to avoid
	     creating a spurious overflow below.  */
	  if (TYPE_UNSIGNED (type)
	      && TREE_CODE (op1) == INTEGER_CST
	      && !TREE_OVERFLOW (op1)
	      && tree_int_cst_sign_bit (op1))
	    {
	      op1 = fold_build1 (NEGATE_EXPR, type, op1);
	      code = (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR);
	    }
	}

      if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	return exp;

      /* We need to detect overflows so we call size_binop here.  */
      return size_binop (code, op0, op1);

    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == SAVE_EXPR)
	    return exp;

	  op0 = max_size (TREE_OPERAND (exp, 0),
			  code == TRUTH_NOT_EXPR ? !max_p : max_p);

	  if (op0 == TREE_OPERAND (exp, 0))
	    return exp;

	  return fold_build1 (code, type, op0);

	case 2:
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  op0 = max_size (TREE_OPERAND (exp, 0), max_p);
	  op1 = max_size (TREE_OPERAND (exp, 1), max_p);

	  if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	    return exp;

	  return fold_build2 (code, type, op0, op1);

	case 3:
	  if (code == COND_EXPR)
	    {
	      op0 = TREE_OPERAND (exp, 0);
	      op1 = TREE_OPERAND (exp, 1);
	      op2 = TREE_OPERAND (exp, 2);

	      if (!op1 || !op2)
		return exp;

	      op1 = max_size (op1, max_p);
	      op2 = max_size (op2, max_p);

	      /* If we have the MAX of a "negative" value in an unsigned type
		 and zero for a length expression, just return zero.  */
	      if (max_p
		  && TREE_CODE (op0) == LE_EXPR
		  && TYPE_UNSIGNED (type)
		  && TREE_CODE (op1) == INTEGER_CST
		  && !TREE_OVERFLOW (op1)
		  && tree_int_cst_sign_bit (op1)
		  && integer_zerop (op2))
		return op2;

	      return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type, op1, op2);
	    }
	  break;

	default:
	  break;
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
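
/* Worked illustration of the unsigned rewrites in the tcc_binary case above
   (hedged sketch, with 32-bit unsigned ints standing in for sizetype
   values): if C = (unsigned int) -4, then A * C + B wraps to exactly
   B - 4 * A, so recasting the PLUS_EXPR as a MINUS_EXPR with the negated
   constant computes the same value while keeping size_binop from reporting
   a spurious overflow.  */

static unsigned int ATTRIBUTE_UNUSED
example_mult_add_rewrite (unsigned int a, unsigned int b)
{
  unsigned int c = (unsigned int) -4;

  /* Both forms agree modulo 2**32 by construction.  */
  gcc_checking_assert (a * c + b == b - 4 * a);
  return b - 4 * a;
}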

/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  vec<constructor_elt, va_gc> *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  while (TREE_CODE (array_type) == RECORD_TYPE
	 && (TYPE_PADDING_P (array_type)
	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down
     the field list of the template instead of the type chain because
     this array might be an Ada array of arrays and we can't tell where
     the nested arrays stop being the underlying object.  */
  for (field = TYPE_FIELDS (template_type);
       field;
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the
	 template, but this will only give us a maximum range.  */
      if (bound_list)
	{
	  bounds = TREE_VALUE (bound_list);
	  bound_list = TREE_CHAIN (bound_list);
	}
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	{
	  bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
	  array_type = TREE_TYPE (array_type);
	}
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}

/* Return true if TYPE is suitable for the element type of a vector.  */

static bool
type_for_vector_element_p (tree type)
{
  machine_mode mode;

  if (!INTEGRAL_TYPE_P (type)
      && !SCALAR_FLOAT_TYPE_P (type)
      && !FIXED_POINT_TYPE_P (type))
    return false;

  mode = TYPE_MODE (type);
  if (GET_MODE_CLASS (mode) != MODE_INT
      && !SCALAR_FLOAT_MODE_P (mode)
      && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
    return false;

  return true;
}

/* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
   this is not possible.  If ATTRIBUTE is non-zero, we are processing the
   attribute declaration and want to issue error messages on failure.  */

static tree
build_vector_type_for_size (tree inner_type, tree size, tree attribute)
{
  unsigned HOST_WIDE_INT size_int, inner_size_int;
  int nunits;

  /* Silently punt on variable sizes.  We can't make vector types for them,
     need to ignore them on front-end generated subtypes of unconstrained
     base types, and this attribute is for binding implementors, not end
     users, so we should never get there from legitimate explicit uses.  */
  if (!tree_fits_uhwi_p (size))
    return NULL_TREE;
  size_int = tree_to_uhwi (size);

  if (!type_for_vector_element_p (inner_type))
    {
      if (attribute)
	error ("invalid element type for attribute %qs",
	       IDENTIFIER_POINTER (attribute));
      return NULL_TREE;
    }
  inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));

  if (size_int % inner_size_int)
    {
      if (attribute)
	error ("vector size not an integral multiple of component size");
      return NULL_TREE;
    }

  if (size_int == 0)
    {
      if (attribute)
	error ("zero vector size");
      return NULL_TREE;
    }

  nunits = size_int / inner_size_int;
  if (nunits & (nunits - 1))
    {
      if (attribute)
	error ("number of components of vector not a power of two");
      return NULL_TREE;
    }

  return build_vector_type (inner_type, nunits);
}
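
/* Hedged distillation of the checks above with concrete numbers (not used
   by the compiler): 16 bytes of 4-byte elements give 4 units and pass the
   power-of-two test, whereas 12 bytes of 4-byte elements give 3 units and
   are rejected, as is a zero size.  */

static bool ATTRIBUTE_UNUSED
example_vector_size_ok (unsigned HOST_WIDE_INT size,
			unsigned HOST_WIDE_INT inner_size)
{
  if (size == 0 || size % inner_size != 0)
    return false;

  const unsigned HOST_WIDE_INT nunits = size / inner_size;
  return (nunits & (nunits - 1)) == 0;
}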
4187 
/* Return a vector type whose representative array type is ARRAY_TYPE, or
   NULL_TREE if this is not possible.  If ATTRIBUTE is non-zero, we are
   processing the attribute and want to issue error messages on failure.  */

static tree
build_vector_type_for_array (tree array_type, tree attribute)
{
  tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
                                                 TYPE_SIZE_UNIT (array_type),
                                                 attribute);
  if (!vector_type)
    return NULL_TREE;

  TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
  return vector_type;
}

/* Build a type to be used to represent an aliased object whose nominal type
   is an unconstrained array.  This consists of a RECORD_TYPE containing a
   field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
   If ARRAY_TYPE is that of an unconstrained array, this is used to represent
   an arbitrary unconstrained object.  Use NAME as the name of the record.
   DEBUG_INFO_P is true if we need to write debug information for the type.  */

tree
build_unc_object_type (tree template_type, tree object_type, tree name,
                       bool debug_info_p)
{
  tree type = make_node (RECORD_TYPE);
  tree template_field
    = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
                         NULL_TREE, NULL_TREE, 0, 1);
  tree array_field
    = create_field_decl (get_identifier ("ARRAY"), object_type, type,
                         NULL_TREE, NULL_TREE, 0, 1);

  TYPE_NAME (type) = name;
  TYPE_CONTAINS_TEMPLATE_P (type) = 1;
  DECL_CHAIN (template_field) = array_field;
  finish_record_type (type, template_field, 0, true);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, type, true, debug_info_p, Empty);

  return type;
}

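/* As an illustration only (an Ada-like sketch, not the exact GCC record):
   for an aliased object whose nominal type is String, the record built
   above is shaped like

     type Unc_Object is record
       BOUNDS : Template_Type;   --  bounds of the array, e.g. First/Last
       ARRAY  : Object_Type;     --  the array data itself
     end record;

   so both the bounds and the data are reachable from a single address.  */
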
/* Same, taking a thin or fat pointer type instead of a template type.  */

tree
build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
                                tree name, bool debug_info_p)
{
  tree template_type;

  gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));

  template_type
    = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
       ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
       : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));

  return
    build_unc_object_type (template_type, object_type, name, debug_info_p);
}

/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
                            TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
         This can happen as update_pointer_to can be invoked multiple times
         on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
          || (ref && TREE_TYPE (ref) == new_type))
        return;

      /* Chain PTR and its variants at the end.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
        {
          while (TYPE_NEXT_PTR_TO (new_ptr))
            new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
          TYPE_NEXT_PTR_TO (new_ptr) = ptr;
        }
      else
        TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
        for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
          {
            TREE_TYPE (t) = new_type;
            if (TYPE_NULL_BOUNDS (t))
              TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
          }

      /* Chain REF and its variants at the end.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
        {
          while (TYPE_NEXT_REF_TO (new_ref))
            new_ref = TYPE_NEXT_REF_TO (new_ref);
          TYPE_NEXT_REF_TO (new_ref) = ref;
        }
      else
        TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
        for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
          TREE_TYPE (t) = new_type;

      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
         since update_pointer_to can be invoked multiple times on the same
         couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
        return;

      update_pointer_to
        (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
         TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      update_pointer_to
        (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
         TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
                         TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }
}

/* Convert EXPR, a pointer to a constrained array, into a pointer to an
   unconstrained one.  This involves making or finding a template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_addr;
  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, 2);

  /* If EXPR is null, make a fat pointer that contains a null pointer to the
     array (compare_fat_pointers ensures that this is the full discriminant)
     and a valid pointer to the bounds.  This latter property is necessary
     since the compiler can hoist the load of the bounds done through it.  */
  if (integer_zerop (expr))
    {
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      tree null_bounds, t;

      if (TYPE_NULL_BOUNDS (ptr_template_type))
        null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
      else
        {
          /* The template type can still be dummy at this point so we build an
             empty constructor.  The middle-end will fill it in with zeros.  */
          t = build_constructor (template_type, NULL);
          TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
          null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
          SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
        }

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
                              fold_convert (p_array_type, null_pointer_node));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
      t = build_constructor (type, v);
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
      TREE_CONSTANT (t) = 0;
      TREE_STATIC (t) = 1;

      return t;
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree field = TYPE_FIELDS (TREE_TYPE (etype));

      expr = gnat_protect_expr (expr);

      /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
         the thin pointer value has been shifted so we shift it back to get
         the template address.  */
      if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
        {
          template_addr
            = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
                               fold_build1 (NEGATE_EXPR, sizetype,
                                            byte_position
                                            (DECL_CHAIN (field))));
          template_addr
            = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
                            template_addr);
        }

      /* Otherwise we explicitly take the address of the fields.  */
      else
        {
          expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
          template_addr
            = build_unary_op (ADDR_EXPR, NULL_TREE,
                              build_component_ref (expr, field, false));
          expr = build_unary_op (ADDR_EXPR, NULL_TREE,
                                 build_component_ref (expr, DECL_CHAIN (field),
                                                      false));
        }
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_addr
      = build_unary_op (ADDR_EXPR, NULL_TREE,
                        build_template (template_type, TREE_TYPE (etype),
                                        expr));

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
  return gnat_build_constructor (type, v);
}

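/* Schematically (a hedged sketch of the layout, with placeholder names for
   the two fat pointer fields): converting the address of a constrained
   array A yields a constructor of the form

     (P_ARRAY  => A'Address,
      P_BOUNDS => <address of a template holding A'First and A'Last>)

   so that the bounds always travel with the data pointer.  */
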
/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */

tree
convert (tree type, tree expr)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both input and output have padding and are of variable size, do this
     as an unchecked conversion.  Likewise if one is a mere variant of the
     other, so we avoid a pointless unpad/repad sequence.  */
  else if (code == RECORD_TYPE && ecode == RECORD_TYPE
           && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
           && (!TREE_CONSTANT (TYPE_SIZE (type))
               || !TREE_CONSTANT (TYPE_SIZE (etype))
               || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
               || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
                  == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
    ;

  /* If the output type has padding, convert to the inner type and make a
     constructor to build the record, unless a variable size is involved.  */
  else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
    {
      /* If we previously converted from another type and our type is
         of variable size, remove the conversion to avoid the need for
         variable-sized temporaries.  Likewise for a conversion between
         original and packable version.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
          && (!TREE_CONSTANT (TYPE_SIZE (type))
              || (ecode == RECORD_TYPE
                  && TYPE_NAME (etype)
                     == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
        expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
         object if we have variable size in order to avoid the need for some
         variable-sized temporaries.  Likewise if the padding is a variant
         of the other, so we avoid a pointless unpad/repad sequence.  */
      if (TREE_CODE (expr) == COMPONENT_REF
          && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
          && (!TREE_CONSTANT (TYPE_SIZE (type))
              || TYPE_MAIN_VARIANT (type)
                 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
              || (ecode == RECORD_TYPE
                  && TYPE_NAME (etype)
                     == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
        return convert (type, TREE_OPERAND (expr, 0));

      /* If the inner type is of self-referential size and the expression type
         is a record, do this as an unchecked conversion unless both types are
         essentially the same.  */
      if (ecode == RECORD_TYPE
          && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type)))
          && TYPE_MAIN_VARIANT (etype)
             != TYPE_MAIN_VARIANT (TREE_TYPE (TYPE_FIELDS (type))))
        return unchecked_convert (type, expr, false);

      /* If we are converting between array types with variable size, do the
         final conversion as an unchecked conversion, again to avoid the need
         for some variable-sized temporaries.  If valid, this conversion is
         very likely purely technical and without real effects.  */
      if (ecode == ARRAY_TYPE
          && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
          && !TREE_CONSTANT (TYPE_SIZE (etype))
          && !TREE_CONSTANT (TYPE_SIZE (type)))
        return unchecked_convert (type,
                                  convert (TREE_TYPE (TYPE_FIELDS (type)),
                                           expr),
                                  false);

      tree t = convert (TREE_TYPE (TYPE_FIELDS (type)), expr);

      /* If converting to the inner type has already created a CONSTRUCTOR with
         the right size, then reuse it instead of creating another one.  This
         can happen for the padding type built to overalign local variables.  */
      if (TREE_CODE (t) == VIEW_CONVERT_EXPR
          && TREE_CODE (TREE_OPERAND (t, 0)) == CONSTRUCTOR
          && TREE_CONSTANT (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0))))
          && tree_int_cst_equal (TYPE_SIZE (type),
                                 TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0)))))
        return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (t, 0));

      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, 1);
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), t);
      return gnat_build_constructor (type, v);
    }

  /* If the input type has padding, remove it and convert to the output type.
     The conditions are ordered so that the output type is never a padding
     type here, as it is not clear whether the conversion would always be
     correct if that were to happen.  */
  else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
    {
      tree unpadded;

      /* If we have just converted to this padded type, just get the
         inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR)
        unpadded = CONSTRUCTOR_ELT (expr, 0)->value;

      /* Otherwise, build an explicit component reference.  */
      else
        unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);

      return convert (type, unpadded);
    }

  /* If the input is a biased type, convert first to the base type and add
     the bias.  Note that the bias must go through a full conversion to the
     base type, lest it is itself a biased value; this happens for subtypes
     of biased types.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
                                       fold_convert (TREE_TYPE (etype), expr),
                                       convert (TREE_TYPE (etype),
                                                TYPE_MIN_VALUE (etype))));

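  /* As a hedged illustration: for a subtype with range 100 .. 103 stored
     with a biased representation, the machine value lies in 0 .. 3 and the
     conversion above computes expr + 100 in the base type before converting
     the sum to TYPE.  */
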
  /* If the input is a justified modular type, we need to extract the actual
     object before converting it to another type, with the exception of an
     [unconstrained] array or a mere type variant.  It is useful to avoid
     the extraction and conversion in these cases because it could end up
     replacing a VAR_DECL by a constructor and we might be about to take
     the address of the result.  */
  if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
      && code != ARRAY_TYPE
      && code != UNCONSTRAINED_ARRAY_TYPE
      && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
    return
      convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template.  */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, 2);

      /* If the source already has a template, get a reference to the
         associated array only, as we are going to rebuild a template
         for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
                              build_template (TREE_TYPE (TYPE_FIELDS (type)),
                                              obj_type, NULL_TREE));
      if (expr)
        CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
                                convert (obj_type, expr));
      return gnat_build_constructor (type, v);
    }

  /* There are some cases of expressions that we process specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
         conversion in gnat_expand_expr.  NULL_EXPR does not represent an
         actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
      /* If we are converting a STRING_CST to another constrained array type,
         just make a new one in the proper type.  */
      if (code == ecode
          && !(TREE_CONSTANT (TYPE_SIZE (etype))
               && !TREE_CONSTANT (TYPE_SIZE (type))))
        {
          expr = copy_node (expr);
          TREE_TYPE (expr) = type;
          return expr;
        }
      break;

    case VECTOR_CST:
      /* If we are converting a VECTOR_CST to a mere type variant, just make
         a new one in the proper type.  */
      if (code == ecode && gnat_types_compatible_p (type, etype))
        {
          expr = copy_node (expr);
          TREE_TYPE (expr) = type;
          return expr;
        }
      break;

    case CONSTRUCTOR:
      /* If we are converting a CONSTRUCTOR to a mere type variant, or to
         another padding type around the same type, just make a new one in
         the proper type.  */
      if (code == ecode
          && (gnat_types_compatible_p (type, etype)
              || (code == RECORD_TYPE
                  && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
                  && TREE_TYPE (TYPE_FIELDS (type))
                     == TREE_TYPE (TYPE_FIELDS (etype)))))
        {
          expr = copy_node (expr);
          TREE_TYPE (expr) = type;
          CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
          return expr;
        }

      /* Likewise for a conversion between original and packable version, or
         conversion between types of the same size and with the same list of
         fields, but we have to work harder to preserve type consistency.  */
      if (code == ecode
          && code == RECORD_TYPE
          && (TYPE_NAME (type) == TYPE_NAME (etype)
              || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
        {
          vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
          unsigned HOST_WIDE_INT len = vec_safe_length (e);
          vec<constructor_elt, va_gc> *v;
          vec_alloc (v, len);
          tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
          unsigned HOST_WIDE_INT idx;
          tree index, value;

          /* Whether we need to clear TREE_CONSTANT et al. on the output
             constructor when we convert in place.  */
          bool clear_constant = false;

          FOR_EACH_CONSTRUCTOR_ELT (e, idx, index, value)
            {
              /* Skip the missing fields in the CONSTRUCTOR.  */
              while (efield && field && !SAME_FIELD_P (efield, index))
                {
                  efield = DECL_CHAIN (efield);
                  field = DECL_CHAIN (field);
                }
              /* The field must be the same.  */
              if (!(efield && field && SAME_FIELD_P (efield, field)))
                break;
              constructor_elt elt
                = {field, convert (TREE_TYPE (field), value)};
              v->quick_push (elt);

              /* If packing has made this field a bitfield and the input
                 value couldn't be emitted statically any more, we need to
                 clear TREE_CONSTANT on our output.  */
              if (!clear_constant
                  && TREE_CONSTANT (expr)
                  && !CONSTRUCTOR_BITFIELD_P (efield)
                  && CONSTRUCTOR_BITFIELD_P (field)
                  && !initializer_constant_valid_for_bitfield_p (value))
                clear_constant = true;

              efield = DECL_CHAIN (efield);
              field = DECL_CHAIN (field);
            }

          /* If we have been able to match and convert all the input fields
             to their output type, convert in place now.  We'll fall back to
             a view conversion downstream otherwise.  */
          if (idx == len)
            {
              expr = copy_node (expr);
              TREE_TYPE (expr) = type;
              CONSTRUCTOR_ELTS (expr) = v;
              if (clear_constant)
                TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
              return expr;
            }
        }

      /* Likewise for a conversion between array type and vector type with a
         compatible representative array.  */
      else if (code == VECTOR_TYPE
               && ecode == ARRAY_TYPE
               && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
                                           etype))
        {
          vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
          unsigned HOST_WIDE_INT len = vec_safe_length (e);
          vec<constructor_elt, va_gc> *v;
          unsigned HOST_WIDE_INT ix;
          tree value;

          /* Build a VECTOR_CST from a *constant* array constructor.  */
          if (TREE_CONSTANT (expr))
            {
              bool constant_p = true;

              /* Iterate through elements and check if all constructor
                 elements are *_CSTs.  */
              FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
                if (!CONSTANT_CLASS_P (value))
                  {
                    constant_p = false;
                    break;
                  }

              if (constant_p)
                return build_vector_from_ctor (type,
                                               CONSTRUCTOR_ELTS (expr));
            }

          /* Otherwise, build a regular vector constructor.  */
          vec_alloc (v, len);
          FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
            {
              constructor_elt elt = {NULL_TREE, value};
              v->quick_push (elt);
            }
          expr = copy_node (expr);
          TREE_TYPE (expr) = type;
          CONSTRUCTOR_ELTS (expr) = v;
          return expr;
        }
      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* First retrieve the underlying array.  */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      {
        /* GCC 4.x is very sensitive to type consistency overall, and view
           conversions thus are very frequent.  Even though just "convert"ing
           the inner operand to the output type is fine in most cases, it
           might expose unexpected input/output type mismatches in special
           circumstances so we avoid such recursive calls when we can.  */
        tree op0 = TREE_OPERAND (expr, 0);

        /* If we are converting back to the original type, we can just
           lift the input conversion.  This is a common occurrence with
           switches back-and-forth amongst type variants.  */
        if (type == TREE_TYPE (op0))
          return op0;

        /* Otherwise, if we're converting between two aggregate or vector
           types, we might be allowed to substitute the VIEW_CONVERT_EXPR
           target type in place or to just convert the inner expression.  */
        if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
            || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
          {
            /* If we are converting between mere variants, we can just
               substitute the VIEW_CONVERT_EXPR in place.  */
            if (gnat_types_compatible_p (type, etype))
              return build1 (VIEW_CONVERT_EXPR, type, op0);

            /* Otherwise, we may just bypass the input view conversion unless
               one of the types is a fat pointer, which is handled by
               specialized code below that relies on exact type matching.  */
            else if (!TYPE_IS_FAT_POINTER_P (type)
                     && !TYPE_IS_FAT_POINTER_P (etype))
              return convert (type, op0);
          }

        break;
      }

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* If we are converting between two aggregate or vector types that are mere
     variants, just make a VIEW_CONVERT_EXPR.  Likewise when we are converting
     to a vector type from its representative array type.  */
  else if ((code == ecode
            && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
            && gnat_types_compatible_p (type, etype))
           || (code == VECTOR_TYPE
               && ecode == ARRAY_TYPE
               && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
                                           etype)))
    return build1 (VIEW_CONVERT_EXPR, type, expr);

  /* If we are converting between tagged types, try to upcast properly.
     But don't do it if we are just annotating types since tagged types
     aren't fully laid out in this mode.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
           && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type)
           && !type_annotate_only)
    {
      tree child_etype = etype;
      do {
        tree field = TYPE_FIELDS (child_etype);
        if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
          return build_component_ref (expr, field, false);
        child_etype = TREE_TYPE (field);
      } while (TREE_CODE (child_etype) == RECORD_TYPE);
    }

  /* If we are converting from a smaller form of record type back to it, just
     make a VIEW_CONVERT_EXPR.  But first pad the expression to have the same
     size on both sides.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
           && smaller_form_type_p (etype, type))
    {
      expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
                                      false, false, true),
                      expr);
      return build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* In all other cases of related types, make a NOP_EXPR.  */
  else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return fold_convert (type, expr);

  switch (code)
    {
    case VOID_TYPE:
      return fold_build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
          && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
              || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
        return unchecked_convert (type, expr, false);

      /* If the output is a biased type, convert first to the base type and
         subtract the bias.  Note that the bias itself must go through a full
         conversion to the base type, lest it is a biased value; this happens
         for subtypes of biased types.  */
      if (TYPE_BIASED_REPRESENTATION_P (type))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (type),
                                          convert (TREE_TYPE (type), expr),
                                          convert (TREE_TYPE (type),
                                                   TYPE_MIN_VALUE (type))));

      /* If we are converting an additive expression to an integer type
         with lower precision, be wary of the optimization that can be
         applied by convert_to_integer.  There are 2 problematic cases:
           - if the first operand was originally of a biased type,
             because we could be recursively called to convert it
             to an intermediate type and thus rematerialize the
             additive operator endlessly,
           - if the expression contains a placeholder, because an
             intermediate conversion that changes the sign could
             be inserted and thus introduce an artificial overflow
             at compile time when the placeholder is substituted.  */
      if (ecode == INTEGER_TYPE
          && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
          && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
        {
          tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);

          if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
               && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
              || CONTAINS_PLACEHOLDER_P (expr))
            return fold_convert (type, expr);
        }

      /* ... fall through ... */

    case ENUMERAL_TYPE:
      return fold (convert_to_integer (type, expr));

    case BOOLEAN_TYPE:
      /* Do not use convert_to_integer with boolean types.  */
      return fold_convert_loc (EXPR_LOCATION (expr), type, expr);

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two thin pointers, adjust if needed to account
         for differing offsets from the base pointer, depending on whether
         there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type.  */
      if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
        {
          tree etype_pos
            = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
              ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
              : size_zero_node;
          tree type_pos
            = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
              ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
              : size_zero_node;
          tree byte_diff = size_diffop (type_pos, etype_pos);

          expr = build1 (NOP_EXPR, type, expr);
          if (integer_zerop (byte_diff))
            return expr;

          return build_binary_op (POINTER_PLUS_EXPR, type, expr,
                                  fold_convert (sizetype, byte_diff));
        }
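
      /* A hedged illustration: if only the target record has a
         TYPE_UNCONSTRAINED_ARRAY, ETYPE_POS is 0 and TYPE_POS is the byte
         offset of the array field, so BYTE_DIFF shifts the value forward by
         that many bytes when changing representations.  */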

      /* If converting fat pointer to normal or thin pointer, get the pointer
         to the array and then convert it.  */
      if (TYPE_IS_FAT_POINTER_P (etype))
        expr = build_component_ref (expr, TYPE_FIELDS (etype), false);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      /* Do a normal conversion between scalar and justified modular type.  */
      if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
        {
          vec<constructor_elt, va_gc> *v;
          vec_alloc (v, 1);

          CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
                                  convert (TREE_TYPE (TYPE_FIELDS (type)),
                                           expr));
          return gnat_build_constructor (type, v);
        }

      /* In these cases, assume the front-end has validated the conversion.
         If the conversion is valid, it will be a bit-wise conversion, so
         it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, false);

    case ARRAY_TYPE:
      /* Do a normal conversion between unconstrained and constrained array
         type, assuming the latter is a constrained version of the former.  */
      if (TREE_CODE (expr) == INDIRECT_REF
          && ecode == ARRAY_TYPE
          && TREE_TYPE (etype) == TREE_TYPE (type))
        {
          tree ptr_type = build_pointer_type (type);
          tree t = build_unary_op (INDIRECT_REF, NULL_TREE,
                                   fold_convert (ptr_type,
                                                 TREE_OPERAND (expr, 0)));
          TREE_READONLY (t) = TREE_READONLY (expr);
          TREE_THIS_NOTRAP (t) = TREE_THIS_NOTRAP (expr);
          return t;
        }

      /* In these cases, assume the front-end has validated the conversion.
         If the conversion is valid, it will be a bit-wise conversion, so
         it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, false);

    case UNION_TYPE:
      /* This is either a conversion between a tagged type and some subtype,
         which we have to mark as a UNION_TYPE because of overlapping fields,
         or a conversion of an Unchecked_Union.  */
      return unchecked_convert (type, expr, false);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If the input is a VECTOR_TYPE, convert to the representative
         array type first.  */
      if (ecode == VECTOR_TYPE)
        {
          expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
          etype = TREE_TYPE (expr);
          ecode = TREE_CODE (etype);
        }

      /* If EXPR is a constrained array, take its address, convert it to a
         fat pointer, and then dereference it.  Likewise if EXPR is a
         record containing both a template and a constrained array.
         Note that a record representing a justified modular type
         always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
          || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
          || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
          || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
        return
          build_unary_op
            (INDIRECT_REF, NULL_TREE,
             convert_to_fat_pointer (TREE_TYPE (type),
                                     build_unary_op (ADDR_EXPR,
                                                     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
         array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
        return
          build_unary_op (INDIRECT_REF, NULL_TREE,
                          convert (TREE_TYPE (type),
                                   build_unary_op (ADDR_EXPR,
                                                   NULL_TREE, expr)));
      else
        gcc_unreachable ();

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gcc_unreachable ();
    }
}

/* Create an expression whose value is that of EXPR converted to the common
   index type, which is sizetype.  EXPR is supposed to be in the base type
   of the GNAT index type.  Calling it is equivalent to doing

     convert (sizetype, expr)

   but we try to distribute the type conversion with the knowledge that EXPR
   cannot overflow in its type.  This is a best-effort approach and we fall
   back to the above expression as soon as difficulties are encountered.

   This is necessary to overcome issues that arise when the GNAT base index
   type and the GCC common index type (sizetype) don't have the same size,
   which is quite frequent on 64-bit architectures.  In this case, and if
   the GNAT base index type is signed but the iteration type of the loop has
   been forced to unsigned, the loop scalar evolution engine cannot compute
   a simple evolution for the general induction variables associated with the
   array indices, because it will preserve the wrap-around semantics in the
   unsigned type of their "inner" part.  As a result, many loop optimizations
   are blocked.

   The solution is to use a special (basic) induction variable that is at
   least as large as sizetype, and to express the aforementioned general
   induction variables in terms of this induction variable, eliminating
   the problematic intermediate truncation to the GNAT base index type.
   This is possible as long as the original expression doesn't overflow
   and if the middle-end hasn't introduced artificial overflows in the
   course of the various simplifications it can make to the expression.  */

tree
convert_to_index_type (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  tree type = TREE_TYPE (expr);

  /* If the type is unsigned, overflow is allowed so we cannot be sure that
     EXPR doesn't overflow.  Keep it simple if optimization is disabled.  */
  if (TYPE_UNSIGNED (type) || !optimize || optimize_debug)
    return convert (sizetype, expr);

  switch (code)
    {
    case VAR_DECL:
      /* The main effect of the function: replace a loop parameter with its
         associated special induction variable.  */
      if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
        expr = DECL_INDUCTION_VAR (expr);
      break;

    CASE_CONVERT:
      {
        tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
        /* Bail out as soon as we suspect some sort of type frobbing.  */
        if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
            || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
          break;
      }

      /* ... fall through ... */

    case NON_LVALUE_EXPR:
      return fold_build1 (code, sizetype,
                          convert_to_index_type (TREE_OPERAND (expr, 0)));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return fold_build2 (code, sizetype,
                          convert_to_index_type (TREE_OPERAND (expr, 0)),
                          convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COMPOUND_EXPR:
      return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
                          convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COND_EXPR:
      return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
                          convert_to_index_type (TREE_OPERAND (expr, 1)),
                          convert_to_index_type (TREE_OPERAND (expr, 2)));

    default:
      break;
    }

  return convert (sizetype, expr);
}

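/* For example (illustrative): in a loop "for J in A'Range loop ... A (J)
   ... end loop" compiled for a 64-bit target, the index computation for
   A (J) goes through sizetype; substituting the 64-bit induction variable
   for J here lets the scalar evolution engine see a simple affine evolution
   instead of a truncation to the 32-bit GNAT base index type.  */
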
/* Remove all conversions that are done in EXP.  This includes converting
   from a padded type or to a justified modular type.  If TRUE_ADDRESS
   is true, always return the address of the containing object even if
   the address is not bit-aligned.  */

tree
remove_conversions (tree exp, bool true_address)
{
  switch (TREE_CODE (exp))
    {
    case CONSTRUCTOR:
      if (true_address
          && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
        return
          remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
      break;

    case COMPONENT_REF:
      if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
        return remove_conversions (TREE_OPERAND (exp, 0), true_address);
      break;

    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return remove_conversions (TREE_OPERAND (exp, 0), true_address);

    default:
      break;
    }

  return exp;
}

/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If it has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE (type))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
        {
          const bool read_only = TREE_READONLY (exp);
          const bool no_trap = TREE_THIS_NOTRAP (exp);

          exp = TREE_OPERAND (exp, 0);
          type = TREE_TYPE (exp);

          if (TREE_CODE (exp) == COND_EXPR)
            {
              tree op1
                = build_unary_op (INDIRECT_REF, NULL_TREE,
                                  build_component_ref (TREE_OPERAND (exp, 1),
                                                       TYPE_FIELDS (type),
                                                       false));
              tree op2
                = build_unary_op (INDIRECT_REF, NULL_TREE,
                                  build_component_ref (TREE_OPERAND (exp, 2),
                                                       TYPE_FIELDS (type),
                                                       false));

              exp = build3 (COND_EXPR,
                            TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
                            TREE_OPERAND (exp, 0), op1, op2);
            }
          else
            {
              exp = build_unary_op (INDIRECT_REF, NULL_TREE,
                                    build_component_ref (exp,
                                                         TYPE_FIELDS (type),
                                                         false));
              TREE_READONLY (exp) = read_only;
              TREE_THIS_NOTRAP (exp) = no_trap;
            }
        }

      else if (code == NULL_EXPR)
        exp = build1 (NULL_EXPR,
                      TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
                      TREE_OPERAND (exp, 0));
      break;

    case RECORD_TYPE:
      /* If this is a padded type and it contains a template, convert to the
         unpadded type first.  */
      if (TYPE_PADDING_P (type)
          && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
          && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
        {
          exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
          code = TREE_CODE (exp);
          type = TREE_TYPE (exp);
        }

      if (TYPE_CONTAINS_TEMPLATE_P (type))
        {
          /* If the array initializer is a box, return NULL_TREE.  */
          if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
            return NULL_TREE;

          exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
                                     false);

          /* If the array is padded, remove the padding.  */
          exp = maybe_padded_object (exp);
        }
      break;

    default:
      break;
    }

  return exp;
}

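/* For instance (sketch): if EXP is the dereference P.all of a fat pointer P,
   the result is a dereference of the first (array pointer) field of P; if
   EXP is a record containing a template and an array, the result is a
   reference to its second (array) field, with any padding removed.  */
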
/* Return true if EXPR is an expression that can be folded as an operand
   of a VIEW_CONVERT_EXPR.  See ada-tree.h for a complete rationale.  */

static bool
can_fold_for_view_convert_p (tree expr)
{
  tree t1, t2;

  /* The folder will fold NOP_EXPRs between integral types with the same
     precision (in the middle-end's sense).  We cannot allow it if the
     types don't have the same precision in the Ada sense as well.  */
  if (TREE_CODE (expr) != NOP_EXPR)
    return true;

  t1 = TREE_TYPE (expr);
  t2 = TREE_TYPE (TREE_OPERAND (expr, 0));

  /* Defer to the folder for non-integral conversions.  */
  if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
    return true;

  /* Only fold conversions that preserve both precisions.  */
  if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
      && operand_equal_p (rm_size (t1), rm_size (t2), 0))
    return true;

  return false;
}

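/* For instance (illustrative): a NOP_EXPR between two 8-bit integral types
   of which only one has an Ada RM size of 1 bit must not be folded away
   under a VIEW_CONVERT_EXPR, because the Ada precisions differ even though
   the middle-end precisions are the same.  */
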
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is true, truncation operations should be suppressed.

   Special care is required with (source or target) integral types whose
   precision is not equal to their size, to make sure we fetch or assign
   the value bits whose location might depend on the endianness, e.g.

     Rmsize : constant := 8;
     subtype Int is Integer range 0 .. 2 ** Rmsize - 1;

     type Bit_Array is array (1 .. Rmsize) of Boolean;
     pragma Pack (Bit_Array);

     function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);

     Value : Int := 2#1000_0001#;
     Vbits : Bit_Array := To_Bit_Array (Value);

   we expect the 8 bits at Vbits'Address to always contain Value, while
   their original location depends on the endianness, at Value'Address
   on a little-endian architecture but not on a big-endian one.

   One pitfall is that we cannot use TYPE_UNSIGNED directly to decide how
   the bits between the precision and the size are filled, because of the
   trick used in the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.
   So we use the special predicate type_unsigned_for_rm above.  */

tree
unchecked_convert (tree type, tree expr, bool notrunc_p)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);
  const bool ebiased
    = (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype));
  const bool biased
    = (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type));
  const bool ereverse
    = (AGGREGATE_TYPE_P (etype) && TYPE_REVERSE_STORAGE_ORDER (etype));
  const bool reverse
    = (AGGREGATE_TYPE_P (type) && TYPE_REVERSE_STORAGE_ORDER (type));
  tree tem;
  int c = 0;

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral or regular pointer, then just do a normal
     conversion.  Likewise for a conversion to an unconstrained array.  */
  if (((INTEGRAL_TYPE_P (type)
        || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
        || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
       && (INTEGRAL_TYPE_P (etype)
           || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
           || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
      || code == UNCONSTRAINED_ARRAY_TYPE)
    {
      if (ebiased)
        {
          tree ntype = copy_type (etype);
          TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
          TYPE_MAIN_VARIANT (ntype) = ntype;
          expr = build1 (NOP_EXPR, ntype, expr);
        }

      if (biased)
        {
          tree rtype = copy_type (type);
          TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
          TYPE_MAIN_VARIANT (rtype) = rtype;
          expr = convert (rtype, expr);
          expr = build1 (NOP_EXPR, type, expr);
        }
      else
        expr = convert (type, expr);
    }

  /* If we are converting to an integral type whose precision is not equal
     to its size, first unchecked convert to a record type that contains a
     field of the given precision.  Then extract the result from the field.

     There is a subtlety if the source type is an aggregate type with reverse
     storage order because its representation is not contiguous in the native
     storage order, i.e. a direct unchecked conversion to an integral type
     with N bits of precision cannot read the first N bits of the aggregate
     type.  To overcome it, we do an unchecked conversion to an integral type
     with reverse storage order and return the resulting value.  This also
     ensures that the result of the unchecked conversion doesn't depend on
     the endianness of the target machine, but only on the storage order of
     the aggregate type.

     Finally, for the sake of consistency, we do the unchecked conversion
     to an integral type with reverse storage order as soon as the source
     type is an aggregate type with reverse storage order, even if there
     are no considerations of precision or size involved.  Ultimately, we
     further extend this processing to any scalar type.  */
  else if ((INTEGRAL_TYPE_P (type)
            && TYPE_RM_SIZE (type)
            && ((c = tree_int_cst_compare (TYPE_RM_SIZE (type),
                                           TYPE_SIZE (type))) < 0
                || ereverse))
           || (SCALAR_FLOAT_TYPE_P (type) && ereverse))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field_type, field;

      TYPE_REVERSE_STORAGE_ORDER (rec_type) = ereverse;

      if (c < 0)
        {
          const unsigned HOST_WIDE_INT prec
            = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
          if (type_unsigned_for_rm (type))
            field_type = make_unsigned_type (prec);
          else
            field_type = make_signed_type (prec);
          SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
        }
      else
        field_type = type;

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
                                 NULL_TREE, bitsize_zero_node, c < 0, 0);

      finish_record_type (rec_type, field, 1, false);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, field, false);
      expr = fold_build1 (NOP_EXPR, type, expr);
    }
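
  /* Continuing the example in the head comment (a sketch): for a target
     integral type with an RM size of 8 bits but a size of 32 bits, the
     record built above holds a single 8-bit OBJ field at offset 0, so
     extracting it fetches exactly the 8 value bits wherever a plain
     32-bit load would have found them.  */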

  /* Similarly if we are converting from an integral type whose precision is
     not equal to its size, first copy into a field of the given precision
     and unchecked convert the record type.

     The same considerations as above apply if the target type is an aggregate
     type with reverse storage order and we also proceed similarly.  */
  else if ((INTEGRAL_TYPE_P (etype)
            && TYPE_RM_SIZE (etype)
            && ((c = tree_int_cst_compare (TYPE_RM_SIZE (etype),
                                           TYPE_SIZE (etype))) < 0
                || reverse))
           || (SCALAR_FLOAT_TYPE_P (etype) && reverse))
    {
      tree rec_type = make_node (RECORD_TYPE);
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, 1);
      tree field_type, field;

      TYPE_REVERSE_STORAGE_ORDER (rec_type) = reverse;

      if (c < 0)
        {
          const unsigned HOST_WIDE_INT prec
            = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
          if (type_unsigned_for_rm (etype))
            field_type = make_unsigned_type (prec);
          else
            field_type = make_signed_type (prec);
          SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
        }
      else
        field_type = etype;

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
                                 NULL_TREE, bitsize_zero_node, c < 0, 0);

      finish_record_type (rec_type, field, 1, false);

      expr = fold_build1 (NOP_EXPR, field_type, expr);
      CONSTRUCTOR_APPEND_ELT (v, field, expr);
      expr = gnat_build_constructor (rec_type, v);
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting from a scalar type to a type with a different size,
     we need to pad to have the same size on both sides.

     ??? We cannot do it unconditionally because unchecked conversions are
     used liberally by the front-end to implement interface thunks:

       type ada__tags__addr_ptr is access system.address;
       S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
       return p___size__4 (p__object!(S191s.all));

     so we need to skip dereferences.  */
  else if (!INDIRECT_REF_P (expr)
           && !AGGREGATE_TYPE_P (etype)
           && ecode != UNCONSTRAINED_ARRAY_TYPE
           && TREE_CONSTANT (TYPE_SIZE (type))
           && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
    {
      if (c < 0)
        {
          expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
                                          false, false, true),
                          expr);
          expr = unchecked_convert (type, expr, notrunc_p);
        }
      else
        {
          tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
                                          false, false, true);
          expr = unchecked_convert (rec_type, expr, notrunc_p);
          expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
        }
    }

  /* Likewise if we are converting from a scalar type to a type with self-
     referential size.  We use the max size to do the padding in this case.  */
  else if (!INDIRECT_REF_P (expr)
           && !AGGREGATE_TYPE_P (etype)
           && ecode != UNCONSTRAINED_ARRAY_TYPE
           && CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type)))
    {
      tree new_size = max_size (TYPE_SIZE (type), true);
      c = tree_int_cst_compare (TYPE_SIZE (etype), new_size);
      if (c < 0)
        {
          expr = convert (maybe_pad_type (etype, new_size, 0, Empty,
                                          false, false, true),
                          expr);
          expr = unchecked_convert (type, expr, notrunc_p);
        }
      else
        {
          tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
                                          false, false, true);
          expr = unchecked_convert (rec_type, expr, notrunc_p);
          expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
        }
    }

  /* We have a special case when we are converting between two unconstrained
     array types.  In that case, take the address, convert the fat pointer
     types, and dereference.  */
  else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
                           build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
                                   build_unary_op (ADDR_EXPR, NULL_TREE,
                                                   expr)));

  /* Another special case is when we are converting to a vector type from its
     representative array type; this is a regular conversion.  */
  else if (code == VECTOR_TYPE
           && ecode == ARRAY_TYPE
           && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
                                       etype))
    expr = convert (type, expr);

  /* And, if the array type is not the representative, we try to build an
     intermediate vector type of which the array type is the representative
     and to do the unchecked conversion between the vector types, in order
     to enable further simplifications in the middle-end.  */
  else if (code == VECTOR_TYPE
           && ecode == ARRAY_TYPE
           && (tem = build_vector_type_for_array (etype, NULL_TREE)))
    {
      expr = convert (tem, expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting a CONSTRUCTOR to a more aligned aggregate type, bump
     the alignment of the CONSTRUCTOR to speed up the copy operation.  But do
     not do it for a conversion between original and packable version to avoid
     an infinite recursion.  */
  else if (TREE_CODE (expr) == CONSTRUCTOR
           && AGGREGATE_TYPE_P (type)
           && TYPE_NAME (type) != TYPE_NAME (etype)
           && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
    {
      expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
                                      Empty, false, false, true),
                      expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting a CONSTRUCTOR to a larger aggregate type, bump the
     size of the CONSTRUCTOR to make sure there are enough allocated bytes.
     But do not do it for a conversion between original and packable version
     to avoid an infinite recursion.  */
  else if (TREE_CODE (expr) == CONSTRUCTOR
           && AGGREGATE_TYPE_P (type)
           && TYPE_NAME (type) != TYPE_NAME (etype)
           && TREE_CONSTANT (TYPE_SIZE (type))
           && (!TREE_CONSTANT (TYPE_SIZE (etype))
               || tree_int_cst_lt (TYPE_SIZE (etype), TYPE_SIZE (type))))
    {
      expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0,
                                      Empty, false, false, true),
                      expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

5626   /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression.  */
5627   else
5628     {
5629       expr = maybe_unconstrained_array (expr);
5630       etype = TREE_TYPE (expr);
5631       ecode = TREE_CODE (etype);
5632       if (can_fold_for_view_convert_p (expr))
5633 	expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5634       else
5635 	expr = build1 (VIEW_CONVERT_EXPR, type, expr);
5636     }
5637 
5638   /* If the result is a non-biased integral type whose precision is not equal
5639      to its size, sign- or zero-extend the result.  But we need not do this
5640      if the input is also an integral type and both are unsigned or both are
5641      signed and have the same precision.  */
5642   tree type_rm_size;
5643   if (!notrunc_p
5644       && !biased
5645       && INTEGRAL_TYPE_P (type)
5646       && (type_rm_size = TYPE_RM_SIZE (type))
5647       && tree_int_cst_compare (type_rm_size, TYPE_SIZE (type)) < 0
5648       && !(INTEGRAL_TYPE_P (etype)
5649 	   && type_unsigned_for_rm (type) == type_unsigned_for_rm (etype)
5650 	   && (type_unsigned_for_rm (type)
5651 	       || tree_int_cst_compare (type_rm_size,
5652 					TYPE_RM_SIZE (etype)
5653 					? TYPE_RM_SIZE (etype)
5654 					: TYPE_SIZE (etype)) == 0)))
5655     {
5656       if (integer_zerop (type_rm_size))
5657 	expr = build_int_cst (type, 0);
5658       else
5659 	{
5660 	  tree base_type
5661 	    = gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
5662 				  type_unsigned_for_rm (type));
5663 	  tree shift_expr
5664 	    = convert (base_type,
5665 		       size_binop (MINUS_EXPR,
5666 				   TYPE_SIZE (type), type_rm_size));
5667 	  expr
5668 	    = convert (type,
5669 		       build_binary_op (RSHIFT_EXPR, base_type,
5670 				        build_binary_op (LSHIFT_EXPR, base_type,
5671 							 convert (base_type,
5672 								  expr),
5673 							 shift_expr),
5674 				        shift_expr));
5675 	}
5676     }
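
  /* For instance (illustrative): converting to a signed integral type with
     TYPE_SIZE 8 but TYPE_RM_SIZE 5, the code above shifts the value left by
     3 bits and then arithmetically right by 3 bits, so the 5-bit pattern
     10011 (-13) is sign-extended to 11110011, i.e. -13 in the full 8-bit
     type.  */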
5677 
5678   /* An unchecked conversion should never raise Constraint_Error.  The code
5679      below assumes that GCC's conversion routines overflow the same way that
5680      the underlying hardware does.  This is probably true.  In the rare case
5681      when it is false, we can rely on the fact that such conversions are
5682      erroneous anyway.  */
5683   if (TREE_CODE (expr) == INTEGER_CST)
5684     TREE_OVERFLOW (expr) = 0;
5685 
5686   /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR, mark
5687      the expression as no longer constant.  */
5688   if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5689       && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5690 			   OEP_ONLY_CONST))
5691     TREE_CONSTANT (expr) = 0;
5692 
5693   return expr;
5694 }
5695 
5696 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5697    the latter being a record type as predicated by Is_Record_Type.  */
5698 
5699 enum tree_code
5700 tree_code_for_record_type (Entity_Id gnat_type)
5701 {
5702   Node_Id component_list, component;
5703 
5704   /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5705      fields are all in the variant part.  Otherwise, return RECORD_TYPE.  */
5706   if (!Is_Unchecked_Union (gnat_type))
5707     return RECORD_TYPE;
5708 
5709   gnat_type = Implementation_Base_Type (gnat_type);
5710   component_list
5711     = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5712 
5713   for (component = First_Non_Pragma (Component_Items (component_list));
5714        Present (component);
5715        component = Next_Non_Pragma (component))
5716     if (Ekind (Defining_Entity (component)) == E_Component)
5717       return RECORD_TYPE;
5718 
5719   return UNION_TYPE;
5720 }
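
/* For example (illustrative), the following Ada declaration yields
   UNION_TYPE, since its only non-discriminant components are in the
   variant part:

       type Num (Kind : Boolean := False) is record
          case Kind is
             when False => I : Integer;
             when True  => F : Float;
          end case;
       end record;
       pragma Unchecked_Union (Num);  */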
5721 
5722 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5723    size is equal to 64 bits, or an array of such a type.  Set ALIGN_CLAUSE
5724    according to the presence of an alignment clause on the type or, if it
5725    is an array, on the component type.  */
5726 
5727 bool
5728 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5729 {
5730   gnat_type = Underlying_Type (gnat_type);
5731 
5732   *align_clause = Present (Alignment_Clause (gnat_type));
5733 
5734   if (Is_Array_Type (gnat_type))
5735     {
5736       gnat_type = Underlying_Type (Component_Type (gnat_type));
5737       if (Present (Alignment_Clause (gnat_type)))
5738 	*align_clause = true;
5739     }
5740 
5741   if (!Is_Floating_Point_Type (gnat_type))
5742     return false;
5743 
5744   if (UI_To_Int (Esize (gnat_type)) != 64)
5745     return false;
5746 
5747   return true;
5748 }
5749 
5750 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5751    size is greater or equal to 64 bits, or an array of such a type.  Set
5752    ALIGN_CLAUSE according to the presence of an alignment clause on the
5753    type or, if it is an array, on the component type.  */
5754 
5755 bool
5756 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5757 {
5758   gnat_type = Underlying_Type (gnat_type);
5759 
5760   *align_clause = Present (Alignment_Clause (gnat_type));
5761 
5762   if (Is_Array_Type (gnat_type))
5763     {
5764       gnat_type = Underlying_Type (Component_Type (gnat_type));
5765       if (Present (Alignment_Clause (gnat_type)))
5766 	*align_clause = true;
5767     }
5768 
5769   if (!Is_Scalar_Type (gnat_type))
5770     return false;
5771 
5772   if (UI_To_Int (Esize (gnat_type)) < 64)
5773     return false;
5774 
5775   return true;
5776 }
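
/* A minimal usage sketch (hypothetical caller, not part of this file; the
   helper name and the unit convention of the caps are invented for
   illustration), showing how the two predicates above are meant to combine
   with the double_float_alignment and double_scalar_alignment caps:  */
#if 0
static unsigned int
capped_default_alignment (Entity_Id gnat_type, unsigned int align)
{
  bool align_clause;

  /* Only cap the alignment when no explicit alignment clause is given.  */
  if (double_float_alignment > 0
      && is_double_float_or_array (gnat_type, &align_clause)
      && !align_clause)
    align = MIN (align, (unsigned int) double_float_alignment);
  else if (double_scalar_alignment > 0
	   && is_double_scalar_or_array (gnat_type, &align_clause)
	   && !align_clause)
    align = MIN (align, (unsigned int) double_scalar_alignment);

  return align;
}
#endif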
5777 
5778 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5779    component of an aggregate type.  */
5780 
5781 bool
5782 type_for_nonaliased_component_p (tree gnu_type)
5783 {
5784   /* If the type is passed by reference, we may have pointers to the
5785      component so it cannot be made non-aliased. */
5786   if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5787     return false;
5788 
5789   /* We used to say that any component of aggregate type is aliased
5790      because the front-end may take 'Reference of it.  The front-end
5791      has been enhanced in the meantime so as to use a renaming instead
5792      in most cases, but the back-end can probably take the address of
5793      such a component too so we go for the conservative stance.
5794 
5795      For instance, we might need the address of any array type, even
5796      if normally passed by copy, to construct a fat pointer if the
5797      component is used as an actual for an unconstrained formal.
5798 
5799      Likewise for record types: even if a specific record subtype is
5800      passed by copy, the parent type might be passed by ref (e.g. if
5801      it's of variable size) and we might take the address of a child
5802      component to pass to a parent formal.  We have no way to check
5803      for such conditions here.  */
5804   if (AGGREGATE_TYPE_P (gnu_type))
5805     return false;
5806 
5807   return true;
5808 }
5809 
5810 /* Return true if TYPE is a smaller form of ORIG_TYPE.  */
5811 
5812 bool
5813 smaller_form_type_p (tree type, tree orig_type)
5814 {
5815   tree size, osize;
5816 
5817   /* We're not interested in variants here.  */
5818   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5819     return false;
5820 
5821   /* Like a variant, a packable version keeps the original TYPE_NAME.  */
5822   if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5823     return false;
5824 
5825   size = TYPE_SIZE (type);
5826   osize = TYPE_SIZE (orig_type);
5827 
5828   if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5829     return false;
5830 
5831   return tree_int_cst_lt (size, osize) != 0;
5832 }
5833 
5834 /* Return whether EXPR, which is the renamed object in an object renaming
5835    declaration, can be materialized as a reference (with a REFERENCE_TYPE).
5836    This should be synchronized with Exp_Dbug.Debug_Renaming_Declaration.  */
5837 
5838 bool
5839 can_materialize_object_renaming_p (Node_Id expr)
5840 {
5841   while (true)
5842     {
5843       expr = Original_Node (expr);
5844 
5845       switch (Nkind (expr))
5846 	{
5847 	case N_Identifier:
5848 	case N_Expanded_Name:
5849 	  if (!Present (Renamed_Object (Entity (expr))))
5850 	    return true;
5851 	  expr = Renamed_Object (Entity (expr));
5852 	  break;
5853 
5854 	case N_Selected_Component:
5855 	  {
5856 	    if (Is_Packed (Underlying_Type (Etype (Prefix (expr)))))
5857 	      return false;
5858 
5859 	    const Uint bitpos
5860 	      = Normalized_First_Bit (Entity (Selector_Name (expr)));
5861 	    if (!UI_Is_In_Int_Range (bitpos)
5862 		|| (bitpos != UI_No_Uint && bitpos != UI_From_Int (0)))
5863 	      return false;
5864 
5865 	    expr = Prefix (expr);
5866 	    break;
5867 	  }
5868 
5869 	case N_Indexed_Component:
5870 	case N_Slice:
5871 	  {
5872 	    const Entity_Id t = Underlying_Type (Etype (Prefix (expr)));
5873 
5874 	    if (Is_Array_Type (t) && Present (Packed_Array_Impl_Type (t)))
5875 	      return false;
5876 
5877 	    expr = Prefix (expr);
5878 	    break;
5879 	  }
5880 
5881 	case N_Explicit_Dereference:
5882 	  expr = Prefix (expr);
5883 	  break;
5884 
5885 	default:
5886 	  return true;
5887 	};
5888     }
5889 }
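
/* For example (illustrative): given

       X : Integer renames Packed_Rec.F;

   where F does not start on a byte boundary, the N_Selected_Component case
   above returns false and the renaming cannot be materialized as a simple
   reference.  */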
5890 
5891 /* Perform final processing on global declarations.  */
5892 
5893 static GTY (()) tree dummy_global;
5894 
5895 void
5896 gnat_write_global_declarations (void)
5897 {
5898   unsigned int i;
5899   tree iter;
5900 
5901   /* If we have declared types as used at the global level, insert them in
5902      the global hash table.  We use a dummy variable for this purpose, but
5903      we need to build it unconditionally to avoid -fcompare-debug issues.  */
5904   if (first_global_object_name)
5905     {
5906       struct varpool_node *node;
5907       char *label;
5908 
5909       ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, ULONG_MAX);
5910       dummy_global
5911 	= build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5912 		      void_type_node);
5913       DECL_HARD_REGISTER (dummy_global) = 1;
5914       TREE_STATIC (dummy_global) = 1;
5915       node = varpool_node::get_create (dummy_global);
5916       node->definition = 1;
5917       node->force_output = 1;
5918 
5919       if (types_used_by_cur_var_decl)
5920 	while (!types_used_by_cur_var_decl->is_empty ())
5921 	  {
5922 	    tree t = types_used_by_cur_var_decl->pop ();
5923 	    types_used_by_var_decl_insert (t, dummy_global);
5924 	  }
5925     }
5926 
5927   /* First output the integral global variables, so that they can be referenced
5928      as bounds by the global dynamic types.  Skip external variables, unless we
5929      really need to emit debug info for them, e.g. imported variables.  */
5930   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5931     if (TREE_CODE (iter) == VAR_DECL
5932 	&& INTEGRAL_TYPE_P (TREE_TYPE (iter))
5933 	&& (!DECL_EXTERNAL (iter) || !DECL_IGNORED_P (iter)))
5934       rest_of_decl_compilation (iter, true, 0);
5935 
5936   /* Now output debug information for the global type declarations.  This
5937      ensures that global types whose compilation hasn't been finalized yet,
5938      for example pointers to Taft amendment types, have their compilation
5939      finalized in the right context.  */
5940   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5941     if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5942       debug_hooks->type_decl (iter, false);
5943 
5944   /* Then output the other global variables.  We need to do that after the
5945      information for global types is emitted so that they are finalized.  */
5946   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5947     if (TREE_CODE (iter) == VAR_DECL
5948 	&& !INTEGRAL_TYPE_P (TREE_TYPE (iter))
5949 	&& (!DECL_EXTERNAL (iter) || !DECL_IGNORED_P (iter)))
5950       rest_of_decl_compilation (iter, true, 0);
5951 
5952   /* Output debug information for the global constants.  */
5953   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5954     if (TREE_CODE (iter) == CONST_DECL && !DECL_IGNORED_P (iter))
5955       debug_hooks->early_global_decl (iter);
5956 
5957   /* Output it for the imported functions.  */
5958   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5959     if (TREE_CODE (iter) == FUNCTION_DECL
5960 	&& DECL_EXTERNAL (iter)
5961 	&& DECL_INITIAL (iter) == NULL
5962 	&& !DECL_IGNORED_P (iter)
5963 	&& DECL_FUNCTION_IS_DEF (iter))
5964       debug_hooks->early_global_decl (iter);
5965 
5966   /* Output it for the imported modules/declarations.  In GNAT, these only
5967      materialize subprograms.  */
5968   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5969    if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
5970      debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
5971 					   DECL_CONTEXT (iter), false, false);
5972 }
5973 
5974 /* ************************************************************************
5975  * *                           GCC builtins support                       *
5976  * ************************************************************************ */
5977 
5978 /* The general scheme is fairly simple:
5979 
5980    For each builtin function/type to be declared, gnat_install_builtins calls
5981    internal facilities which eventually get to gnat_pushdecl, which in turn
5982    tracks the builtin function decls so declared in the 'builtin_decls' global
5983    data structure.  When an Intrinsic subprogram declaration is processed, we
5984    search this global data structure to retrieve the associated BUILT_IN DECL
5985    node.  */
5986 
5987 /* Search the chain of currently available builtin declarations for a node
5988    corresponding to function NAME (an IDENTIFIER_NODE).  Return the first node
5989    found, if any, or NULL_TREE otherwise.  */
5990 tree
5991 builtin_decl_for (tree name)
5992 {
5993   unsigned i;
5994   tree decl;
5995 
5996   FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5997     if (DECL_NAME (decl) == name)
5998       return decl;
5999 
6000   return NULL_TREE;
6001 }
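
/* A minimal usage sketch (hypothetical, not part of this file): retrieving
   the decl installed for __builtin_memcpy when binding an Intrinsic
   subprogram:  */
#if 0
static tree
intrinsic_memcpy_decl (void)
{
  /* get_identifier interns the name; builtin_decl_for then scans the
     builtin_decls vector for a decl with that DECL_NAME.  */
  return builtin_decl_for (get_identifier ("__builtin_memcpy"));
}
#endif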
6002 
6003 /* The code below eventually exposes gnat_install_builtins, which declares
6004    the builtin types and functions we might need, either internally or as
6005    user accessible facilities.
6006 
6007    ??? This is a first implementation shot, still in rough shape.  It is
6008    heavily inspired by the "C" family implementation, with chunks copied
6009    verbatim from there.
6010 
6011    Two obvious improvement candidates are:
6012    o Use a more efficient name/decl mapping scheme
6013    o Devise a middle-end infrastructure to avoid having to copy
6014      pieces between front-ends.  */
6015 
6016 /* ----------------------------------------------------------------------- *
6017  *                         BUILTIN ELEMENTARY TYPES                        *
6018  * ----------------------------------------------------------------------- */
6019 
6020 /* Standard data types to be used in builtin argument declarations.  */
6021 
6022 enum c_tree_index
6023 {
6024     CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
6025     CTI_STRING_TYPE,
6026     CTI_CONST_STRING_TYPE,
6027 
6028     CTI_MAX
6029 };
6030 
6031 static tree c_global_trees[CTI_MAX];
6032 
6033 #define signed_size_type_node	c_global_trees[CTI_SIGNED_SIZE_TYPE]
6034 #define string_type_node	c_global_trees[CTI_STRING_TYPE]
6035 #define const_string_type_node	c_global_trees[CTI_CONST_STRING_TYPE]
6036 
6037 /* ??? In addition to some attribute handlers, we currently don't support a
6038    (small) number of builtin-types, which in turn inhibits support for a
6039    number of builtin functions.  */
6040 #define wint_type_node    void_type_node
6041 #define intmax_type_node  void_type_node
6042 #define uintmax_type_node void_type_node
6043 
6044 /* Used to help initialize the builtin-types.def table.  When a type of
6045    the correct size doesn't exist, use error_mark_node instead of NULL.
6046    The latter results in segfaults even when a decl using the type doesn't
6047    get invoked.  */
6048 
6049 static tree
6050 builtin_type_for_size (int size, bool unsignedp)
6051 {
6052   tree type = gnat_type_for_size (size, unsignedp);
6053   return type ? type : error_mark_node;
6054 }
6055 
6056 /* Build/push the elementary type decls that builtin functions/types
6057    will need.  */
6058 
6059 static void
6060 install_builtin_elementary_types (void)
6061 {
6062   signed_size_type_node = gnat_signed_type_for (size_type_node);
6063   pid_type_node = integer_type_node;
6064 
6065   string_type_node = build_pointer_type (char_type_node);
6066   const_string_type_node
6067     = build_pointer_type (build_qualified_type
6068 			  (char_type_node, TYPE_QUAL_CONST));
6069 }
6070 
6071 /* ----------------------------------------------------------------------- *
6072  *                          BUILTIN FUNCTION TYPES                         *
6073  * ----------------------------------------------------------------------- */
6074 
6075 /* Now, builtin function types per se.  */
6076 
6077 enum c_builtin_type
6078 {
6079 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
6080 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
6081 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
6082 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
6083 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
6084 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
6085 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
6086 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6087 			    ARG6) NAME,
6088 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6089 			    ARG6, ARG7) NAME,
6090 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6091 			    ARG6, ARG7, ARG8) NAME,
6092 #define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6093 			    ARG6, ARG7, ARG8, ARG9) NAME,
6094 #define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6095 			     ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
6096 #define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6097 			     ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
6098 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
6099 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
6100 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
6101 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
6102 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
6103 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6104 				NAME,
6105 #define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6106 				ARG6) NAME,
6107 #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6108 				ARG6, ARG7) NAME,
6109 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
6110 #include "builtin-types.def"
6111 #include "ada-builtin-types.def"
6112 #undef DEF_PRIMITIVE_TYPE
6113 #undef DEF_FUNCTION_TYPE_0
6114 #undef DEF_FUNCTION_TYPE_1
6115 #undef DEF_FUNCTION_TYPE_2
6116 #undef DEF_FUNCTION_TYPE_3
6117 #undef DEF_FUNCTION_TYPE_4
6118 #undef DEF_FUNCTION_TYPE_5
6119 #undef DEF_FUNCTION_TYPE_6
6120 #undef DEF_FUNCTION_TYPE_7
6121 #undef DEF_FUNCTION_TYPE_8
6122 #undef DEF_FUNCTION_TYPE_9
6123 #undef DEF_FUNCTION_TYPE_10
6124 #undef DEF_FUNCTION_TYPE_11
6125 #undef DEF_FUNCTION_TYPE_VAR_0
6126 #undef DEF_FUNCTION_TYPE_VAR_1
6127 #undef DEF_FUNCTION_TYPE_VAR_2
6128 #undef DEF_FUNCTION_TYPE_VAR_3
6129 #undef DEF_FUNCTION_TYPE_VAR_4
6130 #undef DEF_FUNCTION_TYPE_VAR_5
6131 #undef DEF_FUNCTION_TYPE_VAR_6
6132 #undef DEF_FUNCTION_TYPE_VAR_7
6133 #undef DEF_POINTER_TYPE
6134   BT_LAST
6135 };
6136 
6137 typedef enum c_builtin_type builtin_type;
6138 
6139 /* A temporary array used in communication with def_fn_type.  */
6140 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
6141 
6142 /* A helper function for install_builtin_function_types.  Build the function
6143    type for DEF with return type RET and N arguments.  If VAR is true, then the
6144    function should be variadic after those N arguments.
6145 
6146    Takes special care not to ICE if any of the types involved are
6147    error_mark_node, which indicates that said type is not in fact available
6148    (see builtin_type_for_size).  In which case the function type as a whole
6149    should be error_mark_node.  */
6150 
6151 static void
6152 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
6153 {
6154   tree t;
6155   tree *args = XALLOCAVEC (tree, n);
6156   va_list list;
6157   int i;
6158 
6159   va_start (list, n);
6160   for (i = 0; i < n; ++i)
6161     {
6162       builtin_type a = (builtin_type) va_arg (list, int);
6163       t = builtin_types[a];
6164       if (t == error_mark_node)
6165 	goto egress;
6166       args[i] = t;
6167     }
6168 
6169   t = builtin_types[ret];
6170   if (t == error_mark_node)
6171     goto egress;
6172   if (var)
6173     t = build_varargs_function_type_array (t, n, args);
6174   else
6175     t = build_function_type_array (t, n, args);
6176 
6177  egress:
6178   builtin_types[def] = t;
6179   va_end (list);
6180 }
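
/* For instance (illustrative; the entry name is invented), a
   builtin-types.def entry of the form

       DEF_FUNCTION_TYPE_2 (BT_FN_INT_INT_INT, BT_INT, BT_INT, BT_INT)

   expands in install_builtin_function_types below to

       def_fn_type (BT_FN_INT_INT_INT, BT_INT, 0, 2, BT_INT, BT_INT);

   and records the C type "int (int, int)" in builtin_types.  */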
6181 
6182 /* Build the builtin function types and install them in the builtin_types
6183    array for later use in builtin function decls.  */
6184 
6185 static void
6186 install_builtin_function_types (void)
6187 {
6188   tree va_list_ref_type_node;
6189   tree va_list_arg_type_node;
6190 
6191   if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
6192     {
6193       va_list_arg_type_node = va_list_ref_type_node =
6194 	build_pointer_type (TREE_TYPE (va_list_type_node));
6195     }
6196   else
6197     {
6198       va_list_arg_type_node = va_list_type_node;
6199       va_list_ref_type_node = build_reference_type (va_list_type_node);
6200     }
6201 
6202 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
6203   builtin_types[ENUM] = VALUE;
6204 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
6205   def_fn_type (ENUM, RETURN, 0, 0);
6206 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
6207   def_fn_type (ENUM, RETURN, 0, 1, ARG1);
6208 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
6209   def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
6210 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6211   def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
6212 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6213   def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
6214 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5)	\
6215   def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6216 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6217 			    ARG6)					\
6218   def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
6219 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6220 			    ARG6, ARG7)					\
6221   def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
6222 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6223 			    ARG6, ARG7, ARG8)				\
6224   def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6,	\
6225 	       ARG7, ARG8);
6226 #define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6227 			    ARG6, ARG7, ARG8, ARG9)			\
6228   def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6,	\
6229 	       ARG7, ARG8, ARG9);
6230 #define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
6231 			     ARG6, ARG7, ARG8, ARG9, ARG10)		\
6232   def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6,	\
6233 	       ARG7, ARG8, ARG9, ARG10);
6234 #define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
6235 			     ARG6, ARG7, ARG8, ARG9, ARG10, ARG11)	\
6236   def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6,	\
6237 	       ARG7, ARG8, ARG9, ARG10, ARG11);
6238 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
6239   def_fn_type (ENUM, RETURN, 1, 0);
6240 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
6241   def_fn_type (ENUM, RETURN, 1, 1, ARG1);
6242 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
6243   def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
6244 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6245   def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
6246 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6247   def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
6248 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6249   def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6250 #define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6251 				ARG6)				\
6252   def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
6253 #define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6254 				ARG6, ARG7)				\
6255   def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
6256 #define DEF_POINTER_TYPE(ENUM, TYPE) \
6257   builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
6258 
6259 #include "builtin-types.def"
6260 #include "ada-builtin-types.def"
6261 
6262 #undef DEF_PRIMITIVE_TYPE
6263 #undef DEF_FUNCTION_TYPE_0
6264 #undef DEF_FUNCTION_TYPE_1
6265 #undef DEF_FUNCTION_TYPE_2
6266 #undef DEF_FUNCTION_TYPE_3
6267 #undef DEF_FUNCTION_TYPE_4
6268 #undef DEF_FUNCTION_TYPE_5
6269 #undef DEF_FUNCTION_TYPE_6
6270 #undef DEF_FUNCTION_TYPE_7
6271 #undef DEF_FUNCTION_TYPE_8
6272 #undef DEF_FUNCTION_TYPE_9
6273 #undef DEF_FUNCTION_TYPE_10
6274 #undef DEF_FUNCTION_TYPE_11
6275 #undef DEF_FUNCTION_TYPE_VAR_0
6276 #undef DEF_FUNCTION_TYPE_VAR_1
6277 #undef DEF_FUNCTION_TYPE_VAR_2
6278 #undef DEF_FUNCTION_TYPE_VAR_3
6279 #undef DEF_FUNCTION_TYPE_VAR_4
6280 #undef DEF_FUNCTION_TYPE_VAR_5
6281 #undef DEF_FUNCTION_TYPE_VAR_6
6282 #undef DEF_FUNCTION_TYPE_VAR_7
6283 #undef DEF_POINTER_TYPE
6284   builtin_types[(int) BT_LAST] = NULL_TREE;
6285 }
6286 
6287 /* ----------------------------------------------------------------------- *
6288  *                            BUILTIN ATTRIBUTES                           *
6289  * ----------------------------------------------------------------------- */
6290 
6291 enum built_in_attribute
6292 {
6293 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
6294 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
6295 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
6296 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
6297 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
6298 #include "builtin-attrs.def"
6299 #undef DEF_ATTR_NULL_TREE
6300 #undef DEF_ATTR_INT
6301 #undef DEF_ATTR_STRING
6302 #undef DEF_ATTR_IDENT
6303 #undef DEF_ATTR_TREE_LIST
6304   ATTR_LAST
6305 };
6306 
6307 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
6308 
6309 static void
6310 install_builtin_attributes (void)
6311 {
6312   /* Fill in the built_in_attributes array.  */
6313 #define DEF_ATTR_NULL_TREE(ENUM)				\
6314   built_in_attributes[(int) ENUM] = NULL_TREE;
6315 #define DEF_ATTR_INT(ENUM, VALUE)				\
6316   built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
6317 #define DEF_ATTR_STRING(ENUM, VALUE)				\
6318   built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
6319 #define DEF_ATTR_IDENT(ENUM, STRING)				\
6320   built_in_attributes[(int) ENUM] = get_identifier (STRING);
6321 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
6322   built_in_attributes[(int) ENUM]			\
6323     = tree_cons (built_in_attributes[(int) PURPOSE],	\
6324 		 built_in_attributes[(int) VALUE],	\
6325 		 built_in_attributes[(int) CHAIN]);
6326 #include "builtin-attrs.def"
6327 #undef DEF_ATTR_NULL_TREE
6328 #undef DEF_ATTR_INT
6329 #undef DEF_ATTR_STRING
6330 #undef DEF_ATTR_IDENT
6331 #undef DEF_ATTR_TREE_LIST
6332 }
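
/* For example (illustrative): a builtin-attrs.def entry such as

       DEF_ATTR_TREE_LIST (ATTR_NOTHROW_LIST, ATTR_NOTHROW, ATTR_NULL, \
			   ATTR_NULL)

   builds a single-element attribute list ("nothrow"), later attached to
   no-throw builtins through the ATTRS column of builtins.def.  */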
6333 
6334 /* Handle a "const" attribute; arguments as in
6335    struct attribute_spec.handler.  */
6336 
6337 static tree
6338 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
6339 			tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6340 			bool *no_add_attrs)
6341 {
6342   if (TREE_CODE (*node) == FUNCTION_DECL)
6343     TREE_READONLY (*node) = 1;
6344   else
6345     *no_add_attrs = true;
6346 
6347   return NULL_TREE;
6348 }
6349 
6350 /* Handle a "nothrow" attribute; arguments as in
6351    struct attribute_spec.handler.  */
6352 
6353 static tree
6354 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
6355 			  tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6356 			  bool *no_add_attrs)
6357 {
6358   if (TREE_CODE (*node) == FUNCTION_DECL)
6359     TREE_NOTHROW (*node) = 1;
6360   else
6361     *no_add_attrs = true;
6362 
6363   return NULL_TREE;
6364 }
6365 
6366 /* Handle a "pure" attribute; arguments as in
6367    struct attribute_spec.handler.  */
6368 
6369 static tree
6370 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6371 		       int ARG_UNUSED (flags), bool *no_add_attrs)
6372 {
6373   if (TREE_CODE (*node) == FUNCTION_DECL)
6374     DECL_PURE_P (*node) = 1;
6375   /* TODO: support types.  */
6376   else
6377     {
6378       warning (OPT_Wattributes, "%qs attribute ignored",
6379 	       IDENTIFIER_POINTER (name));
6380       *no_add_attrs = true;
6381     }
6382 
6383   return NULL_TREE;
6384 }
6385 
6386 /* Handle a "no vops" attribute; arguments as in
6387    struct attribute_spec.handler.  */
6388 
6389 static tree
6390 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
6391 			 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6392 			 bool *ARG_UNUSED (no_add_attrs))
6393 {
6394   gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
6395   DECL_IS_NOVOPS (*node) = 1;
6396   return NULL_TREE;
6397 }
6398 
6399 /* Helper for nonnull attribute handling; fetch the operand number
6400    from the attribute argument list.  */
6401 
6402 static bool
6403 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6404 {
6405   /* Verify the arg number is a constant.  */
6406   if (!tree_fits_uhwi_p (arg_num_expr))
6407     return false;
6408 
6409   *valp = TREE_INT_CST_LOW (arg_num_expr);
6410   return true;
6411 }
6412 
6413 /* Handle the "nonnull" attribute.  */
6414 static tree
6415 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6416 			  tree args, int ARG_UNUSED (flags),
6417 			  bool *no_add_attrs)
6418 {
6419   tree type = *node;
6420   unsigned HOST_WIDE_INT attr_arg_num;
6421 
6422   /* If no arguments are specified, all pointer arguments should be
6423      non-null.  Verify a full prototype is given so that the arguments
6424      will have the correct types when we actually check them later.
6425      Avoid diagnosing type-generic built-ins since those have no
6426      prototype.  */
6427   if (!args)
6428     {
6429       if (!prototype_p (type)
6430 	  && (!TYPE_ATTRIBUTES (type)
6431 	      || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type))))
6432 	{
6433 	  error ("%qs attribute without arguments on a non-prototype",
6434 		 "nonnull");
6435 	  *no_add_attrs = true;
6436 	}
6437       return NULL_TREE;
6438     }
6439 
6440   /* Argument list specified.  Verify that each argument number references
6441      a pointer argument.  */
6442   for (attr_arg_num = 1; args; args = TREE_CHAIN (args), attr_arg_num++)
6443     {
6444       unsigned HOST_WIDE_INT arg_num = 0, ck_num;
6445 
6446       if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6447 	{
6448 	  error ("%qs argument has invalid operand number (argument %lu)",
6449 		 "nonnull", (unsigned long) attr_arg_num);
6450 	  *no_add_attrs = true;
6451 	  return NULL_TREE;
6452 	}
6453 
6454       if (prototype_p (type))
6455 	{
6456 	  function_args_iterator iter;
6457 	  tree argument;
6458 
6459 	  function_args_iter_init (&iter, type);
6460 	  for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
6461 	    {
6462 	      argument = function_args_iter_cond (&iter);
6463 	      if (!argument || ck_num == arg_num)
6464 		break;
6465 	    }
6466 
6467 	  if (!argument
6468 	      || TREE_CODE (argument) == VOID_TYPE)
6469 	    {
6470 	      error ("%qs argument with out-of-range operand number "
6471 		     "(argument %lu, operand %lu)", "nonnull",
6472 		     (unsigned long) attr_arg_num, (unsigned long) arg_num);
6473 	      *no_add_attrs = true;
6474 	      return NULL_TREE;
6475 	    }
6476 
6477 	  if (TREE_CODE (argument) != POINTER_TYPE)
6478 	    {
6479 	      error ("%qs argument references non-pointer operand "
6480 		     "(argument %lu, operand %lu)", "nonnull",
6481 		   (unsigned long) attr_arg_num, (unsigned long) arg_num);
6482 	      *no_add_attrs = true;
6483 	      return NULL_TREE;
6484 	    }
6485 	}
6486     }
6487 
6488   return NULL_TREE;
6489 }
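
/* An illustrative (hypothetical) declaration the handler above would
   validate: both operand numbers must reference pointer arguments.  */
#if 0
extern void *my_copy (void *dst, const void *src, __SIZE_TYPE__ n)
  __attribute__ ((nonnull (1, 2)));
#endif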
6490 
6491 /* Handle a "sentinel" attribute.  */
6492 
6493 static tree
6494 handle_sentinel_attribute (tree *node, tree name, tree args,
6495 			   int ARG_UNUSED (flags), bool *no_add_attrs)
6496 {
6497   if (!prototype_p (*node))
6498     {
6499       warning (OPT_Wattributes,
6500 	       "%qs attribute requires prototypes with named arguments",
6501 	       IDENTIFIER_POINTER (name));
6502       *no_add_attrs = true;
6503     }
6504   else
6505     {
6506       if (!stdarg_p (*node))
6507         {
6508 	  warning (OPT_Wattributes,
6509 		   "%qs attribute only applies to variadic functions",
6510 		   IDENTIFIER_POINTER (name));
6511 	  *no_add_attrs = true;
6512 	}
6513     }
6514 
6515   if (args)
6516     {
6517       tree position = TREE_VALUE (args);
6518 
6519       if (TREE_CODE (position) != INTEGER_CST)
6520         {
6521 	  warning (0, "requested position is not an integer constant");
6522 	  *no_add_attrs = true;
6523 	}
6524       else
6525         {
6526 	  if (tree_int_cst_lt (position, integer_zero_node))
6527 	    {
6528 	      warning (0, "requested position is less than zero");
6529 	      *no_add_attrs = true;
6530 	    }
6531 	}
6532     }
6533 
6534   return NULL_TREE;
6535 }
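
/* An illustrative (hypothetical) declaration the handler above would
   accept: a variadic prototype whose last argument must be a null pointer,
   position 0 being counted from the end of the argument list.  */
#if 0
extern void my_execl (const char *path, const char *arg, ...)
  __attribute__ ((sentinel (0)));
#endif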
6536 
6537 /* Handle a "noreturn" attribute; arguments as in
6538    struct attribute_spec.handler.  */
6539 
6540 static tree
6541 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6542 			   int ARG_UNUSED (flags), bool *no_add_attrs)
6543 {
6544   tree type = TREE_TYPE (*node);
6545 
6546   /* See FIXME comment in c_common_attribute_table.  */
6547   if (TREE_CODE (*node) == FUNCTION_DECL)
6548     TREE_THIS_VOLATILE (*node) = 1;
6549   else if (TREE_CODE (type) == POINTER_TYPE
6550 	   && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6551     TREE_TYPE (*node)
6552       = build_pointer_type
6553 	(change_qualified_type (TREE_TYPE (type), TYPE_QUAL_VOLATILE));
6554   else
6555     {
6556       warning (OPT_Wattributes, "%qs attribute ignored",
6557 	       IDENTIFIER_POINTER (name));
6558       *no_add_attrs = true;
6559     }
6560 
6561   return NULL_TREE;
6562 }
6563 
6564 /* Handle a "stack_protect" attribute; arguments as in
6565    struct attribute_spec.handler.  */
6566 
6567 static tree
6568 handle_stack_protect_attribute (tree *node, tree name, tree, int,
6569 				bool *no_add_attrs)
6570 {
6571   if (TREE_CODE (*node) != FUNCTION_DECL)
6572     {
6573       warning (OPT_Wattributes, "%qE attribute ignored", name);
6574       *no_add_attrs = true;
6575     }
6576 
6577   return NULL_TREE;
6578 }
6579 
6580 /* Handle a "no_stack_protector" attribute; arguments as in
6581    struct attribute_spec.handler.  */
6582 
6583 static tree
6584 handle_no_stack_protector_attribute (tree *node, tree name, tree, int,
6585 				   bool *no_add_attrs)
6586 {
6587   if (TREE_CODE (*node) != FUNCTION_DECL)
6588     {
6589       warning (OPT_Wattributes, "%qE attribute ignored", name);
6590       *no_add_attrs = true;
6591     }
6592 
6593   return NULL_TREE;
6594 }
6595 
6596 
6597 /* Handle a "noinline" attribute; arguments as in
6598    struct attribute_spec.handler.  */
6599 
6600 static tree
6601 handle_noinline_attribute (tree *node, tree name,
6602 			   tree ARG_UNUSED (args),
6603 			   int ARG_UNUSED (flags), bool *no_add_attrs)
6604 {
6605   if (TREE_CODE (*node) == FUNCTION_DECL)
6606     {
6607       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6608 	{
6609 	  warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6610 		   "with attribute %qs", name, "always_inline");
6611 	  *no_add_attrs = true;
6612 	}
6613       else
6614 	DECL_UNINLINABLE (*node) = 1;
6615     }
6616   else
6617     {
6618       warning (OPT_Wattributes, "%qE attribute ignored", name);
6619       *no_add_attrs = true;
6620     }
6621 
6622   return NULL_TREE;
6623 }
6624 
6625 /* Handle a "noclone" attribute; arguments as in
6626    struct attribute_spec.handler.  */
6627 
6628 static tree
6629 handle_noclone_attribute (tree *node, tree name,
6630 			  tree ARG_UNUSED (args),
6631 			  int ARG_UNUSED (flags), bool *no_add_attrs)
6632 {
6633   if (TREE_CODE (*node) != FUNCTION_DECL)
6634     {
6635       warning (OPT_Wattributes, "%qE attribute ignored", name);
6636       *no_add_attrs = true;
6637     }
6638 
6639   return NULL_TREE;
6640 }
6641 
6642 /* Handle a "no_icf" attribute; arguments as in
6643    struct attribute_spec.handler.  */
6644 
6645 static tree
6646 handle_noicf_attribute (tree *node, tree name,
6647 			tree ARG_UNUSED (args),
6648 			int ARG_UNUSED (flags), bool *no_add_attrs)
6649 {
6650   if (TREE_CODE (*node) != FUNCTION_DECL)
6651     {
6652       warning (OPT_Wattributes, "%qE attribute ignored", name);
6653       *no_add_attrs = true;
6654     }
6655 
6656   return NULL_TREE;
6657 }
6658 
6659 /* Handle a "noipa" attribute; arguments as in
6660    struct attribute_spec.handler.  */
6661 
6662 static tree
6663 handle_noipa_attribute (tree *node, tree name, tree, int, bool *no_add_attrs)
6664 {
6665   if (TREE_CODE (*node) != FUNCTION_DECL)
6666     {
6667       warning (OPT_Wattributes, "%qE attribute ignored", name);
6668       *no_add_attrs = true;
6669     }
6670 
6671   return NULL_TREE;
6672 }
6673 
6674 /* Handle a "leaf" attribute; arguments as in
6675    struct attribute_spec.handler.  */
6676 
6677 static tree
6678 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6679 		       int ARG_UNUSED (flags), bool *no_add_attrs)
6680 {
6681   if (TREE_CODE (*node) != FUNCTION_DECL)
6682     {
6683       warning (OPT_Wattributes, "%qE attribute ignored", name);
6684       *no_add_attrs = true;
6685     }
6686   if (!TREE_PUBLIC (*node))
6687     {
6688       warning (OPT_Wattributes, "%qE attribute has no effect", name);
6689       *no_add_attrs = true;
6690     }
6691 
6692   return NULL_TREE;
6693 }
6694 
6695 /* Handle an "always_inline" attribute; arguments as in
6696    struct attribute_spec.handler.  */
6697 
6698 static tree
6699 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6700 				int ARG_UNUSED (flags), bool *no_add_attrs)
6701 {
6702   if (TREE_CODE (*node) == FUNCTION_DECL)
6703     {
6704       /* Set the attribute and mark it for disregarding inline limits.  */
6705       DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
6706     }
6707   else
6708     {
6709       warning (OPT_Wattributes, "%qE attribute ignored", name);
6710       *no_add_attrs = true;
6711     }
6712 
6713   return NULL_TREE;
6714 }
6715 
6716 /* Handle a "malloc" attribute; arguments as in
6717    struct attribute_spec.handler.  */
6718 
6719 static tree
6720 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6721 			 int ARG_UNUSED (flags), bool *no_add_attrs)
6722 {
6723   if (TREE_CODE (*node) == FUNCTION_DECL
6724       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6725     DECL_IS_MALLOC (*node) = 1;
6726   else
6727     {
6728       warning (OPT_Wattributes, "%qs attribute ignored",
6729 	       IDENTIFIER_POINTER (name));
6730       *no_add_attrs = true;
6731     }
6732 
6733   return NULL_TREE;
6734 }
6735 
6736 /* Fake handler for attributes we don't properly support.  */
6737 
6738 tree
6739 fake_attribute_handler (tree * ARG_UNUSED (node),
6740 			tree ARG_UNUSED (name),
6741 			tree ARG_UNUSED (args),
6742 			int  ARG_UNUSED (flags),
6743 			bool * ARG_UNUSED (no_add_attrs))
6744 {
6745   return NULL_TREE;
6746 }
6747 
6748 /* Handle a "type_generic" attribute.  */
6749 
6750 static tree
6751 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6752 			       tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6753 			       bool * ARG_UNUSED (no_add_attrs))
6754 {
6755   /* Ensure we have a function type.  */
6756   gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
6757 
6758   /* Ensure we have a variadic function.  */
6759   gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6760 
6761   return NULL_TREE;
6762 }
6763 
6764 /* Handle a "flatten" attribute; arguments as in
6765    struct attribute_spec.handler.  */
6766 
6767 static tree
6768 handle_flatten_attribute (tree *node, tree name,
6769 			  tree args ATTRIBUTE_UNUSED,
6770 			  int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
6771 {
6772   if (TREE_CODE (*node) == FUNCTION_DECL)
6773     /* Do nothing else, just set the attribute.  We'll get at
6774        it later with lookup_attribute.  */
6775     ;
6776   else
6777     {
6778       warning (OPT_Wattributes, "%qE attribute ignored", name);
6779       *no_add_attrs = true;
6780     }
6781 
6782   return NULL_TREE;
6783 }
6784 
6785 /* Handle a "used" attribute; arguments as in
6786    struct attribute_spec.handler.  */
6787 
6788 static tree
6789 handle_used_attribute (tree *pnode, tree name, tree ARG_UNUSED (args),
6790 		       int ARG_UNUSED (flags), bool *no_add_attrs)
6791 {
6792   tree node = *pnode;
6793 
6794   if (TREE_CODE (node) == FUNCTION_DECL
6795       || (VAR_P (node) && TREE_STATIC (node))
6796       || (TREE_CODE (node) == TYPE_DECL))
6797     {
6798       TREE_USED (node) = 1;
6799       DECL_PRESERVE_P (node) = 1;
6800       if (VAR_P (node))
6801 	DECL_READ_P (node) = 1;
6802     }
6803   else
6804     {
6805       warning (OPT_Wattributes, "%qE attribute ignored", name);
6806       *no_add_attrs = true;
6807     }
6808 
6809   return NULL_TREE;
6810 }
6811 
6812 /* Handle a "cold" attribute; arguments as in
6813    struct attribute_spec.handler.  */
6814 
6815 static tree
6816 handle_cold_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6817 		       int ARG_UNUSED (flags), bool *no_add_attrs)
6818 {
6819   if (TREE_CODE (*node) == FUNCTION_DECL
6820       || TREE_CODE (*node) == LABEL_DECL)
6821     {
6822       /* Attribute cold processing is done later with lookup_attribute.  */
6823     }
6824   else
6825     {
6826       warning (OPT_Wattributes, "%qE attribute ignored", name);
6827       *no_add_attrs = true;
6828     }
6829 
6830   return NULL_TREE;
6831 }
6832 
6833 /* Handle a "hot" attribute; arguments as in
6834    struct attribute_spec.handler.  */
6835 
6836 static tree
6837 handle_hot_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6838 		      int ARG_UNUSED (flags), bool *no_add_attrs)
6839 {
6840   if (TREE_CODE (*node) == FUNCTION_DECL
6841       || TREE_CODE (*node) == LABEL_DECL)
6842     {
6843       /* Attribute hot processing is done later with lookup_attribute.  */
6844     }
6845   else
6846     {
6847       warning (OPT_Wattributes, "%qE attribute ignored", name);
6848       *no_add_attrs = true;
6849     }
6850 
6851   return NULL_TREE;
6852 }
6853 
6854 /* Handle a "target" attribute.  */
6855 
6856 static tree
6857 handle_target_attribute (tree *node, tree name, tree args, int flags,
6858 			 bool *no_add_attrs)
6859 {
6860   /* Ensure we have a function declaration.  */
6861   if (TREE_CODE (*node) != FUNCTION_DECL)
6862     {
6863       warning (OPT_Wattributes, "%qE attribute ignored", name);
6864       *no_add_attrs = true;
6865     }
6866   else if (lookup_attribute ("target_clones", DECL_ATTRIBUTES (*node)))
6867     {
6868       warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6869 		   "with %qs attribute", name, "target_clones");
6870       *no_add_attrs = true;
6871     }
6872   else if (!targetm.target_option.valid_attribute_p (*node, name, args, flags))
6873     *no_add_attrs = true;
6874 
6875   /* Check that there's no empty string in values of the attribute.  */
6876   for (tree t = args; t != NULL_TREE; t = TREE_CHAIN (t))
6877     {
6878       tree value = TREE_VALUE (t);
6879       if (TREE_CODE (value) == STRING_CST
6880 	  && TREE_STRING_LENGTH (value) == 1
6881 	  && TREE_STRING_POINTER (value)[0] == '\0')
6882 	{
6883 	  warning (OPT_Wattributes, "empty string in attribute %<target%>");
6884 	  *no_add_attrs = true;
6885 	}
6886     }
6887 
6888   return NULL_TREE;
6889 }
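
/* An illustrative (hypothetical) declaration the empty-string check above
   rejects, since the STRING_CST "" has length 1 (the terminating NUL):  */
#if 0
extern void bad_target (void) __attribute__ ((target ("")));
#endif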
6890 
6891 /* Handle a "target_clones" attribute.  */
6892 
6893 static tree
6894 handle_target_clones_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6895 			  int ARG_UNUSED (flags), bool *no_add_attrs)
6896 {
6897   /* Ensure we have a function declaration.  */
6898   if (TREE_CODE (*node) == FUNCTION_DECL)
6899     {
6900       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6901 	{
6902 	  warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6903 		   "with %qs attribute", name, "always_inline");
6904 	  *no_add_attrs = true;
6905 	}
6906       else if (lookup_attribute ("target", DECL_ATTRIBUTES (*node)))
6907 	{
6908 	  warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6909 		   "with %qs attribute", name, "target");
6910 	  *no_add_attrs = true;
6911 	}
6912       else
6913 	/* Do not inline functions with multiple clone targets.  */
6914 	DECL_UNINLINABLE (*node) = 1;
6915     }
6916   else
6917     {
6918       warning (OPT_Wattributes, "%qE attribute ignored", name);
6919       *no_add_attrs = true;
6920     }
6921   return NULL_TREE;
6922 }
6923 
6924 /* Handle a "vector_size" attribute; arguments as in
6925    struct attribute_spec.handler.  */
6926 
6927 static tree
6928 handle_vector_size_attribute (tree *node, tree name, tree args,
6929 			      int ARG_UNUSED (flags), bool *no_add_attrs)
6930 {
6931   tree type = *node;
6932   tree vector_type;
6933 
6934   *no_add_attrs = true;
6935 
6936   /* We need to provide for vector pointers, vector arrays, and
6937      functions returning vectors.  For example:
6938 
6939        __attribute__((vector_size(16))) short *foo;
6940 
6941      In this case, the mode is SI, but the type being modified is
6942      HI, so we need to look further.  */
6943   while (POINTER_TYPE_P (type)
6944 	 || TREE_CODE (type) == FUNCTION_TYPE
6945 	 || TREE_CODE (type) == ARRAY_TYPE)
6946     type = TREE_TYPE (type);
6947 
6948   vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6949   if (!vector_type)
6950     return NULL_TREE;
6951 
6952   /* Build back pointers if needed.  */
6953   *node = reconstruct_complex_type (*node, vector_type);
6954 
6955   return NULL_TREE;
6956 }
6957 
6958 /* Handle a "vector_type" attribute; arguments as in
6959    struct attribute_spec.handler.  */
6960 
6961 static tree
6962 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6963 			      int ARG_UNUSED (flags), bool *no_add_attrs)
6964 {
6965   tree type = *node;
6966   tree vector_type;
6967 
6968   *no_add_attrs = true;
6969 
6970   if (TREE_CODE (type) != ARRAY_TYPE)
6971     {
6972       error ("attribute %qs applies to array types only",
6973 	     IDENTIFIER_POINTER (name));
6974       return NULL_TREE;
6975     }
6976 
6977   vector_type = build_vector_type_for_array (type, name);
6978   if (!vector_type)
6979     return NULL_TREE;
6980 
6981   TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6982   *node = vector_type;
6983 
6984   return NULL_TREE;
6985 }
6986 
6987 /* ----------------------------------------------------------------------- *
6988  *                              BUILTIN FUNCTIONS                          *
6989  * ----------------------------------------------------------------------- */
6990 
6991 /* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
6992    names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
6993    if nonansi_p and flag_no_nonansi_builtin.  */
6994 
6995 static void
6996 def_builtin_1 (enum built_in_function fncode,
6997 	       const char *name,
6998 	       enum built_in_class fnclass,
6999 	       tree fntype, tree libtype,
7000 	       bool both_p, bool fallback_p,
7001 	       bool nonansi_p ATTRIBUTE_UNUSED,
7002 	       tree fnattrs, bool implicit_p)
7003 {
7004   tree decl;
7005   const char *libname;
7006 
7007   /* Preserve an already installed decl.  It most likely was set up in advance
7008      (e.g. as part of the internal builtins) for specific reasons.  */
7009   if (builtin_decl_explicit (fncode))
7010     return;
7011 
7012   if (fntype == error_mark_node)
7013     return;
7014 
7015   gcc_assert ((!both_p && !fallback_p)
7016 	      || !strncmp (name, "__builtin_",
7017 			   strlen ("__builtin_")));
7018 
7019   libname = name + strlen ("__builtin_");
7020   decl = add_builtin_function (name, fntype, fncode, fnclass,
7021 			       (fallback_p ? libname : NULL),
7022 			       fnattrs);
7023   if (both_p)
7024     /* ??? This is normally further controlled by command-line options
7025        like -fno-builtin, but we don't have them for Ada.  */
7026     add_builtin_function (libname, libtype, fncode, fnclass,
7027 			  NULL, fnattrs);
7028 
7029   set_builtin_decl (fncode, decl, implicit_p);
7030 }
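
/* For instance (illustrative): with BOTH_P set, an entry for
   "__builtin_memcpy" installs two decls sharing the same function code:
   "__builtin_memcpy" with FNTYPE and the fallback "memcpy" with LIBTYPE,
   the latter name being obtained by stripping the "__builtin_" prefix.  */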
7031 
7032 static int flag_isoc94 = 0;
7033 static int flag_isoc99 = 0;
7034 static int flag_isoc11 = 0;
7035 static int flag_isoc2x = 0;
7036 
7037 /* Install what the common builtins.def offers plus our local additions.
7038 
7039    Note that ada-builtins.def is included first so that locally redefined
7040    built-in functions take precedence over the commonly defined ones.  */
7041 
7042 static void
7043 install_builtin_functions (void)
7044 {
7045 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
7046 		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
7047   if (NAME && COND)							\
7048     def_builtin_1 (ENUM, NAME, CLASS,                                   \
7049                    builtin_types[(int) TYPE],                           \
7050                    builtin_types[(int) LIBTYPE],                        \
7051                    BOTH_P, FALLBACK_P, NONANSI_P,                       \
7052                    built_in_attributes[(int) ATTRS], IMPLICIT);
7053 #define DEF_ADA_BUILTIN(ENUM, NAME, TYPE, ATTRS)		\
7054   DEF_BUILTIN (ENUM, "__builtin_" NAME, BUILT_IN_FRONTEND, TYPE, BT_LAST, \
7055 	       false, false, false, ATTRS, true, true)
7056 #include "ada-builtins.def"
7057 #include "builtins.def"
7058 }
7059 
7060 /* ----------------------------------------------------------------------- *
7061  *                              BUILTIN FUNCTIONS                          *
7062  * ----------------------------------------------------------------------- */
7063 
7064 /* Install the builtin functions we might need.  */
7065 
7066 void
7067 gnat_install_builtins (void)
7068 {
7069   install_builtin_elementary_types ();
7070   install_builtin_function_types ();
7071   install_builtin_attributes ();
7072 
7073   /* Install builtins used by generic middle-end pieces first.  Some of these
7074      know about internal specificities and control attributes accordingly, for
7075      instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
7076      the generic definition from builtins.def.  */
7077   build_common_builtin_nodes ();
7078 
7079   /* Now, install the target specific builtins, such as the AltiVec family on
7080      ppc, and the common set as exposed by builtins.def.  */
7081   targetm.init_builtins ();
7082   install_builtin_functions ();
7083 }
7084 
7085 #include "gt-ada-utils.h"
7086 #include "gtype-ada.h"
7087