/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2019, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have received a copy of the GNU General   *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "attribs.h"
#include "varasm.h"
#include "toplev.h"
#include "output.h"
#include "debug.h"
#include "convert.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "tree-dump.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "nlists.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating-
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater than or equal to 64 bits, or 0 if this
   alignment is not specifically capped.  */
int double_scalar_alignment;

/* True if floating-point arithmetic may use wider intermediate results.  */
bool fp_arith_may_widen = true;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_stack_protect_attribute (tree *, tree, tree, int, bool *);
static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
static tree handle_noicf_attribute (tree *, tree, tree, int, bool *);
static tree handle_noipa_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_flatten_attribute (tree *, tree, tree, int, bool *);
static tree handle_used_attribute (tree *, tree, tree, int, bool *);
static tree handle_cold_attribute (tree *, tree, tree, int, bool *);
static tree handle_hot_attribute (tree *, tree, tree, int, bool *);
static tree handle_target_attribute (tree *, tree, tree, int, bool *);
static tree handle_target_clones_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

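/* Each entry below follows the layout of struct attribute_spec::exclusions,
   i.e. { name, function, variable, type }: the three booleans say whether
   the exclusion applies to the attribute when placed on a function, on a
   variable, or on a type.  */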
static const struct attribute_spec::exclusions attr_cold_hot_exclusions[] =
{
  { "cold", true,  true,  true  },
  { "hot" , true,  true,  true  },
  { NULL  , false, false, false }
};

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging in a lot of the common C front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
       affects_type_identity, handler, exclude } */
  { "const",        0, 0,  true,  false, false, false,
    handle_const_attribute, NULL },
  { "nothrow",      0, 0,  true,  false, false, false,
    handle_nothrow_attribute, NULL },
  { "pure",         0, 0,  true,  false, false, false,
    handle_pure_attribute, NULL },
  { "no vops",      0, 0,  true,  false, false, false,
    handle_novops_attribute, NULL },
  { "nonnull",      0, -1, false, true,  true,  false,
    handle_nonnull_attribute, NULL },
  { "sentinel",     0, 1,  false, true,  true,  false,
    handle_sentinel_attribute, NULL },
  { "noreturn",     0, 0,  true,  false, false, false,
    handle_noreturn_attribute, NULL },
  { "stack_protect",0, 0,  true,  false, false, false,
    handle_stack_protect_attribute, NULL },
  { "noinline",     0, 0,  true,  false, false, false,
    handle_noinline_attribute, NULL },
  { "noclone",      0, 0,  true,  false, false, false,
    handle_noclone_attribute, NULL },
  { "no_icf",       0, 0,  true,  false, false, false,
    handle_noicf_attribute, NULL },
  { "noipa",        0, 0,  true,  false, false, false,
    handle_noipa_attribute, NULL },
  { "leaf",         0, 0,  true,  false, false, false,
    handle_leaf_attribute, NULL },
  { "always_inline",0, 0,  true,  false, false, false,
    handle_always_inline_attribute, NULL },
  { "malloc",       0, 0,  true,  false, false, false,
    handle_malloc_attribute, NULL },
  { "type generic", 0, 0,  false, true,  true,  false,
    handle_type_generic_attribute, NULL },

  { "flatten",      0, 0,  true,  false, false, false,
    handle_flatten_attribute, NULL },
  { "used",         0, 0,  true,  false, false, false,
    handle_used_attribute, NULL },
  { "cold",         0, 0,  true,  false, false, false,
    handle_cold_attribute, attr_cold_hot_exclusions },
  { "hot",          0, 0,  true,  false, false, false,
    handle_hot_attribute, attr_cold_hot_exclusions },
  { "target",       1, -1, true,  false, false, false,
    handle_target_attribute, NULL },
  { "target_clones",1, -1, true,  false, false, false,
    handle_target_clones_attribute, NULL },

  { "vector_size",  1, 1,  false, true,  false, false,
    handle_vector_size_attribute, NULL },
  { "vector_type",  0, 0,  false, true,  false, false,
    handle_vector_type_attribute, NULL },
  { "may_alias",    0, 0,  false, true,  false, false,
    NULL, NULL },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we nevertheless declare as
     part of the common builtins.def contents.  */
  { "format",       3, 3,  false, true,  true,  false,
    fake_attribute_handler, NULL },
  { "format_arg",   1, 1,  false, true,  true,  false,
    fake_attribute_handler, NULL },

  { NULL,           0, 0,  false, false, false, false,
    NULL, NULL }
};

/* Associates a GNAT tree node with a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See the
   documentation of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY)	\
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL)	\
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY)	\
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY)	\
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL)	\
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY)	\
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table of types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) vec<tree, va_gc> *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) vec<tree, va_gc> *builtin_decls;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */

struct GTY((for_user)) pad_type_hash
{
  hashval_t hash;
  tree type;
};

struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
{
  static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
  static bool equal (pad_type_hash *a, pad_type_hash *b);

  static int
  keep_cache_entry (pad_type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};

static GTY ((cache)) hash_table<pad_type_hasher> *pad_type_hash_table;

static tree merge_sizes (tree, tree, tree, bool, bool);
static tree fold_bit_position (const_tree);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);

/* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
   of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes.  */
struct deferred_decl_context_node
{
  /* The ..._DECL node to work on.  */
  tree decl;

  /* The corresponding entity's Scope.  */
  Entity_Id gnat_scope;

  /* The value of force_global when DECL was pushed.  */
  int force_global;

  /* The list of ..._TYPE nodes to propagate the context to.  */
  vec<tree> types;

  /* The next queue item.  */
  struct deferred_decl_context_node *next;
};

static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;

/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  */
static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);

/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */
static void add_deferred_type_context (struct deferred_decl_context_node *n,
				       tree type);

/* Initialize data structures of the utils.c module.  */

void
init_gnat_utils (void)
{
  /* Initialize the association of GNAT nodes to GCC trees.  */
  associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the association of GNAT nodes to GCC trees as dummies.  */
  dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the hash table of padded types.  */
  pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
}

/* Destroy data structures of the utils.c module.  */

void
destroy_gnat_utils (void)
{
  /* Destroy the association of GNAT nodes to GCC trees.  */
  ggc_free (associate_gnat_to_gnu);
  associate_gnat_to_gnu = NULL;

  /* Destroy the association of GNAT nodes to GCC trees as dummies.  */
  ggc_free (dummy_node_table);
  dummy_node_table = NULL;

  /* Destroy the hash table of padded types.  */
  pad_type_hash_table->empty ();
  pad_type_hash_table = NULL;
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Associate GNU_DECL, a GCC
   tree node, with GNAT_ENTITY.  If GNU_DECL is not a ..._DECL node, abort.
   If NO_CHECK is true, the latter check is suppressed.

   If GNU_DECL is zero, reset a previous association.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  If that is not the case, this usually
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
		&& (PRESENT_GNU_TREE (gnat_entity)
		    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Return the GCC tree node
   that was associated with it.  If there is no such tree node, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}

/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
  tree gnu_type, debug_type;

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_equiv))
    gnat_equiv = gnat_type;

  /* If there is already a dummy type, use it.  Otherwise, make one.  */
  if (PRESENT_DUMMY_NODE (gnat_equiv))
    return GET_DUMMY_NODE (gnat_equiv);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_equiv)
			? tree_code_for_record_type (gnat_equiv)
			: ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_equiv))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;
  if (Has_Discriminants (gnat_equiv))
    decl_attributes (&gnu_type,
		     tree_cons (get_identifier ("may_alias"), NULL_TREE,
				NULL_TREE),
		     ATTR_FLAG_TYPE_IN_PLACE);

  SET_DUMMY_NODE (gnat_equiv, gnu_type);

  /* Create a debug type so that debuggers only see an unspecified type.  */
  if (Needs_Debug_Info (gnat_type))
    {
      debug_type = make_node (LANG_TYPE);
      TYPE_NAME (debug_type) = TYPE_NAME (gnu_type);
      TYPE_ARTIFICIAL (debug_type) = TYPE_ARTIFICIAL (gnu_type);
      SET_TYPE_DEBUG_TYPE (gnu_type, debug_type);
    }

  return gnu_type;
}

/* Return the dummy type that was made for GNAT_TYPE, if any.  */

tree
get_dummy_type (Entity_Id gnat_type)
{
  return GET_DUMMY_NODE (gnat_type);
}

/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
			     gnu_fat_type);
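  /* A fat pointer is a record pairing two pointers: P_ARRAY points to the
     array data proper and P_BOUNDS to the template holding its bounds.  */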
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
			      gnu_fat_type, NULL_TREE, NULL_TREE, 0, 1);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
			 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 1);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_REFERENCE_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}

/* Return true if we are in the global binding level.  */

bool
global_bindings_p (void)
{
  return force_global || !current_function_decl;
}

/* Enter a new binding level.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel = ggc_alloc<gnat_binding_level> ();

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = NULL_TREE;
  BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}

/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}

/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK, don't do anything.  Otherwise, if
     there are no variables, free the block and merge its subblocks into
     those of its parent block.  Otherwise, add it to the list of its
     parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (!BLOCK_VARS (block))
    {
      BLOCK_SUBBLOCKS (level->chain->block)
	= block_chainon (BLOCK_SUBBLOCKS (block),
			 BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

/* Exit a binding level and discard the associated BLOCK.  */

void
gnat_zaplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_CHAIN (block) = free_block_chain;
  free_block_chain = block;

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
/* Set the context of TYPE and its parallel types (if any) to CONTEXT.  */

static void
gnat_set_type_context (tree type, tree context)
{
  tree decl = TYPE_STUB_DECL (type);

  TYPE_CONTEXT (type) = context;

  while (decl && DECL_PARALLEL_TYPE (decl))
    {
      tree parallel_type = DECL_PARALLEL_TYPE (decl);

      /* Give a context to the parallel types and their stub decl, if any.
	 Some parallel types seem to be present in multiple parallel type
	 chains, so don't mess with their context if they already have one.  */
      if (!TYPE_CONTEXT (parallel_type))
	{
	  if (TYPE_STUB_DECL (parallel_type))
	    DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
	  TYPE_CONTEXT (parallel_type) = context;
	}

      decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
    }
}

/* Return the innermost scope, starting at GNAT_NODE, that we are interested
   in for the debug info, or Empty if there is no such scope.  If not NULL,
   set IS_SUBPROGRAM to whether the returned entity is a subprogram.  */

Entity_Id
get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
{
  Entity_Id gnat_entity;

  if (is_subprogram)
    *is_subprogram = false;

  if (Nkind (gnat_node) == N_Defining_Identifier
      || Nkind (gnat_node) == N_Defining_Operator_Symbol)
    gnat_entity = Scope (gnat_node);
  else
    return Empty;

  while (Present (gnat_entity))
    {
      switch (Ekind (gnat_entity))
	{
	case E_Function:
	case E_Procedure:
	  if (Present (Protected_Body_Subprogram (gnat_entity)))
	    gnat_entity = Protected_Body_Subprogram (gnat_entity);

	  /* If the scope is a subprogram, then just rely on
	     current_function_decl, so that we don't have to defer
	     anything.  This is needed because other places rely on the
	     validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes.  */
	  if (is_subprogram)
	    *is_subprogram = true;
	  return gnat_entity;

	case E_Record_Type:
	case E_Record_Subtype:
	  return gnat_entity;

	default:
	  /* By default, we are not interested in this particular scope: go to
	     the outer one.  */
	  break;
	}

      gnat_entity = Scope (gnat_entity);
    }

  return Empty;
}

/* If N is NULL, set TYPE's context to CONTEXT.  Defer this to the processing
   of N otherwise.  */

static void
defer_or_set_type_context (tree type, tree context,
			   struct deferred_decl_context_node *n)
{
  if (n)
    add_deferred_type_context (n, type);
  else
    gnat_set_type_context (type, context);
}

/* Return global_context, but create it first if need be.  */

static tree
get_global_context (void)
{
  if (!global_context)
    {
      global_context
	= build_translation_unit_decl (get_identifier (main_input_filename));
      debug_hooks->register_main_translation_unit (global_context);
    }

  return global_context;
}

/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  tree context = NULL_TREE;
  struct deferred_decl_context_node *deferred_decl_context = NULL;

  /* If explicitly asked to make DECL global or if it's an imported nested
     object, short-circuit the regular Scope-based context computation.  */
  if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
    {
      /* Rely on the GNAT scope, or fall back to the current_function_decl if
	 the GNAT scope reached the global scope, if it reached a subprogram
	 or the declaration is a subprogram or a variable (for them we skip
	 intermediate context types because the subprogram body elaboration
	 machinery and the inliner both expect a subprogram context).

	 Falling back to current_function_decl is necessary for implicit
	 subprograms created by gigi, such as the elaboration subprograms.  */
      bool context_is_subprogram = false;
      const Entity_Id gnat_scope
        = get_debug_scope (gnat_node, &context_is_subprogram);

      if (Present (gnat_scope)
	  && !context_is_subprogram
	  && TREE_CODE (decl) != FUNCTION_DECL
	  && TREE_CODE (decl) != VAR_DECL)
	/* Always assume the scope has not been elaborated, thus defer the
	   context propagation to the time its elaboration will be
	   available.  */
	deferred_decl_context
	  = add_deferred_decl_context (decl, gnat_scope, force_global);

      /* External declarations (when force_global > 0) may not be in a
	 local context.  */
      else if (current_function_decl && force_global == 0)
	context = current_function_decl;
    }

  /* If we are forced to be in global mode, or if both the GNAT scope and
     the current_function_decl did not help in determining the context, use
     the global scope.  */
  if (!deferred_decl_context && !context)
    context = get_global_context ();

  /* Functions imported in another function are not really nested.
     For really nested functions mark them initially as needing
     a static chain for uses of that flag before unnesting;
     lower_nested_functions will then recompute it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && !TREE_PUBLIC (decl)
      && context
      && (TREE_CODE (context) == FUNCTION_DECL
	  || decl_function_context (context)))
    DECL_STATIC_CHAIN (decl) = 1;

  if (!deferred_decl_context)
    DECL_CONTEXT (decl) = context;

  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node) && !renaming_from_instantiation_p (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
        && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      /* External declarations must go to the binding level they belong to.
	 This ensures that the corresponding imported entities are available
	 in the debugger at the proper time.  */
      if (DECL_EXTERNAL (decl)
	  && TREE_CODE (decl) == FUNCTION_DECL
	  && fndecl_built_in_p (decl))
	vec_safe_push (builtin_decls, decl);
      else if (global_bindings_p ())
	vec_safe_push (global_decls, decl);
      else
	{
	  DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
	  BLOCK_VARS (current_binding_level->block) = decl;
	}
    }

  /* For the declaration of a type, set its name either if it isn't already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same node, except when the names are
     both derived from source names.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      /* Array and pointer types aren't tagged types in the C sense so we need
	 to generate a typedef in DWARF for them and make sure it is preserved,
	 unless the type is artificial.  */
      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
	  && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
	      || DECL_ARTIFICIAL (decl)))
	;
      /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
	 generate the typedef in DWARF.  Also do that for fat pointer types
	 because, even though they are tagged types in the C sense, they are
	 still XUP types attached to the base array type at this point.  */
      else if (!DECL_ARTIFICIAL (decl)
	       && (TREE_CODE (t) == ARRAY_TYPE
		   || TREE_CODE (t) == POINTER_TYPE
		   || TYPE_IS_FAT_POINTER_P (t)))
	{
	  tree tt = build_variant_type_copy (t);
	  TYPE_NAME (tt) = decl;
	  defer_or_set_type_context (tt,
				     DECL_CONTEXT (decl),
				     deferred_decl_context);
	  TREE_TYPE (decl) = tt;
	  if (TYPE_NAME (t)
	      && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
	      && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
	    DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
	  else
	    DECL_ORIGINAL_TYPE (decl) = t;
	  /* Array types need to have a name so that they can be related to
	     their GNAT encodings.  */
	  if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
	    TYPE_NAME (t) = DECL_NAME (decl);
	  t = NULL_TREE;
	}
      else if (TYPE_NAME (t)
	       && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
	       && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
	;
      else
	t = NULL_TREE;

      /* Propagate the name to all the variants, this is needed for the type
	 qualifiers machinery to work properly (see check_qualified_type).
	 Also propagate the context to them.  Note that it will be propagated
	 to all parallel types too thanks to gnat_set_type_context.  */
      if (t)
	for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
	  /* ??? Because of the previous kludge, we can have variants of fat
	     pointer types with different names.  */
	  if (!(TYPE_IS_FAT_POINTER_P (t)
		&& TYPE_NAME (t)
		&& TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	    {
	      TYPE_NAME (t) = decl;
	      defer_or_set_type_context (t,
					 DECL_CONTEXT (decl),
					 deferred_decl_context);
	    }
    }
}

/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  GNAT_NODE is used for the position of the
   associated TYPE_DECL.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
		    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

                    <--------- pos ---------->
                {  +------------+-------------+-----------------+
      record  =>{  |############|     ...     | field (type)    |
                {  +------------+-------------+-----------------+
		   |<-- room -->|<- voffset ->|<---- size ----->|
		   o            o
		   |            |
		   record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
			   fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
			   size_int ((align / BITS_PER_UNIT) - 1));
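  /* For instance, with ALIGN = 32, i.e. 4 bytes, and an address of 6, this
     yields (-6) & 3 = 2, and 6 + 2 = 8 is indeed the next multiple of 4.  */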

  /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
		    convert (bitsizetype,
			     size_binop (PLUS_EXPR, room_st, voffset_st)),
		    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
			     pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  SET_TYPE_ALIGN (record_type, base_align);
  TYPE_USER_ALIGN (record_type) = 1;

  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
		  size_binop (MULT_EXPR, convert (bitsizetype, size),
			      bitsize_unit_node),
		  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
		  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}

/* TYPE is an ARRAY_TYPE that is being used as the type of a field in a packed
   record.  See if we can rewrite it as a type that has non-BLKmode, which we
   can pack tighter in the packed record.  If so, return the new type; if not,
   return the original type.  */

static tree
make_packable_array_type (tree type)
{
  const unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  unsigned int new_align;

  /* No point in doing anything if the size is either zero or too large for an
     integral mode, or if the type already has non-BLKmode.  */
  if (size == 0 || size > MAX_FIXED_MODE_SIZE || TYPE_MODE (type) != BLKmode)
    return type;

  /* Punt if the component type is an aggregate type for now.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (type)))
    return type;

  tree new_type = copy_type (type);

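  /* Round the size up to the next power of 2 so that an integral mode can
     match it, e.g. a 24-bit array is given a 32-bit size.  */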
  new_size = ceil_pow2 (size);
  new_align = MIN (new_size, BIGGEST_ALIGNMENT);
  SET_TYPE_ALIGN (new_type, new_align);

  TYPE_SIZE (new_type) = bitsize_int (new_size);
  TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);

  SET_TYPE_MODE (new_type, mode_for_size (new_size, MODE_INT, 1).else_blk ());

  return new_type;
}

/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the type of a field in a packed record if IN_RECORD is true, or as
   the component type of a packed array if IN_RECORD is false.  See if we
   can rewrite it either as a type that has non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type with at most
   MAX_ALIGN alignment if the value is non-zero.  If so, return the new
   type; if not, return the original type.  */

tree
make_packable_type (tree type, bool in_record, unsigned int max_align)
{
  const unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  const unsigned int align = TYPE_ALIGN (type);
  unsigned HOST_WIDE_INT new_size;
  unsigned int new_align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  tree new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_PACKED (new_type) = 1;
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      new_size = ceil_pow2 (size);
      new_align = MIN (new_size, BIGGEST_ALIGNMENT);
      SET_TYPE_ALIGN (new_type, new_align);
    }
  else
    {
      tree type_size = TYPE_ADA_SIZE (type);
      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type)
	  || !tree_fits_uhwi_p (type_size))
	return type;

      /* Round the RM size up to a unit boundary to get the minimal size
	 for a BLKmode record.  Give up if it's already the size and we
	 don't need to lower the alignment.  */
      new_size = tree_to_uhwi (type_size);
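      /* (X + BITS_PER_UNIT - 1) & -BITS_PER_UNIT rounds X up to the next
	 multiple of BITS_PER_UNIT, e.g. 13 bits become 16 with 8-bit units.  */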
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
      if (new_size == size && (max_align == 0 || align <= max_align))
	return type;

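      /* NEW_SIZE & -NEW_SIZE isolates the lowest set bit of NEW_SIZE, i.e.
	 the largest power of 2 that divides it, e.g. 24 yields 8.  */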
      new_align = MIN (new_size & -new_size, BIGGEST_ALIGNMENT);
      if (max_align > 0 && new_align > max_align)
	new_align = max_align;
      SET_TYPE_ALIGN (new_type, MIN (align, new_align));
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  tree new_field_list = NULL_TREE;
  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      tree new_field_type = TREE_TYPE (field);
      tree new_field, new_field_size;

      if (AGGREGATE_TYPE_P (new_field_type)
	  && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
	{
	  if (RECORD_OR_UNION_TYPE_P (new_field_type)
	      && !TYPE_FAT_POINTER_P (new_field_type))
	    new_field_type
	      = make_packable_type (new_field_type, true, max_align);
	  else if (in_record
		   && max_align > 0
		   && max_align < BITS_PER_UNIT
		   && TREE_CODE (new_field_type) == ARRAY_TYPE)
	    new_field_type = make_packable_array_type (new_field_type);
	}

      /* However, for the last field in a not already packed record type
	 that is of an aggregate type, we need to use the RM size in the
	 packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (field)
	  && !TYPE_PACKED (type)
	  && RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
	  && TYPE_ADA_SIZE (new_field_type))
	new_field_size = TYPE_ADA_SIZE (new_field_type);
      else
	new_field_size = DECL_SIZE (field);

      /* This is a layout with full representation, alignment and size clauses
	 so we simply pass 0 as PACKED like gnat_to_gnu_field in this case.  */
      new_field
	= create_field_decl (DECL_NAME (field), new_field_type, new_type,
			     new_field_size, bit_position (field), 0,
			     !DECL_NONADDRESSABLE_P (field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
	DECL_QUALIFIER (new_field) = DECL_QUALIFIER (field);

      DECL_CHAIN (new_field) = new_field_list;
      new_field_list = new_field;
    }

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  finish_record_type (new_type, nreverse (new_field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    SET_TYPE_DEBUG_TYPE (new_type, TYPE_DEBUG_TYPE (type));
  else if (TYPE_STUB_DECL (type))
    SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
			    DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* Try harder to get a packable type if necessary, for example
     in case the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
		   mode_for_size_tree (TYPE_SIZE (new_type),
				       MODE_INT, 1).else_blk ());

  /* If neither mode nor size nor alignment shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size && max_align == 0)
    return type;

  return new_type;
}

/* Return true if TYPE has an unsigned representation.  This needs to be used
   when the representation of types whose precision is not equal to their size
   is manipulated based on the RM size.  */

static inline bool
type_unsigned_for_rm (tree type)
{
  /* This is the common case.  */
  if (TYPE_UNSIGNED (type))
    return true;

  /* See the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.  */
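  /* For example, an Ada subtype declared with range 0 .. 100 of a signed
     base type has a constant nonnegative lower bound and qualifies here.  */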
  if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
      && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
    return true;

  return false;
}

/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.  */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case BOOLEAN_TYPE:
      /* Do not mess with boolean types that have foreign convention.  */
      if (TYPE_PRECISION (type) == 1 && TYPE_SIZE (type) == size_tree)
	break;

      /* ... fall through ... */

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
		  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
	size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
	 already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
	  || (TYPE_PRECISION (type) == size && biased_p == for_biased)
	  || size > LONG_LONG_TYPE_SIZE)
	break;

      biased_p |= for_biased;

      /* The type should be an unsigned type if the original type is unsigned
	 or if the lower bound is constant and non-negative or if the type is
	 biased, see the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.  */
      if (type_unsigned_for_rm (type) || biased_p)
	new_type = make_unsigned_type (size);
      else
	new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
      /* Copy the name to show that it's essentially the same type and
	 not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Only do something if this is a fat pointer, in which case we
	 may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
	{
	  scalar_int_mode p_mode;
	  if (!int_mode_for_size (size, 0).exists (&p_mode)
	      || !targetm.valid_pointer_mode (p_mode))
	    p_mode = ptr_mode;
	  return
	    build_pointer_type_for_mode
	      (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
	       p_mode, 0);
	}
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
	 may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
	return
	  build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  return type;
}

/* Return true iff the padded types are equivalent.  */

bool
pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
{
  tree type1, type2;

  if (t1->hash != t2->hash)
    return false;

  type1 = t1->type;
  type2 = t2->type;

  /* We consider that the padded types are equivalent if they pad the same
     type and have the same size, alignment, RM size and storage order.
     Taking the mode into account is redundant since it is determined by
     the others.  */
  return
    TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
    && TYPE_SIZE (type1) == TYPE_SIZE (type2)
    && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
    && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
    && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
}

/* Compute the hash value for the padded TYPE.  */

static hashval_t
hash_pad_type (tree type)
{
  hashval_t hashcode;

  hashcode
    = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
  hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
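  /* Note that the storage order is not hashed: pad types that differ only
     in it fall into the same bucket and are told apart by the equality
     function above.  */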
1331 
1332   return hashcode;
1333 }
1334 
1335 /* Look up the padded TYPE in the hash table and return its canonical version
1336    if it exists; otherwise, insert it into the hash table.  */
1337 
1338 static tree
canonicalize_pad_type(tree type)1339 canonicalize_pad_type (tree type)
1340 {
1341   const hashval_t hashcode = hash_pad_type (type);
1342   struct pad_type_hash in, *h, **slot;
1343 
1344   in.hash = hashcode;
1345   in.type = type;
1346   slot = pad_type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
1347   h = *slot;
1348   if (!h)
1349     {
1350       h = ggc_alloc<pad_type_hash> ();
1351       h->hash = hashcode;
1352       h->type = type;
1353       *slot = h;
1354     }
1355 
1356   return h->type;
1357 }

/* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
   if needed.  We have already verified that SIZE and ALIGN are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array.  IS_USER_TYPE is true if the original type needs to be completed.
   DEFINITION is true if this type is being defined.  SET_RM_SIZE is true if
   the RM size of the resulting type is to be set to SIZE too; in this case,
   the padded type is canonicalized before being returned.  */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
		Entity_Id gnat_entity, bool is_component_type,
		bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
	   || operand_equal_p (round_up (size, orig_align), orig_size, 0))
	  && (align == 0 || align == orig_align))
	return type;

      if (!size)
	size = orig_size;
      if (align == 0)
	align = orig_align;

      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
	  || (TREE_CODE (orig_size) == INTEGER_CST
	      && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
		      !Comes_From_Source (gnat_entity),
		      !(TYPE_NAME (type)
			&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
			&& DECL_IGNORED_P (TYPE_NAME (type))),
		      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  /* ??? Padding types around packed array implementation types will be
     considered as root types in the array descriptor language hook (see
     gnat_get_array_descr_info).  Give them the original packed array type
     name so that the one coming from sources appears in the debugging
     information.  */
  if (TYPE_IMPL_PACKED_ARRAY_P (type)
      && TYPE_ORIGINAL_PACKED_ARRAY (type)
      && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    TYPE_NAME (record) = TYPE_NAME (TYPE_ORIGINAL_PACKED_ARRAY (type));
  else if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  SET_TYPE_ALIGN (record, align ? align : orig_align);
  TYPE_SIZE (record) = size ? size : orig_size;
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
	       size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
			   bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size, and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align > 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
	  || (TREE_CODE (size) == INTEGER_CST
	      && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true, align);
      if (TYPE_MODE (packable_type) != BLKmode
	  && compare_tree_int (TYPE_SIZE (packable_type), align) <= 0)
        type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
			     bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* We will output additional debug info manually below.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
	 it by means of the hash table.  This is consistent with the language
	 semantics and ensures that gigi and the middle-end have a common view
	 of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record)))
	{
	  tree canonical = canonicalize_pad_type (record);
	  if (canonical != record)
	    {
	      record = canonical;
	      goto built;
	    }
	}
    }

  if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    SET_TYPE_DEBUG_TYPE (record, maybe_debug_type (type));

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	   && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree name = TYPE_IDENTIFIER (record);
      tree size_unit = TYPE_SIZE_UNIT (record);

      /* A variable that holds the size is required even with no encoding since
	 it will be referenced by debugging information attributes.  At global
	 level, we need a single variable across all translation units.  */
      if (size
	  && TREE_CODE (size) != INTEGER_CST
	  && (definition || global_bindings_p ()))
	{
	  /* Whether or not gnat_entity comes from source, this XVZ variable
	     is a compilation artifact.  */
	  size_unit
	    = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
			      size_unit, true, global_bindings_p (),
			      !definition && global_bindings_p (), false,
			      false, true, true, NULL, gnat_entity);
	  TYPE_SIZE_UNIT (record) = size_unit;
	}

      /* There is no need to show what we are a subtype of when outputting as
	 few encodings as possible: regular debugging information makes this
	 redundant.  */
      if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
	{
	  tree marker = make_node (RECORD_TYPE);
	  tree orig_name = TYPE_IDENTIFIER (type);

	  TYPE_NAME (marker) = concat_name (name, "XVS");
	  finish_record_type (marker,
			      create_field_decl (orig_name,
						 build_reference_type (type),
						 marker, NULL_TREE, NULL_TREE,
						 0, 0),
			      0, true);
	  TYPE_SIZE_UNIT (marker) = size_unit;

	  add_parallel_type (record, marker);
	}
    }

built:
  /* If a simple size was explicitly given, maybe issue a warning.  */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity))
    return record;

  /* But don't do it if we are just annotating types and the type is tagged or
     concurrent, since these types aren't fully laid out in this mode.  */
  if (type_annotate_only)
    {
      Entity_Id gnat_type
	= is_component_type
	  ? Component_Type (gnat_entity) : Etype (gnat_entity);

      if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
	return record;
    }

  /* Take the original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified, but only for aggregate types.  */
  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align && AGGREGATE_TYPE_P (type))
    orig_size = round_up (orig_size, align);

  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
	   && TREE_CODE (orig_size) == INTEGER_CST
	   && (TREE_OVERFLOW (size)
	       || TREE_OVERFLOW (orig_size)
	       || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node;

      /* For a packed array, post the message on the original array type.  */
      if (Is_Packed_Array_Impl_Type (gnat_entity))
	gnat_entity = Original_Array_Type (gnat_entity);

      if ((Ekind (gnat_entity) == E_Component
	   || Ekind (gnat_entity) == E_Discriminant)
	  && Present (Component_Clause (gnat_entity)))
	gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Has_Size_Clause (gnat_entity))
	gnat_error_node = Expression (Size_Clause (gnat_entity));
      else if (Has_Object_Size_Clause (gnat_entity))
	gnat_error_node = Expression (Object_Size_Clause (gnat_entity));
      else
	gnat_error_node = Empty;

      /* Generate message only for entities that come from source, since
	 if we have an entity created by expansion, the message will be
	 generated for some other corresponding source entity.  */
      if (Comes_From_Source (gnat_entity))
	{
	  if (is_component_type)
	    post_error_ne_tree ("component of& padded{ by ^ bits}?",
				gnat_entity, gnat_entity,
				size_diffop (size, orig_size));
	  else if (Present (gnat_error_node))
	    post_error_ne_tree ("{^ }bits of & unused?",
				gnat_error_node, gnat_entity,
				size_diffop (size, orig_size));
	}
    }

  return record;
}
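
/* Illustrative example (added commentary, not upstream code): padding an
   8-bit component type up to a 32-bit Component_Size clause.  Assuming
   GNU_TYPE is the 8-bit component type and GNAT_ENTITY the array entity:

     gnu_type = maybe_pad_type (gnu_type, bitsize_int (32), 0, gnat_entity,
				true, false, definition, true);

   yields a RECORD_TYPE named after the entity with suffix "PAD", holding a
   single internal field "F" of the original type at bit 0, with TYPE_SIZE
   and TYPE_ADA_SIZE both equal to 32 bits; since that size is constant, the
   returned node is the canonical one from the hash table.  This is also the
   path on which the "component of& padded{ by ^ bits}?" warning above would
   be posted.  */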

/* Return true if padded TYPE was built with an RM size.  */

bool
pad_type_has_rm_size (tree type)
{
  /* This is required for the lookup.  */
  if (!TREE_CONSTANT (TYPE_SIZE (type)))
    return false;

  const hashval_t hashcode = hash_pad_type (type);
  struct pad_type_hash in, *h;

  in.hash = hashcode;
  in.type = type;
  h = pad_type_hash_table->find_with_hash (&in, hashcode);

  /* The types built with an RM size are the canonicalized ones.  */
  return h && h->type == type;
}

/* Return a copy of the padded TYPE but with reverse storage order.  */

tree
set_reverse_storage_order_on_pad_type (tree type)
{
  if (flag_checking)
    {
      /* The storage order can only be reversed on a scalar inner type.  */
      tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
      gcc_assert (!AGGREGATE_TYPE_P (inner_type)
		  && !VECTOR_TYPE_P (inner_type));
    }

  /* This is required for the canonicalization.  */
  gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));

  tree field = copy_node (TYPE_FIELDS (type));
  type = copy_type (type);
  DECL_CONTEXT (field) = type;
  TYPE_FIELDS (type) = field;
  TYPE_REVERSE_STORAGE_ORDER (type) = 1;
  return canonicalize_pad_type (type);
}

/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
   If this is a multi-dimensional array type, do this recursively.

   OP may be
   - ALIAS_SET_COPY:     the new set is made a copy of the old one.
   - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
   - ALIAS_SET_SUBSET:   the new set is made a subset of the old one.  */

void
relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
{
  /* Remove any padding from GNU_OLD_TYPE.  It doesn't matter in the case
     of a one-dimensional array, since the padding has the same alias set
     as the field type, but if it's a multi-dimensional array, we need to
     see the inner types.  */
  while (TREE_CODE (gnu_old_type) == RECORD_TYPE
	 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
	     || TYPE_PADDING_P (gnu_old_type)))
    gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));

  /* Unconstrained array types are deemed incomplete and would thus be given
     alias set 0.  Retrieve the underlying array type.  */
  if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_old_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
  if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_new_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));

  if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
      && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
    relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);

  switch (op)
    {
    case ALIAS_SET_COPY:
      /* The alias set shouldn't be copied between array types with different
	 aliasing settings because this can break the aliasing relationship
	 between the array type and its element type.  */
      if (flag_checking || flag_strict_aliasing)
	gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
		      && TREE_CODE (gnu_old_type) == ARRAY_TYPE
		      && TYPE_NONALIASED_COMPONENT (gnu_new_type)
			 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));

      TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
      break;

    case ALIAS_SET_SUBSET:
    case ALIAS_SET_SUPERSET:
      {
	alias_set_type old_set = get_alias_set (gnu_old_type);
	alias_set_type new_set = get_alias_set (gnu_new_type);

	/* Do nothing if the alias sets conflict.  This ensures that we
	   never call record_alias_subset several times for the same pair
	   or at all for alias set 0.  */
	if (!alias_sets_conflict_p (old_set, new_set))
	  {
	    if (op == ALIAS_SET_SUBSET)
	      record_alias_subset (old_set, new_set);
	    else
	      record_alias_subset (new_set, old_set);
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }

  record_component_aliases (gnu_new_type);
}
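
/* Illustrative example (added commentary, not upstream code): a typical
   use is tying a derived type to its parent so that accesses through
   either are never considered disjoint.  Assuming GNU_DERIVED was built
   from GNU_PARENT:

     relate_alias_sets (gnu_derived, gnu_parent, ALIAS_SET_COPY);

   which strips any padding from GNU_PARENT, recurses on the inner
   dimensions of multi-dimensional arrays, copies the alias set, and
   finally records the component aliases of GNU_DERIVED.  */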

/* Record TYPE as a builtin type for Ada.  NAME is the name of the type.
   ARTIFICIAL_P is true if the type was generated by the compiler.  */

void
record_builtin_type (const char *name, tree type, bool artificial_p)
{
  tree type_decl = build_decl (input_location,
			       TYPE_DECL, get_identifier (name), type);
  DECL_ARTIFICIAL (type_decl) = artificial_p;
  TYPE_ARTIFICIAL (type) = artificial_p;
  gnat_pushdecl (type_decl, Empty);

  if (debug_hooks->type_decl)
    debug_hooks->type_decl (type_decl, false);
}

/* Finish constructing the character type CHAR_TYPE.

  In Ada, character types are enumeration types and, as a consequence, are
  represented in the front-end by integral types holding the positions of
  the enumeration values as defined by the language, which means that the
  integral types are unsigned.

  Unfortunately the signedness of 'char' in C is implementation-defined
  and GCC even has the option -f[un]signed-char to toggle it at compile
  time.  Since GNAT's philosophy is to be compatible with C by default,
  to wit Interfaces.C.char is defined as a mere copy of Character, we may
  need to declare character types as signed types in GENERIC and generate
  the necessary adjustments to make them behave as unsigned types.

  The overall strategy is as follows: if 'char' is unsigned, do nothing;
  if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
  character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
  types.  The idea is to ensure that the bit pattern contained in the
  Esize'd objects is not changed, even though the numerical value will
  be interpreted differently depending on the signedness.  */

void
finish_character_type (tree char_type)
{
  if (TYPE_UNSIGNED (char_type))
    return;

  /* Make a copy of a generic unsigned version since we'll modify it.  */
  tree unsigned_char_type
    = (char_type == char_type_node
       ? unsigned_char_type_node
       : copy_type (gnat_unsigned_type_for (char_type)));

  /* Create an unsigned version of the type and set it as debug type.  */
  TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
  TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
  TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
  SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);

  /* If this is a subtype, make the debug type a subtype of the debug type
     of the base type and convert literal RM bounds to unsigned.  */
  if (TREE_TYPE (char_type))
    {
      tree base_unsigned_char_type = TYPE_DEBUG_TYPE (TREE_TYPE (char_type));
      tree min_value = TYPE_RM_MIN_VALUE (char_type);
      tree max_value = TYPE_RM_MAX_VALUE (char_type);

      if (TREE_CODE (min_value) == INTEGER_CST)
	min_value = fold_convert (base_unsigned_char_type, min_value);
      if (TREE_CODE (max_value) == INTEGER_CST)
	max_value = fold_convert (base_unsigned_char_type, max_value);

      TREE_TYPE (unsigned_char_type) = base_unsigned_char_type;
      SET_TYPE_RM_MIN_VALUE (unsigned_char_type, min_value);
      SET_TYPE_RM_MAX_VALUE (unsigned_char_type, max_value);
    }

  /* Adjust the RM bounds of the original type to unsigned; that's especially
     important for base types, since their RM bounds are implicit.  */
  SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
  SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
}
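
/* Illustrative example (added commentary, not upstream code): with
   -fsigned-char in effect, an 8-bit Character object holding 16#C0#
   keeps the bit pattern 0xC0 but has value -64 in the signed GENERIC
   view; the unsigned debug type installed above lets the debugger
   print position 192, i.e. Character'Val (192) = 'À', as the language
   defines.  */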

/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record type as a fat pointer type.  */

void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register.  */
  if (STRICT_ALIGNMENT)
    SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));

  /* Show what it really is.  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
}

/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record or union type.  If REP_LEVEL is zero, this
   record has no representation clause and so will be entirely laid out here.
   If REP_LEVEL is one, this record has a representation clause and has been
   laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
   this record is derived from a parent record and thus inherits its layout;
   only make a pass on the fields to finalize them.  DEBUG_INFO_P is true if
   additional debug info needs to be output for this type.  */

void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  const enum tree_code orig_code = TREE_CODE (record_type);
  const bool had_size = TYPE_SIZE (record_type) != NULL_TREE;
  const bool had_size_unit = TYPE_SIZE_UNIT (record_type) != NULL_TREE;
  const bool had_align = TYPE_ALIGN (record_type) > 0;
  /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
     out just like a UNION_TYPE, since the size will be fixed.  */
  const enum tree_code code
    = (orig_code == QUAL_UNION_TYPE && rep_level > 0 && had_size
       ? UNION_TYPE : orig_code);
  tree name = TYPE_IDENTIFIER (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      if (TYPE_ALIGN (record_type) < BITS_PER_UNIT)
	SET_TYPE_ALIGN (record_type, BITS_PER_UNIT);

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = NULL_TREE;

      /* Ensure we use the traditional GCC layout for bitfields when we need
	 to pack the record type or have a representation clause.  The other
	 possible layout (Microsoft C compiler), if available, would prevent
	 efficient packing in almost all cases.  */
#ifdef TARGET_MS_BITFIELD_LAYOUT
      if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
	decl_attributes (&record_type,
			 tree_cons (get_identifier ("gcc_struct"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
#endif

      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      if (RECORD_OR_UNION_TYPE_P (type)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      const bool variant_part = (TREE_CODE (type) == QUAL_UNION_TYPE);

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  const unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size and
		 maximum alignment, if any.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align)
		       && (!TYPE_MAX_ALIGN (record_type)
			   || TYPE_MAX_ALIGN (record_type) >= align))
		{
		  SET_TYPE_ALIGN (record_type, align);
		  SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is
	     required.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* Clear DECL_BIT_FIELD_TYPE for a variant part at offset 0, it's simply
	 not supported by the DECL_BIT_FIELD_REPRESENTATIVE machinery because
	 the variant part is always the last field in the list.  */
      if (variant_part && integer_zerop (pos))
	DECL_BIT_FIELD_TYPE (field) = NULL_TREE;

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	SET_TYPE_ALIGN (record_type,
			MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));

      switch (code)
	{
	case UNION_TYPE:
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, because we might be processing
	     the REP part of a record with a variant part for which the
	     variant part has a rep clause but not the fixed part, in
	     which case this REP part may contain overlapping fields
	     and thus needs to be treated like a union type above, so
	     use a MAX in that case.  Also, if this field is a variant
	     part, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size, rep_level > 0,
			   variant_part);
	  size
	    = merge_sizes (size, pos, this_size, rep_level > 0, variant_part);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  /* We need to set the regular sizes if REP_LEVEL is one.  */
  if (rep_level == 1)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      tree size_unit = had_size_unit
		       ? TYPE_SIZE_UNIT (record_type)
		       : convert (sizetype,
				  size_binop (CEIL_DIV_EXPR, size,
					      bitsize_unit_node));
      const unsigned int align = TYPE_ALIGN (record_type);

      TYPE_SIZE (record_type) = variable_size (round_up (size, align));
      TYPE_SIZE_UNIT (record_type)
	= variable_size (round_up (size_unit, align / BITS_PER_UNIT));
    }

  /* We need to set the Ada size if REP_LEVEL is zero or one.  */
  if (rep_level < 2)
    {
      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);
    }

  /* We need to set the mode if REP_LEVEL is one or two.  */
  if (rep_level > 0)
    {
      compute_record_mode (record_type);
      finish_bitfield_layout (record_type);
    }

  /* Reset the TYPE_MAX_ALIGN field since it's private to gigi.  */
  TYPE_MAX_ALIGN (record_type) = 0;

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
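
/* Illustrative example (added commentary, not upstream code): the usual
   construction sequence for a record without a representation clause,
   where GNU_INT_TYPE stands for any integral type (hypothetical name):

     tree gnu_rec = make_node (RECORD_TYPE);
     TYPE_NAME (gnu_rec) = get_identifier ("R");
     tree f1 = create_field_decl (get_identifier ("F1"), gnu_int_type,
				  gnu_rec, NULL_TREE, NULL_TREE, 0, 0);
     tree f2 = create_field_decl (get_identifier ("F2"), gnu_int_type,
				  gnu_rec, NULL_TREE, NULL_TREE, 0, 0);
     DECL_CHAIN (f1) = f2;
     finish_record_type (gnu_rec, f1, 0, true);

   With rep_level 0, layout_type computes the positions and the loop above
   derives TYPE_ADA_SIZE; with rep_level 1, the positions would come from a
   rep clause and only the sizes, alignment and mode are set here.  */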

/* Append PARALLEL_TYPE on the chain of parallel types of TYPE.  If
   PARALLEL_TYPE has no context and its computation is not deferred yet, also
   propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
   moment TYPE will get a context.  */

void
add_parallel_type (tree type, tree parallel_type)
{
  tree decl = TYPE_STUB_DECL (type);

  while (DECL_PARALLEL_TYPE (decl))
    decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));

  SET_DECL_PARALLEL_TYPE (decl, parallel_type);

  /* If PARALLEL_TYPE already has a context, we are done.  */
  if (TYPE_CONTEXT (parallel_type))
    return;

  /* Otherwise, try to get one from TYPE's context.  If so, simply propagate
     it to PARALLEL_TYPE.  */
  if (TYPE_CONTEXT (type))
    gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));

  /* Otherwise TYPE has no context yet.  We know it will have one thanks to
     gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
     so we have nothing to do in this case.  */
}

/* Return true if TYPE has a parallel type.  */

static bool
has_parallel_type (tree type)
{
  tree decl = TYPE_STUB_DECL (type);

  return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
}

/* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
   associated with it.  It need not be invoked directly in most cases as
   finish_record_type takes care of doing so.  */

void
rest_of_record_type_compilation (tree record_type)
{
  bool var_size = false;
  tree field;

  /* If this is a padded type, the bulk of the debug info has already been
     generated for the field's type.  */
  if (TYPE_IS_PADDING_P (record_type))
    return;

  /* If the type already has a parallel type (XVS type), then we're done.  */
  if (has_parallel_type (record_type))
    return;

  for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
	 whether or not the record does.  For example, if we have a union,
	 it may be that all fields, rounded up to the alignment, have the
	 same size, in which case we'll use that size.  But the debug
	 output routines (except Dwarf2) won't be able to output the fields,
	 so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
	  /* If a field has a non-constant qualifier, the record will have
	     variable size too.  */
	  || (TREE_CODE (record_type) == QUAL_UNION_TYPE
	      && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
	{
	  var_size = true;
	  break;
	}
    }

  /* If this record type is of variable size, make a parallel record type that
     will tell the debugger how the former is laid out (see exp_dbug.ads).  */
  if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
    {
      tree new_record_type
	= make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
		     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
      tree last_pos = bitsize_zero_node;

      new_name
	= concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
				  ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
      TYPE_STUB_DECL (new_record_type)
	= create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
	= DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
	= size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      /* Now scan all the fields, replacing each field with a new field
	 corresponding to the new encoding.  */
      for (tree old_field = TYPE_FIELDS (record_type);
	   old_field;
	   old_field = DECL_CHAIN (old_field))
	{
	  tree field_type = TREE_TYPE (old_field);
	  tree field_name = DECL_NAME (old_field);
	  tree curpos = fold_bit_position (old_field);
	  tree pos, new_field;
	  bool var = false;
	  unsigned int align = 0;

	  /* See how the position was modified from the last position.

	     There are two basic cases we support: a value was added
	     to the last position or the last position was rounded to
	     a boundary and then something was added.  Check for the
	     first case first.  If not, see if there is any evidence
	     of rounding.  If so, round the last position and retry.

	     If this is a union, the position can be taken as zero.  */
	  if (TREE_CODE (new_record_type) == UNION_TYPE)
	    pos = bitsize_zero_node;
	  else
	    pos = compute_related_constant (curpos, last_pos);

	  if (pos)
	    ;
	  else if (TREE_CODE (curpos) == MULT_EXPR
		   && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
	    {
	      tree offset = TREE_OPERAND (curpos, 0);
	      align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align = scale_by_factor_of (offset, align);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  else if (TREE_CODE (curpos) == PLUS_EXPR
		   && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
		   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
		   && tree_fits_uhwi_p
		      (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
	    {
	      tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
	      unsigned HOST_WIDE_INT addend
	        = tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align
		= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
	      align = scale_by_factor_of (offset, align);
	      align = MIN (align, addend & -addend);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  else
	    {
	      align = DECL_ALIGN (old_field);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }

	  /* See if this type is variable-sized and make a pointer type
	     and indicate the indirection if so.  Beware that the debug
	     back-end may adjust the position computed above according
	     to the alignment of the field type, i.e. the pointer type
	     in this case, if we don't preventively counter that.  */
	  if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
	    {
	      field_type = copy_type (build_pointer_type (field_type));
	      SET_TYPE_ALIGN (field_type, BITS_PER_UNIT);
	      var = true;

	      /* ??? Kludge to work around a bug in Workbench's debugger.  */
	      if (align == 0)
		{
		  align = DECL_ALIGN (old_field);
		  last_pos = round_up (last_pos, align);
		  pos = compute_related_constant (curpos, last_pos);
		}
	    }

	  /* If we can't compute a position, set it to zero.

	     ??? We really should abort here, but it's too much work
	     to get this correct for all cases.  */
	  if (!pos)
	    pos = bitsize_zero_node;

	  /* Make a new field name, if necessary.  */
	  if (var || align != 0)
	    {
	      char suffix[16];

	      if (align != 0)
		sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
			 align / BITS_PER_UNIT);
	      else
		strcpy (suffix, "XVL");

	      field_name = concat_name (field_name, suffix);
	    }

	  new_field
	    = create_field_decl (field_name, field_type, new_record_type,
				 DECL_SIZE (old_field), pos, 0, 0);
	  /* The specified position is not the actual position of the field
	     but the gap with the previous field, so the computation of the
	     bit-field status may be incorrect.  We adjust it manually to
	     avoid generating useless attributes for the field in DWARF.  */
	  if (DECL_SIZE (old_field) == TYPE_SIZE (field_type)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    {
	      DECL_BIT_FIELD (new_field) = 0;
	      DECL_BIT_FIELD_TYPE (new_field) = NULL_TREE;
	    }
	  DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
	  TYPE_FIELDS (new_record_type) = new_field;

	  /* If old_field is a QUAL_UNION_TYPE, take its size as being
	     zero.  The only time it's not the last field of the record
	     is when there are other components at fixed positions after
	     it (meaning there was a rep clause for every field) and we
	     want to be able to encode them.  */
	  last_pos = size_binop (PLUS_EXPR, curpos,
				 (TREE_CODE (TREE_TYPE (old_field))
				  == QUAL_UNION_TYPE)
				 ? bitsize_zero_node
				 : DECL_SIZE (old_field));
	}

      TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));

      add_parallel_type (record_type, new_record_type);
    }
}
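
/* Illustrative example (added commentary, not upstream code): for a
   variable-sized record R, the parallel type is named R___XVE (R___XVU
   for a variant part), a variable-sized field ARR becomes a pointer
   field named ARR___XVL, and a field realigned to a 4-byte boundary
   gets the suffix XVA4, per the "XV%c%u" format above; the encodings
   themselves are documented in exp_dbug.ads.  */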

/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  If MAX is true, we take the
   MAX of the end position of this field with LAST_SIZE.  In all other cases,
   we use FIRST_BIT plus SIZE.  SPECIAL is true if it's for a QUAL_UNION_TYPE,
   in which case we must look for COND_EXPRs and replace a value of zero with
   the old size.  Return an expression for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool max, bool special)
{
  tree type = TREE_TYPE (last_size);
  tree new_size;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      new_size = size_binop (PLUS_EXPR, first_bit, size);
      if (max)
	new_size = size_binop (MAX_EXPR, last_size, new_size);
    }

  else
    new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
			    integer_zerop (TREE_OPERAND (size, 1))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 1),
						       max, special),
			    integer_zerop (TREE_OPERAND (size, 2))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 2),
						       max, special));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through SUBSTITUTE_IN_EXPR) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
    new_size = TREE_OPERAND (new_size, 0);

  return new_size;
}
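
/* Worked example (added commentary, not upstream code), sizes in bits:
   with LAST_SIZE = 32, a field at FIRST_BIT = 32 of SIZE = 16 and
   MAX = SPECIAL = false, the result is FIRST_BIT + SIZE = 48.  With
   MAX = true (rep'ed record), the result is MAX (LAST_SIZE, 48), e.g.
   MAX (64, 48) = 64 for LAST_SIZE = 64, which copes with overlapping
   fields.  With SPECIAL = true, a COND_EXPR size is rewritten branch by
   branch, zero-sized branches keeping LAST_SIZE.  */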

/* Convert the size expression EXPR to TYPE and fold the result.  */

static tree
fold_convert_size (tree type, tree expr)
{
  /* We assume that size expressions do not wrap around.  */
  if (TREE_CODE (expr) == MULT_EXPR || TREE_CODE (expr) == PLUS_EXPR)
    return size_binop (TREE_CODE (expr),
		       fold_convert_size (type, TREE_OPERAND (expr, 0)),
		       fold_convert_size (type, TREE_OPERAND (expr, 1)));

  return fold_convert (type, expr);
}

/* Return the bit position of FIELD, in bits from the start of the record,
   and fold it as much as possible.  This is a tree of type bitsizetype.  */

static tree
fold_bit_position (const_tree field)
{
  tree offset = fold_convert_size (bitsizetype, DECL_FIELD_OFFSET (field));
  return size_binop (PLUS_EXPR, DECL_FIELD_BIT_OFFSET (field),
		     size_binop (MULT_EXPR, offset, bitsize_unit_node));
}
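
/* Worked example (added commentary, not upstream code): for a field with
   DECL_FIELD_OFFSET = 8 bytes and DECL_FIELD_BIT_OFFSET = 3, the result
   is 3 + 8 * 8 = 67 bits from the start of the record, computed entirely
   in bitsizetype; fold_convert_size distributes the conversion over
   PLUS_EXPR and MULT_EXPR so that such sums fold to constants whenever
   possible.  */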

/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
   related by the addition of a constant.  Return that constant if so.  */

static tree
compute_related_constant (tree op0, tree op1)
{
  tree factor, op0_var, op1_var, op0_cst, op1_cst, result;

  if (TREE_CODE (op0) == MULT_EXPR
      && TREE_CODE (op1) == MULT_EXPR
      && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
      && TREE_OPERAND (op1, 1) == TREE_OPERAND (op0, 1))
    {
      factor = TREE_OPERAND (op0, 1);
      op0 = TREE_OPERAND (op0, 0);
      op1 = TREE_OPERAND (op1, 0);
    }
  else
    factor = NULL_TREE;

  op0_cst = split_plus (op0, &op0_var);
  op1_cst = split_plus (op1, &op1_var);
  result = size_binop (MINUS_EXPR, op0_cst, op1_cst);

  if (operand_equal_p (op0_var, op1_var, 0))
    return factor ? size_binop (MULT_EXPR, factor, result) : result;

  return NULL_TREE;
}
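
/* Worked example (added commentary, not upstream code): with
   OP0 = N + 24 and OP1 = N + 8 for some variable N, the split yields
   constants 24 and 8 with identical variable parts, so the result is 16.
   With OP0 = (N + 6) * 4 and OP1 = (N + 1) * 4, the common factor 4 is
   stripped first and the result is (6 - 1) * 4 = 20.  If the variable
   parts differ, NULL_TREE is returned.  */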

/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   bitsizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip conversions in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  in = remove_conversions (in, false);

  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      if (lhs_var == TREE_OPERAND (in, 0)
	  && rhs_var == TREE_OPERAND (in, 1))
	return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
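
/* Worked example (added commentary, not upstream code): splitting
   (N + 8) - 3 returns the constant 5 and stores N in *PVAR; splitting a
   bare INTEGER_CST returns it with *PVAR = bitsize_zero_node; anything
   else, e.g. a MULT_EXPR, is treated as purely variable: the constant
   part is zero and *PVAR is the whole operand.  */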

/* Return a copy of TYPE but safe to modify in any way.  */

tree
copy_type (tree type)
{
  tree new_type = copy_node (type);

  /* Unshare the language-specific data.  */
  if (TYPE_LANG_SPECIFIC (type))
    {
      TYPE_LANG_SPECIFIC (new_type) = NULL;
      SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
    }

  /* And the contents of the language-specific slot if needed.  */
  if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
      && TYPE_RM_VALUES (type))
    {
      TYPE_RM_VALUES (new_type) = NULL_TREE;
      SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
    }

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);

  TYPE_POINTER_TO (new_type) = NULL_TREE;
  TYPE_REFERENCE_TO (new_type) = NULL_TREE;
  TYPE_MAIN_VARIANT (new_type) = new_type;
  TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
  TYPE_CANONICAL (new_type) = new_type;

  return new_type;
}

/* Return a subtype of sizetype with range MIN to MAX and whose
   TYPE_INDEX_TYPE is INDEX.  GNAT_NODE is used for the position
   of the associated TYPE_DECL.  */

tree
create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
{
  /* First build a type for the desired range.  */
  tree type = build_nonshared_range_type (sizetype, min, max);

  /* Then set the index type.  */
  SET_TYPE_INDEX_TYPE (type, index);
  create_type_decl (NULL_TREE, type, true, false, gnat_node);

  return type;
}

/* Return a subtype of TYPE with range MIN to MAX.  If TYPE is NULL,
   sizetype is used.  */

tree
create_range_type (tree type, tree min, tree max)
{
  tree range_type;

  if (!type)
    type = sizetype;

  /* First build a type with the base range.  */
  range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
						 TYPE_MAX_VALUE (type));

  /* Then set the actual range.  */
  SET_TYPE_RM_MIN_VALUE (range_type, min);
  SET_TYPE_RM_MAX_VALUE (range_type, max);

  return range_type;
}
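
/* Illustrative example (added commentary, not upstream code): an Ada
   declaration such as "subtype Small is Integer range 1 .. 10" maps to

     gnu_type = create_range_type (gnu_integer_type,
				   build_int_cst (gnu_integer_type, 1),
				   build_int_cst (gnu_integer_type, 10));

   where the GCC bounds stay those of the base type and only the RM bounds
   are narrowed; gnu_integer_type stands for the GENERIC type of Integer
   and is a hypothetical name here.  */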

/* Return an extra subtype of TYPE with range MIN to MAX.  */

tree
create_extra_subtype (tree type, tree min, tree max)
{
  const bool uns = TYPE_UNSIGNED (type);
  const unsigned prec = TYPE_PRECISION (type);
  tree subtype = uns ? make_unsigned_type (prec) : make_signed_type (prec);

  TREE_TYPE (subtype) = type;
  TYPE_EXTRA_SUBTYPE_P (subtype) = 1;

  SET_TYPE_RM_MIN_VALUE (subtype, min);
  SET_TYPE_RM_MAX_VALUE (subtype, max);

  return subtype;
}

/* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
   NAME gives the name of the type to be used in the declaration.  */

tree
create_type_stub_decl (tree name, tree type)
{
  tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
  DECL_ARTIFICIAL (type_decl) = 1;
  TYPE_ARTIFICIAL (type) = 1;
  return type_decl;
}

/* Return a TYPE_DECL node for TYPE.  NAME gives the name of the type to be
   used in the declaration.  ARTIFICIAL_P is true if the declaration was
   generated by the compiler.  DEBUG_INFO_P is true if we need to write
   debug information about this type.  GNAT_NODE is used for the position
   of the decl.  */

tree
create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
		  Node_Id gnat_node)
{
  enum tree_code code = TREE_CODE (type);
  bool is_named
    = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
  tree type_decl;

  /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
  gcc_assert (!TYPE_IS_DUMMY_P (type));

  /* If the type hasn't been named yet, we're naming it; preserve an existing
     TYPE_STUB_DECL that has been attached to it for some purpose.  */
  if (!is_named && TYPE_STUB_DECL (type))
    {
      type_decl = TYPE_STUB_DECL (type);
      DECL_NAME (type_decl) = name;
    }
  else
    type_decl = build_decl (input_location, TYPE_DECL, name, type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;
  TYPE_ARTIFICIAL (type) = artificial_p;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (type_decl, gnat_node);

  /* If we're naming the type, equate the TYPE_STUB_DECL to the name.  This
     causes the name to be also viewed as a "tag" by the debug back-end, with
     the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
     types in DWARF.

     Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
     from multiple contexts, and "type_decl" references a copy of it: in such
     a case, do not mess with TYPE_STUB_DECL: we do not want to re-use the
     TYPE_DECL with the mechanism above.  */
  if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
    TYPE_STUB_DECL (type) = type_decl;

  /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
     back-end doesn't support, and for others if we don't need to.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;

  return type_decl;
}

/* Return a VAR_DECL or CONST_DECL node.

   NAME gives the name of the variable.  ASM_NAME is its assembler name
   (if provided).  TYPE is its data type (a GCC ..._TYPE node).  INIT is
   the GCC tree for an optional initial expression; NULL_TREE if none.

   CONST_FLAG is true if this variable is constant, in which case we might
   return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.

   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
   definition to be made visible outside of the current compilation unit, for
   instance variable definitions in a package specification.

   EXTERN_FLAG is true when processing an external variable declaration (as
   opposed to a definition: no storage is to be allocated for the variable).

   STATIC_FLAG is only relevant when not at top level and indicates whether
   to always allocate storage to the variable.

   VOLATILE_FLAG is true if this variable is declared as volatile.

   ARTIFICIAL_P is true if the variable was generated by the compiler.

   DEBUG_INFO_P is true if we need to write debug information for it.

   ATTR_LIST is the list of attributes to be attached to the variable.

   GNAT_NODE is used for the position of the decl.  */

tree
create_var_decl (tree name, tree asm_name, tree type, tree init,
		 bool const_flag, bool public_flag, bool extern_flag,
		 bool static_flag, bool volatile_flag, bool artificial_p,
		 bool debug_info_p, struct attrib *attr_list,
		 Node_Id gnat_node, bool const_decl_allowed_p)
{
  /* Whether the object has static storage duration, either explicitly or by
     virtue of being declared at the global level.  */
  const bool static_storage = static_flag || global_bindings_p ();

  /* Whether the initializer is constant: for an external object or an object
     with static storage duration, we check that the initializer is a valid
     constant expression for initializing a static variable; otherwise, we
     only check that it is constant.  */
  const bool init_const
    = (init
       && gnat_types_compatible_p (type, TREE_TYPE (init))
       && (extern_flag || static_storage
	   ? initializer_constant_valid_p (init, TREE_TYPE (init))
	     != NULL_TREE
	   : TREE_CONSTANT (init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a DECL is made is in turn used
     as an initializer.  We used to rely on CONST_DECL vs VAR_DECL for this,
     but extra constraints apply to this choice (see below) and they are not
     relevant to the distinction we wish to make.  */
  const bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl (input_location,
		  (constant_p
		   && const_decl_allowed_p
		   && !AGGREGATE_TYPE_P (type) ? CONST_DECL : VAR_DECL),
		  name, type);

  /* Detect constants created by the front-end to hold references to function
     calls for stabilization purposes.  This is needed for renaming.  */
  if (const_flag && init && POINTER_TYPE_P (type))
    {
      tree inner = init;
      if (TREE_CODE (inner) == COMPOUND_EXPR)
	inner = TREE_OPERAND (inner, 1);
      inner = remove_conversions (inner, true);
      if (TREE_CODE (inner) == ADDR_EXPR
	  && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
	       && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
	      || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
		  && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
	DECL_RETURN_VALUE_P (var_decl) = 1;
    }

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && init && !TREE_CONSTANT (init)))
    init = NULL_TREE;

  /* At the global level, a non-constant initializer generates elaboration
     statements.  Check that such statements are allowed, that is to say,
     not violating a No_Elaboration_Code restriction.  */
  if (init && !init_const && global_bindings_p ())
    Check_Elaboration_Code_Allowed (gnat_node);

  /* Attach the initializer, if any.  */
  DECL_INITIAL (var_decl) = init;

  /* Directly set some flags.  */
  DECL_ARTIFICIAL (var_decl) = artificial_p;
  DECL_EXTERNAL (var_decl) = extern_flag;

  TREE_CONSTANT (var_decl) = constant_p;
  TREE_READONLY (var_decl) = const_flag;

  /* The object is public if it is external or if it is declared public
     and has static storage duration.  */
  TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);

  /* We need to allocate static storage for an object with static storage
     duration if it isn't external.  */
  TREE_STATIC (var_decl) = !extern_flag && static_storage;

  TREE_SIDE_EFFECTS (var_decl)
    = TREE_THIS_VOLATILE (var_decl)
    = TYPE_VOLATILE (type) | volatile_flag;

  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && TREE_PUBLIC (var_decl)
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;

  /* Do not emit debug info if not requested, or for an external constant whose
     initializer is not absolute because this would require a global relocation
     in a read-only section which runs afoul of the PE-COFF run-time relocation
     mechanism.  */
  if (!debug_info_p
      || (extern_flag
	  && constant_p
	  && init
	  && initializer_constant_valid_p (init, TREE_TYPE (init))
	     != null_pointer_node))
    DECL_IGNORED_P (var_decl) = 1;

  /* ??? Some attributes cannot be applied to CONST_DECLs.  */
  if (TREE_CODE (var_decl) == VAR_DECL)
    process_attributes (&var_decl, &attr_list, true, gnat_node);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
    {
      /* Let the target mangle the name if this isn't a verbatim asm.  */
      if (*IDENTIFIER_POINTER (asm_name) != '*')
	asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);

      SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
    }
2787 
2788   return var_decl;
2789 }
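
/* Illustrative sketch (not part of the compiler): for a library-level Ada
   constant of a scalar type whose initializer is a valid static constant,
   a call along the lines of

     create_var_decl (get_identifier ("c"), NULL_TREE, integer_type_node,
                      build_int_cst (integer_type_node, 42),
                      true, false, false, false,   -- const/public/extern/static
                      false, false, true,          -- volatile/artificial/debug
                      NULL, gnat_node, true);      -- attrs/node/const_decl_ok

   with gnat_node standing for the declaration's Node_Id, makes constant_p
   true, so a CONST_DECL is built; TREE_STATIC is set because
   global_bindings_p () is true and extern_flag is false, and TREE_PUBLIC
   stays clear because public_flag is false.  */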
2790 
2791 /* Return true if TYPE, an aggregate type, contains (or is) an array.
2792    If SELF_REFERENTIAL is true, then an additional requirement on the
2793    array is that it be self-referential.  */
2794 
2795 bool
2796 aggregate_type_contains_array_p (tree type, bool self_referential)
2797 {
2798   switch (TREE_CODE (type))
2799     {
2800     case RECORD_TYPE:
2801     case UNION_TYPE:
2802     case QUAL_UNION_TYPE:
2803       {
2804 	tree field;
2805 	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2806 	  if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2807 	      && aggregate_type_contains_array_p (TREE_TYPE (field),
2808 						  self_referential))
2809 	    return true;
2810 	return false;
2811       }
2812 
2813     case ARRAY_TYPE:
2814       return self_referential ? type_contains_placeholder_p (type) : true;
2815 
2816     default:
2817       gcc_unreachable ();
2818     }
2819 }
2820 
2821 /* Return true if TYPE is a type with variable size or a padding type with a
2822    field of variable size or a record that has a field with such a type.  */
2823 
2824 static bool
2825 type_has_variable_size (tree type)
2826 {
2827   tree field;
2828 
2829   if (!TREE_CONSTANT (TYPE_SIZE (type)))
2830     return true;
2831 
2832   if (TYPE_IS_PADDING_P (type)
2833       && !TREE_CONSTANT (DECL_SIZE (TYPE_FIELDS (type))))
2834     return true;
2835 
2836   if (!RECORD_OR_UNION_TYPE_P (type))
2837     return false;
2838 
2839   for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2840     if (type_has_variable_size (TREE_TYPE (field)))
2841       return true;
2842 
2843   return false;
2844 }
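
/* Worked example (illustrative): for a record type whose TYPE_SIZE depends
   on a discriminant, e.g. one containing a String (1 .. N) component, the
   first test already returns true.  For a record with only fixed-size
   scalar components, no test matches and the function returns false.  */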
2845 
2846 /* Return a FIELD_DECL node.  NAME is the field's name, TYPE is its type and
2847    RECORD_TYPE is the type of the enclosing record.  If SIZE is nonzero, it
2848    is the specified size of the field.  If POS is nonzero, it is the bit
2849    position.  PACKED is 1 if the enclosing record is packed, -1 if it has
2850    Component_Alignment of Storage_Unit.  If ADDRESSABLE is nonzero, it
2851    means we are allowed to take the address of the field; if it is negative,
2852    we should not make a bitfield, which is used by make_aligning_type.  */
2853 
2854 tree
2855 create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
2856 		   int packed, int addressable)
2857 {
2858   tree field_decl = build_decl (input_location, FIELD_DECL, name, type);
2859 
2860   DECL_CONTEXT (field_decl) = record_type;
2861   TREE_READONLY (field_decl) = TYPE_READONLY (type);
2862 
2863   /* If a size is specified, use it.  Otherwise, if the record type is packed
2864      compute a size to use, which may differ from the object's natural size.
2865      We always set a size in this case to trigger the checks for bitfield
2866      creation below, which is typically required when no position has been
2867      specified.  */
2868   if (size)
2869     size = convert (bitsizetype, size);
2870   else if (packed == 1)
2871     {
2872       size = rm_size (type);
2873       if (TYPE_MODE (type) == BLKmode)
2874 	size = round_up (size, BITS_PER_UNIT);
2875     }
2876 
2877   /* If ADDRESSABLE permits it, make a bitfield when a size is specified,
2878      for two reasons: first, when the size differs from the natural size;
2879      second, when the alignment is insufficient.
2880 
2881      We never make a bitfield if the type of the field has a nonconstant size,
2882      because no such entity requiring bitfield operations should reach here.
2883 
2884      We do *preventively* make a bitfield when there might be a need for it
2885      but we don't have all the necessary information to decide, as is the case
2886      of a field in a packed record.
2887 
2888      We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2889      in layout_decl or finish_record_type to clear the bit_field indication if
2890      it is in fact not needed.  */
2891   if (addressable >= 0
2892       && size
2893       && TREE_CODE (size) == INTEGER_CST
2894       && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2895       && (packed
2896 	  || !tree_int_cst_equal (size, TYPE_SIZE (type))
2897 	  || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
2898 	  || (TYPE_ALIGN (record_type)
2899 	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
2900     {
2901       DECL_BIT_FIELD (field_decl) = 1;
2902       DECL_SIZE (field_decl) = size;
2903       if (!packed && !pos)
2904 	{
2905 	  if (TYPE_ALIGN (record_type)
2906 	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
2907 	    SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
2908 	  else
2909 	    SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2910 	}
2911     }
2912 
2913   DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2914 
2915   /* If TYPE has BLKmode, we must ensure this is aligned to at least a byte
2916      boundary since GCC cannot handle less aligned BLKmode bitfields.
2917      Likewise if it has a variable size and no specified position because
2918      variable-sized objects need to be aligned to at least a byte boundary.
2919      Likewise for an aggregate without specified position that contains an
2920      array because, in this case, slices of variable length of this array
2921      must be handled by GCC and have variable size.  */
2922   if (packed && (TYPE_MODE (type) == BLKmode
2923 		 || (!pos && type_has_variable_size (type))
2924 		 || (!pos
2925 		     && AGGREGATE_TYPE_P (type)
2926 		     && aggregate_type_contains_array_p (type, false))))
2927     SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);
2928 
2929   /* Bump the alignment if need be, either for bitfield/packing purposes or
2930      to satisfy the type requirements if no such considerations apply.  When
2931      we get the alignment from the type, indicate if this is from an explicit
2932      user request, which prevents stor-layout from lowering it later on.  */
2933   else
2934     {
2935       const unsigned int field_align
2936 	= DECL_BIT_FIELD (field_decl)
2937 	  ? 1
2938 	  : packed
2939 	    ? BITS_PER_UNIT
2940 	    : 0;
2941 
2942       if (field_align > DECL_ALIGN (field_decl))
2943 	SET_DECL_ALIGN (field_decl, field_align);
2944       else if (!field_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
2945 	{
2946 	  SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2947 	  DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
2948 	}
2949     }
2950 
2951   if (pos)
2952     {
2953       /* We need to pass in the alignment the DECL is known to have.
2954 	 This is the lowest-order bit set in POS, but no more than
2955 	 the alignment of the record, if one is specified.  Note
2956 	 that an alignment of 0 is taken as infinite.  */
2957       unsigned int known_align;
2958 
2959       if (tree_fits_uhwi_p (pos))
2960 	known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
2961       else
2962 	known_align = BITS_PER_UNIT;
2963 
2964       if (TYPE_ALIGN (record_type)
2965 	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2966 	known_align = TYPE_ALIGN (record_type);
2967 
2968       layout_decl (field_decl, known_align);
2969       SET_DECL_OFFSET_ALIGN (field_decl,
2970 			     tree_fits_uhwi_p (pos)
2971 			     ? BIGGEST_ALIGNMENT : BITS_PER_UNIT);
2972       pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2973 		    &DECL_FIELD_BIT_OFFSET (field_decl),
2974 		    DECL_OFFSET_ALIGN (field_decl), pos);
2975     }
2976 
2977   /* In addition to what our caller says, claim the field is addressable if we
2978      know that its type is not suitable.
2979 
2980      The field may also be "technically" nonaddressable, meaning that even if
2981      we attempt to take the field's address we will actually get the address
2982      of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
2983      value we have at this point is not accurate enough, so we don't account
2984      for this here and let finish_record_type decide.  */
2985   if (!addressable && !type_for_nonaliased_component_p (type))
2986     addressable = 1;
2987 
2988   /* Note that there is a trade-off in making a field nonaddressable because
2989      this will cause type-based alias analysis to use the same alias set for
2990      accesses to the field as for accesses to the whole record: while doing
2991      so will make it more likely to disambiguate accesses to other objects
2992      and accesses to the field, it will make it less likely to disambiguate
2993      accesses to the other fields of the record and accesses to the field.
2994      If the record is fully static, then the trade-off is irrelevant since
2995      the fields of the record can always be disambiguated by their offsets
2996      but, if the record is dynamic, then it can become problematic.  */
2997   DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2998 
2999   return field_decl;
3000 }
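
/* Worked example (illustrative): consider a 24-bit field at bit position 8
   in a record, with a 32-bit field type.  SIZE differs from TYPE_SIZE, so
   DECL_BIT_FIELD is set.  The known alignment passed to layout_decl is the
   lowest-order set bit of POS, capped by the record alignment:

     pos = 8  ->  8 & -8 = 8 bits of known alignment.  */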
3001 
3002 /* Return a PARM_DECL node with NAME and TYPE.  */
3003 
3004 tree
3005 create_param_decl (tree name, tree type)
3006 {
3007   tree param_decl = build_decl (input_location, PARM_DECL, name, type);
3008 
3009   /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
3010      can lead to various ABI violations.  */
3011   if (targetm.calls.promote_prototypes (NULL_TREE)
3012       && INTEGRAL_TYPE_P (type)
3013       && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
3014     {
3015       /* We have to be careful about biased types here.  Make a subtype
3016 	 of integer_type_node with the proper biasing.  */
3017       if (TREE_CODE (type) == INTEGER_TYPE
3018 	  && TYPE_BIASED_REPRESENTATION_P (type))
3019 	{
3020 	  tree subtype
3021 	    = make_unsigned_type (TYPE_PRECISION (integer_type_node));
3022 	  TREE_TYPE (subtype) = integer_type_node;
3023 	  TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
3024 	  SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
3025 	  SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
3026 	  type = subtype;
3027 	}
3028       else
3029 	type = integer_type_node;
3030     }
3031 
3032   DECL_ARG_TYPE (param_decl) = type;
3033   return param_decl;
3034 }
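
/* Worked example (illustrative): for a biased subtype holding the range
   100 .. 103 in 2 bits, promotion cannot simply use integer_type_node or
   the bias would be dropped; the code above builds an unsigned type with
   the precision of integer_type_node that keeps the biased representation
   and the original 100 .. 103 bounds, so reads of the parameter still add
   the bias back.  */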
3035 
3036 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
3037    a TYPE.  If IN_PLACE is true, the tree pointed to by NODE should not be
3038    changed.  GNAT_NODE is used for the position of error messages.  */
3039 
3040 void
3041 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
3042 		    Node_Id gnat_node)
3043 {
3044   struct attrib *attr;
3045 
3046   for (attr = *attr_list; attr; attr = attr->next)
3047     switch (attr->type)
3048       {
3049       case ATTR_MACHINE_ATTRIBUTE:
3050 	Sloc_to_locus (Sloc (gnat_node), &input_location);
3051 	decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
3052 			 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
3053 	break;
3054 
3055       case ATTR_LINK_ALIAS:
3056         if (!DECL_EXTERNAL (*node))
3057 	  {
3058 	    TREE_STATIC (*node) = 1;
3059 	    assemble_alias (*node, attr->name);
3060 	  }
3061 	break;
3062 
3063       case ATTR_WEAK_EXTERNAL:
3064 	if (SUPPORTS_WEAK)
3065 	  declare_weak (*node);
3066 	else
3067 	  post_error ("?weak declarations not supported on this target",
3068 		      attr->error_point);
3069 	break;
3070 
3071       case ATTR_LINK_SECTION:
3072 	if (targetm_common.have_named_sections)
3073 	  {
3074 	    set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
3075 	    DECL_COMMON (*node) = 0;
3076 	  }
3077 	else
3078 	  post_error ("?section attributes are not supported for this target",
3079 		      attr->error_point);
3080 	break;
3081 
3082       case ATTR_LINK_CONSTRUCTOR:
3083 	DECL_STATIC_CONSTRUCTOR (*node) = 1;
3084 	TREE_USED (*node) = 1;
3085 	break;
3086 
3087       case ATTR_LINK_DESTRUCTOR:
3088 	DECL_STATIC_DESTRUCTOR (*node) = 1;
3089 	TREE_USED (*node) = 1;
3090 	break;
3091 
3092       case ATTR_THREAD_LOCAL_STORAGE:
3093 	set_decl_tls_model (*node, decl_default_tls_model (*node));
3094 	DECL_COMMON (*node) = 0;
3095 	break;
3096       }
3097 
3098   *attr_list = NULL;
3099 }
3100 
3101 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
3102    a power of 2.  */
3103 
3104 bool
3105 value_factor_p (tree value, unsigned HOST_WIDE_INT factor)
3106 {
3107   gcc_checking_assert (pow2p_hwi (factor));
3108 
3109   if (tree_fits_uhwi_p (value))
3110     return (tree_to_uhwi (value) & (factor - 1)) == 0;
3111 
3112   if (TREE_CODE (value) == MULT_EXPR)
3113     return (value_factor_p (TREE_OPERAND (value, 0), factor)
3114             || value_factor_p (TREE_OPERAND (value, 1), factor));
3115 
3116   return false;
3117 }
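
/* A minimal standalone sketch (illustration only, not compiler code) of the
   INTEGER_CST case above on host integers; the MULT_EXPR case just recurses
   on the two operands and succeeds if either one is a multiple of FACTOR.  */
#if 0
#include <stdio.h>

static int
hwi_factor_p (unsigned long long value, unsigned long long factor)
{
  /* FACTOR is a power of 2, so masking with FACTOR - 1 keeps the remainder
     of the division by FACTOR.  */
  return (value & (factor - 1)) == 0;
}

int
main (void)
{
  printf ("%d\n", hwi_factor_p (24, 8));  /* 1: 24 = 3 * 8 */
  printf ("%d\n", hwi_factor_p (20, 8));  /* 0: remainder 4 */
  return 0;
}
#endif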
3118 
3119 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling it
3120    to be set from the result of elaborating GNAT_SCOPE.  */
3121 
3122 static struct deferred_decl_context_node *
3123 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
3124 {
3125   struct deferred_decl_context_node *new_node;
3126 
3127   new_node
3128     = (struct deferred_decl_context_node *) xmalloc (sizeof (*new_node));
3129   new_node->decl = decl;
3130   new_node->gnat_scope = gnat_scope;
3131   new_node->force_global = force_global;
3132   new_node->types.create (1);
3133   new_node->next = deferred_decl_context_queue;
3134   deferred_decl_context_queue = new_node;
3135   return new_node;
3136 }
3137 
3138 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling it
3139    to be set from the DECL_CONTEXT computed as part of N, as soon as that
3140    context is available.  */
3141 
3142 static void
3143 add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
3144 {
3145   n->types.safe_push (type);
3146 }
3147 
3148 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available.  Return
3149    NULL_TREE if it is not available.  */
3150 
3151 static tree
3152 compute_deferred_decl_context (Entity_Id gnat_scope)
3153 {
3154   tree context;
3155 
3156   if (present_gnu_tree (gnat_scope))
3157     context = get_gnu_tree (gnat_scope);
3158   else
3159     return NULL_TREE;
3160 
3161   if (TREE_CODE (context) == TYPE_DECL)
3162     {
3163       const tree context_type = TREE_TYPE (context);
3164 
3165       /* Skip dummy types: only the final ones can appear in the context
3166 	 chain.  */
3167       if (TYPE_DUMMY_P (context_type))
3168 	return NULL_TREE;
3169 
3170       /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
3171 	 chain.  */
3172       else
3173 	context = context_type;
3174     }
3175 
3176   return context;
3177 }
3178 
3179 /* Try to process all deferred nodes in the queue.  Keep in the queue the
3180    ones that cannot be processed yet and remove the others.  If FORCE is
3181    true, force the processing of all nodes, using the global context when
3182    nodes don't have a GNU translation.  */
3183 
3184 void
3185 process_deferred_decl_context (bool force)
3186 {
3187   struct deferred_decl_context_node **it = &deferred_decl_context_queue;
3188   struct deferred_decl_context_node *node;
3189 
3190   while (*it)
3191     {
3192       bool processed = false;
3193       tree context = NULL_TREE;
3194       Entity_Id gnat_scope;
3195 
3196       node = *it;
3197 
3198       /* If FORCE, get the innermost elaborated scope.  Otherwise, just try to
3199 	 get the first scope.  */
3200       gnat_scope = node->gnat_scope;
3201       while (Present (gnat_scope))
3202 	{
3203 	  context = compute_deferred_decl_context (gnat_scope);
3204 	  if (!force || context)
3205 	    break;
3206 	  gnat_scope = get_debug_scope (gnat_scope, NULL);
3207 	}
3208 
3209       /* Imported declarations must not be in a local context (i.e. not inside
3210 	 a function).  */
3211       if (context && node->force_global > 0)
3212 	{
3213 	  tree ctx = context;
3214 
3215 	  while (ctx)
3216 	    {
3217 	      gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
3218 	      ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
3219 	    }
3220 	}
3221 
3222       /* If FORCE, we want to get rid of all nodes in the queue: in case there
3223 	 was no elaborated scope, use the global context.  */
3224       if (force && !context)
3225 	context = get_global_context ();
3226 
3227       if (context)
3228 	{
3229 	  tree t;
3230 	  int i;
3231 
3232 	  DECL_CONTEXT (node->decl) = context;
3233 
3234 	  /* Propagate it to the TYPE_CONTEXT attributes of the requested
3235 	     ..._TYPE nodes.  */
3236 	  FOR_EACH_VEC_ELT (node->types, i, t)
3237 	    {
3238 	      gnat_set_type_context (t, context);
3239 	    }
3240 	  processed = true;
3241 	}
3242 
3243       /* If this node has been successfully processed, remove it from the
3244 	 queue.  Then move to the next node.  */
3245       if (processed)
3246 	{
3247 	  *it = node->next;
3248 	  node->types.release ();
3249 	  free (node);
3250 	}
3251       else
3252 	it = &node->next;
3253     }
3254 }
3255 
3256 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR.  */
3257 
3258 static unsigned int
3259 scale_by_factor_of (tree expr, unsigned int value)
3260 {
3261   unsigned HOST_WIDE_INT addend = 0;
3262   unsigned HOST_WIDE_INT factor = 1;
3263 
3264   /* Peel conversions around EXPR and try to extract bodies from function
3265      calls: it is possible to get the scale factor from size functions.  */
3266   expr = remove_conversions (expr, true);
3267   if (TREE_CODE (expr) == CALL_EXPR)
3268     expr = maybe_inline_call_in_expr (expr);
3269 
3270   /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
3271      multiple of the scale factor we are looking for.  */
3272   if (TREE_CODE (expr) == PLUS_EXPR
3273       && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3274       && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
3275     {
3276       addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3277       expr = TREE_OPERAND (expr, 0);
3278     }
3279 
3280   /* An expression which is a bitwise AND with a mask has a power-of-2 factor
3281      corresponding to the number of trailing zeros of the mask.  */
3282   if (TREE_CODE (expr) == BIT_AND_EXPR
3283       && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
3284     {
3285       unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3286       unsigned int i = 0;
3287 
3288       while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
3289 	{
3290 	  mask >>= 1;
3291 	  factor *= 2;
3292 	  i++;
3293 	}
3294     }
3295 
3296   /* If the addend is not a multiple of the factor we found, give up.  In
3297      theory we could find a smaller common factor but it's useless for our
3298      needs.  This situation arises when dealing with a field F1 with no
3299      alignment requirement but that is following a field F2 with such
3300      requirements.  As long as we have F2's offset, we don't need alignment
3301      information to compute F1's.  */
3302   if (addend % factor != 0)
3303     factor = 1;
3304 
3305   return factor * value;
3306 }
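
/* Worked example (illustrative): for EXPR = (x & ~7) + 16 and VALUE = 8,
   the constant addend 16 is peeled off first; the mask ~7 has 3 trailing
   zero bits, so factor = 8, and since 16 % 8 == 0 the result is 8 * 8 = 64.
   With an addend of 20 instead, 20 % 8 != 0, the factor falls back to 1
   and the result is just VALUE.  */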
3307 
3308 /* Return a LABEL_DECL with NAME.  GNAT_NODE is used for the position of
3309    the decl.  */
3310 
3311 tree
3312 create_label_decl (tree name, Node_Id gnat_node)
3313 {
3314   tree label_decl
3315     = build_decl (input_location, LABEL_DECL, name, void_type_node);
3316 
3317   SET_DECL_MODE (label_decl, VOIDmode);
3318 
3319   /* Add this decl to the current binding level.  */
3320   gnat_pushdecl (label_decl, gnat_node);
3321 
3322   return label_decl;
3323 }
3324 
3325 /* Return a FUNCTION_DECL node.  NAME is the name of the subprogram, ASM_NAME
3326    its assembler name, TYPE its type (a FUNCTION_TYPE or METHOD_TYPE node),
3327    PARAM_DECL_LIST the list of its parameters (a list of PARM_DECL nodes
3328    chained through the DECL_CHAIN field).
3329 
3330    INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.
3331 
3332    PUBLIC_FLAG is true if this is for a reference to a public entity or for a
3333    definition to be made visible outside of the current compilation unit.
3334 
3335    EXTERN_FLAG is true when processing an external subprogram declaration.
3336 
3337    ARTIFICIAL_P is true if the subprogram was generated by the compiler.
3338 
3339    DEBUG_INFO_P is true if we need to write debug information for it.
3340 
3341    DEFINITION is true if the subprogram is to be considered as a definition.
3342 
3343    ATTR_LIST is the list of attributes to be attached to the subprogram.
3344 
3345    GNAT_NODE is used for the position of the decl.  */
3346 
3347 tree
3348 create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
3349 		     enum inline_status_t inline_status, bool public_flag,
3350 		     bool extern_flag, bool artificial_p, bool debug_info_p,
3351 		     bool definition, struct attrib *attr_list,
3352 		     Node_Id gnat_node)
3353 {
3354   tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
3355   DECL_ARGUMENTS (subprog_decl) = param_decl_list;
3356 
3357   DECL_ARTIFICIAL (subprog_decl) = artificial_p;
3358   DECL_EXTERNAL (subprog_decl) = extern_flag;
3359   DECL_FUNCTION_IS_DEF (subprog_decl) = definition;
3360   DECL_IGNORED_P (subprog_decl) = !debug_info_p;
3361   TREE_PUBLIC (subprog_decl) = public_flag;
3362 
3363   switch (inline_status)
3364     {
3365     case is_suppressed:
3366       DECL_UNINLINABLE (subprog_decl) = 1;
3367       break;
3368 
3369     case is_default:
3370       break;
3371 
3372     case is_required:
3373       if (Back_End_Inlining)
3374 	{
3375 	  decl_attributes (&subprog_decl,
3376 			   tree_cons (get_identifier ("always_inline"),
3377 				      NULL_TREE, NULL_TREE),
3378 			   ATTR_FLAG_TYPE_IN_PLACE);
3379 
3380 	  /* Inline_Always guarantees that every direct call is inlined and
3381 	     that there is no indirect reference to the subprogram, so the
3382 	     instance in the original package (as well as its clones in the
3383 	     client packages created for inter-unit inlining) can be made
3384 	     private, which causes the out-of-line body to be eliminated.  */
3385 	  TREE_PUBLIC (subprog_decl) = 0;
3386 	}
3387 
3388       /* ... fall through ... */
3389 
3390     case is_prescribed:
3391       DECL_DISREGARD_INLINE_LIMITS (subprog_decl) = 1;
3392 
3393       /* ... fall through ... */
3394 
3395     case is_requested:
3396       DECL_DECLARED_INLINE_P (subprog_decl) = 1;
3397       if (!Debug_Generated_Code)
3398 	DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
3399       break;
3400 
3401     default:
3402       gcc_unreachable ();
3403     }
3404 
3405   process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3406 
3407   /* Once everything is processed, finish the subprogram declaration.  */
3408   finish_subprog_decl (subprog_decl, asm_name, type);
3409 
3410   /* Add this decl to the current binding level.  */
3411   gnat_pushdecl (subprog_decl, gnat_node);
3412 
3413   /* Output the assembler code and/or RTL for the declaration.  */
3414   rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3415 
3416   return subprog_decl;
3417 }
3418 
3419 /* Given a subprogram declaration DECL, its assembler name and its type,
3420    finish constructing the subprogram declaration from ASM_NAME and TYPE.  */
3421 
3422 void
3423 finish_subprog_decl (tree decl, tree asm_name, tree type)
3424 {
3425   /* DECL_ARGUMENTS is set by the caller, but not its context.  */
3426   for (tree param_decl = DECL_ARGUMENTS (decl);
3427        param_decl;
3428        param_decl = DECL_CHAIN (param_decl))
3429     DECL_CONTEXT (param_decl) = decl;
3430 
3431   tree result_decl
3432     = build_decl (DECL_SOURCE_LOCATION (decl), RESULT_DECL, NULL_TREE,
3433 		  TREE_TYPE (type));
3434 
3435   DECL_ARTIFICIAL (result_decl) = 1;
3436   DECL_IGNORED_P (result_decl) = 1;
3437   DECL_CONTEXT (result_decl) = decl;
3438   DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
3439   DECL_RESULT (decl) = result_decl;
3440 
3441   /* Propagate the "pure" property.  */
3442   DECL_PURE_P (decl) = TYPE_RESTRICT (type);
3443 
3444   /* Propagate the "noreturn" property.  */
3445   TREE_THIS_VOLATILE (decl) = TYPE_VOLATILE (type);
3446 
3447   if (asm_name)
3448     {
3449       /* Let the target mangle the name if this isn't a verbatim asm.  */
3450       if (*IDENTIFIER_POINTER (asm_name) != '*')
3451 	asm_name = targetm.mangle_decl_assembler_name (decl, asm_name);
3452 
3453       SET_DECL_ASSEMBLER_NAME (decl, asm_name);
3454 
3455       /* The expand_main_function circuitry expects "main_identifier_node" to
3456 	 designate the DECL_NAME of the 'main' entry point, in turn expected
3457 	 to be declared as the "main" function literally by default.  Ada
3458 	 program entry points are typically declared with a different name
3459 	 within the binder generated file, exported as 'main' to satisfy the
3460 	 system expectations.  Force main_identifier_node in this case.  */
3461       if (asm_name == main_identifier_node)
3462 	DECL_NAME (decl) = main_identifier_node;
3463     }
3464 }
3465 
3466 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3467    body.  This routine needs to be invoked before processing the declarations
3468    appearing in the subprogram.  */
3469 
3470 void
3471 begin_subprog_body (tree subprog_decl)
3472 {
3473   announce_function (subprog_decl);
3474 
3475   /* This function is being defined.  */
3476   TREE_STATIC (subprog_decl) = 1;
3477 
3478   /* The failure of this assertion will likely come from a wrong context for
3479      the subprogram body, e.g. another procedure for a procedure declared at
3480      library level.  */
3481   gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3482 
3483   current_function_decl = subprog_decl;
3484 
3485   /* Enter a new binding level and show that all the parameters belong to
3486      this function.  */
3487   gnat_pushlevel ();
3488 }
3489 
3490 /* Finish translating the current subprogram and set its BODY.  */
3491 
3492 void
3493 end_subprog_body (tree body)
3494 {
3495   tree fndecl = current_function_decl;
3496 
3497   /* Attach the BLOCK for this level to the function and pop the level.  */
3498   BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3499   DECL_INITIAL (fndecl) = current_binding_level->block;
3500   gnat_poplevel ();
3501 
3502   /* The body should be a BIND_EXPR whose BLOCK is the top-level one.  */
3503   if (TREE_CODE (body) == BIND_EXPR)
3504     {
3505       BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3506       DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3507     }
3508 
3509   DECL_SAVED_TREE (fndecl) = body;
3510 
3511   current_function_decl = decl_function_context (fndecl);
3512 }
3513 
3514 /* Wrap up compilation of SUBPROG_DECL, a subprogram body.  */
3515 
3516 void
3517 rest_of_subprog_body_compilation (tree subprog_decl)
3518 {
3519   /* We cannot track the location of errors past this point.  */
3520   Current_Error_Node = Empty;
3521 
3522   /* If we're only annotating types, don't actually compile this function.  */
3523   if (type_annotate_only)
3524     return;
3525 
3526   /* Dump functions before gimplification.  */
3527   dump_function (TDI_original, subprog_decl);
3528 
3529   if (!decl_function_context (subprog_decl))
3530     cgraph_node::finalize_function (subprog_decl, false);
3531   else
3532     /* Register this function with cgraph just far enough to get it
3533        added to our parent's nested function list.  */
3534     (void) cgraph_node::get_create (subprog_decl);
3535 }
3536 
3537 tree
3538 gnat_builtin_function (tree decl)
3539 {
3540   gnat_pushdecl (decl, Empty);
3541   return decl;
3542 }
3543 
3544 /* Return an integer type with the number of bits of precision given by
3545    PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
3546    it is a signed type.  */
3547 
3548 tree
3549 gnat_type_for_size (unsigned precision, int unsignedp)
3550 {
3551   tree t;
3552   char type_name[20];
3553 
3554   if (precision <= 2 * MAX_BITS_PER_WORD
3555       && signed_and_unsigned_types[precision][unsignedp])
3556     return signed_and_unsigned_types[precision][unsignedp];
3557 
3558   if (unsignedp)
3559     t = make_unsigned_type (precision);
3560   else
3561     t = make_signed_type (precision);
3562   TYPE_ARTIFICIAL (t) = 1;
3563 
3564   if (precision <= 2 * MAX_BITS_PER_WORD)
3565     signed_and_unsigned_types[precision][unsignedp] = t;
3566 
3567   if (!TYPE_NAME (t))
3568     {
3569       sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3570       TYPE_NAME (t) = get_identifier (type_name);
3571     }
3572 
3573   return t;
3574 }
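
/* Worked example (illustrative): gnat_type_for_size (8, 1) returns the
   cached 8-bit unsigned type, naming it UNSIGNED_8 if it was anonymous,
   while gnat_type_for_size (8, 0) returns its signed counterpart named
   SIGNED_8.  Later calls with the same arguments return the same node
   from the signed_and_unsigned_types cache.  */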
3575 
3576 /* Likewise for floating-point types.  */
3577 
3578 static tree
3579 float_type_for_precision (int precision, machine_mode mode)
3580 {
3581   tree t;
3582   char type_name[20];
3583 
3584   if (float_types[(int) mode])
3585     return float_types[(int) mode];
3586 
3587   float_types[(int) mode] = t = make_node (REAL_TYPE);
3588   TYPE_PRECISION (t) = precision;
3589   layout_type (t);
3590 
3591   gcc_assert (TYPE_MODE (t) == mode);
3592   if (!TYPE_NAME (t))
3593     {
3594       sprintf (type_name, "FLOAT_%d", precision);
3595       TYPE_NAME (t) = get_identifier (type_name);
3596     }
3597 
3598   return t;
3599 }
3600 
3601 /* Return a data type that has machine mode MODE.  UNSIGNEDP selects
3602    an unsigned type; otherwise a signed type is returned.  */
3603 
3604 tree
3605 gnat_type_for_mode (machine_mode mode, int unsignedp)
3606 {
3607   if (mode == BLKmode)
3608     return NULL_TREE;
3609 
3610   if (mode == VOIDmode)
3611     return void_type_node;
3612 
3613   if (COMPLEX_MODE_P (mode))
3614     return NULL_TREE;
3615 
3616   scalar_float_mode float_mode;
3617   if (is_a <scalar_float_mode> (mode, &float_mode))
3618     return float_type_for_precision (GET_MODE_PRECISION (float_mode),
3619 				     float_mode);
3620 
3621   scalar_int_mode int_mode;
3622   if (is_a <scalar_int_mode> (mode, &int_mode))
3623     return gnat_type_for_size (GET_MODE_BITSIZE (int_mode), unsignedp);
3624 
3625   if (VECTOR_MODE_P (mode))
3626     {
3627       machine_mode inner_mode = GET_MODE_INNER (mode);
3628       tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3629       if (inner_type)
3630 	return build_vector_type_for_mode (inner_type, mode);
3631     }
3632 
3633   return NULL_TREE;
3634 }
3635 
3636 /* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
3637    signedness being specified by UNSIGNEDP.  */
3638 
3639 tree
3640 gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
3641 {
3642   if (type_node == char_type_node)
3643     return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3644 
3645   tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);
3646 
3647   if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3648     {
3649       type = copy_type (type);
3650       TREE_TYPE (type) = type_node;
3651     }
3652   else if (TREE_TYPE (type_node)
3653 	   && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3654 	   && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3655     {
3656       type = copy_type (type);
3657       TREE_TYPE (type) = TREE_TYPE (type_node);
3658     }
3659 
3660   return type;
3661 }
3662 
3663 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3664    transparently converted to each other.  */
3665 
3666 int
3667 gnat_types_compatible_p (tree t1, tree t2)
3668 {
3669   enum tree_code code;
3670 
3671   /* This is the default criterion.  */
3672   if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3673     return 1;
3674 
3675   /* We only check structural equivalence here.  */
3676   if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3677     return 0;
3678 
3679   /* Vector types are also compatible if they have the same number of subparts
3680      and the same form of (scalar) element type.  */
3681   if (code == VECTOR_TYPE
3682       && known_eq (TYPE_VECTOR_SUBPARTS (t1), TYPE_VECTOR_SUBPARTS (t2))
3683       && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3684       && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3685     return 1;
3686 
3687   /* Array types are also compatible if they are constrained and have the same
3688      domain(s), the same component type and the same scalar storage order.  */
3689   if (code == ARRAY_TYPE
3690       && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3691 	  || (TYPE_DOMAIN (t1)
3692 	      && TYPE_DOMAIN (t2)
3693 	      && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3694 				     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3695 	      && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
3696 				     TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
3697       && (TREE_TYPE (t1) == TREE_TYPE (t2)
3698 	  || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
3699 	      && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
3700       && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
3701     return 1;
3702 
3703   return 0;
3704 }
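
/* Worked example (illustrative): two distinct constrained array types
   built for String (1 .. 3) are compatible: same ARRAY_TYPE code, equal
   1 .. 3 domain bounds, the same component type and the same scalar
   storage order, so values of one type convert transparently to the
   other.  */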
3705 
3706 /* Return true if EXPR is a useless type conversion.  */
3707 
3708 bool
3709 gnat_useless_type_conversion (tree expr)
3710 {
3711   if (CONVERT_EXPR_P (expr)
3712       || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3713       || TREE_CODE (expr) == NON_LVALUE_EXPR)
3714     return gnat_types_compatible_p (TREE_TYPE (expr),
3715 				    TREE_TYPE (TREE_OPERAND (expr, 0)));
3716 
3717   return false;
3718 }
3719 
3720 /* Return true if T, a {FUNCTION,METHOD}_TYPE, has the specified flags.  */
3721 
3722 bool
3723 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3724 		     bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3725 {
3726   return TYPE_CI_CO_LIST (t) == cico_list
3727 	 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3728 	 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3729 	 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3730 }
3731 
3732 /* EXP is an expression for the size of an object.  If this size contains
3733    discriminant references, replace them with the maximum (if MAX_P) or
3734    minimum (if !MAX_P) possible value of the discriminant.
3735 
3736    Note that the expression may have already been gimplified, in which case
3737    COND_EXPRs have VOID_TYPE and no operands, and this must be handled.  */
3738 
3739 tree
3740 max_size (tree exp, bool max_p)
3741 {
3742   enum tree_code code = TREE_CODE (exp);
3743   tree type = TREE_TYPE (exp);
3744   tree op0, op1, op2;
3745 
3746   switch (TREE_CODE_CLASS (code))
3747     {
3748     case tcc_declaration:
3749     case tcc_constant:
3750       return exp;
3751 
3752     case tcc_exceptional:
3753       gcc_assert (code == SSA_NAME);
3754       return exp;
3755 
3756     case tcc_vl_exp:
3757       if (code == CALL_EXPR)
3758 	{
3759 	  tree t, *argarray;
3760 	  int n, i;
3761 
3762 	  t = maybe_inline_call_in_expr (exp);
3763 	  if (t)
3764 	    return max_size (t, max_p);
3765 
3766 	  n = call_expr_nargs (exp);
3767 	  gcc_assert (n > 0);
3768 	  argarray = XALLOCAVEC (tree, n);
3769 	  for (i = 0; i < n; i++)
3770 	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3771 	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3772 	}
3773       break;
3774 
3775     case tcc_reference:
3776       /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3777 	 modify.  Otherwise, we treat it like a variable.  */
3778       if (CONTAINS_PLACEHOLDER_P (exp))
3779 	{
3780 	  tree base_type = get_base_type (TREE_TYPE (TREE_OPERAND (exp, 1)));
3781 	  tree val
3782 	    = fold_convert (base_type,
3783 			    max_p
3784 			    ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
3785 
3786 	  /* Walk down the extra subtypes to get more restrictive bounds.  */
3787 	  while (TYPE_IS_EXTRA_SUBTYPE_P (type))
3788 	    {
3789 	      type = TREE_TYPE (type);
3790 	      if (max_p)
3791 		val = fold_build2 (MIN_EXPR, base_type, val,
3792 				   fold_convert (base_type,
3793 						 TYPE_MAX_VALUE (type)));
3794 	      else
3795 		val = fold_build2 (MAX_EXPR, base_type, val,
3796 				   fold_convert (base_type,
3797 						 TYPE_MIN_VALUE (type)));
3798 	    }
3799 
3800 	  return fold_convert (type, max_size (val, max_p));
3801 	}
3802 
3803       return exp;
3804 
3805     case tcc_comparison:
3806       return build_int_cst (type, max_p ? 1 : 0);
3807 
3808     case tcc_unary:
3809       op0 = TREE_OPERAND (exp, 0);
3810 
3811       if (code == NON_LVALUE_EXPR)
3812 	return max_size (op0, max_p);
3813 
3814       if (VOID_TYPE_P (TREE_TYPE (op0)))
3815 	return max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type);
3816 
3817       op0 = max_size (op0, code == NEGATE_EXPR ? !max_p : max_p);
3818 
3819       if (op0 == TREE_OPERAND (exp, 0))
3820 	return exp;
3821 
3822       return fold_build1 (code, type, op0);
3823 
3824     case tcc_binary:
3825       op0 = TREE_OPERAND (exp, 0);
3826       op1 = TREE_OPERAND (exp, 1);
3827 
3828       /* If we have a multiply-add with a "negative" value in an unsigned
3829 	 type, do a multiply-subtract with the negated value, in order to
3830 	 avoid creating a spurious overflow below.  */
3831       if (code == PLUS_EXPR
3832 	  && TREE_CODE (op0) == MULT_EXPR
3833 	  && TYPE_UNSIGNED (type)
3834 	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
3835 	  && !TREE_OVERFLOW (TREE_OPERAND (op0, 1))
3836 	  && tree_int_cst_sign_bit (TREE_OPERAND (op0, 1)))
3837 	{
3838 	  tree tmp = op1;
3839 	  op1 = build2 (MULT_EXPR, type, TREE_OPERAND (op0, 0),
3840 			fold_build1 (NEGATE_EXPR, type,
3841 				    TREE_OPERAND (op0, 1)));
3842 	  op0 = tmp;
3843 	  code = MINUS_EXPR;
3844 	}
3845 
3846       op0 = max_size (op0, max_p);
3847       op1 = max_size (op1, code == MINUS_EXPR ? !max_p : max_p);
3848 
3849       if (code == MINUS_EXPR || code == PLUS_EXPR)
3850 	{
3851 	  /* If the op0 has overflowed and the op1 is a variable,
3852 	     propagate the overflow by returning the op0.  */
3853 	  if (TREE_CODE (op0) == INTEGER_CST
3854 	      && TREE_OVERFLOW (op0)
3855 	      && TREE_CODE (op1) != INTEGER_CST)
3856 	    return op0;
3857 
3858 	  /* If we have a "negative" value in an unsigned type, do the
3859 	     opposite operation on the negated value, in order to avoid
3860 	     creating a spurious overflow below.  */
3861 	  if (TYPE_UNSIGNED (type)
3862 	      && TREE_CODE (op1) == INTEGER_CST
3863 	      && !TREE_OVERFLOW (op1)
3864 	      && tree_int_cst_sign_bit (op1))
3865 	    {
3866 	      op1 = fold_build1 (NEGATE_EXPR, type, op1);
3867 	      code = (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR);
3868 	    }
3869 	}
3870 
3871       if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3872 	return exp;
3873 
3874       /* We need to detect overflows so we call size_binop here.  */
3875       return size_binop (code, op0, op1);
3876 
3877     case tcc_expression:
3878       switch (TREE_CODE_LENGTH (code))
3879 	{
3880 	case 1:
3881 	  if (code == SAVE_EXPR)
3882 	    return exp;
3883 
3884 	  op0 = max_size (TREE_OPERAND (exp, 0),
3885 			  code == TRUTH_NOT_EXPR ? !max_p : max_p);
3886 
3887 	  if (op0 == TREE_OPERAND (exp, 0))
3888 	    return exp;
3889 
3890 	  return fold_build1 (code, type, op0);
3891 
3892 	case 2:
3893 	  if (code == COMPOUND_EXPR)
3894 	    return max_size (TREE_OPERAND (exp, 1), max_p);
3895 
3896 	  op0 = max_size (TREE_OPERAND (exp, 0), max_p);
3897 	  op1 = max_size (TREE_OPERAND (exp, 1), max_p);
3898 
3899 	  if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3900 	    return exp;
3901 
3902 	  return fold_build2 (code, type, op0, op1);
3903 
3904 	case 3:
3905 	  if (code == COND_EXPR)
3906 	    {
3907 	      op0 = TREE_OPERAND (exp, 0);
3908 	      op1 = TREE_OPERAND (exp, 1);
3909 	      op2 = TREE_OPERAND (exp, 2);
3910 
3911 	      if (!op1 || !op2)
3912 		return exp;
3913 
3914 	      op1 = max_size (op1, max_p);
3915 	      op2 = max_size (op2, max_p);
3916 
3917 	      /* If we have the MAX of a "negative" value in an unsigned type
3918 		 and zero for a length expression, just return zero.  */
3919 	      if (max_p
3920 		  && TREE_CODE (op0) == LE_EXPR
3921 		  && TYPE_UNSIGNED (type)
3922 		  && TREE_CODE (op1) == INTEGER_CST
3923 		  && !TREE_OVERFLOW (op1)
3924 		  && tree_int_cst_sign_bit (op1)
3925 		  && integer_zerop (op2))
3926 		return op2;
3927 
3928 	      return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type, op1, op2);
3929 	    }
3930 	  break;
3931 
3932 	default:
3933 	  break;
3934 	}
3935 
3936       /* Other tree classes cannot happen.  */
3937     default:
3938       break;
3939     }
3940 
3941   gcc_unreachable ();
3942 }
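
/* Worked example (illustrative): for the size expression

     N >= 1 ? N * 8 : 0

   where N is a discriminant reference of subtype 0 .. 100, the COND_EXPR
   case computes the maximum of both arms, MAX_EXPR (100 * 8, 0) = 800,
   whereas with MAX_P false it computes MIN_EXPR (0 * 8, 0) = 0.  */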
3943 
3944 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3945    EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3946    Return a constructor for the template.  */
3947 
3948 tree
3949 build_template (tree template_type, tree array_type, tree expr)
3950 {
3951   vec<constructor_elt, va_gc> *template_elts = NULL;
3952   tree bound_list = NULL_TREE;
3953   tree field;
3954 
3955   while (TREE_CODE (array_type) == RECORD_TYPE
3956 	 && (TYPE_PADDING_P (array_type)
3957 	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3958     array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3959 
3960   if (TREE_CODE (array_type) == ARRAY_TYPE
3961       || (TREE_CODE (array_type) == INTEGER_TYPE
3962 	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3963     bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3964 
3965   /* First make the list for a CONSTRUCTOR for the template.  Go down
3966      the field list of the template instead of the type chain because
3967      this array might be an Ada array of arrays and we can't tell where
3968      the nested arrays stop being the underlying object.  */
3969   for (field = TYPE_FIELDS (template_type);
3970        field;
3971        field = DECL_CHAIN (DECL_CHAIN (field)))
3972     {
3973       tree bounds, min, max;
3974 
3975       /* If we have a bound list, get the bounds from there.  Likewise
3976 	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
3977 	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the
3978 	 template, but this will only give us a maximum range.  */
3979       if (bound_list)
3980 	{
3981 	  bounds = TREE_VALUE (bound_list);
3982 	  bound_list = TREE_CHAIN (bound_list);
3983 	}
3984       else if (TREE_CODE (array_type) == ARRAY_TYPE)
3985 	{
3986 	  bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3987 	  array_type = TREE_TYPE (array_type);
3988 	}
3989       else if (expr && TREE_CODE (expr) == PARM_DECL
3990 	       && DECL_BY_COMPONENT_PTR_P (expr))
3991 	bounds = TREE_TYPE (field);
3992       else
3993 	gcc_unreachable ();
3994 
3995       min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
3996       max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
3997 
3998       /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3999 	 substitute it from OBJECT.  */
4000       min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
4001       max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
4002 
4003       CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
4004       CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
4005     }
4006 
4007   return gnat_build_constructor (template_type, template_elts);
4008 }
4009 
4010 /* Return true if TYPE is suitable for the element type of a vector.  */
4011 
4012 static bool
4013 type_for_vector_element_p (tree type)
4014 {
4015   machine_mode mode;
4016 
4017   if (!INTEGRAL_TYPE_P (type)
4018       && !SCALAR_FLOAT_TYPE_P (type)
4019       && !FIXED_POINT_TYPE_P (type))
4020     return false;
4021 
4022   mode = TYPE_MODE (type);
4023   if (GET_MODE_CLASS (mode) != MODE_INT
4024       && !SCALAR_FLOAT_MODE_P (mode)
4025       && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
4026     return false;
4027 
4028   return true;
4029 }
4030 
4031 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
4032    this is not possible.  If ATTRIBUTE is non-zero, we are processing the
4033    attribute declaration and want to issue error messages on failure.  */
4034 
4035 static tree
4036 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
4037 {
4038   unsigned HOST_WIDE_INT size_int, inner_size_int;
4039   int nunits;
4040 
4041   /* Silently punt on variable sizes.  We can't make vector types for them,
4042      we need to ignore them on front-end generated subtypes of unconstrained
4043      base types, and this attribute is for binding implementors, not end
4044      users, so we should never get here from legitimate explicit uses.  */
4045   if (!tree_fits_uhwi_p (size))
4046     return NULL_TREE;
4047   size_int = tree_to_uhwi (size);
4048 
4049   if (!type_for_vector_element_p (inner_type))
4050     {
4051       if (attribute)
4052 	error ("invalid element type for attribute %qs",
4053 	       IDENTIFIER_POINTER (attribute));
4054       return NULL_TREE;
4055     }
4056   inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
4057 
4058   if (size_int % inner_size_int)
4059     {
4060       if (attribute)
4061 	error ("vector size not an integral multiple of component size");
4062       return NULL_TREE;
4063     }
4064 
4065   if (size_int == 0)
4066     {
4067       if (attribute)
4068 	error ("zero vector size");
4069       return NULL_TREE;
4070     }
4071 
4072   nunits = size_int / inner_size_int;
4073   if (nunits & (nunits - 1))
4074     {
4075       if (attribute)
4076 	error ("number of components of vector not a power of two");
4077       return NULL_TREE;
4078     }
4079 
4080   return build_vector_type (inner_type, nunits);
4081 }
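
/* Worked example (illustrative): for a 32-bit float INNER_TYPE and a SIZE
   of 16 bytes, inner_size_int is 4 and nunits is 4, a power of two, so a
   4-element vector type is returned.  A SIZE of 12 bytes passes the
   divisibility check but yields nunits = 3, which trips the "not a power
   of two" diagnostic when ATTRIBUTE is set.  */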
4082 
4083 /* Return a vector type whose representative array type is ARRAY_TYPE, or
4084    NULL_TREE if this is not possible.  If ATTRIBUTE is non-zero, we are
4085    processing the attribute and want to issue error messages on failure.  */
4086 
4087 static tree
4088 build_vector_type_for_array (tree array_type, tree attribute)
4089 {
4090   tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
4091 						 TYPE_SIZE_UNIT (array_type),
4092 						 attribute);
4093   if (!vector_type)
4094     return NULL_TREE;
4095 
4096   TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
4097   return vector_type;
4098 }
4099 
4100 /* Build a type to be used to represent an aliased object whose nominal type
4101    is an unconstrained array.  This consists of a RECORD_TYPE containing a
4102    field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4103    If ARRAY_TYPE is that of an unconstrained array, this is used to represent
4104    an arbitrary unconstrained object.  Use NAME as the name of the record.
4105    DEBUG_INFO_P is true if we need to write debug information for the type.  */
4106 
4107 tree
4108 build_unc_object_type (tree template_type, tree object_type, tree name,
4109 		       bool debug_info_p)
4110 {
4111   tree decl;
4112   tree type = make_node (RECORD_TYPE);
4113   tree template_field
4114     = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
4115 			 NULL_TREE, NULL_TREE, 0, 1);
4116   tree array_field
4117     = create_field_decl (get_identifier ("ARRAY"), object_type, type,
4118 			 NULL_TREE, NULL_TREE, 0, 1);
4119 
4120   TYPE_NAME (type) = name;
4121   TYPE_CONTAINS_TEMPLATE_P (type) = 1;
4122   DECL_CHAIN (template_field) = array_field;
4123   finish_record_type (type, template_field, 0, true);
4124 
4125   /* Declare it now since it will never be declared otherwise.  This is
4126      necessary to ensure that its subtrees are properly marked.  */
4127   decl = create_type_decl (name, type, true, debug_info_p, Empty);
4128 
4129   /* template_type will not be used elsewhere, so to keep the debug info
4130      clean and to avoid scoping issues, make decl its context.  */
4132   gnat_set_type_context (template_type, decl);
4133 
4134   return type;
4135 }
4136 
4137 /* Same, taking a thin or fat pointer type instead of a template type. */
4138 
4139 tree
4140 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
4141 				tree name, bool debug_info_p)
4142 {
4143   tree template_type;
4144 
4145   gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
4146 
4147   template_type
4148     = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
4149        ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
4150        : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
4151 
4152   return
4153     build_unc_object_type (template_type, object_type, name, debug_info_p);
4154 }
4155 
4156 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
4157    In the normal case this is just two adjustments, but we have more to
4158    do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */
4159 
4160 void
4161 update_pointer_to (tree old_type, tree new_type)
4162 {
4163   tree ptr = TYPE_POINTER_TO (old_type);
4164   tree ref = TYPE_REFERENCE_TO (old_type);
4165   tree t;
4166 
4167   /* If this is the main variant, process all the other variants first.  */
4168   if (TYPE_MAIN_VARIANT (old_type) == old_type)
4169     for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
4170       update_pointer_to (t, new_type);
4171 
4172   /* If no pointers and no references, we are done.  */
4173   if (!ptr && !ref)
4174     return;
4175 
4176   /* Merge the old type qualifiers in the new type.
4177 
4178      Each old variant has qualifiers for specific reasons, and the new
4179      designated type as well.  Each set of qualifiers represents useful
4180      information grabbed at some point, and merging the two simply unifies
4181      these inputs into the final type description.
4182 
4183      Consider for instance a volatile type frozen after an access to constant
4184      type designating it; after the designated type's freeze, we get here with
4185      a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4186      when the access type was processed.  We will make a volatile and readonly
4187      designated type, because that's what it really is.
4188 
4189      We might also get here for a non-dummy OLD_TYPE variant with different
4190      qualifiers than those of NEW_TYPE, for instance in some cases of pointers
4191      to private record type elaboration (see the comments around the call to
4192      this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
4193      the qualifiers in those cases too, to avoid accidentally discarding the
4194      initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
4195   new_type
4196     = build_qualified_type (new_type,
4197 			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
4198 
4199   /* If old type and new type are identical, there is nothing to do.  */
4200   if (old_type == new_type)
4201     return;
4202 
4203   /* Otherwise, first handle the simple case.  */
4204   if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
4205     {
4206       tree new_ptr, new_ref;
4207 
4208       /* If pointer or reference already points to new type, nothing to do.
4209 	 This can happen as update_pointer_to can be invoked multiple times
4210 	 on the same couple of types because of the type variants.  */
4211       if ((ptr && TREE_TYPE (ptr) == new_type)
4212 	  || (ref && TREE_TYPE (ref) == new_type))
4213 	return;
4214 
4215       /* Chain PTR and its variants at the end.  */
4216       new_ptr = TYPE_POINTER_TO (new_type);
4217       if (new_ptr)
4218 	{
4219 	  while (TYPE_NEXT_PTR_TO (new_ptr))
4220 	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
4221 	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
4222 	}
4223       else
4224 	TYPE_POINTER_TO (new_type) = ptr;
4225 
4226       /* Now adjust them.  */
4227       for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
4228 	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
4229 	  {
4230 	    TREE_TYPE (t) = new_type;
4231 	    if (TYPE_NULL_BOUNDS (t))
4232 	      TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
4233 	    TYPE_CANONICAL (t) = TYPE_CANONICAL (TYPE_POINTER_TO (new_type));
4234 	  }
4235 
4236       /* Chain REF and its variants at the end.  */
4237       new_ref = TYPE_REFERENCE_TO (new_type);
4238       if (new_ref)
4239 	{
4240 	  while (TYPE_NEXT_REF_TO (new_ref))
4241 	    new_ref = TYPE_NEXT_REF_TO (new_ref);
4242 	  TYPE_NEXT_REF_TO (new_ref) = ref;
4243 	}
4244       else
4245 	TYPE_REFERENCE_TO (new_type) = ref;
4246 
4247       /* Now adjust them.  */
4248       for (; ref; ref = TYPE_NEXT_REF_TO (ref))
4249 	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
4250 	  {
4251 	    TREE_TYPE (t) = new_type;
4252 	    TYPE_CANONICAL (t) = TYPE_CANONICAL (TYPE_REFERENCE_TO (new_type));
4253 	  }
4254 
4255       TYPE_POINTER_TO (old_type) = NULL_TREE;
4256       TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4257     }
4258 
4259   /* Now deal with the unconstrained array case.  In this case the pointer
4260      is actually a record where both fields are pointers to dummy nodes.
4261      Turn them into pointers to the correct types using update_pointer_to.
4262      Likewise for the pointer to the object record (thin pointer).  */
4263   else
4264     {
4265       tree new_ptr = TYPE_POINTER_TO (new_type);
4266 
4267       gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
4268 
4269       /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
4270 	 since update_pointer_to can be invoked multiple times on the same
4271 	 couple of types because of the type variants.  */
4272       if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
4273 	return;
4274 
4275       update_pointer_to
4276 	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
4277 	 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
4278 
4279       update_pointer_to
4280 	(TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
4281 	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
4282 
4283       update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
4284 			 TYPE_OBJECT_RECORD_TYPE (new_type));
4285 
4286       TYPE_POINTER_TO (old_type) = NULL_TREE;
4287       TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4288     }
4289 }
4290 
4291 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4292    unconstrained one.  This involves making or finding a template.  */
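
/* A typical instance, assuming the usual GNAT layout of fat pointers as a
   record pairing a pointer to the array data with a pointer to the template
   holding the bounds: given

     type String_Access is access all String;

   converting the address of an object of constrained subtype
   String (1 .. 3) to String_Access comes through this routine, which pairs
   that address with the address of a template recording the (1, 3)
   bounds.  */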

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_addr;
  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, 2);

  /* If EXPR is null, make a fat pointer that contains a null pointer to the
     array (compare_fat_pointers ensures that this is the full discriminant)
     and a valid pointer to the bounds.  This latter property is necessary
     since the compiler can hoist the load of the bounds done through it.  */
  if (integer_zerop (expr))
    {
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      tree null_bounds, t;

      if (TYPE_NULL_BOUNDS (ptr_template_type))
        null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
      else
        {
          /* The template type can still be dummy at this point so we build an
             empty constructor.  The middle-end will fill it in with zeros.  */
          t = build_constructor (template_type, NULL);
          TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
          null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
          SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
        }

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
                              fold_convert (p_array_type, null_pointer_node));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
      t = build_constructor (type, v);
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
      TREE_CONSTANT (t) = 0;
      TREE_STATIC (t) = 1;

      return t;
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree field = TYPE_FIELDS (TREE_TYPE (etype));

      expr = gnat_protect_expr (expr);

      /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
         the thin pointer value has been shifted so we shift it back to get
         the template address.  */
      if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
        {
          template_addr
            = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
                               fold_build1 (NEGATE_EXPR, sizetype,
                                            byte_position
                                            (DECL_CHAIN (field))));
          template_addr
            = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
                            template_addr);
        }

      /* Otherwise we explicitly take the address of the fields.  */
      else
        {
          expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
          template_addr
            = build_unary_op (ADDR_EXPR, NULL_TREE,
                              build_component_ref (expr, field, false));
          expr = build_unary_op (ADDR_EXPR, NULL_TREE,
                                 build_component_ref (expr, DECL_CHAIN (field),
                                                      false));
        }
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_addr
      = build_unary_op (ADDR_EXPR, NULL_TREE,
                        build_template (template_type, TREE_TYPE (etype),
                                        expr));

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
  return gnat_build_constructor (type, v);
}

/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */
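
/* For instance, initializing an object of a padded type from an expression
   of the unpadded type, or assigning an expression of a packed record type
   to an object of the original unpacked type, both come through here; the
   corresponding cases are handled below.  */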

tree
convert (tree type, tree expr)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both input and output have padding and are of variable size, do this
     as an unchecked conversion.  Likewise if one is a mere variant of the
     other, so we avoid a pointless unpad/repad sequence.  */
  else if (code == RECORD_TYPE && ecode == RECORD_TYPE
           && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
           && (!TREE_CONSTANT (TYPE_SIZE (type))
               || !TREE_CONSTANT (TYPE_SIZE (etype))
               || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
               || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
                  == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
    ;

  /* If the output type has padding, convert to the inner type and make a
     constructor to build the record, unless a variable size is involved.  */
  else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
    {
      /* If we previously converted from another type and our type is
         of variable size, remove the conversion to avoid the need for
         variable-sized temporaries.  Likewise for a conversion between
         original and packable version.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
          && (!TREE_CONSTANT (TYPE_SIZE (type))
              || (ecode == RECORD_TYPE
                  && TYPE_NAME (etype)
                     == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
        expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
         object if we have variable size in order to avoid the need for some
         variable-sized temporaries.  Likewise if the padding is a variant
         of the other, so we avoid a pointless unpad/repad sequence.  */
      if (TREE_CODE (expr) == COMPONENT_REF
          && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
          && (!TREE_CONSTANT (TYPE_SIZE (type))
              || TYPE_MAIN_VARIANT (type)
                 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
              || (ecode == RECORD_TYPE
                  && TYPE_NAME (etype)
                     == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
        return convert (type, TREE_OPERAND (expr, 0));

      /* If the inner type is of self-referential size and the expression type
         is a record, do this as an unchecked conversion unless both types are
         essentially the same.  */
      if (ecode == RECORD_TYPE
          && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type)))
          && TYPE_MAIN_VARIANT (etype)
             != TYPE_MAIN_VARIANT (TREE_TYPE (TYPE_FIELDS (type))))
        return unchecked_convert (type, expr, false);

      /* If we are converting between array types with variable size, do the
         final conversion as an unchecked conversion, again to avoid the need
         for some variable-sized temporaries.  If valid, this conversion is
         very likely purely technical and without real effects.  */
      if (ecode == ARRAY_TYPE
          && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
          && !TREE_CONSTANT (TYPE_SIZE (etype))
          && !TREE_CONSTANT (TYPE_SIZE (type)))
        return unchecked_convert (type,
                                  convert (TREE_TYPE (TYPE_FIELDS (type)),
                                           expr),
                                  false);

      tree t = convert (TREE_TYPE (TYPE_FIELDS (type)), expr);

      /* If converting to the inner type has already created a CONSTRUCTOR with
         the right size, then reuse it instead of creating another one.  This
         can happen for the padding type built to overalign local variables.  */
      if (TREE_CODE (t) == VIEW_CONVERT_EXPR
          && TREE_CODE (TREE_OPERAND (t, 0)) == CONSTRUCTOR
          && TREE_CONSTANT (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0))))
          && tree_int_cst_equal (TYPE_SIZE (type),
                                 TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0)))))
        return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (t, 0));

      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, 1);
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), t);
      return gnat_build_constructor (type, v);
    }

  /* If the input type has padding, remove it and convert to the output type.
     The ordering of the conditions is arranged to ensure that the output
     type is not a padding type here, as it is not clear whether the
     conversion would always be correct if it were.  */
  else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
    {
      tree unpadded;

      /* If we have just converted to this padded type, just get the
         inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR)
        unpadded = CONSTRUCTOR_ELT (expr, 0)->value;

      /* Otherwise, build an explicit component reference.  */
      else
        unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);

      return convert (type, unpadded);
    }

  /* If the input is a biased type, convert first to the base type and add
     the bias.  Note that the bias must go through a full conversion to the
     base type, lest it be itself a biased value; this happens for subtypes
     of biased types.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
                                       fold_convert (TREE_TYPE (etype), expr),
                                       convert (TREE_TYPE (etype),
                                                TYPE_MIN_VALUE (etype))));

  /* If the input is a justified modular type, we need to extract the actual
     object before converting it to another type, with the exceptions of an
     [unconstrained] array and a mere type variant.  It is useful to avoid
     the extraction and conversion in these cases because it could end up
     replacing a VAR_DECL with a constructor when we might be about to take
     the address of the result.  */
  if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
      && code != ARRAY_TYPE
      && code != UNCONSTRAINED_ARRAY_TYPE
      && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
    return
      convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template.  */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, 2);

      /* If the source already has a template, get a reference to the
         associated array only, as we are going to rebuild a template
         for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
                              build_template (TREE_TYPE (TYPE_FIELDS (type)),
                                              obj_type, NULL_TREE));
      if (expr)
        CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
                                convert (obj_type, expr));
      return gnat_build_constructor (type, v);
    }

  /* There are some cases of expressions that we process specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
         conversion in gnat_expand_expr.  NULL_EXPR does not represent
         an actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
      /* If we are converting a STRING_CST to another constrained array type,
         just make a new one in the proper type.  */
      if (code == ecode
          && !(TREE_CONSTANT (TYPE_SIZE (etype))
               && !TREE_CONSTANT (TYPE_SIZE (type))))
        {
          expr = copy_node (expr);
          TREE_TYPE (expr) = type;
          return expr;
        }
      break;

    case VECTOR_CST:
      /* If we are converting a VECTOR_CST to a mere type variant, just make
         a new one in the proper type.  */
      if (code == ecode && gnat_types_compatible_p (type, etype))
        {
          expr = copy_node (expr);
          TREE_TYPE (expr) = type;
          return expr;
        }
      break;

    case CONSTRUCTOR:
      /* If we are converting a CONSTRUCTOR to a mere type variant, or to
         another padding type around the same type, just make a new one in
         the proper type.  */
      if (code == ecode
          && (gnat_types_compatible_p (type, etype)
              || (code == RECORD_TYPE
                  && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
                  && TREE_TYPE (TYPE_FIELDS (type))
                     == TREE_TYPE (TYPE_FIELDS (etype)))))
        {
          expr = copy_node (expr);
          TREE_TYPE (expr) = type;
          CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
          return expr;
        }

      /* Likewise for a conversion between original and packable version, or
         conversion between types of the same size and with the same list of
         fields, but we have to work harder to preserve type consistency.  */
      if (code == ecode
          && code == RECORD_TYPE
          && (TYPE_NAME (type) == TYPE_NAME (etype)
              || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
        {
          vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
          unsigned HOST_WIDE_INT len = vec_safe_length (e);
          vec<constructor_elt, va_gc> *v;
          vec_alloc (v, len);
          tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
          unsigned HOST_WIDE_INT idx;
          tree index, value;

          /* Whether we need to clear TREE_CONSTANT et al. on the output
             constructor when we convert in place.  */
          bool clear_constant = false;

          FOR_EACH_CONSTRUCTOR_ELT (e, idx, index, value)
            {
              /* Skip the missing fields in the CONSTRUCTOR.  */
              while (efield && field && !SAME_FIELD_P (efield, index))
                {
                  efield = DECL_CHAIN (efield);
                  field = DECL_CHAIN (field);
                }
              /* The field must be the same.  */
              if (!(efield && field && SAME_FIELD_P (efield, field)))
                break;
              constructor_elt elt
                = {field, convert (TREE_TYPE (field), value)};
              v->quick_push (elt);

              /* If packing has made this field a bitfield and the input
                 value couldn't be emitted statically any more, we need to
                 clear TREE_CONSTANT on our output.  */
              if (!clear_constant
                  && TREE_CONSTANT (expr)
                  && !CONSTRUCTOR_BITFIELD_P (efield)
                  && CONSTRUCTOR_BITFIELD_P (field)
                  && !initializer_constant_valid_for_bitfield_p (value))
                clear_constant = true;

              efield = DECL_CHAIN (efield);
              field = DECL_CHAIN (field);
            }

          /* If we have been able to match and convert all the input fields
             to their output type, convert in place now.  We'll fall back to
             a view conversion downstream otherwise.  */
          if (idx == len)
            {
              expr = copy_node (expr);
              TREE_TYPE (expr) = type;
              CONSTRUCTOR_ELTS (expr) = v;
              if (clear_constant)
                TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
              return expr;
            }
        }

      /* Likewise for a conversion between array type and vector type with a
         compatible representative array.  */
      else if (code == VECTOR_TYPE
               && ecode == ARRAY_TYPE
               && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
                                           etype))
        {
          vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
          unsigned HOST_WIDE_INT len = vec_safe_length (e);
          vec<constructor_elt, va_gc> *v;
          unsigned HOST_WIDE_INT ix;
          tree value;

          /* Build a VECTOR_CST from a *constant* array constructor.  */
          if (TREE_CONSTANT (expr))
            {
              bool constant_p = true;

              /* Iterate through elements and check if all constructor
                 elements are *_CSTs.  */
              FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
                if (!CONSTANT_CLASS_P (value))
                  {
                    constant_p = false;
                    break;
                  }

              if (constant_p)
                return build_vector_from_ctor (type,
                                               CONSTRUCTOR_ELTS (expr));
            }

          /* Otherwise, build a regular vector constructor.  */
          vec_alloc (v, len);
          FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
            {
              constructor_elt elt = {NULL_TREE, value};
              v->quick_push (elt);
            }
          expr = copy_node (expr);
          TREE_TYPE (expr) = type;
          CONSTRUCTOR_ELTS (expr) = v;
          return expr;
        }
      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* First retrieve the underlying array.  */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      {
        /* GCC 4.x is very sensitive to type consistency overall, and view
           conversions thus are very frequent.  Even though just "convert"ing
           the inner operand to the output type is fine in most cases, it
           might expose unexpected input/output type mismatches in special
           circumstances so we avoid such recursive calls when we can.  */
        tree op0 = TREE_OPERAND (expr, 0);

        /* If we are converting back to the original type, we can just
           lift the input conversion.  This is a common occurrence with
           switches back-and-forth amongst type variants.  */
        if (type == TREE_TYPE (op0))
          return op0;

        /* Otherwise, if we're converting between two aggregate or vector
           types, we might be allowed to substitute the VIEW_CONVERT_EXPR
           target type in place or to just convert the inner expression.  */
        if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
            || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
          {
            /* If we are converting between mere variants, we can just
               substitute the VIEW_CONVERT_EXPR in place.  */
            if (gnat_types_compatible_p (type, etype))
              return build1 (VIEW_CONVERT_EXPR, type, op0);

            /* Otherwise, we may just bypass the input view conversion unless
               one of the types is a fat pointer, which is handled by
               specialized code below that relies on exact type matching.  */
            else if (!TYPE_IS_FAT_POINTER_P (type)
                     && !TYPE_IS_FAT_POINTER_P (etype))
              return convert (type, op0);
          }

        break;
      }

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* If we are converting between two aggregate or vector types that are mere
     variants, just make a VIEW_CONVERT_EXPR.  Likewise when we are converting
     to a vector type from its representative array type.  */
  else if ((code == ecode
            && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
            && gnat_types_compatible_p (type, etype))
           || (code == VECTOR_TYPE
               && ecode == ARRAY_TYPE
               && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
                                           etype)))
    return build1 (VIEW_CONVERT_EXPR, type, expr);

  /* If we are converting between tagged types, try to upcast properly.
     But don't do it if we are just annotating types since tagged types
     aren't fully laid out in this mode.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
           && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type)
           && !type_annotate_only)
    {
      tree child_etype = etype;
      do {
        tree field = TYPE_FIELDS (child_etype);
        if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
          return build_component_ref (expr, field, false);
        child_etype = TREE_TYPE (field);
      } while (TREE_CODE (child_etype) == RECORD_TYPE);
    }

  /* If we are converting from a smaller form of record type back to it, just
     make a VIEW_CONVERT_EXPR.  But first pad the expression to have the same
     size on both sides.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
           && smaller_form_type_p (etype, type))
    {
      expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
                                      false, false, false, true),
                      expr);
      return build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* In all other cases of related types, make a NOP_EXPR.  */
  else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return fold_convert (type, expr);

  switch (code)
    {
    case VOID_TYPE:
      return fold_build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
          && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
              || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
        return unchecked_convert (type, expr, false);

      /* If the output is a biased type, convert first to the base type and
         subtract the bias.  Note that the bias itself must go through a full
         conversion to the base type, lest it be a biased value; this happens
         for subtypes of biased types.  */
      if (TYPE_BIASED_REPRESENTATION_P (type))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (type),
                                          convert (TREE_TYPE (type), expr),
                                          convert (TREE_TYPE (type),
                                                   TYPE_MIN_VALUE (type))));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* If we are converting an additive expression to an integer type
         with lower precision, be wary of the optimization that can be
         applied by convert_to_integer.  There are two problematic cases:
           - if the first operand was originally of a biased type,
             because we could be recursively called to convert it
             to an intermediate type and thus rematerialize the
             additive operator endlessly,
           - if the expression contains a placeholder, because an
             intermediate conversion that changes the sign could
             be inserted and thus introduce an artificial overflow
             at compile time when the placeholder is substituted.  */
      if (code == INTEGER_TYPE
          && ecode == INTEGER_TYPE
          && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
          && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
        {
          tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);

          if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
               && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
              || CONTAINS_PLACEHOLDER_P (expr))
            return build1 (NOP_EXPR, type, expr);
        }

      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two thin pointers, adjust if needed to account
         for differing offsets from the base pointer, depending on whether
         there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type.  */
      if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
        {
          tree etype_pos
            = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
              ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
              : size_zero_node;
          tree type_pos
            = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
              ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
              : size_zero_node;
          tree byte_diff = size_diffop (type_pos, etype_pos);

          expr = build1 (NOP_EXPR, type, expr);
          if (integer_zerop (byte_diff))
            return expr;

          return build_binary_op (POINTER_PLUS_EXPR, type, expr,
                                  fold_convert (sizetype, byte_diff));
        }

      /* If converting fat pointer to normal or thin pointer, get the pointer
         to the array and then convert it.  */
      if (TYPE_IS_FAT_POINTER_P (etype))
        expr = build_component_ref (expr, TYPE_FIELDS (etype), false);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      /* Do a normal conversion between scalar and justified modular type.  */
      if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
        {
          vec<constructor_elt, va_gc> *v;
          vec_alloc (v, 1);

          CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
                                  convert (TREE_TYPE (TYPE_FIELDS (type)),
                                           expr));
          return gnat_build_constructor (type, v);
        }

      /* In these cases, assume the front-end has validated the conversion.
         If the conversion is valid, it will be a bit-wise conversion, so
         it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, false);

    case ARRAY_TYPE:
      /* Do a normal conversion between unconstrained and constrained array
         type, assuming the latter is a constrained version of the former.  */
      if (TREE_CODE (expr) == INDIRECT_REF
          && ecode == ARRAY_TYPE
          && TREE_TYPE (etype) == TREE_TYPE (type))
        {
          tree ptr_type = build_pointer_type (type);
          tree t = build_unary_op (INDIRECT_REF, NULL_TREE,
                                   fold_convert (ptr_type,
                                                 TREE_OPERAND (expr, 0)));
          TREE_READONLY (t) = TREE_READONLY (expr);
          TREE_THIS_NOTRAP (t) = TREE_THIS_NOTRAP (expr);
          return t;
        }

      /* In these cases, assume the front-end has validated the conversion.
         If the conversion is valid, it will be a bit-wise conversion, so
         it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, false);

    case UNION_TYPE:
      /* This is either a conversion between a tagged type and some subtype,
         which we have to mark as a UNION_TYPE because of overlapping fields,
         or a conversion of an Unchecked_Union.  */
      return unchecked_convert (type, expr, false);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If the input is a VECTOR_TYPE, convert to the representative
         array type first.  */
      if (ecode == VECTOR_TYPE)
        {
          expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
          etype = TREE_TYPE (expr);
          ecode = TREE_CODE (etype);
        }

      /* If EXPR is a constrained array, take its address, convert it to a
         fat pointer, and then dereference it.  Likewise if EXPR is a
         record containing both a template and a constrained array.
         Note that a record representing a justified modular type
         always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
          || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
          || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
          || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
        return
          build_unary_op
            (INDIRECT_REF, NULL_TREE,
             convert_to_fat_pointer (TREE_TYPE (type),
                                     build_unary_op (ADDR_EXPR,
                                                     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
         array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
        return
          build_unary_op (INDIRECT_REF, NULL_TREE,
                          convert (TREE_TYPE (type),
                                   build_unary_op (ADDR_EXPR,
                                                   NULL_TREE, expr)));
      else
        gcc_unreachable ();

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gcc_unreachable ();
    }
}

/* Create an expression whose value is that of EXPR converted to the common
   index type, which is sizetype.  EXPR is supposed to be in the base type
   of the GNAT index type.  Calling it is equivalent to doing

     convert (sizetype, expr)

   but we try to distribute the type conversion with the knowledge that EXPR
   cannot overflow in its type.  This is a best-effort approach and we fall
   back to the above expression as soon as difficulties are encountered.

   This is necessary to overcome issues that arise when the GNAT base index
   type and the GCC common index type (sizetype) don't have the same size,
   which is quite frequent on 64-bit architectures.  In this case, and if
   the GNAT base index type is signed but the iteration type of the loop has
   been forced to unsigned, the loop scalar evolution engine cannot compute
   a simple evolution for the general induction variables associated with the
   array indices, because it will preserve the wrap-around semantics in the
   unsigned type of their "inner" part.  As a result, many loop optimizations
   are blocked.

   The solution is to use a special (basic) induction variable that is at
   least as large as sizetype, and to express the aforementioned general
   induction variables in terms of this induction variable, eliminating
   the problematic intermediate truncation to the GNAT base index type.
   This is possible as long as the original expression doesn't overflow
   and if the middle-end hasn't introduced artificial overflows in the
   course of the various simplifications it can make to the expression.  */

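/* For instance, assuming a 64-bit target and a loop written as

     for J in Some_Index_Type range Lo .. Hi loop
       A (J) := 0;
     end loop;

   where Some_Index_Type is a 32-bit signed type (the names here are only
   illustrative), the computation of the address of A (J) would contain a
   truncation of the wider induction variable to the 32-bit index type,
   which the scalar evolution engine cannot see through; expressing it in
   terms of the special induction variable eliminates that truncation.  */
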
tree
convert_to_index_type (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  tree type = TREE_TYPE (expr);

  /* If the type is unsigned, overflow is allowed so we cannot be sure that
     EXPR doesn't overflow.  Keep it simple if optimization is disabled.  */
  if (TYPE_UNSIGNED (type) || !optimize || optimize_debug)
    return convert (sizetype, expr);

  switch (code)
    {
    case VAR_DECL:
      /* The main effect of the function: replace a loop parameter with its
         associated special induction variable.  */
      if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
        expr = DECL_INDUCTION_VAR (expr);
      break;

    CASE_CONVERT:
      {
        tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
        /* Bail out as soon as we suspect some sort of type frobbing.  */
        if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
            || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
          break;
      }

      /* ... fall through ... */

    case NON_LVALUE_EXPR:
      return fold_build1 (code, sizetype,
                          convert_to_index_type (TREE_OPERAND (expr, 0)));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return fold_build2 (code, sizetype,
                          convert_to_index_type (TREE_OPERAND (expr, 0)),
                          convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COMPOUND_EXPR:
      return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
                          convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COND_EXPR:
      return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
                          convert_to_index_type (TREE_OPERAND (expr, 1)),
                          convert_to_index_type (TREE_OPERAND (expr, 2)));

    default:
      break;
    }

  return convert (sizetype, expr);
}

/* Remove all conversions that are done in EXP.  This includes converting
   from a padded type or to a justified modular type.  If TRUE_ADDRESS
   is true, always return the address of the containing object even if
   the address is not bit-aligned.  */

tree
remove_conversions (tree exp, bool true_address)
{
  switch (TREE_CODE (exp))
    {
    case CONSTRUCTOR:
      if (true_address
          && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
        return
          remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
      break;

    case COMPONENT_REF:
      if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
        return remove_conversions (TREE_OPERAND (exp, 0), true_address);
      break;

    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      return remove_conversions (TREE_OPERAND (exp, 0), true_address);

    default:
      break;
    }

  return exp;
}

/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If its type has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE (type))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
        {
          const bool read_only = TREE_READONLY (exp);
          const bool no_trap = TREE_THIS_NOTRAP (exp);

          exp = TREE_OPERAND (exp, 0);
          type = TREE_TYPE (exp);

          if (TREE_CODE (exp) == COND_EXPR)
            {
              tree op1
                = build_unary_op (INDIRECT_REF, NULL_TREE,
                                  build_component_ref (TREE_OPERAND (exp, 1),
                                                       TYPE_FIELDS (type),
                                                       false));
              tree op2
                = build_unary_op (INDIRECT_REF, NULL_TREE,
                                  build_component_ref (TREE_OPERAND (exp, 2),
                                                       TYPE_FIELDS (type),
                                                       false));

              exp = build3 (COND_EXPR,
                            TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
                            TREE_OPERAND (exp, 0), op1, op2);
            }
          else
            {
              exp = build_unary_op (INDIRECT_REF, NULL_TREE,
                                    build_component_ref (exp,
                                                         TYPE_FIELDS (type),
                                                         false));
              TREE_READONLY (exp) = read_only;
              TREE_THIS_NOTRAP (exp) = no_trap;
            }
        }

      else if (code == NULL_EXPR)
        exp = build1 (NULL_EXPR,
                      TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
                      TREE_OPERAND (exp, 0));
      break;

    case RECORD_TYPE:
      /* If this is a padded type and it contains a template, convert to the
         unpadded type first.  */
      if (TYPE_PADDING_P (type)
          && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
          && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
        {
          exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
          code = TREE_CODE (exp);
          type = TREE_TYPE (exp);
        }

      if (TYPE_CONTAINS_TEMPLATE_P (type))
        {
          /* If the array initializer is a box, return NULL_TREE.  */
          if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
            return NULL_TREE;

          exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
                                     false);
          type = TREE_TYPE (exp);

          /* If the array type is padded, convert to the unpadded type.  */
          if (TYPE_IS_PADDING_P (type))
            exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
        }
      break;

    default:
      break;
    }

  return exp;
}

/* Return true if EXPR is an expression that can be folded as an operand
   of a VIEW_CONVERT_EXPR.  See ada-tree.h for a complete rationale.  */

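/* For instance, a NOP_EXPR between two 32-bit integral types of which only
   one has an RM size of 8 bits is not a no-op in the Ada sense, because the
   two types do not agree on which bits are value bits; folding it away as
   the operand of a VIEW_CONVERT_EXPR could therefore change the result.  */
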
static bool
can_fold_for_view_convert_p (tree expr)
{
  tree t1, t2;

  /* The folder will fold NOP_EXPRs between integral types with the same
     precision (in the middle-end's sense).  We cannot allow it if the
     types don't have the same precision in the Ada sense as well.  */
  if (TREE_CODE (expr) != NOP_EXPR)
    return true;

  t1 = TREE_TYPE (expr);
  t2 = TREE_TYPE (TREE_OPERAND (expr, 0));

  /* Defer to the folder for non-integral conversions.  */
  if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
    return true;

  /* Only fold conversions that preserve both precisions.  */
  if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
      && operand_equal_p (rm_size (t1), rm_size (t2), 0))
    return true;

  return false;
}

/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is true, truncation operations should be suppressed.

   Special care is required with (source or target) integral types whose
   precision is not equal to their size, to make sure we fetch or assign
   the value bits whose location might depend on the endianness, e.g.

     Rmsize : constant := 8;
     subtype Int is Integer range 0 .. 2 ** Rmsize - 1;

     type Bit_Array is array (1 .. Rmsize) of Boolean;
     pragma Pack (Bit_Array);

     function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);

     Value : Int := 2#1000_0001#;
     Vbits : Bit_Array := To_Bit_Array (Value);

   we expect the 8 bits at Vbits'Address to always contain Value, while
   their original location depends on the endianness, at Value'Address
   on a little-endian architecture but not on a big-endian one.

   One pitfall is that we cannot use TYPE_UNSIGNED directly to decide how
   the bits between the precision and the size are filled, because of the
   trick used in the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.
   So we use the special predicate type_unsigned_for_rm above.  */

tree
unchecked_convert (tree type, tree expr, bool notrunc_p)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);
  const bool ebiased
    = (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype));
  const bool biased
    = (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type));
  const bool ereverse
    = (AGGREGATE_TYPE_P (etype) && TYPE_REVERSE_STORAGE_ORDER (etype));
  const bool reverse
    = (AGGREGATE_TYPE_P (type) && TYPE_REVERSE_STORAGE_ORDER (type));
  tree tem;
  int c = 0;

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral or regular pointer, then just do a normal
     conversion.  Likewise for a conversion to an unconstrained array.  */
  if (((INTEGRAL_TYPE_P (type)
        || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
        || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
       && (INTEGRAL_TYPE_P (etype)
           || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
           || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
      || code == UNCONSTRAINED_ARRAY_TYPE)
    {
      if (ebiased)
        {
          tree ntype = copy_type (etype);
          TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
          TYPE_MAIN_VARIANT (ntype) = ntype;
          expr = build1 (NOP_EXPR, ntype, expr);
        }

      if (biased)
        {
          tree rtype = copy_type (type);
          TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
          TYPE_MAIN_VARIANT (rtype) = rtype;
          expr = convert (rtype, expr);
          expr = build1 (NOP_EXPR, type, expr);
        }
      else
        expr = convert (type, expr);
    }

  /* If we are converting to an integral type whose precision is not equal
     to its size, first unchecked convert to a record type that contains a
     field of the given precision.  Then extract the result from the field.

     There is a subtlety if the source type is an aggregate type with reverse
     storage order because its representation is not contiguous in the native
     storage order, i.e. a direct unchecked conversion to an integral type
     with N bits of precision cannot read the first N bits of the aggregate
     type.  To overcome it, we do an unchecked conversion to an integral type
     with reverse storage order and return the resulting value.  This also
     ensures that the result of the unchecked conversion doesn't depend on
     the endianness of the target machine, but only on the storage order of
     the aggregate type.

     Finally, for the sake of consistency, we do the unchecked conversion
     to an integral type with reverse storage order as soon as the source
     type is an aggregate type with reverse storage order, even if there
     are no considerations of precision or size involved.  Ultimately, we
     further extend this processing to any scalar type.  */
  else if ((INTEGRAL_TYPE_P (type)
            && TYPE_RM_SIZE (type)
            && ((c = tree_int_cst_compare (TYPE_RM_SIZE (type),
                                           TYPE_SIZE (type))) < 0
                || ereverse))
           || (SCALAR_FLOAT_TYPE_P (type) && ereverse))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field_type, field;

      TYPE_REVERSE_STORAGE_ORDER (rec_type) = ereverse;

      if (c < 0)
        {
          const unsigned HOST_WIDE_INT prec
            = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
          if (type_unsigned_for_rm (type))
            field_type = make_unsigned_type (prec);
          else
            field_type = make_signed_type (prec);
          SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
        }
      else
        field_type = type;

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
                                 NULL_TREE, bitsize_zero_node, c < 0, 0);

      finish_record_type (rec_type, field, 1, false);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, field, false);
      expr = fold_build1 (NOP_EXPR, type, expr);
    }

  /* Similarly if we are converting from an integral type whose precision is
     not equal to its size, first copy into a field of the given precision
     and unchecked convert the record type.

     The same considerations as above apply if the target type is an aggregate
     type with reverse storage order and we also proceed similarly.  */
  else if ((INTEGRAL_TYPE_P (etype)
            && TYPE_RM_SIZE (etype)
            && ((c = tree_int_cst_compare (TYPE_RM_SIZE (etype),
                                           TYPE_SIZE (etype))) < 0
                || reverse))
           || (SCALAR_FLOAT_TYPE_P (etype) && reverse))
    {
      tree rec_type = make_node (RECORD_TYPE);
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, 1);
      tree field_type, field;

      TYPE_REVERSE_STORAGE_ORDER (rec_type) = reverse;

      if (c < 0)
        {
          const unsigned HOST_WIDE_INT prec
            = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
          if (type_unsigned_for_rm (etype))
            field_type = make_unsigned_type (prec);
          else
            field_type = make_signed_type (prec);
          SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
        }
      else
        field_type = etype;

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
                                 NULL_TREE, bitsize_zero_node, c < 0, 0);

      finish_record_type (rec_type, field, 1, false);

      expr = fold_build1 (NOP_EXPR, field_type, expr);
      CONSTRUCTOR_APPEND_ELT (v, field, expr);
      expr = gnat_build_constructor (rec_type, v);
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting from a scalar type to a type with a different size,
     we need to pad to have the same size on both sides.

     ??? We cannot do it unconditionally because unchecked conversions are
     used liberally by the front-end to implement interface thunks:

       type ada__tags__addr_ptr is access system.address;
       S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
       return p___size__4 (p__object!(S191s.all));

     so we need to skip dereferences.  */
  else if (!INDIRECT_REF_P (expr)
           && !AGGREGATE_TYPE_P (etype)
           && ecode != UNCONSTRAINED_ARRAY_TYPE
           && TREE_CONSTANT (TYPE_SIZE (type))
           && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
    {
      if (c < 0)
        {
          expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
                                          false, false, false, true),
                          expr);
          expr = unchecked_convert (type, expr, notrunc_p);
        }
      else
        {
          tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
                                          false, false, false, true);
          expr = unchecked_convert (rec_type, expr, notrunc_p);
          expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
        }
    }

  /* Likewise if we are converting from a scalar type to a type with self-
     referential size.  We use the max size to do the padding in this case.  */
  else if (!INDIRECT_REF_P (expr)
           && !AGGREGATE_TYPE_P (etype)
           && ecode != UNCONSTRAINED_ARRAY_TYPE
           && CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type)))
    {
      tree new_size = max_size (TYPE_SIZE (type), true);
      c = tree_int_cst_compare (TYPE_SIZE (etype), new_size);
      if (c < 0)
        {
          expr = convert (maybe_pad_type (etype, new_size, 0, Empty,
                                          false, false, false, true),
                          expr);
          expr = unchecked_convert (type, expr, notrunc_p);
        }
      else
        {
          tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
                                          false, false, false, true);
          expr = unchecked_convert (rec_type, expr, notrunc_p);
          expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
        }
    }

  /* We have a special case when we are converting between two unconstrained
     array types.  In that case, take the address, convert the fat pointer
     types, and dereference.  */
  else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
                           build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
                                   build_unary_op (ADDR_EXPR, NULL_TREE,
                                                   expr)));

  /* Another special case is when we are converting to a vector type from its
     representative array type; this is a regular conversion.  */
  else if (code == VECTOR_TYPE
           && ecode == ARRAY_TYPE
           && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
                                       etype))
    expr = convert (type, expr);

  /* And, if the array type is not the representative, we try to build an
     intermediate vector type of which the array type is the representative
     and to do the unchecked conversion between the vector types, in order
     to enable further simplifications in the middle-end.  */
  else if (code == VECTOR_TYPE
           && ecode == ARRAY_TYPE
           && (tem = build_vector_type_for_array (etype, NULL_TREE)))
    {
      expr = convert (tem, expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting a CONSTRUCTOR to a more aligned aggregate type, bump
     the alignment of the CONSTRUCTOR to speed up the copy operation.  But do
     not do it for a conversion between original and packable version to avoid
     an infinite recursion.  */
  else if (TREE_CODE (expr) == CONSTRUCTOR
           && AGGREGATE_TYPE_P (type)
           && TYPE_NAME (type) != TYPE_NAME (etype)
           && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
    {
      expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
                                      Empty, false, false, false, true),
                      expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting a CONSTRUCTOR to a larger aggregate type, bump the
     size of the CONSTRUCTOR to make sure there are enough allocated bytes.
     But do not do it for a conversion between original and packable version
     to avoid an infinite recursion.  */
  else if (TREE_CODE (expr) == CONSTRUCTOR
           && AGGREGATE_TYPE_P (type)
           && TYPE_NAME (type) != TYPE_NAME (etype)
           && TREE_CONSTANT (TYPE_SIZE (type))
           && (!TREE_CONSTANT (TYPE_SIZE (etype))
               || tree_int_cst_lt (TYPE_SIZE (etype), TYPE_SIZE (type))))
    {
      expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0,
                                      Empty, false, false, false, true),
                      expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression.  */
  else
    {
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      if (can_fold_for_view_convert_p (expr))
        expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
      else
        expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is a non-biased integral type whose precision is not equal
     to its size, sign- or zero-extend the result.  But we need not do this
     if the input is also an integral type and both are unsigned or both are
     signed and have the same precision.  */
  tree type_rm_size;
  if (!notrunc_p
      && !biased
      && INTEGRAL_TYPE_P (type)
      && (type_rm_size = TYPE_RM_SIZE (type))
      && tree_int_cst_compare (type_rm_size, TYPE_SIZE (type)) < 0
      && !(INTEGRAL_TYPE_P (etype)
           && type_unsigned_for_rm (type) == type_unsigned_for_rm (etype)
           && (type_unsigned_for_rm (type)
               || tree_int_cst_compare (type_rm_size,
                                        TYPE_RM_SIZE (etype)
                                        ? TYPE_RM_SIZE (etype)
                                        : TYPE_SIZE (etype)) == 0)))
    {
      if (integer_zerop (type_rm_size))
        expr = build_int_cst (type, 0);
      else
        {
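          /* Extend by shifting the value all the way up to the most
             significant bits of a type covering the full size and then
             back down again: the right shift reinserts sign or zero bits
             according to the signedness chosen for the base type.  */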
5565 	  tree base_type
5566 	    = gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
5567 				  type_unsigned_for_rm (type));
5568 	  tree shift_expr
5569 	    = convert (base_type,
5570 		       size_binop (MINUS_EXPR,
5571 				   TYPE_SIZE (type), type_rm_size));
5572 	  expr
5573 	    = convert (type,
5574 		       build_binary_op (RSHIFT_EXPR, base_type,
5575 				        build_binary_op (LSHIFT_EXPR, base_type,
5576 							 convert (base_type,
5577 								  expr),
5578 							 shift_expr),
5579 				        shift_expr));
5580 	}
5581     }
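
  /* An illustrative sketch, not from the original sources: for a container
     of 8 bits holding an RM size of 5 bits, the tree built above amounts
     to the C fragment

       int shift = 8 - 5;
       unsigned char zext = (unsigned char) (v << shift) >> shift;
       signed char   sext = (signed char) (v << shift) >> shift;

     i.e. the value is shifted up so that its topmost significant bit lands
     in the sign bit of the container, then shifted back down, with the
     signedness of BASE_TYPE providing the zero- or sign-extension.  */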
5582 
5583   /* An unchecked conversion should never raise Constraint_Error.  The code
5584      below assumes that GCC's conversion routines overflow the same way that
5585      the underlying hardware does.  This is probably true.  In the rare case
5586      when it is false, we can rely on the fact that such conversions are
5587      erroneous anyway.  */
5588   if (TREE_CODE (expr) == INTEGER_CST)
5589     TREE_OVERFLOW (expr) = 0;
5590 
5591   /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
5592      mark the expression as no longer constant.  */
5593   if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5594       && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5595 			   OEP_ONLY_CONST))
5596     TREE_CONSTANT (expr) = 0;
5597 
5598   return expr;
5599 }
5600 
5601 /* Return the appropriate GCC tree code for the specified GNAT_TYPE, the
5602    latter being a record type as determined by the predicate Is_Record_Type.  */
5603 
5604 enum tree_code
5605 tree_code_for_record_type (Entity_Id gnat_type)
5606 {
5607   Node_Id component_list, component;
5608 
5609   /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5610      fields are all in the variant part.  Otherwise, return RECORD_TYPE.  */
5611   if (!Is_Unchecked_Union (gnat_type))
5612     return RECORD_TYPE;
5613 
5614   gnat_type = Implementation_Base_Type (gnat_type);
5615   component_list
5616     = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5617 
5618   for (component = First_Non_Pragma (Component_Items (component_list));
5619        Present (component);
5620        component = Next_Non_Pragma (component))
5621     if (Ekind (Defining_Entity (component)) == E_Component)
5622       return RECORD_TYPE;
5623 
5624   return UNION_TYPE;
5625 }
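
/* For instance, a hedged illustration not taken from the original sources:
   an Unchecked_Union whose components all live in the variant part maps to
   a plain C union,

     union rec { int i; float f; };

   whereas any non-discriminant component outside the variant part forces a
   RECORD_TYPE, i.e. a C struct.  */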
5626 
5627 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5628    size is equal to 64 bits, or an array of such a type.  Set ALIGN_CLAUSE
5629    according to the presence of an alignment clause on the type or, if it
5630    is an array, on the component type.  */
5631 
5632 bool
5633 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5634 {
5635   gnat_type = Underlying_Type (gnat_type);
5636 
5637   *align_clause = Present (Alignment_Clause (gnat_type));
5638 
5639   if (Is_Array_Type (gnat_type))
5640     {
5641       gnat_type = Underlying_Type (Component_Type (gnat_type));
5642       if (Present (Alignment_Clause (gnat_type)))
5643 	*align_clause = true;
5644     }
5645 
5646   if (!Is_Floating_Point_Type (gnat_type))
5647     return false;
5648 
5649   if (UI_To_Int (Esize (gnat_type)) != 64)
5650     return false;
5651 
5652   return true;
5653 }
5654 
5655 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5656    size is greater or equal to 64 bits, or an array of such a type.  Set
5657    ALIGN_CLAUSE according to the presence of an alignment clause on the
5658    type or, if it is an array, on the component type.  */
5659 
5660 bool
5661 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5662 {
5663   gnat_type = Underlying_Type (gnat_type);
5664 
5665   *align_clause = Present (Alignment_Clause (gnat_type));
5666 
5667   if (Is_Array_Type (gnat_type))
5668     {
5669       gnat_type = Underlying_Type (Component_Type (gnat_type));
5670       if (Present (Alignment_Clause (gnat_type)))
5671 	*align_clause = true;
5672     }
5673 
5674   if (!Is_Scalar_Type (gnat_type))
5675     return false;
5676 
5677   if (UI_To_Int (Esize (gnat_type)) < 64)
5678     return false;
5679 
5680   return true;
5681 }
5682 
5683 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5684    component of an aggregate type.  */
5685 
5686 bool
5687 type_for_nonaliased_component_p (tree gnu_type)
5688 {
5689   /* If the type is passed by reference, we may have pointers to the
5690      component so it cannot be made non-aliased. */
5691   if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5692     return false;
5693 
5694   /* We used to say that any component of aggregate type is aliased
5695      because the front-end may take 'Reference of it.  The front-end
5696      has been enhanced in the meantime so as to use a renaming instead
5697      in most cases, but the back-end can probably take the address of
5698      such a component too so we go for the conservative stance.
5699 
5700      For instance, we might need the address of any array type, even
5701      if normally passed by copy, to construct a fat pointer if the
5702      component is used as an actual for an unconstrained formal.
5703 
5704      Likewise for record types: even if a specific record subtype is
5705      passed by copy, the parent type might be passed by ref (e.g. if
5706      it's of variable size) and we might take the address of a child
5707      component to pass to a parent formal.  We have no way to check
5708      for such conditions here.  */
5709   if (AGGREGATE_TYPE_P (gnu_type))
5710     return false;
5711 
5712   return true;
5713 }
5714 
5715 /* Return true if TYPE is a smaller form of ORIG_TYPE.  */
5716 
5717 bool
5718 smaller_form_type_p (tree type, tree orig_type)
5719 {
5720   tree size, osize;
5721 
5722   /* We're not interested in variants here.  */
5723   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5724     return false;
5725 
5726   /* Like a variant, a packable version keeps the original TYPE_NAME.  */
5727   if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5728     return false;
5729 
5730   size = TYPE_SIZE (type);
5731   osize = TYPE_SIZE (orig_type);
5732 
5733   if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5734     return false;
5735 
5736   return tree_int_cst_lt (size, osize) != 0;
5737 }
5738 
5739 /* Return whether EXPR, which is the renamed object in an object renaming
5740    declaration, can be materialized as a reference (with a REFERENCE_TYPE).
5741    This should be synchronized with Exp_Dbug.Debug_Renaming_Declaration.  */
5742 
5743 bool
5744 can_materialize_object_renaming_p (Node_Id expr)
5745 {
5746   while (true)
5747     {
5748       expr = Original_Node (expr);
5749 
5750       switch (Nkind (expr))
5751 	{
5752 	case N_Identifier:
5753 	case N_Expanded_Name:
5754 	  if (!Present (Renamed_Object (Entity (expr))))
5755 	    return true;
5756 	  expr = Renamed_Object (Entity (expr));
5757 	  break;
5758 
5759 	case N_Selected_Component:
5760 	  {
5761 	    if (Is_Packed (Underlying_Type (Etype (Prefix (expr)))))
5762 	      return false;
5763 
5764 	    const Uint bitpos
5765 	      = Normalized_First_Bit (Entity (Selector_Name (expr)));
5766 	    if (bitpos != UI_No_Uint && bitpos != Uint_0)
5767 	      return false;
5768 
5769 	    expr = Prefix (expr);
5770 	    break;
5771 	  }
5772 
5773 	case N_Indexed_Component:
5774 	case N_Slice:
5775 	  {
5776 	    const Entity_Id t = Underlying_Type (Etype (Prefix (expr)));
5777 
5778 	    if (Is_Array_Type (t) && Present (Packed_Array_Impl_Type (t)))
5779 	      return false;
5780 
5781 	    expr = Prefix (expr);
5782 	    break;
5783 	  }
5784 
5785 	case N_Explicit_Dereference:
5786 	  expr = Prefix (expr);
5787 	  break;
5788 
5789 	default:
5790 	  return true;
5791 	};
5792     }
5793 }
5794 
5795 /* Perform final processing on global declarations.  */
5796 
5797 static GTY (()) tree dummy_global;
5798 
5799 void
5800 gnat_write_global_declarations (void)
5801 {
5802   unsigned int i;
5803   tree iter;
5804 
5805   /* If we have declared types as used at the global level, insert them in
5806      the global hash table.  We use a dummy variable for this purpose, but
5807      we need to build it unconditionally to avoid -fcompare-debug issues.  */
5808   if (first_global_object_name)
5809     {
5810       struct varpool_node *node;
5811       char *label;
5812 
5813       ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, ULONG_MAX);
5814       dummy_global
5815 	= build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5816 		      void_type_node);
5817       DECL_HARD_REGISTER (dummy_global) = 1;
5818       TREE_STATIC (dummy_global) = 1;
5819       node = varpool_node::get_create (dummy_global);
5820       node->definition = 1;
5821       node->force_output = 1;
5822 
5823       if (types_used_by_cur_var_decl)
5824 	while (!types_used_by_cur_var_decl->is_empty ())
5825 	  {
5826 	    tree t = types_used_by_cur_var_decl->pop ();
5827 	    types_used_by_var_decl_insert (t, dummy_global);
5828 	  }
5829     }
5830 
5831   /* Output debug information for all global type declarations first.  This
5832      ensures that global types whose compilation hasn't been finalized yet,
5833      for example pointers to Taft amendment types, have their compilation
5834      finalized in the right context.  */
5835   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5836     if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5837       debug_hooks->type_decl (iter, false);
5838 
5839   /* Output imported functions.  */
5840   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5841     if (TREE_CODE (iter) == FUNCTION_DECL
5842 	&& DECL_EXTERNAL (iter)
5843 	&& DECL_INITIAL (iter) == NULL
5844 	&& !DECL_IGNORED_P (iter)
5845 	&& DECL_FUNCTION_IS_DEF (iter))
5846       debug_hooks->early_global_decl (iter);
5847 
5848   /* Output global constants.  */
5849   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5850     if (TREE_CODE (iter) == CONST_DECL && !DECL_IGNORED_P (iter))
5851       debug_hooks->early_global_decl (iter);
5852 
5853   /* Then output the global variables.  We need to do that after the debug
5854      information for global types is emitted so that they are finalized.  Skip
5855      external global variables, unless we need to emit debug info for them:
5856      this is useful for imported variables, for instance.  */
5857   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5858     if (TREE_CODE (iter) == VAR_DECL
5859 	&& (!DECL_EXTERNAL (iter) || !DECL_IGNORED_P (iter)))
5860       rest_of_decl_compilation (iter, true, 0);
5861 
5862   /* Output the imported modules/declarations.  In GNAT, these only
5863      materialize subprograms.  */
5864   FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5865    if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
5866      debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
5867 					   DECL_CONTEXT (iter), false, false);
5868 }
5869 
5870 /* ************************************************************************
5871  * *                           GCC builtins support                       *
5872  * ************************************************************************ */
5873 
5874 /* The general scheme is fairly simple:
5875 
5876    For each builtin function/type to be declared, gnat_install_builtins calls
5877    internal facilities which eventually get to gnat_pushdecl, which in turn
5878    tracks the builtin function decls thus declared in the 'builtin_decls'
5879    global data structure.  When an Intrinsic subprogram declaration is
5880    processed, we search this global data structure to retrieve the associated
5881    BUILT_IN DECL node.  */
5882 
5883 /* Search the chain of currently available builtin declarations for a node
5884    corresponding to function NAME (an IDENTIFIER_NODE).  Return the first node
5885    found, if any, or NULL_TREE otherwise.  */
5886 tree
5887 builtin_decl_for (tree name)
5888 {
5889   unsigned i;
5890   tree decl;
5891 
5892   FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5893     if (DECL_NAME (decl) == name)
5894       return decl;
5895 
5896   return NULL_TREE;
5897 }
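
/* Usage sketch, with a hypothetical identifier not taken from the original
   sources:

     tree decl = builtin_decl_for (get_identifier ("__builtin_memcpy"));

   If non-null, DECL is the BUILT_IN FUNCTION_DECL that gnat_pushdecl
   recorded in builtin_decls.  */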
5898 
5899 /* The code below eventually exposes gnat_install_builtins, which declares
5900    the builtin types and functions we might need, either internally or as
5901    user accessible facilities.
5902 
5903    ??? This is a first implementation shot, still in rough shape.  It is
5904    heavily inspired by the "C" family implementation, with chunks copied
5905    verbatim from there.
5906 
5907    Two obvious improvement candidates are:
5908    o Use a more efficient name/decl mapping scheme
5909    o Devise a middle-end infrastructure to avoid having to copy
5910      pieces between front-ends.  */
5911 
5912 /* ----------------------------------------------------------------------- *
5913  *                         BUILTIN ELEMENTARY TYPES                        *
5914  * ----------------------------------------------------------------------- */
5915 
5916 /* Standard data types to be used in builtin argument declarations.  */
5917 
5918 enum c_tree_index
5919 {
5920     CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
5921     CTI_STRING_TYPE,
5922     CTI_CONST_STRING_TYPE,
5923 
5924     CTI_MAX
5925 };
5926 
5927 static tree c_global_trees[CTI_MAX];
5928 
5929 #define signed_size_type_node	c_global_trees[CTI_SIGNED_SIZE_TYPE]
5930 #define string_type_node	c_global_trees[CTI_STRING_TYPE]
5931 #define const_string_type_node	c_global_trees[CTI_CONST_STRING_TYPE]
5932 
5933 /* ??? Besides lacking some attribute handlers, we currently don't support a
5934    (small) number of builtin types, which in turn inhibits support for a
5935    number of builtin functions.  */
5936 #define wint_type_node    void_type_node
5937 #define intmax_type_node  void_type_node
5938 #define uintmax_type_node void_type_node
5939 
5940 /* Used to help initialize the builtin-types.def table.  When a type of
5941    the correct size doesn't exist, use error_mark_node instead of NULL.
5942    The latter results in segfaults even when a decl using the type doesn't
5943    get invoked.  */
5944 
5945 static tree
5946 builtin_type_for_size (int size, bool unsignedp)
5947 {
5948   tree type = gnat_type_for_size (size, unsignedp);
5949   return type ? type : error_mark_node;
5950 }
5951 
5952 /* Build/push the elementary type decls that builtin functions/types
5953    will need.  */
5954 
5955 static void
5956 install_builtin_elementary_types (void)
5957 {
5958   signed_size_type_node = gnat_signed_type_for (size_type_node);
5959   pid_type_node = integer_type_node;
5960 
5961   string_type_node = build_pointer_type (char_type_node);
5962   const_string_type_node
5963     = build_pointer_type (build_qualified_type
5964 			  (char_type_node, TYPE_QUAL_CONST));
5965 }
5966 
5967 /* ----------------------------------------------------------------------- *
5968  *                          BUILTIN FUNCTION TYPES                         *
5969  * ----------------------------------------------------------------------- */
5970 
5971 /* Now, builtin function types per se.  */
5972 
5973 enum c_builtin_type
5974 {
5975 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5976 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5977 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5978 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5979 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5980 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5981 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5982 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5983 			    ARG6) NAME,
5984 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5985 			    ARG6, ARG7) NAME,
5986 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5987 			    ARG6, ARG7, ARG8) NAME,
5988 #define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5989 			    ARG6, ARG7, ARG8, ARG9) NAME,
5990 #define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5991 			     ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
5992 #define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5993 			     ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
5994 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5995 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5996 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5997 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5998 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5999 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6000 				NAME,
6001 #define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6002 				ARG6) NAME,
6003 #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6004 				ARG6, ARG7) NAME,
6005 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
6006 #include "builtin-types.def"
6007 #include "ada-builtin-types.def"
6008 #undef DEF_PRIMITIVE_TYPE
6009 #undef DEF_FUNCTION_TYPE_0
6010 #undef DEF_FUNCTION_TYPE_1
6011 #undef DEF_FUNCTION_TYPE_2
6012 #undef DEF_FUNCTION_TYPE_3
6013 #undef DEF_FUNCTION_TYPE_4
6014 #undef DEF_FUNCTION_TYPE_5
6015 #undef DEF_FUNCTION_TYPE_6
6016 #undef DEF_FUNCTION_TYPE_7
6017 #undef DEF_FUNCTION_TYPE_8
6018 #undef DEF_FUNCTION_TYPE_9
6019 #undef DEF_FUNCTION_TYPE_10
6020 #undef DEF_FUNCTION_TYPE_11
6021 #undef DEF_FUNCTION_TYPE_VAR_0
6022 #undef DEF_FUNCTION_TYPE_VAR_1
6023 #undef DEF_FUNCTION_TYPE_VAR_2
6024 #undef DEF_FUNCTION_TYPE_VAR_3
6025 #undef DEF_FUNCTION_TYPE_VAR_4
6026 #undef DEF_FUNCTION_TYPE_VAR_5
6027 #undef DEF_FUNCTION_TYPE_VAR_6
6028 #undef DEF_FUNCTION_TYPE_VAR_7
6029 #undef DEF_POINTER_TYPE
6030   BT_LAST
6031 };
6032 
6033 typedef enum c_builtin_type builtin_type;
6034 
6035 /* A temporary array used in communication with def_fn_type.  */
6036 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
6037 
6038 /* A helper function for install_builtin_function_types.  Build the function
6039    type for DEF with return type RET and N arguments.  If VAR is true, the
6040    function should be variadic after those N arguments.
6041 
6042    Takes special care not to ICE if any of the types involved are
6043    error_mark_node, which indicates that said type is not in fact available
6044    (see builtin_type_for_size), in which case the function type as a whole
6045    should be error_mark_node.  */
6046 
6047 static void
6048 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
6049 {
6050   tree t;
6051   tree *args = XALLOCAVEC (tree, n);
6052   va_list list;
6053   int i;
6054 
6055   va_start (list, n);
6056   for (i = 0; i < n; ++i)
6057     {
6058       builtin_type a = (builtin_type) va_arg (list, int);
6059       t = builtin_types[a];
6060       if (t == error_mark_node)
6061 	goto egress;
6062       args[i] = t;
6063     }
6064 
6065   t = builtin_types[ret];
6066   if (t == error_mark_node)
6067     goto egress;
6068   if (var)
6069     t = build_varargs_function_type_array (t, n, args);
6070   else
6071     t = build_function_type_array (t, n, args);
6072 
6073  egress:
6074   builtin_types[def] = t;
6075   va_end (list);
6076 }
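
/* For example, a sketch assuming the standard builtin-types.def entries:
   the expansion of

     DEF_FUNCTION_TYPE_1 (BT_FN_PTR_SIZE, BT_PTR, BT_SIZE)

   below turns into def_fn_type (BT_FN_PTR_SIZE, BT_PTR, 0, 1, BT_SIZE),
   which builds the type of a function taking a size_t and returning a
   pointer, e.g. that of malloc.  */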
6077 
6078 /* Build the builtin function types and install them in the builtin_types
6079    array for later use in builtin function decls.  */
6080 
6081 static void
6082 install_builtin_function_types (void)
6083 {
6084   tree va_list_ref_type_node;
6085   tree va_list_arg_type_node;
6086 
6087   if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
6088     {
6089       va_list_arg_type_node = va_list_ref_type_node =
6090 	build_pointer_type (TREE_TYPE (va_list_type_node));
6091     }
6092   else
6093     {
6094       va_list_arg_type_node = va_list_type_node;
6095       va_list_ref_type_node = build_reference_type (va_list_type_node);
6096     }
6097 
6098 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
6099   builtin_types[ENUM] = VALUE;
6100 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
6101   def_fn_type (ENUM, RETURN, 0, 0);
6102 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
6103   def_fn_type (ENUM, RETURN, 0, 1, ARG1);
6104 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
6105   def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
6106 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6107   def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
6108 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6109   def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
6110 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5)	\
6111   def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6112 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6113 			    ARG6)					\
6114   def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
6115 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6116 			    ARG6, ARG7)					\
6117   def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
6118 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6119 			    ARG6, ARG7, ARG8)				\
6120   def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6,	\
6121 	       ARG7, ARG8);
6122 #define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6123 			    ARG6, ARG7, ARG8, ARG9)			\
6124   def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6,	\
6125 	       ARG7, ARG8, ARG9);
6126 #define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
6127 			     ARG6, ARG7, ARG8, ARG9, ARG10)		\
6128   def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6,	\
6129 	       ARG7, ARG8, ARG9, ARG10);
6130 #define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
6131 			     ARG6, ARG7, ARG8, ARG9, ARG10, ARG11)	\
6132   def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6,	\
6133 	       ARG7, ARG8, ARG9, ARG10, ARG11);
6134 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
6135   def_fn_type (ENUM, RETURN, 1, 0);
6136 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
6137   def_fn_type (ENUM, RETURN, 1, 1, ARG1);
6138 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
6139   def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
6140 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6141   def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
6142 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6143   def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
6144 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6145   def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6146 #define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6147 				ARG6)				\
6148   def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
6149 #define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6150 				ARG6, ARG7)				\
6151   def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
6152 #define DEF_POINTER_TYPE(ENUM, TYPE) \
6153   builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
6154 
6155 #include "builtin-types.def"
6156 #include "ada-builtin-types.def"
6157 
6158 #undef DEF_PRIMITIVE_TYPE
6159 #undef DEF_FUNCTION_TYPE_0
6160 #undef DEF_FUNCTION_TYPE_1
6161 #undef DEF_FUNCTION_TYPE_2
6162 #undef DEF_FUNCTION_TYPE_3
6163 #undef DEF_FUNCTION_TYPE_4
6164 #undef DEF_FUNCTION_TYPE_5
6165 #undef DEF_FUNCTION_TYPE_6
6166 #undef DEF_FUNCTION_TYPE_7
6167 #undef DEF_FUNCTION_TYPE_8
6168 #undef DEF_FUNCTION_TYPE_9
6169 #undef DEF_FUNCTION_TYPE_10
6170 #undef DEF_FUNCTION_TYPE_11
6171 #undef DEF_FUNCTION_TYPE_VAR_0
6172 #undef DEF_FUNCTION_TYPE_VAR_1
6173 #undef DEF_FUNCTION_TYPE_VAR_2
6174 #undef DEF_FUNCTION_TYPE_VAR_3
6175 #undef DEF_FUNCTION_TYPE_VAR_4
6176 #undef DEF_FUNCTION_TYPE_VAR_5
6177 #undef DEF_FUNCTION_TYPE_VAR_6
6178 #undef DEF_FUNCTION_TYPE_VAR_7
6179 #undef DEF_POINTER_TYPE
6180   builtin_types[(int) BT_LAST] = NULL_TREE;
6181 }
6182 
6183 /* ----------------------------------------------------------------------- *
6184  *                            BUILTIN ATTRIBUTES                           *
6185  * ----------------------------------------------------------------------- */
6186 
6187 enum built_in_attribute
6188 {
6189 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
6190 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
6191 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
6192 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
6193 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
6194 #include "builtin-attrs.def"
6195 #undef DEF_ATTR_NULL_TREE
6196 #undef DEF_ATTR_INT
6197 #undef DEF_ATTR_STRING
6198 #undef DEF_ATTR_IDENT
6199 #undef DEF_ATTR_TREE_LIST
6200   ATTR_LAST
6201 };
6202 
6203 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
6204 
6205 static void
6206 install_builtin_attributes (void)
6207 {
6208   /* Fill in the built_in_attributes array.  */
6209 #define DEF_ATTR_NULL_TREE(ENUM)				\
6210   built_in_attributes[(int) ENUM] = NULL_TREE;
6211 #define DEF_ATTR_INT(ENUM, VALUE)				\
6212   built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
6213 #define DEF_ATTR_STRING(ENUM, VALUE)				\
6214   built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
6215 #define DEF_ATTR_IDENT(ENUM, STRING)				\
6216   built_in_attributes[(int) ENUM] = get_identifier (STRING);
6217 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
6218   built_in_attributes[(int) ENUM]			\
6219     = tree_cons (built_in_attributes[(int) PURPOSE],	\
6220 		 built_in_attributes[(int) VALUE],	\
6221 		 built_in_attributes[(int) CHAIN]);
6222 #include "builtin-attrs.def"
6223 #undef DEF_ATTR_NULL_TREE
6224 #undef DEF_ATTR_INT
6225 #undef DEF_ATTR_STRING
6226 #undef DEF_ATTR_IDENT
6227 #undef DEF_ATTR_TREE_LIST
6228 }
6229 
6230 /* Handle a "const" attribute; arguments as in
6231    struct attribute_spec.handler.  */
6232 
6233 static tree
6234 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
6235 			tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6236 			bool *no_add_attrs)
6237 {
6238   if (TREE_CODE (*node) == FUNCTION_DECL)
6239     TREE_READONLY (*node) = 1;
6240   else
6241     *no_add_attrs = true;
6242 
6243   return NULL_TREE;
6244 }
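
/* Usage sketch in C terms, not taken from the original sources:

     extern int square (int) __attribute__ ((__const__));

   The handler above merely sets TREE_READONLY on the FUNCTION_DECL, which
   is how GCC trees encode "const" functions.  */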
6245 
6246 /* Handle a "nothrow" attribute; arguments as in
6247    struct attribute_spec.handler.  */
6248 
6249 static tree
6250 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
6251 			  tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6252 			  bool *no_add_attrs)
6253 {
6254   if (TREE_CODE (*node) == FUNCTION_DECL)
6255     TREE_NOTHROW (*node) = 1;
6256   else
6257     *no_add_attrs = true;
6258 
6259   return NULL_TREE;
6260 }
6261 
6262 /* Handle a "pure" attribute; arguments as in
6263    struct attribute_spec.handler.  */
6264 
6265 static tree
6266 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6267 		       int ARG_UNUSED (flags), bool *no_add_attrs)
6268 {
6269   if (TREE_CODE (*node) == FUNCTION_DECL)
6270     DECL_PURE_P (*node) = 1;
6271   /* TODO: support types.  */
6272   else
6273     {
6274       warning (OPT_Wattributes, "%qs attribute ignored",
6275 	       IDENTIFIER_POINTER (name));
6276       *no_add_attrs = true;
6277     }
6278 
6279   return NULL_TREE;
6280 }
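
/* Usage sketch in C terms, not taken from the original sources:

     extern unsigned long my_strlen (const char *)
       __attribute__ ((__pure__));

   A "pure" function may read global memory but must have no side effects,
   which is what DECL_PURE_P conveys to the optimizers.  */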
6281 
6282 /* Handle a "no vops" attribute; arguments as in
6283    struct attribute_spec.handler.  */
6284 
6285 static tree
6286 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
6287 			 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6288 			 bool *ARG_UNUSED (no_add_attrs))
6289 {
6290   gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
6291   DECL_IS_NOVOPS (*node) = 1;
6292   return NULL_TREE;
6293 }
6294 
6295 /* Helper for nonnull attribute handling; fetch the operand number
6296    from the attribute argument list.  */
6297 
6298 static bool
6299 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6300 {
6301   /* Verify the arg number is a constant.  */
6302   if (!tree_fits_uhwi_p (arg_num_expr))
6303     return false;
6304 
6305   *valp = TREE_INT_CST_LOW (arg_num_expr);
6306   return true;
6307 }
6308 
6309 /* Handle the "nonnull" attribute.  */
6310 static tree
6311 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6312 			  tree args, int ARG_UNUSED (flags),
6313 			  bool *no_add_attrs)
6314 {
6315   tree type = *node;
6316   unsigned HOST_WIDE_INT attr_arg_num;
6317 
6318   /* If no arguments are specified, all pointer arguments should be
6319      non-null.  Verify a full prototype is given so that the arguments
6320      will have the correct types when we actually check them later.
6321      Avoid diagnosing type-generic built-ins since those have no
6322      prototype.  */
6323   if (!args)
6324     {
6325       if (!prototype_p (type)
6326 	  && (!TYPE_ATTRIBUTES (type)
6327 	      || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type))))
6328 	{
6329 	  error ("%qs attribute without arguments on a non-prototype",
6330 		 "nonnull");
6331 	  *no_add_attrs = true;
6332 	}
6333       return NULL_TREE;
6334     }
6335 
6336   /* Argument list specified.  Verify that each argument number references
6337      a pointer argument.  */
6338   for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
6339     {
6340       unsigned HOST_WIDE_INT arg_num = 0, ck_num;
6341 
6342       if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6343 	{
6344 	  error ("%qs argument has invalid operand number (argument %lu)",
6345 		 "nonnull", (unsigned long) attr_arg_num);
6346 	  *no_add_attrs = true;
6347 	  return NULL_TREE;
6348 	}
6349 
6350       if (prototype_p (type))
6351 	{
6352 	  function_args_iterator iter;
6353 	  tree argument;
6354 
6355 	  function_args_iter_init (&iter, type);
6356 	  for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
6357 	    {
6358 	      argument = function_args_iter_cond (&iter);
6359 	      if (!argument || ck_num == arg_num)
6360 		break;
6361 	    }
6362 
6363 	  if (!argument
6364 	      || TREE_CODE (argument) == VOID_TYPE)
6365 	    {
6366 	      error ("%qs argument with out-of-range operand number "
6367 		     "(argument %lu, operand %lu)", "nonnull",
6368 		     (unsigned long) attr_arg_num, (unsigned long) arg_num);
6369 	      *no_add_attrs = true;
6370 	      return NULL_TREE;
6371 	    }
6372 
6373 	  if (TREE_CODE (argument) != POINTER_TYPE)
6374 	    {
6375 	      error ("%qs argument references non-pointer operand "
6376 		     "(argument %lu, operand %lu)", "nonnull",
6377 		   (unsigned long) attr_arg_num, (unsigned long) arg_num);
6378 	      *no_add_attrs = true;
6379 	      return NULL_TREE;
6380 	    }
6381 	}
6382     }
6383 
6384   return NULL_TREE;
6385 }
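
/* Usage sketch in C terms, not taken from the original sources:

     extern void *my_memcpy (void *dst, const void *src, unsigned long n)
       __attribute__ ((__nonnull__ (1, 2)));

   Operand numbers 1 and 2 must reference pointer arguments of the
   prototype, which is exactly what the checks above enforce.  */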
6386 
6387 /* Handle a "sentinel" attribute.  */
6388 
6389 static tree
6390 handle_sentinel_attribute (tree *node, tree name, tree args,
6391 			   int ARG_UNUSED (flags), bool *no_add_attrs)
6392 {
6393   if (!prototype_p (*node))
6394     {
6395       warning (OPT_Wattributes,
6396 	       "%qs attribute requires prototypes with named arguments",
6397 	       IDENTIFIER_POINTER (name));
6398       *no_add_attrs = true;
6399     }
6400   else
6401     {
6402       if (!stdarg_p (*node))
6403         {
6404 	  warning (OPT_Wattributes,
6405 		   "%qs attribute only applies to variadic functions",
6406 		   IDENTIFIER_POINTER (name));
6407 	  *no_add_attrs = true;
6408 	}
6409     }
6410 
6411   if (args)
6412     {
6413       tree position = TREE_VALUE (args);
6414 
6415       if (TREE_CODE (position) != INTEGER_CST)
6416         {
6417 	  warning (0, "requested position is not an integer constant");
6418 	  *no_add_attrs = true;
6419 	}
6420       else
6421         {
6422 	  if (tree_int_cst_lt (position, integer_zero_node))
6423 	    {
6424 	      warning (0, "requested position is less than zero");
6425 	      *no_add_attrs = true;
6426 	    }
6427 	}
6428     }
6429 
6430   return NULL_TREE;
6431 }
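
/* Usage sketch in C terms, not taken from the original sources:

     extern void my_execl (const char *path, ...)
       __attribute__ ((__sentinel__));

   The attribute is only meaningful on variadic prototypes, and the
   optional position argument must be a non-negative integer constant, as
   verified above.  */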
6432 
6433 /* Handle a "noreturn" attribute; arguments as in
6434    struct attribute_spec.handler.  */
6435 
6436 static tree
6437 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6438 			   int ARG_UNUSED (flags), bool *no_add_attrs)
6439 {
6440   tree type = TREE_TYPE (*node);
6441 
6442   /* See FIXME comment in c_common_attribute_table.  */
6443   if (TREE_CODE (*node) == FUNCTION_DECL)
6444     TREE_THIS_VOLATILE (*node) = 1;
6445   else if (TREE_CODE (type) == POINTER_TYPE
6446 	   && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6447     TREE_TYPE (*node)
6448       = build_pointer_type
6449 	(change_qualified_type (TREE_TYPE (type), TYPE_QUAL_VOLATILE));
6450   else
6451     {
6452       warning (OPT_Wattributes, "%qs attribute ignored",
6453 	       IDENTIFIER_POINTER (name));
6454       *no_add_attrs = true;
6455     }
6456 
6457   return NULL_TREE;
6458 }
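
/* Usage sketch in C terms, not taken from the original sources:

     extern void fatal_error (const char *) __attribute__ ((__noreturn__));

   On a FUNCTION_DECL this sets TREE_THIS_VOLATILE; on a pointer-to-function
   type, the pointed-to type is rebuilt with TYPE_QUAL_VOLATILE instead.  */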
6459 
6460 /* Handle a "stack_protect" attribute; arguments as in
6461    struct attribute_spec.handler.  */
6462 
6463 static tree
6464 handle_stack_protect_attribute (tree *node, tree name, tree, int,
6465 				bool *no_add_attrs)
6466 {
6467   if (TREE_CODE (*node) != FUNCTION_DECL)
6468     {
6469       warning (OPT_Wattributes, "%qE attribute ignored", name);
6470       *no_add_attrs = true;
6471     }
6472 
6473   return NULL_TREE;
6474 }
6475 
6476 /* Handle a "noinline" attribute; arguments as in
6477    struct attribute_spec.handler.  */
6478 
6479 static tree
6480 handle_noinline_attribute (tree *node, tree name,
6481 			   tree ARG_UNUSED (args),
6482 			   int ARG_UNUSED (flags), bool *no_add_attrs)
6483 {
6484   if (TREE_CODE (*node) == FUNCTION_DECL)
6485     {
6486       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6487 	{
6488 	  warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6489 		   "with attribute %qs", name, "always_inline");
6490 	  *no_add_attrs = true;
6491 	}
6492       else
6493 	DECL_UNINLINABLE (*node) = 1;
6494     }
6495   else
6496     {
6497       warning (OPT_Wattributes, "%qE attribute ignored", name);
6498       *no_add_attrs = true;
6499     }
6500 
6501   return NULL_TREE;
6502 }
6503 
6504 /* Handle a "noclone" attribute; arguments as in
6505    struct attribute_spec.handler.  */
6506 
6507 static tree
6508 handle_noclone_attribute (tree *node, tree name,
6509 			  tree ARG_UNUSED (args),
6510 			  int ARG_UNUSED (flags), bool *no_add_attrs)
6511 {
6512   if (TREE_CODE (*node) != FUNCTION_DECL)
6513     {
6514       warning (OPT_Wattributes, "%qE attribute ignored", name);
6515       *no_add_attrs = true;
6516     }
6517 
6518   return NULL_TREE;
6519 }
6520 
6521 /* Handle a "no_icf" attribute; arguments as in
6522    struct attribute_spec.handler.  */
6523 
6524 static tree
6525 handle_noicf_attribute (tree *node, tree name,
6526 			tree ARG_UNUSED (args),
6527 			int ARG_UNUSED (flags), bool *no_add_attrs)
6528 {
6529   if (TREE_CODE (*node) != FUNCTION_DECL)
6530     {
6531       warning (OPT_Wattributes, "%qE attribute ignored", name);
6532       *no_add_attrs = true;
6533     }
6534 
6535   return NULL_TREE;
6536 }
6537 
6538 /* Handle a "noipa" attribute; arguments as in
6539    struct attribute_spec.handler.  */
6540 
6541 static tree
6542 handle_noipa_attribute (tree *node, tree name, tree, int, bool *no_add_attrs)
6543 {
6544   if (TREE_CODE (*node) != FUNCTION_DECL)
6545     {
6546       warning (OPT_Wattributes, "%qE attribute ignored", name);
6547       *no_add_attrs = true;
6548     }
6549 
6550   return NULL_TREE;
6551 }
6552 
6553 /* Handle a "leaf" attribute; arguments as in
6554    struct attribute_spec.handler.  */
6555 
6556 static tree
6557 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6558 		       int ARG_UNUSED (flags), bool *no_add_attrs)
6559 {
6560   if (TREE_CODE (*node) != FUNCTION_DECL)
6561     {
6562       warning (OPT_Wattributes, "%qE attribute ignored", name);
6563       *no_add_attrs = true;
6564     }
6565   if (!TREE_PUBLIC (*node))
6566     {
6567       warning (OPT_Wattributes, "%qE attribute has no effect", name);
6568       *no_add_attrs = true;
6569     }
6570 
6571   return NULL_TREE;
6572 }
6573 
6574 /* Handle an "always_inline" attribute; arguments as in
6575    struct attribute_spec.handler.  */
6576 
6577 static tree
6578 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6579 				int ARG_UNUSED (flags), bool *no_add_attrs)
6580 {
6581   if (TREE_CODE (*node) == FUNCTION_DECL)
6582     {
6583       /* Set the attribute and mark it for disregarding inline limits.  */
6584       DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
6585     }
6586   else
6587     {
6588       warning (OPT_Wattributes, "%qE attribute ignored", name);
6589       *no_add_attrs = true;
6590     }
6591 
6592   return NULL_TREE;
6593 }
6594 
6595 /* Handle a "malloc" attribute; arguments as in
6596    struct attribute_spec.handler.  */
6597 
6598 static tree
6599 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6600 			 int ARG_UNUSED (flags), bool *no_add_attrs)
6601 {
6602   if (TREE_CODE (*node) == FUNCTION_DECL
6603       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6604     DECL_IS_MALLOC (*node) = 1;
6605   else
6606     {
6607       warning (OPT_Wattributes, "%qs attribute ignored",
6608 	       IDENTIFIER_POINTER (name));
6609       *no_add_attrs = true;
6610     }
6611 
6612   return NULL_TREE;
6613 }
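
/* Usage sketch in C terms, not taken from the original sources:

     extern void *my_alloc (unsigned long) __attribute__ ((__malloc__));

   The attribute is only accepted on functions returning a pointer, since
   DECL_IS_MALLOC promises that the returned storage aliases nothing
   else.  */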
6614 
6615 /* Fake handler for attributes we don't properly support.  */
6616 
6617 tree
6618 fake_attribute_handler (tree * ARG_UNUSED (node),
6619 			tree ARG_UNUSED (name),
6620 			tree ARG_UNUSED (args),
6621 			int  ARG_UNUSED (flags),
6622 			bool * ARG_UNUSED (no_add_attrs))
6623 {
6624   return NULL_TREE;
6625 }
6626 
6627 /* Handle a "type_generic" attribute.  */
6628 
6629 static tree
6630 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6631 			       tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6632 			       bool * ARG_UNUSED (no_add_attrs))
6633 {
6634   /* Ensure we have a function type.  */
6635   gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
6636 
6637   /* Ensure we have a variadic function.  */
6638   gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6639 
6640   return NULL_TREE;
6641 }
6642 
6643 /* Handle a "flatten" attribute; arguments as in
6644    struct attribute_spec.handler.  */
6645 
6646 static tree
6647 handle_flatten_attribute (tree *node, tree name,
6648 			  tree args ATTRIBUTE_UNUSED,
6649 			  int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
6650 {
6651   if (TREE_CODE (*node) == FUNCTION_DECL)
6652     /* Do nothing else, just set the attribute.  We'll get at
6653        it later with lookup_attribute.  */
6654     ;
6655   else
6656     {
6657       warning (OPT_Wattributes, "%qE attribute ignored", name);
6658       *no_add_attrs = true;
6659     }
6660 
6661   return NULL_TREE;
6662 }
6663 
6664 /* Handle a "used" attribute; arguments as in
6665    struct attribute_spec.handler.  */
6666 
6667 static tree
6668 handle_used_attribute (tree *pnode, tree name, tree ARG_UNUSED (args),
6669 		       int ARG_UNUSED (flags), bool *no_add_attrs)
6670 {
6671   tree node = *pnode;
6672 
6673   if (TREE_CODE (node) == FUNCTION_DECL
6674       || (VAR_P (node) && TREE_STATIC (node))
6675       || (TREE_CODE (node) == TYPE_DECL))
6676     {
6677       TREE_USED (node) = 1;
6678       DECL_PRESERVE_P (node) = 1;
6679       if (VAR_P (node))
6680 	DECL_READ_P (node) = 1;
6681     }
6682   else
6683     {
6684       warning (OPT_Wattributes, "%qE attribute ignored", name);
6685       *no_add_attrs = true;
6686     }
6687 
6688   return NULL_TREE;
6689 }
6690 
6691 /* Handle a "cold" attribute; arguments as in
6692    struct attribute_spec.handler.  */
6693 
6694 static tree
6695 handle_cold_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6696 		       int ARG_UNUSED (flags), bool *no_add_attrs)
6697 {
6698   if (TREE_CODE (*node) == FUNCTION_DECL
6699       || TREE_CODE (*node) == LABEL_DECL)
6700     {
6701       /* Attribute cold processing is done later with lookup_attribute.  */
6702     }
6703   else
6704     {
6705       warning (OPT_Wattributes, "%qE attribute ignored", name);
6706       *no_add_attrs = true;
6707     }
6708 
6709   return NULL_TREE;
6710 }
6711 
6712 /* Handle a "hot" attribute; arguments as in
6713    struct attribute_spec.handler.  */
6714 
6715 static tree
6716 handle_hot_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6717 		      int ARG_UNUSED (flags), bool *no_add_attrs)
6718 {
6719   if (TREE_CODE (*node) == FUNCTION_DECL
6720       || TREE_CODE (*node) == LABEL_DECL)
6721     {
6722       /* Attribute hot processing is done later with lookup_attribute.  */
6723     }
6724   else
6725     {
6726       warning (OPT_Wattributes, "%qE attribute ignored", name);
6727       *no_add_attrs = true;
6728     }
6729 
6730   return NULL_TREE;
6731 }
6732 
6733 /* Handle a "target" attribute.  */
6734 
6735 static tree
6736 handle_target_attribute (tree *node, tree name, tree args, int flags,
6737 			 bool *no_add_attrs)
6738 {
6739   /* Ensure we have a function type.  */
6740   if (TREE_CODE (*node) != FUNCTION_DECL)
6741     {
6742       warning (OPT_Wattributes, "%qE attribute ignored", name);
6743       *no_add_attrs = true;
6744     }
6745   else if (lookup_attribute ("target_clones", DECL_ATTRIBUTES (*node)))
6746     {
6747       warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6748 		   "with %qs attribute", name, "target_clones");
6749       *no_add_attrs = true;
6750     }
6751   else if (!targetm.target_option.valid_attribute_p (*node, name, args, flags))
6752     *no_add_attrs = true;
6753 
6754   /* Check that there's no empty string in values of the attribute.  */
6755   for (tree t = args; t != NULL_TREE; t = TREE_CHAIN (t))
6756     {
6757       tree value = TREE_VALUE (t);
6758       if (TREE_CODE (value) == STRING_CST
6759 	  && TREE_STRING_LENGTH (value) == 1
6760 	  && TREE_STRING_POINTER (value)[0] == '\0')
6761 	{
6762 	  warning (OPT_Wattributes, "empty string in attribute %<target%>");
6763 	  *no_add_attrs = true;
6764 	}
6765     }
6766 
6767   return NULL_TREE;
6768 }
6769 
6770 /* Handle a "target_clones" attribute.  */
6771 
6772 static tree
6773 handle_target_clones_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6774 			  int ARG_UNUSED (flags), bool *no_add_attrs)
6775 {
6776   /* Ensure we have a function type.  */
6777   if (TREE_CODE (*node) == FUNCTION_DECL)
6778     {
6779       if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6780 	{
6781 	  warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6782 		   "with %qs attribute", name, "always_inline");
6783 	  *no_add_attrs = true;
6784 	}
6785       else if (lookup_attribute ("target", DECL_ATTRIBUTES (*node)))
6786 	{
6787 	  warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6788 		   "with %qs attribute", name, "target");
6789 	  *no_add_attrs = true;
6790 	}
6791       else
6792 	/* Do not inline functions with multiple clone targets.  */
6793 	DECL_UNINLINABLE (*node) = 1;
6794     }
6795   else
6796     {
6797       warning (OPT_Wattributes, "%qE attribute ignored", name);
6798       *no_add_attrs = true;
6799     }
6800   return NULL_TREE;
6801 }
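
/* Usage sketch in C terms, not taken from the original sources:

     extern int dispatch (void)
       __attribute__ ((__target_clones__ ("avx2", "default")));

   As coded above, target_clones conflicts with both always_inline and
   target, and a clone-dispatched function is never inlined.  */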
6802 
6803 /* Handle a "vector_size" attribute; arguments as in
6804    struct attribute_spec.handler.  */
6805 
6806 static tree
6807 handle_vector_size_attribute (tree *node, tree name, tree args,
6808 			      int ARG_UNUSED (flags), bool *no_add_attrs)
6809 {
6810   tree type = *node;
6811   tree vector_type;
6812 
6813   *no_add_attrs = true;
6814 
6815   /* We need to provide for vector pointers, vector arrays, and
6816      functions returning vectors.  For example:
6817 
6818        __attribute__((vector_size(16))) short *foo;
6819 
6820      In this case, the mode is SI, but the type being modified is
6821      HI, so we need to look further.  */
6822   while (POINTER_TYPE_P (type)
6823 	 || TREE_CODE (type) == FUNCTION_TYPE
6824 	 || TREE_CODE (type) == ARRAY_TYPE)
6825     type = TREE_TYPE (type);
6826 
6827   vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6828   if (!vector_type)
6829     return NULL_TREE;
6830 
6831   /* Build back pointers if needed.  */
6832   *node = reconstruct_complex_type (*node, vector_type);
6833 
6834   return NULL_TREE;
6835 }
6836 
6837 /* Handle a "vector_type" attribute; arguments as in
6838    struct attribute_spec.handler.  */
6839 
6840 static tree
6841 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6842 			      int ARG_UNUSED (flags), bool *no_add_attrs)
6843 {
6844   tree type = *node;
6845   tree vector_type;
6846 
6847   *no_add_attrs = true;
6848 
6849   if (TREE_CODE (type) != ARRAY_TYPE)
6850     {
6851       error ("attribute %qs applies to array types only",
6852 	     IDENTIFIER_POINTER (name));
6853       return NULL_TREE;
6854     }
6855 
6856   vector_type = build_vector_type_for_array (type, name);
6857   if (!vector_type)
6858     return NULL_TREE;
6859 
6860   TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6861   *node = vector_type;
6862 
6863   return NULL_TREE;
6864 }
6865 
6866 /* ----------------------------------------------------------------------- *
6867  *                              BUILTIN FUNCTIONS                          *
6868  * ----------------------------------------------------------------------- */
6869 
6870 /* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
6871    names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
6872    if nonansi_p and flag_no_nonansi_builtin.  */
6873 
6874 static void
6875 def_builtin_1 (enum built_in_function fncode,
6876 	       const char *name,
6877 	       enum built_in_class fnclass,
6878 	       tree fntype, tree libtype,
6879 	       bool both_p, bool fallback_p,
6880 	       bool nonansi_p ATTRIBUTE_UNUSED,
6881 	       tree fnattrs, bool implicit_p)
6882 {
6883   tree decl;
6884   const char *libname;
6885 
6886   /* Preserve an already installed decl.  It most likely was set up in advance
6887      (e.g. as part of the internal builtins) for specific reasons.  */
6888   if (builtin_decl_explicit (fncode))
6889     return;
6890 
6891   if (fntype == error_mark_node)
6892     return;
6893 
6894   gcc_assert ((!both_p && !fallback_p)
6895 	      || !strncmp (name, "__builtin_",
6896 			   strlen ("__builtin_")));
6897 
6898   libname = name + strlen ("__builtin_");
6899   decl = add_builtin_function (name, fntype, fncode, fnclass,
6900 			       (fallback_p ? libname : NULL),
6901 			       fnattrs);
6902   if (both_p)
6903     /* ??? This is normally further controlled by command-line options
6904        like -fno-builtin, but we don't have them for Ada.  */
6905     add_builtin_function (libname, libtype, fncode, fnclass,
6906 			  NULL, fnattrs);
6907 
6908   set_builtin_decl (fncode, decl, implicit_p);
6909 }
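
/* For example, a sketch of the expansion rather than a quote from the
   original sources: for memcpy with BOTH_P set, both "__builtin_memcpy"
   and the plain "memcpy" fallback get declared, LIBNAME being the name
   with the "__builtin_" prefix stripped.  */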
6910 
6911 static int flag_isoc94 = 0;
6912 static int flag_isoc99 = 0;
6913 static int flag_isoc11 = 0;
6914 static int flag_isoc2x = 0;
6915 
6916 /* Install what the common builtins.def offers plus our local additions.
6917 
6918    Note that ada-builtins.def is included first so that locally redefined
6919    built-in functions take precedence over the commonly defined ones.  */
6920 
6921 static void
6922 install_builtin_functions (void)
6923 {
6924 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6925 		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
6926   if (NAME && COND)							\
6927     def_builtin_1 (ENUM, NAME, CLASS,                                   \
6928                    builtin_types[(int) TYPE],                           \
6929                    builtin_types[(int) LIBTYPE],                        \
6930                    BOTH_P, FALLBACK_P, NONANSI_P,                       \
6931                    built_in_attributes[(int) ATTRS], IMPLICIT);
6932 #define DEF_ADA_BUILTIN(ENUM, NAME, TYPE, ATTRS)		\
6933   DEF_BUILTIN (ENUM, "__builtin_" NAME, BUILT_IN_FRONTEND, TYPE, BT_LAST, \
6934 	       false, false, false, ATTRS, true, true)
6935 #include "ada-builtins.def"
6936 #include "builtins.def"
6937 }
6938 
6939 /* ----------------------------------------------------------------------- *
6940  *                              BUILTIN FUNCTIONS                          *
6941  * ----------------------------------------------------------------------- */
6942 
6943 /* Install the builtin functions we might need.  */
6944 
6945 void
6946 gnat_install_builtins (void)
6947 {
6948   install_builtin_elementary_types ();
6949   install_builtin_function_types ();
6950   install_builtin_attributes ();
6951 
6952   /* Install builtins used by generic middle-end pieces first.  Some of these
6953      know about internal specificities and control attributes accordingly, for
6954      instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
6955      the generic definition from builtins.def.  */
6956   build_common_builtin_nodes ();
6957 
6958   /* Now, install the target specific builtins, such as the AltiVec family on
6959      ppc, and the common set as exposed by builtins.def.  */
6960   targetm.init_builtins ();
6961   install_builtin_functions ();
6962 }
6963 
6964 #include "gt-ada-utils.h"
6965 #include "gtype-ada.h"
6966