1 /* C-compiler utilities for types and variables storage layout
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1996, 1998,
3 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "flags.h"
29 #include "function.h"
30 #include "expr.h"
31 #include "toplev.h"
32 #include "ggc.h"
33 #include "target.h"
34 #include "langhooks.h"
35
/* Set to one when set_sizetype has been called.  */
static int sizetype_set;

/* List of types created before set_sizetype has been called.  We do not
   make this a GGC root since we want these nodes to be reclaimed.
   (Presumably laid out for real once set_sizetype runs -- defined later
   in this file; confirm against set_sizetype.)  */
static tree early_type_list;

/* Data type for the expressions representing sizes of data types.
   It is the first integer type laid out.  Indexed by sizetype_kind.  */
tree sizetype_tab[(int) TYPE_KIND_LAST];

/* If nonzero, this is an upper limit on alignment of structure fields.
   The value is measured in bits.  */
unsigned int maximum_field_alignment;

/* If nonzero, the alignment of a bitstring or (power-)set value, in bits.
   May be overridden by front-ends.  */
unsigned int set_alignment = 0;

/* Nonzero if all REFERENCE_TYPEs are internal and hence should be
   allocated in Pmode, not ptr_mode.   Set only by internal_reference_types
   called only by a front end.  */
static int reference_types_internal = 0;

/* Forward declarations for functions defined later in this file.  */
static void finalize_record_size	PARAMS ((record_layout_info));
static void finalize_type_size		PARAMS ((tree));
static void place_union_field		PARAMS ((record_layout_info, tree));
static unsigned int update_alignment_for_field
                                        PARAMS ((record_layout_info, tree,
						 unsigned int));
extern void debug_rli			PARAMS ((record_layout_info));

/* SAVE_EXPRs for sizes of types and decls, waiting to be expanded.  */

static GTY(()) tree pending_sizes;

/* Nonzero means cannot safely call expand_expr now,
   so put variable sizes onto `pending_sizes' instead.  */

int immediate_size_expand;
76
77 /* Show that REFERENCE_TYPES are internal and should be Pmode. Called only
78 by front end. */
79
void
internal_reference_types ()
{
  /* Latch the flag; there is deliberately no way to reset it.  */
  reference_types_internal = 1;
}
85
86 /* Get a list of all the objects put on the pending sizes list. */
87
88 tree
get_pending_sizes()89 get_pending_sizes ()
90 {
91 tree chain = pending_sizes;
92 tree t;
93
94 /* Put each SAVE_EXPR into the current function. */
95 for (t = chain; t; t = TREE_CHAIN (t))
96 SAVE_EXPR_CONTEXT (TREE_VALUE (t)) = current_function_decl;
97
98 pending_sizes = 0;
99 return chain;
100 }
101
102 /* Return nonzero if EXPR is present on the pending sizes list. */
103
104 int
is_pending_size(expr)105 is_pending_size (expr)
106 tree expr;
107 {
108 tree t;
109
110 for (t = pending_sizes; t; t = TREE_CHAIN (t))
111 if (TREE_VALUE (t) == expr)
112 return 1;
113 return 0;
114 }
115
116 /* Add EXPR to the pending sizes list. */
117
118 void
put_pending_size(expr)119 put_pending_size (expr)
120 tree expr;
121 {
122 /* Strip any simple arithmetic from EXPR to see if it has an underlying
123 SAVE_EXPR. */
124 while (TREE_CODE_CLASS (TREE_CODE (expr)) == '1'
125 || (TREE_CODE_CLASS (TREE_CODE (expr)) == '2'
126 && TREE_CONSTANT (TREE_OPERAND (expr, 1))))
127 expr = TREE_OPERAND (expr, 0);
128
129 if (TREE_CODE (expr) == SAVE_EXPR)
130 pending_sizes = tree_cons (NULL_TREE, expr, pending_sizes);
131 }
132
133 /* Put a chain of objects into the pending sizes list, which must be
134 empty. */
135
void
put_pending_sizes (chain)
     tree chain;
{
  /* The pending list must be empty: this is a wholesale replacement, not
     an append, so a non-empty list would silently be lost.  */
  if (pending_sizes)
    abort ();

  pending_sizes = chain;
}
145
146 /* Given a size SIZE that may not be a constant, return a SAVE_EXPR
147 to serve as the actual size-expression for a type or decl. */
148
tree
variable_size (size)
     tree size;
{
  /* If the language-processor is to take responsibility for variable-sized
     items (e.g., languages which have elaboration procedures like Ada),
     just return SIZE unchanged.  Likewise for self-referential sizes and
     constant sizes.  (A negative global_bindings_p return is the hook's
     signal that the front end handles variable sizes itself.)  */
  if (TREE_CONSTANT (size)
      || (*lang_hooks.decls.global_bindings_p) () < 0
      || contains_placeholder_p (size))
    return size;

  /* Wrap SIZE so it is computed only once.  */
  size = save_expr (size);

  /* If an array with a variable number of elements is declared, and
     the elements require destruction, we will emit a cleanup for the
     array.  That cleanup is run both on normal exit from the block
     and in the exception-handler for the block.  Normally, when code
     is used in both ordinary code and in an exception handler it is
     `unsaved', i.e., all SAVE_EXPRs are recalculated.  However, we do
     not wish to do that here; the array-size is the same in both
     places.  */
  if (TREE_CODE (size) == SAVE_EXPR)
    SAVE_EXPR_PERSISTENT_P (size) = 1;

  /* A variable size at file scope is an error; recover with size 1.  */
  if ((*lang_hooks.decls.global_bindings_p) ())
    {
      if (TREE_CONSTANT (size))
	error ("type size can't be explicitly evaluated");
      else
	error ("variable-size type declared outside of any function");

      return size_one_node;
    }

  if (immediate_size_expand)
    /* NULL_RTX is not defined; neither is the rtx type.
       Also, we would like to pass const0_rtx here, but don't have it.  */
    expand_expr (size, expand_expr (integer_zero_node, NULL_RTX, VOIDmode, 0),
		 VOIDmode, 0);
  else if (cfun != 0 && cfun->x_dont_save_pending_sizes_p)
    /* The front-end doesn't want us to keep a list of the expressions
       that determine sizes for variable size objects.  */
    ;
  else
    put_pending_size (size);

  return size;
}
199
200 #ifndef MAX_FIXED_MODE_SIZE
201 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
202 #endif
203
204 /* Return the machine mode to use for a nonscalar of SIZE bits.
205 The mode must be in class CLASS, and have exactly that many bits.
206 If LIMIT is nonzero, modes of wider than MAX_FIXED_MODE_SIZE will not
207 be used. */
208
209 enum machine_mode
mode_for_size(size,class,limit)210 mode_for_size (size, class, limit)
211 unsigned int size;
212 enum mode_class class;
213 int limit;
214 {
215 enum machine_mode mode;
216
217 if (limit && size > MAX_FIXED_MODE_SIZE)
218 return BLKmode;
219
220 /* Get the first mode which has this size, in the specified class. */
221 for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
222 mode = GET_MODE_WIDER_MODE (mode))
223 if (GET_MODE_BITSIZE (mode) == size)
224 return mode;
225
226 return BLKmode;
227 }
228
229 /* Similar, except passed a tree node. */
230
231 enum machine_mode
mode_for_size_tree(size,class,limit)232 mode_for_size_tree (size, class, limit)
233 tree size;
234 enum mode_class class;
235 int limit;
236 {
237 if (TREE_CODE (size) != INTEGER_CST
238 /* What we really want to say here is that the size can fit in a
239 host integer, but we know there's no way we'd find a mode for
240 this many bits, so there's no point in doing the precise test. */
241 || compare_tree_int (size, 1000) > 0)
242 return BLKmode;
243 else
244 return mode_for_size (TREE_INT_CST_LOW (size), class, limit);
245 }
246
247 /* Similar, but never return BLKmode; return the narrowest mode that
248 contains at least the requested number of bits. */
249
enum machine_mode
smallest_mode_for_size (size, class)
     unsigned int size;
     enum mode_class class;
{
  enum machine_mode mode;

  /* Get the first mode which has at least this size, in the
     specified class.  */
  for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    if (GET_MODE_BITSIZE (mode) >= size)
      return mode;

  /* Unlike mode_for_size, failure here is a compiler bug: the caller
     is entitled to assume some mode is wide enough.  */
  abort ();
}
266
267 /* Find an integer mode of the exact same size, or BLKmode on failure. */
268
enum machine_mode
int_mode_for_mode (mode)
     enum machine_mode mode;
{
  switch (GET_MODE_CLASS (mode))
    {
    case MODE_INT:
    case MODE_PARTIAL_INT:
      /* Already an integer mode; return it unchanged.  */
      break;

    case MODE_COMPLEX_INT:
    case MODE_COMPLEX_FLOAT:
    case MODE_FLOAT:
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FLOAT:
      /* Look up an integer mode of the same total width; may yield
	 BLKmode if no such integer mode exists.  */
      mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
      break;

    case MODE_RANDOM:
      /* BLKmode is the only MODE_RANDOM input we accept; anything else
	 falls through to the abort below.  */
      if (mode == BLKmode)
	break;

      /* ... fall through ...  */

    case MODE_CC:
    default:
      abort ();
    }

  return mode;
}
300
301 /* Return the alignment of MODE. This will be bounded by 1 and
302 BIGGEST_ALIGNMENT. */
303
304 unsigned int
get_mode_alignment(mode)305 get_mode_alignment (mode)
306 enum machine_mode mode;
307 {
308 unsigned int alignment;
309
310 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
311 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT)
312 alignment = GET_MODE_UNIT_SIZE (mode);
313 else
314 alignment = GET_MODE_SIZE (mode);
315
316 /* Extract the LSB of the size. */
317 alignment = alignment & -alignment;
318 alignment *= BITS_PER_UNIT;
319
320 alignment = MIN (BIGGEST_ALIGNMENT, MAX (1, alignment));
321 return alignment;
322 }
323
324 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
325 This can only be applied to objects of a sizetype. */
326
327 tree
round_up(value,divisor)328 round_up (value, divisor)
329 tree value;
330 int divisor;
331 {
332 tree arg = size_int_type (divisor, TREE_TYPE (value));
333
334 return size_binop (MULT_EXPR, size_binop (CEIL_DIV_EXPR, value, arg), arg);
335 }
336
337 /* Likewise, but round down. */
338
339 tree
round_down(value,divisor)340 round_down (value, divisor)
341 tree value;
342 int divisor;
343 {
344 tree arg = size_int_type (divisor, TREE_TYPE (value));
345
346 return size_binop (MULT_EXPR, size_binop (FLOOR_DIV_EXPR, value, arg), arg);
347 }
348
349 /* Set the size, mode and alignment of a ..._DECL node.
350 TYPE_DECL does need this for C++.
351 Note that LABEL_DECL and CONST_DECL nodes do not need this,
352 and FUNCTION_DECL nodes have them set up in a special (and simple) way.
353 Don't call layout_decl for them.
354
355 KNOWN_ALIGN is the amount of alignment we can assume this
356 decl has with no special effort. It is relevant only for FIELD_DECLs
357 and depends on the previous fields.
358 All that matters about KNOWN_ALIGN is which powers of 2 divide it.
359 If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
360 the record will be aligned to suit. */
361
void
layout_decl (decl, known_align)
     tree decl;
     unsigned int known_align;
{
  tree type = TREE_TYPE (decl);
  enum tree_code code = TREE_CODE (decl);
  rtx rtl = NULL_RTX;

  /* CONST_DECLs need no layout; any other unexpected decl kind is a
     compiler bug.  */
  if (code == CONST_DECL)
    return;
  else if (code != VAR_DECL && code != PARM_DECL && code != RESULT_DECL
	   && code != TYPE_DECL && code != FIELD_DECL)
    abort ();

  /* Remember any RTL already assigned; its mode and memory attributes
     are refreshed at the end once the decl's layout is settled.  */
  rtl = DECL_RTL_IF_SET (decl);

  if (type == error_mark_node)
    type = void_type_node;

  /* Usually the size and mode come from the data type without change,
     however, the front-end may set the explicit width of the field, so its
     size may not be the same as the size of its type.  This happens with
     bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
     also happens with other fields.  For example, the C++ front-end creates
     zero-sized fields corresponding to empty base classes, and depends on
     layout_type setting DECL_FIELD_BITPOS correctly for the field.  Set the
     size in bytes from the size in bits.  If we have already set the mode,
     don't set it again since we can be called twice for FIELD_DECLs.  */

  TREE_UNSIGNED (decl) = TREE_UNSIGNED (type);
  if (DECL_MODE (decl) == VOIDmode)
    DECL_MODE (decl) = TYPE_MODE (type);

  if (DECL_SIZE (decl) == 0)
    {
      /* No explicit size: inherit both bit and byte sizes from TYPE.  */
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
    }
  else
    /* Explicit bit size: derive the byte size by rounding up.  */
    DECL_SIZE_UNIT (decl)
      = convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
				       bitsize_unit_node));

  /* Force alignment required for the data type.
     But if the decl itself wants greater alignment, don't override that.
     Likewise, if the decl is packed, don't override it.  */
  if (! (code == FIELD_DECL && DECL_BIT_FIELD (decl))
      && (DECL_ALIGN (decl) == 0
	  || (! (code == FIELD_DECL && DECL_PACKED (decl))
	      && TYPE_ALIGN (type) > DECL_ALIGN (decl))))
    {
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      DECL_USER_ALIGN (decl) = 0;
    }

  /* For fields, set the bit field type and update the alignment.  */
  if (code == FIELD_DECL)
    {
      DECL_BIT_FIELD_TYPE (decl) = DECL_BIT_FIELD (decl) ? type : 0;
      if (maximum_field_alignment != 0)
	DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), maximum_field_alignment);

      /* If the field is of variable size, we can't misalign it since we
	 have no way to make a temporary to align the result.  But this
	 isn't an issue if the decl is not addressable.  Likewise if it
	 is of unknown size.  */
      else if (DECL_PACKED (decl)
	       && (DECL_NONADDRESSABLE_P (decl)
		   || DECL_SIZE_UNIT (decl) == 0
		   || TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST))
	{
	  DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);
	  DECL_USER_ALIGN (decl) = 0;
	}
    }

  /* See if we can use an ordinary integer mode for a bit-field.
     Conditions are: a fixed size that is correct for another mode
     and occupying a complete byte or bytes on proper boundary.  */
  if (code == FIELD_DECL && DECL_BIT_FIELD (decl)
      && TYPE_SIZE (type) != 0
      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
      && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
    {
      enum machine_mode xmode
	= mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1);

      if (xmode != BLKmode && known_align >= GET_MODE_ALIGNMENT (xmode))
	{
	  /* Promote the bit-field to a plain integer of mode XMODE.  */
	  DECL_ALIGN (decl) = MAX (GET_MODE_ALIGNMENT (xmode),
				   DECL_ALIGN (decl));
	  DECL_MODE (decl) = xmode;
	  DECL_BIT_FIELD (decl) = 0;
	}
    }

  /* Turn off DECL_BIT_FIELD if we won't need it set.  */
  if (code == FIELD_DECL && DECL_BIT_FIELD (decl)
      && TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
      && known_align >= TYPE_ALIGN (type)
      && DECL_ALIGN (decl) >= TYPE_ALIGN (type)
      && DECL_SIZE_UNIT (decl) != 0)
    DECL_BIT_FIELD (decl) = 0;

  /* Evaluate nonconstant size only once, either now or as soon as safe.  */
  if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
  if (DECL_SIZE_UNIT (decl) != 0
      && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
    DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));

  /* If requested, warn about definitions of large data objects.  */
  if (warn_larger_than
      && (code == VAR_DECL || code == PARM_DECL)
      && ! DECL_EXTERNAL (decl))
    {
      tree size = DECL_SIZE_UNIT (decl);

      if (size != 0 && TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, larger_than_size) > 0)
	{
	  unsigned int size_as_int = TREE_INT_CST_LOW (size);

	  /* Print the exact size if it fits in an int; otherwise just
	     report that it exceeds the threshold.  */
	  if (compare_tree_int (size, size_as_int) == 0)
	    warning_with_decl (decl, "size of `%s' is %d bytes", size_as_int);
	  else
	    warning_with_decl (decl, "size of `%s' is larger than %d bytes",
			       larger_than_size);
	}
    }

  /* If the RTL was already set, update its mode and mem attributes.  */
  if (rtl)
    {
      PUT_MODE (rtl, DECL_MODE (decl));
      /* Clear DECL_RTL before set_mem_attributes so it recomputes
	 attributes from the decl rather than the stale RTL.  */
      SET_DECL_RTL (decl, 0);
      set_mem_attributes (rtl, decl, 1);
      SET_DECL_RTL (decl, rtl);
    }
}
503
504 /* Hook for a front-end function that can modify the record layout as needed
505 immediately before it is finalized. */
506
/* Hook the front end installs to tweak a record layout just before it is
   finalized; null when the front end has nothing to adjust.  */
void (*lang_adjust_rli) PARAMS ((record_layout_info)) = 0;

/* Install F as the lang_adjust_rli hook.  */
void
set_lang_adjust_rli (f)
     void (*f) PARAMS ((record_layout_info));
{
  lang_adjust_rli = f;
}
515
516 /* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
517 QUAL_UNION_TYPE. Return a pointer to a struct record_layout_info which
518 is to be passed to all other layout functions for this record. It is the
519 responsibility of the caller to call `free' for the storage returned.
520 Note that garbage collection is not permitted until we finish laying
521 out the record. */
522
record_layout_info
start_record_layout (t)
     tree t;
{
  /* Caller owns this storage and must free it (see function comment).  */
  record_layout_info rli
    = (record_layout_info) xmalloc (sizeof (struct record_layout_info_s));

  rli->t = t;

  /* If the type has a minimum specified alignment (via an attribute
     declaration, for example) use it -- otherwise, start with a
     one-byte alignment.  */
  rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
  rli->unpacked_align = rli->unpadded_align = rli->record_align;
  rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);

#ifdef STRUCTURE_SIZE_BOUNDARY
  /* Packed structures don't need to have minimum size.  */
  if (! TYPE_PACKED (t))
    rli->record_align = MAX (rli->record_align, STRUCTURE_SIZE_BOUNDARY);
#endif

  /* Layout starts at offset 0 bytes, bit position 0, no fields seen.  */
  rli->offset = size_zero_node;
  rli->bitpos = bitsize_zero_node;
  rli->prev_field = 0;
  rli->pending_statics = 0;
  rli->packed_maybe_necessary = 0;

  return rli;
}
553
554 /* These four routines perform computations that convert between
555 the offset/bitpos forms and byte and bit offsets. */
556
557 tree
bit_from_pos(offset,bitpos)558 bit_from_pos (offset, bitpos)
559 tree offset, bitpos;
560 {
561 return size_binop (PLUS_EXPR, bitpos,
562 size_binop (MULT_EXPR, convert (bitsizetype, offset),
563 bitsize_unit_node));
564 }
565
566 tree
byte_from_pos(offset,bitpos)567 byte_from_pos (offset, bitpos)
568 tree offset, bitpos;
569 {
570 return size_binop (PLUS_EXPR, offset,
571 convert (sizetype,
572 size_binop (TRUNC_DIV_EXPR, bitpos,
573 bitsize_unit_node)));
574 }
575
void
pos_from_byte (poffset, pbitpos, off_align, pos)
     tree *poffset, *pbitpos;
     unsigned int off_align;
     tree pos;
{
  /* POS is a byte position; OFF_ALIGN is in bits.  Round POS down to a
     multiple of OFF_ALIGN/BITS_PER_UNIT bytes for *POFFSET ...  */
  *poffset
    = size_binop (MULT_EXPR,
		  convert (sizetype,
			   size_binop (FLOOR_DIV_EXPR, pos,
				       bitsize_int (off_align
						    / BITS_PER_UNIT))),
		  size_int (off_align / BITS_PER_UNIT));
  /* ... and express the leftover bytes as a bit position.  */
  *pbitpos = size_binop (MULT_EXPR,
			 size_binop (FLOOR_MOD_EXPR, pos,
				     bitsize_int (off_align / BITS_PER_UNIT)),
			 bitsize_unit_node);
}
594
void
pos_from_bit (poffset, pbitpos, off_align, pos)
     tree *poffset, *pbitpos;
     unsigned int off_align;
     tree pos;
{
  /* POS is a bit position.  *POFFSET gets the largest multiple of
     OFF_ALIGN bits at or below POS, expressed in bytes ...  */
  *poffset = size_binop (MULT_EXPR,
			 convert (sizetype,
				  size_binop (FLOOR_DIV_EXPR, pos,
					      bitsize_int (off_align))),
			 size_int (off_align / BITS_PER_UNIT));
  /* ... and *PBITPOS the remaining bits, always < OFF_ALIGN.  */
  *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, bitsize_int (off_align));
}
608
609 /* Given a pointer to bit and byte offsets and an offset alignment,
610 normalize the offsets so they are within the alignment. */
611
void
normalize_offset (poffset, pbitpos, off_align)
     tree *poffset, *pbitpos;
     unsigned int off_align;
{
  /* If the bit position is now larger than it should be, adjust it
     downwards.  */
  if (compare_tree_int (*pbitpos, off_align) >= 0)
    {
      /* Number of whole OFF_ALIGN units contained in *PBITPOS.  */
      tree extra_aligns = size_binop (FLOOR_DIV_EXPR, *pbitpos,
				      bitsize_int (off_align));

      /* Move those whole units into the byte offset ...  */
      *poffset
	= size_binop (PLUS_EXPR, *poffset,
		      size_binop (MULT_EXPR, convert (sizetype, extra_aligns),
				  size_int (off_align / BITS_PER_UNIT)));

      /* ... leaving a bit position strictly below OFF_ALIGN.  */
      *pbitpos
	= size_binop (FLOOR_MOD_EXPR, *pbitpos, bitsize_int (off_align));
    }
}
633
634 /* Print debugging information about the information in RLI. */
635
void
debug_rli (rli)
     record_layout_info rli;
{
  /* Dump the type being laid out and the current position.  */
  print_node_brief (stderr, "type", rli->t, 0);
  print_node_brief (stderr, "\noffset", rli->offset, 0);
  print_node_brief (stderr, " bitpos", rli->bitpos, 0);

  fprintf (stderr, "\naligns: rec = %u, unpack = %u, unpad = %u, off = %u\n",
	   rli->record_align, rli->unpacked_align, rli->unpadded_align,
	   rli->offset_align);
  if (rli->packed_maybe_necessary)
    fprintf (stderr, "packed may be necessary\n");

  /* Static members queued for layout after the record itself.  */
  if (rli->pending_statics)
    {
      fprintf (stderr, "pending statics:\n");
      debug_tree (rli->pending_statics);
    }
}
656
657 /* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
658 BITPOS if necessary to keep BITPOS below OFFSET_ALIGN. */
659
void
normalize_rli (rli)
     record_layout_info rli;
{
  /* Fold any overflow of BITPOS past OFFSET_ALIGN into OFFSET.  */
  normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
}
666
667 /* Returns the size in bytes allocated so far. */
668
tree
rli_size_unit_so_far (rli)
     record_layout_info rli;
{
  /* Collapse the (offset, bitpos) pair to whole bytes.  */
  return byte_from_pos (rli->offset, rli->bitpos);
}
675
676 /* Returns the size in bits allocated so far. */
677
tree
rli_size_so_far (rli)
     record_layout_info rli;
{
  /* Collapse the (offset, bitpos) pair to a total bit count.  */
  return bit_from_pos (rli->offset, rli->bitpos);
}
684
685 /* FIELD is about to be added to RLI->T. The alignment (in bits) of
686 the next available location is given by KNOWN_ALIGN. Update the
687 variable alignment fields in RLI, and return the alignment to give
688 the FIELD. */
689
static unsigned int
update_alignment_for_field (rli, field, known_align)
     record_layout_info rli;
     tree field;
     unsigned int known_align;
{
  /* The alignment required for FIELD.  */
  unsigned int desired_align;
  /* The type of this field.  */
  tree type = TREE_TYPE (field);
  /* True if the field was explicitly aligned by the user.  */
  bool user_align;

  /* Lay out the field so we know what alignment it needs.  For a
     packed field, use the alignment as specified, disregarding what
     the type would want.  (The values are captured before layout_decl
     because layout_decl may change them for unpacked fields.)  */
  desired_align = DECL_ALIGN (field);
  user_align = DECL_USER_ALIGN (field);
  layout_decl (field, known_align);
  if (! DECL_PACKED (field))
    {
      desired_align = DECL_ALIGN (field);
      user_align = DECL_USER_ALIGN (field);
    }
  else if (!DECL_BIT_FIELD_TYPE (field))
    /* Even packed non-bit-fields get byte alignment.  */
    desired_align = MAX (desired_align, BITS_PER_UNIT);

  /* Some targets (i.e. i386, VMS) limit struct field alignment
     to a lower boundary than alignment of variables unless
     it was overridden by attribute aligned.  */
#ifdef BIGGEST_FIELD_ALIGNMENT
  if (!user_align)
    desired_align
      = MIN (desired_align, (unsigned) BIGGEST_FIELD_ALIGNMENT);
#endif

#ifdef ADJUST_FIELD_ALIGN
  if (!user_align)
    desired_align = ADJUST_FIELD_ALIGN (field, desired_align);
#endif

  /* Record must have at least as much alignment as any field.
     Otherwise, the alignment of the field within the record is
     meaningless.  This first arm handles Microsoft-compatible
     bitfield layout; the PCC arm below handles the traditional
     Unix-compatible rules.  */
  if ((* targetm.ms_bitfield_layout_p) (rli->t)
      && type != error_mark_node
      && DECL_BIT_FIELD_TYPE (field)
      && ! integer_zerop (TYPE_SIZE (type)))
    {
      /* Here, the alignment of the underlying type of a bitfield can
	 affect the alignment of a record; even a zero-sized field
	 can do this.  The alignment should be to the alignment of
	 the type, except that for zero-size bitfields this only
	 applies if there was an immediately prior, nonzero-size
	 bitfield.  (That's the way it is, experimentally.) */
      if (! integer_zerop (DECL_SIZE (field))
	  ? ! DECL_PACKED (field)
	  : (rli->prev_field
	     && DECL_BIT_FIELD_TYPE (rli->prev_field)
	     && ! integer_zerop (DECL_SIZE (rli->prev_field))))
	{
	  unsigned int type_align = TYPE_ALIGN (type);
	  type_align = MAX (type_align, desired_align);
	  if (maximum_field_alignment != 0)
	    type_align = MIN (type_align, maximum_field_alignment);
	  rli->record_align = MAX (rli->record_align, type_align);
	  rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
	  rli->unpadded_align = MAX (rli->unpadded_align, DECL_ALIGN (field));
	}
      else
	desired_align = 1;
    }
  else
#ifdef PCC_BITFIELD_TYPE_MATTERS
  if (PCC_BITFIELD_TYPE_MATTERS && type != error_mark_node
      && ! (* targetm.ms_bitfield_layout_p) (rli->t)
      && DECL_BIT_FIELD_TYPE (field)
      && ! integer_zerop (TYPE_SIZE (type)))
    {
      /* For these machines, a zero-length field does not
	 affect the alignment of the structure as a whole.
	 It does, however, affect the alignment of the next field
	 within the structure.  */
      if (! integer_zerop (DECL_SIZE (field)))
	rli->record_align = MAX (rli->record_align, desired_align);
      else if (! DECL_PACKED (field) && !user_align)
	desired_align = TYPE_ALIGN (type);

      /* A named bit field of declared type `int'
	 forces the entire structure to have `int' alignment.  */
      if (DECL_NAME (field) != 0)
	{
	  unsigned int type_align = TYPE_ALIGN (type);

#ifdef ADJUST_FIELD_ALIGN
	  if (! TYPE_USER_ALIGN (type))
	    type_align = ADJUST_FIELD_ALIGN (field, type_align);
#endif

	  if (maximum_field_alignment != 0)
	    type_align = MIN (type_align, maximum_field_alignment);
	  else if (DECL_PACKED (field))
	    type_align = MIN (type_align, BITS_PER_UNIT);

	  rli->record_align = MAX (rli->record_align, type_align);
	  rli->unpadded_align = MAX (rli->unpadded_align, DECL_ALIGN (field));
	  if (warn_packed)
	    rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
	  user_align |= TYPE_USER_ALIGN (type);
	}
    }
  else
#endif
    {
      /* Default case: the record simply absorbs the field's alignment.  */
      rli->record_align = MAX (rli->record_align, desired_align);
      rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
      rli->unpadded_align = MAX (rli->unpadded_align, DECL_ALIGN (field));
    }

  TYPE_USER_ALIGN (rli->t) |= user_align;

  DECL_ALIGN (field) = desired_align;

  return desired_align;
}
816
817 /* Called from place_field to handle unions. */
818
static void
place_union_field (rli, field)
     record_layout_info rli;
     tree field;
{
  update_alignment_for_field (rli, field, /*known_align=*/0);

  /* Every union member lives at the start of the union.  */
  DECL_FIELD_OFFSET (field) = size_zero_node;
  DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
  SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);

  /* We assume the union's size will be a multiple of a byte so we don't
     bother with BITPOS.  A plain union is as big as its biggest member;
     a QUAL_UNION_TYPE sizes each member conditionally on its
     DECL_QUALIFIER.  */
  if (TREE_CODE (rli->t) == UNION_TYPE)
    rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
  else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
    rli->offset = fold (build (COND_EXPR, sizetype,
			       DECL_QUALIFIER (field),
			       DECL_SIZE_UNIT (field), rli->offset));
}
839
840 /* RLI contains information about the layout of a RECORD_TYPE. FIELD
841 is a FIELD_DECL to be added after those fields already present in
842 T. (FIELD is not actually added to the TYPE_FIELDS list here;
843 callers that desire that behavior must manually perform that step.) */
844
845 void
place_field(rli,field)846 place_field (rli, field)
847 record_layout_info rli;
848 tree field;
849 {
850 /* The alignment required for FIELD. */
851 unsigned int desired_align;
852 /* The alignment FIELD would have if we just dropped it into the
853 record as it presently stands. */
854 unsigned int known_align;
855 unsigned int actual_align;
856 /* The type of this field. */
857 tree type = TREE_TYPE (field);
858
859 if (TREE_CODE (field) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
860 return;
861
862 /* If FIELD is static, then treat it like a separate variable, not
863 really like a structure field. If it is a FUNCTION_DECL, it's a
864 method. In both cases, all we do is lay out the decl, and we do
865 it *after* the record is laid out. */
866 if (TREE_CODE (field) == VAR_DECL)
867 {
868 rli->pending_statics = tree_cons (NULL_TREE, field,
869 rli->pending_statics);
870 return;
871 }
872
873 /* Enumerators and enum types which are local to this class need not
874 be laid out. Likewise for initialized constant fields. */
875 else if (TREE_CODE (field) != FIELD_DECL)
876 return;
877
878 /* Unions are laid out very differently than records, so split
879 that code off to another function. */
880 else if (TREE_CODE (rli->t) != RECORD_TYPE)
881 {
882 place_union_field (rli, field);
883 return;
884 }
885
886 /* Work out the known alignment so far. Note that A & (-A) is the
887 value of the least-significant bit in A that is one. */
888 if (! integer_zerop (rli->bitpos))
889 known_align = (tree_low_cst (rli->bitpos, 1)
890 & - tree_low_cst (rli->bitpos, 1));
891 else if (integer_zerop (rli->offset))
892 known_align = BIGGEST_ALIGNMENT;
893 else if (host_integerp (rli->offset, 1))
894 known_align = (BITS_PER_UNIT
895 * (tree_low_cst (rli->offset, 1)
896 & - tree_low_cst (rli->offset, 1)));
897 else
898 known_align = rli->offset_align;
899
900 desired_align = update_alignment_for_field (rli, field, known_align);
901
902 if (warn_packed && DECL_PACKED (field))
903 {
904 if (known_align > TYPE_ALIGN (type))
905 {
906 if (TYPE_ALIGN (type) > desired_align)
907 {
908 if (STRICT_ALIGNMENT)
909 warning_with_decl (field, "packed attribute causes inefficient alignment for `%s'");
910 else
911 warning_with_decl (field, "packed attribute is unnecessary for `%s'");
912 }
913 }
914 else
915 rli->packed_maybe_necessary = 1;
916 }
917
918 /* Does this field automatically have alignment it needs by virtue
919 of the fields that precede it and the record's own alignment? */
920 if (known_align < desired_align)
921 {
922 /* No, we need to skip space before this field.
923 Bump the cumulative size to multiple of field alignment. */
924
925 if (warn_padded)
926 warning_with_decl (field, "padding struct to align `%s'");
927
928 /* If the alignment is still within offset_align, just align
929 the bit position. */
930 if (desired_align < rli->offset_align)
931 rli->bitpos = round_up (rli->bitpos, desired_align);
932 else
933 {
934 /* First adjust OFFSET by the partial bits, then align. */
935 rli->offset
936 = size_binop (PLUS_EXPR, rli->offset,
937 convert (sizetype,
938 size_binop (CEIL_DIV_EXPR, rli->bitpos,
939 bitsize_unit_node)));
940 rli->bitpos = bitsize_zero_node;
941
942 rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
943 }
944
945 if (! TREE_CONSTANT (rli->offset))
946 rli->offset_align = desired_align;
947
948 }
949
950 /* Handle compatibility with PCC. Note that if the record has any
951 variable-sized fields, we need not worry about compatibility. */
952 #ifdef PCC_BITFIELD_TYPE_MATTERS
953 if (PCC_BITFIELD_TYPE_MATTERS
954 && ! (* targetm.ms_bitfield_layout_p) (rli->t)
955 && TREE_CODE (field) == FIELD_DECL
956 && type != error_mark_node
957 && DECL_BIT_FIELD (field)
958 && ! DECL_PACKED (field)
959 && maximum_field_alignment == 0
960 && ! integer_zerop (DECL_SIZE (field))
961 && host_integerp (DECL_SIZE (field), 1)
962 && host_integerp (rli->offset, 1)
963 && host_integerp (TYPE_SIZE (type), 1))
964 {
965 unsigned int type_align = TYPE_ALIGN (type);
966 tree dsize = DECL_SIZE (field);
967 HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
968 HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
969 HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
970
971 #ifdef ADJUST_FIELD_ALIGN
972 if (! TYPE_USER_ALIGN (type))
973 type_align = ADJUST_FIELD_ALIGN (field, type_align);
974 #endif
975
976 /* A bit field may not span more units of alignment of its type
977 than its type itself. Advance to next boundary if necessary. */
978 if ((((offset * BITS_PER_UNIT + bit_offset + field_size +
979 type_align - 1)
980 / type_align)
981 - (offset * BITS_PER_UNIT + bit_offset) / type_align)
982 > tree_low_cst (TYPE_SIZE (type), 1) / type_align)
983 rli->bitpos = round_up (rli->bitpos, type_align);
984
985 TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
986 }
987 #endif
988
989 #ifdef BITFIELD_NBYTES_LIMITED
990 if (BITFIELD_NBYTES_LIMITED
991 && ! (* targetm.ms_bitfield_layout_p) (rli->t)
992 && TREE_CODE (field) == FIELD_DECL
993 && type != error_mark_node
994 && DECL_BIT_FIELD_TYPE (field)
995 && ! DECL_PACKED (field)
996 && ! integer_zerop (DECL_SIZE (field))
997 && host_integerp (DECL_SIZE (field), 1)
998 && host_integerp (rli->offset, 1)
999 && host_integerp (TYPE_SIZE (type), 1))
1000 {
1001 unsigned int type_align = TYPE_ALIGN (type);
1002 tree dsize = DECL_SIZE (field);
1003 HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
1004 HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
1005 HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
1006
1007 #ifdef ADJUST_FIELD_ALIGN
1008 if (! TYPE_USER_ALIGN (type))
1009 type_align = ADJUST_FIELD_ALIGN (field, type_align);
1010 #endif
1011
1012 if (maximum_field_alignment != 0)
1013 type_align = MIN (type_align, maximum_field_alignment);
1014 /* ??? This test is opposite the test in the containing if
1015 statement, so this code is unreachable currently. */
1016 else if (DECL_PACKED (field))
1017 type_align = MIN (type_align, BITS_PER_UNIT);
1018
1019 /* A bit field may not span the unit of alignment of its type.
1020 Advance to next boundary if necessary. */
1021 /* ??? This code should match the code above for the
1022 PCC_BITFIELD_TYPE_MATTERS case. */
1023 if ((offset * BITS_PER_UNIT + bit_offset) / type_align
1024 != ((offset * BITS_PER_UNIT + bit_offset + field_size - 1)
1025 / type_align))
1026 rli->bitpos = round_up (rli->bitpos, type_align);
1027
1028 TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
1029 }
1030 #endif
1031
1032 /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.
1033 A subtlety:
1034 When a bit field is inserted into a packed record, the whole
1035 size of the underlying type is used by one or more same-size
1036 adjacent bitfields. (That is, if its long:3, 32 bits is
1037 used in the record, and any additional adjacent long bitfields are
1038 packed into the same chunk of 32 bits. However, if the size
1039 changes, a new field of that size is allocated.) In an unpacked
1040 record, this is the same as using alignment, but not eqivalent
1041 when packing.
1042
1043 Note: for compatability, we use the type size, not the type alignment
1044 to determine alignment, since that matches the documentation */
1045
1046 if ((* targetm.ms_bitfield_layout_p) (rli->t)
1047 && ((DECL_BIT_FIELD_TYPE (field) && ! DECL_PACKED (field))
1048 || (rli->prev_field && ! DECL_PACKED (rli->prev_field))))
1049 {
1050 /* At this point, either the prior or current are bitfields,
1051 (possibly both), and we're dealing with MS packing. */
1052 tree prev_saved = rli->prev_field;
1053
1054 /* Is the prior field a bitfield? If so, handle "runs" of same
1055 type size fields. */
1056 if (rli->prev_field /* necessarily a bitfield if it exists. */)
1057 {
1058 /* If both are bitfields, nonzero, and the same size, this is
1059 the middle of a run. Zero declared size fields are special
1060 and handled as "end of run". (Note: it's nonzero declared
1061 size, but equal type sizes!) (Since we know that both
1062 the current and previous fields are bitfields by the
1063 time we check it, DECL_SIZE must be present for both.) */
1064 if (DECL_BIT_FIELD_TYPE (field)
1065 && !integer_zerop (DECL_SIZE (field))
1066 && !integer_zerop (DECL_SIZE (rli->prev_field))
1067 && simple_cst_equal (TYPE_SIZE (type),
1068 TYPE_SIZE (TREE_TYPE (rli->prev_field))) )
1069 {
1070 /* We're in the middle of a run of equal type size fields; make
1071 sure we realign if we run out of bits. (Not decl size,
1072 type size!) */
1073 int bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
1074 tree type_size = TYPE_SIZE(TREE_TYPE(rli->prev_field));
1075
1076 if (rli->remaining_in_alignment < bitsize)
1077 {
1078 /* out of bits; bump up to next 'word'. */
1079 rli->offset = DECL_FIELD_OFFSET (rli->prev_field);
1080 rli->bitpos = size_binop (PLUS_EXPR,
1081 type_size,
1082 DECL_FIELD_BIT_OFFSET(rli->prev_field));
1083 rli->prev_field = field;
1084 rli->remaining_in_alignment = TREE_INT_CST_LOW (type_size);
1085 }
1086 rli->remaining_in_alignment -= bitsize;
1087 }
1088 else
1089 {
1090 /* End of a run: if leaving a run of bitfields of the same type
1091 size, we have to "use up" the rest of the bits of the type
1092 size.
1093
1094 Compute the new position as the sum of the size for the prior
1095 type and where we first started working on that type.
1096 Note: since the beginning of the field was aligned then
1097 of course the end will be too. No round needed. */
1098
1099 if (!integer_zerop (DECL_SIZE (rli->prev_field)))
1100 {
1101 tree type_size = TYPE_SIZE(TREE_TYPE(rli->prev_field));
1102 rli->bitpos = size_binop (PLUS_EXPR,
1103 type_size,
1104 DECL_FIELD_BIT_OFFSET(rli->prev_field));
1105 }
1106 else
1107 {
1108 /* We "use up" size zero fields; the code below should behave
1109 as if the prior field was not a bitfield. */
1110 prev_saved = NULL;
1111 }
1112
1113 /* Cause a new bitfield to be captured, either this time (if
1114 currently a bitfield) or next time we see one. */
1115 if (!DECL_BIT_FIELD_TYPE(field)
1116 || integer_zerop (DECL_SIZE (field)))
1117 {
1118 rli->prev_field = NULL;
1119 }
1120 }
1121 normalize_rli (rli);
1122 }
1123
1124 /* If we're starting a new run of same size type bitfields
1125 (or a run of non-bitfields), set up the "first of the run"
1126 fields.
1127
1128 That is, if the current field is not a bitfield, or if there
1129 was a prior bitfield the type sizes differ, or if there wasn't
1130 a prior bitfield the size of the current field is nonzero.
1131
1132 Note: we must be sure to test ONLY the type size if there was
1133 a prior bitfield and ONLY for the current field being zero if
1134 there wasn't. */
1135
1136 if (!DECL_BIT_FIELD_TYPE (field)
1137 || ( prev_saved != NULL
1138 ? !simple_cst_equal (TYPE_SIZE (type),
1139 TYPE_SIZE (TREE_TYPE (prev_saved)))
1140 : !integer_zerop (DECL_SIZE (field)) ))
1141 {
1142 unsigned int type_align = 8; /* Never below 8 for compatability */
1143
1144 /* (When not a bitfield), we could be seeing a flex array (with
1145 no DECL_SIZE). Since we won't be using remaining_in_alignment
1146 until we see a bitfield (and come by here again) we just skip
1147 calculating it. */
1148
1149 if (DECL_SIZE (field) != NULL)
1150 rli->remaining_in_alignment
1151 = TREE_INT_CST_LOW (TYPE_SIZE(TREE_TYPE(field)))
1152 - TREE_INT_CST_LOW (DECL_SIZE (field));
1153
1154 /* Now align (conventionally) for the new type. */
1155 if (!DECL_PACKED(field))
1156 type_align = MAX(TYPE_ALIGN (type), type_align);
1157
1158 if (prev_saved
1159 && DECL_BIT_FIELD_TYPE (prev_saved)
1160 /* If the previous bit-field is zero-sized, we've already
1161 accounted for its alignment needs (or ignored it, if
1162 appropriate) while placing it. */
1163 && ! integer_zerop (DECL_SIZE (prev_saved)))
1164 type_align = MAX (type_align,
1165 TYPE_ALIGN (TREE_TYPE (prev_saved)));
1166
1167 if (maximum_field_alignment != 0)
1168 type_align = MIN (type_align, maximum_field_alignment);
1169
1170 rli->bitpos = round_up (rli->bitpos, type_align);
1171 /* If we really aligned, don't allow subsequent bitfields
1172 to undo that. */
1173 rli->prev_field = NULL;
1174 }
1175 }
1176
1177 /* Offset so far becomes the position of this field after normalizing. */
1178 normalize_rli (rli);
1179 DECL_FIELD_OFFSET (field) = rli->offset;
1180 DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1181 SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1182
1183 /* If this field ended up more aligned than we thought it would be (we
1184 approximate this by seeing if its position changed), lay out the field
1185 again; perhaps we can use an integral mode for it now. */
1186 if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
1187 actual_align = (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1188 & - tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1));
1189 else if (integer_zerop (DECL_FIELD_OFFSET (field)))
1190 actual_align = BIGGEST_ALIGNMENT;
1191 else if (host_integerp (DECL_FIELD_OFFSET (field), 1))
1192 actual_align = (BITS_PER_UNIT
1193 * (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
1194 & - tree_low_cst (DECL_FIELD_OFFSET (field), 1)));
1195 else
1196 actual_align = DECL_OFFSET_ALIGN (field);
1197
1198 if (known_align != actual_align)
1199 layout_decl (field, actual_align);
1200
1201 /* Only the MS bitfields use this. */
1202 if (rli->prev_field == NULL && DECL_BIT_FIELD_TYPE(field))
1203 rli->prev_field = field;
1204
1205 /* Now add size of this field to the size of the record. If the size is
1206 not constant, treat the field as being a multiple of bytes and just
1207 adjust the offset, resetting the bit position. Otherwise, apportion the
1208 size amongst the bit position and offset. First handle the case of an
1209 unspecified size, which can happen when we have an invalid nested struct
1210 definition, such as struct j { struct j { int i; } }. The error message
1211 is printed in finish_struct. */
1212 if (DECL_SIZE (field) == 0)
1213 /* Do nothing. */;
1214 else if (TREE_CODE (DECL_SIZE_UNIT (field)) != INTEGER_CST
1215 || TREE_CONSTANT_OVERFLOW (DECL_SIZE_UNIT (field)))
1216 {
1217 rli->offset
1218 = size_binop (PLUS_EXPR, rli->offset,
1219 convert (sizetype,
1220 size_binop (CEIL_DIV_EXPR, rli->bitpos,
1221 bitsize_unit_node)));
1222 rli->offset
1223 = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
1224 rli->bitpos = bitsize_zero_node;
1225 rli->offset_align = MIN (rli->offset_align, DECL_ALIGN (field));
1226 }
1227 else
1228 {
1229 rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1230 normalize_rli (rli);
1231 }
1232 }
1233
/* Assuming that all the fields have been laid out, this function uses
   RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
   indicated by RLI.  It also emits the -Wpadded and -Wpacked
   diagnostics, which can only be decided once the final size is
   known.  */

static void
finalize_record_size (rli)
     record_layout_info rli;
{
  tree unpadded_size, unpadded_size_unit;

  /* Now we want just byte and bit offsets, so set the offset alignment
     to be a byte and then normalize.  */
  rli->offset_align = BITS_PER_UNIT;
  normalize_rli (rli);

  /* Determine the desired alignment.  Targets may bump it via
     ROUND_TYPE_ALIGN; otherwise the record's alignment is the max of
     what has been accumulated in RLI and any pre-set TYPE_ALIGN.  */
#ifdef ROUND_TYPE_ALIGN
  TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
                                          rli->record_align);
#else
  TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align);
#endif

  /* Compute the size so far.  Be sure to allow for extra bits in the
     size in bytes.  We have guaranteed above that it will be no more
     than a single byte.  */
  unpadded_size = rli_size_so_far (rli);
  unpadded_size_unit = rli_size_unit_so_far (rli);
  if (! integer_zerop (rli->bitpos))
    /* Any leftover bits occupy one more byte.  */
    unpadded_size_unit
      = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);

  /* Record the un-rounded size in the binfo node.  But first we check
     the size of TYPE_BINFO to make sure that BINFO_SIZE is available.  */
  if (TYPE_BINFO (rli->t) && TREE_VEC_LENGTH (TYPE_BINFO (rli->t)) > 6)
    {
      TYPE_BINFO_SIZE (rli->t) = unpadded_size;
      TYPE_BINFO_SIZE_UNIT (rli->t) = unpadded_size_unit;
    }

  /* Round the size up to be a multiple of the required alignment.  */
#ifdef ROUND_TYPE_SIZE
  TYPE_SIZE (rli->t) = ROUND_TYPE_SIZE (rli->t, unpadded_size,
                                        TYPE_ALIGN (rli->t));
  TYPE_SIZE_UNIT (rli->t)
    = ROUND_TYPE_SIZE_UNIT (rli->t, unpadded_size_unit,
                            TYPE_ALIGN (rli->t) / BITS_PER_UNIT);
#else
  TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
  TYPE_SIZE_UNIT (rli->t) = round_up (unpadded_size_unit,
                                      TYPE_ALIGN (rli->t) / BITS_PER_UNIT);
#endif

  /* -Wpadded: warn if rounding to the alignment grew the size.  Only
     meaningful when the unpadded size is a compile-time constant.  */
  if (warn_padded && TREE_CONSTANT (unpadded_size)
      && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
    warning ("padding struct size to alignment boundary");

  /* -Wpacked: if this record was marked packed but packing made no
     difference, tell the user the attribute was unneeded (or harmful
     on strict-alignment targets).  */
  if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
      && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
      && TREE_CONSTANT (unpadded_size))
    {
      tree unpacked_size;

      /* Recompute what the alignment would have been without packing.  */
#ifdef ROUND_TYPE_ALIGN
      rli->unpacked_align
        = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
#else
      rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
#endif

      /* ... and the size the record would have had unpacked.  */
#ifdef ROUND_TYPE_SIZE
      unpacked_size = ROUND_TYPE_SIZE (rli->t, TYPE_SIZE (rli->t),
                                       rli->unpacked_align);
#else
      unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
#endif

      /* Same size either way: packing accomplished nothing.  Clear the
         packed flag and warn, naming the type if we can.  */
      if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
        {
          TYPE_PACKED (rli->t) = 0;

          if (TYPE_NAME (rli->t))
            {
              const char *name;

              if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
                name = IDENTIFIER_POINTER (TYPE_NAME (rli->t));
              else
                name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (rli->t)));

              if (STRICT_ALIGNMENT)
                warning ("packed attribute causes inefficient alignment for `%s'", name);
              else
                warning ("packed attribute is unnecessary for `%s'", name);
            }
          else
            {
              if (STRICT_ALIGNMENT)
                warning ("packed attribute causes inefficient alignment");
              else
                warning ("packed attribute is unnecessary");
            }
        }
    }
}
1339
/* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE).

   Most records get BLKmode, but a record whose size and layout allow
   it to live in a register is given a scalar integer mode (or the
   mode of its single field) so the optimizers can keep it out of
   memory.  TYPE_SIZE must already be set.  */

void
compute_record_mode (type)
     tree type;
{
  tree field;
  enum machine_mode mode = VOIDmode;

  /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
     However, if possible, we use a mode that fits in a register
     instead, in order to allow for better optimization down the
     line.  */
  TYPE_MODE (type) = BLKmode;

  /* A record of variable (or non-host-representable) size can never
     go in a register.  */
  if (! host_integerp (TYPE_SIZE (type), 1))
    return;

  /* A record which has any BLKmode members must itself be
     BLKmode; it can't go in a register.  Unless the member is
     BLKmode only because it isn't aligned.  */
  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    {
      unsigned HOST_WIDE_INT bitpos;

      if (TREE_CODE (field) != FIELD_DECL)
        continue;

      /* Give up (leaving BLKmode) on erroneous fields, genuinely
         BLKmode members, and fields whose position or size is not a
         known host integer.  */
      if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
          || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
              && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field)))
          || ! host_integerp (bit_position (field), 1)
          || DECL_SIZE (field) == 0
          || ! host_integerp (DECL_SIZE (field), 1))
        return;

      bitpos = int_bit_position (field);

      /* Must be BLKmode if any field crosses a word boundary,
         since extract_bit_field can't handle that in registers.  */
      if (bitpos / BITS_PER_WORD
          != ((tree_low_cst (DECL_SIZE (field), 1) + bitpos - 1)
              / BITS_PER_WORD)
          /* But there is no problem if the field is entire words.  */
          && tree_low_cst (DECL_SIZE (field), 1) % BITS_PER_WORD != 0)
        return;

      /* If this field is the whole struct, remember its mode so
         that, say, we can put a double in a class into a DF
         register instead of forcing it to live in the stack.  */
      if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field)))
        mode = DECL_MODE (field);

#ifdef MEMBER_TYPE_FORCES_BLK
      /* With some targets, eg. c4x, it is sub-optimal
         to access an aligned BLKmode structure as a scalar.  */

      if (MEMBER_TYPE_FORCES_BLK (field, mode))
        return;
#endif /* MEMBER_TYPE_FORCES_BLK */
    }

  /* If we only have one real field; use its mode.  This only applies to
     RECORD_TYPE.  This does not apply to unions.  */
  if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode)
    TYPE_MODE (type) = mode;
  else
    TYPE_MODE (type) = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);

  /* If structure's known alignment is less than what the scalar
     mode would need, and it matters, then stick with BLKmode.  */
  if (TYPE_MODE (type) != BLKmode
      && STRICT_ALIGNMENT
      && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
            || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type))))
    {
      /* If this is the only reason this type is BLKmode, then
         don't force containing types to be BLKmode.  */
      TYPE_NO_FORCE_BLK (type) = 1;
      TYPE_MODE (type) = BLKmode;
    }
}
1422
/* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
   out.

   This applies the mode-based alignment adjustment, derives
   TYPE_SIZE_UNIT if it was not already computed, rounds both sizes up
   to the final alignment, defers evaluation of non-constant sizes,
   and finally copies the layout into every variant of the type.  */

static void
finalize_type_size (type)
     tree type;
{
  /* Normally, use the alignment corresponding to the mode chosen.
     However, where strict alignment is not required, avoid
     over-aligning structures, since most compilers do not do this
     alignment.  */

  if (TYPE_MODE (type) != BLKmode && TYPE_MODE (type) != VOIDmode
      && (STRICT_ALIGNMENT
          || (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
              && TREE_CODE (type) != QUAL_UNION_TYPE
              && TREE_CODE (type) != ARRAY_TYPE)))
    {
      TYPE_ALIGN (type) = GET_MODE_ALIGNMENT (TYPE_MODE (type));
      TYPE_USER_ALIGN (type) = 0;
    }

  /* Do machine-dependent extra alignment.  */
#ifdef ROUND_TYPE_ALIGN
  TYPE_ALIGN (type)
    = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT);
#endif

  /* If we failed to find a simple way to calculate the unit size
     of the type, find it by division.  */
  if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
    /* TYPE_SIZE (type) is computed in bitsizetype.  After the division, the
       result will fit in sizetype.  We will get more efficient code using
       sizetype, so we force a conversion.  */
    TYPE_SIZE_UNIT (type)
      = convert (sizetype,
                 size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
                             bitsize_unit_node));

  /* Round the bit and byte sizes up to the alignment boundary.  */
  if (TYPE_SIZE (type) != 0)
    {
#ifdef ROUND_TYPE_SIZE
      TYPE_SIZE (type)
        = ROUND_TYPE_SIZE (type, TYPE_SIZE (type), TYPE_ALIGN (type));
      TYPE_SIZE_UNIT (type)
        = ROUND_TYPE_SIZE_UNIT (type, TYPE_SIZE_UNIT (type),
                                TYPE_ALIGN (type) / BITS_PER_UNIT);
#else
      TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
      TYPE_SIZE_UNIT (type)
        = round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN (type) / BITS_PER_UNIT);
#endif
    }

  /* Evaluate nonconstant sizes only once, either now or as soon as safe.  */
  if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
  if (TYPE_SIZE_UNIT (type) != 0
      && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
    TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));

  /* Also layout any other variants of the type.  */
  if (TYPE_NEXT_VARIANT (type)
      || type != TYPE_MAIN_VARIANT (type))
    {
      tree variant;
      /* Record layout info of this variant.  */
      tree size = TYPE_SIZE (type);
      tree size_unit = TYPE_SIZE_UNIT (type);
      unsigned int align = TYPE_ALIGN (type);
      unsigned int user_align = TYPE_USER_ALIGN (type);
      enum machine_mode mode = TYPE_MODE (type);

      /* Copy it into all variants.  Note we start from the main
         variant, so TYPE itself is updated again too (harmless).  */
      for (variant = TYPE_MAIN_VARIANT (type);
           variant != 0;
           variant = TYPE_NEXT_VARIANT (variant))
        {
          TYPE_SIZE (variant) = size;
          TYPE_SIZE_UNIT (variant) = size_unit;
          TYPE_ALIGN (variant) = align;
          TYPE_USER_ALIGN (variant) = user_align;
          TYPE_MODE (variant) = mode;
        }
    }
}
1509
1510 /* Do all of the work required to layout the type indicated by RLI,
1511 once the fields have been laid out. This function will call `free'
1512 for RLI, unless FREE_P is false. Passing a value other than false
1513 for FREE_P is bad practice; this option only exists to support the
1514 G++ 3.2 ABI. */
1515
1516 void
finish_record_layout(rli,free_p)1517 finish_record_layout (rli, free_p)
1518 record_layout_info rli;
1519 int free_p;
1520 {
1521 /* Compute the final size. */
1522 finalize_record_size (rli);
1523
1524 /* Compute the TYPE_MODE for the record. */
1525 compute_record_mode (rli->t);
1526
1527 /* Perform any last tweaks to the TYPE_SIZE, etc. */
1528 finalize_type_size (rli->t);
1529
1530 /* Lay out any static members. This is done now because their type
1531 may use the record's type. */
1532 while (rli->pending_statics)
1533 {
1534 layout_decl (TREE_VALUE (rli->pending_statics), 0);
1535 rli->pending_statics = TREE_CHAIN (rli->pending_statics);
1536 }
1537
1538 /* Clean up. */
1539 if (free_p)
1540 free (rli);
1541 }
1542
/* Calculate the mode, size, and alignment for TYPE.
   For an array type, calculate the element separation as well.
   Record TYPE on the chain of permanent or temporary types
   so that dbxout will find out about it.

   TYPE_SIZE of a type is nonzero if the type has been laid out already.
   layout_type does nothing on such a type.

   If the type is incomplete, its TYPE_SIZE remains zero.  */

void
layout_type (type)
     tree type;
{
  if (type == 0)
    abort ();

  /* Do nothing if type has been laid out before.  */
  if (TYPE_SIZE (type))
    return;

  switch (TREE_CODE (type))
    {
    case LANG_TYPE:
      /* This kind of type is the responsibility
         of the language-specific code.  */
      abort ();

    case BOOLEAN_TYPE:  /* Used for Java, Pascal, and Chill.  */
      if (TYPE_PRECISION (type) == 0)
        TYPE_PRECISION (type) = 1; /* default to one byte/boolean.  */

      /* ... fall through ...  */

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case CHAR_TYPE:
      /* A constant non-negative lower bound means the whole range is
         non-negative, so treat the type as unsigned.  */
      if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
          && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
        TREE_UNSIGNED (type) = 1;

      TYPE_MODE (type) = smallest_mode_for_size (TYPE_PRECISION (type),
                                                 MODE_INT);
      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
      break;

    case REAL_TYPE:
      TYPE_MODE (type) = mode_for_size (TYPE_PRECISION (type), MODE_FLOAT, 0);
      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
      break;

    case COMPLEX_TYPE:
      TREE_UNSIGNED (type) = TREE_UNSIGNED (TREE_TYPE (type));
      /* A complex value is two components of the element type.  */
      TYPE_MODE (type)
        = mode_for_size (2 * TYPE_PRECISION (TREE_TYPE (type)),
                         (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
                          ? MODE_COMPLEX_INT : MODE_COMPLEX_FLOAT),
                         0);
      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
      break;

    case VECTOR_TYPE:
      {
        tree subtype;

        /* NOTE(review): TYPE_MODE is read here but never set in this
           case, so the vector mode is presumably assigned by the
           front end before layout — confirm against callers.  */
        subtype = TREE_TYPE (type);
        TREE_UNSIGNED (type) = TREE_UNSIGNED (subtype);
        TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
        TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
      }
      break;

    case VOID_TYPE:
      /* This is an incomplete type and so doesn't have a size.  */
      TYPE_ALIGN (type) = 1;
      TYPE_USER_ALIGN (type) = 0;
      TYPE_MODE (type) = VOIDmode;
      break;

    case OFFSET_TYPE:
      TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
      TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE / BITS_PER_UNIT);
      /* A pointer might be MODE_PARTIAL_INT,
         but ptrdiff_t must be integral.  */
      TYPE_MODE (type) = mode_for_size (POINTER_SIZE, MODE_INT, 0);
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      /* Laid out as two pointers' worth of integer.  */
      TYPE_MODE (type) = mode_for_size (2 * POINTER_SIZE, MODE_INT, 0);
      TYPE_SIZE (type) = bitsize_int (2 * POINTER_SIZE);
      TYPE_SIZE_UNIT (type) = size_int ((2 * POINTER_SIZE) / BITS_PER_UNIT);
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      {
        /* Internal reference types may be full Pmode width rather
           than POINTER_SIZE (see reference_types_internal).  */
        int nbits = ((TREE_CODE (type) == REFERENCE_TYPE
                      && reference_types_internal)
                     ? GET_MODE_BITSIZE (Pmode) : POINTER_SIZE);

        TYPE_MODE (type) = nbits == POINTER_SIZE ? ptr_mode : Pmode;
        TYPE_SIZE (type) = bitsize_int (nbits);
        TYPE_SIZE_UNIT (type) = size_int (nbits / BITS_PER_UNIT);
        TREE_UNSIGNED (type) = 1;
        TYPE_PRECISION (type) = nbits;
      }
      break;

    case ARRAY_TYPE:
      {
        tree index = TYPE_DOMAIN (type);
        tree element = TREE_TYPE (type);

        build_pointer_type (element);

        /* We need to know both bounds in order to compute the size.  */
        if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
            && TYPE_SIZE (element))
          {
            tree ub = TYPE_MAX_VALUE (index);
            tree lb = TYPE_MIN_VALUE (index);
            tree length;
            tree element_size;

            /* The initial subtraction should happen in the original type so
               that (possible) negative values are handled appropriately.  */
            length = size_binop (PLUS_EXPR, size_one_node,
                                 convert (sizetype,
                                          fold (build (MINUS_EXPR,
                                                       TREE_TYPE (lb),
                                                       ub, lb))));

            /* Special handling for arrays of bits (for Chill).  A packed
               array of two-valued elements is stored one bit per
               element.  */
            element_size = TYPE_SIZE (element);
            if (TYPE_PACKED (type) && INTEGRAL_TYPE_P (element)
                && (integer_zerop (TYPE_MAX_VALUE (element))
                    || integer_onep (TYPE_MAX_VALUE (element)))
                && host_integerp (TYPE_MIN_VALUE (element), 1))
              {
                HOST_WIDE_INT maxvalue
                  = tree_low_cst (TYPE_MAX_VALUE (element), 1);
                HOST_WIDE_INT minvalue
                  = tree_low_cst (TYPE_MIN_VALUE (element), 1);

                if (maxvalue - minvalue == 1
                    && (maxvalue == 1 || maxvalue == 0))
                  element_size = integer_one_node;
              }

            TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
                                           convert (bitsizetype, length));

            /* If we know the size of the element, calculate the total
               size directly, rather than do some division thing below.
               This optimization helps Fortran assumed-size arrays
               (where the size of the array is determined at runtime)
               substantially.
               Note that we can't do this in the case where the size of
               the elements is one bit since TYPE_SIZE_UNIT cannot be
               set correctly in that case.  */
            if (TYPE_SIZE_UNIT (element) != 0 && ! integer_onep (element_size))
              TYPE_SIZE_UNIT (type)
                = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
          }

        /* Now round the alignment and size,
           using machine-dependent criteria if any.  */

#ifdef ROUND_TYPE_ALIGN
        TYPE_ALIGN (type)
          = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (element), BITS_PER_UNIT);
#else
        TYPE_ALIGN (type) = MAX (TYPE_ALIGN (element), BITS_PER_UNIT);
#endif
        TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);

#ifdef ROUND_TYPE_SIZE
        if (TYPE_SIZE (type) != 0)
          {
            tree tmp
              = ROUND_TYPE_SIZE (type, TYPE_SIZE (type), TYPE_ALIGN (type));

            /* If the rounding changed the size of the type, remove any
               pre-calculated TYPE_SIZE_UNIT.  */
            if (simple_cst_equal (TYPE_SIZE (type), tmp) != 1)
              TYPE_SIZE_UNIT (type) = NULL;

            TYPE_SIZE (type) = tmp;
          }
#endif

        TYPE_MODE (type) = BLKmode;
        if (TYPE_SIZE (type) != 0
#ifdef MEMBER_TYPE_FORCES_BLK
            && ! MEMBER_TYPE_FORCES_BLK (type, VOIDmode)
#endif
            /* BLKmode elements force BLKmode aggregate;
               else extract/store fields may lose.  */
            && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
                || TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
          {
            /* One-element arrays get the component type's mode.  */
            if (simple_cst_equal (TYPE_SIZE (type),
                                  TYPE_SIZE (TREE_TYPE (type))))
              TYPE_MODE (type) = TYPE_MODE (TREE_TYPE (type));
            else
              TYPE_MODE (type)
                = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);

            /* As in compute_record_mode: if the known alignment is
               insufficient for the chosen mode on a strict-alignment
               target, fall back to BLKmode.  (NOTE(review): the
               second `TYPE_MODE (type) != BLKmode' test below is
               redundant with the first.)  */
            if (TYPE_MODE (type) != BLKmode
                && STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
                && TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type))
                && TYPE_MODE (type) != BLKmode)
              {
                TYPE_NO_FORCE_BLK (type) = 1;
                TYPE_MODE (type) = BLKmode;
              }
          }
        break;
      }

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
        tree field;
        record_layout_info rli;

        /* Initialize the layout information.  */
        rli = start_record_layout (type);

        /* If this is a QUAL_UNION_TYPE, we want to process the fields
           in the reverse order in building the COND_EXPR that denotes
           its size.  We reverse them again later.  */
        if (TREE_CODE (type) == QUAL_UNION_TYPE)
          TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));

        /* Place all the fields.  */
        for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
          place_field (rli, field);

        if (TREE_CODE (type) == QUAL_UNION_TYPE)
          TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));

        /* Let the front end adjust the layout, if it wants to.  */
        if (lang_adjust_rli)
          (*lang_adjust_rli) (rli);

        /* Finish laying out the record.  */
        finish_record_layout (rli, /*free_p=*/true);
      }
      break;

    case SET_TYPE:  /* Used by Chill and Pascal.  */
      if (TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST
          || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
        abort ();
      else
        {
#ifndef SET_WORD_SIZE
#define SET_WORD_SIZE BITS_PER_WORD
#endif
          /* One bit per element of the domain, rounded up to whole
             alignment units (set_alignment or SET_WORD_SIZE).  */
          unsigned int alignment
            = set_alignment ? set_alignment : SET_WORD_SIZE;
          int size_in_bits
            = (TREE_INT_CST_LOW (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
               - TREE_INT_CST_LOW (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) + 1);
          int rounded_size
            = ((size_in_bits + alignment - 1) / alignment) * alignment;

          if (rounded_size > (int) alignment)
            TYPE_MODE (type) = BLKmode;
          else
            TYPE_MODE (type) = mode_for_size (alignment, MODE_INT, 1);

          TYPE_SIZE (type) = bitsize_int (rounded_size);
          TYPE_SIZE_UNIT (type) = size_int (rounded_size / BITS_PER_UNIT);
          TYPE_ALIGN (type) = alignment;
          TYPE_USER_ALIGN (type) = 0;
          TYPE_PRECISION (type) = size_in_bits;
        }
      break;

    case FILE_TYPE:
      /* The size may vary in different languages, so the language front end
         should fill in the size.  */
      TYPE_ALIGN (type) = BIGGEST_ALIGNMENT;
      TYPE_USER_ALIGN (type) = 0;
      TYPE_MODE (type) = BLKmode;
      break;

    default:
      abort ();
    }

  /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE.  For
     records and unions, finish_record_layout already called this
     function.  */
  if (TREE_CODE (type) != RECORD_TYPE
      && TREE_CODE (type) != UNION_TYPE
      && TREE_CODE (type) != QUAL_UNION_TYPE)
    finalize_type_size (type);

  /* If this type is created before sizetype has been permanently set,
     record it so set_sizetype can fix it up.  */
  if (! sizetype_set)
    early_type_list = tree_cons (NULL_TREE, type, early_type_list);

  /* If an alias set has been set for this aggregate when it was incomplete,
     force it into alias set 0.
     This is too conservative, but we cannot call record_component_aliases
     here because some frontends still change the aggregates after
     layout_type.  */
  if (AGGREGATE_TYPE_P (type) && TYPE_ALIAS_SET_KNOWN_P (type))
    TYPE_ALIAS_SET (type) = 0;
}
1862
1863 /* Create and return a type for signed integers of PRECISION bits. */
1864
1865 tree
make_signed_type(precision)1866 make_signed_type (precision)
1867 int precision;
1868 {
1869 tree type = make_node (INTEGER_TYPE);
1870
1871 TYPE_PRECISION (type) = precision;
1872
1873 fixup_signed_type (type);
1874 return type;
1875 }
1876
1877 /* Create and return a type for unsigned integers of PRECISION bits. */
1878
1879 tree
make_unsigned_type(precision)1880 make_unsigned_type (precision)
1881 int precision;
1882 {
1883 tree type = make_node (INTEGER_TYPE);
1884
1885 TYPE_PRECISION (type) = precision;
1886
1887 fixup_unsigned_type (type);
1888 return type;
1889 }
1890
/* Initialize sizetype and bitsizetype to a reasonable and temporary
   value to enable integer types to be created.  The real sizetype is
   installed later by set_sizetype, which also fixes up any types laid
   out in the meantime (see early_type_list).  */

void
initialize_sizetypes ()
{
  tree t = make_node (INTEGER_TYPE);

  /* Set this so we do something reasonable for the build_int_2 calls
     below.  */
  integer_type_node = t;

  /* Lay the node out by hand as an SImode-sized unsigned type;
     none of the usual layout machinery is usable this early.  */
  TYPE_MODE (t) = SImode;
  TYPE_ALIGN (t) = GET_MODE_ALIGNMENT (SImode);
  TYPE_USER_ALIGN (t) = 0;
  TYPE_SIZE (t) = build_int_2 (GET_MODE_BITSIZE (SImode), 0);
  TYPE_SIZE_UNIT (t) = build_int_2 (GET_MODE_SIZE (SImode), 0);
  TREE_UNSIGNED (t) = 1;
  TYPE_PRECISION (t) = GET_MODE_BITSIZE (SImode);
  TYPE_MIN_VALUE (t) = build_int_2 (0, 0);
  TYPE_IS_SIZETYPE (t) = 1;

  /* 1000 avoids problems with possible overflow and is certainly
     larger than any size value we'd want to be storing.  */
  TYPE_MAX_VALUE (t) = build_int_2 (1000, 0);

  /* These two must be different nodes because of the caching done in
     size_int_wide.  */
  sizetype = t;
  bitsizetype = copy_node (t);

  /* Undo the temporary assignment made above.  */
  integer_type_node = 0;
}
1923
/* Set sizetype to TYPE, and initialize *sizetype accordingly.
   Also update the type of any standard type's sizes made so far.
   May be called only once; aborts on a second call.  */

void
set_sizetype (type)
     tree type;
{
  int oprecision = TYPE_PRECISION (type);
  /* The *bitsizetype types use a precision that avoids overflows when
     calculating signed sizes / offsets in bits.  However, when
     cross-compiling from a 32 bit to a 64 bit host, we are limited to 64 bit
     precision.  */
  int precision = MIN (oprecision + BITS_PER_UNIT_LOG + 1,
		       2 * HOST_BITS_PER_WIDE_INT);
  unsigned int i;
  tree t;

  /* Once the permanent sizetype nodes exist, other types refer to
     them; replacing them again would corrupt those references.  */
  if (sizetype_set)
    abort ();

  /* Make copies of nodes since we'll be setting TYPE_IS_SIZETYPE.  */
  sizetype = copy_node (type);
  TYPE_DOMAIN (sizetype) = type;
  TYPE_IS_SIZETYPE (sizetype) = 1;
  bitsizetype = make_node (INTEGER_TYPE);
  /* Temporarily reuse TYPE's name; the real name is assigned below,
     after the fixup/make_*_type calls are done with the node.  */
  TYPE_NAME (bitsizetype) = TYPE_NAME (type);
  TYPE_PRECISION (bitsizetype) = precision;
  TYPE_IS_SIZETYPE (bitsizetype) = 1;

  /* Give bitsizetype min/max values matching TYPE's signedness.  */
  if (TREE_UNSIGNED (type))
    fixup_unsigned_type (bitsizetype);
  else
    fixup_signed_type (bitsizetype);

  layout_type (bitsizetype);

  /* Fill in the signed and unsigned variants: the pair matching TYPE's
     signedness shares the nodes built above, the other pair is built
     fresh.  */
  if (TREE_UNSIGNED (type))
    {
      usizetype = sizetype;
      ubitsizetype = bitsizetype;
      ssizetype = copy_node (make_signed_type (oprecision));
      sbitsizetype = copy_node (make_signed_type (precision));
    }
  else
    {
      ssizetype = sizetype;
      sbitsizetype = bitsizetype;
      usizetype = copy_node (make_unsigned_type (oprecision));
      ubitsizetype = copy_node (make_unsigned_type (precision));
    }

  TYPE_NAME (bitsizetype) = get_identifier ("bit_size_type");

  /* Mark each entry as a sizetype, make it its own main variant with
     no other variants, and clear any pointer/reference types to it.
     (The sizetype/bitsizetype/... names above presumably alias entries
     of sizetype_tab -- see its declaration near the top of the file.)  */
  for (i = 0; i < ARRAY_SIZE (sizetype_tab); i++)
    {
      TYPE_IS_SIZETYPE (sizetype_tab[i]) = 1;
      TYPE_MAIN_VARIANT (sizetype_tab[i]) = sizetype_tab[i];
      TYPE_NEXT_VARIANT (sizetype_tab[i]) = 0;
      TYPE_POINTER_TO (sizetype_tab[i]) = 0;
      TYPE_REFERENCE_TO (sizetype_tab[i]) = 0;
    }

  /* Go down each of the types we already made and set the proper type
     for the sizes in them.  */
  for (t = early_type_list; t != 0; t = TREE_CHAIN (t))
    {
      /* Only INTEGER_TYPEs are expected to have been laid out this
	 early (layout_type queued them on early_type_list).  */
      if (TREE_CODE (TREE_VALUE (t)) != INTEGER_TYPE)
	abort ();

      TREE_TYPE (TYPE_SIZE (TREE_VALUE (t))) = bitsizetype;
      TREE_TYPE (TYPE_SIZE_UNIT (TREE_VALUE (t))) = sizetype;
    }

  early_type_list = 0;
  sizetype_set = 1;
}
2001
2002 /* Set the extreme values of TYPE based on its precision in bits,
2003 then lay it out. Used when make_signed_type won't do
2004 because the tree code is not INTEGER_TYPE.
2005 E.g. for Pascal, when the -fsigned-char option is given. */
2006
2007 void
fixup_signed_type(type)2008 fixup_signed_type (type)
2009 tree type;
2010 {
2011 int precision = TYPE_PRECISION (type);
2012
2013 /* We can not represent properly constants greater then
2014 2 * HOST_BITS_PER_WIDE_INT, still we need the types
2015 as they are used by i386 vector extensions and friends. */
2016 if (precision > HOST_BITS_PER_WIDE_INT * 2)
2017 precision = HOST_BITS_PER_WIDE_INT * 2;
2018
2019 TYPE_MIN_VALUE (type)
2020 = build_int_2 ((precision - HOST_BITS_PER_WIDE_INT > 0
2021 ? 0 : (HOST_WIDE_INT) (-1) << (precision - 1)),
2022 (((HOST_WIDE_INT) (-1)
2023 << (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
2024 ? precision - HOST_BITS_PER_WIDE_INT - 1
2025 : 0))));
2026 TYPE_MAX_VALUE (type)
2027 = build_int_2 ((precision - HOST_BITS_PER_WIDE_INT > 0
2028 ? -1 : ((HOST_WIDE_INT) 1 << (precision - 1)) - 1),
2029 (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
2030 ? (((HOST_WIDE_INT) 1
2031 << (precision - HOST_BITS_PER_WIDE_INT - 1))) - 1
2032 : 0));
2033
2034 TREE_TYPE (TYPE_MIN_VALUE (type)) = type;
2035 TREE_TYPE (TYPE_MAX_VALUE (type)) = type;
2036
2037 /* Lay out the type: set its alignment, size, etc. */
2038 layout_type (type);
2039 }
2040
2041 /* Set the extreme values of TYPE based on its precision in bits,
2042 then lay it out. This is used both in `make_unsigned_type'
2043 and for enumeral types. */
2044
2045 void
fixup_unsigned_type(type)2046 fixup_unsigned_type (type)
2047 tree type;
2048 {
2049 int precision = TYPE_PRECISION (type);
2050
2051 /* We can not represent properly constants greater then
2052 2 * HOST_BITS_PER_WIDE_INT, still we need the types
2053 as they are used by i386 vector extensions and friends. */
2054 if (precision > HOST_BITS_PER_WIDE_INT * 2)
2055 precision = HOST_BITS_PER_WIDE_INT * 2;
2056
2057 TYPE_MIN_VALUE (type) = build_int_2 (0, 0);
2058 TYPE_MAX_VALUE (type)
2059 = build_int_2 (precision - HOST_BITS_PER_WIDE_INT >= 0
2060 ? -1 : ((HOST_WIDE_INT) 1 << precision) - 1,
2061 precision - HOST_BITS_PER_WIDE_INT > 0
2062 ? ((unsigned HOST_WIDE_INT) ~0
2063 >> (HOST_BITS_PER_WIDE_INT
2064 - (precision - HOST_BITS_PER_WIDE_INT)))
2065 : 0);
2066 TREE_TYPE (TYPE_MIN_VALUE (type)) = type;
2067 TREE_TYPE (TYPE_MAX_VALUE (type)) = type;
2068
2069 /* Lay out the type: set its alignment, size, etc. */
2070 layout_type (type);
2071 }
2072
2073 /* Find the best machine mode to use when referencing a bit field of length
2074 BITSIZE bits starting at BITPOS.
2075
2076 The underlying object is known to be aligned to a boundary of ALIGN bits.
2077 If LARGEST_MODE is not VOIDmode, it means that we should not use a mode
2078 larger than LARGEST_MODE (usually SImode).
2079
2080 If no mode meets all these conditions, we return VOIDmode. Otherwise, if
2081 VOLATILEP is true or SLOW_BYTE_ACCESS is false, we return the smallest
2082 mode meeting these conditions.
2083
2084 Otherwise (VOLATILEP is false and SLOW_BYTE_ACCESS is true), we return
2085 the largest mode (but a mode no wider than UNITS_PER_WORD) that meets
2086 all the conditions. */
2087
2088 enum machine_mode
get_best_mode(bitsize,bitpos,align,largest_mode,volatilep)2089 get_best_mode (bitsize, bitpos, align, largest_mode, volatilep)
2090 int bitsize, bitpos;
2091 unsigned int align;
2092 enum machine_mode largest_mode;
2093 int volatilep;
2094 {
2095 enum machine_mode mode;
2096 unsigned int unit = 0;
2097
2098 /* Find the narrowest integer mode that contains the bit field. */
2099 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2100 mode = GET_MODE_WIDER_MODE (mode))
2101 {
2102 unit = GET_MODE_BITSIZE (mode);
2103 if ((bitpos % unit) + bitsize <= unit)
2104 break;
2105 }
2106
2107 if (mode == VOIDmode
2108 /* It is tempting to omit the following line
2109 if STRICT_ALIGNMENT is true.
2110 But that is incorrect, since if the bitfield uses part of 3 bytes
2111 and we use a 4-byte mode, we could get a spurious segv
2112 if the extra 4th byte is past the end of memory.
2113 (Though at least one Unix compiler ignores this problem:
2114 that on the Sequent 386 machine. */
2115 || MIN (unit, BIGGEST_ALIGNMENT) > align
2116 || (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode)))
2117 return VOIDmode;
2118
2119 if (SLOW_BYTE_ACCESS && ! volatilep)
2120 {
2121 enum machine_mode wide_mode = VOIDmode, tmode;
2122
2123 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode;
2124 tmode = GET_MODE_WIDER_MODE (tmode))
2125 {
2126 unit = GET_MODE_BITSIZE (tmode);
2127 if (bitpos / unit == (bitpos + bitsize - 1) / unit
2128 && unit <= BITS_PER_WORD
2129 && unit <= MIN (align, BIGGEST_ALIGNMENT)
2130 && (largest_mode == VOIDmode
2131 || unit <= GET_MODE_BITSIZE (largest_mode)))
2132 wide_mode = tmode;
2133 }
2134
2135 if (wide_mode != VOIDmode)
2136 return wide_mode;
2137 }
2138
2139 return mode;
2140 }
2141
2142 #include "gt-stor-layout.h"
2143