/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                               U T I L S 2                                *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2019, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have received a copy of the GNU General   *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "memmodel.h"
#include "tm.h"
#include "vec.h"
#include "alias.h"
#include "tree.h"
#include "inchash.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "varasm.h"
#include "flags.h"
#include "toplev.h"
#include "ggc.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "snames.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"
/* Return the base type of TYPE.  */

tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
         && (TREE_CODE (type) == INTEGER_TYPE
             || TREE_CODE (type) == REAL_TYPE))
    type = TREE_TYPE (type);

  return type;
}

/* EXP is a GCC tree representing an address.  See if we can find how strictly
   the object at this address is aligned and, if so, return the alignment of
   the object in bits.  Otherwise return 0.  */

unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
         of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
         minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case POINTER_PLUS_EXPR:
      /* If this is the pattern built for aligning types, decode it.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)) == NEGATE_EXPR)
        {
          tree op = TREE_OPERAND (TREE_OPERAND (exp, 1), 1);
          return
            known_alignment (fold_build1 (BIT_NOT_EXPR, TREE_TYPE (op), op));
        }
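
      /* The pattern matched above is presumably of the form
         PTR + (-PTR & (ALIGN - 1)), i.e. PTR rounded up to ALIGN: the
         operand fetched is the mask ALIGN - 1, whose bitwise complement
         has exactly ALIGN as its lowest set bit.  */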

      /* If we don't know the alignment of the offset, we assume that
         of the base.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smaller one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
        /* The first part of this represents the lowest bit in the constant,
           but it is originally in bytes, not bits.  */
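        /* For example, c = 24 = 0b11000 gives c & -c = 8, i.e. an address
           that is a multiple of 8 bytes, hence 64-bit alignment.  */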
        this_alignment = (c & -c) * BITS_PER_UNIT;
      }
      break;

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
         use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0)
        this_alignment = rhs;
      else if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
         operands.  We typically get here for a complex lhs and a constant
         negative power of two on the rhs to force an explicit alignment, so
         don't bother looking at the lhs.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case ADDR_EXPR:
      this_alignment = expr_align (TREE_OPERAND (exp, 0));
      break;

    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (exp);
        if (fndecl == malloc_decl || fndecl == realloc_decl)
          return get_target_system_allocator_alignment () * BITS_PER_UNIT;

        tree t = maybe_inline_call_in_expr (exp);
        if (t)
          return known_alignment (t);
      }

      /* ... fall through ... */

    default:
      /* For other pointer expressions, we assume that the pointed-to object
         is at least as aligned as the pointed-to type.  Beware that we can
         have a dummy type here (e.g. a Taft Amendment type), for which the
         alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
          && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp)))
          && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (exp))))
        this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      else
        this_alignment = 0;
      break;
    }

  return this_alignment;
}

/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left hand side operand, and T2 for the right hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */

static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead here with types of different
     sizes even when both are constant (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  const bool variable_record_on_lhs
    = (TREE_CODE (t1) == RECORD_TYPE
       && TREE_CODE (t2) == RECORD_TYPE
       && get_variant_part (t1)
       && !get_variant_part (t2));

  const bool variable_array_on_lhs
    = (TREE_CODE (t1) == ARRAY_TYPE
       && TREE_CODE (t2) == ARRAY_TYPE
       && !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)))
       && TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t2))));

  /* We must avoid writing more than what the target can hold if this is for
     an assignment, and the case of tagged types is handled in build_binary_op,
     so we use the lhs type if it is known to be smaller or of constant size
     and the rhs type is not, whatever the modes.  We also force t1 in case of
     constant size equality to minimize occurrences of view conversions on the
     lhs of an assignment, except for the case of types with a variable part
     on the lhs but not on the rhs, to make the conversion simpler.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
          || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
          || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
              && !variable_record_on_lhs
              && !variable_array_on_lhs)))
    return t1;

  /* Otherwise, if the lhs type is non-BLKmode, use it, except for the case of
     a non-BLKmode rhs and array types with a variable part on the lhs but not
     on the rhs to make sure the conversion is preserved during gimplification.
     Note that we know that we will not have any alignment problems since, if
     we did, the non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode
      && (TYPE_MODE (t2) == BLKmode || !variable_array_on_lhs))
    return t1;

  /* If the rhs type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size and the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}

/* Return an expression tree representing an equality comparison of A1 and A2,
   two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length in
   some dimension (not necessarily the same dimension) or (2) if the lengths
   in each dimension are equal and the data is equal.  We perform the length
   tests in as efficient a manner as possible.  */
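
/* For instance, when one operand has a dimension whose length is the
   constant zero, only a Last < First test on the other operand's stored
   bounds is generated for that dimension and the comparison of the data
   is suppressed entirely (see the handling of LENGTH_ZERO_P below).  */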

static tree
compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
{
  tree result = convert (result_type, boolean_true_node);
  tree a1_is_null = convert (result_type, boolean_false_node);
  tree a2_is_null = convert (result_type, boolean_false_node);
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
  bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
  bool length_zero_p = false;

  /* If the operands have side-effects, they need to be evaluated only once
     in spite of the multiple references in the comparison.  */
  if (a1_side_effects_p)
    a1 = gnat_protect_expr (a1);

  if (a2_side_effects_p)
    a2 = gnat_protect_expr (a2);

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a length known to be zero, set LENGTH_ZERO_P to true
     in order to suppress the comparison of the data at the end.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree dom1 = TYPE_DOMAIN (t1);
      tree dom2 = TYPE_DOMAIN (t2);
      tree length1 = size_binop (PLUS_EXPR,
                                 size_binop (MINUS_EXPR,
                                             TYPE_MAX_VALUE (dom1),
                                             TYPE_MIN_VALUE (dom1)),
                                 size_one_node);
      tree length2 = size_binop (PLUS_EXPR,
                                 size_binop (MINUS_EXPR,
                                             TYPE_MAX_VALUE (dom2),
                                             TYPE_MIN_VALUE (dom2)),
                                 size_one_node);
      tree ind1 = TYPE_INDEX_TYPE (dom1);
      tree ind2 = TYPE_INDEX_TYPE (dom2);
      tree base_type = maybe_character_type (get_base_type (ind1));
      tree lb1 = convert (base_type, TYPE_MIN_VALUE (ind1));
      tree ub1 = convert (base_type, TYPE_MAX_VALUE (ind1));
      tree lb2 = convert (base_type, TYPE_MIN_VALUE (ind2));
      tree ub2 = convert (base_type, TYPE_MAX_VALUE (ind2));
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant and that of the second
         array is not, swap our operands to have the constant second.  */
      if (TREE_CODE (length1) == INTEGER_CST
          && TREE_CODE (length2) != INTEGER_CST)
        {
          tree tem;
          bool btem;

          tem = a1, a1 = a2, a2 = tem;
          tem = t1, t1 = t2, t2 = tem;
          tem = lb1, lb1 = lb2, lb2 = tem;
          tem = ub1, ub1 = ub2, ub2 = tem;
          tem = length1, length1 = length2, length2 = tem;
          tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
          btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
          a2_side_effects_p = btem;
        }

      /* If the length of the second array is the constant zero, we can just
         use the original stored bounds for the first array and see whether
         last < first holds.  */
      if (integer_zerop (length2))
        {
          length_zero_p = true;

          lb1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1, a1);
          ub1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1, a1);

          comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          this_a1_is_null = comparison;
          this_a2_is_null = convert (result_type, boolean_true_node);
        }

      /* Otherwise, if the length is some other constant value, we know that
         this dimension in the second array cannot be superflat, so we can
         just use its length computed from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
        {
          /* Note that we know that LB2 and UB2 are constant and hence
             cannot contain a PLACEHOLDER_EXPR.  */
          lb1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1, a1);
          ub1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1, a1);

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type,
                               build_binary_op (MINUS_EXPR, base_type,
                                                ub1, lb1),
                               build_binary_op (MINUS_EXPR, base_type,
                                                ub2, lb2));
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          this_a1_is_null
            = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);

          this_a2_is_null = convert (result_type, boolean_false_node);
        }

      /* Otherwise, compare the computed lengths.  */
      else
        {
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
          length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          lb1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1, a1);
          ub1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1, a1);

          this_a1_is_null
            = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);

          lb2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb2, a2);
          ub2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub2, a2);

          this_a2_is_null
            = fold_build2_loc (loc, LT_EXPR, result_type, ub2, lb2);
        }

      /* Append expressions for this dimension to the final expressions.  */
      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a1_is_null, a1_is_null);

      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a2_is_null, a2_is_null);

      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }

  /* Unless the length of some dimension is known to be zero, compare the
     data in the array.  */
  if (!length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
      tree comparison;

      if (type)
        {
          a1 = convert (type, a1);
          a2 = convert (type, a2);
        }

      comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);

      result
        = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
    }

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                            build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                             a1_is_null, a2_is_null),
                            result);

  /* If the operands have side-effects, they need to be evaluated before
     doing the tests above since the place they otherwise would end up
     being evaluated at run time could be wrong.  */
  if (a1_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (a2_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}

/* Return an expression tree representing an equality comparison of P1 and P2,
   two objects of fat pointer type.  The result should be of type RESULT_TYPE.

   Two fat pointers are equal in one of two ways: (1) if both have a null
   pointer to the array or (2) if they contain the same pair of pointers.
   We perform the comparison in as efficient a manner as possible.  */
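
/* In particular, once the array pointer of one operand is known to be null,
   the bounds pointers are irrelevant: two null fat pointers compare equal
   even if they carry different bounds, per the formula built at the end of
   this function.  */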

static tree
compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
{
  tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
  tree p1_array_is_null, p2_array_is_null;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  p1 = gnat_protect_expr (p1);
  p2 = gnat_protect_expr (p2);

  /* The constant folder doesn't fold fat pointer types so we do it here.  */
  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_array = CONSTRUCTOR_ELT (p1, 0)->value;
  else
    p1_array = build_component_ref (p1, TYPE_FIELDS (TREE_TYPE (p1)), true);

  p1_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
                       fold_convert_loc (loc, TREE_TYPE (p1_array),
                                         null_pointer_node));

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_array = CONSTRUCTOR_ELT (p2, 0)->value;
  else
    p2_array = build_component_ref (p2, TYPE_FIELDS (TREE_TYPE (p2)), true);

  p2_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
                       fold_convert_loc (loc, TREE_TYPE (p2_array),
                                         null_pointer_node));

  /* If one of the pointers to the array is null, just compare the other.  */
  if (integer_zerop (p1_array))
    return p2_array_is_null;
  else if (integer_zerop (p2_array))
    return p1_array_is_null;

  /* Otherwise, do the fully-fledged comparison.  */
  same_array
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);

  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_bounds = CONSTRUCTOR_ELT (p1, 1)->value;
  else
    p1_bounds
      = build_component_ref (p1, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))),
                             true);

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_bounds = CONSTRUCTOR_ELT (p2, 1)->value;
  else
    p2_bounds
      = build_component_ref (p2, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))),
                             true);

  same_bounds
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);

  /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS).  */
  return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
                          build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                           p1_array_is_null, same_bounds));
}

/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */

static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
                             tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;
  tree op_type = type;
  tree result;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    {
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;
    }
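
  /* For example, with modulus 7, LHS + 3 becomes LHS - 4: both are equal
     modulo 7, and the correction test for subtraction (result < 0, see
     below) is cheaper than the comparison against the modulus that the
     addition would need.  */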

  /* For the logical operations, we only need PRECISION bits.  For
     addition and subtraction, we need one more and for multiplication we
     need twice as many.  But we never want to make a size smaller than
     our size.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;

  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)
    unsignedp = false;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the type so we ensure it can be modified to make it modular.  */
      op_type = copy_type (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      /* Copy the type so we ensure it can be modified to make it modular.  */
      tree div_type = copy_type (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
                        fold_build2 (TRUNC_MOD_EXPR, div_type,
                                     convert (div_type, result), modulus));
    }

  /* For subtraction, add the modulus back if we are negative.  */
  else if (op_code == MINUS_EXPR)
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (LT_EXPR, boolean_type_node, result,
                                         build_int_cst (op_type, 0)),
                            fold_build2 (PLUS_EXPR, op_type, result, modulus),
                            result);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (GE_EXPR, boolean_type_node,
                                         result, modulus),
                            fold_build2 (MINUS_EXPR, op_type,
                                         result, modulus),
                            result);
    }

  return convert (type, result);
}

/* This page contains routines that implement the Ada semantics with regard
   to atomic objects.  They are fully piggybacked on the middle-end support
   for atomic loads and stores.

   *** Memory barriers and volatile objects ***

   We implement the weakened form of the C.6(16) clause that was introduced
   in Ada 2012 (AI05-117).  Earlier forms of this clause wouldn't have been
   implementable without significant performance hits on modern platforms.

   We also take advantage of the requirements imposed on shared variables by
   9.10 (conditions for sequential actions) to have non-erroneous execution
   and consider that C.6(16) and C.6(17) only prescribe a uniform order of
   volatile updates with regard to sequential actions, i.e. with regard to
   reads or updates of atomic objects.

   As such, an update of an atomic object by a task requires that all earlier
   accesses to volatile objects have completed.  Similarly, later accesses to
   volatile objects cannot be reordered before the update of the atomic object.
   So, memory barriers both before and after the atomic update are needed.

   For a read of an atomic object, to avoid seeing writes of volatile objects
   by a task earlier than by the other tasks, a memory barrier is needed before
   the atomic read.  Finally, to avoid reordering later reads or updates of
   volatile objects to before the atomic read, a barrier is needed after the
   atomic read.

   So, memory barriers are needed before and after atomic reads and updates.
   And, in order to simplify the implementation, we use full memory barriers
   in all cases, i.e. we enforce sequential consistency for atomic accesses.  */
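
/* As a concrete sketch (assuming a 4-byte atomic object A), the routines
   below expand an Ada read or update of A into calls to the middle-end
   atomic builtins with the SEQ_CST memory model, which implies the full
   barriers on both sides:

     v = __atomic_load_4 (&A, __ATOMIC_SEQ_CST);
     __atomic_store_4 (&A, v, __ATOMIC_SEQ_CST);

   When no synchronization is required (SYNC is false below), the RELAXED
   memory model is used instead.  */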

/* Return the size of TYPE, which must be a positive power of 2.  */

static unsigned int
resolve_atomic_size (tree type)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));

  if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
    return size;

  /* We shouldn't reach here without having already detected that the size
     isn't compatible with an atomic access.  */
  gcc_assert (Serious_Errors_Detected);

  return 0;
}

/* Build an atomic load for the underlying atomic object in SRC.  SYNC is
   true if the load requires synchronization.  */

tree
build_atomic_load (tree src, bool sync)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node,
                             TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
  tree mem_model
    = build_int_cst (integer_type_node,
                     sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
  tree orig_src = src;
  tree t, addr, val;
  unsigned int size;
  int fncode;

  /* Remove conversions to get the address of the underlying object.  */
  src = remove_conversions (src, false);
  size = resolve_atomic_size (TREE_TYPE (src));
  if (size == 0)
    return orig_src;

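  /* Map SIZE to the matching _1/_2/_4/_8/_16 builtin: for example, with
     size == 4, exact_log2 (size) is 2 and we land 3 slots after
     BUILT_IN_ATOMIC_LOAD_N, i.e. on BUILT_IN_ATOMIC_LOAD_4, given that the
     sized variants immediately follow the _N entry in the enum.  */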
  fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);

  addr = build_unary_op (ADDR_EXPR, ptr_type, src);
  val = build_call_expr (t, 2, addr, mem_model);

  /* First reinterpret the loaded bits in the original type of the load,
     then convert to the expected result type.  */
  t = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (src), val);
  return convert (TREE_TYPE (orig_src), t);
}

/* Build an atomic store from SRC to the underlying atomic object in DEST.
   SYNC is true if the store requires synchronization.  */

tree
build_atomic_store (tree dest, tree src, bool sync)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node,
                             TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
  tree mem_model
    = build_int_cst (integer_type_node,
                     sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
  tree orig_dest = dest;
  tree t, int_type, addr;
  unsigned int size;
  int fncode;

  /* Remove conversions to get the address of the underlying object.  */
  dest = remove_conversions (dest, false);
  size = resolve_atomic_size (TREE_TYPE (dest));
  if (size == 0)
    return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);

  fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);
  int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);

  /* First convert the bits to be stored to the original type of the store,
     then reinterpret them in the effective type.  But if the original type
     is a padded type with the same size, convert to the inner type instead,
     as we don't want to artificially introduce a CONSTRUCTOR here.  */
  if (TYPE_IS_PADDING_P (TREE_TYPE (dest))
      && TYPE_SIZE (TREE_TYPE (dest))
         == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest)))))
    src = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest))), src);
  else
    src = convert (TREE_TYPE (dest), src);
  src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
  addr = build_unary_op (ADDR_EXPR, ptr_type, dest);

  return build_call_expr (t, 3, addr, src, mem_model);
}

/* Build a load-modify-store sequence from SRC to DEST.  GNAT_NODE is used for
   the location of the sequence.  Note that, even though the load and the store
   are both atomic, the sequence itself is not atomic.  */
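
/* In effect, for an assignment to a component of an atomic object OBJ, the
   sequence built below is equivalent to:

     temp := atomic_load (OBJ);   --  the load generated earlier
     temp.component := SRC;
     atomic_store (OBJ, temp);    --  done without synchronization

   so only the two accesses to OBJ itself are performed atomically.  */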

tree
build_load_modify_store (tree dest, tree src, Node_Id gnat_node)
{
  /* We will be modifying DEST below so we build a copy.  */
  dest = copy_node (dest);
  tree ref = dest;

  while (handled_component_p (ref))
    {
      /* The load should already have been generated during the translation
         of the GNAT destination tree; find it in the GNU tree.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == VIEW_CONVERT_EXPR)
        {
          tree op = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
          if (TREE_CODE (op) == CALL_EXPR && call_is_atomic_load (op))
            {
              tree type = TREE_TYPE (TREE_OPERAND (ref, 0));
              tree t = CALL_EXPR_ARG (op, 0);
              tree obj, temp, stmt;

              /* Find out the loaded object.  */
              if (TREE_CODE (t) == NOP_EXPR)
                t = TREE_OPERAND (t, 0);
              if (TREE_CODE (t) == ADDR_EXPR)
                obj = TREE_OPERAND (t, 0);
              else
                obj = build1 (INDIRECT_REF, type, t);

              /* Drop atomic and volatile qualifiers for the temporary.  */
              type = TYPE_MAIN_VARIANT (type);

              /* And drop BLKmode, if need be, to put it into a register.  */
              if (TYPE_MODE (type) == BLKmode)
                {
                  unsigned int size = tree_to_uhwi (TYPE_SIZE (type));
                  type = copy_type (type);
                  machine_mode mode = int_mode_for_size (size, 0).else_blk ();
                  SET_TYPE_MODE (type, mode);
                }

              /* Create the temporary by inserting a SAVE_EXPR.  */
              temp = build1 (SAVE_EXPR, type,
                             build1 (VIEW_CONVERT_EXPR, type, op));
              TREE_OPERAND (ref, 0) = temp;

              start_stmt_group ();

              /* Build the modify of the temporary.  */
              stmt = build_binary_op (MODIFY_EXPR, NULL_TREE, dest, src);
              add_stmt_with_node (stmt, gnat_node);

              /* Build the store to the object.  */
              stmt = build_atomic_store (obj, temp, false);
              add_stmt_with_node (stmt, gnat_node);

              return end_stmt_group ();
            }
        }

      TREE_OPERAND (ref, 0) = copy_node (TREE_OPERAND (ref, 0));
      ref = TREE_OPERAND (ref, 0);
    }

  /* Something went wrong earlier if we have not found the atomic load.  */
  gcc_unreachable ();
}

/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
   NULL_TREE.  For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
   case the type to be used will be derived from the operands.
   Don't fold the result if NO_FOLD is true.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */

tree
build_binary_op (enum tree_code op_code, tree result_type,
                 tree left_operand, tree right_operand,
                 bool no_fold)
{
  tree left_type = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type && TYPE_IS_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
             && TREE_CODE (operation_type) == INTEGER_TYPE
             && TYPE_MODULAR_P (operation_type)
             ? TYPE_MODULUS (operation_type) : NULL_TREE);

  switch (op_code)
    {
    case INIT_EXPR:
    case MODIFY_EXPR:
      gcc_checking_assert (!result_type);

      /* If there were integral or pointer conversions on the LHS, remove
         them; we'll be putting them back below if needed.  Likewise for
         conversions between array and record types, except for justified
         modular types.  But don't do this if the right operand is not
         BLKmode (for packed arrays) unless we are not changing the mode.  */
      while ((CONVERT_EXPR_P (left_operand)
              || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
             && (((INTEGRAL_TYPE_P (left_type)
                   || POINTER_TYPE_P (left_type))
                  && (INTEGRAL_TYPE_P (TREE_TYPE
                                       (TREE_OPERAND (left_operand, 0)))
                      || POINTER_TYPE_P (TREE_TYPE
                                         (TREE_OPERAND (left_operand, 0)))))
                 || (((TREE_CODE (left_type) == RECORD_TYPE
                       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
                      || TREE_CODE (left_type) == ARRAY_TYPE)
                     && ((TREE_CODE (TREE_TYPE
                                     (TREE_OPERAND (left_operand, 0)))
                          == RECORD_TYPE)
                         || (TREE_CODE (TREE_TYPE
                                        (TREE_OPERAND (left_operand, 0)))
                             == ARRAY_TYPE))
                     && (TYPE_MODE (right_type) == BLKmode
                         || (TYPE_MODE (left_type)
                             == TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND
                                            (left_operand, 0))))))))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
           || TREE_CODE (right_type) == UNION_TYPE)
          && TYPE_ALIGN_OK (right_type))
        operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
         the padded view of the objects; this is very likely more efficient.
         Likewise for a padded object that is assigned a constructor, if we
         can convert the constructor to the inner type, to avoid putting a
         VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
         actually copied anything.  */
      else if (TYPE_IS_PADDING_P (left_type)
               && TREE_CONSTANT (TYPE_SIZE (left_type))
               && ((TREE_CODE (right_operand) == COMPONENT_REF
                    && TYPE_MAIN_VARIANT (left_type)
                       == TYPE_MAIN_VARIANT
                          (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
                   || (TREE_CODE (right_operand) == CONSTRUCTOR
                       && !CONTAINS_PLACEHOLDER_P
                           (DECL_SIZE (TYPE_FIELDS (left_type)))))
               && !integer_zerop (TYPE_SIZE (right_type)))
        {
          /* We make an exception for a BLKmode type padding a non-BLKmode
             inner type and do the conversion of the LHS right away, since
             unchecked_convert wouldn't do it properly.  */
          if (TYPE_MODE (left_type) == BLKmode
              && TYPE_MODE (right_type) != BLKmode
              && TREE_CODE (right_operand) != CONSTRUCTOR)
            {
              operation_type = right_type;
              left_operand = convert (operation_type, left_operand);
              left_type = operation_type;
            }
          else
            operation_type = left_type;
        }

      /* If we have a call to a function that returns with variable size, use
         the RHS type in case we want to use the return slot optimization.  */
      else if (TREE_CODE (right_operand) == CALL_EXPR
               && return_type_with_variable_size_p (right_type))
        operation_type = right_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
                 && TREE_CODE (right_type) == ARRAY_TYPE)
                || (TREE_CODE (left_type) == RECORD_TYPE
                    && TREE_CODE (right_type) == RECORD_TYPE))
               && (best_type = find_common_type (left_type, right_type)))
        operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else
        operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
         strip anything that get_inner_reference can handle.  Then remove any
         conversions between types having the same code and mode.  And mark
         VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
         either an INDIRECT_REF, a NULL_EXPR, a SAVE_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
        {
          tree restype = TREE_TYPE (result);

          if (TREE_CODE (result) == COMPONENT_REF
              || TREE_CODE (result) == ARRAY_REF
              || TREE_CODE (result) == ARRAY_RANGE_REF)
            while (handled_component_p (result))
              result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == REALPART_EXPR
                   || TREE_CODE (result) == IMAGPART_EXPR
                   || (CONVERT_EXPR_P (result)
                       && (((TREE_CODE (restype)
                             == TREE_CODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0))))
                            && (TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0)))
                                == TYPE_MODE (restype)))
                           || TYPE_ALIGN_OK (restype))))
            result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
            {
              TREE_ADDRESSABLE (result) = 1;
              result = TREE_OPERAND (result, 0);
            }
          else
            break;
        }

      gcc_assert (TREE_CODE (result) == INDIRECT_REF
                  || TREE_CODE (result) == NULL_EXPR
                  || TREE_CODE (result) == SAVE_EXPR
                  || DECL_P (result));

      /* Convert the right operand to the operation type unless it is
         either already of the correct type or if the type involves a
         placeholder, since the RHS may not have the same record type.  */
      if (operation_type != right_type
          && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
        {
          right_operand = convert (operation_type, right_operand);
          right_type = operation_type;
        }

      /* If the left operand is not of the same type as the operation
         type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
        left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;

    case ARRAY_REF:
      if (!operation_type)
        operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversion between type variants.  Note that
         this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
          && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
             == TYPE_MAIN_VARIANT (left_type))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* For a range, make sure the element type is consistent.  */
      if (op_code == ARRAY_RANGE_REF
          && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
        operation_type = build_array_type (TREE_TYPE (left_type),
                                           TYPE_DOMAIN (operation_type));

      /* Then convert the right operand to its base type.  This will prevent
         unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert_to_index_type (right_operand);
      modulus = NULL_TREE;
      break;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      gcc_checking_assert
        (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
      operation_type = left_base_type;
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      gcc_checking_assert
        (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (left_operand, 0)),
                       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (right_operand, 0)),
                       integer_zero_node);

      /* If either object is a justified modular type, get the
         fields from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (left_type))
        {
          left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
                                  left_operand);
          left_type = TREE_TYPE (left_operand);
          left_base_type = get_base_type (left_type);
        }

      if (TREE_CODE (right_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (right_type))
        {
          right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
                                   right_operand);
          right_type = TREE_TYPE (right_operand);
          right_base_type = get_base_type (right_type);
        }

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
           || (TREE_CODE (left_type) == INTEGER_TYPE
               && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
          && (TREE_CODE (right_type) == ARRAY_TYPE
              || (TREE_CODE (right_type) == INTEGER_TYPE
                  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
        {
          result = compare_arrays (input_location,
                                   result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      /* Otherwise, the base types must be the same, unless they are both fat
         pointer types or record types.  In the latter case, use the best type
         and convert both operands to that type.  */
      if (left_base_type != right_base_type)
        {
          if (TYPE_IS_FAT_POINTER_P (left_base_type)
              && TYPE_IS_FAT_POINTER_P (right_base_type))
            {
              gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
                          == TYPE_MAIN_VARIANT (right_base_type));
              best_type = left_base_type;
            }

          else if (TREE_CODE (left_base_type) == RECORD_TYPE
                   && TREE_CODE (right_base_type) == RECORD_TYPE)
            {
              /* The only way this is permitted is if both types have the same
                 name.  In that case, one of them must not be self-referential.
                 Use it as the best type.  Even better with a fixed size.  */
              gcc_assert (TYPE_NAME (left_base_type)
                          && TYPE_NAME (left_base_type)
                             == TYPE_NAME (right_base_type));

              if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else
                gcc_unreachable ();
            }

          else if (POINTER_TYPE_P (left_base_type)
                   && POINTER_TYPE_P (right_base_type))
            {
              gcc_assert (TREE_TYPE (left_base_type)
                          == TREE_TYPE (right_base_type));
              best_type = left_base_type;
            }
          else
            gcc_unreachable ();

          left_operand = convert (best_type, left_operand);
          right_operand = convert (best_type, right_operand);
        }
      else
        {
          left_operand = convert (left_base_type, left_operand);
          right_operand = convert (right_base_type, right_operand);
        }

      /* If both objects are fat pointers, compare them specially.  */
      if (TYPE_IS_FAT_POINTER_P (left_base_type))
        {
          result
            = compare_fat_pointers (input_location,
                                    result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      modulus = NULL_TREE;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
         (we used to abort, but this is needed for unchecked conversion
         to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
         outputs.  */
      if (modulus && integer_pow2p (modulus))
        modulus = NULL_TREE;
      goto common;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
                  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;

    case TRUNC_DIV_EXPR:   case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:    case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:   case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:   case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
                  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;

    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
        op_code = PLUS_EXPR;
      else
        op_code = MINUS_EXPR;
      modulus = NULL_TREE;

      /* ... fall through ... */

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
         other compilers.  Contrary to C, Ada doesn't allow arithmetic in
         these types but can generate addition/subtraction for Succ/Pred.  */
      if (operation_type
          && (TREE_CODE (operation_type) == ENUMERAL_TYPE
              || TREE_CODE (operation_type) == BOOLEAN_TYPE))
        operation_type = left_base_type = right_base_type
          = gnat_type_for_mode (TYPE_MODE (operation_type),
                                TYPE_UNSIGNED (operation_type));
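
      /* For instance, Boolean'Succ (B) reaches here as an addition on a
         BOOLEAN_TYPE and is performed instead in the integer type with the
         same machine mode, before being converted back to the result
         type at the end of this function.  */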

      /* ... fall through ... */

    default:
    common:
      /* The result type should be the same as the base types of
         both operands (and they should be the same).  Convert
         everything to the result type.  */

      gcc_assert (operation_type == left_base_type
                  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
    }

  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
                                            left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    {
      result = build4 (op_code, operation_type, left_operand, right_operand,
                       NULL_TREE, NULL_TREE);
      if (!no_fold)
        result = fold (result);
    }
  else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
    result = build2 (op_code, void_type_node, left_operand, right_operand);
  else if (no_fold)
    result = build2 (op_code, operation_type, left_operand, right_operand);
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  if (TREE_CONSTANT (result))
    ;
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    {
      if (TYPE_VOLATILE (operation_type))
        TREE_THIS_VOLATILE (result) = 1;
    }
  else
    TREE_CONSTANT (result)
      |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));

  TREE_SIDE_EFFECTS (result) |= has_side_effects;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
  if (modulus)
    {
      modulus = convert (operation_type, modulus);
      if (no_fold)
        result = build2 (FLOOR_MOD_EXPR, operation_type, result, modulus);
      else
        result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result, modulus);
    }

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}

/* Similar, but for unary operations.  */

tree
build_unary_op (enum tree_code op_code, tree result_type, tree operand)
{
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  tree operation_type = result_type;
  tree result;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  switch (op_code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      if (!operation_type)
        result_type = operation_type = TREE_TYPE (type);
      else
        gcc_assert (result_type == TREE_TYPE (type));

      result = fold_build1 (op_code, operation_type, operand);
      break;

    case TRUTH_NOT_EXPR:
      gcc_checking_assert
        (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
      result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
      /* When not optimizing, fold the result as invert_truthvalue_loc
         doesn't fold the result of comparisons.  This is intended to undo
         the trick used for boolean rvalues in gnat_to_gnu.  */
      if (!optimize)
        result = fold (result);
      break;

    case ATTR_ADDR_EXPR:
    case ADDR_EXPR:
      switch (TREE_CODE (operand))
        {
        case INDIRECT_REF:
        case UNCONSTRAINED_ARRAY_REF:
          result = TREE_OPERAND (operand, 0);

          /* Make sure the type here is a pointer, not a reference.
             GCC wants pointer types for function addresses.  */
          if (!result_type)
            result_type = build_pointer_type (type);

          /* If the underlying object can alias everything, propagate the
             property since we are effectively retrieving the object.  */
          if (POINTER_TYPE_P (TREE_TYPE (result))
              && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
            {
              if (TREE_CODE (result_type) == POINTER_TYPE
                  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_pointer_type_for_mode (TREE_TYPE (result_type),
                                                 TYPE_MODE (result_type),
                                                 true);
              else if (TREE_CODE (result_type) == REFERENCE_TYPE
                       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_reference_type_for_mode (TREE_TYPE (result_type),
                                                   TYPE_MODE (result_type),
                                                   true);
            }
          break;

        case NULL_EXPR:
          result = operand;
          TREE_TYPE (result) = type = build_pointer_type (type);
          break;

        case COMPOUND_EXPR:
          /* Fold a compound expression if it has unconstrained array type
             since the middle-end cannot handle it.  But we don't do it in
             the general case because it may introduce aliasing issues if the
             first operand is an indirect assignment and the second operand
             the corresponding address, e.g. for an allocator.  However do
             it for a return value to expose it for later recognition.  */
          if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE
              || (TREE_CODE (TREE_OPERAND (operand, 1)) == VAR_DECL
                  && DECL_RETURN_VALUE_P (TREE_OPERAND (operand, 1))))
            {
              result = build_unary_op (ADDR_EXPR, result_type,
                                       TREE_OPERAND (operand, 1));
              result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
                               TREE_OPERAND (operand, 0), result);
              break;
            }
          goto common;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
        case COMPONENT_REF:
        case BIT_FIELD_REF:
          /* If this is for 'Address, find the address of the prefix and add
             the offset to the field.  Otherwise, do this the normal way.  */
          if (op_code == ATTR_ADDR_EXPR)
            {
              poly_int64 bitsize;
              poly_int64 bitpos;
              tree offset, inner;
              machine_mode mode;
              int unsignedp, reversep, volatilep;

              inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
                                           &mode, &unsignedp, &reversep,
                                           &volatilep);

              /* If INNER is a padding type whose field has a self-referential
                 size, convert to that inner type.  We know the offset is zero
                 and we need to have that type visible.  */
              if (type_is_padding_self_referential (TREE_TYPE (inner)))
                inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
                                 inner);

              /* Compute the offset as a byte offset from INNER.  */
              if (!offset)
                offset = size_zero_node;

              offset
                = size_binop (PLUS_EXPR, offset,
                              size_int (bits_to_bytes_round_down (bitpos)));

              /* Take the address of INNER, convert it to a pointer to our type
                 and add the offset.  */
              inner = build_unary_op (ADDR_EXPR,
                                      build_pointer_type (TREE_TYPE (operand)),
                                      inner);
              result = build_binary_op (POINTER_PLUS_EXPR, TREE_TYPE (inner),
                                        inner, offset);
              break;
            }
          goto common;

        case CONSTRUCTOR:
          /* If this is just a constructor for a padded record, we can
             just take the address of the single field and convert it to
             a pointer to our type.  */
          if (TYPE_IS_PADDING_P (type))
            {
              result
                = build_unary_op (ADDR_EXPR,
                                  build_pointer_type (TREE_TYPE (operand)),
                                  CONSTRUCTOR_ELT (operand, 0)->value);
              break;
            }
          goto common;

        case NOP_EXPR:
          if (AGGREGATE_TYPE_P (type)
              && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
            return build_unary_op (ADDR_EXPR, result_type,
                                   TREE_OPERAND (operand, 0));

          /* ... fall through ... */

        case VIEW_CONVERT_EXPR:
          /* If this is just a variant conversion or if the conversion doesn't
             change the mode, get the result type from this type and go down.
             This is needed for conversions of CONST_DECLs, to eventually get
             to the address of their CORRESPONDING_VARs.  */
          if ((TYPE_MAIN_VARIANT (type)
               == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
              || (TYPE_MODE (type) != BLKmode
                  && (TYPE_MODE (type)
                      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
            return build_unary_op (ADDR_EXPR,
                                   (result_type ? result_type
                                    : build_pointer_type (type)),
                                   TREE_OPERAND (operand, 0));
          goto common;

        case CONST_DECL:
          operand = DECL_CONST_CORRESPONDING_VAR (operand);

          /* ... fall through ... */

        default:
        common:

          /* If we are taking the address of a padded record whose field
             contains a template, take the address of the field.  */
          if (TYPE_IS_PADDING_P (type)
              && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
              && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
            {
              type = TREE_TYPE (TYPE_FIELDS (type));
              operand = convert (type, operand);
            }

          gnat_mark_addressable (operand);
          result = build_fold_addr_expr (operand);
        }

      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
      break;
1532
1533 case INDIRECT_REF:
1534 {
1535 tree t = remove_conversions (operand, false);
1536 bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);
1537
1538 /* If TYPE is a thin pointer, either first retrieve the base if this
1539 is an expression with an offset built for the initialization of an
1540 object with an unconstrained nominal subtype, or else convert to
1541 the fat pointer. */
1542 if (TYPE_IS_THIN_POINTER_P (type))
1543 {
1544 tree rec_type = TREE_TYPE (type);
1545
1546 if (TREE_CODE (operand) == POINTER_PLUS_EXPR
1547 && TREE_OPERAND (operand, 1)
1548 == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
1549 && TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
1550 {
1551 operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
1552 type = TREE_TYPE (operand);
1553 }
1554 else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
1555 {
1556 operand
1557 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
1558 operand);
1559 type = TREE_TYPE (operand);
1560 }
1561 }
1562
1563 /* If we want to refer to an unconstrained array, use the appropriate
1564 expression. But this will never survive down to the back-end. */
1565 if (TYPE_IS_FAT_POINTER_P (type))
1566 {
1567 result = build1 (UNCONSTRAINED_ARRAY_REF,
1568 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1569 TREE_READONLY (result)
1570 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
1571 }
1572
1573 /* If we are dereferencing an ADDR_EXPR, return its operand. */
1574 else if (TREE_CODE (operand) == ADDR_EXPR)
1575 result = TREE_OPERAND (operand, 0);
1576
1577 /* Otherwise, build and fold the indirect reference. */
1578 else
1579 {
1580 result = build_fold_indirect_ref (operand);
1581 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1582 }
1583
1584 if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
1585 {
1586 TREE_SIDE_EFFECTS (result) = 1;
1587 if (TREE_CODE (result) == INDIRECT_REF)
1588 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1589 }
1590
1591 if ((TREE_CODE (result) == INDIRECT_REF
1592 || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
1593 && can_never_be_null)
1594 TREE_THIS_NOTRAP (result) = 1;
1595
1596 break;
1597 }
1598
1599 case NEGATE_EXPR:
1600 case BIT_NOT_EXPR:
1601 {
1602 tree modulus = ((operation_type
1603 && TREE_CODE (operation_type) == INTEGER_TYPE
1604 && TYPE_MODULAR_P (operation_type))
1605 ? TYPE_MODULUS (operation_type) : NULL_TREE);
1606 int mod_pow2 = modulus && integer_pow2p (modulus);
1607
1608 /* If this is a modular type, there are various possibilities
1609 depending on the operation and whether the modulus is a
1610 power of two or not. */
1611
1612 if (modulus)
1613 {
1614 gcc_assert (operation_type == base_type);
1615 operand = convert (operation_type, operand);
1616
1617 	      /* The fastest approach in the negate case for a binary modulus
1618 		 is the straightforward code below; the TRUNC_MOD_EXPR it
1619 		 builds folds down to an AND operation.  */
1620 if (op_code == NEGATE_EXPR && mod_pow2)
1621 result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1622 fold_build1 (NEGATE_EXPR, operation_type,
1623 operand),
1624 modulus);
1625
1626 	      /* For the nonbinary negate case, return zero for a zero operand
1627 		 and the modulus minus the operand otherwise.  If the modulus
1628 		 is a power of two minus one, we can do the subtraction as an
1629 		 XOR since the two are equivalent and XOR is faster on most machines.  */
1630 else if (op_code == NEGATE_EXPR && !mod_pow2)
1631 {
1632 if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1633 modulus,
1634 build_int_cst (operation_type,
1635 1))))
1636 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1637 operand, modulus);
1638 else
1639 result = fold_build2 (MINUS_EXPR, operation_type,
1640 modulus, operand);
1641
1642 result = fold_build3 (COND_EXPR, operation_type,
1643 fold_build2 (NE_EXPR,
1644 boolean_type_node,
1645 operand,
1646 build_int_cst
1647 (operation_type, 0)),
1648 result, operand);
1649 }
1650 else
1651 {
1652 		 /* For the NOT cases, we need a constant equal to the
1653 		    modulus minus one.  For a binary modulus, we XOR the
1654 		    operand against that constant; for a nonbinary modulus,
1655 		    we subtract the operand from it.  */
1656
1657 tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
1658 build_int_cst (operation_type, 1));
1659
1660 if (mod_pow2)
1661 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1662 operand, cnst);
1663 else
1664 result = fold_build2 (MINUS_EXPR, operation_type,
1665 cnst, operand);
1666 }
1667
1668 break;
1669 }
1670 }
1671
1672 /* ... fall through ... */
1673
1674 default:
1675 gcc_assert (operation_type == base_type);
1676 result = fold_build1 (op_code, operation_type,
1677 convert (operation_type, operand));
1678 }
1679
1680 if (result_type && TREE_TYPE (result) != result_type)
1681 result = convert (result_type, result);
1682
1683 return result;
1684 }
1685
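/* Illustration only, not part of the compiler proper: a minimal C sketch of
   the modular arithmetic identities exploited by build_unary_op above.  The
   function name is hypothetical and MODULUS is assumed to be nonzero.  */

static inline unsigned int
modular_negate_example (unsigned int x, unsigned int modulus)
{
  /* Binary (power-of-two) modulus: negation followed by TRUNC_MOD_EXPR
     amounts to a masking AND.  */
  if ((modulus & (modulus - 1)) == 0)
    return (0U - x) & (modulus - 1);

  /* Nonbinary modulus of the form 2**n - 1: negation is an XOR,
     with zero mapped to zero.  */
  if (((modulus + 1) & modulus) == 0)
    return x ? (x ^ modulus) : 0;

  /* General nonbinary modulus: the modulus minus the operand,
     with zero mapped to zero.  */
  return x ? modulus - x : 0;
}
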
1686 /* Similar, but for COND_EXPR. */
1687
1688 tree
1689 build_cond_expr (tree result_type, tree condition_operand,
1690 tree true_operand, tree false_operand)
1691 {
1692 bool addr_p = false;
1693 tree result;
1694
1695   /* The front-end verified that the result and the true and false operands
1696      have the same base type.  Convert everything to the result type.  */
1697 true_operand = convert (result_type, true_operand);
1698 false_operand = convert (result_type, false_operand);
1699
1700 /* If the result type is unconstrained, take the address of the operands and
1701 then dereference the result. Likewise if the result type is passed by
1702 reference, because creating a temporary of this type is not allowed. */
1703 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1704 || TYPE_IS_BY_REFERENCE_P (result_type)
1705 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1706 {
1707 result_type = build_pointer_type (result_type);
1708 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1709 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1710 addr_p = true;
1711 }
1712
1713 result = fold_build3 (COND_EXPR, result_type, condition_operand,
1714 true_operand, false_operand);
1715
1716 /* If we have a common SAVE_EXPR (possibly surrounded by arithmetics)
1717 in both arms, make sure it gets evaluated by moving it ahead of the
1718 conditional expression. This is necessary because it is evaluated
1719 in only one place at run time and would otherwise be uninitialized
1720 in one of the arms. */
1721 true_operand = skip_simple_arithmetic (true_operand);
1722 false_operand = skip_simple_arithmetic (false_operand);
1723
1724 if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
1725 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1726
1727 if (addr_p)
1728 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1729
1730 return result;
1731 }
1732
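/* Illustration only: for a type that must not be copied into a temporary,
   the ADDR_EXPR/INDIRECT_REF wrapping performed by build_cond_expr above
   corresponds to the classic C idiom of selecting between addresses and
   dereferencing the winner.  A hypothetical sketch:  */

struct big_example { char data[1024]; };

static inline struct big_example *
select_big_example (int cond, struct big_example *a, struct big_example *b)
{
  /* Only addresses flow through the conditional; no 1-KB temporary of
     the pointed-to type is created by the selection itself.  */
  return cond ? a : b;
}
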
1733 /* Similar, but for COMPOUND_EXPR. */
1734
1735 tree
1736 build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
1737 {
1738 bool addr_p = false;
1739 tree result;
1740
1741 /* If the result type is unconstrained, take the address of the operand and
1742 then dereference the result. Likewise if the result type is passed by
1743 reference, but this is natively handled in the gimplifier. */
1744 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1745 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1746 {
1747 result_type = build_pointer_type (result_type);
1748 expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
1749 addr_p = true;
1750 }
1751
1752 result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
1753 expr_operand);
1754
1755 if (addr_p)
1756 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1757
1758 return result;
1759 }
1760
1761 /* Conveniently construct a function call expression. FNDECL names the
1762 function to be called, N is the number of arguments, and the "..."
1763 parameters are the argument expressions. Unlike build_call_expr
1764 this doesn't fold the call, hence it will always return a CALL_EXPR. */
1765
1766 tree
1767 build_call_n_expr (tree fndecl, int n, ...)
1768 {
1769 va_list ap;
1770 tree fntype = TREE_TYPE (fndecl);
1771 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
1772
1773 va_start (ap, n);
1774 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
1775 va_end (ap);
1776 return fn;
1777 }
1778
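/* Example use (a sketch; build_call_raise below makes such a call for real):

     tree call = build_call_n_expr (fndecl, 2, arg0, arg1);

   Because the call is not folded, the result is always a CALL_EXPR, so
   callers may rely on its shape when setting flags on the node.  */
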
1779 /* Build a goto to LABEL for a raise, with an optional call to Local_Raise.
1780 MSG gives the exception's identity for the call to Local_Raise, if any. */
1781
1782 static tree
1783 build_goto_raise (Entity_Id gnat_label, int msg)
1784 {
1785 tree gnu_label = gnat_to_gnu_entity (gnat_label, NULL_TREE, false);
1786 tree gnu_result = build1 (GOTO_EXPR, void_type_node, gnu_label);
1787 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1788
1789 /* If Local_Raise is present, build Local_Raise (Exception'Identity). */
1790 if (Present (local_raise))
1791 {
1792 tree gnu_local_raise
1793 = gnat_to_gnu_entity (local_raise, NULL_TREE, false);
1794 tree gnu_exception_entity
1795 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, false);
1796 tree gnu_call
1797 = build_call_n_expr (gnu_local_raise, 1,
1798 build_unary_op (ADDR_EXPR, NULL_TREE,
1799 gnu_exception_entity));
1800 gnu_result
1801 = build2 (COMPOUND_EXPR, void_type_node, gnu_call, gnu_result);
1802 }
1803
1804 TREE_USED (gnu_label) = 1;
1805 return gnu_result;
1806 }
1807
1808 /* Expand the SLOC of GNAT_NODE, if present, into tree location information
1809 pointed to by FILENAME, LINE and COL. Fall back to the current location
1810 if GNAT_NODE is absent or has no SLOC. */
1811
1812 static void
1813 expand_sloc (Node_Id gnat_node, tree *filename, tree *line, tree *col)
1814 {
1815 const char *str;
1816 int line_number, column_number;
1817
1818 if (Debug_Flag_NN || Exception_Locations_Suppressed)
1819 {
1820 str = "";
1821 line_number = 0;
1822 column_number = 0;
1823 }
1824 else if (Present (gnat_node) && Sloc (gnat_node) != No_Location)
1825 {
1826 str = Get_Name_String
1827 (Debug_Source_Name (Get_Source_File_Index (Sloc (gnat_node))));
1828 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1829 column_number = Get_Column_Number (Sloc (gnat_node));
1830 }
1831 else
1832 {
1833 str = lbasename (LOCATION_FILE (input_location));
1834 line_number = LOCATION_LINE (input_location);
1835 column_number = LOCATION_COLUMN (input_location);
1836 }
1837
1838 const int len = strlen (str);
1839 *filename = build_string (len, str);
1840 TREE_TYPE (*filename) = build_array_type (char_type_node,
1841 build_index_type (size_int (len)));
1842 *line = build_int_cst (NULL_TREE, line_number);
1843 if (col)
1844 *col = build_int_cst (NULL_TREE, column_number);
1845 }
1846
1847 /* Build a call to a function that raises an exception and passes file name
1848 and line number, if requested. MSG says which exception function to call.
1849 GNAT_NODE is the node conveying the source location for which the error
1850 should be signaled, or Empty in which case the error is signaled for the
1851 current location. KIND says which kind of exception node this is for,
1852 among N_Raise_{Constraint,Storage,Program}_Error. */
1853
1854 tree
1855 build_call_raise (int msg, Node_Id gnat_node, char kind)
1856 {
1857 Entity_Id gnat_label = get_exception_label (kind);
1858 tree fndecl = gnat_raise_decls[msg];
1859 tree filename, line;
1860
1861 /* If this is to be done as a goto, handle that case. */
1862 if (Present (gnat_label))
1863 return build_goto_raise (gnat_label, msg);
1864
1865 expand_sloc (gnat_node, &filename, &line, NULL);
1866
1867 return
1868 build_call_n_expr (fndecl, 2,
1869 build1 (ADDR_EXPR,
1870 build_pointer_type (char_type_node),
1871 filename),
1872 line);
1873 }
1874
1875 /* Similar to build_call_raise, with extra information about the column
1876 where the check failed. */
1877
1878 tree
1879 build_call_raise_column (int msg, Node_Id gnat_node, char kind)
1880 {
1881 Entity_Id gnat_label = get_exception_label (kind);
1882 tree fndecl = gnat_raise_decls_ext[msg];
1883 tree filename, line, col;
1884
1885 /* If this is to be done as a goto, handle that case. */
1886 if (Present (gnat_label))
1887 return build_goto_raise (gnat_label, msg);
1888
1889 expand_sloc (gnat_node, &filename, &line, &col);
1890
1891 return
1892 build_call_n_expr (fndecl, 3,
1893 build1 (ADDR_EXPR,
1894 build_pointer_type (char_type_node),
1895 filename),
1896 line, col);
1897 }
1898
1899 /* Similar to build_call_raise_column, for an index or range check exception,
1900 with extra information of the form "INDEX out of range FIRST..LAST". */
1901
1902 tree
1903 build_call_raise_range (int msg, Node_Id gnat_node, char kind,
1904 tree index, tree first, tree last)
1905 {
1906 Entity_Id gnat_label = get_exception_label (kind);
1907 tree fndecl = gnat_raise_decls_ext[msg];
1908 tree filename, line, col;
1909
1910 /* If this is to be done as a goto, handle that case. */
1911 if (Present (gnat_label))
1912 return build_goto_raise (gnat_label, msg);
1913
1914 expand_sloc (gnat_node, &filename, &line, &col);
1915
1916 return
1917 build_call_n_expr (fndecl, 6,
1918 build1 (ADDR_EXPR,
1919 build_pointer_type (char_type_node),
1920 filename),
1921 line, col,
1922 convert (integer_type_node, index),
1923 convert (integer_type_node, first),
1924 convert (integer_type_node, last));
1925 }
1926
1927 /* qsort comparer for the bit positions of two constructor elements
1928 for record components. */
1929
1930 static int
1931 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1932 {
1933 const constructor_elt * const elmt1 = (const constructor_elt *) rt1;
1934 const constructor_elt * const elmt2 = (const constructor_elt *) rt2;
1935 const_tree const field1 = elmt1->index;
1936 const_tree const field2 = elmt2->index;
1937 const int ret
1938 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1939
1940 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
1941 }
1942
1943 /* Return a CONSTRUCTOR of TYPE whose elements are V. */
1944
1945 tree
1946 gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
1947 {
1948 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1949 bool read_only = true;
1950 bool side_effects = false;
1951 tree result, obj, val;
1952 unsigned int n_elmts;
1953
1954 /* Scan the elements to see if they are all constant or if any has side
1955 effects, to let us set global flags on the resulting constructor. Count
1956 the elements along the way for possible sorting purposes below. */
1957 FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
1958 {
1959 /* The predicate must be in keeping with output_constructor. */
1960 if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
1961 || (TREE_CODE (type) == RECORD_TYPE
1962 && CONSTRUCTOR_BITFIELD_P (obj)
1963 && !initializer_constant_valid_for_bitfield_p (val))
1964 || !initializer_constant_valid_p (val,
1965 TREE_TYPE (val),
1966 TYPE_REVERSE_STORAGE_ORDER (type)))
1967 allconstant = false;
1968
1969 if (!TREE_READONLY (val))
1970 read_only = false;
1971
1972 if (TREE_SIDE_EFFECTS (val))
1973 side_effects = true;
1974 }
1975
1976 /* For record types with constant components only, sort field list
1977 by increasing bit position. This is necessary to ensure the
1978 constructor can be output as static data. */
1979 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1980 v->qsort (compare_elmt_bitpos);
1981
1982 result = build_constructor (type, v);
1983 CONSTRUCTOR_NO_CLEARING (result) = 1;
1984 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1985 TREE_SIDE_EFFECTS (result) = side_effects;
1986 TREE_READONLY (result) = TYPE_READONLY (type) || read_only || allconstant;
1987 return result;
1988 }
1989
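/* Example (sketch): given a record whose components were appended to V in
   source order at bit positions 64 and 0, the qsort above reorders the
   constructor elements to positions 0 then 64, i.e. into the increasing
   offset order that lets output_constructor emit them as static data.  */
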
1990 /* Return a COMPONENT_REF to access FIELD in RECORD, or NULL_TREE if the field
1991 is not found in the record. Don't fold the result if NO_FOLD is true. */
1992
1993 static tree
1994 build_simple_component_ref (tree record, tree field, bool no_fold)
1995 {
1996 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (record));
1997 tree ref;
1998
1999 gcc_assert (RECORD_OR_UNION_TYPE_P (type) && COMPLETE_TYPE_P (type));
2000
2001 /* Try to fold a conversion from another record or union type unless the type
2002 contains a placeholder as it might be needed for a later substitution. */
2003 if (TREE_CODE (record) == VIEW_CONVERT_EXPR
2004 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (record, 0)))
2005 && !type_contains_placeholder_p (type))
2006 {
2007 tree op = TREE_OPERAND (record, 0);
2008
2009 /* If this is an unpadding operation, convert the underlying object to
2010 the unpadded type directly. */
2011 if (TYPE_IS_PADDING_P (type) && field == TYPE_FIELDS (type))
2012 return convert (TREE_TYPE (field), op);
2013
2014 /* Otherwise try to access FIELD directly in the underlying type, but
2015 make sure that the form of the reference doesn't change too much;
2016 this can happen for an unconstrained bit-packed array type whose
2017 constrained form can be an integer type. */
2018 ref = build_simple_component_ref (op, field, no_fold);
2019 if (ref && TREE_CODE (TREE_TYPE (ref)) == TREE_CODE (TREE_TYPE (field)))
2020 return ref;
2021 }
2022
2023 /* If this field is not in the specified record, see if we can find a field
2024 in the specified record whose original field is the same as this one. */
2025 if (DECL_CONTEXT (field) != type)
2026 {
2027 tree new_field;
2028
2029 /* First loop through normal components. */
2030 for (new_field = TYPE_FIELDS (type);
2031 new_field;
2032 new_field = DECL_CHAIN (new_field))
2033 if (SAME_FIELD_P (field, new_field))
2034 break;
2035
2036 /* Next, loop through DECL_INTERNAL_P components if we haven't found the
2037 component in the first search. Doing this search in two steps is
2038 required to avoid hidden homonymous fields in the _Parent field. */
2039 if (!new_field)
2040 for (new_field = TYPE_FIELDS (type);
2041 new_field;
2042 new_field = DECL_CHAIN (new_field))
2043 if (DECL_INTERNAL_P (new_field)
2044 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (new_field)))
2045 {
2046 tree field_ref
2047 = build_simple_component_ref (record, new_field, no_fold);
2048 ref = build_simple_component_ref (field_ref, field, no_fold);
2049 if (ref)
2050 return ref;
2051 }
2052
2053 field = new_field;
2054 }
2055
2056 if (!field)
2057 return NULL_TREE;
2058
2059 /* If the field's offset has overflowed, do not try to access it, as doing
2060 so may trigger sanity checks deeper in the back-end. Note that we don't
2061 need to warn since this will be done on trying to declare the object. */
2062 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
2063 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
2064 return NULL_TREE;
2065
2066 ref = build3 (COMPONENT_REF, TREE_TYPE (field), record, field, NULL_TREE);
2067
2068 if (TREE_READONLY (record)
2069 || TREE_READONLY (field)
2070 || TYPE_READONLY (type))
2071 TREE_READONLY (ref) = 1;
2072
2073 if (TREE_THIS_VOLATILE (record)
2074 || TREE_THIS_VOLATILE (field)
2075 || TYPE_VOLATILE (type))
2076 TREE_THIS_VOLATILE (ref) = 1;
2077
2078 if (no_fold)
2079 return ref;
2080
2081 /* The generic folder may punt in this case because the inner array type
2082 can be self-referential, but folding is in fact not problematic. */
2083 if (TREE_CODE (record) == CONSTRUCTOR
2084 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record)))
2085 {
2086 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (record);
2087 unsigned HOST_WIDE_INT idx;
2088 tree index, value;
2089 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
2090 if (index == field)
2091 return value;
2092 return ref;
2093 }
2094
2095 return fold (ref);
2096 }
2097
2098 /* Likewise, but return NULL_EXPR and generate a Constraint_Error if the
2099 field is not found in the record. */
2100
2101 tree
2102 build_component_ref (tree record, tree field, bool no_fold)
2103 {
2104 tree ref = build_simple_component_ref (record, field, no_fold);
2105 if (ref)
2106 return ref;
2107
2108 /* Assume this is an invalid user field so raise Constraint_Error. */
2109 return build1 (NULL_EXPR, TREE_TYPE (field),
2110 build_call_raise (CE_Discriminant_Check_Failed, Empty,
2111 N_Raise_Constraint_Error));
2112 }
2113
2114 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2115 identically. Process the case where a GNAT_PROC to call is provided. */
2116
2117 static inline tree
2118 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2119 Entity_Id gnat_proc, Entity_Id gnat_pool)
2120 {
2121 tree gnu_proc = gnat_to_gnu (gnat_proc);
2122 tree gnu_call;
2123
2124 /* A storage pool's underlying type is a record type (for both predefined
2125 storage pools and GNAT simple storage pools). The secondary stack uses
2126 the same mechanism, but its pool object (SS_Pool) is an integer. */
2127 if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
2128 {
2129       /* The size is the third parameter; the alignment, which comes
2130 	 after it, has the same type.  */
2131 Entity_Id gnat_size_type
2132 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2133 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2134
2135 tree gnu_pool = gnat_to_gnu (gnat_pool);
2136 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
2137 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2138
2139 gnu_size = convert (gnu_size_type, gnu_size);
2140 gnu_align = convert (gnu_size_type, gnu_align);
2141
2142       /* The first arg is always the address of the storage pool; for a
2143 	 deallocator, the address of the object comes next; then come
2144 	 the size and alignment.  */
2145 if (gnu_obj)
2146 gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2147 gnu_size, gnu_align);
2148 else
2149 gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2150 gnu_size, gnu_align);
2151 }
2152
2153 /* Secondary stack case. */
2154 else
2155 {
2156 /* The size is the second parameter. */
2157 Entity_Id gnat_size_type
2158 = Etype (Next_Formal (First_Formal (gnat_proc)));
2159 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2160
2161 gnu_size = convert (gnu_size_type, gnu_size);
2162
2163       /* For a deallocator, the first arg is the address of the object;
2164 	 then comes the size.  */
2165 if (gnu_obj)
2166 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
2167 else
2168 gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
2169 }
2170
2171 return gnu_call;
2172 }
2173
2174 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
2175 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2176 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
2177 latter offers. */
2178
2179 static inline tree
2180 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2181 {
2182 /* When the DATA_TYPE alignment is stricter than what malloc offers
2183 (super-aligned case), we allocate an "aligning" wrapper type and return
2184 the address of its single data field with the malloc's return value
2185 stored just in front. */
2186
2187 unsigned int data_align = TYPE_ALIGN (data_type);
2188 unsigned int system_allocator_alignment
2189 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2190
2191 tree aligning_type
2192 = ((data_align > system_allocator_alignment)
2193 ? make_aligning_type (data_type, data_align, data_size,
2194 system_allocator_alignment,
2195 POINTER_SIZE / BITS_PER_UNIT,
2196 gnat_node)
2197 : NULL_TREE);
2198
2199 tree size_to_malloc
2200 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
2201
2202 tree malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
2203
2204 if (aligning_type)
2205 {
2206 /* Latch malloc's return value and get a pointer to the aligning field
2207 first. */
2208 tree storage_ptr = gnat_protect_expr (malloc_ptr);
2209
2210 tree aligning_record_addr
2211 = convert (build_pointer_type (aligning_type), storage_ptr);
2212
2213 tree aligning_record
2214 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2215
2216 tree aligning_field
2217 = build_component_ref (aligning_record, TYPE_FIELDS (aligning_type),
2218 false);
2219
2220 tree aligning_field_addr
2221 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2222
2223 /* Then arrange to store the allocator's return value ahead
2224 and return. */
2225 tree storage_ptr_slot_addr
2226 = build_binary_op (POINTER_PLUS_EXPR, ptr_type_node,
2227 convert (ptr_type_node, aligning_field_addr),
2228 size_int (-(HOST_WIDE_INT) POINTER_SIZE
2229 / BITS_PER_UNIT));
2230
2231 tree storage_ptr_slot
2232 = build_unary_op (INDIRECT_REF, NULL_TREE,
2233 convert (build_pointer_type (ptr_type_node),
2234 storage_ptr_slot_addr));
2235
2236 return
2237 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2238 build_binary_op (INIT_EXPR, NULL_TREE,
2239 storage_ptr_slot, storage_ptr),
2240 aligning_field_addr);
2241 }
2242 else
2243 return malloc_ptr;
2244 }
2245
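/* Illustration only: a portable C sketch of the super-alignment technique
   used by maybe_wrap_malloc above, written with raw pointer arithmetic
   instead of an aligning record type.  ALIGN is assumed to be a power of
   two; the function name is hypothetical and <stdlib.h>/<stdint.h> are
   assumed to be visible.  */

static inline void *
super_aligned_malloc_example (size_t size, size_t align)
{
  /* Over-allocate so that an ALIGN-aligned block of SIZE bytes fits
     after a slot holding malloc's return value.  */
  void *raw = malloc (size + align + sizeof (void *));
  if (!raw)
    return NULL;

  uintptr_t aligned = ((uintptr_t) raw + sizeof (void *) + align - 1)
		      & ~(uintptr_t) (align - 1);

  /* Stash malloc's return value just in front of the data block, which
     is where the matching release routine expects to find it.  */
  ((void **) aligned)[-1] = raw;
  return (void *) aligned;
}
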
2246 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2247 designated by DATA_PTR using the __gnat_free entry point. */
2248
2249 static inline tree
2250 maybe_wrap_free (tree data_ptr, tree data_type)
2251 {
2252 /* In the regular alignment case, we pass the data pointer straight to free.
2253      In the super-aligned case, we need to retrieve the initial allocator
2254 return value, stored in front of the data block at allocation time. */
2255
2256 unsigned int data_align = TYPE_ALIGN (data_type);
2257 unsigned int system_allocator_alignment
2258 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2259
2260 tree free_ptr;
2261
2262 if (data_align > system_allocator_alignment)
2263 {
2264 /* DATA_FRONT_PTR (void *)
2265 = (void *)DATA_PTR - (void *)sizeof (void *)) */
2266 tree data_front_ptr
2267 = build_binary_op
2268 (POINTER_PLUS_EXPR, ptr_type_node,
2269 convert (ptr_type_node, data_ptr),
2270 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2271
2272 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2273 free_ptr
2274 = build_unary_op
2275 (INDIRECT_REF, NULL_TREE,
2276 convert (build_pointer_type (ptr_type_node), data_front_ptr));
2277 }
2278 else
2279 free_ptr = data_ptr;
2280
2281 return build_call_n_expr (free_decl, 1, free_ptr);
2282 }
2283
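/* Illustration only: the matching release for the allocation sketch above,
   mirroring maybe_wrap_free: the original malloc value is recovered from
   the slot stored just in front of the data block and handed to free.  */

static inline void
super_aligned_free_example (void *data_ptr)
{
  if (data_ptr)
    free (((void **) data_ptr)[-1]);
}
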
2284 /* Build a GCC tree to call an allocation or deallocation function.
2285 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2286 generate an allocator.
2287
2288 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2289 object type, used to determine the to-be-honored address alignment.
2290 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2291 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2292 to provide an error location for restriction violation messages. */
2293
2294 tree
2295 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2296 Entity_Id gnat_proc, Entity_Id gnat_pool,
2297 Node_Id gnat_node)
2298 {
2299   /* Is there an explicit procedure to call?  If so, it is assumed to
2300      deal with the type alignment constraints.  */
2301 if (Present (gnat_proc))
2302 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2303 gnat_proc, gnat_pool);
2304
2305 /* Otherwise, object to "free" or "malloc" with possible special processing
2306 for alignments stricter than what the default allocator honors. */
2307 else if (gnu_obj)
2308 return maybe_wrap_free (gnu_obj, gnu_type);
2309 else
2310 {
2311       /* Assert that we can no longer be called with this special pool.  */
2312 gcc_assert (gnat_pool != -1);
2313
2314 /* Check that we aren't violating the associated restriction. */
2315 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2316 {
2317 Check_No_Implicit_Heap_Alloc (gnat_node);
2318 if (Has_Task (Etype (gnat_node)))
2319 Check_No_Implicit_Task_Alloc (gnat_node);
2320 if (Has_Protected (Etype (gnat_node)))
2321 Check_No_Implicit_Protected_Alloc (gnat_node);
2322 }
2323 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2324 }
2325 }
2326
2327 /* Build a GCC tree that corresponds to allocating an object of TYPE whose
2328 initial value is INIT, if INIT is nonzero. Convert the expression to
2329 RESULT_TYPE, which must be some pointer type, and return the result.
2330
2331 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2332 the storage pool to use. GNAT_NODE is used to provide an error
2333 location for restriction violation messages. If IGNORE_INIT_TYPE is
2334 true, ignore the type of INIT for the purpose of determining the size;
2335 this will cause the maximum size to be allocated if TYPE is of
2336 self-referential size. */
2337
2338 tree
2339 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2340 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2341 {
2342 tree size, storage, storage_deref, storage_init;
2343
2344 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
2345 if (init && TREE_CODE (init) == NULL_EXPR)
2346 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2347
2348 /* If we are just annotating types, also return a NULL_EXPR. */
2349 else if (type_annotate_only)
2350 return build1 (NULL_EXPR, result_type,
2351 build_call_raise (CE_Range_Check_Failed, gnat_node,
2352 N_Raise_Constraint_Error));
2353
2354 /* If the initializer, if present, is a COND_EXPR, deal with each branch. */
2355 else if (init && TREE_CODE (init) == COND_EXPR)
2356 return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2357 build_allocator (type, TREE_OPERAND (init, 1), result_type,
2358 gnat_proc, gnat_pool, gnat_node,
2359 ignore_init_type),
2360 build_allocator (type, TREE_OPERAND (init, 2), result_type,
2361 gnat_proc, gnat_pool, gnat_node,
2362 ignore_init_type));
2363
2364 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2365 sizes of the object and its template. Allocate the whole thing and
2366 fill in the parts that are known. */
2367 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2368 {
2369 tree storage_type
2370 = build_unc_object_type_from_ptr (result_type, type,
2371 get_identifier ("ALLOC"), false);
2372 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2373 tree storage_ptr_type = build_pointer_type (storage_type);
2374
2375 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2376 init);
2377
2378 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2379 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2380 size = size_int (-1);
2381
2382 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2383 gnat_proc, gnat_pool, gnat_node);
2384 storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2385 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2386 TREE_THIS_NOTRAP (storage_deref) = 1;
2387
2388 /* If there is an initializing expression, then make a constructor for
2389 the entire object including the bounds and copy it into the object.
2390 If there is no initializing expression, just set the bounds. */
2391 if (init)
2392 {
2393 vec<constructor_elt, va_gc> *v;
2394 vec_alloc (v, 2);
2395
2396 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2397 build_template (template_type, type, init));
2398 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2399 init);
2400 storage_init
2401 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
2402 gnat_build_constructor (storage_type, v));
2403 }
2404 else
2405 storage_init
2406 = build_binary_op (INIT_EXPR, NULL_TREE,
2407 build_component_ref (storage_deref,
2408 TYPE_FIELDS (storage_type),
2409 false),
2410 build_template (template_type, type, NULL_TREE));
2411
2412 return build2 (COMPOUND_EXPR, result_type,
2413 storage_init, convert (result_type, storage));
2414 }
2415
2416 size = TYPE_SIZE_UNIT (type);
2417
2418 /* If we have an initializing expression, see if its size is simpler
2419 than the size from the type. */
2420 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2421 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2422 || CONTAINS_PLACEHOLDER_P (size)))
2423 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2424
2425 /* If the size is still self-referential, reference the initializing
2426 expression, if it is present. If not, this must have been a call
2427 to allocate a library-level object, in which case we just use the
2428 maximum size. */
2429 if (!ignore_init_type && init)
2430 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, init);
2431 else if (CONTAINS_PLACEHOLDER_P (size))
2432 size = max_size (size, true);
2433
2434 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2435 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2436 size = size_int (-1);
2437
2438 storage = convert (result_type,
2439 build_call_alloc_dealloc (NULL_TREE, size, type,
2440 gnat_proc, gnat_pool,
2441 gnat_node));
2442
2443 /* If we have an initial value, protect the new address, assign the value
2444 and return the address with a COMPOUND_EXPR. */
2445 if (init)
2446 {
2447 storage = gnat_protect_expr (storage);
2448 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2449 TREE_THIS_NOTRAP (storage_deref) = 1;
2450 storage_init
2451 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
2452 return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
2453 }
2454
2455 return storage;
2456 }
2457
2458 /* Indicate that we need to take the address of T and that it therefore
2459 should not be allocated in a register. Return true if successful. */
2460
2461 bool
2462 gnat_mark_addressable (tree t)
2463 {
2464 while (true)
2465 switch (TREE_CODE (t))
2466 {
2467 case ADDR_EXPR:
2468 case COMPONENT_REF:
2469 case ARRAY_REF:
2470 case ARRAY_RANGE_REF:
2471 case REALPART_EXPR:
2472 case IMAGPART_EXPR:
2473 case VIEW_CONVERT_EXPR:
2474 case NON_LVALUE_EXPR:
2475 CASE_CONVERT:
2476 t = TREE_OPERAND (t, 0);
2477 break;
2478
2479 case COMPOUND_EXPR:
2480 t = TREE_OPERAND (t, 1);
2481 break;
2482
2483 case CONSTRUCTOR:
2484 TREE_ADDRESSABLE (t) = 1;
2485 return true;
2486
2487 case VAR_DECL:
2488 case PARM_DECL:
2489 case RESULT_DECL:
2490 TREE_ADDRESSABLE (t) = 1;
2491 return true;
2492
2493 case FUNCTION_DECL:
2494 TREE_ADDRESSABLE (t) = 1;
2495 return true;
2496
2497 case CONST_DECL:
2498 return DECL_CONST_CORRESPONDING_VAR (t)
2499 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2500
2501 default:
2502 return true;
2503 }
2504 }
2505
2506 /* Return true if EXP is a stable expression for the purpose of the functions
2507 below and, therefore, can be returned unmodified by them. We accept things
2508 that are actual constants or that have already been handled. */
2509
2510 static bool
2511 gnat_stable_expr_p (tree exp)
2512 {
2513 enum tree_code code = TREE_CODE (exp);
2514 return TREE_CONSTANT (exp) || code == NULL_EXPR || code == SAVE_EXPR;
2515 }
2516
2517 /* Save EXP for later use or reuse. This is equivalent to save_expr in tree.c
2518 but we know how to handle our own nodes. */
2519
2520 tree
2521 gnat_save_expr (tree exp)
2522 {
2523 tree type = TREE_TYPE (exp);
2524 enum tree_code code = TREE_CODE (exp);
2525
2526 if (gnat_stable_expr_p (exp))
2527 return exp;
2528
2529 if (code == UNCONSTRAINED_ARRAY_REF)
2530 {
2531 tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2532 TREE_READONLY (t) = TYPE_READONLY (type);
2533 return t;
2534 }
2535
2536 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2537 This may be more efficient, but will also allow us to more easily find
2538 the match for the PLACEHOLDER_EXPR. */
2539 if (code == COMPONENT_REF
2540 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2541 return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
2542 TREE_OPERAND (exp, 1), NULL_TREE);
2543
2544 return save_expr (exp);
2545 }
2546
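/* Conceptually, a SAVE_EXPR is the tree-level analogue of binding a
   computation to a temporary so that later uses do not reevaluate it,
   roughly this C idiom (hypothetical names, illustration only):

     int tmp = compute ();    evaluated exactly once
     use (tmp);               every later use reads the temporary
     use (tmp);

   gnat_save_expr above differs from save_expr only in handling the
   GNAT-specific nodes before delegating to it.  */
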
2547 /* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
2548 is optimized under the assumption that EXP's value doesn't change before
2549 its subsequent reuse(s) except through its potential reevaluation. */
2550
2551 tree
2552 gnat_protect_expr (tree exp)
2553 {
2554 tree type = TREE_TYPE (exp);
2555 enum tree_code code = TREE_CODE (exp);
2556
2557 if (gnat_stable_expr_p (exp))
2558 return exp;
2559
2560 /* If EXP has no side effects, we theoretically don't need to do anything.
2561 However, we may be recursively passed more and more complex expressions
2562 involving checks which will be reused multiple times and eventually be
2563 unshared for gimplification; in order to avoid a complexity explosion
2564 at that point, we protect any expressions more complex than a simple
2565 arithmetic expression. */
2566 if (!TREE_SIDE_EFFECTS (exp))
2567 {
2568 tree inner = skip_simple_arithmetic (exp);
2569 if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2570 return exp;
2571 }
2572
2573 /* If this is a conversion, protect what's inside the conversion. */
2574 if (code == NON_LVALUE_EXPR
2575 || CONVERT_EXPR_CODE_P (code)
2576 || code == VIEW_CONVERT_EXPR)
2577 return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2578
2579 /* If we're indirectly referencing something, we only need to protect the
2580 address since the data itself can't change in these situations. */
2581 if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2582 {
2583 tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2584 TREE_READONLY (t) = TYPE_READONLY (type);
2585 return t;
2586 }
2587
2588 /* Likewise if we're indirectly referencing part of something. */
2589 if (code == COMPONENT_REF
2590 && TREE_CODE (TREE_OPERAND (exp, 0)) == INDIRECT_REF)
2591 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2592 TREE_OPERAND (exp, 1), NULL_TREE);
2593
2594 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2595 This may be more efficient, but will also allow us to more easily find
2596 the match for the PLACEHOLDER_EXPR. */
2597 if (code == COMPONENT_REF
2598 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2599 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2600 TREE_OPERAND (exp, 1), NULL_TREE);
2601
2602 /* If this is a fat pointer or a scalar, just make a SAVE_EXPR. Likewise
2603 for a CALL_EXPR as large objects are returned via invisible reference
2604 in most ABIs so the temporary will directly be filled by the callee. */
2605 if (TYPE_IS_FAT_POINTER_P (type)
2606 || !AGGREGATE_TYPE_P (type)
2607 || code == CALL_EXPR)
2608 return save_expr (exp);
2609
2610   /* Otherwise, take the address of EXP, protect it and dereference the result.  */
2611 return
2612 build_unary_op (INDIRECT_REF, type,
2613 save_expr (build_unary_op (ADDR_EXPR, NULL_TREE, exp)));
2614 }
2615
2616 /* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
2617 argument to force evaluation of everything. */
2618
2619 static tree
2620 gnat_stabilize_reference_1 (tree e, void *data)
2621 {
2622 const bool force = *(bool *)data;
2623 enum tree_code code = TREE_CODE (e);
2624 tree type = TREE_TYPE (e);
2625 tree result;
2626
2627 if (gnat_stable_expr_p (e))
2628 return e;
2629
2630 switch (TREE_CODE_CLASS (code))
2631 {
2632 case tcc_exceptional:
2633 case tcc_declaration:
2634 case tcc_comparison:
2635 case tcc_expression:
2636 case tcc_reference:
2637 case tcc_vl_exp:
2638 /* If this is a COMPONENT_REF of a fat pointer, save the entire
2639 fat pointer. This may be more efficient, but will also allow
2640 us to more easily find the match for the PLACEHOLDER_EXPR. */
2641 if (code == COMPONENT_REF
2642 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2643 result
2644 = build3 (code, type,
2645 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2646 TREE_OPERAND (e, 1), NULL_TREE);
2647 /* If the expression has side-effects, then encase it in a SAVE_EXPR
2648 so that it will only be evaluated once. */
2649 /* The tcc_reference and tcc_comparison classes could be handled as
2650 below, but it is generally faster to only evaluate them once. */
2651 else if (TREE_SIDE_EFFECTS (e) || force)
2652 return save_expr (e);
2653 else
2654 return e;
2655 break;
2656
2657 case tcc_binary:
2658 /* Recursively stabilize each operand. */
2659 result
2660 = build2 (code, type,
2661 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2662 gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), data));
2663 break;
2664
2665 case tcc_unary:
2666 /* Recursively stabilize each operand. */
2667 result
2668 = build1 (code, type,
2669 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data));
2670 break;
2671
2672 default:
2673 gcc_unreachable ();
2674 }
2675
2676 TREE_READONLY (result) = TREE_READONLY (e);
2677 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
2678 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2679
2680 return result;
2681 }
2682
2683 /* This is equivalent to stabilize_reference in tree.c but we know how to
2684 handle our own nodes and we take extra arguments. FORCE says whether to
2685 force evaluation of everything in REF. INIT is set to the first arm of
2686 a COMPOUND_EXPR present in REF, if any. */
2687
2688 tree
2689 gnat_stabilize_reference (tree ref, bool force, tree *init)
2690 {
2691 return
2692 gnat_rewrite_reference (ref, gnat_stabilize_reference_1, &force, init);
2693 }
2694
2695 /* Rewrite reference REF and call FUNC on each expression within REF in the
2696 process. DATA is passed unmodified to FUNC. INIT is set to the first
2697 arm of a COMPOUND_EXPR present in REF, if any. */
2698
2699 tree
2700 gnat_rewrite_reference (tree ref, rewrite_fn func, void *data, tree *init)
2701 {
2702 tree type = TREE_TYPE (ref);
2703 enum tree_code code = TREE_CODE (ref);
2704 tree result;
2705
2706 switch (code)
2707 {
2708 case CONST_DECL:
2709 case VAR_DECL:
2710 case PARM_DECL:
2711 case RESULT_DECL:
2712 /* No action is needed in this case. */
2713 return ref;
2714
2715 CASE_CONVERT:
2716 case FLOAT_EXPR:
2717 case FIX_TRUNC_EXPR:
2718 case REALPART_EXPR:
2719 case IMAGPART_EXPR:
2720 case VIEW_CONVERT_EXPR:
2721 result
2722 = build1 (code, type,
2723 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2724 init));
2725 break;
2726
2727 case INDIRECT_REF:
2728 case UNCONSTRAINED_ARRAY_REF:
2729 result = build1 (code, type, func (TREE_OPERAND (ref, 0), data));
2730 break;
2731
2732 case COMPONENT_REF:
2733 result = build3 (COMPONENT_REF, type,
2734 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2735 data, init),
2736 TREE_OPERAND (ref, 1), NULL_TREE);
2737 break;
2738
2739 case BIT_FIELD_REF:
2740 result = build3 (BIT_FIELD_REF, type,
2741 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2742 data, init),
2743 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
2744 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
2745 break;
2746
2747 case ARRAY_REF:
2748 case ARRAY_RANGE_REF:
2749 result
2750 = build4 (code, type,
2751 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2752 init),
2753 func (TREE_OPERAND (ref, 1), data),
2754 TREE_OPERAND (ref, 2), NULL_TREE);
2755 break;
2756
2757 case COMPOUND_EXPR:
2758 gcc_assert (!*init);
2759 *init = TREE_OPERAND (ref, 0);
2760 /* We expect only the pattern built in Call_to_gnu. */
2761 gcc_assert (DECL_P (TREE_OPERAND (ref, 1))
2762 || (TREE_CODE (TREE_OPERAND (ref, 1)) == COMPONENT_REF
2763 && DECL_P (TREE_OPERAND (TREE_OPERAND (ref, 1), 0))));
2764 return TREE_OPERAND (ref, 1);
2765
2766 case CALL_EXPR:
2767 {
2768 /* This can only be an atomic load. */
2769 gcc_assert (call_is_atomic_load (ref));
2770
2771 /* An atomic load is an INDIRECT_REF of its first argument. */
2772 tree t = CALL_EXPR_ARG (ref, 0);
2773 if (TREE_CODE (t) == NOP_EXPR)
2774 t = TREE_OPERAND (t, 0);
2775 if (TREE_CODE (t) == ADDR_EXPR)
2776 t = build1 (ADDR_EXPR, TREE_TYPE (t),
2777 gnat_rewrite_reference (TREE_OPERAND (t, 0), func, data,
2778 init));
2779 else
2780 t = func (t, data);
2781 t = fold_convert (TREE_TYPE (CALL_EXPR_ARG (ref, 0)), t);
2782
2783 result = build_call_expr (TREE_OPERAND (CALL_EXPR_FN (ref), 0), 2,
2784 t, CALL_EXPR_ARG (ref, 1));
2785 }
2786 break;
2787
2788 case ERROR_MARK:
2789 case NULL_EXPR:
2790 return ref;
2791
2792 default:
2793 gcc_unreachable ();
2794 }
2795
2796 /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
2797 may not be sustained across some paths, such as the way via build1 for
2798 INDIRECT_REF. We reset those flags here in the general case, which is
2799 consistent with the GCC version of this routine.
2800
2801 Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2802 paths introduce side-effects where there was none initially (e.g. if a
2803 SAVE_EXPR is built) and we also want to keep track of that. */
2804 TREE_READONLY (result) = TREE_READONLY (ref);
2805 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
2806 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2807
2808 if (code == INDIRECT_REF
2809 || code == UNCONSTRAINED_ARRAY_REF
2810 || code == ARRAY_REF
2811 || code == ARRAY_RANGE_REF)
2812 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2813
2814 return result;
2815 }
2816
2817 /* This is equivalent to get_inner_reference in expr.c but it returns the
2818 ultimate containing object only if the reference (lvalue) is constant,
2819 i.e. if it doesn't depend on the context in which it is evaluated. */
2820
2821 tree
2822 get_inner_constant_reference (tree exp)
2823 {
2824 while (true)
2825 {
2826 switch (TREE_CODE (exp))
2827 {
2828 case BIT_FIELD_REF:
2829 break;
2830
2831 case COMPONENT_REF:
2832 if (!TREE_CONSTANT (DECL_FIELD_OFFSET (TREE_OPERAND (exp, 1))))
2833 return NULL_TREE;
2834 break;
2835
2836 case ARRAY_REF:
2837 case ARRAY_RANGE_REF:
2838 {
2839 if (TREE_OPERAND (exp, 2))
2840 return NULL_TREE;
2841
2842 tree array_type = TREE_TYPE (TREE_OPERAND (exp, 0));
2843 if (!TREE_CONSTANT (TREE_OPERAND (exp, 1))
2844 || !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
2845 || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (array_type))))
2846 return NULL_TREE;
2847 }
2848 break;
2849
2850 case REALPART_EXPR:
2851 case IMAGPART_EXPR:
2852 case VIEW_CONVERT_EXPR:
2853 break;
2854
2855 default:
2856 goto done;
2857 }
2858
2859 exp = TREE_OPERAND (exp, 0);
2860 }
2861
2862 done:
2863 return exp;
2864 }
2865
2866 /* Return true if EXPR is the addition or the subtraction of a constant and,
2867 if so, set *ADD to the addend, *CST to the constant and *MINUS_P to true
2868 if this is a subtraction. */
2869
2870 bool
2871 is_simple_additive_expression (tree expr, tree *add, tree *cst, bool *minus_p)
2872 {
2873 /* Skip overflow checks. */
2874 if (TREE_CODE (expr) == COND_EXPR
2875 && TREE_CODE (COND_EXPR_THEN (expr)) == COMPOUND_EXPR
2876 && TREE_CODE (TREE_OPERAND (COND_EXPR_THEN (expr), 0)) == CALL_EXPR
2877 && get_callee_fndecl (TREE_OPERAND (COND_EXPR_THEN (expr), 0))
2878 == gnat_raise_decls[CE_Overflow_Check_Failed])
2879 expr = COND_EXPR_ELSE (expr);
2880
2881 if (TREE_CODE (expr) == PLUS_EXPR)
2882 {
2883 if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
2884 {
2885 *add = TREE_OPERAND (expr, 1);
2886 *cst = TREE_OPERAND (expr, 0);
2887 *minus_p = false;
2888 return true;
2889 }
2890 else if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
2891 {
2892 *add = TREE_OPERAND (expr, 0);
2893 *cst = TREE_OPERAND (expr, 1);
2894 *minus_p = false;
2895 return true;
2896 }
2897 }
2898 else if (TREE_CODE (expr) == MINUS_EXPR)
2899 {
2900 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
2901 {
2902 *add = TREE_OPERAND (expr, 0);
2903 *cst = TREE_OPERAND (expr, 1);
2904 *minus_p = true;
2905 return true;
2906 }
2907 }
2908
2909 return false;
2910 }
2911
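/* Example (sketch): for EXPR of the form "x + 4", the function above sets
   *ADD to "x", *CST to "4" and *MINUS_P to false; for "x - 4" it sets
   *MINUS_P to true.  An overflow check wrapped around EXPR, as built for
   checked arithmetic, is looked through first.  */
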
2912 /* If EXPR is an expression that is invariant in the current function, in the
2913 sense that it can be evaluated anywhere in the function and any number of
2914 times, return EXPR or an equivalent expression. Otherwise return NULL. */
2915
2916 tree
2917 gnat_invariant_expr (tree expr)
2918 {
2919 const tree type = TREE_TYPE (expr);
2920 tree add, cst;
2921 bool minus_p;
2922
2923 expr = remove_conversions (expr, false);
2924
2925 /* Look through temporaries created to capture values. */
2926 while ((TREE_CODE (expr) == CONST_DECL
2927 || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
2928 && decl_function_context (expr) == current_function_decl
2929 && DECL_INITIAL (expr))
2930 {
2931 expr = DECL_INITIAL (expr);
2932 /* Look into CONSTRUCTORs built to initialize padded types. */
2933 if (TYPE_IS_PADDING_P (TREE_TYPE (expr)))
2934 expr = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (expr))), expr);
2935 expr = remove_conversions (expr, false);
2936 }
2937
2938 /* We are only interested in scalar types at the moment and, even if we may
2939 have gone through padding types in the above loop, we must be back to a
2940 scalar value at this point. */
2941 if (AGGREGATE_TYPE_P (TREE_TYPE (expr)))
2942 return NULL_TREE;
2943
2944 if (TREE_CONSTANT (expr))
2945 return fold_convert (type, expr);
2946
2947 /* Deal with addition or subtraction of constants. */
2948 if (is_simple_additive_expression (expr, &add, &cst, &minus_p))
2949 {
2950 add = gnat_invariant_expr (add);
2951 if (add)
2952 return
2953 fold_build2 (minus_p ? MINUS_EXPR : PLUS_EXPR, type,
2954 fold_convert (type, add), fold_convert (type, cst));
2955 else
2956 return NULL_TREE;
2957 }
2958
2959 bool invariant_p = false;
2960 tree t = expr;
2961
2962 while (true)
2963 {
2964 switch (TREE_CODE (t))
2965 {
2966 case COMPONENT_REF:
2967 invariant_p |= DECL_INVARIANT_P (TREE_OPERAND (t, 1));
2968 break;
2969
2970 case ARRAY_REF:
2971 case ARRAY_RANGE_REF:
2972 if (!TREE_CONSTANT (TREE_OPERAND (t, 1)) || TREE_OPERAND (t, 2))
2973 return NULL_TREE;
2974 break;
2975
2976 case BIT_FIELD_REF:
2977 case REALPART_EXPR:
2978 case IMAGPART_EXPR:
2979 case VIEW_CONVERT_EXPR:
2980 CASE_CONVERT:
2981 break;
2982
2983 case INDIRECT_REF:
2984 if ((!invariant_p && !TREE_READONLY (t)) || TREE_SIDE_EFFECTS (t))
2985 return NULL_TREE;
2986 invariant_p = false;
2987 break;
2988
2989 default:
2990 goto object;
2991 }
2992
2993 t = TREE_OPERAND (t, 0);
2994 }
2995
2996 object:
2997 if (TREE_SIDE_EFFECTS (t))
2998 return NULL_TREE;
2999
3000 if (TREE_CODE (t) == CONST_DECL
3001 && (DECL_EXTERNAL (t)
3002 || decl_function_context (t) != current_function_decl))
3003 return fold_convert (type, expr);
3004
3005 if (!invariant_p && !TREE_READONLY (t))
3006 return NULL_TREE;
3007
3008 if (TREE_CODE (t) == PARM_DECL)
3009 return fold_convert (type, expr);
3010
3011 if (TREE_CODE (t) == VAR_DECL
3012 && (DECL_EXTERNAL (t)
3013 || decl_function_context (t) != current_function_decl))
3014 return fold_convert (type, expr);
3015
3016 return NULL_TREE;
3017 }
3018