1 /****************************************************************************
2  *                                                                          *
3  *                         GNAT COMPILER COMPONENTS                         *
4  *                                                                          *
5  *                               U T I L S 2                                *
6  *                                                                          *
7  *                          C Implementation File                           *
8  *                                                                          *
9  *          Copyright (C) 1992-2016, Free Software Foundation, Inc.         *
10  *                                                                          *
11  * GNAT is free software;  you can  redistribute it  and/or modify it under *
12  * terms of the  GNU General Public License as published  by the Free Soft- *
13  * ware  Foundation;  either version 3,  or (at your option) any later ver- *
14  * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
15  * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
16  * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
17  * for  more details.  You should have received a copy of the GNU General   *
18  * Public License along with GCC; see the file COPYING3.  If not see        *
19  * <http://www.gnu.org/licenses/>.                                          *
20  *                                                                          *
21  * GNAT was originally developed  by the GNAT team at  New York University. *
22  * Extensive contributions were provided by Ada Core Technologies Inc.      *
23  *                                                                          *
24  ****************************************************************************/
25 
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "vec.h"
31 #include "alias.h"
32 #include "tree.h"
33 #include "inchash.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
36 #include "stringpool.h"
37 #include "varasm.h"
38 #include "flags.h"
39 #include "toplev.h"
40 #include "ggc.h"
41 #include "tree-inline.h"
42 
43 #include "ada.h"
44 #include "types.h"
45 #include "atree.h"
46 #include "elists.h"
47 #include "namet.h"
48 #include "nlists.h"
49 #include "snames.h"
50 #include "stringt.h"
51 #include "uintp.h"
52 #include "fe.h"
53 #include "sinfo.h"
54 #include "einfo.h"
55 #include "ada-tree.h"
56 #include "gigi.h"
57 
58 /* Return the base type of TYPE.  */
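/* An illustrative (non-normative) example: for an Ada declaration such as
      subtype Small is Integer range 1 .. 10;
   the tree built for Small is an INTEGER_TYPE whose TREE_TYPE is expected
   to point to the type built for Integer, so the loop below peels off the
   subtype and returns the latter.  */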
59 
60 tree
61 get_base_type (tree type)
62 {
63   if (TREE_CODE (type) == RECORD_TYPE
64       && TYPE_JUSTIFIED_MODULAR_P (type))
65     type = TREE_TYPE (TYPE_FIELDS (type));
66 
67   while (TREE_TYPE (type)
68 	 && (TREE_CODE (type) == INTEGER_TYPE
69 	     || TREE_CODE (type) == REAL_TYPE))
70     type = TREE_TYPE (type);
71 
72   return type;
73 }
74 
75 /* EXP is a GCC tree representing an address.  See if we can find how strictly
76    the object at this address is aligned and, if so, return the alignment of
77    the object in bits.  Otherwise return 0.  */
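/* For instance, taking the address of an object whose type has a 4-byte
   alignment is expected to yield 32 here, whereas nothing being known about
   an expression yields 0.  This is only an illustration of the contract,
   not a description of every case handled below.  */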
78 
79 unsigned int
80 known_alignment (tree exp)
81 {
82   unsigned int this_alignment;
83   unsigned int lhs, rhs;
84 
85   switch (TREE_CODE (exp))
86     {
87     CASE_CONVERT:
88     case VIEW_CONVERT_EXPR:
89     case NON_LVALUE_EXPR:
90       /* Conversions between pointers and integers don't change the alignment
91 	 of the underlying object.  */
92       this_alignment = known_alignment (TREE_OPERAND (exp, 0));
93       break;
94 
95     case COMPOUND_EXPR:
96       /* The value of a COMPOUND_EXPR is that of its second operand.  */
97       this_alignment = known_alignment (TREE_OPERAND (exp, 1));
98       break;
99 
100     case PLUS_EXPR:
101     case MINUS_EXPR:
102       /* If two addresses are added, the alignment of the result is the
103 	 minimum of the two alignments.  */
104       lhs = known_alignment (TREE_OPERAND (exp, 0));
105       rhs = known_alignment (TREE_OPERAND (exp, 1));
106       this_alignment = MIN (lhs, rhs);
107       break;
108 
109     case POINTER_PLUS_EXPR:
110       /* If this is the pattern built for aligning types, decode it.  */
111       if (TREE_CODE (TREE_OPERAND (exp, 1)) == BIT_AND_EXPR
112 	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)) == NEGATE_EXPR)
113 	{
114 	  tree op = TREE_OPERAND (TREE_OPERAND (exp, 1), 1);
115 	  return
116 	    known_alignment (fold_build1 (BIT_NOT_EXPR, TREE_TYPE (op), op));
117 	}
118 
119       /* If we don't know the alignment of the offset, we assume that
120 	 of the base.  */
121       lhs = known_alignment (TREE_OPERAND (exp, 0));
122       rhs = known_alignment (TREE_OPERAND (exp, 1));
123 
124       if (rhs == 0)
125 	this_alignment = lhs;
126       else
127 	this_alignment = MIN (lhs, rhs);
128       break;
129 
130     case COND_EXPR:
131       /* If there is a choice between two values, use the smaller one.  */
132       lhs = known_alignment (TREE_OPERAND (exp, 1));
133       rhs = known_alignment (TREE_OPERAND (exp, 2));
134       this_alignment = MIN (lhs, rhs);
135       break;
136 
137     case INTEGER_CST:
138       {
139 	unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
140 	/* The (c & -c) part isolates the lowest set bit in the constant, but
141 	   the constant is originally in bytes, not bits.  */
142 	this_alignment = (c & -c) * BITS_PER_UNIT;
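	/* E.g. a (purely illustrative) byte offset of 24 = 0b11000 gives
	   24 & -24 == 8, i.e. an alignment of 8 * BITS_PER_UNIT bits.  */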
143       }
144       break;
145 
146     case MULT_EXPR:
147       /* If we know the alignment of just one side, use it.  Otherwise,
148 	 use the product of the alignments.  */
149       lhs = known_alignment (TREE_OPERAND (exp, 0));
150       rhs = known_alignment (TREE_OPERAND (exp, 1));
151 
152       if (lhs == 0)
153 	this_alignment = rhs;
154       else if (rhs == 0)
155 	this_alignment = lhs;
156       else
157 	this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
158       break;
159 
160     case BIT_AND_EXPR:
161       /* A bit-and expression is as aligned as the maximum alignment of the
162 	 operands.  We typically get here for a complex lhs and a constant
163 	 negative power of two on the rhs to force an explicit alignment, so
164 	 don't bother looking at the lhs.  */
165       this_alignment = known_alignment (TREE_OPERAND (exp, 1));
166       break;
167 
168     case ADDR_EXPR:
169       this_alignment = expr_align (TREE_OPERAND (exp, 0));
170       break;
171 
172     case CALL_EXPR:
173       {
174 	tree t = maybe_inline_call_in_expr (exp);
175 	if (t)
176 	  return known_alignment (t);
177       }
178 
179       /* ... fall through ... */
180 
181     default:
182       /* For other pointer expressions, we assume that the pointed-to object
183 	 is at least as aligned as the pointed-to type.  Beware that we can
184 	 have a dummy type here (e.g. a Taft Amendment type), for which the
185 	 alignment is meaningless and should be ignored.  */
186       if (POINTER_TYPE_P (TREE_TYPE (exp))
187 	  && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
188 	this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
189       else
190 	this_alignment = 0;
191       break;
192     }
193 
194   return this_alignment;
195 }
196 
197 /* We have a comparison or assignment operation on two types, T1 and T2, which
198    are either both array types or both record types.  T1 is assumed to be for
199    the left hand side operand, and T2 for the right hand side.  Return the
200    type that both operands should be converted to for the operation, if any.
201    Otherwise return zero.  */
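/* A hedged illustration: for an assignment between two constrained array
   types where only the left-hand side has a compile-time constant size,
   that constant-sized type is returned below, so that the copy can never
   write more bytes than the target can hold.  */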
202 
203 static tree
204 find_common_type (tree t1, tree t2)
205 {
206   /* ??? As of today, various constructs lead to here with types of different
207      sizes even when both are constant (e.g. tagged types, packable vs regular
208      component types, padded vs unpadded types, ...).  While some of these
209      would better be handled upstream (types should be made consistent before
210      calling into build_binary_op), some others are really expected and we
211      have to be careful.  */
212 
213   const bool variable_record_on_lhs
214     = (TREE_CODE (t1) == RECORD_TYPE
215        && TREE_CODE (t2) == RECORD_TYPE
216        && get_variant_part (t1)
217        && !get_variant_part (t2));
218 
219   const bool variable_array_on_lhs
220     = (TREE_CODE (t1) == ARRAY_TYPE
221        && TREE_CODE (t2) == ARRAY_TYPE
222        && !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)))
223        && TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t2))));
224 
225   /* We must avoid writing more than what the target can hold if this is for
226      an assignment; the case of tagged types is handled in build_binary_op.
227      So we use the lhs type if it is known to be smaller or of constant size
228      while the rhs type is not, whatever the modes.  We also force t1 when
229      the sizes are equal and constant, to minimize occurrences of view
230      conversions on the lhs of an assignment, except for types with a variable
231      part on the lhs but not on the rhs, so as to make the conversion simpler.  */
232   if (TREE_CONSTANT (TYPE_SIZE (t1))
233       && (!TREE_CONSTANT (TYPE_SIZE (t2))
234 	  || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
235 	  || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
236 	      && !variable_record_on_lhs
237 	      && !variable_array_on_lhs)))
238     return t1;
239 
240   /* Otherwise, if the lhs type is non-BLKmode, use it, except for the case of
241      a non-BLKmode rhs and array types with a variable part on the lhs but not
242      on the rhs to make sure the conversion is preserved during gimplification.
243      Note that we know that we will not have any alignment problems since, if
244      we did, the non-BLKmode type could not have been used.  */
245   if (TYPE_MODE (t1) != BLKmode
246       && (TYPE_MODE (t2) == BLKmode || !variable_array_on_lhs))
247     return t1;
248 
249   /* If the rhs type is of constant size, use it whatever the modes.  At
250      this point it is known to be smaller, or of constant size and the
251      lhs type is not.  */
252   if (TREE_CONSTANT (TYPE_SIZE (t2)))
253     return t2;
254 
255   /* Otherwise, if the rhs type is non-BLKmode, use it.  */
256   if (TYPE_MODE (t2) != BLKmode)
257     return t2;
258 
259   /* In this case, both types have variable size and BLKmode.  It's
260      probably best to leave the "type mismatch" because changing it
261      could cause a bad self-referential reference.  */
262   return NULL_TREE;
263 }
264 
265 /* Return an expression tree representing an equality comparison of A1 and A2,
266    two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.
267 
268    Two arrays are equal in one of two ways: (1) if both have zero length in
269    some dimension (not necessarily the same dimension) or (2) if the lengths
270    in each dimension are equal and the data is equal.  We perform the length
271    tests in as efficient a manner as possible.  */
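/* As an illustration (assuming the usual Ada semantics): given
      type Vec is array (Integer range <>) of Integer;
   the comparison A (1 .. 0) = B (5 .. 4) is true because both slices are
   empty, while A (1 .. 3) = B (7 .. 9) first checks that the lengths match
   and then compares the data.  */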
272 
273 static tree
274 compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
275 {
276   tree result = convert (result_type, boolean_true_node);
277   tree a1_is_null = convert (result_type, boolean_false_node);
278   tree a2_is_null = convert (result_type, boolean_false_node);
279   tree t1 = TREE_TYPE (a1);
280   tree t2 = TREE_TYPE (a2);
281   bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
282   bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
283   bool length_zero_p = false;
284 
285   /* If the operands have side-effects, they need to be evaluated only once
286      in spite of the multiple references in the comparison.  */
287   if (a1_side_effects_p)
288     a1 = gnat_protect_expr (a1);
289 
290   if (a2_side_effects_p)
291     a2 = gnat_protect_expr (a2);
292 
293   /* Process each dimension separately and compare the lengths.  If any
294      dimension has a length known to be zero, set LENGTH_ZERO_P to true
295      in order to suppress the comparison of the data at the end.  */
296   while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
297     {
298       tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
299       tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
300       tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
301       tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
302       tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
303 				 size_one_node);
304       tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
305 				 size_one_node);
306       tree comparison, this_a1_is_null, this_a2_is_null;
307 
308       /* If the length of the first array is a constant, swap our operands
309 	 unless the length of the second array is the constant zero.  */
310       if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
311 	{
312 	  tree tem;
313 	  bool btem;
314 
315 	  tem = a1, a1 = a2, a2 = tem;
316 	  tem = t1, t1 = t2, t2 = tem;
317 	  tem = lb1, lb1 = lb2, lb2 = tem;
318 	  tem = ub1, ub1 = ub2, ub2 = tem;
319 	  tem = length1, length1 = length2, length2 = tem;
320 	  tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
321 	  btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
322 	  a2_side_effects_p = btem;
323 	}
324 
325       /* If the length of the second array is the constant zero, we can just
326 	 use the original stored bounds for the first array and see whether
327 	 last < first holds.  */
328       if (integer_zerop (length2))
329 	{
330 	  tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
331 
332 	  length_zero_p = true;
333 
334 	  ub1
335 	    = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
336 	  lb1
337 	    = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
338 
339 	  comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
340 	  comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
341 	  if (EXPR_P (comparison))
342 	    SET_EXPR_LOCATION (comparison, loc);
343 
344 	  this_a1_is_null = comparison;
345 	  this_a2_is_null = convert (result_type, boolean_true_node);
346 	}
347 
348       /* Otherwise, if the length is some other constant value, we know that
349 	 this dimension in the second array cannot be superflat, so we can
350 	 just use its length computed from the actual stored bounds.  */
351       else if (TREE_CODE (length2) == INTEGER_CST)
352 	{
353 	  tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
354 
355 	  ub1
356 	    = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
357 	  lb1
358 	    = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
359 	  /* Note that we know that UB2 and LB2 are constant and hence
360 	     cannot contain a PLACEHOLDER_EXPR.  */
361 	  ub2
362 	    = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
363 	  lb2
364 	    = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
365 
366 	  comparison
367 	    = fold_build2_loc (loc, EQ_EXPR, result_type,
368 			       build_binary_op (MINUS_EXPR, b, ub1, lb1),
369 			       build_binary_op (MINUS_EXPR, b, ub2, lb2));
370 	  comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
371 	  if (EXPR_P (comparison))
372 	    SET_EXPR_LOCATION (comparison, loc);
373 
374 	  this_a1_is_null
375 	    = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
376 
377 	  this_a2_is_null = convert (result_type, boolean_false_node);
378 	}
379 
380       /* Otherwise, compare the computed lengths.  */
381       else
382 	{
383 	  length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
384 	  length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
385 
386 	  comparison
387 	    = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);
388 
389 	  /* If the length expression is of the form (cond ? val : 0), assume
390 	     that cond is equivalent to (length != 0).  That's guaranteed by
391 	     construction of the array types in gnat_to_gnu_entity.  */
392 	  if (TREE_CODE (length1) == COND_EXPR
393 	      && integer_zerop (TREE_OPERAND (length1, 2)))
394 	    this_a1_is_null
395 	      = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
396 	  else
397 	    this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
398 					       length1, size_zero_node);
399 
400 	  /* Likewise for the second array.  */
401 	  if (TREE_CODE (length2) == COND_EXPR
402 	      && integer_zerop (TREE_OPERAND (length2, 2)))
403 	    this_a2_is_null
404 	      = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
405 	  else
406 	    this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
407 					       length2, size_zero_node);
408 	}
409 
410       /* Append expressions for this dimension to the final expressions.  */
411       result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
412 				result, comparison);
413 
414       a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
415 				    this_a1_is_null, a1_is_null);
416 
417       a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
418 				    this_a2_is_null, a2_is_null);
419 
420       t1 = TREE_TYPE (t1);
421       t2 = TREE_TYPE (t2);
422     }
423 
424   /* Unless the length of some dimension is known to be zero, compare the
425      data in the array.  */
426   if (!length_zero_p)
427     {
428       tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
429       tree comparison;
430 
431       if (type)
432 	{
433 	  a1 = convert (type, a1),
434 	  a2 = convert (type, a2);
435 	}
436 
437       comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);
438 
439       result
440 	= build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
441     }
442 
443   /* The result is also true if both sizes are zero.  */
444   result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
445 			    build_binary_op (TRUTH_ANDIF_EXPR, result_type,
446 					     a1_is_null, a2_is_null),
447 			    result);
448 
449   /* If the operands have side-effects, they need to be evaluated before
450      doing the tests above since the place they otherwise would end up
451      being evaluated at run time could be wrong.  */
452   if (a1_side_effects_p)
453     result = build2 (COMPOUND_EXPR, result_type, a1, result);
454 
455   if (a2_side_effects_p)
456     result = build2 (COMPOUND_EXPR, result_type, a2, result);
457 
458   return result;
459 }
460 
461 /* Return an expression tree representing an equality comparison of P1 and P2,
462    two objects of fat pointer type.  The result should be of type RESULT_TYPE.
463 
464    Two fat pointers are equal in one of two ways: (1) if both have a null
465    pointer to the array or (2) if they contain the same couple of pointers.
466    We perform the comparison in as efficient a manner as possible.  */
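/* In other words, the expression built below is expected to boil down to
      P1.array == P2.array && (P1.array == null || P1.bounds == P2.bounds)
   which is also spelled out before the final return statement.  */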
467 
468 static tree
469 compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
470 {
471   tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
472   tree p1_array_is_null, p2_array_is_null;
473 
474   /* If either operand has side-effects, they have to be evaluated only once
475      in spite of the multiple references to the operand in the comparison.  */
476   p1 = gnat_protect_expr (p1);
477   p2 = gnat_protect_expr (p2);
478 
479   /* The constant folder doesn't fold fat pointer types so we do it here.  */
480   if (TREE_CODE (p1) == CONSTRUCTOR)
481     p1_array = CONSTRUCTOR_ELT (p1, 0)->value;
482   else
483     p1_array = build_component_ref (p1, TYPE_FIELDS (TREE_TYPE (p1)), true);
484 
485   p1_array_is_null
486     = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
487 		       fold_convert_loc (loc, TREE_TYPE (p1_array),
488 					 null_pointer_node));
489 
490   if (TREE_CODE (p2) == CONSTRUCTOR)
491     p2_array = CONSTRUCTOR_ELT (p2, 0)->value;
492   else
493     p2_array = build_component_ref (p2, TYPE_FIELDS (TREE_TYPE (p2)), true);
494 
495   p2_array_is_null
496     = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
497 		       fold_convert_loc (loc, TREE_TYPE (p2_array),
498 					 null_pointer_node));
499 
500   /* If one of the pointers to the array is null, just compare the other.  */
501   if (integer_zerop (p1_array))
502     return p2_array_is_null;
503   else if (integer_zerop (p2_array))
504     return p1_array_is_null;
505 
506   /* Otherwise, do the fully-fledged comparison.  */
507   same_array
508     = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);
509 
510   if (TREE_CODE (p1) == CONSTRUCTOR)
511     p1_bounds = CONSTRUCTOR_ELT (p1, 1)->value;
512   else
513     p1_bounds
514       = build_component_ref (p1, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))),
515 			     true);
516 
517   if (TREE_CODE (p2) == CONSTRUCTOR)
518     p2_bounds = CONSTRUCTOR_ELT (p2, 1)->value;
519   else
520     p2_bounds
521       = build_component_ref (p2, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))),
522 			     true);
523 
524   same_bounds
525     = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);
526 
527   /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS).  */
528   return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
529 			  build_binary_op (TRUTH_ORIF_EXPR, result_type,
530 					   p1_array_is_null, same_bounds));
531 }
532 
533 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
534    type TYPE.  We know that TYPE is a modular type with a nonbinary
535    modulus.  */
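/* A worked example (illustrative only): for a modulus of 7, the addition
   LHS + 5 is first rewritten below as the subtraction LHS - 2, because
   7 - 5 = 2; then, since the subtraction may go negative, the modulus is
   added back under a COND_EXPR, e.g. 1 + 5 becomes 1 - 2 = -1 and finally
   -1 + 7 = 6, which is indeed (1 + 5) mod 7.  */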
536 
537 static tree
538 nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
539                              tree rhs)
540 {
541   tree modulus = TYPE_MODULUS (type);
542   unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
543   unsigned int precision;
544   bool unsignedp = true;
545   tree op_type = type;
546   tree result;
547 
548   /* If this is an addition of a constant, convert it to a subtraction
549      of a constant since we can do that faster.  */
550   if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
551     {
552       rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
553       op_code = MINUS_EXPR;
554     }
555 
556   /* For the logical operations, we only need PRECISION bits.  For
557      addition and subtraction, we need one more and for multiplication we
558      need twice as many.  But we never want to make a size smaller than
559      our size. */
560   if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
561     needed_precision += 1;
562   else if (op_code == MULT_EXPR)
563     needed_precision *= 2;
564 
565   precision = MAX (needed_precision, TYPE_PRECISION (op_type));
566 
567   /* Unsigned will do for everything but subtraction.  */
568   if (op_code == MINUS_EXPR)
569     unsignedp = false;
570 
571   /* If our type is the wrong signedness or isn't wide enough, make a new
572      type and convert both our operands to it.  */
573   if (TYPE_PRECISION (op_type) < precision
574       || TYPE_UNSIGNED (op_type) != unsignedp)
575     {
576       /* Copy the node so we ensure it can be modified to make it modular.  */
577       op_type = copy_node (gnat_type_for_size (precision, unsignedp));
578       modulus = convert (op_type, modulus);
579       SET_TYPE_MODULUS (op_type, modulus);
580       TYPE_MODULAR_P (op_type) = 1;
581       lhs = convert (op_type, lhs);
582       rhs = convert (op_type, rhs);
583     }
584 
585   /* Do the operation, then we'll fix it up.  */
586   result = fold_build2 (op_code, op_type, lhs, rhs);
587 
588   /* For multiplication, we have no choice but to do a full modulus
589      operation.  However, we want to do this in the narrowest
590      possible size.  */
591   if (op_code == MULT_EXPR)
592     {
593       tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
594       modulus = convert (div_type, modulus);
595       SET_TYPE_MODULUS (div_type, modulus);
596       TYPE_MODULAR_P (div_type) = 1;
597       result = convert (op_type,
598 			fold_build2 (TRUNC_MOD_EXPR, div_type,
599 				     convert (div_type, result), modulus));
600     }
601 
602   /* For subtraction, add the modulus back if we are negative.  */
603   else if (op_code == MINUS_EXPR)
604     {
605       result = gnat_protect_expr (result);
606       result = fold_build3 (COND_EXPR, op_type,
607 			    fold_build2 (LT_EXPR, boolean_type_node, result,
608 					 build_int_cst (op_type, 0)),
609 			    fold_build2 (PLUS_EXPR, op_type, result, modulus),
610 			    result);
611     }
612 
613   /* For the other operations, subtract the modulus if we are >= it.  */
614   else
615     {
616       result = gnat_protect_expr (result);
617       result = fold_build3 (COND_EXPR, op_type,
618 			    fold_build2 (GE_EXPR, boolean_type_node,
619 					 result, modulus),
620 			    fold_build2 (MINUS_EXPR, op_type,
621 					 result, modulus),
622 			    result);
623     }
624 
625   return convert (type, result);
626 }
627 
628 /* This page contains routines that implement the Ada semantics with regard
629    to atomic objects.  They are fully piggybacked on the middle-end support
630    for atomic loads and stores.
631 
632    *** Memory barriers and volatile objects ***
633 
634    We implement the weakened form of the C.6(16) clause that was introduced
635    in Ada 2012 (AI05-117).  Earlier forms of this clause wouldn't have been
636    implementable without significant performance hits on modern platforms.
637 
638    We also take advantage of the requirements imposed on shared variables by
639    9.10 (conditions for sequential actions) to have non-erroneous execution
640    and consider that C.6(16) and C.6(17) only prescribe a uniform order of
641    volatile updates with regard to sequential actions, i.e. with regard to
642    reads or updates of atomic objects.
643 
644    As such, an update of an atomic object by a task requires that all earlier
645    accesses to volatile objects have completed.  Similarly, later accesses to
646    volatile objects cannot be reordered before the update of the atomic object.
647    So, memory barriers both before and after the atomic update are needed.
648 
649    For a read of an atomic object, to avoid seeing writes of volatile objects
650    by a task earlier than by the other tasks, a memory barrier is needed before
651    the atomic read.  Finally, to avoid reordering later reads or updates of
652    volatile objects to before the atomic read, a barrier is needed after the
653    atomic read.
654 
655    So, memory barriers are needed before and after atomic reads and updates.
656    And, in order to simplify the implementation, we use full memory barriers
657    in all cases, i.e. we enforce sequential consistency for atomic accesses.  */
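/* Concretely, this means that a synchronized access built below uses the
   MEMMODEL_SEQ_CST memory model, which the middle-end expands with the
   required barriers, while a non-synchronized access uses MEMMODEL_RELAXED.  */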
658 
659 /* Return the size of TYPE, which must be a positive power of 2.  */
660 
661 static unsigned int
662 resolve_atomic_size (tree type)
663 {
664   unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
665 
666   if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
667     return size;
668 
669   /* We shouldn't reach here without having already detected that the size
670      isn't compatible with an atomic access.  */
671   gcc_assert (Serious_Errors_Detected);
672 
673   return 0;
674 }
675 
676 /* Build an atomic load for the underlying atomic object in SRC.  SYNC is
677    true if the load requires synchronization.  */
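/* The result is conceptually
      VIEW_CONVERT_EXPR (type, __atomic_load_N (&object, model))
   converted back to the type of SRC, where N is the size in bytes of the
   underlying object; see below for how the matching builtin is selected.  */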
678 
679 tree
680 build_atomic_load (tree src, bool sync)
681 {
682   tree ptr_type
683     = build_pointer_type
684       (build_qualified_type (void_type_node,
685 			     TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
686   tree mem_model
687     = build_int_cst (integer_type_node,
688 		     sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
689   tree orig_src = src;
690   tree t, addr, val;
691   unsigned int size;
692   int fncode;
693 
694   /* Remove conversions to get the address of the underlying object.  */
695   src = remove_conversions (src, false);
696   size = resolve_atomic_size (TREE_TYPE (src));
697   if (size == 0)
698     return orig_src;
699 
700   fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
701   t = builtin_decl_implicit ((enum built_in_function) fncode);
702 
703   addr = build_unary_op (ADDR_EXPR, ptr_type, src);
704   val = build_call_expr (t, 2, addr, mem_model);
705 
706   /* First reinterpret the loaded bits in the original type of the load,
707      then convert to the expected result type.  */
708   t = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (src), val);
709   return convert (TREE_TYPE (orig_src), t);
710 }
711 
712 /* Build an atomic store from SRC to the underlying atomic object in DEST.
713    SYNC is true if the store requires synchronization.  */
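/* The result is conceptually
      __atomic_store_N (&object, VIEW_CONVERT_EXPR (intN, SRC), model)
   where intN is an integer type of the same size as the object; the
   conversions below merely put SRC into that shape first.  */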
714 
715 tree
716 build_atomic_store (tree dest, tree src, bool sync)
717 {
718   tree ptr_type
719     = build_pointer_type
720       (build_qualified_type (void_type_node,
721 			     TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
722   tree mem_model
723     = build_int_cst (integer_type_node,
724 		     sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
725   tree orig_dest = dest;
726   tree t, int_type, addr;
727   unsigned int size;
728   int fncode;
729 
730   /* Remove conversions to get the address of the underlying object.  */
731   dest = remove_conversions (dest, false);
732   size = resolve_atomic_size (TREE_TYPE (dest));
733   if (size == 0)
734     return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);
735 
736   fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
737   t = builtin_decl_implicit ((enum built_in_function) fncode);
738   int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);
739 
740   /* First convert the bits to be stored to the original type of the store,
741      then reinterpret them in the effective type.  But if the original type
742      is a padded type with the same size, convert to the inner type instead,
743      as we don't want to artificially introduce a CONSTRUCTOR here.  */
744   if (TYPE_IS_PADDING_P (TREE_TYPE (dest))
745       && TYPE_SIZE (TREE_TYPE (dest))
746 	 == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest)))))
747     src = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest))), src);
748   else
749     src = convert (TREE_TYPE (dest), src);
750   src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
751   addr = build_unary_op (ADDR_EXPR, ptr_type, dest);
752 
753   return build_call_expr (t, 3, addr, src, mem_model);
754 }
755 
756 /* Build a load-modify-store sequence from SRC to DEST.  GNAT_NODE is used for
757    the location of the sequence.  Note that, even though the load and the store
758    are both atomic, the sequence itself is not atomic.  */
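/* In other words, for an assignment OBJ.C := SRC where OBJ is the underlying
   atomic object, the sequence built below is roughly
      TEMP := <the atomic load already generated for OBJ>;
      TEMP.C := SRC;
      atomic_store (OBJ, TEMP);
   so only the individual load and store are atomic, not the whole update.  */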
759 
760 tree
761 build_load_modify_store (tree dest, tree src, Node_Id gnat_node)
762 {
763   /* We will be modifying DEST below so we build a copy.  */
764   dest = copy_node (dest);
765   tree ref = dest;
766 
767   while (handled_component_p (ref))
768     {
769       /* The load should already have been generated during the translation
770 	 of the GNAT destination tree; find it in the GNU tree.  */
771       if (TREE_CODE (TREE_OPERAND (ref, 0)) == VIEW_CONVERT_EXPR)
772 	{
773 	  tree op = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
774 	  if (TREE_CODE (op) == CALL_EXPR && call_is_atomic_load (op))
775 	    {
776 	      tree type = TREE_TYPE (TREE_OPERAND (ref, 0));
777 	      tree t = CALL_EXPR_ARG (op, 0);
778 	      tree obj, temp, stmt;
779 
780 	      /* Find out the loaded object.  */
781 	      if (TREE_CODE (t) == NOP_EXPR)
782 		t = TREE_OPERAND (t, 0);
783 	      if (TREE_CODE (t) == ADDR_EXPR)
784 		obj = TREE_OPERAND (t, 0);
785 	      else
786 		obj = build1 (INDIRECT_REF, type, t);
787 
788 	      /* Drop atomic and volatile qualifiers for the temporary.  */
789 	      type = TYPE_MAIN_VARIANT (type);
790 
791 	      /* And drop BLKmode, if need be, to put it into a register.  */
792 	      if (TYPE_MODE (type) == BLKmode)
793 		{
794 		  unsigned int size = tree_to_uhwi (TYPE_SIZE (type));
795 		  type = copy_type (type);
796 		  SET_TYPE_MODE (type, mode_for_size (size, MODE_INT, 0));
797 		}
798 
799 	      /* Create the temporary by inserting a SAVE_EXPR.  */
800 	      temp = build1 (SAVE_EXPR, type,
801 			     build1 (VIEW_CONVERT_EXPR, type, op));
802 	      TREE_OPERAND (ref, 0) = temp;
803 
804 	      start_stmt_group ();
805 
806 	      /* Build the modify of the temporary.  */
807 	      stmt = build_binary_op (MODIFY_EXPR, NULL_TREE, dest, src);
808 	      add_stmt_with_node (stmt, gnat_node);
809 
810 	      /* Build the store to the object.  */
811 	      stmt = build_atomic_store (obj, temp, false);
812 	      add_stmt_with_node (stmt, gnat_node);
813 
814 	      return end_stmt_group ();
815 	    }
816 	}
817 
818       TREE_OPERAND (ref, 0) = copy_node (TREE_OPERAND (ref, 0));
819       ref = TREE_OPERAND (ref, 0);
820     }
821 
822   /* Something went wrong earlier if we have not found the atomic load.  */
823   gcc_unreachable ();
824 }
825 
826 /* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
827    desired for the result.  Usually the operation is to be performed
828    in that type.  For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
829    NULL_TREE.  For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
830    case the type to be used will be derived from the operands.
831 
832    This function is very much unlike the ones for C and C++ since we
833    have already done any type conversion and matching required.  All we
834    have to do here is validate the work done by SEM and handle subtypes.  */
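/* For instance, an addition in a modular type with modulus 2**8 comes out
   below as a PLUS_EXPR followed by a FLOOR_MOD_EXPR by 256, whereas a
   nonbinary modulus is delegated to nonbinary_modular_operation; this is
   only meant as an illustration of the overall flow.  */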
835 
836 tree
837 build_binary_op (enum tree_code op_code, tree result_type,
838                  tree left_operand, tree right_operand)
839 {
840   tree left_type = TREE_TYPE (left_operand);
841   tree right_type = TREE_TYPE (right_operand);
842   tree left_base_type = get_base_type (left_type);
843   tree right_base_type = get_base_type (right_type);
844   tree operation_type = result_type;
845   tree best_type = NULL_TREE;
846   tree modulus, result;
847   bool has_side_effects = false;
848 
849   if (operation_type
850       && TREE_CODE (operation_type) == RECORD_TYPE
851       && TYPE_JUSTIFIED_MODULAR_P (operation_type))
852     operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
853 
854   if (operation_type
855       && TREE_CODE (operation_type) == INTEGER_TYPE
856       && TYPE_EXTRA_SUBTYPE_P (operation_type))
857     operation_type = get_base_type (operation_type);
858 
859   modulus = (operation_type
860 	     && TREE_CODE (operation_type) == INTEGER_TYPE
861 	     && TYPE_MODULAR_P (operation_type)
862 	     ? TYPE_MODULUS (operation_type) : NULL_TREE);
863 
864   switch (op_code)
865     {
866     case INIT_EXPR:
867     case MODIFY_EXPR:
868       gcc_checking_assert (!result_type);
869 
870       /* If there were integral or pointer conversions on the LHS, remove
871 	 them; we'll be putting them back below if needed.  Likewise for
872 	 conversions between array and record types, except for justified
873 	 modular types.  But don't do this if the right operand is not
874 	 BLKmode (for packed arrays) unless we are not changing the mode.  */
875       while ((CONVERT_EXPR_P (left_operand)
876 	      || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
877 	     && (((INTEGRAL_TYPE_P (left_type)
878 		   || POINTER_TYPE_P (left_type))
879 		  && (INTEGRAL_TYPE_P (TREE_TYPE
880 				       (TREE_OPERAND (left_operand, 0)))
881 		      || POINTER_TYPE_P (TREE_TYPE
882 					 (TREE_OPERAND (left_operand, 0)))))
883 		 || (((TREE_CODE (left_type) == RECORD_TYPE
884 		       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
885 		      || TREE_CODE (left_type) == ARRAY_TYPE)
886 		     && ((TREE_CODE (TREE_TYPE
887 				     (TREE_OPERAND (left_operand, 0)))
888 			  == RECORD_TYPE)
889 			 || (TREE_CODE (TREE_TYPE
890 					(TREE_OPERAND (left_operand, 0)))
891 			     == ARRAY_TYPE))
892 		     && (TYPE_MODE (right_type) == BLKmode
893 			 || (TYPE_MODE (left_type)
894 			     == TYPE_MODE (TREE_TYPE
895 					   (TREE_OPERAND
896 					    (left_operand, 0))))))))
897 	{
898 	  left_operand = TREE_OPERAND (left_operand, 0);
899 	  left_type = TREE_TYPE (left_operand);
900 	}
901 
902       /* If a class-wide type may be involved, force use of the RHS type.  */
903       if ((TREE_CODE (right_type) == RECORD_TYPE
904 	   || TREE_CODE (right_type) == UNION_TYPE)
905 	  && TYPE_ALIGN_OK (right_type))
906 	operation_type = right_type;
907 
908       /* If we are copying between padded objects with compatible types, use
909 	 the padded view of the objects, this is very likely more efficient.
910 	 Likewise for a padded object that is assigned a constructor, if we
911 	 can convert the constructor to the inner type, to avoid putting a
912 	 VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
913 	 actually copied anything.  */
914       else if (TYPE_IS_PADDING_P (left_type)
915 	       && TREE_CONSTANT (TYPE_SIZE (left_type))
916 	       && ((TREE_CODE (right_operand) == COMPONENT_REF
917 		    && TYPE_MAIN_VARIANT (left_type)
918 		       == TYPE_MAIN_VARIANT
919 			  (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
920 		   || (TREE_CODE (right_operand) == CONSTRUCTOR
921 		       && !CONTAINS_PLACEHOLDER_P
922 			   (DECL_SIZE (TYPE_FIELDS (left_type)))))
923 	       && !integer_zerop (TYPE_SIZE (right_type)))
924 	{
925 	  /* We make an exception for a BLKmode type padding a non-BLKmode
926 	     inner type and do the conversion of the LHS right away, since
927 	     unchecked_convert wouldn't do it properly.  */
928 	  if (TYPE_MODE (left_type) == BLKmode
929 	      && TYPE_MODE (right_type) != BLKmode
930 	      && TREE_CODE (right_operand) != CONSTRUCTOR)
931 	    {
932 	      operation_type = right_type;
933 	      left_operand = convert (operation_type, left_operand);
934 	      left_type = operation_type;
935 	    }
936 	  else
937 	    operation_type = left_type;
938 	}
939 
940       /* If we have a call to a function that returns with variable size, use
941 	 the RHS type in case we want to use the return slot optimization.  */
942       else if (TREE_CODE (right_operand) == CALL_EXPR
943 	       && return_type_with_variable_size_p (right_type))
944 	operation_type = right_type;
945 
946       /* Find the best type to use for copying between aggregate types.  */
947       else if (((TREE_CODE (left_type) == ARRAY_TYPE
948 		 && TREE_CODE (right_type) == ARRAY_TYPE)
949 		|| (TREE_CODE (left_type) == RECORD_TYPE
950 		    && TREE_CODE (right_type) == RECORD_TYPE))
951 	       && (best_type = find_common_type (left_type, right_type)))
952 	operation_type = best_type;
953 
954       /* Otherwise use the LHS type.  */
955       else
956 	operation_type = left_type;
957 
958       /* Ensure everything on the LHS is valid.  If we have a field reference,
959 	 strip anything that get_inner_reference can handle.  Then remove any
960 	 conversions between types having the same code and mode.  And mark
961 	 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
962 	 either an INDIRECT_REF, a NULL_EXPR, a SAVE_EXPR or a DECL node.  */
963       result = left_operand;
964       while (true)
965 	{
966 	  tree restype = TREE_TYPE (result);
967 
968 	  if (TREE_CODE (result) == COMPONENT_REF
969 	      || TREE_CODE (result) == ARRAY_REF
970 	      || TREE_CODE (result) == ARRAY_RANGE_REF)
971 	    while (handled_component_p (result))
972 	      result = TREE_OPERAND (result, 0);
973 	  else if (TREE_CODE (result) == REALPART_EXPR
974 		   || TREE_CODE (result) == IMAGPART_EXPR
975 		   || (CONVERT_EXPR_P (result)
976 		       && (((TREE_CODE (restype)
977 			     == TREE_CODE (TREE_TYPE
978 					   (TREE_OPERAND (result, 0))))
979 			     && (TYPE_MODE (TREE_TYPE
980 					    (TREE_OPERAND (result, 0)))
981 				 == TYPE_MODE (restype)))
982 			   || TYPE_ALIGN_OK (restype))))
983 	    result = TREE_OPERAND (result, 0);
984 	  else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
985 	    {
986 	      TREE_ADDRESSABLE (result) = 1;
987 	      result = TREE_OPERAND (result, 0);
988 	    }
989 	  else
990 	    break;
991 	}
992 
993       gcc_assert (TREE_CODE (result) == INDIRECT_REF
994 		  || TREE_CODE (result) == NULL_EXPR
995 		  || TREE_CODE (result) == SAVE_EXPR
996 		  || DECL_P (result));
997 
998       /* Convert the right operand to the operation type unless it is
999 	 either already of the correct type or if the type involves a
1000 	 placeholder, since the RHS may not have the same record type.  */
1001       if (operation_type != right_type
1002 	  && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
1003 	{
1004 	  right_operand = convert (operation_type, right_operand);
1005 	  right_type = operation_type;
1006 	}
1007 
1008       /* If the left operand is not of the same type as the operation
1009 	 type, wrap it up in a VIEW_CONVERT_EXPR.  */
1010       if (left_type != operation_type)
1011 	left_operand = unchecked_convert (operation_type, left_operand, false);
1012 
1013       has_side_effects = true;
1014       modulus = NULL_TREE;
1015       break;
1016 
1017     case ARRAY_REF:
1018       if (!operation_type)
1019 	operation_type = TREE_TYPE (left_type);
1020 
1021       /* ... fall through ... */
1022 
1023     case ARRAY_RANGE_REF:
1024       /* First look through conversion between type variants.  Note that
1025 	 this changes neither the operation type nor the type domain.  */
1026       if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
1027 	  && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
1028 	     == TYPE_MAIN_VARIANT (left_type))
1029 	{
1030 	  left_operand = TREE_OPERAND (left_operand, 0);
1031 	  left_type = TREE_TYPE (left_operand);
1032 	}
1033 
1034       /* For a range, make sure the element type is consistent.  */
1035       if (op_code == ARRAY_RANGE_REF
1036 	  && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
1037 	operation_type = build_array_type (TREE_TYPE (left_type),
1038 					   TYPE_DOMAIN (operation_type));
1039 
1040       /* Then convert the right operand to its base type.  This will prevent
1041 	 unneeded sign conversions when sizetype is wider than integer.  */
1042       right_operand = convert (right_base_type, right_operand);
1043       right_operand = convert_to_index_type (right_operand);
1044       modulus = NULL_TREE;
1045       break;
1046 
1047     case TRUTH_ANDIF_EXPR:
1048     case TRUTH_ORIF_EXPR:
1049     case TRUTH_AND_EXPR:
1050     case TRUTH_OR_EXPR:
1051     case TRUTH_XOR_EXPR:
1052       gcc_checking_assert
1053 	(TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1054       operation_type = left_base_type;
1055       left_operand = convert (operation_type, left_operand);
1056       right_operand = convert (operation_type, right_operand);
1057       break;
1058 
1059     case GE_EXPR:
1060     case LE_EXPR:
1061     case GT_EXPR:
1062     case LT_EXPR:
1063     case EQ_EXPR:
1064     case NE_EXPR:
1065       gcc_checking_assert
1066 	(TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1067       /* If either operand is a NULL_EXPR, just return a new one.  */
1068       if (TREE_CODE (left_operand) == NULL_EXPR)
1069 	return build2 (op_code, result_type,
1070 		       build1 (NULL_EXPR, integer_type_node,
1071 			       TREE_OPERAND (left_operand, 0)),
1072 		       integer_zero_node);
1073 
1074       else if (TREE_CODE (right_operand) == NULL_EXPR)
1075 	return build2 (op_code, result_type,
1076 		       build1 (NULL_EXPR, integer_type_node,
1077 			       TREE_OPERAND (right_operand, 0)),
1078 		       integer_zero_node);
1079 
1080       /* If either object is a justified modular type, get the
1081 	 field from within.  */
1082       if (TREE_CODE (left_type) == RECORD_TYPE
1083 	  && TYPE_JUSTIFIED_MODULAR_P (left_type))
1084 	{
1085 	  left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
1086 				  left_operand);
1087 	  left_type = TREE_TYPE (left_operand);
1088 	  left_base_type = get_base_type (left_type);
1089 	}
1090 
1091       if (TREE_CODE (right_type) == RECORD_TYPE
1092 	  && TYPE_JUSTIFIED_MODULAR_P (right_type))
1093 	{
1094 	  right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
1095 				  right_operand);
1096 	  right_type = TREE_TYPE (right_operand);
1097 	  right_base_type = get_base_type (right_type);
1098 	}
1099 
1100       /* If both objects are arrays, compare them specially.  */
1101       if ((TREE_CODE (left_type) == ARRAY_TYPE
1102 	   || (TREE_CODE (left_type) == INTEGER_TYPE
1103 	       && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
1104 	  && (TREE_CODE (right_type) == ARRAY_TYPE
1105 	      || (TREE_CODE (right_type) == INTEGER_TYPE
1106 		  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
1107 	{
1108 	  result = compare_arrays (input_location,
1109 				   result_type, left_operand, right_operand);
1110 	  if (op_code == NE_EXPR)
1111 	    result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1112 	  else
1113 	    gcc_assert (op_code == EQ_EXPR);
1114 
1115 	  return result;
1116 	}
1117 
1118       /* Otherwise, the base types must be the same, unless they are both fat
1119 	 pointer types or record types.  In the latter case, use the best type
1120 	 and convert both operands to that type.  */
1121       if (left_base_type != right_base_type)
1122 	{
1123 	  if (TYPE_IS_FAT_POINTER_P (left_base_type)
1124 	      && TYPE_IS_FAT_POINTER_P (right_base_type))
1125 	    {
1126 	      gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
1127 			  == TYPE_MAIN_VARIANT (right_base_type));
1128 	      best_type = left_base_type;
1129 	    }
1130 
1131 	  else if (TREE_CODE (left_base_type) == RECORD_TYPE
1132 		   && TREE_CODE (right_base_type) == RECORD_TYPE)
1133 	    {
1134 	      /* The only way this is permitted is if both types have the same
1135 		 name.  In that case, one of them must not be self-referential.
1136 		 Use it as the best type.  Even better with a fixed size.  */
1137 	      gcc_assert (TYPE_NAME (left_base_type)
1138 			  && TYPE_NAME (left_base_type)
1139 			     == TYPE_NAME (right_base_type));
1140 
1141 	      if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
1142 		best_type = left_base_type;
1143 	      else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
1144 		best_type = right_base_type;
1145 	      else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
1146 		best_type = left_base_type;
1147 	      else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
1148 		best_type = right_base_type;
1149 	      else
1150 		gcc_unreachable ();
1151 	    }
1152 
1153 	  else if (POINTER_TYPE_P (left_base_type)
1154 		   && POINTER_TYPE_P (right_base_type))
1155 	    {
1156 	      gcc_assert (TREE_TYPE (left_base_type)
1157 			  == TREE_TYPE (right_base_type));
1158 	      best_type = left_base_type;
1159 	    }
1160 	  else
1161 	    gcc_unreachable ();
1162 
1163 	  left_operand = convert (best_type, left_operand);
1164 	  right_operand = convert (best_type, right_operand);
1165 	}
1166       else
1167 	{
1168 	  left_operand = convert (left_base_type, left_operand);
1169 	  right_operand = convert (right_base_type, right_operand);
1170 	}
1171 
1172       /* If both objects are fat pointers, compare them specially.  */
1173       if (TYPE_IS_FAT_POINTER_P (left_base_type))
1174 	{
1175 	  result
1176 	    = compare_fat_pointers (input_location,
1177 				    result_type, left_operand, right_operand);
1178 	  if (op_code == NE_EXPR)
1179 	    result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1180 	  else
1181 	    gcc_assert (op_code == EQ_EXPR);
1182 
1183 	  return result;
1184 	}
1185 
1186       modulus = NULL_TREE;
1187       break;
1188 
1189     case LSHIFT_EXPR:
1190     case RSHIFT_EXPR:
1191     case LROTATE_EXPR:
1192     case RROTATE_EXPR:
1193        /* The RHS of a shift can be any type.  Also, ignore any modulus
1194 	 (we used to abort, but this is needed for unchecked conversion
1195 	 to modular types).  Otherwise, processing is the same as normal.  */
1196       gcc_assert (operation_type == left_base_type);
1197       modulus = NULL_TREE;
1198       left_operand = convert (operation_type, left_operand);
1199       break;
1200 
1201     case BIT_AND_EXPR:
1202     case BIT_IOR_EXPR:
1203     case BIT_XOR_EXPR:
1204       /* For binary modulus, if the inputs are in range, so are the
1205 	 outputs.  */
1206       if (modulus && integer_pow2p (modulus))
1207 	modulus = NULL_TREE;
1208       goto common;
1209 
1210     case COMPLEX_EXPR:
1211       gcc_assert (TREE_TYPE (result_type) == left_base_type
1212 		  && TREE_TYPE (result_type) == right_base_type);
1213       left_operand = convert (left_base_type, left_operand);
1214       right_operand = convert (right_base_type, right_operand);
1215       break;
1216 
1217     case TRUNC_DIV_EXPR:   case TRUNC_MOD_EXPR:
1218     case CEIL_DIV_EXPR:    case CEIL_MOD_EXPR:
1219     case FLOOR_DIV_EXPR:   case FLOOR_MOD_EXPR:
1220     case ROUND_DIV_EXPR:   case ROUND_MOD_EXPR:
1221       /* These always produce results lower than either operand.  */
1222       modulus = NULL_TREE;
1223       goto common;
1224 
1225     case POINTER_PLUS_EXPR:
1226       gcc_assert (operation_type == left_base_type
1227 		  && sizetype == right_base_type);
1228       left_operand = convert (operation_type, left_operand);
1229       right_operand = convert (sizetype, right_operand);
1230       break;
1231 
1232     case PLUS_NOMOD_EXPR:
1233     case MINUS_NOMOD_EXPR:
1234       if (op_code == PLUS_NOMOD_EXPR)
1235 	op_code = PLUS_EXPR;
1236       else
1237 	op_code = MINUS_EXPR;
1238       modulus = NULL_TREE;
1239 
1240       /* ... fall through ... */
1241 
1242     case PLUS_EXPR:
1243     case MINUS_EXPR:
1244       /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
1245 	 other compilers.  Unlike C, Ada doesn't allow arithmetic in these
1246 	 types, but we can generate addition/subtraction for Succ/Pred.  */
1247       if (operation_type
1248 	  && (TREE_CODE (operation_type) == ENUMERAL_TYPE
1249 	      || TREE_CODE (operation_type) == BOOLEAN_TYPE))
1250 	operation_type = left_base_type = right_base_type
1251 	  = gnat_type_for_mode (TYPE_MODE (operation_type),
1252 				TYPE_UNSIGNED (operation_type));
1253 
1254       /* ... fall through ... */
1255 
1256     default:
1257     common:
1258       /* The result type should be the same as the base types of both
1259 	 operands (and they should be the same).  Convert everything to
1260 	 the result type.  */
1261 
1262       gcc_assert (operation_type == left_base_type
1263 		  && left_base_type == right_base_type);
1264       left_operand = convert (operation_type, left_operand);
1265       right_operand = convert (operation_type, right_operand);
1266     }
1267 
1268   if (modulus && !integer_pow2p (modulus))
1269     {
1270       result = nonbinary_modular_operation (op_code, operation_type,
1271 					    left_operand, right_operand);
1272       modulus = NULL_TREE;
1273     }
1274   /* If either operand is a NULL_EXPR, just return a new one.  */
1275   else if (TREE_CODE (left_operand) == NULL_EXPR)
1276     return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
1277   else if (TREE_CODE (right_operand) == NULL_EXPR)
1278     return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
1279   else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1280     result = fold (build4 (op_code, operation_type, left_operand,
1281 			   right_operand, NULL_TREE, NULL_TREE));
1282   else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
1283     result = build2 (op_code, void_type_node, left_operand, right_operand);
1284   else
1285     result
1286       = fold_build2 (op_code, operation_type, left_operand, right_operand);
1287 
1288   if (TREE_CONSTANT (result))
1289     ;
1290   else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1291     {
1292       if (TYPE_VOLATILE (operation_type))
1293 	TREE_THIS_VOLATILE (result) = 1;
1294     }
1295   else
1296     TREE_CONSTANT (result)
1297       |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));
1298 
1299   TREE_SIDE_EFFECTS (result) |= has_side_effects;
1300 
1301   /* If we are working with modular types, perform the MOD operation
1302      if something above hasn't eliminated the need for it.  */
1303   if (modulus)
1304     result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
1305 			  convert (operation_type, modulus));
1306 
1307   if (result_type && result_type != operation_type)
1308     result = convert (result_type, result);
1309 
1310   return result;
1311 }
1312 
1313 /* Similar, but for unary operations.  */
1314 
1315 tree
1316 build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1317 {
1318   tree type = TREE_TYPE (operand);
1319   tree base_type = get_base_type (type);
1320   tree operation_type = result_type;
1321   tree result;
1322 
1323   if (operation_type
1324       && TREE_CODE (operation_type) == RECORD_TYPE
1325       && TYPE_JUSTIFIED_MODULAR_P (operation_type))
1326     operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
1327 
1328   if (operation_type
1329       && TREE_CODE (operation_type) == INTEGER_TYPE
1330       && TYPE_EXTRA_SUBTYPE_P (operation_type))
1331     operation_type = get_base_type (operation_type);
1332 
1333   switch (op_code)
1334     {
1335     case REALPART_EXPR:
1336     case IMAGPART_EXPR:
1337       if (!operation_type)
1338 	result_type = operation_type = TREE_TYPE (type);
1339       else
1340 	gcc_assert (result_type == TREE_TYPE (type));
1341 
1342       result = fold_build1 (op_code, operation_type, operand);
1343       break;
1344 
1345     case TRUTH_NOT_EXPR:
1346       gcc_checking_assert
1347 	(TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1348       result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
1349       /* When not optimizing, fold the result as invert_truthvalue_loc
1350 	 doesn't fold the result of comparisons.  This is intended to undo
1351 	 the trick used for boolean rvalues in gnat_to_gnu.  */
1352       if (!optimize)
1353 	result = fold (result);
1354       break;
1355 
1356     case ATTR_ADDR_EXPR:
1357     case ADDR_EXPR:
1358       switch (TREE_CODE (operand))
1359 	{
1360 	case INDIRECT_REF:
1361 	case UNCONSTRAINED_ARRAY_REF:
1362 	  result = TREE_OPERAND (operand, 0);
1363 
1364 	  /* Make sure the type here is a pointer, not a reference.
1365 	     GCC wants pointer types for function addresses.  */
1366 	  if (!result_type)
1367 	    result_type = build_pointer_type (type);
1368 
1369 	  /* If the underlying object can alias everything, propagate the
1370 	     property since we are effectively retrieving the object.  */
1371 	  if (POINTER_TYPE_P (TREE_TYPE (result))
1372 	      && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
1373 	    {
1374 	      if (TREE_CODE (result_type) == POINTER_TYPE
1375 		  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1376 		result_type
1377 		  = build_pointer_type_for_mode (TREE_TYPE (result_type),
1378 						 TYPE_MODE (result_type),
1379 						 true);
1380 	      else if (TREE_CODE (result_type) == REFERENCE_TYPE
1381 		       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1382 	        result_type
1383 		  = build_reference_type_for_mode (TREE_TYPE (result_type),
1384 						   TYPE_MODE (result_type),
1385 						   true);
1386 	    }
1387 	  break;
1388 
1389 	case NULL_EXPR:
1390 	  result = operand;
1391 	  TREE_TYPE (result) = type = build_pointer_type (type);
1392 	  break;
1393 
1394 	case COMPOUND_EXPR:
1395 	  /* Fold a compound expression if it has unconstrained array type
1396 	     since the middle-end cannot handle it.  But we don't do it in the
1397 	     general case because it may introduce aliasing issues if the
1398 	     first operand is an indirect assignment and the second operand
1399 	     the corresponding address, e.g. for an allocator.  However, do
1400 	     it for a return value to expose it for later recognition.  */
1401 	  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE
1402 	      || (TREE_CODE (TREE_OPERAND (operand, 1)) == VAR_DECL
1403 		  && DECL_RETURN_VALUE_P (TREE_OPERAND (operand, 1))))
1404 	    {
1405 	      result = build_unary_op (ADDR_EXPR, result_type,
1406 				       TREE_OPERAND (operand, 1));
1407 	      result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
1408 			       TREE_OPERAND (operand, 0), result);
1409 	      break;
1410 	    }
1411 	  goto common;
1412 
1413 	case ARRAY_REF:
1414 	case ARRAY_RANGE_REF:
1415 	case COMPONENT_REF:
1416 	case BIT_FIELD_REF:
1417 	  /* If this is for 'Address, find the address of the prefix and add
1418 	     the offset of the field.  Otherwise, do this the normal way.  */
1419 	  if (op_code == ATTR_ADDR_EXPR)
1420 	    {
1421 	      HOST_WIDE_INT bitsize;
1422 	      HOST_WIDE_INT bitpos;
1423 	      tree offset, inner;
1424 	      machine_mode mode;
1425 	      int unsignedp, reversep, volatilep;
1426 
1427 	      inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1428 					   &mode, &unsignedp, &reversep,
1429 					   &volatilep, false);
1430 
1431 	      /* If INNER is a padding type whose field has a self-referential
1432 		 size, convert to that inner type.  We know the offset is zero
1433 		 and we need to have that type visible.  */
1434 	      if (type_is_padding_self_referential (TREE_TYPE (inner)))
1435 		inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1436 				 inner);
1437 
1438 	      /* Compute the offset as a byte offset from INNER.  */
1439 	      if (!offset)
1440 		offset = size_zero_node;
1441 
1442 	      offset = size_binop (PLUS_EXPR, offset,
1443 				   size_int (bitpos / BITS_PER_UNIT));
1444 
1445 	      /* Take the address of INNER, convert it to a pointer to our type
1446 		 and add the offset.  */
1447 	      inner = build_unary_op (ADDR_EXPR,
1448 				      build_pointer_type (TREE_TYPE (operand)),
1449 				      inner);
1450 	      result = build_binary_op (POINTER_PLUS_EXPR, TREE_TYPE (inner),
1451 					inner, offset);
1452 	      break;
1453 	    }
1454 	  goto common;
1455 
1456 	case CONSTRUCTOR:
1457 	  /* If this is just a constructor for a padded record, we can
1458 	     just take the address of the single field and convert it to
1459 	     a pointer to our type.  */
1460 	  if (TYPE_IS_PADDING_P (type))
1461 	    {
1462 	      result
1463 		= build_unary_op (ADDR_EXPR,
1464 				  build_pointer_type (TREE_TYPE (operand)),
1465 				  CONSTRUCTOR_ELT (operand, 0)->value);
1466 	      break;
1467 	    }
1468 	  goto common;
1469 
1470 	case NOP_EXPR:
1471 	  if (AGGREGATE_TYPE_P (type)
1472 	      && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1473 	    return build_unary_op (ADDR_EXPR, result_type,
1474 				   TREE_OPERAND (operand, 0));
1475 
1476 	  /* ... fallthru ... */
1477 
1478 	case VIEW_CONVERT_EXPR:
1479 	  /* If this is just a variant conversion or if the conversion doesn't
1480 	     change the mode, get the result type from this type and go down.
1481 	     This is needed for conversions of CONST_DECLs, to eventually get
1482 	     to the address of their CORRESPONDING_VARs.  */
1483 	  if ((TYPE_MAIN_VARIANT (type)
1484 	       == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
1485 	      || (TYPE_MODE (type) != BLKmode
1486 		  && (TYPE_MODE (type)
1487 		      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
1488 	    return build_unary_op (ADDR_EXPR,
1489 				   (result_type ? result_type
1490 				    : build_pointer_type (type)),
1491 				   TREE_OPERAND (operand, 0));
1492 	  goto common;
1493 
1494 	case CONST_DECL:
1495 	  operand = DECL_CONST_CORRESPONDING_VAR (operand);
1496 
1497 	  /* ... fall through ... */
1498 
1499 	default:
1500 	common:
1501 
1502 	  /* If we are taking the address of a padded record whose field
1503 	     contains a template, take the address of the field.  */
1504 	  if (TYPE_IS_PADDING_P (type)
1505 	      && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1506 	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1507 	    {
1508 	      type = TREE_TYPE (TYPE_FIELDS (type));
1509 	      operand = convert (type, operand);
1510 	    }
1511 
1512 	  gnat_mark_addressable (operand);
1513 	  result = build_fold_addr_expr (operand);
1514 	}
1515 
1516       TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1517       break;
1518 
1519     case INDIRECT_REF:
1520       {
1521 	tree t = remove_conversions (operand, false);
1522 	bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);
1523 
1524 	/* If TYPE is a thin pointer, either first retrieve the base if this
1525 	   is an expression with an offset built for the initialization of an
1526 	   object with an unconstrained nominal subtype, or else convert to
1527 	   the fat pointer.  */
1528 	if (TYPE_IS_THIN_POINTER_P (type))
1529 	  {
1530 	    tree rec_type = TREE_TYPE (type);
1531 
1532 	    if (TREE_CODE (operand) == POINTER_PLUS_EXPR
1533 		&& TREE_OPERAND (operand, 1)
1534 		   == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
1535 		&& TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
1536 	      {
1537 		operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
1538 		type = TREE_TYPE (operand);
1539 	      }
1540 	    else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
1541 	      {
1542 		operand
1543 		  = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
1544 			     operand);
1545 		type = TREE_TYPE (operand);
1546 	      }
1547 	  }
1548 
1549 	/* If we want to refer to an unconstrained array, use the appropriate
1550 	   expression.  But this will never survive down to the back-end.  */
1551 	if (TYPE_IS_FAT_POINTER_P (type))
1552 	  {
1553 	    result = build1 (UNCONSTRAINED_ARRAY_REF,
1554 			     TYPE_UNCONSTRAINED_ARRAY (type), operand);
1555 	    TREE_READONLY (result)
1556 	      = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
1557 	  }
1558 
1559 	/* If we are dereferencing an ADDR_EXPR, return its operand.  */
1560 	else if (TREE_CODE (operand) == ADDR_EXPR)
1561 	  result = TREE_OPERAND (operand, 0);
1562 
1563 	/* Otherwise, build and fold the indirect reference.  */
1564 	else
1565 	  {
1566 	    result = build_fold_indirect_ref (operand);
1567 	    TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1568 	  }
1569 
1570 	if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
1571 	  {
1572 	    TREE_SIDE_EFFECTS (result) = 1;
1573 	    if (TREE_CODE (result) == INDIRECT_REF)
1574 	      TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1575 	  }
1576 
1577 	if ((TREE_CODE (result) == INDIRECT_REF
1578 	     || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
1579 	    && can_never_be_null)
1580 	  TREE_THIS_NOTRAP (result) = 1;
1581 
1582 	break;
1583       }
1584 
1585     case NEGATE_EXPR:
1586     case BIT_NOT_EXPR:
1587       {
1588 	tree modulus = ((operation_type
1589 			 && TREE_CODE (operation_type) == INTEGER_TYPE
1590 			 && TYPE_MODULAR_P (operation_type))
1591 			? TYPE_MODULUS (operation_type) : NULL_TREE);
1592 	int mod_pow2 = modulus && integer_pow2p (modulus);
1593 
1594 	/* If this is a modular type, there are various possibilities
1595 	   depending on the operation and whether the modulus is a
1596 	   power of two or not.  */
1597 
1598 	if (modulus)
1599 	  {
1600 	    gcc_assert (operation_type == base_type);
1601 	    operand = convert (operation_type, operand);
1602 
1603 	    /* For a binary modulus, the fastest approach in the negate case
1604 	       is the straightforward code below: the TRUNC_MOD_EXPR amounts
1605 	       to an AND operation.  */
1606 	    if (op_code == NEGATE_EXPR && mod_pow2)
1607 	      result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1608 				    fold_build1 (NEGATE_EXPR, operation_type,
1609 						 operand),
1610 				    modulus);
1611 
1612 	    /* For the nonbinary negate case, return zero for a zero operand,
1613 	       else return the modulus minus the operand.  If the modulus
1614 	       is a power of two minus one, we can do the subtraction
1615 	       as an XOR since it is equivalent and faster on most machines.  */
1616 	    else if (op_code == NEGATE_EXPR && !mod_pow2)
1617 	      {
1618 		if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1619 						modulus,
1620 						build_int_cst (operation_type,
1621 							       1))))
1622 		  result = fold_build2 (BIT_XOR_EXPR, operation_type,
1623 					operand, modulus);
1624 		else
1625 		  result = fold_build2 (MINUS_EXPR, operation_type,
1626 					modulus, operand);
1627 
1628 		result = fold_build3 (COND_EXPR, operation_type,
1629 				      fold_build2 (NE_EXPR,
1630 						   boolean_type_node,
1631 						   operand,
1632 						   build_int_cst
1633 						   (operation_type, 0)),
1634 				      result, operand);
1635 	      }
1636 	    else
1637 	      {
1638 		/* For the NOT cases, we need a constant equal to
1639 		   the modulus minus one.  For a binary modulus, we
1640 		   XOR the operand against that constant; for a nonbinary
1641 		   modulus, we subtract the operand from it.  */
1642 
1643 		tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
1644 					 build_int_cst (operation_type, 1));
1645 
1646 		if (mod_pow2)
1647 		  result = fold_build2 (BIT_XOR_EXPR, operation_type,
1648 					operand, cnst);
1649 		else
1650 		  result = fold_build2 (MINUS_EXPR, operation_type,
1651 					cnst, operand);
1652 	      }
1653 
1654 	    break;
1655 	  }
1656       }
1657 
1658       /* ... fall through ... */
1659 
1660     default:
1661       gcc_assert (operation_type == base_type);
1662       result = fold_build1 (op_code, operation_type,
1663 			    convert (operation_type, operand));
1664     }
1665 
1666   if (result_type && TREE_TYPE (result) != result_type)
1667     result = convert (result_type, result);
1668 
1669   return result;
1670 }
1671 
1672 /* Similar, but for COND_EXPR.  */
1673 
1674 tree
1675 build_cond_expr (tree result_type, tree condition_operand,
1676                  tree true_operand, tree false_operand)
1677 {
1678   bool addr_p = false;
1679   tree result;
1680 
1681   /* The front-end verified that the result, true and false operands have
1682      the same base type.  Convert everything to the result type.  */
1683   true_operand = convert (result_type, true_operand);
1684   false_operand = convert (result_type, false_operand);
1685 
1686   /* If the result type is unconstrained, take the address of the operands and
1687      then dereference the result.  Likewise if the result type is passed by
1688      reference, because creating a temporary of this type is not allowed.  */
1689   if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1690       || TYPE_IS_BY_REFERENCE_P (result_type)
1691       || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1692     {
1693       result_type = build_pointer_type (result_type);
1694       true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1695       false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1696       addr_p = true;
1697     }
1698 
1699   result = fold_build3 (COND_EXPR, result_type, condition_operand,
1700 			true_operand, false_operand);
1701 
1702   /* If we have a common SAVE_EXPR (possibly surrounded by arithmetics)
1703      in both arms, make sure it gets evaluated by moving it ahead of the
1704      conditional expression.  This is necessary because it is evaluated
1705      in only one place at run time and would otherwise be uninitialized
1706      in one of the arms.  */
1707   true_operand = skip_simple_arithmetic (true_operand);
1708   false_operand = skip_simple_arithmetic (false_operand);
1709 
1710   if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
1711     result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1712 
1713   if (addr_p)
1714     result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1715 
1716   return result;
1717 }
1718 
1719 /* Similar, but for COMPOUND_EXPR.  */
1720 
1721 tree
1722 build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
1723 {
1724   bool addr_p = false;
1725   tree result;
1726 
1727   /* If the result type is unconstrained, take the address of the operand and
1728      then dereference the result.  Likewise if the result type is passed by
1729      reference, but this is natively handled in the gimplifier.  */
1730   if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1731       || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1732     {
1733       result_type = build_pointer_type (result_type);
1734       expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
1735       addr_p = true;
1736     }
1737 
1738   result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
1739 			expr_operand);
1740 
1741   if (addr_p)
1742     result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1743 
1744   return result;
1745 }
1746 
1747 /* Conveniently construct a function call expression.  FNDECL names the
1748    function to be called, N is the number of arguments, and the "..."
1749    parameters are the argument expressions.  Unlike build_call_expr
1750    this doesn't fold the call, hence it will always return a CALL_EXPR.  */
1751 
1752 tree
1753 build_call_n_expr (tree fndecl, int n, ...)
1754 {
1755   va_list ap;
1756   tree fntype = TREE_TYPE (fndecl);
1757   tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
1758 
1759   va_start (ap, n);
1760   fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
1761   va_end (ap);
1762   return fn;
1763 }
1764 
1765 /* Build a goto to LABEL for a raise, with an optional call to Local_Raise.
1766    MSG gives the exception's identity for the call to Local_Raise, if any.  */
1767 
1768 static tree
1769 build_goto_raise (tree label, int msg)
1770 {
1771   tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);
1772   Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1773 
1774   /* If Local_Raise is present, build Local_Raise (Exception'Identity).  */
1775   if (Present (local_raise))
1776     {
1777       tree gnu_local_raise = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
1778       tree gnu_exception_entity
1779 	= gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
1780       tree gnu_call
1781 	= build_call_n_expr (gnu_local_raise, 1,
1782 			     build_unary_op (ADDR_EXPR, NULL_TREE,
1783 					     gnu_exception_entity));
1784       gnu_result
1785 	= build2 (COMPOUND_EXPR, void_type_node, gnu_call, gnu_result);
1786     }
1787 
1788   return gnu_result;
1789 }
1790 
1791 /* Expand the SLOC of GNAT_NODE, if present, into tree location information
1792    pointed to by FILENAME, LINE and COL.  Fall back to the current location
1793    if GNAT_NODE is absent or has no SLOC.  */
1794 
1795 static void
1796 expand_sloc (Node_Id gnat_node, tree *filename, tree *line, tree *col)
1797 {
1798   const char *str;
1799   int line_number, column_number;
1800 
1801   if (Debug_Flag_NN || Exception_Locations_Suppressed)
1802     {
1803       str = "";
1804       line_number = 0;
1805       column_number = 0;
1806     }
1807   else if (Present (gnat_node) && Sloc (gnat_node) != No_Location)
1808     {
1809       str = Get_Name_String
1810 	    (Debug_Source_Name (Get_Source_File_Index (Sloc (gnat_node))));
1811       line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1812       column_number = Get_Column_Number (Sloc (gnat_node));
1813     }
1814   else
1815     {
1816       str = lbasename (LOCATION_FILE (input_location));
1817       line_number = LOCATION_LINE (input_location);
1818       column_number = LOCATION_COLUMN (input_location);
1819     }
1820 
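  /* Build the file name as a string constant and give it an array type,
     since its address is what gets passed to the raise routine.  */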
1821   const int len = strlen (str);
1822   *filename = build_string (len, str);
1823   TREE_TYPE (*filename) = build_array_type (char_type_node,
1824 					    build_index_type (size_int (len)));
1825   *line = build_int_cst (NULL_TREE, line_number);
1826   if (col)
1827     *col = build_int_cst (NULL_TREE, column_number);
1828 }
1829 
1830 /* Build a call to a function that raises an exception and passes file name
1831    and line number, if requested.  MSG says which exception function to call.
1832    GNAT_NODE is the node conveying the source location for which the error
1833    should be signaled, or Empty in which case the error is signaled for the
1834    current location.  KIND says which kind of exception node this is for,
1835    among N_Raise_{Constraint,Storage,Program}_Error.  */
1836 
1837 tree
1838 build_call_raise (int msg, Node_Id gnat_node, char kind)
1839 {
1840   tree fndecl = gnat_raise_decls[msg];
1841   tree label = get_exception_label (kind);
1842   tree filename, line;
1843 
1844   /* If this is to be done as a goto, handle that case.  */
1845   if (label)
1846     return build_goto_raise (label, msg);
1847 
1848   expand_sloc (gnat_node, &filename, &line, NULL);
1849 
1850   return
1851     build_call_n_expr (fndecl, 2,
1852 		       build1 (ADDR_EXPR,
1853 			       build_pointer_type (char_type_node),
1854 			       filename),
1855 		       line);
1856 }
1857 
1858 /* Similar to build_call_raise, with extra information about the column
1859    where the check failed.  */
1860 
1861 tree
1862 build_call_raise_column (int msg, Node_Id gnat_node, char kind)
1863 {
1864   tree fndecl = gnat_raise_decls_ext[msg];
1865   tree label = get_exception_label (kind);
1866   tree filename, line, col;
1867 
1868   /* If this is to be done as a goto, handle that case.  */
1869   if (label)
1870     return build_goto_raise (label, msg);
1871 
1872   expand_sloc (gnat_node, &filename, &line, &col);
1873 
1874   return
1875     build_call_n_expr (fndecl, 3,
1876 		       build1 (ADDR_EXPR,
1877 			       build_pointer_type (char_type_node),
1878 			       filename),
1879 		       line, col);
1880 }
1881 
1882 /* Similar to build_call_raise_column, for an index or range check exception,
1883    with extra information of the form "INDEX out of range FIRST..LAST".  */
1884 
1885 tree
1886 build_call_raise_range (int msg, Node_Id gnat_node, char kind,
1887 			tree index, tree first, tree last)
1888 {
1889   tree fndecl = gnat_raise_decls_ext[msg];
1890   tree label = get_exception_label (kind);
1891   tree filename, line, col;
1892 
1893   /* If this is to be done as a goto, handle that case.  */
1894   if (label)
1895     return build_goto_raise (label, msg);
1896 
1897   expand_sloc (gnat_node, &filename, &line, &col);
1898 
1899   return
1900     build_call_n_expr (fndecl, 6,
1901 		       build1 (ADDR_EXPR,
1902 			       build_pointer_type (char_type_node),
1903 			       filename),
1904 		       line, col,
1905 		       convert (integer_type_node, index),
1906 		       convert (integer_type_node, first),
1907 		       convert (integer_type_node, last));
1908 }
1909 
1910 /* qsort comparer for the bit positions of two constructor elements
1911    for record components.  */
1912 
1913 static int
1914 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1915 {
1916   const constructor_elt * const elmt1 = (const constructor_elt * const) rt1;
1917   const constructor_elt * const elmt2 = (const constructor_elt * const) rt2;
1918   const_tree const field1 = elmt1->index;
1919   const_tree const field2 = elmt2->index;
1920   const int ret
1921     = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1922 
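  /* If the bit positions are the same, use the DECL_UIDs as a tie-breaker
     to get a deterministic ordering between distinct fields.  */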
1923   return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
1924 }
1925 
1926 /* Return a CONSTRUCTOR of TYPE whose elements are V.  */
1927 
1928 tree
1929 gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
1930 {
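  /* A constructor whose type does not have a fixed size can never be output
     as static data, so be pessimistic in that case from the start.  */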
1931   bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1932   bool read_only = true;
1933   bool side_effects = false;
1934   tree result, obj, val;
1935   unsigned int n_elmts;
1936 
1937   /* Scan the elements to see if they are all constant or if any has side
1938      effects, to let us set global flags on the resulting constructor.  Count
1939      the elements along the way for possible sorting purposes below.  */
1940   FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
1941     {
1942       /* The predicate must be in keeping with output_constructor.  */
1943       if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
1944 	  || (TREE_CODE (type) == RECORD_TYPE
1945 	      && CONSTRUCTOR_BITFIELD_P (obj)
1946 	      && !initializer_constant_valid_for_bitfield_p (val))
1947 	  || !initializer_constant_valid_p (val,
1948 					    TREE_TYPE (val),
1949 					    TYPE_REVERSE_STORAGE_ORDER (type)))
1950 	allconstant = false;
1951 
1952       if (!TREE_READONLY (val))
1953 	read_only = false;
1954 
1955       if (TREE_SIDE_EFFECTS (val))
1956 	side_effects = true;
1957     }
1958 
1959   /* For record types with constant components only, sort field list
1960      by increasing bit position.  This is necessary to ensure the
1961      constructor can be output as static data.  */
1962   if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1963     v->qsort (compare_elmt_bitpos);
1964 
1965   result = build_constructor (type, v);
1966   CONSTRUCTOR_NO_CLEARING (result) = 1;
1967   TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1968   TREE_SIDE_EFFECTS (result) = side_effects;
1969   TREE_READONLY (result) = TYPE_READONLY (type) || read_only || allconstant;
1970   return result;
1971 }
1972 
1973 /* Return a COMPONENT_REF to access FIELD in RECORD, or NULL_TREE if the field
1974    is not found in the record.  Don't fold the result if NO_FOLD is true.  */
1975 
1976 static tree
1977 build_simple_component_ref (tree record, tree field, bool no_fold)
1978 {
1979   tree type = TYPE_MAIN_VARIANT (TREE_TYPE (record));
1980   tree ref;
1981 
1982   gcc_assert (RECORD_OR_UNION_TYPE_P (type) && COMPLETE_TYPE_P (type));
1983 
1984   /* Try to fold a conversion from another record or union type unless the type
1985      contains a placeholder as it might be needed for a later substitution.  */
1986   if (TREE_CODE (record) == VIEW_CONVERT_EXPR
1987       && RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (record, 0)))
1988       && !type_contains_placeholder_p (type))
1989     {
1990       tree op = TREE_OPERAND (record, 0);
1991 
1992       /* If this is an unpadding operation, convert the underlying object to
1993 	 the unpadded type directly.  */
1994       if (TYPE_IS_PADDING_P (type) && field == TYPE_FIELDS (type))
1995 	return convert (TREE_TYPE (field), op);
1996 
1997       /* Otherwise try to access FIELD directly in the underlying type, but
1998 	 make sure that the form of the reference doesn't change too much;
1999 	 this can happen for an unconstrained bit-packed array type whose
2000 	 constrained form can be an integer type.  */
2001       ref = build_simple_component_ref (op, field, no_fold);
2002       if (ref && TREE_CODE (TREE_TYPE (ref)) == TREE_CODE (TREE_TYPE (field)))
2003 	return ref;
2004     }
2005 
2006   /* If this field is not in the specified record, see if we can find a field
2007      in the specified record whose original field is the same as this one.  */
2008   if (DECL_CONTEXT (field) != type)
2009     {
2010       tree new_field;
2011 
2012       /* First loop through normal components.  */
2013       for (new_field = TYPE_FIELDS (type);
2014 	   new_field;
2015 	   new_field = DECL_CHAIN (new_field))
2016 	if (SAME_FIELD_P (field, new_field))
2017 	  break;
2018 
2019       /* Next, loop through DECL_INTERNAL_P components if we haven't found the
2020 	 component in the first search.  Doing this search in two steps is
2021 	 required to avoid hidden homonymous fields in the _Parent field.  */
2022       if (!new_field)
2023 	for (new_field = TYPE_FIELDS (type);
2024 	     new_field;
2025 	     new_field = DECL_CHAIN (new_field))
2026 	  if (DECL_INTERNAL_P (new_field)
2027 	      && RECORD_OR_UNION_TYPE_P (TREE_TYPE (new_field)))
2028 	    {
2029 	      tree field_ref
2030 		= build_simple_component_ref (record, new_field, no_fold);
2031 	      ref = build_simple_component_ref (field_ref, field, no_fold);
2032 	      if (ref)
2033 		return ref;
2034 	    }
2035 
2036       field = new_field;
2037     }
2038 
2039   if (!field)
2040     return NULL_TREE;
2041 
2042   /* If the field's offset has overflowed, do not try to access it, as doing
2043      so may trigger sanity checks deeper in the back-end.  Note that we don't
2044      need to warn since this will be done on trying to declare the object.  */
2045   if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
2046       && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
2047     return NULL_TREE;
2048 
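  /* The third operand is left to NULL_TREE so that the regular field offset,
     as given by DECL_FIELD_OFFSET, is used.  */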
2049   ref = build3 (COMPONENT_REF, TREE_TYPE (field), record, field, NULL_TREE);
2050 
2051   if (TREE_READONLY (record)
2052       || TREE_READONLY (field)
2053       || TYPE_READONLY (type))
2054     TREE_READONLY (ref) = 1;
2055 
2056   if (TREE_THIS_VOLATILE (record)
2057       || TREE_THIS_VOLATILE (field)
2058       || TYPE_VOLATILE (type))
2059     TREE_THIS_VOLATILE (ref) = 1;
2060 
2061   if (no_fold)
2062     return ref;
2063 
2064   /* The generic folder may punt in this case because the inner array type
2065      can be self-referential, but folding is in fact not problematic.  */
2066   if (TREE_CODE (record) == CONSTRUCTOR
2067       && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record)))
2068     {
2069       vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (record);
2070       unsigned HOST_WIDE_INT idx;
2071       tree index, value;
2072       FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
2073        if (index == field)
2074 	return value;
2075       return ref;
2076     }
2077 
2078   return fold (ref);
2079 }
2080 
2081 /* Likewise, but return NULL_EXPR and generate a Constraint_Error if the
2082    field is not found in the record.  */
2083 
2084 tree
2085 build_component_ref (tree record, tree field, bool no_fold)
2086 {
2087   tree ref = build_simple_component_ref (record, field, no_fold);
2088   if (ref)
2089     return ref;
2090 
2091   /* Assume this is an invalid user field so raise Constraint_Error.  */
2092   return build1 (NULL_EXPR, TREE_TYPE (field),
2093 		 build_call_raise (CE_Discriminant_Check_Failed, Empty,
2094 				   N_Raise_Constraint_Error));
2095 }
2096 
2097 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2098    identically.  Process the case where a GNAT_PROC to call is provided.  */
2099 
2100 static inline tree
2101 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2102 			       Entity_Id gnat_proc, Entity_Id gnat_pool)
2103 {
2104   tree gnu_proc = gnat_to_gnu (gnat_proc);
2105   tree gnu_call;
2106 
2107   /* A storage pool's underlying type is a record type (for both predefined
2108      storage pools and GNAT simple storage pools). The secondary stack uses
2109      the same mechanism, but its pool object (SS_Pool) is an integer.  */
2110   if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
2111     {
2112       /* The size is the third parameter; the alignment, which comes right
2113 	 after it, has the same type.  */
2114       Entity_Id gnat_size_type
2115 	= Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2116       tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2117 
2118       tree gnu_pool = gnat_to_gnu (gnat_pool);
2119       tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
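      /* TYPE_ALIGN is in bits but the procedure expects storage units.  */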
2120       tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2121 
2122       gnu_size = convert (gnu_size_type, gnu_size);
2123       gnu_align = convert (gnu_size_type, gnu_align);
2124 
2125       /* The first arg is always the address of the storage pool; next
2126 	 comes the address of the object, for a deallocator, then the
2127 	 size and alignment.  */
2128       if (gnu_obj)
2129 	gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2130 				      gnu_size, gnu_align);
2131       else
2132 	gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2133 				      gnu_size, gnu_align);
2134     }
2135 
2136   /* Secondary stack case.  */
2137   else
2138     {
2139       /* The size is the second parameter.  */
2140       Entity_Id gnat_size_type
2141 	= Etype (Next_Formal (First_Formal (gnat_proc)));
2142       tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2143 
2144       gnu_size = convert (gnu_size_type, gnu_size);
2145 
2146       /* The first arg is the address of the object, for a deallocator,
2147 	 then the size.  */
2148       if (gnu_obj)
2149 	gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
2150       else
2151 	gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
2152     }
2153 
2154   return gnu_call;
2155 }
2156 
2157 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
2158    DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2159    __gnat_malloc allocator.  Honor DATA_TYPE alignments greater than what the
2160    latter offers.  */
2161 
2162 static inline tree
2163 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2164 {
2165   /* When the DATA_TYPE alignment is stricter than what malloc offers
2166      (super-aligned case), we allocate an "aligning" wrapper type and return
2167      the address of its single data field with the malloc's return value
2168      stored just in front.  */
2169 
2170   unsigned int data_align = TYPE_ALIGN (data_type);
2171   unsigned int system_allocator_alignment
2172       = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2173 
2174   tree aligning_type
2175     = ((data_align > system_allocator_alignment)
2176        ? make_aligning_type (data_type, data_align, data_size,
2177 			     system_allocator_alignment,
2178 			     POINTER_SIZE / BITS_PER_UNIT,
2179 			     gnat_node)
2180        : NULL_TREE);
2181 
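  /* In the super-aligned case, malloc the size of the wrapper type so that
     there is room both to realign the data and to store the original
     malloc'ed pointer just in front of it.  */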
2182   tree size_to_malloc
2183     = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
2184 
2185   tree malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
2186 
2187   if (aligning_type)
2188     {
2189       /* Latch malloc's return value and get a pointer to the aligning field
2190 	 first.  */
2191       tree storage_ptr = gnat_protect_expr (malloc_ptr);
2192 
2193       tree aligning_record_addr
2194 	= convert (build_pointer_type (aligning_type), storage_ptr);
2195 
2196       tree aligning_record
2197 	= build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2198 
2199       tree aligning_field
2200 	= build_component_ref (aligning_record, TYPE_FIELDS (aligning_type),
2201 			       false);
2202 
2203       tree aligning_field_addr
2204         = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2205 
2206       /* Then arrange to store the allocator's return value ahead
2207 	 and return.  */
2208       tree storage_ptr_slot_addr
2209 	= build_binary_op (POINTER_PLUS_EXPR, ptr_type_node,
2210 			   convert (ptr_type_node, aligning_field_addr),
2211 			   size_int (-(HOST_WIDE_INT) POINTER_SIZE
2212 				     / BITS_PER_UNIT));
2213 
2214       tree storage_ptr_slot
2215 	= build_unary_op (INDIRECT_REF, NULL_TREE,
2216 			  convert (build_pointer_type (ptr_type_node),
2217 				   storage_ptr_slot_addr));
2218 
2219       return
2220 	build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2221 		build_binary_op (INIT_EXPR, NULL_TREE,
2222 				 storage_ptr_slot, storage_ptr),
2223 		aligning_field_addr);
2224     }
2225   else
2226     return malloc_ptr;
2227 }
2228 
2229 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2230    designated by DATA_PTR using the __gnat_free entry point.  */
2231 
2232 static inline tree
2233 maybe_wrap_free (tree data_ptr, tree data_type)
2234 {
2235   /* In the regular alignment case, we pass the data pointer straight to free.
2236      In the superaligned case, we need to retrieve the initial allocator
2237      return value, stored in front of the data block at allocation time.  */
2238 
2239   unsigned int data_align = TYPE_ALIGN (data_type);
2240   unsigned int system_allocator_alignment
2241       = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2242 
2243   tree free_ptr;
2244 
2245   if (data_align > system_allocator_alignment)
2246     {
2247       /* DATA_FRONT_PTR (void *)
2248 	 = (void *)DATA_PTR - (void *)sizeof (void *))  */
2249       tree data_front_ptr
2250 	= build_binary_op
2251 	  (POINTER_PLUS_EXPR, ptr_type_node,
2252 	   convert (ptr_type_node, data_ptr),
2253 	   size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2254 
2255       /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR  */
2256       free_ptr
2257 	= build_unary_op
2258 	  (INDIRECT_REF, NULL_TREE,
2259 	   convert (build_pointer_type (ptr_type_node), data_front_ptr));
2260     }
2261   else
2262     free_ptr = data_ptr;
2263 
2264   return build_call_n_expr (free_decl, 1, free_ptr);
2265 }
2266 
2267 /* Build a GCC tree to call an allocation or deallocation function.
2268    If GNU_OBJ is nonzero, it is an object to deallocate.  Otherwise,
2269    generate an allocator.
2270 
2271    GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2272    object type, used to determine the to-be-honored address alignment.
2273    GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2274    pool to use.  If not present, malloc and free are used.  GNAT_NODE is used
2275    to provide an error location for restriction violation messages.  */
2276 
2277 tree
2278 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2279                           Entity_Id gnat_proc, Entity_Id gnat_pool,
2280                           Node_Id gnat_node)
2281 {
2282   /* Explicit proc to call?  This one is assumed to deal with the type
2283      alignment constraints.  */
2284   if (Present (gnat_proc))
2285     return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2286 					  gnat_proc, gnat_pool);
2287 
2288   /* Otherwise, object to "free" or "malloc" with possible special processing
2289      for alignments stricter than what the default allocator honors.  */
2290   else if (gnu_obj)
2291     return maybe_wrap_free (gnu_obj, gnu_type);
2292   else
2293     {
2294       /* Assert that we can no longer be called with this special pool.  */
2295       gcc_assert (gnat_pool != -1);
2296 
2297       /* Check that we aren't violating the associated restriction.  */
2298       if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2299 	{
2300 	  Check_No_Implicit_Heap_Alloc (gnat_node);
2301 	  if (Has_Task (Etype (gnat_node)))
2302 	    Check_No_Implicit_Task_Alloc (gnat_node);
2303 	  if (Has_Protected (Etype (gnat_node)))
2304 	    Check_No_Implicit_Protected_Alloc (gnat_node);
2305 	}
2306       return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2307     }
2308 }
2309 
2310 /* Build a GCC tree that corresponds to allocating an object of TYPE whose
2311    initial value is INIT, if INIT is nonzero.  Convert the expression to
2312    RESULT_TYPE, which must be some pointer type, and return the result.
2313 
2314    GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2315    the storage pool to use.  GNAT_NODE is used to provide an error
2316    location for restriction violation messages.  If IGNORE_INIT_TYPE is
2317    true, ignore the type of INIT for the purpose of determining the size;
2318    this will cause the maximum size to be allocated if TYPE is of
2319    self-referential size.  */
2320 
2321 tree
2322 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2323                  Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2324 {
2325   tree size, storage, storage_deref, storage_init;
2326 
2327   /* If the initializer, if present, is a NULL_EXPR, just return a new one.  */
2328   if (init && TREE_CODE (init) == NULL_EXPR)
2329     return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2330 
2331   /* If the initializer, if present, is a COND_EXPR, deal with each branch.  */
2332   else if (init && TREE_CODE (init) == COND_EXPR)
2333     return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2334 		   build_allocator (type, TREE_OPERAND (init, 1), result_type,
2335 				    gnat_proc, gnat_pool, gnat_node,
2336 				    ignore_init_type),
2337 		   build_allocator (type, TREE_OPERAND (init, 2), result_type,
2338 				    gnat_proc, gnat_pool, gnat_node,
2339 				    ignore_init_type));
2340 
2341   /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2342      sizes of the object and its template.  Allocate the whole thing and
2343      fill in the parts that are known.  */
2344   else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2345     {
2346       tree storage_type
2347 	= build_unc_object_type_from_ptr (result_type, type,
2348 					  get_identifier ("ALLOC"), false);
2349       tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2350       tree storage_ptr_type = build_pointer_type (storage_type);
2351 
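      /* Compute the size, substituting the initializing expression for any
	 PLACEHOLDER_EXPR present in the size of the storage type.  */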
2352       size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2353 					     init);
2354 
2355       /* If the size overflows, pass -1 so Storage_Error will be raised.  */
2356       if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2357 	size = size_int (-1);
2358 
2359       storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2360 					  gnat_proc, gnat_pool, gnat_node);
2361       storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2362       storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2363       TREE_THIS_NOTRAP (storage_deref) = 1;
2364 
2365       /* If there is an initializing expression, then make a constructor for
2366 	 the entire object including the bounds and copy it into the object.
2367 	 If there is no initializing expression, just set the bounds.  */
2368       if (init)
2369 	{
2370 	  vec<constructor_elt, va_gc> *v;
2371 	  vec_alloc (v, 2);
2372 
2373 	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2374 				  build_template (template_type, type, init));
2375 	  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2376 				  init);
2377 	  storage_init
2378 	    = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
2379 			       gnat_build_constructor (storage_type, v));
2380 	}
2381       else
2382 	storage_init
2383 	  = build_binary_op (INIT_EXPR, NULL_TREE,
2384 			     build_component_ref (storage_deref,
2385 						  TYPE_FIELDS (storage_type),
2386 						  false),
2387 			     build_template (template_type, type, NULL_TREE));
2388 
2389       return build2 (COMPOUND_EXPR, result_type,
2390 		     storage_init, convert (result_type, storage));
2391     }
2392 
2393   size = TYPE_SIZE_UNIT (type);
2394 
2395   /* If we have an initializing expression, see if its size is simpler
2396      than the size from the type.  */
2397   if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2398       && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2399 	  || CONTAINS_PLACEHOLDER_P (size)))
2400     size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2401 
2402   /* If the size is still self-referential, reference the initializing
2403      expression, if it is present.  If not, this must have been a
2404      call to allocate a library-level object, in which case we use
2405      the maximum size.  */
2406   if (CONTAINS_PLACEHOLDER_P (size))
2407     {
2408       if (!ignore_init_type && init)
2409 	size = substitute_placeholder_in_expr (size, init);
2410       else
2411 	size = max_size (size, true);
2412     }
2413 
2414   /* If the size overflows, pass -1 so Storage_Error will be raised.  */
2415   if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2416     size = size_int (-1);
2417 
2418   storage = convert (result_type,
2419 		     build_call_alloc_dealloc (NULL_TREE, size, type,
2420 					       gnat_proc, gnat_pool,
2421 					       gnat_node));
2422 
2423   /* If we have an initial value, protect the new address, assign the value
2424      and return the address with a COMPOUND_EXPR.  */
2425   if (init)
2426     {
2427       storage = gnat_protect_expr (storage);
2428       storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2429       TREE_THIS_NOTRAP (storage_deref) = 1;
2430       storage_init
2431 	= build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
2432       return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
2433     }
2434 
2435   return storage;
2436 }
2437 
2438 /* Indicate that we need to take the address of T and that it therefore
2439    should not be allocated in a register.  Return true if successful.  */
2440 
2441 bool
2442 gnat_mark_addressable (tree t)
2443 {
2444   while (true)
2445     switch (TREE_CODE (t))
2446       {
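      /* Look through components and conversions to reach the underlying
	 object or declaration.  */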
2447       case ADDR_EXPR:
2448       case COMPONENT_REF:
2449       case ARRAY_REF:
2450       case ARRAY_RANGE_REF:
2451       case REALPART_EXPR:
2452       case IMAGPART_EXPR:
2453       case VIEW_CONVERT_EXPR:
2454       case NON_LVALUE_EXPR:
2455       CASE_CONVERT:
2456 	t = TREE_OPERAND (t, 0);
2457 	break;
2458 
2459       case COMPOUND_EXPR:
2460 	t = TREE_OPERAND (t, 1);
2461 	break;
2462 
2463       case CONSTRUCTOR:
2464 	TREE_ADDRESSABLE (t) = 1;
2465 	return true;
2466 
2467       case VAR_DECL:
2468       case PARM_DECL:
2469       case RESULT_DECL:
2470 	TREE_ADDRESSABLE (t) = 1;
2471 	return true;
2472 
2473       case FUNCTION_DECL:
2474 	TREE_ADDRESSABLE (t) = 1;
2475 	return true;
2476 
2477       case CONST_DECL:
2478 	return DECL_CONST_CORRESPONDING_VAR (t)
2479 	       && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2480 
2481       default:
2482 	return true;
2483     }
2484 }
2485 
2486 /* Return true if EXP is a stable expression for the purpose of the functions
2487    below and, therefore, can be returned unmodified by them.  We accept things
2488    that are actual constants or that have already been handled.  */
2489 
2490 static bool
2491 gnat_stable_expr_p (tree exp)
2492 {
2493   enum tree_code code = TREE_CODE (exp);
2494   return TREE_CONSTANT (exp) || code == NULL_EXPR || code == SAVE_EXPR;
2495 }
2496 
2497 /* Save EXP for later use or reuse.  This is equivalent to save_expr in tree.c
2498    but we know how to handle our own nodes.  */
2499 
2500 tree
2501 gnat_save_expr (tree exp)
2502 {
2503   tree type = TREE_TYPE (exp);
2504   enum tree_code code = TREE_CODE (exp);
2505 
2506   if (gnat_stable_expr_p (exp))
2507     return exp;
2508 
2509   if (code == UNCONSTRAINED_ARRAY_REF)
2510     {
2511       tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2512       TREE_READONLY (t) = TYPE_READONLY (type);
2513       return t;
2514     }
2515 
2516   /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2517      This may be more efficient, but will also allow us to more easily find
2518      the match for the PLACEHOLDER_EXPR.  */
2519   if (code == COMPONENT_REF
2520       && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2521     return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
2522 		   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2523 
2524   return save_expr (exp);
2525 }
2526 
2527 /* Protect EXP for immediate reuse.  This is a variant of gnat_save_expr that
2528    is optimized under the assumption that EXP's value doesn't change before
2529    its subsequent reuse(s) except through its potential reevaluation.  */
2530 
2531 tree
2532 gnat_protect_expr (tree exp)
2533 {
2534   tree type = TREE_TYPE (exp);
2535   enum tree_code code = TREE_CODE (exp);
2536 
2537   if (gnat_stable_expr_p (exp))
2538     return exp;
2539 
2540   /* If EXP has no side effects, we theoretically don't need to do anything.
2541      However, we may be recursively passed more and more complex expressions
2542      involving checks which will be reused multiple times and eventually be
2543      unshared for gimplification; in order to avoid a complexity explosion
2544      at that point, we protect any expressions more complex than a simple
2545      arithmetic expression.  */
2546   if (!TREE_SIDE_EFFECTS (exp))
2547     {
2548       tree inner = skip_simple_arithmetic (exp);
2549       if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2550 	return exp;
2551     }
2552 
2553   /* If this is a conversion, protect what's inside the conversion.  */
2554   if (code == NON_LVALUE_EXPR
2555       || CONVERT_EXPR_CODE_P (code)
2556       || code == VIEW_CONVERT_EXPR)
2557   return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2558 
2559   /* If we're indirectly referencing something, we only need to protect the
2560      address since the data itself can't change in these situations.  */
2561   if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2562     {
2563       tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2564       TREE_READONLY (t) = TYPE_READONLY (type);
2565       return t;
2566     }
2567 
2568   /* Likewise if we're indirectly referencing part of something.  */
2569   if (code == COMPONENT_REF
2570       && TREE_CODE (TREE_OPERAND (exp, 0)) == INDIRECT_REF)
2571     return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2572 		   TREE_OPERAND (exp, 1), NULL_TREE);
2573 
2574   /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2575      This may be more efficient, but will also allow us to more easily find
2576      the match for the PLACEHOLDER_EXPR.  */
2577   if (code == COMPONENT_REF
2578       && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2579     return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2580 		   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2581 
2582   /* If this is a fat pointer or a scalar, just make a SAVE_EXPR.  Likewise
2583      for a CALL_EXPR as large objects are returned via invisible reference
2584      in most ABIs so the temporary will directly be filled by the callee.  */
2585   if (TYPE_IS_FAT_POINTER_P (type)
2586       || !AGGREGATE_TYPE_P (type)
2587       || code == CALL_EXPR)
2588     return save_expr (exp);
2589 
2590   /* Otherwise, take the address of EXP, protect it and dereference it.  */
2591   return
2592     build_unary_op (INDIRECT_REF, type,
2593 		    save_expr (build_unary_op (ADDR_EXPR, NULL_TREE, exp)));
2594 }
2595 
2596 /* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
2597    argument to force evaluation of everything.  */
2598 
2599 static tree
2600 gnat_stabilize_reference_1 (tree e, void *data)
2601 {
2602   const bool force = *(bool *)data;
2603   enum tree_code code = TREE_CODE (e);
2604   tree type = TREE_TYPE (e);
2605   tree result;
2606 
2607   if (gnat_stable_expr_p (e))
2608     return e;
2609 
2610   switch (TREE_CODE_CLASS (code))
2611     {
2612     case tcc_exceptional:
2613     case tcc_declaration:
2614     case tcc_comparison:
2615     case tcc_expression:
2616     case tcc_reference:
2617     case tcc_vl_exp:
2618       /* If this is a COMPONENT_REF of a fat pointer, save the entire
2619 	 fat pointer.  This may be more efficient, but will also allow
2620 	 us to more easily find the match for the PLACEHOLDER_EXPR.  */
2621       if (code == COMPONENT_REF
2622 	  && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2623 	result
2624 	  = build3 (code, type,
2625 		    gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2626 		    TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
2627       /* If the expression has side-effects, then encase it in a SAVE_EXPR
2628 	 so that it will only be evaluated once.  */
2629       /* The tcc_reference and tcc_comparison classes could be handled as
2630 	 below, but it is generally faster to only evaluate them once.  */
2631       else if (TREE_SIDE_EFFECTS (e) || force)
2632 	return save_expr (e);
2633       else
2634 	return e;
2635       break;
2636 
2637     case tcc_binary:
2638       /* Recursively stabilize each operand.  */
2639       result
2640 	= build2 (code, type,
2641 		  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2642 		  gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), data));
2643       break;
2644 
2645     case tcc_unary:
2646       /* Recursively stabilize each operand.  */
2647       result
2648 	= build1 (code, type,
2649 		  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data));
2650       break;
2651 
2652     default:
2653       gcc_unreachable ();
2654     }
2655 
2656   TREE_READONLY (result) = TREE_READONLY (e);
2657   TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
2658   TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2659 
2660   return result;
2661 }
2662 
2663 /* This is equivalent to stabilize_reference in tree.c but we know how to
2664    handle our own nodes and we take extra arguments.  FORCE says whether to
2665    force evaluation of everything in REF.  INIT is set to the first arm of
2666    a COMPOUND_EXPR present in REF, if any.  */
2667 
2668 tree
2669 gnat_stabilize_reference (tree ref, bool force, tree *init)
2670 {
2671   return
2672     gnat_rewrite_reference (ref, gnat_stabilize_reference_1, &force, init);
2673 }
2674 
2675 /* Rewrite reference REF and call FUNC on each expression within REF in the
2676    process.  DATA is passed unmodified to FUNC.  INIT is set to the first
2677    arm of a COMPOUND_EXPR present in REF, if any.  */
2678 
2679 tree
2680 gnat_rewrite_reference (tree ref, rewrite_fn func, void *data, tree *init)
2681 {
2682   tree type = TREE_TYPE (ref);
2683   enum tree_code code = TREE_CODE (ref);
2684   tree result;
2685 
2686   switch (code)
2687     {
2688     case CONST_DECL:
2689     case VAR_DECL:
2690     case PARM_DECL:
2691     case RESULT_DECL:
2692       /* No action is needed in this case.  */
2693       return ref;
2694 
2695     CASE_CONVERT:
2696     case FLOAT_EXPR:
2697     case FIX_TRUNC_EXPR:
2698     case REALPART_EXPR:
2699     case IMAGPART_EXPR:
2700     case VIEW_CONVERT_EXPR:
2701       result
2702 	= build1 (code, type,
2703 		  gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2704 					  init));
2705       break;
2706 
2707     case INDIRECT_REF:
2708     case UNCONSTRAINED_ARRAY_REF:
2709       result = build1 (code, type, func (TREE_OPERAND (ref, 0), data));
2710       break;
2711 
2712     case COMPONENT_REF:
2713       result = build3 (COMPONENT_REF, type,
2714 		       gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2715 					       data, init),
2716 		       TREE_OPERAND (ref, 1), NULL_TREE);
2717       break;
2718 
2719     case BIT_FIELD_REF:
2720       result = build3 (BIT_FIELD_REF, type,
2721 		       gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2722 					       data, init),
2723 		       TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
2724       REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
2725       break;
2726 
2727     case ARRAY_REF:
2728     case ARRAY_RANGE_REF:
2729       result
2730 	= build4 (code, type,
2731 		  gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2732 					  init),
2733 		  func (TREE_OPERAND (ref, 1), data),
2734 		  TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
2735       break;
2736 
2737     case COMPOUND_EXPR:
2738       gcc_assert (!*init);
2739       *init = TREE_OPERAND (ref, 0);
2740       /* We expect only the pattern built in Call_to_gnu.  */
2741       gcc_assert (DECL_P (TREE_OPERAND (ref, 1))
2742 		  || (TREE_CODE (TREE_OPERAND (ref, 1)) == COMPONENT_REF
2743 		      && DECL_P (TREE_OPERAND (TREE_OPERAND (ref, 1), 0))));
2744       return TREE_OPERAND (ref, 1);
2745 
2746     case CALL_EXPR:
2747       {
2748 	/* This can only be an atomic load.  */
2749 	gcc_assert (call_is_atomic_load (ref));
2750 
2751 	/* An atomic load is an INDIRECT_REF of its first argument.  */
2752 	tree t = CALL_EXPR_ARG (ref, 0);
2753 	if (TREE_CODE (t) == NOP_EXPR)
2754 	  t = TREE_OPERAND (t, 0);
2755 	if (TREE_CODE (t) == ADDR_EXPR)
2756 	  t = build1 (ADDR_EXPR, TREE_TYPE (t),
2757 		      gnat_rewrite_reference (TREE_OPERAND (t, 0), func, data,
2758 					      init));
2759 	else
2760 	  t = func (t, data);
2761 	t = fold_convert (TREE_TYPE (CALL_EXPR_ARG (ref, 0)), t);
2762 
2763 	result = build_call_expr (TREE_OPERAND (CALL_EXPR_FN (ref), 0), 2,
2764 				  t, CALL_EXPR_ARG (ref, 1));
2765       }
2766       break;
2767 
2768     case ERROR_MARK:
2769     case NULL_EXPR:
2770       return ref;
2771 
2772     default:
2773       gcc_unreachable ();
2774     }
2775 
2776   /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
2777      may not be sustained across some paths, such as the one via build1 for
2778      INDIRECT_REF.  We reset those flags here in the general case, which is
2779      consistent with the GCC version of this routine.
2780 
2781      Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2782      paths introduce side-effects where there was none initially (e.g. if a
2783      SAVE_EXPR is built) and we also want to keep track of that.  */
2784   TREE_READONLY (result) = TREE_READONLY (ref);
2785   TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
2786   TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2787 
2788   if (code == INDIRECT_REF
2789       || code == UNCONSTRAINED_ARRAY_REF
2790       || code == ARRAY_REF
2791       || code == ARRAY_RANGE_REF)
2792     TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2793 
2794   return result;
2795 }
2796 
2797 /* This is equivalent to get_inner_reference in expr.c but it returns the
2798    ultimate containing object only if the reference (lvalue) is constant,
2799    i.e. if it doesn't depend on the context in which it is evaluated.  */
2800 
2801 tree
2802 get_inner_constant_reference (tree exp)
2803 {
2804   while (true)
2805     {
2806       switch (TREE_CODE (exp))
2807 	{
2808 	case BIT_FIELD_REF:
2809 	  break;
2810 
2811 	case COMPONENT_REF:
2812 	  if (TREE_OPERAND (exp, 2))
2813 	    return NULL_TREE;
2814 
2815 	  if (!TREE_CONSTANT (DECL_FIELD_OFFSET (TREE_OPERAND (exp, 1))))
2816 	    return NULL_TREE;
2817 	  break;
2818 
2819 	case ARRAY_REF:
2820 	case ARRAY_RANGE_REF:
2821 	  {
2822 	    if (TREE_OPERAND (exp, 2) || TREE_OPERAND (exp, 3))
2823 	      return NULL_TREE;
2824 
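	    /* The offset of the element depends on the index, on the lower
	       bound of the index type and on the size of the component type,
	       so all of them must be constant.  */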
2825 	    tree array_type = TREE_TYPE (TREE_OPERAND (exp, 0));
2826 	    if (!TREE_CONSTANT (TREE_OPERAND (exp, 1))
2827 	        || !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
2828 	        || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (array_type))))
2829 	      return NULL_TREE;
2830 	  }
2831 	  break;
2832 
2833 	case REALPART_EXPR:
2834 	case IMAGPART_EXPR:
2835 	case VIEW_CONVERT_EXPR:
2836 	  break;
2837 
2838 	default:
2839 	  goto done;
2840 	}
2841 
2842       exp = TREE_OPERAND (exp, 0);
2843     }
2844 
2845 done:
2846   return exp;
2847 }
2848 
2849 /* Return true if EXPR is the addition or the subtraction of a constant and,
2850    if so, set *ADD to the addend, *CST to the constant and *MINUS_P to true
2851    if this is a subtraction.  */
2852 
2853 bool
2854 is_simple_additive_expression (tree expr, tree *add, tree *cst, bool *minus_p)
2855 {
2856   /* Skip overflow checks.  */
2857   if (TREE_CODE (expr) == COND_EXPR
2858       && TREE_CODE (COND_EXPR_THEN (expr)) == COMPOUND_EXPR
2859       && TREE_CODE (TREE_OPERAND (COND_EXPR_THEN (expr), 0)) == CALL_EXPR
2860       && get_callee_fndecl (TREE_OPERAND (COND_EXPR_THEN (expr), 0))
2861          == gnat_raise_decls[CE_Overflow_Check_Failed])
2862     expr = COND_EXPR_ELSE (expr);
2863 
2864   if (TREE_CODE (expr) == PLUS_EXPR)
2865     {
2866       if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
2867 	{
2868 	  *add = TREE_OPERAND (expr, 1);
2869 	  *cst = TREE_OPERAND (expr, 0);
2870 	  *minus_p = false;
2871 	  return true;
2872 	}
2873       else if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
2874 	{
2875 	  *add = TREE_OPERAND (expr, 0);
2876 	  *cst = TREE_OPERAND (expr, 1);
2877 	  *minus_p = false;
2878 	  return true;
2879 	}
2880     }
2881   else if (TREE_CODE (expr) == MINUS_EXPR)
2882     {
2883       if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
2884 	{
2885 	  *add = TREE_OPERAND (expr, 0);
2886 	  *cst = TREE_OPERAND (expr, 1);
2887 	  *minus_p = true;
2888 	  return true;
2889 	}
2890     }
2891 
2892   return false;
2893 }
2894 
2895 /* If EXPR is an expression that is invariant in the current function, in the
2896    sense that it can be evaluated anywhere in the function and any number of
2897    times, return EXPR or an equivalent expression.  Otherwise return NULL.  */
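
/* A minimal usage sketch (GNU_EXPR names a caller-provided tree; it is
   only an illustration):

     tree inv_expr = gnat_invariant_expr (gnu_expr);
     if (inv_expr != NULL_TREE)
       ...INV_EXPR can be evaluated once and reused anywhere in the current
	  function, e.g. to hoist a loop bound out of the loop...

   Typical candidates are constants, formal parameters and read-only
   references rooted at objects defined outside the current function.  */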

tree
gnat_invariant_expr (tree expr)
{
  const tree type = TREE_TYPE (expr);
  tree add, cst;
  bool minus_p;

  expr = remove_conversions (expr, false);

  /* Look through temporaries created to capture values.  */
  while ((TREE_CODE (expr) == CONST_DECL
	  || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
	 && decl_function_context (expr) == current_function_decl
	 && DECL_INITIAL (expr))
    {
      expr = DECL_INITIAL (expr);
      /* Look into CONSTRUCTORs built to initialize padded types.  */
      if (TYPE_IS_PADDING_P (TREE_TYPE (expr)))
	expr = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (expr))), expr);
      expr = remove_conversions (expr, false);
    }

  /* We are only interested in scalar types at the moment and, even if we may
     have gone through padding types in the above loop, we must be back to a
     scalar value at this point.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (expr)))
    return NULL_TREE;

  if (TREE_CONSTANT (expr))
    return fold_convert (type, expr);

  /* Deal with addition or subtraction of constants.  */
  if (is_simple_additive_expression (expr, &add, &cst, &minus_p))
    {
      add = gnat_invariant_expr (add);
      if (add)
	return
	  fold_build2 (minus_p ? MINUS_EXPR : PLUS_EXPR, type,
		       fold_convert (type, add), fold_convert (type, cst));
      else
	return NULL_TREE;
    }

  bool invariant_p = false;
  tree t = expr;

  while (true)
    {
      switch (TREE_CODE (t))
	{
	case COMPONENT_REF:
	  if (TREE_OPERAND (t, 2))
	    return NULL_TREE;
	  invariant_p |= DECL_INVARIANT_P (TREE_OPERAND (t, 1));
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
	      || TREE_OPERAND (t, 2)
	      || TREE_OPERAND (t, 3))
	    return NULL_TREE;
	  break;

	case BIT_FIELD_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	case VIEW_CONVERT_EXPR:
	CASE_CONVERT:
	  break;

	case INDIRECT_REF:
	  if ((!invariant_p && !TREE_READONLY (t)) || TREE_SIDE_EFFECTS (t))
	    return NULL_TREE;
	  invariant_p = false;
	  break;

	default:
	  goto object;
	}

      t = TREE_OPERAND (t, 0);
    }

object:
  if (TREE_SIDE_EFFECTS (t))
    return NULL_TREE;

  if (TREE_CODE (t) == CONST_DECL
      && (DECL_EXTERNAL (t)
	  || decl_function_context (t) != current_function_decl))
    return fold_convert (type, expr);

  if (!invariant_p && !TREE_READONLY (t))
    return NULL_TREE;

  if (TREE_CODE (t) == PARM_DECL)
    return fold_convert (type, expr);

  if (TREE_CODE (t) == VAR_DECL
      && (DECL_EXTERNAL (t)
	  || decl_function_context (t) != current_function_decl))
    return fold_convert (type, expr);

  return NULL_TREE;
}