1 /****************************************************************************
2  *                                                                          *
3  *                         GNAT COMPILER COMPONENTS                         *
4  *                                                                          *
5  *                               U T I L S 2                                *
6  *                                                                          *
7  *                          C Implementation File                           *
8  *                                                                          *
9  *          Copyright (C) 1992-2017, Free Software Foundation, Inc.         *
10  *                                                                          *
11  * GNAT is free software;  you can  redistribute it  and/or modify it under *
12  * terms of the  GNU General Public License as published  by the Free Soft- *
13  * ware  Foundation;  either version 3,  or (at your option) any later ver- *
14  * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
15  * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
16  * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
17  * for  more details.  You should have received a copy of the GNU General   *
18  * Public License along with GCC; see the file COPYING3.  If not see        *
19  * <http://www.gnu.org/licenses/>.                                          *
20  *                                                                          *
21  * GNAT was originally developed  by the GNAT team at  New York University. *
22  * Extensive contributions were provided by Ada Core Technologies Inc.      *
23  *                                                                          *
24  ****************************************************************************/
25 
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "memmodel.h"
30 #include "tm.h"
31 #include "vec.h"
32 #include "alias.h"
33 #include "tree.h"
34 #include "inchash.h"
35 #include "fold-const.h"
36 #include "stor-layout.h"
37 #include "stringpool.h"
38 #include "varasm.h"
39 #include "flags.h"
40 #include "toplev.h"
41 #include "ggc.h"
42 #include "tree-inline.h"
43 
44 #include "ada.h"
45 #include "types.h"
46 #include "atree.h"
47 #include "elists.h"
48 #include "namet.h"
49 #include "nlists.h"
50 #include "snames.h"
51 #include "stringt.h"
52 #include "uintp.h"
53 #include "fe.h"
54 #include "sinfo.h"
55 #include "einfo.h"
56 #include "ada-tree.h"
57 #include "gigi.h"
58 
59 /* Return the base type of TYPE.  */
60 
61 tree
62 get_base_type (tree type)
63 {
64   if (TREE_CODE (type) == RECORD_TYPE
65       && TYPE_JUSTIFIED_MODULAR_P (type))
66     type = TREE_TYPE (TYPE_FIELDS (type));
67 
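  /* For an integer or real subtype, TREE_TYPE points to the type it was
     derived from, so follow that chain up to the ultimate base type.  */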
68   while (TREE_TYPE (type)
69 	 && (TREE_CODE (type) == INTEGER_TYPE
70 	     || TREE_CODE (type) == REAL_TYPE))
71     type = TREE_TYPE (type);
72 
73   return type;
74 }
75 
76 /* EXP is a GCC tree representing an address.  See if we can find how strictly
77    the object at this address is aligned and, if so, return the alignment of
78    the object in bits.  Otherwise return 0.  */
79 
80 unsigned int
81 known_alignment (tree exp)
82 {
83   unsigned int this_alignment;
84   unsigned int lhs, rhs;
85 
86   switch (TREE_CODE (exp))
87     {
88     CASE_CONVERT:
89     case VIEW_CONVERT_EXPR:
90     case NON_LVALUE_EXPR:
91       /* Conversions between pointers and integers don't change the alignment
92 	 of the underlying object.  */
93       this_alignment = known_alignment (TREE_OPERAND (exp, 0));
94       break;
95 
96     case COMPOUND_EXPR:
97       /* The value of a COMPOUND_EXPR is that of its second operand.  */
98       this_alignment = known_alignment (TREE_OPERAND (exp, 1));
99       break;
100 
101     case PLUS_EXPR:
102     case MINUS_EXPR:
103       /* If two addresses are added, the alignment of the result is the
104 	 minimum of the two alignments.  */
105       lhs = known_alignment (TREE_OPERAND (exp, 0));
106       rhs = known_alignment (TREE_OPERAND (exp, 1));
107       this_alignment = MIN (lhs, rhs);
108       break;
109 
110     case POINTER_PLUS_EXPR:
111       /* If this is the pattern built for aligning types, decode it.  */
112       if (TREE_CODE (TREE_OPERAND (exp, 1)) == BIT_AND_EXPR
113 	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)) == NEGATE_EXPR)
114 	{
115 	  tree op = TREE_OPERAND (TREE_OPERAND (exp, 1), 1);
116 	  return
117 	    known_alignment (fold_build1 (BIT_NOT_EXPR, TREE_TYPE (op), op));
118 	}
119 
120       /* If we don't know the alignment of the offset, we assume that
121 	 of the base.  */
122       lhs = known_alignment (TREE_OPERAND (exp, 0));
123       rhs = known_alignment (TREE_OPERAND (exp, 1));
124 
125       if (rhs == 0)
126 	this_alignment = lhs;
127       else
128 	this_alignment = MIN (lhs, rhs);
129       break;
130 
131     case COND_EXPR:
132       /* If there is a choice between two values, use the smaller one.  */
133       lhs = known_alignment (TREE_OPERAND (exp, 1));
134       rhs = known_alignment (TREE_OPERAND (exp, 2));
135       this_alignment = MIN (lhs, rhs);
136       break;
137 
138     case INTEGER_CST:
139       {
140 	unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
141 	/* C & -C isolates the lowest set bit in the constant, which is its
142 	   alignment in bytes, so scale it to bits.  */
143 	this_alignment = (c & -c) * BITS_PER_UNIT;
144       }
145       break;
146 
147     case MULT_EXPR:
148       /* If we know the alignment of just one side, use it.  Otherwise,
149 	 use the product of the alignments.  */
150       lhs = known_alignment (TREE_OPERAND (exp, 0));
151       rhs = known_alignment (TREE_OPERAND (exp, 1));
152 
153       if (lhs == 0)
154 	this_alignment = rhs;
155       else if (rhs == 0)
156 	this_alignment = lhs;
157       else
158 	this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
159       break;
160 
161     case BIT_AND_EXPR:
162       /* A bit-and expression is as aligned as the maximum alignment of the
163 	 operands.  We typically get here for a complex lhs and a constant
164 	 negative power of two on the rhs to force an explicit alignment, so
165 	 don't bother looking at the lhs.  */
166       this_alignment = known_alignment (TREE_OPERAND (exp, 1));
167       break;
168 
169     case ADDR_EXPR:
170       this_alignment = expr_align (TREE_OPERAND (exp, 0));
171       break;
172 
173     case CALL_EXPR:
174       {
175 	tree fndecl = get_callee_fndecl (exp);
176 	if (fndecl == malloc_decl || fndecl == realloc_decl)
177 	  return get_target_system_allocator_alignment () * BITS_PER_UNIT;
178 
179 	tree t = maybe_inline_call_in_expr (exp);
180 	if (t)
181 	  return known_alignment (t);
182       }
183 
184       /* ... fall through ... */
185 
186     default:
187       /* For other pointer expressions, we assume that the pointed-to object
188 	 is at least as aligned as the pointed-to type.  Beware that we can
189 	 have a dummy type here (e.g. a Taft Amendment type), for which the
190 	 alignment is meaningless and should be ignored.  */
191       if (POINTER_TYPE_P (TREE_TYPE (exp))
192 	  && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp)))
193 	  && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (exp))))
194 	this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
195       else
196 	this_alignment = 0;
197       break;
198     }
199 
200   return this_alignment;
201 }
202 
203 /* We have a comparison or assignment operation on two types, T1 and T2, which
204    are either both array types or both record types.  T1 is assumed to be for
205    the left hand side operand, and T2 for the right hand side.  Return the
206    type that both operands should be converted to for the operation, if any.
207    Otherwise return zero.  */
208 
209 static tree
210 find_common_type (tree t1, tree t2)
211 {
212   /* ??? As of today, various constructs lead to here with types of different
213      sizes even when both are constant (e.g. tagged types, packable vs regular
214      component types, padded vs unpadded types, ...).  While some of these
215      would better be handled upstream (types should be made consistent before
216      calling into build_binary_op), some others are really expected and we
217      have to be careful.  */
218 
219   const bool variable_record_on_lhs
220     = (TREE_CODE (t1) == RECORD_TYPE
221        && TREE_CODE (t2) == RECORD_TYPE
222        && get_variant_part (t1)
223        && !get_variant_part (t2));
224 
225   const bool variable_array_on_lhs
226     = (TREE_CODE (t1) == ARRAY_TYPE
227        && TREE_CODE (t2) == ARRAY_TYPE
228        && !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)))
229        && TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t2))));
230 
231   /* We must avoid writing more than what the target can hold if this is for
232      an assignment, and the case of tagged types is handled in build_binary_op,
233      so we use the lhs type if it is known to be smaller, or if it is of constant
234      size and the rhs type is not, whatever the modes.  We also force t1 in case of
235      constant size equality to minimize occurrences of view conversions on the
236      lhs of an assignment, except for the case of types with a variable part
237      on the lhs but not on the rhs to make the conversion simpler.  */
238   if (TREE_CONSTANT (TYPE_SIZE (t1))
239       && (!TREE_CONSTANT (TYPE_SIZE (t2))
240 	  || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
241 	  || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
242 	      && !variable_record_on_lhs
243 	      && !variable_array_on_lhs)))
244     return t1;
245 
246   /* Otherwise, if the lhs type is non-BLKmode, use it, except for the case of
247      a non-BLKmode rhs and array types with a variable part on the lhs but not
248      on the rhs to make sure the conversion is preserved during gimplification.
249      Note that we know that we will not have any alignment problems since, if
250      we did, the non-BLKmode type could not have been used.  */
251   if (TYPE_MODE (t1) != BLKmode
252       && (TYPE_MODE (t2) == BLKmode || !variable_array_on_lhs))
253     return t1;
254 
255   /* If the rhs type is of constant size, use it whatever the modes.  At
256      this point it is known to be smaller, or of constant size and the
257      lhs type is not.  */
258   if (TREE_CONSTANT (TYPE_SIZE (t2)))
259     return t2;
260 
261   /* Otherwise, if the rhs type is non-BLKmode, use it.  */
262   if (TYPE_MODE (t2) != BLKmode)
263     return t2;
264 
265   /* In this case, both types have variable size and BLKmode.  It's
266      probably best to leave the "type mismatch" because changing it
267      could cause a bad self-referential reference.  */
268   return NULL_TREE;
269 }
270 
271 /* Return an expression tree representing an equality comparison of A1 and A2,
272    two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.
273 
274    Two arrays are equal in one of two ways: (1) if both have zero length in
275    some dimension (not necessarily the same dimension) or (2) if the lengths
276    in each dimension are equal and the data is equal.  We perform the length
277    tests in as efficient a manner as possible.  */
278 
279 static tree
280 compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
281 {
282   tree result = convert (result_type, boolean_true_node);
283   tree a1_is_null = convert (result_type, boolean_false_node);
284   tree a2_is_null = convert (result_type, boolean_false_node);
285   tree t1 = TREE_TYPE (a1);
286   tree t2 = TREE_TYPE (a2);
287   bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
288   bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
289   bool length_zero_p = false;
290 
291   /* If the operands have side-effects, they need to be evaluated only once
292      in spite of the multiple references in the comparison.  */
293   if (a1_side_effects_p)
294     a1 = gnat_protect_expr (a1);
295 
296   if (a2_side_effects_p)
297     a2 = gnat_protect_expr (a2);
298 
299   /* Process each dimension separately and compare the lengths.  If any
300      dimension has a length known to be zero, set LENGTH_ZERO_P to true
301      in order to suppress the comparison of the data at the end.  */
302   while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
303     {
304       tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
305       tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
306       tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
307       tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
308       tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
309 				 size_one_node);
310       tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
311 				 size_one_node);
312       tree comparison, this_a1_is_null, this_a2_is_null;
313 
314       /* If the length of the first array is a constant, swap our operands
315 	 unless the length of the second array is the constant zero.  */
316       if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
317 	{
318 	  tree tem;
319 	  bool btem;
320 
321 	  tem = a1, a1 = a2, a2 = tem;
322 	  tem = t1, t1 = t2, t2 = tem;
323 	  tem = lb1, lb1 = lb2, lb2 = tem;
324 	  tem = ub1, ub1 = ub2, ub2 = tem;
325 	  tem = length1, length1 = length2, length2 = tem;
326 	  tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
327 	  btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
328 	  a2_side_effects_p = btem;
329 	}
330 
331       /* If the length of the second array is the constant zero, we can just
332 	 use the original stored bounds for the first array and see whether
333 	 last < first holds.  */
334       if (integer_zerop (length2))
335 	{
336 	  tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
337 
338 	  length_zero_p = true;
339 
340 	  ub1
341 	    = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
342 	  lb1
343 	    = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
344 
345 	  comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
346 	  comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
347 	  if (EXPR_P (comparison))
348 	    SET_EXPR_LOCATION (comparison, loc);
349 
350 	  this_a1_is_null = comparison;
351 	  this_a2_is_null = convert (result_type, boolean_true_node);
352 	}
353 
354       /* Otherwise, if the length is some other constant value, we know that
355 	 this dimension in the second array cannot be superflat, so we can
356 	 just use its length computed from the actual stored bounds.  */
357       else if (TREE_CODE (length2) == INTEGER_CST)
358 	{
359 	  tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
360 
361 	  ub1
362 	    = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
363 	  lb1
364 	    = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
365 	  /* Note that we know that UB2 and LB2 are constant and hence
366 	     cannot contain a PLACEHOLDER_EXPR.  */
367 	  ub2
368 	    = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
369 	  lb2
370 	    = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
371 
372 	  comparison
373 	    = fold_build2_loc (loc, EQ_EXPR, result_type,
374 			       build_binary_op (MINUS_EXPR, b, ub1, lb1),
375 			       build_binary_op (MINUS_EXPR, b, ub2, lb2));
376 	  comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
377 	  if (EXPR_P (comparison))
378 	    SET_EXPR_LOCATION (comparison, loc);
379 
380 	  this_a1_is_null
381 	    = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
382 
383 	  this_a2_is_null = convert (result_type, boolean_false_node);
384 	}
385 
386       /* Otherwise, compare the computed lengths.  */
387       else
388 	{
389 	  length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
390 	  length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
391 
392 	  comparison
393 	    = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);
394 
395 	  /* If the length expression is of the form (cond ? val : 0), assume
396 	     that cond is equivalent to (length != 0).  That's guaranteed by
397 	     construction of the array types in gnat_to_gnu_entity.  */
398 	  if (TREE_CODE (length1) == COND_EXPR
399 	      && integer_zerop (TREE_OPERAND (length1, 2)))
400 	    this_a1_is_null
401 	      = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
402 	  else
403 	    this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
404 					       length1, size_zero_node);
405 
406 	  /* Likewise for the second array.  */
407 	  if (TREE_CODE (length2) == COND_EXPR
408 	      && integer_zerop (TREE_OPERAND (length2, 2)))
409 	    this_a2_is_null
410 	      = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
411 	  else
412 	    this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
413 					       length2, size_zero_node);
414 	}
415 
416       /* Append expressions for this dimension to the final expressions.  */
417       result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
418 				result, comparison);
419 
420       a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
421 				    this_a1_is_null, a1_is_null);
422 
423       a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
424 				    this_a2_is_null, a2_is_null);
425 
426       t1 = TREE_TYPE (t1);
427       t2 = TREE_TYPE (t2);
428     }
429 
430   /* Unless the length of some dimension is known to be zero, compare the
431      data in the array.  */
432   if (!length_zero_p)
433     {
434       tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
435       tree comparison;
436 
437       if (type)
438 	{
439 	  a1 = convert (type, a1),
440 	  a2 = convert (type, a2);
441 	}
442 
443       comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);
444 
445       result
446 	= build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
447     }
448 
449   /* The result is also true if both sizes are zero.  */
450   result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
451 			    build_binary_op (TRUTH_ANDIF_EXPR, result_type,
452 					     a1_is_null, a2_is_null),
453 			    result);
454 
455   /* If the operands have side-effects, they need to be evaluated before
456      doing the tests above since the place they otherwise would end up
457      being evaluated at run time could be wrong.  */
458   if (a1_side_effects_p)
459     result = build2 (COMPOUND_EXPR, result_type, a1, result);
460 
461   if (a2_side_effects_p)
462     result = build2 (COMPOUND_EXPR, result_type, a2, result);
463 
464   return result;
465 }
466 
467 /* Return an expression tree representing an equality comparison of P1 and P2,
468    two objects of fat pointer type.  The result should be of type RESULT_TYPE.
469 
470    Two fat pointers are equal in one of two ways: (1) if both have a null
471    pointer to the array or (2) if they contain the same pair of pointers.
472    We perform the comparison in as efficient a manner as possible.  */
473 
474 static tree
475 compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
476 {
477   tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
478   tree p1_array_is_null, p2_array_is_null;
479 
480   /* If either operand has side-effects, they have to be evaluated only once
481      in spite of the multiple references to the operand in the comparison.  */
482   p1 = gnat_protect_expr (p1);
483   p2 = gnat_protect_expr (p2);
484 
485   /* The constant folder doesn't fold fat pointer types so we do it here.  */
486   if (TREE_CODE (p1) == CONSTRUCTOR)
487     p1_array = CONSTRUCTOR_ELT (p1, 0)->value;
488   else
489     p1_array = build_component_ref (p1, TYPE_FIELDS (TREE_TYPE (p1)), true);
490 
491   p1_array_is_null
492     = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
493 		       fold_convert_loc (loc, TREE_TYPE (p1_array),
494 					 null_pointer_node));
495 
496   if (TREE_CODE (p2) == CONSTRUCTOR)
497     p2_array = CONSTRUCTOR_ELT (p2, 0)->value;
498   else
499     p2_array = build_component_ref (p2, TYPE_FIELDS (TREE_TYPE (p2)), true);
500 
501   p2_array_is_null
502     = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
503 		       fold_convert_loc (loc, TREE_TYPE (p2_array),
504 					 null_pointer_node));
505 
506   /* If one of the pointers to the array is null, just compare the other.  */
507   if (integer_zerop (p1_array))
508     return p2_array_is_null;
509   else if (integer_zerop (p2_array))
510     return p1_array_is_null;
511 
512   /* Otherwise, do the fully-fledged comparison.  */
513   same_array
514     = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);
515 
516   if (TREE_CODE (p1) == CONSTRUCTOR)
517     p1_bounds = CONSTRUCTOR_ELT (p1, 1)->value;
518   else
519     p1_bounds
520       = build_component_ref (p1, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))),
521 			     true);
522 
523   if (TREE_CODE (p2) == CONSTRUCTOR)
524     p2_bounds = CONSTRUCTOR_ELT (p2, 1)->value;
525   else
526     p2_bounds
527       = build_component_ref (p2, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))),
528 			     true);
529 
530   same_bounds
531     = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);
532 
533   /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS).  */
534   return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
535 			  build_binary_op (TRUTH_ORIF_EXPR, result_type,
536 					   p1_array_is_null, same_bounds));
537 }
538 
539 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
540    type TYPE.  We know that TYPE is a modular type with a nonbinary
541    modulus.  */
542 
543 static tree
544 nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
545                              tree rhs)
546 {
547   tree modulus = TYPE_MODULUS (type);
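  /* tree_floor_log2 (modulus) + 1 bits are enough to represent both the
     modulus itself and every value in the range [0, modulus - 1].  */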
548   unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
549   unsigned int precision;
550   bool unsignedp = true;
551   tree op_type = type;
552   tree result;
553 
554   /* If this is an addition of a constant, convert it to a subtraction
555      of a constant since we can do that faster.  */
556   if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
557     {
558       rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
559       op_code = MINUS_EXPR;
560     }
561 
562   /* For the logical operations, we only need NEEDED_PRECISION bits.  For
563      addition and subtraction, we need one more and for multiplication we
564      need twice as many.  But we never want to use a precision smaller than
565      that of the type.  */
566   if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
567     needed_precision += 1;
568   else if (op_code == MULT_EXPR)
569     needed_precision *= 2;
570 
571   precision = MAX (needed_precision, TYPE_PRECISION (op_type));
572 
573   /* Unsigned will do for everything but subtraction.  */
574   if (op_code == MINUS_EXPR)
575     unsignedp = false;
576 
577   /* If our type is the wrong signedness or isn't wide enough, make a new
578      type and convert both our operands to it.  */
579   if (TYPE_PRECISION (op_type) < precision
580       || TYPE_UNSIGNED (op_type) != unsignedp)
581     {
582       /* Copy the type so we ensure it can be modified to make it modular.  */
583       op_type = copy_type (gnat_type_for_size (precision, unsignedp));
584       modulus = convert (op_type, modulus);
585       SET_TYPE_MODULUS (op_type, modulus);
586       TYPE_MODULAR_P (op_type) = 1;
587       lhs = convert (op_type, lhs);
588       rhs = convert (op_type, rhs);
589     }
590 
591   /* Do the operation, then we'll fix it up.  */
592   result = fold_build2 (op_code, op_type, lhs, rhs);
593 
594   /* For multiplication, we have no choice but to do a full modulus
595      operation.  However, we want to do this in the narrowest
596      possible size.  */
597   if (op_code == MULT_EXPR)
598     {
599       /* Copy the type so we ensure it can be modified to make it modular.  */
600       tree div_type = copy_type (gnat_type_for_size (needed_precision, 1));
601       modulus = convert (div_type, modulus);
602       SET_TYPE_MODULUS (div_type, modulus);
603       TYPE_MODULAR_P (div_type) = 1;
604       result = convert (op_type,
605 			fold_build2 (TRUNC_MOD_EXPR, div_type,
606 				     convert (div_type, result), modulus));
607     }
608 
609   /* For subtraction, add the modulus back if we are negative.  */
610   else if (op_code == MINUS_EXPR)
611     {
612       result = gnat_protect_expr (result);
613       result = fold_build3 (COND_EXPR, op_type,
614 			    fold_build2 (LT_EXPR, boolean_type_node, result,
615 					 build_int_cst (op_type, 0)),
616 			    fold_build2 (PLUS_EXPR, op_type, result, modulus),
617 			    result);
618     }
619 
620   /* For the other operations, subtract the modulus if we are >= it.  */
621   else
622     {
623       result = gnat_protect_expr (result);
624       result = fold_build3 (COND_EXPR, op_type,
625 			    fold_build2 (GE_EXPR, boolean_type_node,
626 					 result, modulus),
627 			    fold_build2 (MINUS_EXPR, op_type,
628 					 result, modulus),
629 			    result);
630     }
631 
632   return convert (type, result);
633 }
634 
635 /* This page contains routines that implement the Ada semantics with regard
636    to atomic objects.  They are fully piggybacked on the middle-end support
637    for atomic loads and stores.
638 
639    *** Memory barriers and volatile objects ***
640 
641    We implement the weakened form of the C.6(16) clause that was introduced
642    in Ada 2012 (AI05-117).  Earlier forms of this clause wouldn't have been
643    implementable without significant performance hits on modern platforms.
644 
645    We also take advantage of the requirements imposed on shared variables by
646    9.10 (conditions for sequential actions) to have non-erroneous execution
647    and consider that C.6(16) and C.6(17) only prescribe a uniform order of
648    volatile updates with regard to sequential actions, i.e. with regard to
649    reads or updates of atomic objects.
650 
651    As such, an update of an atomic object by a task requires that all earlier
652    accesses to volatile objects have completed.  Similarly, later accesses to
653    volatile objects cannot be reordered before the update of the atomic object.
654    So, memory barriers both before and after the atomic update are needed.
655 
656    For a read of an atomic object, to avoid seeing writes of volatile objects
657    by a task earlier than by the other tasks, a memory barrier is needed before
658    the atomic read.  Finally, to avoid reordering later reads or updates of
659    volatile objects to before the atomic read, a barrier is needed after the
660    atomic read.
661 
662    So, memory barriers are needed before and after atomic reads and updates.
663    And, in order to simplify the implementation, we use full memory barriers
664    in all cases, i.e. we enforce sequential consistency for atomic accesses.  */
665 
666 /* Return the size of TYPE, which must be a positive power of 2.  */
667 
668 static unsigned int
669 resolve_atomic_size (tree type)
670 {
671   unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
672 
673   if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
674     return size;
675 
676   /* We shouldn't reach here without having already detected that the size
677      isn't compatible with an atomic access.  */
678   gcc_assert (Serious_Errors_Detected);
679 
680   return 0;
681 }
682 
683 /* Build an atomic load for the underlying atomic object in SRC.  SYNC is
684    true if the load requires synchronization.  */
685 
686 tree
687 build_atomic_load (tree src, bool sync)
688 {
689   tree ptr_type
690     = build_pointer_type
691       (build_qualified_type (void_type_node,
692 			     TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
693   tree mem_model
694     = build_int_cst (integer_type_node,
695 		     sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
696   tree orig_src = src;
697   tree t, addr, val;
698   unsigned int size;
699   int fncode;
700 
701   /* Remove conversions to get the address of the underlying object.  */
702   src = remove_conversions (src, false);
703   size = resolve_atomic_size (TREE_TYPE (src));
704   if (size == 0)
705     return orig_src;
706 
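  /* The sized built-ins BUILT_IN_ATOMIC_LOAD_{1,2,4,8,16} follow
     BUILT_IN_ATOMIC_LOAD_N consecutively in the enumeration, so this offset
     selects the variant matching SIZE.  */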
707   fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
708   t = builtin_decl_implicit ((enum built_in_function) fncode);
709 
710   addr = build_unary_op (ADDR_EXPR, ptr_type, src);
711   val = build_call_expr (t, 2, addr, mem_model);
712 
713   /* First reinterpret the loaded bits in the original type of the load,
714      then convert to the expected result type.  */
715   t = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (src), val);
716   return convert (TREE_TYPE (orig_src), t);
717 }
718 
719 /* Build an atomic store from SRC to the underlying atomic object in DEST.
720    SYNC is true if the store requires synchronization.  */
721 
722 tree
723 build_atomic_store (tree dest, tree src, bool sync)
724 {
725   tree ptr_type
726     = build_pointer_type
727       (build_qualified_type (void_type_node,
728 			     TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
729   tree mem_model
730     = build_int_cst (integer_type_node,
731 		     sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
732   tree orig_dest = dest;
733   tree t, int_type, addr;
734   unsigned int size;
735   int fncode;
736 
737   /* Remove conversions to get the address of the underlying object.  */
738   dest = remove_conversions (dest, false);
739   size = resolve_atomic_size (TREE_TYPE (dest));
740   if (size == 0)
741     return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);
742 
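  /* Likewise, select the sized BUILT_IN_ATOMIC_STORE built-in matching SIZE.  */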
743   fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
744   t = builtin_decl_implicit ((enum built_in_function) fncode);
745   int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);
746 
747   /* First convert the bits to be stored to the original type of the store,
748      then reinterpret them in the effective type.  But if the original type
749      is a padded type with the same size, convert to the inner type instead,
750      as we don't want to artificially introduce a CONSTRUCTOR here.  */
751   if (TYPE_IS_PADDING_P (TREE_TYPE (dest))
752       && TYPE_SIZE (TREE_TYPE (dest))
753 	 == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest)))))
754     src = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest))), src);
755   else
756     src = convert (TREE_TYPE (dest), src);
757   src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
758   addr = build_unary_op (ADDR_EXPR, ptr_type, dest);
759 
760   return build_call_expr (t, 3, addr, src, mem_model);
761 }
762 
763 /* Build a load-modify-store sequence from SRC to DEST.  GNAT_NODE is used for
764    the location of the sequence.  Note that, even though the load and the store
765    are both atomic, the sequence itself is not atomic.  */
766 
767 tree
768 build_load_modify_store (tree dest, tree src, Node_Id gnat_node)
769 {
770   /* We will be modifying DEST below so we build a copy.  */
771   dest = copy_node (dest);
772   tree ref = dest;
773 
774   while (handled_component_p (ref))
775     {
776       /* The load should already have been generated during the translation
777 	 of the GNAT destination tree; find it out in the GNU tree.  */
778       if (TREE_CODE (TREE_OPERAND (ref, 0)) == VIEW_CONVERT_EXPR)
779 	{
780 	  tree op = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
781 	  if (TREE_CODE (op) == CALL_EXPR && call_is_atomic_load (op))
782 	    {
783 	      tree type = TREE_TYPE (TREE_OPERAND (ref, 0));
784 	      tree t = CALL_EXPR_ARG (op, 0);
785 	      tree obj, temp, stmt;
786 
787 	      /* Find out the loaded object.  */
788 	      if (TREE_CODE (t) == NOP_EXPR)
789 		t = TREE_OPERAND (t, 0);
790 	      if (TREE_CODE (t) == ADDR_EXPR)
791 		obj = TREE_OPERAND (t, 0);
792 	      else
793 		obj = build1 (INDIRECT_REF, type, t);
794 
795 	      /* Drop atomic and volatile qualifiers for the temporary.  */
796 	      type = TYPE_MAIN_VARIANT (type);
797 
798 	      /* And drop BLKmode, if need be, to put it into a register.  */
799 	      if (TYPE_MODE (type) == BLKmode)
800 		{
801 		  unsigned int size = tree_to_uhwi (TYPE_SIZE (type));
802 		  type = copy_type (type);
803 		  machine_mode mode = int_mode_for_size (size, 0).else_blk ();
804 		  SET_TYPE_MODE (type, mode);
805 		}
806 
807 	      /* Create the temporary by inserting a SAVE_EXPR.  */
808 	      temp = build1 (SAVE_EXPR, type,
809 			     build1 (VIEW_CONVERT_EXPR, type, op));
810 	      TREE_OPERAND (ref, 0) = temp;
811 
812 	      start_stmt_group ();
813 
814 	      /* Build the modify of the temporary.  */
815 	      stmt = build_binary_op (MODIFY_EXPR, NULL_TREE, dest, src);
816 	      add_stmt_with_node (stmt, gnat_node);
817 
818 	      /* Build the store to the object.  */
819 	      stmt = build_atomic_store (obj, temp, false);
820 	      add_stmt_with_node (stmt, gnat_node);
821 
822 	      return end_stmt_group ();
823 	    }
824 	}
825 
826       TREE_OPERAND (ref, 0) = copy_node (TREE_OPERAND (ref, 0));
827       ref = TREE_OPERAND (ref, 0);
828     }
829 
830   /* Something went wrong earlier if we have not found the atomic load.  */
831   gcc_unreachable ();
832 }
833 
834 /* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
835    desired for the result.  Usually the operation is to be performed
836    in that type.  For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
837    NULL_TREE.  For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
838    case the type to be used will be derived from the operands.
839    Don't fold the result if NO_FOLD is true.
840 
841    This function is very much unlike the ones for C and C++ since we
842    have already done any type conversion and matching required.  All we
843    have to do here is validate the work done by SEM and handle subtypes.  */
844 
845 tree
846 build_binary_op (enum tree_code op_code, tree result_type,
847 		 tree left_operand, tree right_operand,
848 		 bool no_fold)
849 {
850   tree left_type = TREE_TYPE (left_operand);
851   tree right_type = TREE_TYPE (right_operand);
852   tree left_base_type = get_base_type (left_type);
853   tree right_base_type = get_base_type (right_type);
854   tree operation_type = result_type;
855   tree best_type = NULL_TREE;
856   tree modulus, result;
857   bool has_side_effects = false;
858 
859   if (operation_type
860       && TREE_CODE (operation_type) == RECORD_TYPE
861       && TYPE_JUSTIFIED_MODULAR_P (operation_type))
862     operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
863 
864   if (operation_type
865       && TREE_CODE (operation_type) == INTEGER_TYPE
866       && TYPE_EXTRA_SUBTYPE_P (operation_type))
867     operation_type = get_base_type (operation_type);
868 
869   modulus = (operation_type
870 	     && TREE_CODE (operation_type) == INTEGER_TYPE
871 	     && TYPE_MODULAR_P (operation_type)
872 	     ? TYPE_MODULUS (operation_type) : NULL_TREE);
873 
874   switch (op_code)
875     {
876     case INIT_EXPR:
877     case MODIFY_EXPR:
878       gcc_checking_assert (!result_type);
879 
880       /* If there were integral or pointer conversions on the LHS, remove
881 	 them; we'll be putting them back below if needed.  Likewise for
882 	 conversions between array and record types, except for justified
883 	 modular types.  But don't do this if the right operand is not
884 	 BLKmode (for packed arrays) unless we are not changing the mode.  */
885       while ((CONVERT_EXPR_P (left_operand)
886 	      || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
887 	     && (((INTEGRAL_TYPE_P (left_type)
888 		   || POINTER_TYPE_P (left_type))
889 		  && (INTEGRAL_TYPE_P (TREE_TYPE
890 				       (TREE_OPERAND (left_operand, 0)))
891 		      || POINTER_TYPE_P (TREE_TYPE
892 					 (TREE_OPERAND (left_operand, 0)))))
893 		 || (((TREE_CODE (left_type) == RECORD_TYPE
894 		       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
895 		      || TREE_CODE (left_type) == ARRAY_TYPE)
896 		     && ((TREE_CODE (TREE_TYPE
897 				     (TREE_OPERAND (left_operand, 0)))
898 			  == RECORD_TYPE)
899 			 || (TREE_CODE (TREE_TYPE
900 					(TREE_OPERAND (left_operand, 0)))
901 			     == ARRAY_TYPE))
902 		     && (TYPE_MODE (right_type) == BLKmode
903 			 || (TYPE_MODE (left_type)
904 			     == TYPE_MODE (TREE_TYPE
905 					   (TREE_OPERAND
906 					    (left_operand, 0))))))))
907 	{
908 	  left_operand = TREE_OPERAND (left_operand, 0);
909 	  left_type = TREE_TYPE (left_operand);
910 	}
911 
912       /* If a class-wide type may be involved, force use of the RHS type.  */
913       if ((TREE_CODE (right_type) == RECORD_TYPE
914 	   || TREE_CODE (right_type) == UNION_TYPE)
915 	  && TYPE_ALIGN_OK (right_type))
916 	operation_type = right_type;
917 
918       /* If we are copying between padded objects with compatible types, use
919 	 the padded view of the objects, this is very likely more efficient.
920 	 Likewise for a padded object that is assigned a constructor, if we
921 	 can convert the constructor to the inner type, to avoid putting a
922 	 VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
923 	 actually copied anything.  */
924       else if (TYPE_IS_PADDING_P (left_type)
925 	       && TREE_CONSTANT (TYPE_SIZE (left_type))
926 	       && ((TREE_CODE (right_operand) == COMPONENT_REF
927 		    && TYPE_MAIN_VARIANT (left_type)
928 		       == TYPE_MAIN_VARIANT
929 			  (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
930 		   || (TREE_CODE (right_operand) == CONSTRUCTOR
931 		       && !CONTAINS_PLACEHOLDER_P
932 			   (DECL_SIZE (TYPE_FIELDS (left_type)))))
933 	       && !integer_zerop (TYPE_SIZE (right_type)))
934 	{
935 	  /* We make an exception for a BLKmode type padding a non-BLKmode
936 	     inner type and do the conversion of the LHS right away, since
937 	     unchecked_convert wouldn't do it properly.  */
938 	  if (TYPE_MODE (left_type) == BLKmode
939 	      && TYPE_MODE (right_type) != BLKmode
940 	      && TREE_CODE (right_operand) != CONSTRUCTOR)
941 	    {
942 	      operation_type = right_type;
943 	      left_operand = convert (operation_type, left_operand);
944 	      left_type = operation_type;
945 	    }
946 	  else
947 	    operation_type = left_type;
948 	}
949 
950       /* If we have a call to a function that returns with variable size, use
951 	 the RHS type in case we want to use the return slot optimization.  */
952       else if (TREE_CODE (right_operand) == CALL_EXPR
953 	       && return_type_with_variable_size_p (right_type))
954 	operation_type = right_type;
955 
956       /* Find the best type to use for copying between aggregate types.  */
957       else if (((TREE_CODE (left_type) == ARRAY_TYPE
958 		 && TREE_CODE (right_type) == ARRAY_TYPE)
959 		|| (TREE_CODE (left_type) == RECORD_TYPE
960 		    && TREE_CODE (right_type) == RECORD_TYPE))
961 	       && (best_type = find_common_type (left_type, right_type)))
962 	operation_type = best_type;
963 
964       /* Otherwise use the LHS type.  */
965       else
966 	operation_type = left_type;
967 
968       /* Ensure everything on the LHS is valid.  If we have a field reference,
969 	 strip anything that get_inner_reference can handle.  Then remove any
970 	 conversions between types having the same code and mode.  And mark
971 	 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
972 	 either an INDIRECT_REF, a NULL_EXPR, a SAVE_EXPR or a DECL node.  */
973       result = left_operand;
974       while (true)
975 	{
976 	  tree restype = TREE_TYPE (result);
977 
978 	  if (TREE_CODE (result) == COMPONENT_REF
979 	      || TREE_CODE (result) == ARRAY_REF
980 	      || TREE_CODE (result) == ARRAY_RANGE_REF)
981 	    while (handled_component_p (result))
982 	      result = TREE_OPERAND (result, 0);
983 	  else if (TREE_CODE (result) == REALPART_EXPR
984 		   || TREE_CODE (result) == IMAGPART_EXPR
985 		   || (CONVERT_EXPR_P (result)
986 		       && (((TREE_CODE (restype)
987 			     == TREE_CODE (TREE_TYPE
988 					   (TREE_OPERAND (result, 0))))
989 			     && (TYPE_MODE (TREE_TYPE
990 					    (TREE_OPERAND (result, 0)))
991 				 == TYPE_MODE (restype)))
992 			   || TYPE_ALIGN_OK (restype))))
993 	    result = TREE_OPERAND (result, 0);
994 	  else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
995 	    {
996 	      TREE_ADDRESSABLE (result) = 1;
997 	      result = TREE_OPERAND (result, 0);
998 	    }
999 	  else
1000 	    break;
1001 	}
1002 
1003       gcc_assert (TREE_CODE (result) == INDIRECT_REF
1004 		  || TREE_CODE (result) == NULL_EXPR
1005 		  || TREE_CODE (result) == SAVE_EXPR
1006 		  || DECL_P (result));
1007 
1008       /* Convert the right operand to the operation type unless it is
1009 	 either already of the correct type or if the type involves a
1010 	 placeholder, since the RHS may not have the same record type.  */
1011       if (operation_type != right_type
1012 	  && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
1013 	{
1014 	  right_operand = convert (operation_type, right_operand);
1015 	  right_type = operation_type;
1016 	}
1017 
1018       /* If the left operand is not of the same type as the operation
1019 	 type, wrap it up in a VIEW_CONVERT_EXPR.  */
1020       if (left_type != operation_type)
1021 	left_operand = unchecked_convert (operation_type, left_operand, false);
1022 
1023       has_side_effects = true;
1024       modulus = NULL_TREE;
1025       break;
1026 
1027     case ARRAY_REF:
1028       if (!operation_type)
1029 	operation_type = TREE_TYPE (left_type);
1030 
1031       /* ... fall through ... */
1032 
1033     case ARRAY_RANGE_REF:
1034       /* First look through conversion between type variants.  Note that
1035 	 this changes neither the operation type nor the type domain.  */
1036       if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
1037 	  && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
1038 	     == TYPE_MAIN_VARIANT (left_type))
1039 	{
1040 	  left_operand = TREE_OPERAND (left_operand, 0);
1041 	  left_type = TREE_TYPE (left_operand);
1042 	}
1043 
1044       /* For a range, make sure the element type is consistent.  */
1045       if (op_code == ARRAY_RANGE_REF
1046 	  && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
1047 	operation_type = build_array_type (TREE_TYPE (left_type),
1048 					   TYPE_DOMAIN (operation_type));
1049 
1050       /* Then convert the right operand to its base type.  This will prevent
1051 	 unneeded sign conversions when sizetype is wider than integer.  */
1052       right_operand = convert (right_base_type, right_operand);
1053       right_operand = convert_to_index_type (right_operand);
1054       modulus = NULL_TREE;
1055       break;
1056 
1057     case TRUTH_ANDIF_EXPR:
1058     case TRUTH_ORIF_EXPR:
1059     case TRUTH_AND_EXPR:
1060     case TRUTH_OR_EXPR:
1061     case TRUTH_XOR_EXPR:
1062       gcc_checking_assert
1063 	(TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1064       operation_type = left_base_type;
1065       left_operand = convert (operation_type, left_operand);
1066       right_operand = convert (operation_type, right_operand);
1067       break;
1068 
1069     case GE_EXPR:
1070     case LE_EXPR:
1071     case GT_EXPR:
1072     case LT_EXPR:
1073     case EQ_EXPR:
1074     case NE_EXPR:
1075       gcc_checking_assert
1076 	(TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1077       /* If either operand is a NULL_EXPR, just return a new one.  */
1078       if (TREE_CODE (left_operand) == NULL_EXPR)
1079 	return build2 (op_code, result_type,
1080 		       build1 (NULL_EXPR, integer_type_node,
1081 			       TREE_OPERAND (left_operand, 0)),
1082 		       integer_zero_node);
1083 
1084       else if (TREE_CODE (right_operand) == NULL_EXPR)
1085 	return build2 (op_code, result_type,
1086 		       build1 (NULL_EXPR, integer_type_node,
1087 			       TREE_OPERAND (right_operand, 0)),
1088 		       integer_zero_node);
1089 
1090       /* If either object is a justified modular type, get the
1091 	 fields from within.  */
1092       if (TREE_CODE (left_type) == RECORD_TYPE
1093 	  && TYPE_JUSTIFIED_MODULAR_P (left_type))
1094 	{
1095 	  left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
1096 				  left_operand);
1097 	  left_type = TREE_TYPE (left_operand);
1098 	  left_base_type = get_base_type (left_type);
1099 	}
1100 
1101       if (TREE_CODE (right_type) == RECORD_TYPE
1102 	  && TYPE_JUSTIFIED_MODULAR_P (right_type))
1103 	{
1104 	  right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
1105 				  right_operand);
1106 	  right_type = TREE_TYPE (right_operand);
1107 	  right_base_type = get_base_type (right_type);
1108 	}
1109 
1110       /* If both objects are arrays, compare them specially.  */
1111       if ((TREE_CODE (left_type) == ARRAY_TYPE
1112 	   || (TREE_CODE (left_type) == INTEGER_TYPE
1113 	       && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
1114 	  && (TREE_CODE (right_type) == ARRAY_TYPE
1115 	      || (TREE_CODE (right_type) == INTEGER_TYPE
1116 		  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
1117 	{
1118 	  result = compare_arrays (input_location,
1119 				   result_type, left_operand, right_operand);
1120 	  if (op_code == NE_EXPR)
1121 	    result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1122 	  else
1123 	    gcc_assert (op_code == EQ_EXPR);
1124 
1125 	  return result;
1126 	}
1127 
1128       /* Otherwise, the base types must be the same, unless they are both fat
1129 	 pointer types or record types.  In the latter case, use the best type
1130 	 and convert both operands to that type.  */
1131       if (left_base_type != right_base_type)
1132 	{
1133 	  if (TYPE_IS_FAT_POINTER_P (left_base_type)
1134 	      && TYPE_IS_FAT_POINTER_P (right_base_type))
1135 	    {
1136 	      gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
1137 			  == TYPE_MAIN_VARIANT (right_base_type));
1138 	      best_type = left_base_type;
1139 	    }
1140 
1141 	  else if (TREE_CODE (left_base_type) == RECORD_TYPE
1142 		   && TREE_CODE (right_base_type) == RECORD_TYPE)
1143 	    {
1144 	      /* The only way this is permitted is if both types have the same
1145 		 name.  In that case, one of them must not be self-referential.
1146 		 Use it as the best type.  Even better with a fixed size.  */
1147 	      gcc_assert (TYPE_NAME (left_base_type)
1148 			  && TYPE_NAME (left_base_type)
1149 			     == TYPE_NAME (right_base_type));
1150 
1151 	      if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
1152 		best_type = left_base_type;
1153 	      else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
1154 		best_type = right_base_type;
1155 	      else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
1156 		best_type = left_base_type;
1157 	      else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
1158 		best_type = right_base_type;
1159 	      else
1160 		gcc_unreachable ();
1161 	    }
1162 
1163 	  else if (POINTER_TYPE_P (left_base_type)
1164 		   && POINTER_TYPE_P (right_base_type))
1165 	    {
1166 	      gcc_assert (TREE_TYPE (left_base_type)
1167 			  == TREE_TYPE (right_base_type));
1168 	      best_type = left_base_type;
1169 	    }
1170 	  else
1171 	    gcc_unreachable ();
1172 
1173 	  left_operand = convert (best_type, left_operand);
1174 	  right_operand = convert (best_type, right_operand);
1175 	}
1176       else
1177 	{
1178 	  left_operand = convert (left_base_type, left_operand);
1179 	  right_operand = convert (right_base_type, right_operand);
1180 	}
1181 
1182       /* If both objects are fat pointers, compare them specially.  */
1183       if (TYPE_IS_FAT_POINTER_P (left_base_type))
1184 	{
1185 	  result
1186 	    = compare_fat_pointers (input_location,
1187 				    result_type, left_operand, right_operand);
1188 	  if (op_code == NE_EXPR)
1189 	    result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1190 	  else
1191 	    gcc_assert (op_code == EQ_EXPR);
1192 
1193 	  return result;
1194 	}
1195 
1196       modulus = NULL_TREE;
1197       break;
1198 
1199     case LSHIFT_EXPR:
1200     case RSHIFT_EXPR:
1201     case LROTATE_EXPR:
1202     case RROTATE_EXPR:
1203        /* The RHS of a shift can be any type.  Also, ignore any modulus
1204 	 (we used to abort, but this is needed for unchecked conversion
1205 	 to modular types).  Otherwise, processing is the same as normal.  */
1206       gcc_assert (operation_type == left_base_type);
1207       modulus = NULL_TREE;
1208       left_operand = convert (operation_type, left_operand);
1209       break;
1210 
1211     case BIT_AND_EXPR:
1212     case BIT_IOR_EXPR:
1213     case BIT_XOR_EXPR:
1214       /* For binary modulus, if the inputs are in range, so are the
1215 	 outputs.  */
1216       if (modulus && integer_pow2p (modulus))
1217 	modulus = NULL_TREE;
1218       goto common;
1219 
1220     case COMPLEX_EXPR:
1221       gcc_assert (TREE_TYPE (result_type) == left_base_type
1222 		  && TREE_TYPE (result_type) == right_base_type);
1223       left_operand = convert (left_base_type, left_operand);
1224       right_operand = convert (right_base_type, right_operand);
1225       break;
1226 
1227     case TRUNC_DIV_EXPR:   case TRUNC_MOD_EXPR:
1228     case CEIL_DIV_EXPR:    case CEIL_MOD_EXPR:
1229     case FLOOR_DIV_EXPR:   case FLOOR_MOD_EXPR:
1230     case ROUND_DIV_EXPR:   case ROUND_MOD_EXPR:
1231       /* These always produce results lower than either operand.  */
1232       modulus = NULL_TREE;
1233       goto common;
1234 
1235     case POINTER_PLUS_EXPR:
1236       gcc_assert (operation_type == left_base_type
1237 		  && sizetype == right_base_type);
1238       left_operand = convert (operation_type, left_operand);
1239       right_operand = convert (sizetype, right_operand);
1240       break;
1241 
1242     case PLUS_NOMOD_EXPR:
1243     case MINUS_NOMOD_EXPR:
1244       if (op_code == PLUS_NOMOD_EXPR)
1245 	op_code = PLUS_EXPR;
1246       else
1247 	op_code = MINUS_EXPR;
1248       modulus = NULL_TREE;
1249 
1250       /* ... fall through ... */
1251 
1252     case PLUS_EXPR:
1253     case MINUS_EXPR:
1254       /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
1255 	 other compilers.  Contrary to C, Ada doesn't allow arithmetic in
1256 	 these types but can generate addition/subtraction for Succ/Pred.  */
1257       if (operation_type
1258 	  && (TREE_CODE (operation_type) == ENUMERAL_TYPE
1259 	      || TREE_CODE (operation_type) == BOOLEAN_TYPE))
1260 	operation_type = left_base_type = right_base_type
1261 	  = gnat_type_for_mode (TYPE_MODE (operation_type),
1262 				TYPE_UNSIGNED (operation_type));
1263 
1264       /* ... fall through ... */
1265 
1266     default:
1267     common:
1268       /* The result type should be the same as the base types of
1269 	 both operands (and they should be the same).  Convert
1270 	 everything to the result type.  */
1271 
1272       gcc_assert (operation_type == left_base_type
1273 		  && left_base_type == right_base_type);
1274       left_operand = convert (operation_type, left_operand);
1275       right_operand = convert (operation_type, right_operand);
1276     }
1277 
1278   if (modulus && !integer_pow2p (modulus))
1279     {
1280       result = nonbinary_modular_operation (op_code, operation_type,
1281 					    left_operand, right_operand);
1282       modulus = NULL_TREE;
1283     }
1284   /* If either operand is a NULL_EXPR, just return a new one.  */
1285   else if (TREE_CODE (left_operand) == NULL_EXPR)
1286     return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
1287   else if (TREE_CODE (right_operand) == NULL_EXPR)
1288     return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
1289   else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1290     {
1291       result = build4 (op_code, operation_type, left_operand, right_operand,
1292 		       NULL_TREE, NULL_TREE);
1293       if (!no_fold)
1294 	result = fold (result);
1295     }
1296   else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
1297     result = build2 (op_code, void_type_node, left_operand, right_operand);
1298   else if (no_fold)
1299     result = build2 (op_code, operation_type, left_operand, right_operand);
1300   else
1301     result
1302       = fold_build2 (op_code, operation_type, left_operand, right_operand);
1303 
1304   if (TREE_CONSTANT (result))
1305     ;
1306   else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1307     {
1308       if (TYPE_VOLATILE (operation_type))
1309 	TREE_THIS_VOLATILE (result) = 1;
1310     }
1311   else
1312     TREE_CONSTANT (result)
1313       |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));
1314 
1315   TREE_SIDE_EFFECTS (result) |= has_side_effects;
1316 
1317   /* If we are working with modular types, perform the MOD operation
1318      if something above hasn't eliminated the need for it.  */
1319   if (modulus)
1320     {
1321       modulus = convert (operation_type, modulus);
1322       if (no_fold)
1323 	result = build2 (FLOOR_MOD_EXPR, operation_type, result, modulus);
1324       else
1325 	result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result, modulus);
1326     }
1327 
1328   if (result_type && result_type != operation_type)
1329     result = convert (result_type, result);
1330 
1331   return result;
1332 }
1333 
1334 /* Similar, but for unary operations.  */
1335 
1336 tree
1337 build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1338 {
1339   tree type = TREE_TYPE (operand);
1340   tree base_type = get_base_type (type);
1341   tree operation_type = result_type;
1342   tree result;
1343 
1344   if (operation_type
1345       && TREE_CODE (operation_type) == RECORD_TYPE
1346       && TYPE_JUSTIFIED_MODULAR_P (operation_type))
1347     operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
1348 
1349   if (operation_type
1350       && TREE_CODE (operation_type) == INTEGER_TYPE
1351       && TYPE_EXTRA_SUBTYPE_P (operation_type))
1352     operation_type = get_base_type (operation_type);
1353 
1354   switch (op_code)
1355     {
1356     case REALPART_EXPR:
1357     case IMAGPART_EXPR:
1358       if (!operation_type)
1359 	result_type = operation_type = TREE_TYPE (type);
1360       else
1361 	gcc_assert (result_type == TREE_TYPE (type));
1362 
1363       result = fold_build1 (op_code, operation_type, operand);
1364       break;
1365 
1366     case TRUTH_NOT_EXPR:
1367       gcc_checking_assert
1368 	(TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1369       result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
1370       /* When not optimizing, fold the result as invert_truthvalue_loc
1371 	 doesn't fold the result of comparisons.  This is intended to undo
1372 	 the trick used for boolean rvalues in gnat_to_gnu.  */
1373       if (!optimize)
1374 	result = fold (result);
1375       break;
1376 
1377     case ATTR_ADDR_EXPR:
1378     case ADDR_EXPR:
1379       switch (TREE_CODE (operand))
1380 	{
1381 	case INDIRECT_REF:
1382 	case UNCONSTRAINED_ARRAY_REF:
1383 	  result = TREE_OPERAND (operand, 0);
1384 
1385 	  /* Make sure the type here is a pointer, not a reference.
1386 	     GCC wants pointer types for function addresses.  */
1387 	  if (!result_type)
1388 	    result_type = build_pointer_type (type);
1389 
1390 	  /* If the underlying object can alias everything, propagate the
1391 	     property since we are effectively retrieving the object.  */
1392 	  if (POINTER_TYPE_P (TREE_TYPE (result))
1393 	      && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
1394 	    {
1395 	      if (TREE_CODE (result_type) == POINTER_TYPE
1396 		  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1397 		result_type
1398 		  = build_pointer_type_for_mode (TREE_TYPE (result_type),
1399 						 TYPE_MODE (result_type),
1400 						 true);
1401 	      else if (TREE_CODE (result_type) == REFERENCE_TYPE
1402 		       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1403 	        result_type
1404 		  = build_reference_type_for_mode (TREE_TYPE (result_type),
1405 						   TYPE_MODE (result_type),
1406 						   true);
1407 	    }
1408 	  break;
1409 
1410 	case NULL_EXPR:
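	  /* The address of a NULL_EXPR is the NULL_EXPR itself; just give it
	     the pointer type.  */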
1411 	  result = operand;
1412 	  TREE_TYPE (result) = type = build_pointer_type (type);
1413 	  break;
1414 
1415 	case COMPOUND_EXPR:
1416 	  /* Fold a compound expression if it has unconstrained array type
1417 	     since the middle-end cannot handle it.  But we don't do it in
1418 	     the general case because it may introduce aliasing issues if the
1419 	     first operand is an indirect assignment and the second operand is
1420 	     the corresponding address, e.g. for an allocator.  However, do
1421 	     it for a return value to expose it for later recognition.  */
1422 	  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE
1423 	      || (TREE_CODE (TREE_OPERAND (operand, 1)) == VAR_DECL
1424 		  && DECL_RETURN_VALUE_P (TREE_OPERAND (operand, 1))))
1425 	    {
1426 	      result = build_unary_op (ADDR_EXPR, result_type,
1427 				       TREE_OPERAND (operand, 1));
1428 	      result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
1429 			       TREE_OPERAND (operand, 0), result);
1430 	      break;
1431 	    }
1432 	  goto common;
1433 
1434 	case ARRAY_REF:
1435 	case ARRAY_RANGE_REF:
1436 	case COMPONENT_REF:
1437 	case BIT_FIELD_REF:
1438 	  /* If this is for 'Address, find the address of the prefix and add
1439 	     the offset to the field.  Otherwise, do this the normal way.  */
1440 	  if (op_code == ATTR_ADDR_EXPR)
1441 	    {
1442 	      poly_int64 bitsize;
1443 	      poly_int64 bitpos;
1444 	      tree offset, inner;
1445 	      machine_mode mode;
1446 	      int unsignedp, reversep, volatilep;
1447 
1448 	      inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1449 					   &mode, &unsignedp, &reversep,
1450 					   &volatilep);
1451 
1452 	      /* If INNER is a padding type whose field has a self-referential
1453 		 size, convert to that inner type.  We know the offset is zero
1454 		 and we need to have that type visible.  */
1455 	      if (type_is_padding_self_referential (TREE_TYPE (inner)))
1456 		inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1457 				 inner);
1458 
1459 	      /* Compute the offset as a byte offset from INNER.  */
1460 	      if (!offset)
1461 		offset = size_zero_node;
1462 
1463 	      offset
1464 		= size_binop (PLUS_EXPR, offset,
1465 			      size_int (bits_to_bytes_round_down (bitpos)));
1466 
1467 	      /* Take the address of INNER, convert it to a pointer to our type
1468 		 and add the offset.  */
1469 	      inner = build_unary_op (ADDR_EXPR,
1470 				      build_pointer_type (TREE_TYPE (operand)),
1471 				      inner);
1472 	      result = build_binary_op (POINTER_PLUS_EXPR, TREE_TYPE (inner),
1473 					inner, offset);
1474 	      break;
1475 	    }
1476 	  goto common;
1477 
1478 	case CONSTRUCTOR:
1479 	  /* If this is just a constructor for a padded record, we can
1480 	     just take the address of the single field and convert it to
1481 	     a pointer to our type.  */
1482 	  if (TYPE_IS_PADDING_P (type))
1483 	    {
1484 	      result
1485 		= build_unary_op (ADDR_EXPR,
1486 				  build_pointer_type (TREE_TYPE (operand)),
1487 				  CONSTRUCTOR_ELT (operand, 0)->value);
1488 	      break;
1489 	    }
1490 	  goto common;
1491 
1492 	case NOP_EXPR:
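	  /* For a conversion between aggregate types, the address of the
	     conversion is simply the address of the underlying object.  */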
1493 	  if (AGGREGATE_TYPE_P (type)
1494 	      && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1495 	    return build_unary_op (ADDR_EXPR, result_type,
1496 				   TREE_OPERAND (operand, 0));
1497 
1498 	  /* ... fallthru ... */
1499 
1500 	case VIEW_CONVERT_EXPR:
1501 	  /* If this is just a variant conversion or if the conversion doesn't
1502 	     change the mode, get the result type from this type and go down.
1503 	     This is needed for conversions of CONST_DECLs, to eventually get
1504 	     to the address of their CORRESPONDING_VARs.  */
1505 	  if ((TYPE_MAIN_VARIANT (type)
1506 	       == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
1507 	      || (TYPE_MODE (type) != BLKmode
1508 		  && (TYPE_MODE (type)
1509 		      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
1510 	    return build_unary_op (ADDR_EXPR,
1511 				   (result_type ? result_type
1512 				    : build_pointer_type (type)),
1513 				   TREE_OPERAND (operand, 0));
1514 	  goto common;
1515 
1516 	case CONST_DECL:
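	  /* Take the address of the variable the CONST_DECL was made from.  */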
1517 	  operand = DECL_CONST_CORRESPONDING_VAR (operand);
1518 
1519 	  /* ... fall through ... */
1520 
1521 	default:
1522 	common:
1523 
1524 	  /* If we are taking the address of a padded record whose field
1525 	     contains a template, take the address of the field.  */
1526 	  if (TYPE_IS_PADDING_P (type)
1527 	      && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1528 	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1529 	    {
1530 	      type = TREE_TYPE (TYPE_FIELDS (type));
1531 	      operand = convert (type, operand);
1532 	    }
1533 
1534 	  gnat_mark_addressable (operand);
1535 	  result = build_fold_addr_expr (operand);
1536 	}
1537 
1538       TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1539       break;
1540 
1541     case INDIRECT_REF:
1542       {
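	/* Look at the unconverted operand to see whether the dereference
	   can be marked as never trapping below.  */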
1543 	tree t = remove_conversions (operand, false);
1544 	bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);
1545 
1546 	/* If TYPE is a thin pointer, either first retrieve the base if this
1547 	   is an expression with an offset built for the initialization of an
1548 	   object with an unconstrained nominal subtype, or else convert to
1549 	   the fat pointer.  */
1550 	if (TYPE_IS_THIN_POINTER_P (type))
1551 	  {
1552 	    tree rec_type = TREE_TYPE (type);
1553 
1554 	    if (TREE_CODE (operand) == POINTER_PLUS_EXPR
1555 		&& TREE_OPERAND (operand, 1)
1556 		   == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
1557 		&& TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
1558 	      {
1559 		operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
1560 		type = TREE_TYPE (operand);
1561 	      }
1562 	    else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
1563 	      {
1564 		operand
1565 		  = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
1566 			     operand);
1567 		type = TREE_TYPE (operand);
1568 	      }
1569 	  }
1570 
1571 	/* If we want to refer to an unconstrained array, use the appropriate
1572 	   expression.  But this will never survive down to the back-end.  */
1573 	if (TYPE_IS_FAT_POINTER_P (type))
1574 	  {
1575 	    result = build1 (UNCONSTRAINED_ARRAY_REF,
1576 			     TYPE_UNCONSTRAINED_ARRAY (type), operand);
1577 	    TREE_READONLY (result)
1578 	      = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
1579 	  }
1580 
1581 	/* If we are dereferencing an ADDR_EXPR, return its operand.  */
1582 	else if (TREE_CODE (operand) == ADDR_EXPR)
1583 	  result = TREE_OPERAND (operand, 0);
1584 
1585 	/* Otherwise, build and fold the indirect reference.  */
1586 	else
1587 	  {
1588 	    result = build_fold_indirect_ref (operand);
1589 	    TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1590 	  }
1591 
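	/* For the dereference of a non-fat pointer to a volatile type, mark
	   the result as having side effects and, if it is an INDIRECT_REF,
	   as volatile.  */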
1592 	if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
1593 	  {
1594 	    TREE_SIDE_EFFECTS (result) = 1;
1595 	    if (TREE_CODE (result) == INDIRECT_REF)
1596 	      TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1597 	  }
1598 
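	/* The dereference cannot trap if the pointer can never be null.  */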
1599 	if ((TREE_CODE (result) == INDIRECT_REF
1600 	     || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
1601 	    && can_never_be_null)
1602 	  TREE_THIS_NOTRAP (result) = 1;
1603 
1604 	break;
1605       }
1606 
1607     case NEGATE_EXPR:
1608     case BIT_NOT_EXPR:
1609       {
1610 	tree modulus = ((operation_type
1611 			 && TREE_CODE (operation_type) == INTEGER_TYPE
1612 			 && TYPE_MODULAR_P (operation_type))
1613 			? TYPE_MODULUS (operation_type) : NULL_TREE);
1614 	int mod_pow2 = modulus && integer_pow2p (modulus);
1615 
1616 	/* If this is a modular type, there are various possibilities
1617 	   depending on the operation and whether the modulus is a
1618 	   power of two or not.  */
1619 
1620 	if (modulus)
1621 	  {
1622 	    gcc_assert (operation_type == base_type);
1623 	    operand = convert (operation_type, operand);
1624 
1625 	    /* For the negate case with a binary modulus, the fastest approach
1626 	       is the straightforward code below; the TRUNC_MOD_EXPR amounts
1627 	       to an AND operation.  */
1628 	    if (op_code == NEGATE_EXPR && mod_pow2)
1629 	      result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1630 				    fold_build1 (NEGATE_EXPR, operation_type,
1631 						 operand),
1632 				    modulus);
1633 
1634 	    /* For the nonbinary negate case, return zero for a zero operand,
1635 	       else return the modulus minus the operand.  If the modulus is
1636 	       a power of two minus one, we can do the subtraction as an XOR
1637 	       since it is equivalent and faster on most machines.  */
1638 	    else if (op_code == NEGATE_EXPR && !mod_pow2)
1639 	      {
1640 		if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1641 						modulus,
1642 						build_int_cst (operation_type,
1643 							       1))))
1644 		  result = fold_build2 (BIT_XOR_EXPR, operation_type,
1645 					operand, modulus);
1646 		else
1647 		  result = fold_build2 (MINUS_EXPR, operation_type,
1648 					modulus, operand);
1649 
1650 		result = fold_build3 (COND_EXPR, operation_type,
1651 				      fold_build2 (NE_EXPR,
1652 						   boolean_type_node,
1653 						   operand,
1654 						   build_int_cst
1655 						   (operation_type, 0)),
1656 				      result, operand);
1657 	      }
1658 	    else
1659 	      {
1660 		/* For the NOT cases, we need a constant equal to the modulus
1661 		   minus one.  For a binary modulus, we XOR the operand against
1662 		   that constant; for a nonbinary modulus, we subtract the
1663 		   operand from that constant.  */
1664 
1665 		tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
1666 					 build_int_cst (operation_type, 1));
1667 
1668 		if (mod_pow2)
1669 		  result = fold_build2 (BIT_XOR_EXPR, operation_type,
1670 					operand, cnst);
1671 		else
1672 		  result = fold_build2 (MINUS_EXPR, operation_type,
1673 					cnst, operand);
1674 	      }
1675 
1676 	    break;
1677 	  }
1678       }
1679 
1680       /* ... fall through ... */
1681 
1682     default:
1683       gcc_assert (operation_type == base_type);
1684       result = fold_build1 (op_code, operation_type,
1685 			    convert (operation_type, operand));
1686     }
1687 
1688   if (result_type && TREE_TYPE (result) != result_type)
1689     result = convert (result_type, result);
1690 
1691   return result;
1692 }
1693 
1694 /* Similar, but for COND_EXPR.  */
1695 
1696 tree
1697 build_cond_expr (tree result_type, tree condition_operand,
1698                  tree true_operand, tree false_operand)
1699 {
1700   bool addr_p = false;
1701   tree result;
1702 
1703   /* The front-end verified that the result, true and false operands have
1704      the same base type.  Convert everything to the result type.  */
1705   true_operand = convert (result_type, true_operand);
1706   false_operand = convert (result_type, false_operand);
1707 
1708   /* If the result type is unconstrained, take the address of the operands and
1709      then dereference the result.  Likewise if the result type is passed by
1710      reference, because creating a temporary of this type is not allowed.  */
1711   if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1712       || TYPE_IS_BY_REFERENCE_P (result_type)
1713       || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1714     {
1715       result_type = build_pointer_type (result_type);
1716       true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1717       false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1718       addr_p = true;
1719     }
1720 
1721   result = fold_build3 (COND_EXPR, result_type, condition_operand,
1722 			true_operand, false_operand);
1723 
1724   /* If we have a common SAVE_EXPR (possibly surrounded by arithmetics)
1725      in both arms, make sure it gets evaluated by moving it ahead of the
1726      conditional expression.  This is necessary because it is evaluated
1727      in only one place at run time and would otherwise be uninitialized
1728      in one of the arms.  */
1729   true_operand = skip_simple_arithmetic (true_operand);
1730   false_operand = skip_simple_arithmetic (false_operand);
1731 
1732   if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
1733     result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1734 
1735   if (addr_p)
1736     result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1737 
1738   return result;
1739 }
1740 
1741 /* Similar, but for COMPOUND_EXPR.  */
1742 
1743 tree
1744 build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
1745 {
1746   bool addr_p = false;
1747   tree result;
1748 
1749   /* If the result type is unconstrained, take the address of the operand and
1750      then dereference the result.  Likewise if the result type is passed by
1751      reference, but this is natively handled in the gimplifier.  */
1752   if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1753       || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1754     {
1755       result_type = build_pointer_type (result_type);
1756       expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
1757       addr_p = true;
1758     }
1759 
1760   result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
1761 			expr_operand);
1762 
1763   if (addr_p)
1764     result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1765 
1766   return result;
1767 }
1768 
1769 /* Conveniently construct a function call expression.  FNDECL names the
1770    function to be called, N is the number of arguments, and the "..."
1771    parameters are the argument expressions.  Unlike build_call_expr
1772    this doesn't fold the call, hence it will always return a CALL_EXPR.  */
1773 
1774 tree
1775 build_call_n_expr (tree fndecl, int n, ...)
1776 {
1777   va_list ap;
1778   tree fntype = TREE_TYPE (fndecl);
1779   tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
1780 
1781   va_start (ap, n);
1782   fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
1783   va_end (ap);
1784   return fn;
1785 }
1786 
1787 /* Build a goto to LABEL for a raise, with an optional call to Local_Raise.
1788    MSG gives the exception's identity for the call to Local_Raise, if any.  */
1789 
1790 static tree
1791 build_goto_raise (Entity_Id gnat_label, int msg)
1792 {
1793   tree gnu_label = gnat_to_gnu_entity (gnat_label, NULL_TREE, false);
1794   tree gnu_result = build1 (GOTO_EXPR, void_type_node, gnu_label);
1795   Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1796 
1797   /* If Local_Raise is present, build Local_Raise (Exception'Identity).  */
1798   if (Present (local_raise))
1799     {
1800       tree gnu_local_raise
1801 	= gnat_to_gnu_entity (local_raise, NULL_TREE, false);
1802       tree gnu_exception_entity
1803 	= gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, false);
1804       tree gnu_call
1805 	= build_call_n_expr (gnu_local_raise, 1,
1806 			     build_unary_op (ADDR_EXPR, NULL_TREE,
1807 					     gnu_exception_entity));
1808       gnu_result
1809 	= build2 (COMPOUND_EXPR, void_type_node, gnu_call, gnu_result);
1810     }
1811 
1812   TREE_USED (gnu_label) = 1;
1813   return gnu_result;
1814 }
1815 
1816 /* Expand the SLOC of GNAT_NODE, if present, into tree location information
1817    pointed to by FILENAME, LINE and COL.  Fall back to the current location
1818    if GNAT_NODE is absent or has no SLOC.  */
1819 
1820 static void
1821 expand_sloc (Node_Id gnat_node, tree *filename, tree *line, tree *col)
1822 {
1823   const char *str;
1824   int line_number, column_number;
1825 
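  /* If exception locations are suppressed, use an empty file name and a zero
     line and column.  */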
1826   if (Debug_Flag_NN || Exception_Locations_Suppressed)
1827     {
1828       str = "";
1829       line_number = 0;
1830       column_number = 0;
1831     }
1832   else if (Present (gnat_node) && Sloc (gnat_node) != No_Location)
1833     {
1834       str = Get_Name_String
1835 	    (Debug_Source_Name (Get_Source_File_Index (Sloc (gnat_node))));
1836       line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1837       column_number = Get_Column_Number (Sloc (gnat_node));
1838     }
1839   else
1840     {
1841       str = lbasename (LOCATION_FILE (input_location));
1842       line_number = LOCATION_LINE (input_location);
1843       column_number = LOCATION_COLUMN (input_location);
1844     }
1845 
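  /* Build the file name as a string constant with an explicit array type;
     the callers take its address.  */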
1846   const int len = strlen (str);
1847   *filename = build_string (len, str);
1848   TREE_TYPE (*filename) = build_array_type (char_type_node,
1849 					    build_index_type (size_int (len)));
1850   *line = build_int_cst (NULL_TREE, line_number);
1851   if (col)
1852     *col = build_int_cst (NULL_TREE, column_number);
1853 }
1854 
1855 /* Build a call to a function that raises an exception and passes file name
1856    and line number, if requested.  MSG says which exception function to call.
1857    GNAT_NODE is the node conveying the source location for which the error
1858    should be signaled, or Empty in which case the error is signaled for the
1859    current location.  KIND says which kind of exception node this is for,
1860    among N_Raise_{Constraint,Storage,Program}_Error.  */
1861 
1862 tree
1863 build_call_raise (int msg, Node_Id gnat_node, char kind)
1864 {
1865   Entity_Id gnat_label = get_exception_label (kind);
1866   tree fndecl = gnat_raise_decls[msg];
1867   tree filename, line;
1868 
1869   /* If this is to be done as a goto, handle that case.  */
1870   if (Present (gnat_label))
1871     return build_goto_raise (gnat_label, msg);
1872 
1873   expand_sloc (gnat_node, &filename, &line, NULL);
1874 
1875   return
1876     build_call_n_expr (fndecl, 2,
1877 		       build1 (ADDR_EXPR,
1878 			       build_pointer_type (char_type_node),
1879 			       filename),
1880 		       line);
1881 }
1882 
1883 /* Similar to build_call_raise, with extra information about the column
1884    where the check failed.  */
1885 
1886 tree
1887 build_call_raise_column (int msg, Node_Id gnat_node, char kind)
1888 {
1889   Entity_Id gnat_label = get_exception_label (kind);
1890   tree fndecl = gnat_raise_decls_ext[msg];
1891   tree filename, line, col;
1892 
1893   /* If this is to be done as a goto, handle that case.  */
1894   if (Present (gnat_label))
1895     return build_goto_raise (gnat_label, msg);
1896 
1897   expand_sloc (gnat_node, &filename, &line, &col);
1898 
1899   return
1900     build_call_n_expr (fndecl, 3,
1901 		       build1 (ADDR_EXPR,
1902 			       build_pointer_type (char_type_node),
1903 			       filename),
1904 		       line, col);
1905 }
1906 
1907 /* Similar to build_call_raise_column, for an index or range check exception,
1908    with extra information of the form "INDEX out of range FIRST..LAST".  */
1909 
1910 tree
1911 build_call_raise_range (int msg, Node_Id gnat_node, char kind,
1912 			tree index, tree first, tree last)
1913 {
1914   Entity_Id gnat_label = get_exception_label (kind);
1915   tree fndecl = gnat_raise_decls_ext[msg];
1916   tree filename, line, col;
1917 
1918   /* If this is to be done as a goto, handle that case.  */
1919   if (Present (gnat_label))
1920     return build_goto_raise (gnat_label, msg);
1921 
1922   expand_sloc (gnat_node, &filename, &line, &col);
1923 
1924   return
1925     build_call_n_expr (fndecl, 6,
1926 		       build1 (ADDR_EXPR,
1927 			       build_pointer_type (char_type_node),
1928 			       filename),
1929 		       line, col,
1930 		       convert (integer_type_node, index),
1931 		       convert (integer_type_node, first),
1932 		       convert (integer_type_node, last));
1933 }
1934 
1935 /* qsort comparer for the bit positions of two constructor elements
1936    for record components.  */
1937 
1938 static int
1939 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1940 {
1941   const constructor_elt * const elmt1 = (const constructor_elt *) rt1;
1942   const constructor_elt * const elmt2 = (const constructor_elt *) rt2;
1943   const_tree const field1 = elmt1->index;
1944   const_tree const field2 = elmt2->index;
1945   const int ret
1946     = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1947 
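  /* Use the DECL_UID as a tie-breaker to make the sort stable.  */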
1948   return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
1949 }
1950 
1951 /* Return a CONSTRUCTOR of TYPE whose elements are V.  */
1952 
1953 tree
1954 gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
1955 {
1956   bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1957   bool read_only = true;
1958   bool side_effects = false;
1959   tree result, obj, val;
1960   unsigned int n_elmts;
1961 
1962   /* Scan the elements to see if they are all constant or if any has side
1963      effects, to let us set global flags on the resulting constructor.  Count
1964      the elements along the way for possible sorting purposes below.  */
1965   FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
1966     {
1967       /* The predicate must be in keeping with output_constructor.  */
1968       if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
1969 	  || (TREE_CODE (type) == RECORD_TYPE
1970 	      && CONSTRUCTOR_BITFIELD_P (obj)
1971 	      && !initializer_constant_valid_for_bitfield_p (val))
1972 	  || !initializer_constant_valid_p (val,
1973 					    TREE_TYPE (val),
1974 					    TYPE_REVERSE_STORAGE_ORDER (type)))
1975 	allconstant = false;
1976 
1977       if (!TREE_READONLY (val))
1978 	read_only = false;
1979 
1980       if (TREE_SIDE_EFFECTS (val))
1981 	side_effects = true;
1982     }
1983 
1984   /* For record types with constant components only, sort field list
1985      by increasing bit position.  This is necessary to ensure the
1986      constructor can be output as static data.  */
1987   if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1988     v->qsort (compare_elmt_bitpos);
1989 
1990   result = build_constructor (type, v);
1991   CONSTRUCTOR_NO_CLEARING (result) = 1;
1992   TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1993   TREE_SIDE_EFFECTS (result) = side_effects;
1994   TREE_READONLY (result) = TYPE_READONLY (type) || read_only || allconstant;
1995   return result;
1996 }
1997 
1998 /* Return a COMPONENT_REF to access FIELD in RECORD, or NULL_TREE if the field
1999    is not found in the record.  Don't fold the result if NO_FOLD is true.  */
2000 
2001 static tree
2002 build_simple_component_ref (tree record, tree field, bool no_fold)
2003 {
2004   tree type = TYPE_MAIN_VARIANT (TREE_TYPE (record));
2005   tree ref;
2006 
2007   gcc_assert (RECORD_OR_UNION_TYPE_P (type) && COMPLETE_TYPE_P (type));
2008 
2009   /* Try to fold a conversion from another record or union type unless the type
2010      contains a placeholder as it might be needed for a later substitution.  */
2011   if (TREE_CODE (record) == VIEW_CONVERT_EXPR
2012       && RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (record, 0)))
2013       && !type_contains_placeholder_p (type))
2014     {
2015       tree op = TREE_OPERAND (record, 0);
2016 
2017       /* If this is an unpadding operation, convert the underlying object to
2018 	 the unpadded type directly.  */
2019       if (TYPE_IS_PADDING_P (type) && field == TYPE_FIELDS (type))
2020 	return convert (TREE_TYPE (field), op);
2021 
2022       /* Otherwise try to access FIELD directly in the underlying type, but
2023 	 make sure that the form of the reference doesn't change too much;
2024 	 this can happen for an unconstrained bit-packed array type whose
2025 	 constrained form can be an integer type.  */
2026       ref = build_simple_component_ref (op, field, no_fold);
2027       if (ref && TREE_CODE (TREE_TYPE (ref)) == TREE_CODE (TREE_TYPE (field)))
2028 	return ref;
2029     }
2030 
2031   /* If this field is not in the specified record, see if we can find a field
2032      in the specified record whose original field is the same as this one.  */
2033   if (DECL_CONTEXT (field) != type)
2034     {
2035       tree new_field;
2036 
2037       /* First loop through normal components.  */
2038       for (new_field = TYPE_FIELDS (type);
2039 	   new_field;
2040 	   new_field = DECL_CHAIN (new_field))
2041 	if (SAME_FIELD_P (field, new_field))
2042 	  break;
2043 
2044       /* Next, loop through DECL_INTERNAL_P components if we haven't found the
2045 	 component in the first search.  Doing this search in two steps is
2046 	 required to avoid hidden homonymous fields in the _Parent field.  */
2047       if (!new_field)
2048 	for (new_field = TYPE_FIELDS (type);
2049 	     new_field;
2050 	     new_field = DECL_CHAIN (new_field))
2051 	  if (DECL_INTERNAL_P (new_field)
2052 	      && RECORD_OR_UNION_TYPE_P (TREE_TYPE (new_field)))
2053 	    {
2054 	      tree field_ref
2055 		= build_simple_component_ref (record, new_field, no_fold);
2056 	      ref = build_simple_component_ref (field_ref, field, no_fold);
2057 	      if (ref)
2058 		return ref;
2059 	    }
2060 
2061       field = new_field;
2062     }
2063 
2064   if (!field)
2065     return NULL_TREE;
2066 
2067   /* If the field's offset has overflowed, do not try to access it, as doing
2068      so may trigger sanity checks deeper in the back-end.  Note that we don't
2069      need to warn since this will be done on trying to declare the object.  */
2070   if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
2071       && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
2072     return NULL_TREE;
2073 
2074   ref = build3 (COMPONENT_REF, TREE_TYPE (field), record, field, NULL_TREE);
2075 
2076   if (TREE_READONLY (record)
2077       || TREE_READONLY (field)
2078       || TYPE_READONLY (type))
2079     TREE_READONLY (ref) = 1;
2080 
2081   if (TREE_THIS_VOLATILE (record)
2082       || TREE_THIS_VOLATILE (field)
2083       || TYPE_VOLATILE (type))
2084     TREE_THIS_VOLATILE (ref) = 1;
2085 
2086   if (no_fold)
2087     return ref;
2088 
2089   /* The generic folder may punt in this case because the inner array type
2090      can be self-referential, but folding is in fact not problematic.  */
2091   if (TREE_CODE (record) == CONSTRUCTOR
2092       && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record)))
2093     {
2094       vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (record);
2095       unsigned HOST_WIDE_INT idx;
2096       tree index, value;
2097       FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
2098        if (index == field)
2099 	return value;
2100       return ref;
2101     }
2102 
2103   return fold (ref);
2104 }
2105 
2106 /* Likewise, but return NULL_EXPR and generate a Constraint_Error if the
2107    field is not found in the record.  */
2108 
2109 tree
2110 build_component_ref (tree record, tree field, bool no_fold)
2111 {
2112   tree ref = build_simple_component_ref (record, field, no_fold);
2113   if (ref)
2114     return ref;
2115 
2116   /* Assume this is an invalid user field so raise Constraint_Error.  */
2117   return build1 (NULL_EXPR, TREE_TYPE (field),
2118 		 build_call_raise (CE_Discriminant_Check_Failed, Empty,
2119 				   N_Raise_Constraint_Error));
2120 }
2121 
2122 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2123    identically.  Process the case where a GNAT_PROC to call is provided.  */
2124 
2125 static inline tree
2126 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2127 			       Entity_Id gnat_proc, Entity_Id gnat_pool)
2128 {
2129   tree gnu_proc = gnat_to_gnu (gnat_proc);
2130   tree gnu_call;
2131 
2132   /* A storage pool's underlying type is a record type (for both predefined
2133      storage pools and GNAT simple storage pools). The secondary stack uses
2134      the same mechanism, but its pool object (SS_Pool) is an integer.  */
2135   if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
2136     {
2137       /* The size is the third parameter; the alignment parameter
2138 	 has the same type.  */
2139       Entity_Id gnat_size_type
2140 	= Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2141       tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2142 
2143       tree gnu_pool = gnat_to_gnu (gnat_pool);
2144       tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
2145       tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2146 
2147       gnu_size = convert (gnu_size_type, gnu_size);
2148       gnu_align = convert (gnu_size_type, gnu_align);
2149 
2150       /* The first arg is always the address of the storage pool; next
2151 	 comes the address of the object, for a deallocator, then the
2152 	 size and alignment.  */
2153       if (gnu_obj)
2154 	gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2155 				      gnu_size, gnu_align);
2156       else
2157 	gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2158 				      gnu_size, gnu_align);
2159     }
2160 
2161   /* Secondary stack case.  */
2162   else
2163     {
2164       /* The size is the second parameter.  */
2165       Entity_Id gnat_size_type
2166 	= Etype (Next_Formal (First_Formal (gnat_proc)));
2167       tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2168 
2169       gnu_size = convert (gnu_size_type, gnu_size);
2170 
2171       /* The first arg is the address of the object, for a deallocator,
2172 	 then the size.  */
2173       if (gnu_obj)
2174 	gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
2175       else
2176 	gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
2177     }
2178 
2179   return gnu_call;
2180 }
2181 
2182 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
2183    DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2184    __gnat_malloc allocator.  Honor DATA_TYPE alignments greater than what the
2185    latter offers.  */
2186 
2187 static inline tree
2188 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2189 {
2190   /* When the DATA_TYPE alignment is stricter than what malloc offers
2191      (super-aligned case), we allocate an "aligning" wrapper type and return
2192      the address of its single data field with the malloc's return value
2193      stored just in front.  */
2194 
2195   unsigned int data_align = TYPE_ALIGN (data_type);
2196   unsigned int system_allocator_alignment
2197       = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2198 
2199   tree aligning_type
2200     = ((data_align > system_allocator_alignment)
2201        ? make_aligning_type (data_type, data_align, data_size,
2202 			     system_allocator_alignment,
2203 			     POINTER_SIZE / BITS_PER_UNIT,
2204 			     gnat_node)
2205        : NULL_TREE);
2206 
2207   tree size_to_malloc
2208     = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
2209 
2210   tree malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
2211 
2212   if (aligning_type)
2213     {
2214       /* Latch malloc's return value and get a pointer to the aligning field
2215 	 first.  */
2216       tree storage_ptr = gnat_protect_expr (malloc_ptr);
2217 
2218       tree aligning_record_addr
2219 	= convert (build_pointer_type (aligning_type), storage_ptr);
2220 
2221       tree aligning_record
2222 	= build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2223 
2224       tree aligning_field
2225 	= build_component_ref (aligning_record, TYPE_FIELDS (aligning_type),
2226 			       false);
2227 
2228       tree aligning_field_addr
2229         = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2230 
2231       /* Then arrange to store the allocator's return value ahead
2232 	 and return.  */
2233       tree storage_ptr_slot_addr
2234 	= build_binary_op (POINTER_PLUS_EXPR, ptr_type_node,
2235 			   convert (ptr_type_node, aligning_field_addr),
2236 			   size_int (-(HOST_WIDE_INT) POINTER_SIZE
2237 				     / BITS_PER_UNIT));
2238 
2239       tree storage_ptr_slot
2240 	= build_unary_op (INDIRECT_REF, NULL_TREE,
2241 			  convert (build_pointer_type (ptr_type_node),
2242 				   storage_ptr_slot_addr));
2243 
2244       return
2245 	build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2246 		build_binary_op (INIT_EXPR, NULL_TREE,
2247 				 storage_ptr_slot, storage_ptr),
2248 		aligning_field_addr);
2249     }
2250   else
2251     return malloc_ptr;
2252 }
2253 
2254 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2255    designated by DATA_PTR using the __gnat_free entry point.  */
2256 
2257 static inline tree
2258 maybe_wrap_free (tree data_ptr, tree data_type)
2259 {
2260   /* In the regular alignment case, we pass the data pointer straight to free.
2261      In the superaligned case, we need to retrieve the initial allocator
2262      return value, stored in front of the data block at allocation time.  */
2263 
2264   unsigned int data_align = TYPE_ALIGN (data_type);
2265   unsigned int system_allocator_alignment
2266       = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2267 
2268   tree free_ptr;
2269 
2270   if (data_align > system_allocator_alignment)
2271     {
2272       /* DATA_FRONT_PTR (void *)
2273 	 = (void *)DATA_PTR - sizeof (void *)  */
2274       tree data_front_ptr
2275 	= build_binary_op
2276 	  (POINTER_PLUS_EXPR, ptr_type_node,
2277 	   convert (ptr_type_node, data_ptr),
2278 	   size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2279 
2280       /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR  */
2281       free_ptr
2282 	= build_unary_op
2283 	  (INDIRECT_REF, NULL_TREE,
2284 	   convert (build_pointer_type (ptr_type_node), data_front_ptr));
2285     }
2286   else
2287     free_ptr = data_ptr;
2288 
2289   return build_call_n_expr (free_decl, 1, free_ptr);
2290 }
2291 
2292 /* Build a GCC tree to call an allocation or deallocation function.
2293    If GNU_OBJ is nonzero, it is an object to deallocate.  Otherwise,
2294    generate an allocator.
2295 
2296    GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2297    object type, used to determine the to-be-honored address alignment.
2298    GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2299    pool to use.  If not present, malloc and free are used.  GNAT_NODE is used
2300    to provide an error location for restriction violation messages.  */
2301 
2302 tree
2303 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2304                           Entity_Id gnat_proc, Entity_Id gnat_pool,
2305                           Node_Id gnat_node)
2306 {
2307   /* Explicit proc to call?  This one is assumed to deal with the type
2308      alignment constraints.  */
2309   if (Present (gnat_proc))
2310     return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2311 					  gnat_proc, gnat_pool);
2312 
2313   /* Otherwise, object to "free" or "malloc" with possible special processing
2314      for alignments stricter than what the default allocator honors.  */
2315   else if (gnu_obj)
2316     return maybe_wrap_free (gnu_obj, gnu_type);
2317   else
2318     {
2319       /* Assert that we can no longer be called with this special pool.  */
2320       gcc_assert (gnat_pool != -1);
2321 
2322       /* Check that we aren't violating the associated restriction.  */
2323       if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2324 	{
2325 	  Check_No_Implicit_Heap_Alloc (gnat_node);
2326 	  if (Has_Task (Etype (gnat_node)))
2327 	    Check_No_Implicit_Task_Alloc (gnat_node);
2328 	  if (Has_Protected (Etype (gnat_node)))
2329 	    Check_No_Implicit_Protected_Alloc (gnat_node);
2330 	}
2331       return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2332     }
2333 }
2334 
2335 /* Build a GCC tree that corresponds to allocating an object of TYPE whose
2336    initial value is INIT, if INIT is nonzero.  Convert the expression to
2337    RESULT_TYPE, which must be some pointer type, and return the result.
2338 
2339    GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2340    the storage pool to use.  GNAT_NODE is used to provide an error
2341    location for restriction violation messages.  If IGNORE_INIT_TYPE is
2342    true, ignore the type of INIT for the purpose of determining the size;
2343    this will cause the maximum size to be allocated if TYPE is of
2344    self-referential size.  */
2345 
2346 tree
2347 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2348                  Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2349 {
2350   tree size, storage, storage_deref, storage_init;
2351 
2352   /* If the initializer, if present, is a NULL_EXPR, just return a new one.  */
2353   if (init && TREE_CODE (init) == NULL_EXPR)
2354     return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2355 
2356   /* If we are just annotating types, also return a NULL_EXPR.  */
2357   else if (type_annotate_only)
2358     return build1 (NULL_EXPR, result_type,
2359 		   build_call_raise (CE_Range_Check_Failed, gnat_node,
2360 				     N_Raise_Constraint_Error));
2361 
2362   /* If the initializer, if present, is a COND_EXPR, deal with each branch.  */
2363   else if (init && TREE_CODE (init) == COND_EXPR)
2364     return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2365 		   build_allocator (type, TREE_OPERAND (init, 1), result_type,
2366 				    gnat_proc, gnat_pool, gnat_node,
2367 				    ignore_init_type),
2368 		   build_allocator (type, TREE_OPERAND (init, 2), result_type,
2369 				    gnat_proc, gnat_pool, gnat_node,
2370 				    ignore_init_type));
2371 
2372   /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2373      sizes of the object and its template.  Allocate the whole thing and
2374      fill in the parts that are known.  */
2375   else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2376     {
2377       tree storage_type
2378 	= build_unc_object_type_from_ptr (result_type, type,
2379 					  get_identifier ("ALLOC"), false);
2380       tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2381       tree storage_ptr_type = build_pointer_type (storage_type);
2382 
2383       size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2384 					     init);
2385 
2386       /* If the size overflows, pass -1 so Storage_Error will be raised.  */
2387       if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2388 	size = size_int (-1);
2389 
2390       storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2391 					  gnat_proc, gnat_pool, gnat_node);
2392       storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2393       storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2394       TREE_THIS_NOTRAP (storage_deref) = 1;
2395 
2396       /* If there is an initializing expression, then make a constructor for
2397 	 the entire object including the bounds and copy it into the object.
2398 	 If there is no initializing expression, just set the bounds.  */
2399       if (init)
2400 	{
2401 	  vec<constructor_elt, va_gc> *v;
2402 	  vec_alloc (v, 2);
2403 
2404 	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2405 				  build_template (template_type, type, init));
2406 	  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2407 				  init);
2408 	  storage_init
2409 	    = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
2410 			       gnat_build_constructor (storage_type, v));
2411 	}
2412       else
2413 	storage_init
2414 	  = build_binary_op (INIT_EXPR, NULL_TREE,
2415 			     build_component_ref (storage_deref,
2416 						  TYPE_FIELDS (storage_type),
2417 						  false),
2418 			     build_template (template_type, type, NULL_TREE));
2419 
2420       return build2 (COMPOUND_EXPR, result_type,
2421 		     storage_init, convert (result_type, storage));
2422     }
2423 
2424   size = TYPE_SIZE_UNIT (type);
2425 
2426   /* If we have an initializing expression, see if its size is simpler
2427      than the size from the type.  */
2428   if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2429       && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2430 	  || CONTAINS_PLACEHOLDER_P (size)))
2431     size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2432 
2433   /* If the size is still self-referential, reference the initializing
2434      expression, if it is present.  If not, this must have been a
2435      call to allocate a library-level object, in which case we use
2436      the maximum size.  */
2437   if (CONTAINS_PLACEHOLDER_P (size))
2438     {
2439       if (!ignore_init_type && init)
2440 	size = substitute_placeholder_in_expr (size, init);
2441       else
2442 	size = max_size (size, true);
2443     }
2444 
2445   /* If the size overflows, pass -1 so Storage_Error will be raised.  */
2446   if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2447     size = size_int (-1);
2448 
2449   storage = convert (result_type,
2450 		     build_call_alloc_dealloc (NULL_TREE, size, type,
2451 					       gnat_proc, gnat_pool,
2452 					       gnat_node));
2453 
2454   /* If we have an initial value, protect the new address, assign the value
2455      and return the address with a COMPOUND_EXPR.  */
2456   if (init)
2457     {
2458       storage = gnat_protect_expr (storage);
2459       storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2460       TREE_THIS_NOTRAP (storage_deref) = 1;
2461       storage_init
2462 	= build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
2463       return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
2464     }
2465 
2466   return storage;
2467 }
2468 
2469 /* Indicate that we need to take the address of T and that it therefore
2470    should not be allocated in a register.  Return true if successful.  */
2471 
2472 bool
2473 gnat_mark_addressable (tree t)
2474 {
2475   while (true)
2476     switch (TREE_CODE (t))
2477       {
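      /* Strip outer references and conversions to find the underlying
         object.  */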
2478       case ADDR_EXPR:
2479       case COMPONENT_REF:
2480       case ARRAY_REF:
2481       case ARRAY_RANGE_REF:
2482       case REALPART_EXPR:
2483       case IMAGPART_EXPR:
2484       case VIEW_CONVERT_EXPR:
2485       case NON_LVALUE_EXPR:
2486       CASE_CONVERT:
2487 	t = TREE_OPERAND (t, 0);
2488 	break;
2489 
2490       case COMPOUND_EXPR:
2491 	t = TREE_OPERAND (t, 1);
2492 	break;
2493 
2494       case CONSTRUCTOR:
2495 	TREE_ADDRESSABLE (t) = 1;
2496 	return true;
2497 
2498       case VAR_DECL:
2499       case PARM_DECL:
2500       case RESULT_DECL:
2501 	TREE_ADDRESSABLE (t) = 1;
2502 	return true;
2503 
2504       case FUNCTION_DECL:
2505 	TREE_ADDRESSABLE (t) = 1;
2506 	return true;
2507 
2508       case CONST_DECL:
2509 	return DECL_CONST_CORRESPONDING_VAR (t)
2510 	       && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2511 
2512       default:
2513 	return true;
2514     }
2515 }
2516 
2517 /* Return true if EXP is a stable expression for the purpose of the functions
2518    below and, therefore, can be returned unmodified by them.  We accept things
2519    that are actual constants or that have already been handled.  */
2520 
2521 static bool
2522 gnat_stable_expr_p (tree exp)
2523 {
2524   enum tree_code code = TREE_CODE (exp);
2525   return TREE_CONSTANT (exp) || code == NULL_EXPR || code == SAVE_EXPR;
2526 }
2527 
2528 /* Save EXP for later use or reuse.  This is equivalent to save_expr in tree.c
2529    but we know how to handle our own nodes.  */
2530 
2531 tree
2532 gnat_save_expr (tree exp)
2533 {
2534   tree type = TREE_TYPE (exp);
2535   enum tree_code code = TREE_CODE (exp);
2536 
2537   if (gnat_stable_expr_p (exp))
2538     return exp;
2539 
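  /* For an unconstrained array reference, it suffices to save the fat or
     thin pointer it is built upon.  */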
2540   if (code == UNCONSTRAINED_ARRAY_REF)
2541     {
2542       tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2543       TREE_READONLY (t) = TYPE_READONLY (type);
2544       return t;
2545     }
2546 
2547   /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2548      This may be more efficient, but will also allow us to more easily find
2549      the match for the PLACEHOLDER_EXPR.  */
2550   if (code == COMPONENT_REF
2551       && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2552     return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
2553 		   TREE_OPERAND (exp, 1), NULL_TREE);
2554 
2555   return save_expr (exp);
2556 }
2557 
2558 /* Protect EXP for immediate reuse.  This is a variant of gnat_save_expr that
2559    is optimized under the assumption that EXP's value doesn't change before
2560    its subsequent reuse(s) except through its potential reevaluation.  */
2561 
2562 tree
2563 gnat_protect_expr (tree exp)
2564 {
2565   tree type = TREE_TYPE (exp);
2566   enum tree_code code = TREE_CODE (exp);
2567 
2568   if (gnat_stable_expr_p (exp))
2569     return exp;
2570 
2571   /* If EXP has no side effects, we theoretically don't need to do anything.
2572      However, we may be recursively passed more and more complex expressions
2573      involving checks which will be reused multiple times and eventually be
2574      unshared for gimplification; in order to avoid a complexity explosion
2575      at that point, we protect any expressions more complex than a simple
2576      arithmetic expression.  */
2577   if (!TREE_SIDE_EFFECTS (exp))
2578     {
2579       tree inner = skip_simple_arithmetic (exp);
2580       if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2581 	return exp;
2582     }
2583 
2584   /* If this is a conversion, protect what's inside the conversion.  */
2585   if (code == NON_LVALUE_EXPR
2586       || CONVERT_EXPR_CODE_P (code)
2587       || code == VIEW_CONVERT_EXPR)
2588     return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2589 
2590   /* If we're indirectly referencing something, we only need to protect the
2591      address since the data itself can't change in these situations.  */
2592   if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2593     {
2594       tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2595       TREE_READONLY (t) = TYPE_READONLY (type);
2596       return t;
2597     }
2598 
2599   /* Likewise if we're indirectly referencing part of something.  */
2600   if (code == COMPONENT_REF
2601       && TREE_CODE (TREE_OPERAND (exp, 0)) == INDIRECT_REF)
2602     return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2603 		   TREE_OPERAND (exp, 1), NULL_TREE);
2604 
2605   /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2606      This may be more efficient, but will also allow us to more easily find
2607      the match for the PLACEHOLDER_EXPR.  */
2608   if (code == COMPONENT_REF
2609       && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2610     return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2611 		   TREE_OPERAND (exp, 1), NULL_TREE);
2612 
2613   /* If this is a fat pointer or a scalar, just make a SAVE_EXPR.  Likewise
2614      for a CALL_EXPR as large objects are returned via invisible reference
2615      in most ABIs so the temporary will directly be filled by the callee.  */
2616   if (TYPE_IS_FAT_POINTER_P (type)
2617       || !AGGREGATE_TYPE_P (type)
2618       || code == CALL_EXPR)
2619     return save_expr (exp);
2620 
2621   /* Otherwise, take the address, protect it and dereference the result.  */
2622   return
2623     build_unary_op (INDIRECT_REF, type,
2624 		    save_expr (build_unary_op (ADDR_EXPR, NULL_TREE, exp)));
2625 }
2626 
2627 /* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
2628    argument to force evaluation of everything.  */
2629 
2630 static tree
2631 gnat_stabilize_reference_1 (tree e, void *data)
2632 {
2633   const bool force = *(bool *)data;
2634   enum tree_code code = TREE_CODE (e);
2635   tree type = TREE_TYPE (e);
2636   tree result;
2637 
2638   if (gnat_stable_expr_p (e))
2639     return e;
2640 
2641   switch (TREE_CODE_CLASS (code))
2642     {
2643     case tcc_exceptional:
2644     case tcc_declaration:
2645     case tcc_comparison:
2646     case tcc_expression:
2647     case tcc_reference:
2648     case tcc_vl_exp:
2649       /* If this is a COMPONENT_REF of a fat pointer, save the entire
2650 	 fat pointer.  This may be more efficient, but will also allow
2651 	 us to more easily find the match for the PLACEHOLDER_EXPR.  */
2652       if (code == COMPONENT_REF
2653 	  && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2654 	result
2655 	  = build3 (code, type,
2656 		    gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2657 		    TREE_OPERAND (e, 1), NULL_TREE);
2658       /* If the expression has side-effects, then encase it in a SAVE_EXPR
2659 	 so that it will only be evaluated once.  */
2660       /* The tcc_reference and tcc_comparison classes could be handled as
2661 	 below, but it is generally faster to only evaluate them once.  */
2662       else if (TREE_SIDE_EFFECTS (e) || force)
2663 	return save_expr (e);
2664       else
2665 	return e;
2666       break;
2667 
2668     case tcc_binary:
2669       /* Recursively stabilize each operand.  */
2670       result
2671 	= build2 (code, type,
2672 		  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2673 		  gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), data));
2674       break;
2675 
2676     case tcc_unary:
2677       /* Recursively stabilize each operand.  */
2678       result
2679 	= build1 (code, type,
2680 		  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data));
2681       break;
2682 
2683     default:
2684       gcc_unreachable ();
2685     }
2686 
2687   TREE_READONLY (result) = TREE_READONLY (e);
2688   TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
2689   TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2690 
2691   return result;
2692 }
2693 
2694 /* This is equivalent to stabilize_reference in tree.c but we know how to
2695    handle our own nodes and we take extra arguments.  FORCE says whether to
2696    force evaluation of everything in REF.  INIT is set to the first arm of
2697    a COMPOUND_EXPR present in REF, if any.  */
2698 
2699 tree
2700 gnat_stabilize_reference (tree ref, bool force, tree *init)
2701 {
2702   return
2703     gnat_rewrite_reference (ref, gnat_stabilize_reference_1, &force, init);
2704 }
2705 
2706 /* Rewrite reference REF and call FUNC on each expression within REF in the
2707    process.  DATA is passed unmodified to FUNC.  INIT is set to the first
2708    arm of a COMPOUND_EXPR present in REF, if any.  */
2709 
2710 tree
2711 gnat_rewrite_reference (tree ref, rewrite_fn func, void *data, tree *init)
2712 {
2713   tree type = TREE_TYPE (ref);
2714   enum tree_code code = TREE_CODE (ref);
2715   tree result;
2716 
2717   switch (code)
2718     {
2719     case CONST_DECL:
2720     case VAR_DECL:
2721     case PARM_DECL:
2722     case RESULT_DECL:
2723       /* No action is needed in this case.  */
2724       return ref;
2725 
2726     CASE_CONVERT:
2727     case FLOAT_EXPR:
2728     case FIX_TRUNC_EXPR:
2729     case REALPART_EXPR:
2730     case IMAGPART_EXPR:
2731     case VIEW_CONVERT_EXPR:
2732       result
2733 	= build1 (code, type,
2734 		  gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2735 					  init));
2736       break;
2737 
2738     case INDIRECT_REF:
2739     case UNCONSTRAINED_ARRAY_REF:
2740       result = build1 (code, type, func (TREE_OPERAND (ref, 0), data));
2741       break;
2742 
2743     case COMPONENT_REF:
2744       result = build3 (COMPONENT_REF, type,
2745 		       gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2746 					       data, init),
2747 		       TREE_OPERAND (ref, 1), NULL_TREE);
2748       break;
2749 
2750     case BIT_FIELD_REF:
2751       result = build3 (BIT_FIELD_REF, type,
2752 		       gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2753 					       data, init),
2754 		       TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
2755       REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
2756       break;
2757 
2758     case ARRAY_REF:
2759     case ARRAY_RANGE_REF:
2760       result
2761 	= build4 (code, type,
2762 		  gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2763 					  init),
2764 		  func (TREE_OPERAND (ref, 1), data),
2765 		  TREE_OPERAND (ref, 2), NULL_TREE);
2766       break;
2767 
2768     case COMPOUND_EXPR:
2769       gcc_assert (!*init);
2770       *init = TREE_OPERAND (ref, 0);
2771       /* We expect only the pattern built in Call_to_gnu.  */
2772       gcc_assert (DECL_P (TREE_OPERAND (ref, 1))
2773 		  || (TREE_CODE (TREE_OPERAND (ref, 1)) == COMPONENT_REF
2774 		      && DECL_P (TREE_OPERAND (TREE_OPERAND (ref, 1), 0))));
2775       return TREE_OPERAND (ref, 1);
2776 
2777     case CALL_EXPR:
2778       {
2779 	/* This can only be an atomic load.  */
2780 	gcc_assert (call_is_atomic_load (ref));
2781 
2782 	/* An atomic load is an INDIRECT_REF of its first argument.  */
2783 	tree t = CALL_EXPR_ARG (ref, 0);
2784 	if (TREE_CODE (t) == NOP_EXPR)
2785 	  t = TREE_OPERAND (t, 0);
2786 	if (TREE_CODE (t) == ADDR_EXPR)
2787 	  t = build1 (ADDR_EXPR, TREE_TYPE (t),
2788 		      gnat_rewrite_reference (TREE_OPERAND (t, 0), func, data,
2789 					      init));
2790 	else
2791 	  t = func (t, data);
2792 	t = fold_convert (TREE_TYPE (CALL_EXPR_ARG (ref, 0)), t);
2793 
2794 	result = build_call_expr (TREE_OPERAND (CALL_EXPR_FN (ref), 0), 2,
2795 				  t, CALL_EXPR_ARG (ref, 1));
2796       }
2797       break;
2798 
2799     case ERROR_MARK:
2800     case NULL_EXPR:
2801       return ref;
2802 
2803     default:
2804       gcc_unreachable ();
2805     }
2806 
2807   /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
2808      may not be sustained across some paths, such as the way via build1 for
2809      INDIRECT_REF.  We reset those flags here in the general case, which is
2810      consistent with the GCC version of this routine.
2811 
2812      Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2813      paths introduce side-effects where there was none initially (e.g. if a
2814      SAVE_EXPR is built) and we also want to keep track of that.  */
2815   TREE_READONLY (result) = TREE_READONLY (ref);
2816   TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
2817   TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2818 
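  /* Also preserve TREE_THIS_NOTRAP on the rewritten memory references.  */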
2819   if (code == INDIRECT_REF
2820       || code == UNCONSTRAINED_ARRAY_REF
2821       || code == ARRAY_REF
2822       || code == ARRAY_RANGE_REF)
2823     TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2824 
2825   return result;
2826 }
2827 
2828 /* This is equivalent to get_inner_reference in expr.c but it returns the
2829    ultimate containing object only if the reference (lvalue) is constant,
2830    i.e. if it doesn't depend on the context in which it is evaluated.  */
2831 
2832 tree
2833 get_inner_constant_reference (tree exp)
2834 {
2835   while (true)
2836     {
2837       switch (TREE_CODE (exp))
2838 	{
2839 	case BIT_FIELD_REF:
2840 	  break;
2841 
2842 	case COMPONENT_REF:
2843 	  if (!TREE_CONSTANT (DECL_FIELD_OFFSET (TREE_OPERAND (exp, 1))))
2844 	    return NULL_TREE;
2845 	  break;
2846 
2847 	case ARRAY_REF:
2848 	case ARRAY_RANGE_REF:
2849 	  {
2850 	    if (TREE_OPERAND (exp, 2))
2851 	      return NULL_TREE;
2852 
2853 	    tree array_type = TREE_TYPE (TREE_OPERAND (exp, 0));
2854 	    if (!TREE_CONSTANT (TREE_OPERAND (exp, 1))
2855 	        || !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
2856 	        || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (array_type))))
2857 	      return NULL_TREE;
2858 	  }
2859 	  break;
2860 
2861 	case REALPART_EXPR:
2862 	case IMAGPART_EXPR:
2863 	case VIEW_CONVERT_EXPR:
2864 	  break;
2865 
2866 	default:
2867 	  goto done;
2868 	}
2869 
2870       exp = TREE_OPERAND (exp, 0);
2871     }
2872 
2873 done:
2874   return exp;
2875 }
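
/* Illustrative sketch, not part of the original file: a hypothetical helper
   that uses get_inner_constant_reference as a predicate.  A non-null result
   means that every step of the access path down to the returned object
   (field offsets, array indices, bounds and element sizes) is compile-time
   constant, so the position of the reference within that object does not
   depend on the context in which it is evaluated.  */

static bool ATTRIBUTE_UNUSED
example_constant_access_path_p (tree ref)
{
  return get_inner_constant_reference (ref) != NULL_TREE;
}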

/* Return true if EXPR is the addition or the subtraction of a constant and,
   if so, set *ADD to the addend, *CST to the constant and *MINUS_P to true
   if this is a subtraction.  */

bool
is_simple_additive_expression (tree expr, tree *add, tree *cst, bool *minus_p)
{
  /* Skip overflow checks.  */
  if (TREE_CODE (expr) == COND_EXPR
      && TREE_CODE (COND_EXPR_THEN (expr)) == COMPOUND_EXPR
      && TREE_CODE (TREE_OPERAND (COND_EXPR_THEN (expr), 0)) == CALL_EXPR
      && get_callee_fndecl (TREE_OPERAND (COND_EXPR_THEN (expr), 0))
	 == gnat_raise_decls[CE_Overflow_Check_Failed])
    expr = COND_EXPR_ELSE (expr);

  if (TREE_CODE (expr) == PLUS_EXPR)
    {
      if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
	{
	  *add = TREE_OPERAND (expr, 1);
	  *cst = TREE_OPERAND (expr, 0);
	  *minus_p = false;
	  return true;
	}
      else if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
	{
	  *add = TREE_OPERAND (expr, 0);
	  *cst = TREE_OPERAND (expr, 1);
	  *minus_p = false;
	  return true;
	}
    }
  else if (TREE_CODE (expr) == MINUS_EXPR)
    {
      if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
	{
	  *add = TREE_OPERAND (expr, 0);
	  *cst = TREE_OPERAND (expr, 1);
	  *minus_p = true;
	  return true;
	}
    }

  return false;
}
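
/* Illustrative sketch, not part of the original file: a hypothetical helper
   that rebuilds a matched addition or subtraction around its addend, which
   is essentially what gnat_invariant_expr below does after processing the
   addend.  */

static tree ATTRIBUTE_UNUSED
example_rebuild_additive (tree expr)
{
  tree add, cst;
  bool minus_p;

  if (!is_simple_additive_expression (expr, &add, &cst, &minus_p))
    return expr;

  /* Reassociate the constant around the addend in the original type.  */
  return fold_build2 (minus_p ? MINUS_EXPR : PLUS_EXPR, TREE_TYPE (expr),
		      add, cst);
}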

/* If EXPR is an expression that is invariant in the current function, in the
   sense that it can be evaluated anywhere in the function and any number of
   times, return EXPR or an equivalent expression.  Otherwise return NULL.  */

tree
gnat_invariant_expr (tree expr)
{
  const tree type = TREE_TYPE (expr);
  tree add, cst;
  bool minus_p;

  expr = remove_conversions (expr, false);

  /* Look through temporaries created to capture values.  */
  while ((TREE_CODE (expr) == CONST_DECL
	  || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
	 && decl_function_context (expr) == current_function_decl
	 && DECL_INITIAL (expr))
    {
      expr = DECL_INITIAL (expr);
      /* Look into CONSTRUCTORs built to initialize padded types.  */
      if (TYPE_IS_PADDING_P (TREE_TYPE (expr)))
	expr = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (expr))), expr);
      expr = remove_conversions (expr, false);
    }

  /* We are only interested in scalar types at the moment and, even if we may
     have gone through padding types in the above loop, we must be back to a
     scalar value at this point.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (expr)))
    return NULL_TREE;

  if (TREE_CONSTANT (expr))
    return fold_convert (type, expr);

  /* Deal with addition or subtraction of constants.  */
  if (is_simple_additive_expression (expr, &add, &cst, &minus_p))
    {
      add = gnat_invariant_expr (add);
      if (add)
	return
	  fold_build2 (minus_p ? MINUS_EXPR : PLUS_EXPR, type,
		       fold_convert (type, add), fold_convert (type, cst));
      else
	return NULL_TREE;
    }

  bool invariant_p = false;
  tree t = expr;

  while (true)
    {
      switch (TREE_CODE (t))
	{
	case COMPONENT_REF:
	  invariant_p |= DECL_INVARIANT_P (TREE_OPERAND (t, 1));
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  if (!TREE_CONSTANT (TREE_OPERAND (t, 1)) || TREE_OPERAND (t, 2))
	    return NULL_TREE;
	  break;

	case BIT_FIELD_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	case VIEW_CONVERT_EXPR:
	CASE_CONVERT:
	  break;

	case INDIRECT_REF:
	  if ((!invariant_p && !TREE_READONLY (t)) || TREE_SIDE_EFFECTS (t))
	    return NULL_TREE;
	  invariant_p = false;
	  break;

	default:
	  goto object;
	}

      t = TREE_OPERAND (t, 0);
    }

object:
  if (TREE_SIDE_EFFECTS (t))
    return NULL_TREE;

  if (TREE_CODE (t) == CONST_DECL
      && (DECL_EXTERNAL (t)
	  || decl_function_context (t) != current_function_decl))
    return fold_convert (type, expr);

  if (!invariant_p && !TREE_READONLY (t))
    return NULL_TREE;

  if (TREE_CODE (t) == PARM_DECL)
    return fold_convert (type, expr);

  if (TREE_CODE (t) == VAR_DECL
      && (DECL_EXTERNAL (t)
	  || decl_function_context (t) != current_function_decl))
    return fold_convert (type, expr);

  return NULL_TREE;
}
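
/* Illustrative sketch, not part of the original file: a hypothetical caller
   that prefers the invariant form of an expression, e.g. a bound that is to
   be hoisted and re-evaluated freely, and falls back to the original
   expression when no invariant form exists.  */

static tree ATTRIBUTE_UNUSED
example_prefer_invariant (tree expr)
{
  tree inv = gnat_invariant_expr (expr);
  return inv ? inv : expr;
}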