1 /* Support for fully folding sub-trees of an expression for C compiler.
2 Copyright (C) 1992-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "target.h"
24 #include "function.h"
25 #include "bitmap.h"
26 #include "c-tree.h"
27 #include "intl.h"
28 #include "gimplify.h"
29
30 static tree c_fully_fold_internal (tree expr, bool, bool *, bool *, bool,
31 bool);
32
33 /* If DISABLE is true, stop issuing warnings. This is used when
34 parsing code that we know will not be executed. This function may
35 be called multiple times, and works as a stack. */
36
37 static void
c_disable_warnings(bool disable)38 c_disable_warnings (bool disable)
39 {
40 if (disable)
41 {
42 ++c_inhibit_evaluation_warnings;
43 fold_defer_overflow_warnings ();
44 }
45 }
46
47 /* If ENABLE is true, reenable issuing warnings. */
48
49 static void
c_enable_warnings(bool enable)50 c_enable_warnings (bool enable)
51 {
52 if (enable)
53 {
54 --c_inhibit_evaluation_warnings;
55 fold_undefer_and_ignore_overflow_warnings ();
56 }
57 }
58
59 /* Try to fold ARRAY_REF ary[index] if possible and not handled by
60 normal fold, return NULL_TREE otherwise. */
61
static tree
c_fold_array_ref (tree type, tree ary, tree index)
{
  /* Only handle constant indexing into a string literal of array type,
     with an index free of overflow that fits an unsigned HOST_WIDE_INT.  */
  if (TREE_CODE (ary) != STRING_CST
      || TREE_CODE (index) != INTEGER_CST
      || TREE_OVERFLOW (index)
      || TREE_CODE (TREE_TYPE (ary)) != ARRAY_TYPE
      || !tree_fits_uhwi_p (index))
    return NULL_TREE;

  /* Number of host characters that make up one array element (1 for
     plain char strings, larger for wide strings).  */
  tree elem_type = TREE_TYPE (TREE_TYPE (ary));
  unsigned elem_nchars = (TYPE_PRECISION (elem_type)
			  / TYPE_PRECISION (char_type_node));
  /* String length measured in elements, not characters.  */
  unsigned len = (unsigned) TREE_STRING_LENGTH (ary) / elem_nchars;
  /* Fold the array bound so it can be compared against INDEX below.  */
  tree nelts = array_type_nelts (TREE_TYPE (ary));
  bool dummy1 = true, dummy2 = true;
  nelts = c_fully_fold_internal (nelts, true, &dummy1, &dummy2, false, false);
  unsigned HOST_WIDE_INT i = tree_to_uhwi (index);
  /* Refuse to fold out-of-bounds accesses; the last conjunct is a
     conservative extra guard (it mixes element index with characters
     per element) — NOTE(review): looks intentional, confirm upstream.  */
  if (!tree_int_cst_le (index, nelts)
      || i >= len
      || i + elem_nchars > len)
    return NULL_TREE;

  /* Narrow character: return the byte directly.  */
  if (elem_nchars == 1)
    return build_int_cst (type, TREE_STRING_POINTER (ary)[i]);

  /* Wide character: reinterpret the element's raw bytes as TYPE.  */
  const unsigned char *ptr
    = ((const unsigned char *)TREE_STRING_POINTER (ary) + i * elem_nchars);
  return native_interpret_expr (type, ptr, elem_nchars);
}
92
93 /* Fully fold EXPR, an expression that was not folded (beyond integer
94 constant expressions and null pointer constants) when being built
95 up. If IN_INIT, this is in a static initializer and certain
96 changes are made to the folding done. Clear *MAYBE_CONST if
97 MAYBE_CONST is not NULL and EXPR is definitely not a constant
98 expression because it contains an evaluated operator (in C99) or an
99 operator outside of sizeof returning an integer constant (in C90)
100 not permitted in constant expressions, or because it contains an
101 evaluated arithmetic overflow. (*MAYBE_CONST should typically be
102 set to true by callers before calling this function.) Return the
103 folded expression. Function arguments have already been folded
104 before calling this function, as have the contents of SAVE_EXPR,
105 TARGET_EXPR, BIND_EXPR, VA_ARG_EXPR, OBJ_TYPE_REF and
106 C_MAYBE_CONST_EXPR. LVAL is true if it should be treated as an
107 lvalue. */
108
109 tree
c_fully_fold(tree expr,bool in_init,bool * maybe_const,bool lval)110 c_fully_fold (tree expr, bool in_init, bool *maybe_const, bool lval)
111 {
112 tree ret;
113 tree eptype = NULL_TREE;
114 bool dummy = true;
115 bool maybe_const_itself = true;
116 location_t loc = EXPR_LOCATION (expr);
117
118 if (!maybe_const)
119 maybe_const = &dummy;
120 if (TREE_CODE (expr) == EXCESS_PRECISION_EXPR)
121 {
122 eptype = TREE_TYPE (expr);
123 expr = TREE_OPERAND (expr, 0);
124 }
125 ret = c_fully_fold_internal (expr, in_init, maybe_const,
126 &maybe_const_itself, false, lval);
127 if (eptype)
128 ret = fold_convert_loc (loc, eptype, ret);
129 *maybe_const &= maybe_const_itself;
130 return ret;
131 }
132
133 /* Internal helper for c_fully_fold. EXPR and IN_INIT are as for
134 c_fully_fold. *MAYBE_CONST_OPERANDS is cleared because of operands
135 not permitted, while *MAYBE_CONST_ITSELF is cleared because of
136 arithmetic overflow (for C90, *MAYBE_CONST_OPERANDS is carried from
137 both evaluated and unevaluated subexpressions while
138 *MAYBE_CONST_ITSELF is carried from only evaluated
139 subexpressions). FOR_INT_CONST indicates if EXPR is an expression
140 with integer constant operands, and if any of the operands doesn't
141 get folded to an integer constant, don't fold the expression itself.
142 LVAL indicates folding of lvalue, where we can't replace it with
143 an rvalue. */
144
static tree
c_fully_fold_internal (tree expr, bool in_init, bool *maybe_const_operands,
		       bool *maybe_const_itself, bool for_int_const, bool lval)
{
  tree ret = expr;
  enum tree_code code = TREE_CODE (expr);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  location_t loc = EXPR_LOCATION (expr);
  tree op0, op1, op2, op3;
  tree orig_op0, orig_op1, orig_op2;
  bool op0_const = true, op1_const = true, op2_const = true;
  bool op0_const_self = true, op1_const_self = true, op2_const_self = true;
  /* Remember the no-warning flag so it can be reasserted on whatever
     tree ends up being returned (see the "out" label below).  */
  bool nowarning = TREE_NO_WARNING (expr);
  bool unused_p;
  /* Whether operand 0 must itself be folded as an lvalue (true for
     assignment targets, increment/decrement operands and the operand
     of ADDR_EXPR).  */
  bool op0_lval = false;
  /* Saved source range, restored on any replacement tree at "out".  */
  source_range old_range;

  /* Constants, declarations, statements, errors, and anything else not
     counted as an expression cannot usefully be folded further at this
     point.  */
  if (!IS_EXPR_CODE_CLASS (kind) || kind == tcc_statement)
    {
      /* Except for variables which we can optimize to its initializer.  */
      if (VAR_P (expr) && !lval && (optimize || in_init))
	{
	  if (in_init)
	    ret = decl_constant_value_1 (expr, true);
	  else
	    {
	      ret = decl_constant_value (expr);
	      /* Don't substitute aggregate-valued initializers outside
		 of static initializers.  */
	      if (ret != expr
		  && (TYPE_MODE (TREE_TYPE (ret)) == BLKmode
		      || TREE_CODE (TREE_TYPE (ret)) == ARRAY_TYPE))
		return expr;
	    }
	  /* Avoid unwanted tree sharing between the initializer and current
	     function's body where the tree can be modified e.g. by the
	     gimplifier.  */
	  if (ret != expr && TREE_STATIC (expr))
	    ret = unshare_expr (ret);
	  return ret;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    old_range = EXPR_LOCATION_RANGE (expr);

  /* Operands of variable-length expressions (function calls) have
     already been folded, as have __builtin_* function calls, and such
     expressions cannot occur in constant expressions.  */
  if (kind == tcc_vl_exp)
    {
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;
    }

  if (code == C_MAYBE_CONST_EXPR)
    {
      /* Unwrap the C_MAYBE_CONST_EXPR, propagating its recorded
	 non-constancy into the caller's flags.  */
      tree pre = C_MAYBE_CONST_EXPR_PRE (expr);
      tree inner = C_MAYBE_CONST_EXPR_EXPR (expr);
      if (C_MAYBE_CONST_EXPR_NON_CONST (expr))
	*maybe_const_operands = false;
      if (C_MAYBE_CONST_EXPR_INT_OPERANDS (expr))
	{
	  *maybe_const_itself = false;
	  inner = c_fully_fold_internal (inner, in_init, maybe_const_operands,
					 maybe_const_itself, true, lval);
	}
      /* Outside of initializers the pre-side-effects must be kept.  */
      if (pre && !in_init)
	ret = build2 (COMPOUND_EXPR, TREE_TYPE (expr), pre, inner);
      else
	ret = inner;
      goto out;
    }

  /* Assignment, increment, decrement, function call and comma
     operators, and statement expressions, cannot occur in constant
     expressions if evaluated / outside of sizeof.  (Function calls
     were handled above, though VA_ARG_EXPR is treated like a function
     call here, and statement expressions are handled through
     C_MAYBE_CONST_EXPR to avoid folding inside them.)  */
  switch (code)
    {
    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case COMPOUND_EXPR:
      *maybe_const_operands = false;
      break;

    case VA_ARG_EXPR:
    case TARGET_EXPR:
    case BIND_EXPR:
    case OBJ_TYPE_REF:
      /* Contents were already folded when built; just fold the node.  */
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;

    default:
      break;
    }

  /* Fold individual tree codes as appropriate.  */
  switch (code)
    {
    case COMPOUND_LITERAL_EXPR:
      /* Any non-constancy will have been marked in a containing
	 C_MAYBE_CONST_EXPR; there is no more folding to do here.  */
      goto out;

    case COMPONENT_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      /* The object is folded with the same lvalue-ness as the whole
	 reference; the field and offset operands are left alone.  */
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, lval);
      STRIP_TYPE_NOPS (op0);
      if (op0 != orig_op0)
	ret = build3 (COMPONENT_REF, TREE_TYPE (expr), op0, op1, op2);
      if (ret != expr)
	{
	  /* Preserve qualification flags on the rebuilt reference.  */
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      if (!lval)
	ret = fold (ret);
      goto out;

    case ARRAY_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op3 = TREE_OPERAND (expr, 3);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, lval);
      STRIP_TYPE_NOPS (op0);
      /* The index is always folded as an rvalue.  */
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      /* Fold "foo"[2] in initializers.  */
      if (!lval && in_init)
	{
	  ret = c_fold_array_ref (TREE_TYPE (expr), op0, op1);
	  if (ret)
	    goto out;
	  ret = expr;
	}
      if (op0 != orig_op0 || op1 != orig_op1)
	ret = build4 (ARRAY_REF, TREE_TYPE (expr), op0, op1, op2, op3);
      if (ret != expr)
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      if (!lval)
	ret = fold (ret);
      goto out;

    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
      /* For these codes operand 0 is the stored-to lvalue.  */
      op0_lval = true;
      /* FALLTHRU */
    case COMPOUND_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case POINTER_PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case COMPLEX_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case MEM_REF:
      /* Binary operations evaluating both arguments (increment and
	 decrement are binary internally in GCC).  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const,
				   op0_lval);
      STRIP_TYPE_NOPS (op0);
      /* The RHS of a MODIFY_EXPR was fully folded when building that
	 expression for the sake of conversion warnings.  */
      if (code != MODIFY_EXPR)
	op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				     maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);

      /* In integer-constant-expression context, give up rather than
	 fold an expression whose operands did not fold to constants.  */
      if (for_int_const && (TREE_CODE (op0) != INTEGER_CST
			    || TREE_CODE (op1) != INTEGER_CST))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
	ret = in_init
	  ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
	  : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
	ret = fold (expr);
      /* Warn about overflow newly introduced by this fold, not one
	 already present in an operand.  */
      if (TREE_OVERFLOW_P (ret)
	  && !TREE_OVERFLOW_P (op0)
	  && !(BINARY_CLASS_P (op0) && TREE_OVERFLOW_P (TREE_OPERAND (op0, 1)))
	  && !TREE_OVERFLOW_P (op1))
	overflow_warning (EXPR_LOC_OR_LOC (expr, input_location), ret, expr);
      /* Shift warnings below fire only when folding turned a
	 non-constant operand into a constant; constants written by the
	 user were diagnosed when the expression was built.  */
      if (code == LSHIFT_EXPR
	  && TREE_CODE (orig_op0) != INTEGER_CST
	  && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	  && TREE_CODE (op0) == INTEGER_CST
	  && c_inhibit_evaluation_warnings == 0
	  && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (orig_op0))
	  && tree_int_cst_sgn (op0) < 0)
	warning_at (loc, OPT_Wshift_negative_value,
		    "left shift of negative value");
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (orig_op1) != INTEGER_CST
	  && TREE_CODE (op1) == INTEGER_CST
	  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
	  && c_inhibit_evaluation_warnings == 0)
	{
	  if (tree_int_cst_sgn (op1) < 0)
	    warning_at (loc, OPT_Wshift_count_negative,
			(code == LSHIFT_EXPR
			 ? G_("left shift count is negative")
			 : G_("right shift count is negative")));
	  else if ((TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
		    || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
		   && compare_tree_int (op1,
					TYPE_PRECISION (TREE_TYPE (orig_op0)))
		      >= 0)
	    warning_at (loc, OPT_Wshift_count_overflow,
			(code == LSHIFT_EXPR
			 ? G_("left shift count >= width of type")
			 : G_("right shift count >= width of type")));
	  else if (TREE_CODE (TREE_TYPE (orig_op0)) == VECTOR_TYPE
		   && compare_tree_int (op1,
					TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig_op0))))
		      >= 0)
	    warning_at (loc, OPT_Wshift_count_overflow,
			code == LSHIFT_EXPR
			? G_("left shift count >= width of vector element")
			: G_("right shift count >= width of vector element"));
	}
      if (code == LSHIFT_EXPR
	  /* If either OP0 has been folded to INTEGER_CST...  */
	  && ((TREE_CODE (orig_op0) != INTEGER_CST
	       && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	       && TREE_CODE (op0) == INTEGER_CST)
	      /* ...or if OP1 has been folded to INTEGER_CST...  */
	      || (TREE_CODE (orig_op1) != INTEGER_CST
		  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
		  && TREE_CODE (op1) == INTEGER_CST))
	  && c_inhibit_evaluation_warnings == 0)
	/* ...then maybe we can detect an overflow.  */
	maybe_warn_shift_overflow (loc, op0, op1);
      /* Likewise diagnose a divisor that folding reduced to zero.  */
      if ((code == TRUNC_DIV_EXPR
	   || code == CEIL_DIV_EXPR
	   || code == FLOOR_DIV_EXPR
	   || code == EXACT_DIV_EXPR
	   || code == TRUNC_MOD_EXPR)
	  && TREE_CODE (orig_op1) != INTEGER_CST
	  && TREE_CODE (op1) == INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	      || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
	  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE)
	warn_for_div_by_zero (loc, op1);
      if (code == MEM_REF
	  && ret != expr
	  && TREE_CODE (ret) == MEM_REF)
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      goto out;

    case ADDR_EXPR:
      /* The operand of & is folded as an lvalue unconditionally.  */
      op0_lval = true;
      goto unary;
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      /* These inherit the lvalue-ness of the whole expression.  */
      op0_lval = lval;
      /* FALLTHRU */
    case INDIRECT_REF:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    CASE_CONVERT:
    case ADDR_SPACE_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case CONJ_EXPR:
    unary:
      /* Unary operations.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const,
				   op0_lval);
      STRIP_TYPE_NOPS (op0);

      if (for_int_const && TREE_CODE (op0) != INTEGER_CST)
	goto out;

      /* ??? Cope with user tricks that amount to offsetof.  The middle-end is
	 not prepared to deal with them if they occur in initializers.  */
      if (op0 != orig_op0
	  && code == ADDR_EXPR
	  && (op1 = get_base_address (op0)) != NULL_TREE
	  && INDIRECT_REF_P (op1)
	  && TREE_CONSTANT (TREE_OPERAND (op1, 0)))
	ret = fold_offsetof (op0, TREE_TYPE (expr));
      else if (op0 != orig_op0 || in_init)
	ret = in_init
	  ? fold_build1_initializer_loc (loc, code, TREE_TYPE (expr), op0)
	  : fold_build1_loc (loc, code, TREE_TYPE (expr), op0);
      else
	ret = fold (expr);
      if (code == INDIRECT_REF
	  && ret != expr
	  && INDIRECT_REF_P (ret))
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      switch (code)
	{
	case FIX_TRUNC_EXPR:
	case FLOAT_EXPR:
	CASE_CONVERT:
	  /* Don't warn about explicit conversions.  We will already
	     have warned about suspect implicit conversions.  */
	  break;

	default:
	  if (TREE_OVERFLOW_P (ret) && !TREE_OVERFLOW_P (op0))
	    overflow_warning (EXPR_LOCATION (expr), ret, op0);
	  break;
	}
      goto out;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Binary operations not necessarily evaluating both
	 arguments.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
				   for_int_const, false);
      STRIP_TYPE_NOPS (op0);

      /* OP1 is unevaluated when OP0 short-circuits the result;
	 suppress warnings while folding it in that case.  */
      unused_p = (op0 == (code == TRUTH_ANDIF_EXPR
			  ? truthvalue_false_node
			  : truthvalue_true_node));
      c_disable_warnings (unused_p);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
				   for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (unused_p);

      if (for_int_const
	  && (TREE_CODE (op0) != INTEGER_CST
	      /* Require OP1 be an INTEGER_CST only if it's evaluated.  */
	      || (!unused_p && TREE_CODE (op1) != INTEGER_CST)))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
	ret = in_init
	  ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
	  : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
	ret = fold (expr);
      /* OP0 always contributes to constancy; OP1 only when it may be
	 evaluated (C99 ignores an unevaluated non-constant operand).  */
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && (code == TRUTH_ANDIF_EXPR
		? op0 == truthvalue_false_node
		: op0 == truthvalue_true_node)))
	*maybe_const_operands &= op1_const;
      if (!(op0_const
	    && op0_const_self
	    && (code == TRUTH_ANDIF_EXPR
		? op0 == truthvalue_false_node
		: op0 == truthvalue_true_node)))
	*maybe_const_itself &= op1_const_self;
      goto out;

    case COND_EXPR:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
				   for_int_const, false);

      STRIP_TYPE_NOPS (op0);
      /* Suppress warnings in whichever arm a constant condition makes
	 unreachable.  */
      c_disable_warnings (op0 == truthvalue_false_node);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
				   for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (op0 == truthvalue_false_node);

      c_disable_warnings (op0 == truthvalue_true_node);
      op2 = c_fully_fold_internal (op2, in_init, &op2_const, &op2_const_self,
				   for_int_const, false);
      STRIP_TYPE_NOPS (op2);
      c_enable_warnings (op0 == truthvalue_true_node);

      if (for_int_const
	  && (TREE_CODE (op0) != INTEGER_CST
	      /* Only the evaluated operand must be an INTEGER_CST.  */
	      || (op0 == truthvalue_true_node
		  ? TREE_CODE (op1) != INTEGER_CST
		  : TREE_CODE (op2) != INTEGER_CST)))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
	ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
	ret = fold (expr);
      /* As for && / ||: an arm skipped by a constant condition need
	 not be constant for C99 constant-expression purposes.  */
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && op0 == truthvalue_false_node))
	*maybe_const_operands &= op1_const;
      if (!(op0_const
	    && op0_const_self
	    && op0 == truthvalue_false_node))
	*maybe_const_itself &= op1_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && op0 == truthvalue_true_node))
	*maybe_const_operands &= op2_const;
      if (!(op0_const
	    && op0_const_self
	    && op0 == truthvalue_true_node))
	*maybe_const_itself &= op2_const_self;
      goto out;

    case VEC_COND_EXPR:
      /* Unlike COND_EXPR, all three operands are always evaluated.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      op2 = c_fully_fold_internal (op2, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op2);

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
	ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
	ret = fold (expr);
      goto out;

    case EXCESS_PRECISION_EXPR:
      /* Each case where an operand with excess precision may be
	 encountered must remove the EXCESS_PRECISION_EXPR around
	 inner operands and possibly put one around the whole
	 expression or possibly convert to the semantic type (which
	 c_fully_fold does); we cannot tell at this stage which is
	 appropriate in any particular case.  */
      gcc_unreachable ();

    case SAVE_EXPR:
      /* Make sure to fold the contents of a SAVE_EXPR exactly once.  */
      op0 = TREE_OPERAND (expr, 0);
      if (!SAVE_EXPR_FOLDED_P (expr))
	{
	  op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				       maybe_const_itself, for_int_const,
				       false);
	  TREE_OPERAND (expr, 0) = op0;
	  SAVE_EXPR_FOLDED_P (expr) = true;
	}
      /* Return the SAVE_EXPR operand if it is invariant.  */
      if (tree_invariant_p (op0))
	ret = op0;
      goto out;

    default:
      /* Various codes may appear through folding built-in functions
	 and their arguments.  */
      goto out;
    }

 out:
  /* Some folding may introduce NON_LVALUE_EXPRs; all lvalue checks
     have been done by this point, so remove them again.  */
  nowarning |= TREE_NO_WARNING (ret);
  STRIP_TYPE_NOPS (ret);
  if (nowarning && !TREE_NO_WARNING (ret))
    {
      /* Wrap shared nodes that cannot carry a location so the flag
	 can be set without affecting other uses.  */
      if (!CAN_HAVE_LOCATION_P (ret))
	ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
    }
  if (ret != expr)
    {
      /* Propagate the original location and source range onto the
	 replacement tree.  */
      protected_set_expr_location (ret, loc);
      if (IS_EXPR_CODE_CLASS (kind))
	set_source_range (ret, old_range.m_start, old_range.m_finish);
    }
  return ret;
}
690
691 /* Fold X for consideration by one of the warning functions when checking
692 whether an expression has a constant value. */
693
tree
fold_for_warn (tree x)
{
  /* The C front-end has already folded X appropriately, so there is
     nothing further to do before warning analysis.  */
  return x;
}
700