xref: /dragonfly/contrib/gcc-8.0/gcc/c/c-fold.c (revision 6491ace5)
/* Support for fully folding sub-trees of an expression for C compiler.
   Copyright (C) 1992-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "bitmap.h"
#include "c-tree.h"
#include "intl.h"
#include "gimplify.h"

static tree c_fully_fold_internal (tree expr, bool, bool *, bool *, bool,
				   bool);

/* If DISABLE is true, stop issuing warnings.  This is used when
   parsing code that we know will not be executed.  This function may
   be called multiple times, and works as a stack.  */

static void
c_disable_warnings (bool disable)
{
  if (disable)
    {
      ++c_inhibit_evaluation_warnings;
      fold_defer_overflow_warnings ();
    }
}

/* If ENABLE is true, reenable issuing warnings.  */

static void
c_enable_warnings (bool enable)
{
  if (enable)
    {
      --c_inhibit_evaluation_warnings;
      fold_undefer_and_ignore_overflow_warnings ();
    }
}

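/* A minimal usage sketch of the pair above (for illustration only; the
   real callers are the TRUTH_ANDIF_EXPR/TRUTH_ORIF_EXPR and COND_EXPR
   cases of c_fully_fold_internal below):

     c_disable_warnings (unused_p);
     op1 = c_fully_fold_internal (op1, ...);
     c_enable_warnings (unused_p);

   i.e. warnings are suppressed only while folding an operand known not
   to be evaluated, and the calls nest like a stack.  */
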
/* Try to fold ARRAY_REF ary[index] if possible and not handled by
   normal fold, return NULL_TREE otherwise.  */

static tree
c_fold_array_ref (tree type, tree ary, tree index)
{
  if (TREE_CODE (ary) != STRING_CST
      || TREE_CODE (index) != INTEGER_CST
      || TREE_OVERFLOW (index)
      || TREE_CODE (TREE_TYPE (ary)) != ARRAY_TYPE
      || !tree_fits_uhwi_p (index))
    return NULL_TREE;

  tree elem_type = TREE_TYPE (TREE_TYPE (ary));
  unsigned elem_nchars = (TYPE_PRECISION (elem_type)
			  / TYPE_PRECISION (char_type_node));
  unsigned len = (unsigned) TREE_STRING_LENGTH (ary) / elem_nchars;
  tree nelts = array_type_nelts (TREE_TYPE (ary));
  bool dummy1 = true, dummy2 = true;
  nelts = c_fully_fold_internal (nelts, true, &dummy1, &dummy2, false, false);
  unsigned HOST_WIDE_INT i = tree_to_uhwi (index);
  if (!tree_int_cst_le (index, nelts)
      || i >= len
      || i + elem_nchars > len)
    return NULL_TREE;

  if (elem_nchars == 1)
    return build_int_cst (type, TREE_STRING_POINTER (ary)[i]);

  const unsigned char *ptr
    = ((const unsigned char *)TREE_STRING_POINTER (ary) + i * elem_nchars);
  return native_interpret_expr (type, ptr, elem_nchars);
}
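
/* Illustrative example (not part of the upstream file): via the ARRAY_REF
   case of c_fully_fold_internal below, the helper above lets an
   initializer such as

     static const char c = "foo"[2];

   be reduced to the character constant 'o'; for string constants with
   wider element types, the native_interpret_expr path reassembles the
   element from its ELEM_NCHARS bytes.  */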

/* Fully fold EXPR, an expression that was not folded (beyond integer
   constant expressions and null pointer constants) when being built
   up.  If IN_INIT, this is in a static initializer and certain
   changes are made to the folding done.  Clear *MAYBE_CONST if
   MAYBE_CONST is not NULL and EXPR is definitely not a constant
   expression because it contains an evaluated operator (in C99) or an
   operator outside of sizeof returning an integer constant (in C90)
   not permitted in constant expressions, or because it contains an
   evaluated arithmetic overflow.  (*MAYBE_CONST should typically be
   set to true by callers before calling this function.)  Return the
   folded expression.  Function arguments have already been folded
   before calling this function, as have the contents of SAVE_EXPR,
   TARGET_EXPR, BIND_EXPR, VA_ARG_EXPR, OBJ_TYPE_REF and
   C_MAYBE_CONST_EXPR.  LVAL is true if it should be treated as an
   lvalue.  */

tree
c_fully_fold (tree expr, bool in_init, bool *maybe_const, bool lval)
{
  tree ret;
  tree eptype = NULL_TREE;
  bool dummy = true;
  bool maybe_const_itself = true;
  location_t loc = EXPR_LOCATION (expr);

  if (!maybe_const)
    maybe_const = &dummy;
  if (TREE_CODE (expr) == EXCESS_PRECISION_EXPR)
    {
      eptype = TREE_TYPE (expr);
      expr = TREE_OPERAND (expr, 0);
    }
  ret = c_fully_fold_internal (expr, in_init, maybe_const,
			       &maybe_const_itself, false, lval);
  if (eptype)
    ret = fold_convert_loc (loc, eptype, ret);
  *maybe_const &= maybe_const_itself;
  return ret;
}
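
/* A minimal calling sketch (for illustration only; the actual callers
   live elsewhere in the C front end):

     bool maybe_const = true;
     expr = c_fully_fold (expr, false, &maybe_const, false);

   after which MAYBE_CONST is false if EXPR is definitely not a constant
   expression in the sense described above.  */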

/* Internal helper for c_fully_fold.  EXPR and IN_INIT are as for
   c_fully_fold.  *MAYBE_CONST_OPERANDS is cleared because of operands
   not permitted, while *MAYBE_CONST_ITSELF is cleared because of
   arithmetic overflow (for C90, *MAYBE_CONST_OPERANDS is carried from
   both evaluated and unevaluated subexpressions while
   *MAYBE_CONST_ITSELF is carried from only evaluated
   subexpressions).  FOR_INT_CONST indicates if EXPR is an expression
   with integer constant operands, and if any of the operands doesn't
   get folded to an integer constant, don't fold the expression itself.
   LVAL indicates folding of an lvalue, where we can't replace it with
   an rvalue.  */

static tree
c_fully_fold_internal (tree expr, bool in_init, bool *maybe_const_operands,
		       bool *maybe_const_itself, bool for_int_const, bool lval)
{
  tree ret = expr;
  enum tree_code code = TREE_CODE (expr);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  location_t loc = EXPR_LOCATION (expr);
  tree op0, op1, op2, op3;
  tree orig_op0, orig_op1, orig_op2;
  bool op0_const = true, op1_const = true, op2_const = true;
  bool op0_const_self = true, op1_const_self = true, op2_const_self = true;
  bool nowarning = TREE_NO_WARNING (expr);
  bool unused_p;
  bool op0_lval = false;
  source_range old_range;

  /* Constants, declarations, statements, errors, and anything else not
     counted as an expression cannot usefully be folded further at this
     point.  */
  if (!IS_EXPR_CODE_CLASS (kind) || kind == tcc_statement)
    {
      /* Except for variables which we can optimize to their initializer.  */
      if (VAR_P (expr) && !lval && (optimize || in_init))
	{
	  if (in_init)
	    ret = decl_constant_value_1 (expr, true);
	  else
	    {
	      ret = decl_constant_value (expr);
	      if (ret != expr
		  && (TYPE_MODE (TREE_TYPE (ret)) == BLKmode
		      || TREE_CODE (TREE_TYPE (ret)) == ARRAY_TYPE))
		return expr;
	    }
	  /* Avoid unwanted tree sharing between the initializer and current
	     function's body where the tree can be modified e.g. by the
	     gimplifier.  */
	  if (ret != expr && TREE_STATIC (expr))
	    ret = unshare_expr (ret);
	  return ret;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    old_range = EXPR_LOCATION_RANGE (expr);

  /* Operands of variable-length expressions (function calls) have
     already been folded, as have __builtin_* function calls, and such
     expressions cannot occur in constant expressions.  */
  if (kind == tcc_vl_exp)
    {
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;
    }

  if (code == C_MAYBE_CONST_EXPR)
    {
      tree pre = C_MAYBE_CONST_EXPR_PRE (expr);
      tree inner = C_MAYBE_CONST_EXPR_EXPR (expr);
      if (C_MAYBE_CONST_EXPR_NON_CONST (expr))
	*maybe_const_operands = false;
      if (C_MAYBE_CONST_EXPR_INT_OPERANDS (expr))
	{
	  *maybe_const_itself = false;
	  inner = c_fully_fold_internal (inner, in_init, maybe_const_operands,
					 maybe_const_itself, true, lval);
	}
      if (pre && !in_init)
	ret = build2 (COMPOUND_EXPR, TREE_TYPE (expr), pre, inner);
      else
	ret = inner;
      goto out;
    }

  /* Assignment, increment, decrement, function call and comma
     operators, and statement expressions, cannot occur in constant
     expressions if evaluated / outside of sizeof.  (Function calls
     were handled above, though VA_ARG_EXPR is treated like a function
     call here, and statement expressions are handled through
     C_MAYBE_CONST_EXPR to avoid folding inside them.)  */
  switch (code)
    {
    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case COMPOUND_EXPR:
      *maybe_const_operands = false;
      break;

    case VA_ARG_EXPR:
    case TARGET_EXPR:
    case BIND_EXPR:
    case OBJ_TYPE_REF:
      *maybe_const_operands = false;
      ret = fold (expr);
      goto out;

    default:
      break;
    }

  /* Fold individual tree codes as appropriate.  */
  switch (code)
    {
    case COMPOUND_LITERAL_EXPR:
      /* Any non-constancy will have been marked in a containing
	 C_MAYBE_CONST_EXPR; there is no more folding to do here.  */
      goto out;

    case COMPONENT_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, lval);
      STRIP_TYPE_NOPS (op0);
      if (op0 != orig_op0)
	ret = build3 (COMPONENT_REF, TREE_TYPE (expr), op0, op1, op2);
      if (ret != expr)
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      if (!lval)
	ret = fold (ret);
      goto out;

    case ARRAY_REF:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op2 = TREE_OPERAND (expr, 2);
      op3 = TREE_OPERAND (expr, 3);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, lval);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      /* Fold "foo"[2] in initializers.  */
      if (!lval && in_init)
	{
	  ret = c_fold_array_ref (TREE_TYPE (expr), op0, op1);
	  if (ret)
	    goto out;
	  ret = expr;
	}
      if (op0 != orig_op0 || op1 != orig_op1)
	ret = build4 (ARRAY_REF, TREE_TYPE (expr), op0, op1, op2, op3);
      if (ret != expr)
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      if (!lval)
	ret = fold (ret);
      goto out;

    case MODIFY_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
      op0_lval = true;
      /* FALLTHRU */
    case COMPOUND_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case POINTER_PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case COMPLEX_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      /* Binary operations evaluating both arguments (increment and
	 decrement are binary internally in GCC).  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const,
				   op0_lval);
      STRIP_TYPE_NOPS (op0);
      /* The RHS of a MODIFY_EXPR was fully folded when building that
	 expression for the sake of conversion warnings.  */
      if (code != MODIFY_EXPR)
	op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				     maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);

      if (for_int_const && (TREE_CODE (op0) != INTEGER_CST
			    || TREE_CODE (op1) != INTEGER_CST))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
	ret = in_init
	  ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
	  : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
	ret = fold (expr);
      if (TREE_OVERFLOW_P (ret)
	  && !TREE_OVERFLOW_P (op0)
	  && !TREE_OVERFLOW_P (op1))
	overflow_warning (EXPR_LOC_OR_LOC (expr, input_location), ret, expr);
      if (code == LSHIFT_EXPR
	  && TREE_CODE (orig_op0) != INTEGER_CST
	  && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	  && TREE_CODE (op0) == INTEGER_CST
	  && c_inhibit_evaluation_warnings == 0
	  && tree_int_cst_sgn (op0) < 0)
	warning_at (loc, OPT_Wshift_negative_value,
		    "left shift of negative value");
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (orig_op1) != INTEGER_CST
	  && TREE_CODE (op1) == INTEGER_CST
	  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
	  && c_inhibit_evaluation_warnings == 0)
	{
	  if (tree_int_cst_sgn (op1) < 0)
	    warning_at (loc, OPT_Wshift_count_negative,
			(code == LSHIFT_EXPR
			 ? G_("left shift count is negative")
			 : G_("right shift count is negative")));
	  else if ((TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
		    || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
		   && compare_tree_int (op1,
					TYPE_PRECISION (TREE_TYPE (orig_op0)))
		      >= 0)
	    warning_at (loc, OPT_Wshift_count_overflow,
			(code == LSHIFT_EXPR
			 ? G_("left shift count >= width of type")
			 : G_("right shift count >= width of type")));
	  else if (TREE_CODE (TREE_TYPE (orig_op0)) == VECTOR_TYPE
		   && compare_tree_int (op1,
					TYPE_PRECISION (TREE_TYPE (TREE_TYPE (orig_op0))))
		      >= 0)
	    warning_at (loc, OPT_Wshift_count_overflow,
			code == LSHIFT_EXPR
			? G_("left shift count >= width of vector element")
			: G_("right shift count >= width of vector element"));
	}
      if (code == LSHIFT_EXPR
	  /* If either OP0 has been folded to INTEGER_CST...  */
	  && ((TREE_CODE (orig_op0) != INTEGER_CST
	       && TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	       && TREE_CODE (op0) == INTEGER_CST)
	      /* ...or if OP1 has been folded to INTEGER_CST...  */
	      || (TREE_CODE (orig_op1) != INTEGER_CST
		  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE
		  && TREE_CODE (op1) == INTEGER_CST))
	  && c_inhibit_evaluation_warnings == 0)
	/* ...then maybe we can detect an overflow.  */
	maybe_warn_shift_overflow (loc, op0, op1);
      if ((code == TRUNC_DIV_EXPR
	   || code == CEIL_DIV_EXPR
	   || code == FLOOR_DIV_EXPR
	   || code == EXACT_DIV_EXPR
	   || code == TRUNC_MOD_EXPR)
	  && TREE_CODE (orig_op1) != INTEGER_CST
	  && TREE_CODE (op1) == INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (orig_op0)) == INTEGER_TYPE
	      || TREE_CODE (TREE_TYPE (orig_op0)) == FIXED_POINT_TYPE)
	  && TREE_CODE (TREE_TYPE (orig_op1)) == INTEGER_TYPE)
	warn_for_div_by_zero (loc, op1);
      goto out;
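
      /* Illustrative note (not part of the upstream file): the shift and
	 division diagnostics above fire only when the relevant operand
	 was not an INTEGER_CST as written (ORIG_OP0 / ORIG_OP1) but has
	 been folded to one here, presumably because operands that are
	 already constant in the source are diagnosed when the expression
	 is built rather than when it is fully folded.  */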

    case ADDR_EXPR:
      op0_lval = true;
      goto unary;
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      op0_lval = lval;
      /* FALLTHRU */
    case INDIRECT_REF:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    CASE_CONVERT:
    case ADDR_SPACE_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case CONJ_EXPR:
    unary:
      /* Unary operations.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const,
				   op0_lval);
      STRIP_TYPE_NOPS (op0);

      if (for_int_const && TREE_CODE (op0) != INTEGER_CST)
	goto out;

      /* ??? Cope with user tricks that amount to offsetof.  The middle-end is
	 not prepared to deal with them if they occur in initializers.  */
      if (op0 != orig_op0
	  && code == ADDR_EXPR
	  && (op1 = get_base_address (op0)) != NULL_TREE
	  && INDIRECT_REF_P (op1)
	  && TREE_CONSTANT (TREE_OPERAND (op1, 0)))
	ret = fold_offsetof (op0, TREE_TYPE (expr));
      else if (op0 != orig_op0 || in_init)
	ret = in_init
	  ? fold_build1_initializer_loc (loc, code, TREE_TYPE (expr), op0)
	  : fold_build1_loc (loc, code, TREE_TYPE (expr), op0);
      else
	ret = fold (expr);
      if (code == INDIRECT_REF
	  && ret != expr
	  && INDIRECT_REF_P (ret))
	{
	  TREE_READONLY (ret) = TREE_READONLY (expr);
	  TREE_SIDE_EFFECTS (ret) = TREE_SIDE_EFFECTS (expr);
	  TREE_THIS_VOLATILE (ret) = TREE_THIS_VOLATILE (expr);
	}
      switch (code)
	{
	case FIX_TRUNC_EXPR:
	case FLOAT_EXPR:
	CASE_CONVERT:
	  /* Don't warn about explicit conversions.  We will already
	     have warned about suspect implicit conversions.  */
	  break;

	default:
	  if (TREE_OVERFLOW_P (ret) && !TREE_OVERFLOW_P (op0))
	    overflow_warning (EXPR_LOCATION (expr), ret, op0);
	  break;
	}
      goto out;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Binary operations not necessarily evaluating both
	 arguments.  */
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
				   for_int_const, false);
      STRIP_TYPE_NOPS (op0);

      unused_p = (op0 == (code == TRUTH_ANDIF_EXPR
			  ? truthvalue_false_node
			  : truthvalue_true_node));
      c_disable_warnings (unused_p);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
				   for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (unused_p);

      if (for_int_const
	  && (TREE_CODE (op0) != INTEGER_CST
	      /* Require OP1 be an INTEGER_CST only if it's evaluated.  */
	      || (!unused_p && TREE_CODE (op1) != INTEGER_CST)))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || in_init)
	ret = in_init
	  ? fold_build2_initializer_loc (loc, code, TREE_TYPE (expr), op0, op1)
	  : fold_build2_loc (loc, code, TREE_TYPE (expr), op0, op1);
      else
	ret = fold (expr);
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && (code == TRUTH_ANDIF_EXPR
		? op0 == truthvalue_false_node
		: op0 == truthvalue_true_node)))
	*maybe_const_operands &= op1_const;
      if (!(op0_const
	    && op0_const_self
	    && (code == TRUTH_ANDIF_EXPR
		? op0 == truthvalue_false_node
		: op0 == truthvalue_true_node)))
	*maybe_const_itself &= op1_const_self;
      goto out;
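
      /* Illustrative note (not part of the upstream file): per the C90
	 vs. C99 distinction documented above c_fully_fold_internal, when
	 OP0 folds to the value that short-circuits the expression (false
	 for &&, true for ||), the flag_isoc99 test keeps disqualifying
	 operands in the unevaluated OP1 from clearing
	 *MAYBE_CONST_OPERANDS, whereas for C90 they are propagated even
	 though OP1 is not evaluated.  */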

    case COND_EXPR:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, &op0_const, &op0_const_self,
				   for_int_const, false);

      STRIP_TYPE_NOPS (op0);
      c_disable_warnings (op0 == truthvalue_false_node);
      op1 = c_fully_fold_internal (op1, in_init, &op1_const, &op1_const_self,
				   for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      c_enable_warnings (op0 == truthvalue_false_node);

      c_disable_warnings (op0 == truthvalue_true_node);
      op2 = c_fully_fold_internal (op2, in_init, &op2_const, &op2_const_self,
				   for_int_const, false);
      STRIP_TYPE_NOPS (op2);
      c_enable_warnings (op0 == truthvalue_true_node);

      if (for_int_const
	  && (TREE_CODE (op0) != INTEGER_CST
	      /* Only the evaluated operand must be an INTEGER_CST.  */
	      || (op0 == truthvalue_true_node
		  ? TREE_CODE (op1) != INTEGER_CST
		  : TREE_CODE (op2) != INTEGER_CST)))
	goto out;

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
	ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
	ret = fold (expr);
      *maybe_const_operands &= op0_const;
      *maybe_const_itself &= op0_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && op0 == truthvalue_false_node))
	*maybe_const_operands &= op1_const;
      if (!(op0_const
	    && op0_const_self
	    && op0 == truthvalue_false_node))
	*maybe_const_itself &= op1_const_self;
      if (!(flag_isoc99
	    && op0_const
	    && op0_const_self
	    && op0 == truthvalue_true_node))
	*maybe_const_operands &= op2_const;
      if (!(op0_const
	    && op0_const_self
	    && op0 == truthvalue_true_node))
	*maybe_const_itself &= op2_const_self;
      goto out;
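
      /* Illustrative note (not part of the upstream file): this mirrors
	 the TRUTH_ANDIF_EXPR/TRUTH_ORIF_EXPR handling; when the condition
	 folds to a constant, only the selected arm affects
	 *MAYBE_CONST_ITSELF, and under C99 only that arm affects
	 *MAYBE_CONST_OPERANDS as well.  */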

    case VEC_COND_EXPR:
      orig_op0 = op0 = TREE_OPERAND (expr, 0);
      orig_op1 = op1 = TREE_OPERAND (expr, 1);
      orig_op2 = op2 = TREE_OPERAND (expr, 2);
      op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op0);
      op1 = c_fully_fold_internal (op1, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op1);
      op2 = c_fully_fold_internal (op2, in_init, maybe_const_operands,
				   maybe_const_itself, for_int_const, false);
      STRIP_TYPE_NOPS (op2);

      if (op0 != orig_op0 || op1 != orig_op1 || op2 != orig_op2)
	ret = fold_build3_loc (loc, code, TREE_TYPE (expr), op0, op1, op2);
      else
	ret = fold (expr);
      goto out;

    case EXCESS_PRECISION_EXPR:
      /* Each case where an operand with excess precision may be
	 encountered must remove the EXCESS_PRECISION_EXPR around
	 inner operands and possibly put one around the whole
	 expression or possibly convert to the semantic type (which
	 c_fully_fold does); we cannot tell at this stage which is
	 appropriate in any particular case.  */
      gcc_unreachable ();

    case SAVE_EXPR:
      /* Make sure to fold the contents of a SAVE_EXPR exactly once.  */
      op0 = TREE_OPERAND (expr, 0);
      if (!SAVE_EXPR_FOLDED_P (expr))
	{
	  op0 = c_fully_fold_internal (op0, in_init, maybe_const_operands,
				       maybe_const_itself, for_int_const,
				       false);
	  TREE_OPERAND (expr, 0) = op0;
	  SAVE_EXPR_FOLDED_P (expr) = true;
	}
      /* Return the SAVE_EXPR operand if it is invariant.  */
      if (tree_invariant_p (op0))
	ret = op0;
      goto out;

    default:
      /* Various codes may appear through folding built-in functions
	 and their arguments.  */
      goto out;
    }

 out:
  /* Some folding may introduce NON_LVALUE_EXPRs; all lvalue checks
     have been done by this point, so remove them again.  */
  nowarning |= TREE_NO_WARNING (ret);
  STRIP_TYPE_NOPS (ret);
  if (nowarning && !TREE_NO_WARNING (ret))
    {
      if (!CAN_HAVE_LOCATION_P (ret))
	ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      TREE_NO_WARNING (ret) = 1;
    }
  if (ret != expr)
    {
      protected_set_expr_location (ret, loc);
      if (IS_EXPR_CODE_CLASS (kind))
	set_source_range (ret, old_range.m_start, old_range.m_finish);
    }
  return ret;
}

/* Fold X for consideration by one of the warning functions when checking
   whether an expression has a constant value.  */

tree
fold_for_warn (tree x)
{
  /* The C front-end has already folded X appropriately.  */
  return x;
}