1 /* Fold a constant sub-tree into a single node for C-compiler
2    Copyright (C) 1987-2014 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /*@@ This file should be rewritten to use an arbitrary precision
21   @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22   @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23   @@ The routines that translate from the ap rep should
24   @@ warn if precision et. al. is lost.
25   @@ This would also make life easier when this technology is used
26   @@ for cross-compilers.  */
27 
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29 
30    fold takes a tree as argument and returns a simplified tree.
31 
32    size_binop takes a tree code for an arithmetic operation
33    and two operands that are trees, and produces a tree for the
34    result, assuming the type comes from `sizetype'.
35 
36    size_int takes an integer value, and creates a tree constant
37    with type from `sizetype'.
38 
39    Note: Since the folders get called on non-gimple code as well as
40    gimple code, we need to handle GIMPLE tuples as well as their
41    corresponding tree equivalents.  */
42 
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "stor-layout.h"
50 #include "calls.h"
51 #include "tree-iterator.h"
52 #include "realmpfr.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "target.h"
57 #include "diagnostic-core.h"
58 #include "intl.h"
59 #include "langhooks.h"
60 #include "md5.h"
61 #include "basic-block.h"
62 #include "tree-ssa-alias.h"
63 #include "internal-fn.h"
64 #include "tree-eh.h"
65 #include "gimple-expr.h"
66 #include "is-a.h"
67 #include "gimple.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
71 
72 /* Nonzero if we are folding constants inside an initializer; zero
73    otherwise.  */
74 int folding_initializer = 0;
75 
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.

   The low three bits encode the ordered outcomes: bit 0 = "less
   than", bit 1 = "equal", bit 2 = "greater than"; bit 3 = "unordered"
   (at least one operand is a NaN).  Composite codes are the bitwise
   OR of their components, e.g. COMPCODE_LE == COMPCODE_LT |
   COMPCODE_EQ, so ANDing/ORing two codes yields the code of the
   conjunction/disjunction of the comparisons.  */
enum comparison_code {
  COMPCODE_FALSE = 0,	/* Never true.  */
  COMPCODE_LT = 1,	/* Bit 0: less than.  */
  COMPCODE_EQ = 2,	/* Bit 1: equal.  */
  COMPCODE_LE = 3,	/* LT | EQ.  */
  COMPCODE_GT = 4,	/* Bit 2: greater than.  */
  COMPCODE_LTGT = 5,	/* LT | GT (ordered and not equal).  */
  COMPCODE_GE = 6,	/* EQ | GT.  */
  COMPCODE_ORD = 7,	/* LT | EQ | GT (any ordered outcome).  */
  COMPCODE_UNORD = 8,	/* Bit 3: unordered.  */
  COMPCODE_UNLT = 9,	/* UNORD | LT.  */
  COMPCODE_UNEQ = 10,	/* UNORD | EQ.  */
  COMPCODE_UNLE = 11,	/* UNORD | LT | EQ.  */
  COMPCODE_UNGT = 12,	/* UNORD | GT.  */
  COMPCODE_NE = 13,	/* UNORD | LT | GT.  */
  COMPCODE_UNGE = 14,	/* UNORD | EQ | GT.  */
  COMPCODE_TRUE = 15	/* Always true.  */
};
97 
98 static bool negate_mathfn_p (enum built_in_function);
99 static bool negate_expr_p (tree);
100 static tree negate_expr (tree);
101 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
103 static tree const_binop (enum tree_code, tree, tree);
104 static enum comparison_code comparison_to_compcode (enum tree_code);
105 static enum tree_code compcode_to_comparison (enum comparison_code);
106 static int operand_equal_for_comparison_p (tree, tree, tree);
107 static int twoval_comparison_p (tree, tree *, tree *, int *);
108 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
109 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
110 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
111 static tree make_bit_field_ref (location_t, tree, tree,
112 				HOST_WIDE_INT, HOST_WIDE_INT, int);
113 static tree optimize_bit_field_compare (location_t, enum tree_code,
114 					tree, tree, tree);
115 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
116 				    HOST_WIDE_INT *,
117 				    enum machine_mode *, int *, int *,
118 				    tree *, tree *);
119 static int all_ones_mask_p (const_tree, int);
120 static tree sign_bit_p (tree, const_tree);
121 static int simple_operand_p (const_tree);
122 static bool simple_operand_p_2 (tree);
123 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
124 static tree range_predecessor (tree);
125 static tree range_successor (tree);
126 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree optimize_minmax_comparison (location_t, enum tree_code,
130 					tree, tree, tree);
131 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
132 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
133 static tree fold_binary_op_with_conditional_arg (location_t,
134 						 enum tree_code, tree,
135 						 tree, tree,
136 						 tree, tree, int);
137 static tree fold_mathfn_compare (location_t,
138 				 enum built_in_function, enum tree_code,
139 				 tree, tree, tree);
140 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
141 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (const_tree, const_tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (const_tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
146 static tree fold_convert_const (enum tree_code, tree, tree);
147 
148 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
149    Otherwise, return LOC.  */
150 
151 static location_t
expr_location_or(tree t,location_t loc)152 expr_location_or (tree t, location_t loc)
153 {
154   location_t tloc = EXPR_LOCATION (t);
155   return tloc == UNKNOWN_LOCATION ? loc : tloc;
156 }
157 
158 /* Similar to protected_set_expr_location, but never modify x in place,
159    if location can and needs to be set, unshare it.  */
160 
161 static inline tree
protected_set_expr_location_unshare(tree x,location_t loc)162 protected_set_expr_location_unshare (tree x, location_t loc)
163 {
164   if (CAN_HAVE_LOCATION_P (x)
165       && EXPR_LOCATION (x) != loc
166       && !(TREE_CODE (x) == SAVE_EXPR
167 	   || TREE_CODE (x) == TARGET_EXPR
168 	   || TREE_CODE (x) == BIND_EXPR))
169     {
170       x = copy_node (x);
171       SET_EXPR_LOCATION (x, loc);
172     }
173   return x;
174 }
175 
176 /* If ARG2 divides ARG1 with zero remainder, carries out the division
177    of type CODE and returns the quotient.
178    Otherwise returns NULL_TREE.  */
179 
180 tree
div_if_zero_remainder(enum tree_code code,const_tree arg1,const_tree arg2)181 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
182 {
183   double_int quo, rem;
184   int uns;
185 
186   /* The sign of the division is according to operand two, that
187      does the correct thing for POINTER_PLUS_EXPR where we want
188      a signed division.  */
189   uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
190 
191   quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
192 					  uns, code, &rem);
193 
194   if (rem.is_zero ())
195     return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
196 
197   return NULL_TREE;
198 }
199 
200 /* This is nonzero if we should defer warnings about undefined
201    overflow.  This facility exists because these warnings are a
202    special case.  The code to estimate loop iterations does not want
203    to issue any warnings, since it works with expressions which do not
204    occur in user code.  Various bits of cleanup code call fold(), but
205    only use the result if it has certain characteristics (e.g., is a
206    constant); that code only wants to issue a warning if the result is
207    used.  */
208 
209 static int fold_deferring_overflow_warnings;
210 
211 /* If a warning about undefined overflow is deferred, this is the
212    warning.  Note that this may cause us to turn two warnings into
213    one, but that is fine since it is sufficient to only give one
214    warning per expression.  */
215 
216 static const char* fold_deferred_overflow_warning;
217 
218 /* If a warning about undefined overflow is deferred, this is the
219    level at which the warning should be emitted.  */
220 
221 static enum warn_strict_overflow_code fold_deferred_overflow_code;
222 
223 /* Start deferring overflow warnings.  We could use a stack here to
224    permit nested calls, but at present it is not necessary.  */
225 
226 void
fold_defer_overflow_warnings(void)227 fold_defer_overflow_warnings (void)
228 {
229   ++fold_deferring_overflow_warnings;
230 }
231 
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      /* An outer deferral is still active: do not warn yet, but fold
	 CODE into the deferred code, keeping the smaller value.  */
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  /* Take ownership of the pending message and clear it, so the
     warning is issued at most once.  */
  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  /* Respect a per-statement request to suppress warnings.  */
  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  /* With no statement to hang the warning on, fall back to the
     global input location.  */
  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
281 
282 /* Stop deferring overflow warnings, ignoring any deferred
283    warnings.  */
284 
285 void
fold_undefer_and_ignore_overflow_warnings(void)286 fold_undefer_and_ignore_overflow_warnings (void)
287 {
288   fold_undefer_overflow_warnings (false, NULL, 0);
289 }
290 
291 /* Whether we are deferring overflow warnings.  */
292 
293 bool
fold_deferring_overflow_warnings_p(void)294 fold_deferring_overflow_warnings_p (void)
295 {
296   return fold_deferring_overflow_warnings > 0;
297 }
298 
299 /* This is called when we fold something based on the fact that signed
300    overflow is undefined.  */
301 
302 static void
fold_overflow_warning(const char * gmsgid,enum warn_strict_overflow_code wc)303 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
304 {
305   if (fold_deferring_overflow_warnings > 0)
306     {
307       if (fold_deferred_overflow_warning == NULL
308 	  || wc < fold_deferred_overflow_code)
309 	{
310 	  fold_deferred_overflow_warning = gmsgid;
311 	  fold_deferred_overflow_code = wc;
312 	}
313     }
314   else if (issue_strict_overflow_warning (wc))
315     warning (OPT_Wstrict_overflow, gmsgid);
316 }
317 
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    /* These functions are unconditionally odd.  */
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    /* These rounding functions are only treated as odd when the
       rounding mode cannot vary at run time.  */
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
362 
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  /* Negation of an unsigned constant is not considered safe here.  */
  if (TYPE_UNSIGNED (type))
    return false;

  /* For a signed type, the only value whose negation overflows is the
     minimum, whose bit pattern is a lone sign bit followed by zeros.
     Reduce the two-word constant to the single word that decides.  */
  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Any set bit in the low word rules out the minimum value.  */
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      /* Low word is zero: test the high word against the sign-bit
	 pattern of the remaining precision.  */
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  /* Mask off bits beyond the type's precision before comparing.  */
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
393 
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.
   This predicate mirrors the transformations actually performed by
   fold_negate_expr; keep the two in sync.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      /* With wrapping overflow, any integer constant negates.  */
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      /* -~A is A + 1, valid when arithmetic wraps.  */
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      /* Both parts must be negatable.  */
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Otherwise every element must be negatable.  */
	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* -(A * B) and -(A / B) can push the negation into either
	 operand.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	    break;
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
	    return true;
	}
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  /* Only when the shift count equals precision - 1, i.e. the
	     result is the sign bit.  */
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
545 
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  The cases here mirror negate_expr_p; keep both in sync.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      /* Use the negated constant unless negating introduced a new
	 overflow and overflow traps in TYPE.  */
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	/* Only rebuild the complex constant if both parts folded to
	   constants again.  */
	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	/* Negate element-wise; give up if any element fails.  */
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
			    fold_negate_expr (loc, TREE_OPERAND (t, 0)),
			    fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
			    fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      /* - -A is simply A.  */
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* Push the negation into whichever operand of a multiplication
	 or real division is negatable, preferring operand 1.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
	    {
	      /* Warn unless the divisor is a constant other than 1
		 (negating such a divisor cannot rely on undefined
		 overflow).  */
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				  TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
	  if ((INTEGRAL_TYPE_P (type)
	       && (TREE_CODE (tem) == NEGATE_EXPR
		   || (TREE_CODE (tem) == INTEGER_CST
		       && may_negate_without_overflow_p (tem))))
	      || !INTEGRAL_TYPE_P (type))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  /* Only when the shift count is precision - 1, i.e. the
	     result is the (sign-extended) sign bit: flip the
	     signedness of the shift instead of negating.  */
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
768 
769 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
770    negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
771    return NULL_TREE. */
772 
773 static tree
negate_expr(tree t)774 negate_expr (tree t)
775 {
776   tree type, tem;
777   location_t loc;
778 
779   if (t == NULL_TREE)
780     return NULL_TREE;
781 
782   loc = EXPR_LOCATION (t);
783   type = TREE_TYPE (t);
784   STRIP_SIGN_NOPS (t);
785 
786   tem = fold_negate_expr (loc, t);
787   if (!tem)
788     tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
789   return fold_convert_loc (loc, type, tem);
790 }
791 
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    /* IN is itself a literal.  */
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      /* neg1_p: operand 1 is subtracted rather than added.  The
	 neg_*_p flags record which extracted part, if any, came from
	 that subtracted position and so must be negated below.  */
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	/* A subtracted literal is reported via *MINUS_LITP rather
	   than being rewritten as a negated constant.  */
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      /* Negating the whole decomposition: swap the literal between
	 *LITP and *MINUS_LITP, and negate the other two parts.  */
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
897 
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  /* A null operand means "nothing to combine": return the other.  */
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  /* X + (-Y) is built as X - Y instead.  */
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  /* X - 0 is just X.  */
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      /* Build the operation without folding (see comment above).  */
      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
945 
946 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
947    for use in int_const_binop, size_binop and size_diffop.  */
948 
949 static bool
int_binop_types_match_p(enum tree_code code,const_tree type1,const_tree type2)950 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
951 {
952   if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
953     return false;
954   if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
955     return false;
956 
957   switch (code)
958     {
959     case LSHIFT_EXPR:
960     case RSHIFT_EXPR:
961     case LROTATE_EXPR:
962     case RROTATE_EXPR:
963       return true;
964 
965     default:
966       break;
967     }
968 
969   return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
970 	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
971 	 && TYPE_MODE (type1) == TYPE_MODE (type2);
972 }
973 
974 
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  OVERFLOWABLE is forwarded to
   force_fit_type_double; the overflow flag passed along with it is
   set for signed arithmetic overflow or a sticky overflow bit on
   either input constant.  The result type is taken from ARG1.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
		   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
	{
	  bool dummy_overflow;
	  /* Only exactly double-word precision can be handled by
	     wide_mul_with_sign; give up on anything else.  */
	  if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
	    return NULL_TREE;
	  op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
	}
      else
	{
	  bool dummy_overflow;
	  /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
	     is performed in twice the precision of arguments.  */
	  tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
	  res = tmp.rshift (TYPE_PRECISION (type),
			    2 * TYPE_PRECISION (type), !uns);
	}
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case: both operands
	 fit in a single positive host word, so plain host division
	 gives the right answer for every rounding style.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    op1.low += op2.low - 1;

	  res.low = op1.low / op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
	return NULL_TREE;
      if (op2.is_one ())
	{
	  res = op1;
	  break;
	}
      if (op1 == op2 && !op1.is_zero ())
	{
	  res = double_int_one;
	  break;
	}
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case (see the
	 division shortcut above).  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    op1.low += op2.low - 1;
	  res.low = op1.low % op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
	return NULL_TREE;

      /* Check for the case of INT_MIN % -1 and return
       overflow and result = 0.  The TImode case is handled properly
       in double-int.  */
      if (TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT
	  && !uns
          && op2.is_minus_one ()
	  && op1.high == (HOST_WIDE_INT) -1
	  && (HOST_WIDE_INT) op1.low
	  == (((HOST_WIDE_INT)-1) << (TYPE_PRECISION (type) - 1)))
	{
	  overflow = 1;
	  res = double_int_zero;
	}
      else
	tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  /* Signed overflow is only reported as such; unsigned wrap-around is
     silent unless an input already carried TREE_OVERFLOW.  */
  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
			     (!uns && overflow)
			     | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
1145 
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  This is the public entry point;
   it simply calls int_const_binop_1 with OVERFLOWABLE set to 1.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
1151 
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.
   Dispatches on the kind of ARG1: integer, real, fixed-point, complex or
   vector constant.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  /* Look through no-op conversions to the underlying constants.  */
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may dependent upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  /* For shifts the second operand is an INTEGER_CST; wrap it
	     in a fixed value so fixed_arithmetic can consume it.  */
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

        default:
	  return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  /* (r1 + i1*i) * (r2 + i2*i)
	     = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ... */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	  {
	    /* Keep this algorithm in sync with
	       tree-complex.c:expand_complex_div_straight().

	       Expand complex division to scalars, straightforward algorithm.
	       a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
	       t = br*br + bi*bi
	    */
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2),
			     const_binop (MULT_EXPR, i2, i2));
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2),
			     const_binop (MULT_EXPR, i1, i2));
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2),
			     const_binop (MULT_EXPR, r1, i2));

	    real = const_binop (code, t1, magsquared);
	    imag = const_binop (code, t2, magsquared);
	  }
	  else
	  {
	    /* Keep this algorithm in sync with
               tree-complex.c:expand_complex_div_wide().

	       Expand complex division to scalars, modified algorithm to minimize
	       overflow with wide input ranges.  */
	    tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					fold_abs_const (r2, TREE_TYPE (type)),
					fold_abs_const (i2, TREE_TYPE (type)));

	    if (integer_nonzerop (compare))
	      {
		/* In the TRUE branch, we compute
		   ratio = br/bi;
		   div = (br * ratio) + bi;
		   tr = (ar * ratio) + ai;
		   ti = (ai * ratio) - ar;
		   tr = tr / div;
		   ti = ti / div;  */
		tree ratio = const_binop (code, r2, i2);
		tree div = const_binop (PLUS_EXPR, i2,
					const_binop (MULT_EXPR, r2, ratio));
		real = const_binop (MULT_EXPR, r1, ratio);
		real = const_binop (PLUS_EXPR, real, i1);
		real = const_binop (code, real, div);

		imag = const_binop (MULT_EXPR, i1, ratio);
		imag = const_binop (MINUS_EXPR, imag, r1);
		imag = const_binop (code, imag, div);
	      }
	    else
	      {
		/* In the FALSE branch, we compute
		   ratio = d/c;
		   divisor = (d * ratio) + c;
		   tr = (b * ratio) + a;
		   ti = b - (a * ratio);
		   tr = tr / div;
		   ti = ti / div;  */
		tree ratio = const_binop (code, i2, r2);
		tree div = const_binop (PLUS_EXPR, r2,
                                        const_binop (MULT_EXPR, i2, ratio));

		real = const_binop (MULT_EXPR, i1, ratio);
		real = const_binop (PLUS_EXPR, real, r1);
		real = const_binop (code, real, div);

		imag = const_binop (MULT_EXPR, r1, ratio);
		imag = const_binop (MINUS_EXPR, i1, imag);
		imag = const_binop (code, imag, div);
	      }
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      /* Only rebuild the complex constant if both parts folded.  */
      if (real && imag)
	return build_complex (type, real, imag);
    }

  /* Element-wise combination of two vector constants.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
	  || code == VEC_RSHIFT_EXPR)
	{
	  if (!tree_fits_uhwi_p (arg2))
	    return NULL_TREE;

	  /* Whole-vector shifts move by a multiple of the element
	     size; convert the bit count into an element offset.  */
	  unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
	  unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
	  unsigned HOST_WIDE_INT innerc
	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
	  if (shiftc >= outerc || (shiftc % innerc) != 0)
	    return NULL_TREE;
	  int offset = shiftc / innerc;
	  /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
	     For reductions, compiler emits VEC_RSHIFT_EXPR always,
	     for !BYTES_BIG_ENDIAN picks first vector element, but
	     for BYTES_BIG_ENDIAN last element from the vector.  */
	  if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
	    offset = -offset;
	  tree zero = build_zero_cst (TREE_TYPE (type));
	  for (i = 0; i < count; i++)
	    {
	      if (i + offset < 0 || i + offset >= count)
		elts[i] = zero;
	      else
		elts[i] = VECTOR_CST_ELT (arg1, i + offset);
	    }
	}
      else
	for (i = 0; i < count; i++)
	  {
	    tree elem1 = VECTOR_CST_ELT (arg1, i);

	    elts[i] = const_binop (code, elem1, arg2);

	    /* It is possible that const_binop cannot handle the given
	       code and return NULL_TREE.  */
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
1494 
1495 /* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
1496    indicates which particular sizetype to create.  */
1497 
1498 tree
size_int_kind(HOST_WIDE_INT number,enum size_type_kind kind)1499 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1500 {
1501   return build_int_cst (sizetype_tab[(int) kind], number);
1502 }
1503 
1504 /* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
1505    is a tree code.  The type of the result is taken from the operands.
1506    Both must be equivalent integer types, ala int_binop_types_match_p.
1507    If the operands are constant, so is the result.  */
1508 
1509 tree
size_binop_loc(location_t loc,enum tree_code code,tree arg0,tree arg1)1510 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1511 {
1512   tree type = TREE_TYPE (arg0);
1513 
1514   if (arg0 == error_mark_node || arg1 == error_mark_node)
1515     return error_mark_node;
1516 
1517   gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1518                                        TREE_TYPE (arg1)));
1519 
1520   /* Handle the special case of two integer constants faster.  */
1521   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1522     {
1523       /* And some specific cases even faster than that.  */
1524       if (code == PLUS_EXPR)
1525 	{
1526 	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1527 	    return arg1;
1528 	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1529 	    return arg0;
1530 	}
1531       else if (code == MINUS_EXPR)
1532 	{
1533 	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1534 	    return arg0;
1535 	}
1536       else if (code == MULT_EXPR)
1537 	{
1538 	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1539 	    return arg1;
1540 	}
1541 
1542       /* Handle general case of two integer constants.  For sizetype
1543          constant calculations we always want to know about overflow,
1544 	 even in the unsigned case.  */
1545       return int_const_binop_1 (code, arg0, arg1, -1);
1546     }
1547 
1548   return fold_build2_loc (loc, code, type, arg0, arg1);
1549 }
1550 
1551 /* Given two values, either both of sizetype or both of bitsizetype,
1552    compute the difference between the two values.  Return the value
1553    in signed type corresponding to the type of the operands.  */
1554 
1555 tree
size_diffop_loc(location_t loc,tree arg0,tree arg1)1556 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1557 {
1558   tree type = TREE_TYPE (arg0);
1559   tree ctype;
1560 
1561   gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1562 				       TREE_TYPE (arg1)));
1563 
1564   /* If the type is already signed, just do the simple thing.  */
1565   if (!TYPE_UNSIGNED (type))
1566     return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1567 
1568   if (type == sizetype)
1569     ctype = ssizetype;
1570   else if (type == bitsizetype)
1571     ctype = sbitsizetype;
1572   else
1573     ctype = signed_type_for (type);
1574 
1575   /* If either operand is not a constant, do the conversions to the signed
1576      type and subtract.  The hardware will do the right thing with any
1577      overflow in the subtraction.  */
1578   if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1579     return size_binop_loc (loc, MINUS_EXPR,
1580 			   fold_convert_loc (loc, ctype, arg0),
1581 			   fold_convert_loc (loc, ctype, arg1));
1582 
1583   /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1584      Otherwise, subtract the other way, convert to CTYPE (we know that can't
1585      overflow) and negate (which can't either).  Special-case a result
1586      of zero while we're here.  */
1587   if (tree_int_cst_equal (arg0, arg1))
1588     return build_int_cst (ctype, 0);
1589   else if (tree_int_cst_lt (arg1, arg0))
1590     return fold_convert_loc (loc, ctype,
1591 			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1592   else
1593     return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1594 			   fold_convert_loc (loc, ctype,
1595 					     size_binop_loc (loc,
1596 							     MINUS_EXPR,
1597 							     arg1, arg0)));
1598 }
1599 
1600 /* A subroutine of fold_convert_const handling conversions of an
1601    INTEGER_CST to another integer type.  */
1602 
1603 static tree
fold_convert_const_int_from_int(tree type,const_tree arg1)1604 fold_convert_const_int_from_int (tree type, const_tree arg1)
1605 {
1606   tree t;
1607 
1608   /* Given an integer constant, make new constant with new type,
1609      appropriately sign-extended or truncated.  */
1610   t = force_fit_type_double (type, tree_to_double_int (arg1),
1611 			     !POINTER_TYPE_P (TREE_TYPE (arg1)),
1612 			     (TREE_INT_CST_HIGH (arg1) < 0
1613 		 	      && (TYPE_UNSIGNED (type)
1614 				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1615 			     | TREE_OVERFLOW (arg1));
1616 
1617   return t;
1618 }
1619 
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   ARG1 to the integer type TYPE.  CODE selects the rounding style; only
   FIX_TRUNC_EXPR is implemented.  NaNs convert to zero; out-of-range
   values saturate at TYPE's min/max, and all three cases mark the
   result with TREE_OVERFLOW.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  /* Saturate at the type's minimum value.  */
	  overflow = 1;
	  val = tree_to_double_int (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      /* Saturate at the type's maximum value.  */
	      overflow = 1;
	      val = tree_to_double_int (ut);
	    }
	}
    }

  /* The value is in range: do the real conversion.  */
  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
1693 
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST ARG1 to the integer type TYPE.  The fractional bits are
   shifted out and negative values are rounded toward zero.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
			  HOST_BITS_PER_DOUBLE_INT,
			  SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      /* All bits are fractional: the integer part is zero.  */
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     By checking if the fractional bits are not zero to add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  Overflow is flagged
     when a negative value goes to a less-unsigned... rather: when
     TYPE_UNSIGNED (type) < TYPE_UNSIGNED (source) for a negative
     value, or when ARG1 already overflowed.  */
  t = force_fit_type_double (type, temp, -1,
			     (temp.is_negative ()
		 	      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
1741 
1742 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1743    to another floating point type.  */
1744 
1745 static tree
fold_convert_const_real_from_real(tree type,const_tree arg1)1746 fold_convert_const_real_from_real (tree type, const_tree arg1)
1747 {
1748   REAL_VALUE_TYPE value;
1749   tree t;
1750 
1751   real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1752   t = build_real (type, value);
1753 
1754   /* If converting an infinity or NAN to a representation that doesn't
1755      have one, set the overflow bit so that we can produce some kind of
1756      error message at the appropriate point if necessary.  It's not the
1757      most user-friendly message, but it's better than nothing.  */
1758   if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1759       && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1760     TREE_OVERFLOW (t) = 1;
1761   else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1762 	   && !MODE_HAS_NANS (TYPE_MODE (type)))
1763     TREE_OVERFLOW (t) = 1;
1764   /* Regular overflow, conversion produced an infinity in a mode that
1765      can't represent them.  */
1766   else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1767 	   && REAL_VALUE_ISINF (value)
1768 	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1769     TREE_OVERFLOW (t) = 1;
1770   else
1771     TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1772   return t;
1773 }
1774 
1775 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1776    to a floating point type.  */
1777 
1778 static tree
fold_convert_const_real_from_fixed(tree type,const_tree arg1)1779 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1780 {
1781   REAL_VALUE_TYPE value;
1782   tree t;
1783 
1784   real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1785   t = build_real (type, value);
1786 
1787   TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1788   return t;
1789 }
1790 
1791 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1792    to another fixed-point type.  */
1793 
1794 static tree
fold_convert_const_fixed_from_fixed(tree type,const_tree arg1)1795 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1796 {
1797   FIXED_VALUE_TYPE value;
1798   tree t;
1799   bool overflow_p;
1800 
1801   overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1802 			      TYPE_SATURATING (type));
1803   t = build_fixed (type, value);
1804 
1805   /* Propagate overflow flags.  */
1806   if (overflow_p | TREE_OVERFLOW (arg1))
1807     TREE_OVERFLOW (t) = 1;
1808   return t;
1809 }
1810 
1811 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1812    to a fixed-point type.  */
1813 
1814 static tree
fold_convert_const_fixed_from_int(tree type,const_tree arg1)1815 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1816 {
1817   FIXED_VALUE_TYPE value;
1818   tree t;
1819   bool overflow_p;
1820 
1821   overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1822 				       TREE_INT_CST (arg1),
1823 				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
1824 				       TYPE_SATURATING (type));
1825   t = build_fixed (type, value);
1826 
1827   /* Propagate overflow flags.  */
1828   if (overflow_p | TREE_OVERFLOW (arg1))
1829     TREE_OVERFLOW (t) = 1;
1830   return t;
1831 }
1832 
1833 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1834    to a fixed-point type.  */
1835 
1836 static tree
fold_convert_const_fixed_from_real(tree type,const_tree arg1)1837 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1838 {
1839   FIXED_VALUE_TYPE value;
1840   tree t;
1841   bool overflow_p;
1842 
1843   overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1844 					&TREE_REAL_CST (arg1),
1845 					TYPE_SATURATING (type));
1846   t = build_fixed (type, value);
1847 
1848   /* Propagate overflow flags.  */
1849   if (overflow_p | TREE_OVERFLOW (arg1))
1850     TREE_OVERFLOW (t) = 1;
1851   return t;
1852 }
1853 
1854 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1855    type TYPE.  If no simplification can be done return NULL_TREE.  */
1856 
1857 static tree
fold_convert_const(enum tree_code code,tree type,tree arg1)1858 fold_convert_const (enum tree_code code, tree type, tree arg1)
1859 {
1860   if (TREE_TYPE (arg1) == type)
1861     return arg1;
1862 
1863   if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1864       || TREE_CODE (type) == OFFSET_TYPE)
1865     {
1866       if (TREE_CODE (arg1) == INTEGER_CST)
1867 	return fold_convert_const_int_from_int (type, arg1);
1868       else if (TREE_CODE (arg1) == REAL_CST)
1869 	return fold_convert_const_int_from_real (code, type, arg1);
1870       else if (TREE_CODE (arg1) == FIXED_CST)
1871 	return fold_convert_const_int_from_fixed (type, arg1);
1872     }
1873   else if (TREE_CODE (type) == REAL_TYPE)
1874     {
1875       if (TREE_CODE (arg1) == INTEGER_CST)
1876 	return build_real_from_int_cst (type, arg1);
1877       else if (TREE_CODE (arg1) == REAL_CST)
1878 	return fold_convert_const_real_from_real (type, arg1);
1879       else if (TREE_CODE (arg1) == FIXED_CST)
1880 	return fold_convert_const_real_from_fixed (type, arg1);
1881     }
1882   else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1883     {
1884       if (TREE_CODE (arg1) == FIXED_CST)
1885 	return fold_convert_const_fixed_from_fixed (type, arg1);
1886       else if (TREE_CODE (arg1) == INTEGER_CST)
1887 	return fold_convert_const_fixed_from_int (type, arg1);
1888       else if (TREE_CODE (arg1) == REAL_CST)
1889 	return fold_convert_const_fixed_from_real (type, arg1);
1890     }
1891   return NULL_TREE;
1892 }
1893 
1894 /* Construct a vector of zero elements of vector type TYPE.  */
1895 
1896 static tree
build_zero_vector(tree type)1897 build_zero_vector (tree type)
1898 {
1899   tree t;
1900 
1901   t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1902   return build_vector_from_val (type, t);
1903 }
1904 
1905 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */
1906 
1907 bool
fold_convertible_p(const_tree type,const_tree arg)1908 fold_convertible_p (const_tree type, const_tree arg)
1909 {
1910   tree orig = TREE_TYPE (arg);
1911 
1912   if (type == orig)
1913     return true;
1914 
1915   if (TREE_CODE (arg) == ERROR_MARK
1916       || TREE_CODE (type) == ERROR_MARK
1917       || TREE_CODE (orig) == ERROR_MARK)
1918     return false;
1919 
1920   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1921     return true;
1922 
1923   switch (TREE_CODE (type))
1924     {
1925     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1926     case POINTER_TYPE: case REFERENCE_TYPE:
1927     case OFFSET_TYPE:
1928       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1929 	  || TREE_CODE (orig) == OFFSET_TYPE)
1930         return true;
1931       return (TREE_CODE (orig) == VECTOR_TYPE
1932 	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1933 
1934     case REAL_TYPE:
1935     case FIXED_POINT_TYPE:
1936     case COMPLEX_TYPE:
1937     case VECTOR_TYPE:
1938     case VOID_TYPE:
1939       return TREE_CODE (type) == TREE_CODE (orig);
1940 
1941     default:
1942       return false;
1943     }
1944 }
1945 
1946 /* Convert expression ARG to type TYPE.  Used by the middle-end for
1947    simple conversions in preference to calling the front-end's convert.  */
1948 
tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  /* Converting to the identical type is a no-op.  */
  if (type == orig)
    return arg;

  /* Never fold conversions involving erroneous trees; propagate the
     error instead.  */
  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  /* Dispatch on the class of the destination type.  */
  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      /* Fold constant operands at compile time when possible.  */
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      /* Complex -> scalar conversion keeps only the real part.  */
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
			     fold_build1_loc (loc, REALPART_EXPR,
					  TREE_TYPE (orig), arg));
      /* The only remaining legal source is a same-sized vector.  */
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      /* Fold constant operands, choosing the tree code that matches the
	 kind of conversion being performed.  */
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      /* Otherwise build the conversion appropriate for the source type.  */
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  /* Complex -> real takes the real part, then converts it.  */
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  /* Complex -> fixed takes the real part, then converts it.  */
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  /* Scalar -> complex: the scalar becomes the real part, and the
	     imaginary part is zero.  */
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_convert_loc (loc, TREE_TYPE (type), arg),
			      fold_convert_loc (loc, TREE_TYPE (type),
					    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    /* If ARG is already a COMPLEX_EXPR, convert its two operands
	       directly without materializing ARG itself.  */
	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
				      TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
				      TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    /* ARG is used twice below, so evaluate it only once.  */
	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      /* Other sources must match the vector size bit-for-bit, and be
	 scalar or vector themselves; convert by reinterpreting bits.  */
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      /* Keep only the side effects of ARG.  */
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  /* Reached only from the FIXED_POINT_TYPE constant-folding path:
     attach LOC to the folded result before returning it.  */
  protected_set_expr_location_unshare (tem, loc);
  return tem;
}
2118 
2119 /* Return false if expr can be assumed not to be an lvalue, true
2120    otherwise.  */
2121 
2122 static bool
maybe_lvalue_p(const_tree x)2123 maybe_lvalue_p (const_tree x)
2124 {
2125   /* We only need to wrap lvalue tree codes.  */
2126   switch (TREE_CODE (x))
2127   {
2128   case VAR_DECL:
2129   case PARM_DECL:
2130   case RESULT_DECL:
2131   case LABEL_DECL:
2132   case FUNCTION_DECL:
2133   case SSA_NAME:
2134 
2135   case COMPONENT_REF:
2136   case MEM_REF:
2137   case INDIRECT_REF:
2138   case ARRAY_REF:
2139   case ARRAY_RANGE_REF:
2140   case BIT_FIELD_REF:
2141   case OBJ_TYPE_REF:
2142 
2143   case REALPART_EXPR:
2144   case IMAGPART_EXPR:
2145   case PREINCREMENT_EXPR:
2146   case PREDECREMENT_EXPR:
2147   case SAVE_EXPR:
2148   case TRY_CATCH_EXPR:
2149   case WITH_CLEANUP_EXPR:
2150   case COMPOUND_EXPR:
2151   case MODIFY_EXPR:
2152   case TARGET_EXPR:
2153   case COND_EXPR:
2154   case BIND_EXPR:
2155     break;
2156 
2157   default:
2158     /* Assume the worst for front-end tree codes.  */
2159     if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2160       break;
2161     return false;
2162   }
2163 
2164   return true;
2165 }
2166 
2167 /* Return an expr equal to X but certainly not valid as an lvalue.  */
2168 
2169 tree
non_lvalue_loc(location_t loc,tree x)2170 non_lvalue_loc (location_t loc, tree x)
2171 {
2172   /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2173      us.  */
2174   if (in_gimple_form)
2175     return x;
2176 
2177   if (! maybe_lvalue_p (x))
2178     return x;
2179   return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2180 }
2181 
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  Consulted by
   pedantic_non_lvalue_loc.  */

int pedantic_lvalues;
2186 
2187 /* When pedantic, return an expr equal to X but certainly not valid as a
2188    pedantic lvalue.  Otherwise, return X.  */
2189 
2190 static tree
pedantic_non_lvalue_loc(location_t loc,tree x)2191 pedantic_non_lvalue_loc (location_t loc, tree x)
2192 {
2193   if (pedantic_lvalues)
2194     return non_lvalue_loc (loc, x);
2195 
2196   return protected_set_expr_location_unshare (x, loc);
2197 }
2198 
2199 /* Given a tree comparison code, return the code that is the logical inverse.
2200    It is generally not safe to do this for floating-point comparisons, except
2201    for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2202    ERROR_MARK in this case.  */
2203 
2204 enum tree_code
invert_tree_comparison(enum tree_code code,bool honor_nans)2205 invert_tree_comparison (enum tree_code code, bool honor_nans)
2206 {
2207   if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2208       && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2209     return ERROR_MARK;
2210 
2211   switch (code)
2212     {
2213     case EQ_EXPR:
2214       return NE_EXPR;
2215     case NE_EXPR:
2216       return EQ_EXPR;
2217     case GT_EXPR:
2218       return honor_nans ? UNLE_EXPR : LE_EXPR;
2219     case GE_EXPR:
2220       return honor_nans ? UNLT_EXPR : LT_EXPR;
2221     case LT_EXPR:
2222       return honor_nans ? UNGE_EXPR : GE_EXPR;
2223     case LE_EXPR:
2224       return honor_nans ? UNGT_EXPR : GT_EXPR;
2225     case LTGT_EXPR:
2226       return UNEQ_EXPR;
2227     case UNEQ_EXPR:
2228       return LTGT_EXPR;
2229     case UNGT_EXPR:
2230       return LE_EXPR;
2231     case UNGE_EXPR:
2232       return LT_EXPR;
2233     case UNLT_EXPR:
2234       return GE_EXPR;
2235     case UNLE_EXPR:
2236       return GT_EXPR;
2237     case ORDERED_EXPR:
2238       return UNORDERED_EXPR;
2239     case UNORDERED_EXPR:
2240       return ORDERED_EXPR;
2241     default:
2242       gcc_unreachable ();
2243     }
2244 }
2245 
2246 /* Similar, but return the comparison that results if the operands are
2247    swapped.  This is safe for floating-point.  */
2248 
2249 enum tree_code
swap_tree_comparison(enum tree_code code)2250 swap_tree_comparison (enum tree_code code)
2251 {
2252   switch (code)
2253     {
2254     case EQ_EXPR:
2255     case NE_EXPR:
2256     case ORDERED_EXPR:
2257     case UNORDERED_EXPR:
2258     case LTGT_EXPR:
2259     case UNEQ_EXPR:
2260       return code;
2261     case GT_EXPR:
2262       return LT_EXPR;
2263     case GE_EXPR:
2264       return LE_EXPR;
2265     case LT_EXPR:
2266       return GT_EXPR;
2267     case LE_EXPR:
2268       return GE_EXPR;
2269     case UNGT_EXPR:
2270       return UNLT_EXPR;
2271     case UNGE_EXPR:
2272       return UNLE_EXPR;
2273     case UNLT_EXPR:
2274       return UNGT_EXPR;
2275     case UNLE_EXPR:
2276       return UNGE_EXPR;
2277     default:
2278       gcc_unreachable ();
2279     }
2280 }
2281 
2282 
2283 /* Convert a comparison tree code from an enum tree_code representation
2284    into a compcode bit-based encoding.  This function is the inverse of
2285    compcode_to_comparison.  */
2286 
2287 static enum comparison_code
comparison_to_compcode(enum tree_code code)2288 comparison_to_compcode (enum tree_code code)
2289 {
2290   switch (code)
2291     {
2292     case LT_EXPR:
2293       return COMPCODE_LT;
2294     case EQ_EXPR:
2295       return COMPCODE_EQ;
2296     case LE_EXPR:
2297       return COMPCODE_LE;
2298     case GT_EXPR:
2299       return COMPCODE_GT;
2300     case NE_EXPR:
2301       return COMPCODE_NE;
2302     case GE_EXPR:
2303       return COMPCODE_GE;
2304     case ORDERED_EXPR:
2305       return COMPCODE_ORD;
2306     case UNORDERED_EXPR:
2307       return COMPCODE_UNORD;
2308     case UNLT_EXPR:
2309       return COMPCODE_UNLT;
2310     case UNEQ_EXPR:
2311       return COMPCODE_UNEQ;
2312     case UNLE_EXPR:
2313       return COMPCODE_UNLE;
2314     case UNGT_EXPR:
2315       return COMPCODE_UNGT;
2316     case LTGT_EXPR:
2317       return COMPCODE_LTGT;
2318     case UNGE_EXPR:
2319       return COMPCODE_UNGE;
2320     default:
2321       gcc_unreachable ();
2322     }
2323 }
2324 
2325 /* Convert a compcode bit-based encoding of a comparison operator back
2326    to GCC's enum tree_code representation.  This function is the
2327    inverse of comparison_to_compcode.  */
2328 
2329 static enum tree_code
compcode_to_comparison(enum comparison_code code)2330 compcode_to_comparison (enum comparison_code code)
2331 {
2332   switch (code)
2333     {
2334     case COMPCODE_LT:
2335       return LT_EXPR;
2336     case COMPCODE_EQ:
2337       return EQ_EXPR;
2338     case COMPCODE_LE:
2339       return LE_EXPR;
2340     case COMPCODE_GT:
2341       return GT_EXPR;
2342     case COMPCODE_NE:
2343       return NE_EXPR;
2344     case COMPCODE_GE:
2345       return GE_EXPR;
2346     case COMPCODE_ORD:
2347       return ORDERED_EXPR;
2348     case COMPCODE_UNORD:
2349       return UNORDERED_EXPR;
2350     case COMPCODE_UNLT:
2351       return UNLT_EXPR;
2352     case COMPCODE_UNEQ:
2353       return UNEQ_EXPR;
2354     case COMPCODE_UNLE:
2355       return UNLE_EXPR;
2356     case COMPCODE_UNGT:
2357       return UNGT_EXPR;
2358     case COMPCODE_LTGT:
2359       return LTGT_EXPR;
2360     case COMPCODE_UNGE:
2361       return UNGE_EXPR;
2362     default:
2363       gcc_unreachable ();
2364     }
2365 }
2366 
2367 /* Return a tree for the comparison which is the combination of
2368    doing the AND or OR (depending on CODE) of the two operations LCODE
2369    and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2370    the possibility of trapping if the mode has NaNs, and return NULL_TREE
2371    if this makes the transformation invalid.  */
2372 
tree
combine_comparisons (location_t loc,
		     enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  int compcode;

  /* In the bit-based compcode encoding, the AND/OR of two comparisons
     of the same operands is just the bitwise AND/OR of their codes.  */
  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
   else if (flag_trapping_math)
     {
	/* Check that the original operation and the optimized ones will trap
	   under the same condition.  A comparison traps on NaN operands
	   unless it is an unordered form, EQ, or ORD.  */
	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		     && (lcompcode != COMPCODE_EQ)
		     && (lcompcode != COMPCODE_ORD);
	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		     && (rcompcode != COMPCODE_EQ)
		     && (rcompcode != COMPCODE_ORD);
	bool trap = (compcode & COMPCODE_UNORD) == 0
		    && (compcode != COMPCODE_EQ)
		    && (compcode != COMPCODE_ORD);

        /* In a short-circuited boolean expression the LHS might be
	   such that the RHS, if evaluated, will never trap.  For
	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
	   if neither x nor y is NaN.  (This is a mixed blessing: for
	   example, the expression above will never trap, hence
	   optimizing it to x < y would be invalid).  */
        if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
            || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
          rtrap = false;

        /* If the comparison was short-circuited, and only the RHS
	   trapped, we may now generate a spurious trap.  */
	if (rtrap && !ltrap
	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	  return NULL_TREE;

	/* If we changed the conditions that cause a trap, we lose.  */
	if ((ltrap || rtrap) != trap)
	  return NULL_TREE;
      }

  /* An all-true or all-false combined mask folds to a constant;
     otherwise translate the mask back into a tree comparison.  */
  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    {
      enum tree_code tcode;

      tcode = compcode_to_comparison ((enum comparison_code) compcode);
      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
    }
}
2455 
2456 /* Return nonzero if two operands (typically of the same tree node)
2457    are necessarily equal.  If either argument has side-effects this
2458    function returns zero.  FLAGS modifies behavior as follows:
2459 
2460    If OEP_ONLY_CONST is set, only return nonzero for constants.
2461    This function tests whether the operands are indistinguishable;
2462    it does not test whether they are equal using C's == operation.
2463    The distinction is important for IEEE floating point, because
2464    (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2465    (2) two NaNs may be indistinguishable, but NaN!=NaN.
2466 
2467    If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2468    even though it may hold multiple values during a function.
2469    This is because a GCC tree node guarantees that nothing else is
2470    executed between the evaluation of its "operands" (which may often
2471    be evaluated in arbitrary order).  Hence if the operands themselves
2472    don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2473    same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2474    unset means assuming isochronic (or instantaneous) tree equivalence.
2475    Unless comparing arbitrary expression trees, such as from different
2476    statements, this flag can usually be left unset.
2477 
2478    If OEP_PURE_SAME is set, then pure functions with identical arguments
2479    are considered the same.  It is used when the caller has other ways
2480    to ensure that global memory is unchanged in between.  */
2481 
2482 int
operand_equal_p(const_tree arg0,const_tree arg1,unsigned int flags)2483 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2484 {
2485   /* If either is ERROR_MARK, they aren't equal.  */
2486   if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2487       || TREE_TYPE (arg0) == error_mark_node
2488       || TREE_TYPE (arg1) == error_mark_node)
2489     return 0;
2490 
2491   /* Similar, if either does not have a type (like a released SSA name),
2492      they aren't equal.  */
2493   if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2494     return 0;
2495 
2496   /* Check equality of integer constants before bailing out due to
2497      precision differences.  */
2498   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2499     return tree_int_cst_equal (arg0, arg1);
2500 
2501   /* If both types don't have the same signedness, then we can't consider
2502      them equal.  We must check this before the STRIP_NOPS calls
2503      because they may change the signedness of the arguments.  As pointers
2504      strictly don't have a signedness, require either two pointers or
2505      two non-pointers as well.  */
2506   if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2507       || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2508     return 0;
2509 
2510   /* We cannot consider pointers to different address space equal.  */
2511   if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2512       && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2513 	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2514     return 0;
2515 
2516   /* If both types don't have the same precision, then it is not safe
2517      to strip NOPs.  */
2518   if (element_precision (TREE_TYPE (arg0))
2519       != element_precision (TREE_TYPE (arg1)))
2520     return 0;
2521 
2522   STRIP_NOPS (arg0);
2523   STRIP_NOPS (arg1);
2524 
2525   /* In case both args are comparisons but with different comparison
2526      code, try to swap the comparison operands of one arg to produce
2527      a match and compare that variant.  */
2528   if (TREE_CODE (arg0) != TREE_CODE (arg1)
2529       && COMPARISON_CLASS_P (arg0)
2530       && COMPARISON_CLASS_P (arg1))
2531     {
2532       enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2533 
2534       if (TREE_CODE (arg0) == swap_code)
2535 	return operand_equal_p (TREE_OPERAND (arg0, 0),
2536 			        TREE_OPERAND (arg1, 1), flags)
2537 	       && operand_equal_p (TREE_OPERAND (arg0, 1),
2538 				   TREE_OPERAND (arg1, 0), flags);
2539     }
2540 
2541   if (TREE_CODE (arg0) != TREE_CODE (arg1)
2542       /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
2543       && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2544     return 0;
2545 
2546   /* This is needed for conversions and for COMPONENT_REF.
2547      Might as well play it safe and always test this.  */
2548   if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2549       || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2550       || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2551     return 0;
2552 
2553   /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2554      We don't care about side effects in that case because the SAVE_EXPR
2555      takes care of that for us. In all other cases, two expressions are
2556      equal if they have no side effects.  If we have two identical
2557      expressions with side effects that should be treated the same due
2558      to the only side effects being identical SAVE_EXPR's, that will
2559      be detected in the recursive calls below.
2560      If we are taking an invariant address of two identical objects
2561      they are necessarily equal as well.  */
2562   if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2563       && (TREE_CODE (arg0) == SAVE_EXPR
2564 	  || (flags & OEP_CONSTANT_ADDRESS_OF)
2565 	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2566     return 1;
2567 
2568   /* Next handle constant cases, those for which we can return 1 even
2569      if ONLY_CONST is set.  */
2570   if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2571     switch (TREE_CODE (arg0))
2572       {
2573       case INTEGER_CST:
2574 	return tree_int_cst_equal (arg0, arg1);
2575 
2576       case FIXED_CST:
2577 	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2578 				       TREE_FIXED_CST (arg1));
2579 
2580       case REAL_CST:
2581 	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2582 				   TREE_REAL_CST (arg1)))
2583 	  return 1;
2584 
2585 
2586 	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2587 	  {
2588 	    /* If we do not distinguish between signed and unsigned zero,
2589 	       consider them equal.  */
2590 	    if (real_zerop (arg0) && real_zerop (arg1))
2591 	      return 1;
2592 	  }
2593 	return 0;
2594 
2595       case VECTOR_CST:
2596 	{
2597 	  unsigned i;
2598 
2599 	  if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2600 	    return 0;
2601 
2602 	  for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2603 	    {
2604 	      if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2605 				    VECTOR_CST_ELT (arg1, i), flags))
2606 		return 0;
2607 	    }
2608 	  return 1;
2609 	}
2610 
2611       case COMPLEX_CST:
2612 	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2613 				 flags)
2614 		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2615 				    flags));
2616 
2617       case STRING_CST:
2618 	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2619 		&& ! memcmp (TREE_STRING_POINTER (arg0),
2620 			      TREE_STRING_POINTER (arg1),
2621 			      TREE_STRING_LENGTH (arg0)));
2622 
2623       case ADDR_EXPR:
2624 	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2625 				TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2626 				? OEP_CONSTANT_ADDRESS_OF : 0);
2627       default:
2628 	break;
2629       }
2630 
2631   if (flags & OEP_ONLY_CONST)
2632     return 0;
2633 
2634 /* Define macros to test an operand from arg0 and arg1 for equality and a
2635    variant that allows null and views null as being different from any
2636    non-null value.  In the latter case, if either is null, the both
2637    must be; otherwise, do the normal comparison.  */
2638 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
2639 				    TREE_OPERAND (arg1, N), flags)
2640 
2641 #define OP_SAME_WITH_NULL(N)				\
2642   ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
2643    ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2644 
2645   switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2646     {
2647     case tcc_unary:
2648       /* Two conversions are equal only if signedness and modes match.  */
2649       switch (TREE_CODE (arg0))
2650         {
2651 	CASE_CONVERT:
2652         case FIX_TRUNC_EXPR:
2653 	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2654 	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2655 	    return 0;
2656 	  break;
2657 	default:
2658 	  break;
2659 	}
2660 
2661       return OP_SAME (0);
2662 
2663 
2664     case tcc_comparison:
2665     case tcc_binary:
2666       if (OP_SAME (0) && OP_SAME (1))
2667 	return 1;
2668 
2669       /* For commutative ops, allow the other order.  */
2670       return (commutative_tree_code (TREE_CODE (arg0))
2671 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
2672 				  TREE_OPERAND (arg1, 1), flags)
2673 	      && operand_equal_p (TREE_OPERAND (arg0, 1),
2674 				  TREE_OPERAND (arg1, 0), flags));
2675 
2676     case tcc_reference:
2677       /* If either of the pointer (or reference) expressions we are
2678 	 dereferencing contain a side effect, these cannot be equal,
2679 	 but their addresses can be.  */
2680       if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2681 	  && (TREE_SIDE_EFFECTS (arg0)
2682 	      || TREE_SIDE_EFFECTS (arg1)))
2683 	return 0;
2684 
2685       switch (TREE_CODE (arg0))
2686 	{
2687 	case INDIRECT_REF:
2688 	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
2689 	  return OP_SAME (0);
2690 
2691 	case REALPART_EXPR:
2692 	case IMAGPART_EXPR:
2693 	  return OP_SAME (0);
2694 
2695 	case TARGET_MEM_REF:
2696 	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
2697 	  /* Require equal extra operands and then fall through to MEM_REF
2698 	     handling of the two common operands.  */
2699 	  if (!OP_SAME_WITH_NULL (2)
2700 	      || !OP_SAME_WITH_NULL (3)
2701 	      || !OP_SAME_WITH_NULL (4))
2702 	    return 0;
2703 	  /* Fallthru.  */
2704 	case MEM_REF:
2705 	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
2706 	  /* Require equal access sizes, and similar pointer types.
2707 	     We can have incomplete types for array references of
2708 	     variable-sized arrays from the Fortran frontend
2709 	     though.  Also verify the types are compatible.  */
2710 	  return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2711 		   || (TYPE_SIZE (TREE_TYPE (arg0))
2712 		       && TYPE_SIZE (TREE_TYPE (arg1))
2713 		       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2714 					   TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2715 		  && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2716 		  && alias_ptr_types_compatible_p
2717 		       (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2718 			TREE_TYPE (TREE_OPERAND (arg1, 1)))
2719 		  && OP_SAME (0) && OP_SAME (1));
2720 
2721 	case ARRAY_REF:
2722 	case ARRAY_RANGE_REF:
2723 	  /* Operands 2 and 3 may be null.
2724 	     Compare the array index by value if it is constant first as we
2725 	     may have different types but same value here.  */
2726 	  if (!OP_SAME (0))
2727 	    return 0;
2728 	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
2729 	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2730 				       TREE_OPERAND (arg1, 1))
2731 		   || OP_SAME (1))
2732 		  && OP_SAME_WITH_NULL (2)
2733 		  && OP_SAME_WITH_NULL (3));
2734 
2735 	case COMPONENT_REF:
2736 	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
2737 	     may be NULL when we're called to compare MEM_EXPRs.  */
2738 	  if (!OP_SAME_WITH_NULL (0)
2739 	      || !OP_SAME (1))
2740 	    return 0;
2741 	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
2742 	  return OP_SAME_WITH_NULL (2);
2743 
2744 	case BIT_FIELD_REF:
2745 	  if (!OP_SAME (0))
2746 	    return 0;
2747 	  flags &= ~OEP_CONSTANT_ADDRESS_OF;
2748 	  return OP_SAME (1) && OP_SAME (2);
2749 
2750 	default:
2751 	  return 0;
2752 	}
2753 
2754     case tcc_expression:
2755       switch (TREE_CODE (arg0))
2756 	{
2757 	case ADDR_EXPR:
2758 	case TRUTH_NOT_EXPR:
2759 	  return OP_SAME (0);
2760 
2761 	case TRUTH_ANDIF_EXPR:
2762 	case TRUTH_ORIF_EXPR:
2763 	  return OP_SAME (0) && OP_SAME (1);
2764 
2765 	case FMA_EXPR:
2766 	case WIDEN_MULT_PLUS_EXPR:
2767 	case WIDEN_MULT_MINUS_EXPR:
2768 	  if (!OP_SAME (2))
2769 	    return 0;
	  /* The multiplication operands are commutative.  */
2771 	  /* FALLTHRU */
2772 
2773 	case TRUTH_AND_EXPR:
2774 	case TRUTH_OR_EXPR:
2775 	case TRUTH_XOR_EXPR:
2776 	  if (OP_SAME (0) && OP_SAME (1))
2777 	    return 1;
2778 
2779 	  /* Otherwise take into account this is a commutative operation.  */
2780 	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
2781 				   TREE_OPERAND (arg1, 1), flags)
2782 		  && operand_equal_p (TREE_OPERAND (arg0, 1),
2783 				      TREE_OPERAND (arg1, 0), flags));
2784 
2785 	case COND_EXPR:
2786 	case VEC_COND_EXPR:
2787 	case DOT_PROD_EXPR:
2788 	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2789 
2790 	default:
2791 	  return 0;
2792 	}
2793 
2794     case tcc_vl_exp:
2795       switch (TREE_CODE (arg0))
2796 	{
2797 	case CALL_EXPR:
2798 	  /* If the CALL_EXPRs call different functions, then they
2799 	     clearly can not be equal.  */
2800 	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2801 				 flags))
2802 	    return 0;
2803 
2804 	  {
2805 	    unsigned int cef = call_expr_flags (arg0);
2806 	    if (flags & OEP_PURE_SAME)
2807 	      cef &= ECF_CONST | ECF_PURE;
2808 	    else
2809 	      cef &= ECF_CONST;
2810 	    if (!cef)
2811 	      return 0;
2812 	  }
2813 
2814 	  /* Now see if all the arguments are the same.  */
2815 	  {
2816 	    const_call_expr_arg_iterator iter0, iter1;
2817 	    const_tree a0, a1;
2818 	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
2819 		   a1 = first_const_call_expr_arg (arg1, &iter1);
2820 		 a0 && a1;
2821 		 a0 = next_const_call_expr_arg (&iter0),
2822 		   a1 = next_const_call_expr_arg (&iter1))
2823 	      if (! operand_equal_p (a0, a1, flags))
2824 		return 0;
2825 
2826 	    /* If we get here and both argument lists are exhausted
2827 	       then the CALL_EXPRs are equal.  */
2828 	    return ! (a0 || a1);
2829 	  }
2830 	default:
2831 	  return 0;
2832 	}
2833 
2834     case tcc_declaration:
2835       /* Consider __builtin_sqrt equal to sqrt.  */
2836       return (TREE_CODE (arg0) == FUNCTION_DECL
2837 	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2838 	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2839 	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2840 
2841     default:
2842       return 0;
2843     }
2844 
2845 #undef OP_SAME
2846 #undef OP_SAME_WITH_NULL
2847 }
2848 
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  /* Exact structural equality needs no further analysis.  */
  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  /* shorten_compare only narrows integral comparisons, so anything else
     cannot match.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (signed_or_unsigned_type_for
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      /* If widening the narrowed ARG1 back to ARG0's type reproduces ARG0,
	 the two are equal for comparison purposes.  */
      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
2904 
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      /* A unary operation qualifies iff its operand does.  */
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      /* A binary operation qualifies iff both operands do.  */
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      /* Record operand 0 as *CVAL1 or *CVAL2, or verify it matches one
	 already recorded.  */
      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      /* Likewise for operand 1; here *CVAL1 is known to be set.  */
      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
3001 
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
	    tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      /* Rebuild the unary operation around the substituted operand.  */
      return fold_build1_loc (loc, code, type,
			  eval_subst (loc, TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1));

    case tcc_binary:
      /* Rebuild the binary operation around both substituted operands.  */
      return fold_build2_loc (loc, code, type,
			  eval_subst (loc, TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (loc, TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  /* Look through the SAVE_EXPR wrapper.  */
	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
			     old1, new1);

	case COMPOUND_EXPR:
	  /* Only the value (second) operand matters here.  */
	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
			     old1, new1);

	case COND_EXPR:
	  return fold_build3_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      /* Anything else (e.g. constants, declarations) is left untouched.  */
      return arg;
    }
}
3086 
3087 /* Return a tree for the case when the result of an expression is RESULT
3088    converted to TYPE and OMITTED was previously an operand of the expression
3089    but is now not needed (e.g., we folded OMITTED * 0).
3090 
3091    If OMITTED has side effects, we must evaluate it.  Otherwise, just do
3092    the conversion of RESULT to TYPE.  */
3093 
3094 tree
omit_one_operand_loc(location_t loc,tree type,tree result,tree omitted)3095 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3096 {
3097   tree t = fold_convert_loc (loc, type, result);
3098 
3099   /* If the resulting operand is an empty statement, just return the omitted
3100      statement casted to void. */
3101   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3102     return build1_loc (loc, NOP_EXPR, void_type_node,
3103 		       fold_ignored_result (omitted));
3104 
3105   if (TREE_SIDE_EFFECTS (omitted))
3106     return build2_loc (loc, COMPOUND_EXPR, type,
3107 		       fold_ignored_result (omitted), t);
3108 
3109   return non_lvalue_loc (loc, t);
3110 }
3111 
3112 /* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */
3113 
3114 static tree
pedantic_omit_one_operand_loc(location_t loc,tree type,tree result,tree omitted)3115 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3116 			       tree omitted)
3117 {
3118   tree t = fold_convert_loc (loc, type, result);
3119 
3120   /* If the resulting operand is an empty statement, just return the omitted
3121      statement casted to void. */
3122   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3123     return build1_loc (loc, NOP_EXPR, void_type_node,
3124 		       fold_ignored_result (omitted));
3125 
3126   if (TREE_SIDE_EFFECTS (omitted))
3127     return build2_loc (loc, COMPOUND_EXPR, type,
3128 		       fold_ignored_result (omitted), t);
3129 
3130   return pedantic_non_lvalue_loc (loc, t);
3131 }
3132 
3133 /* Return a tree for the case when the result of an expression is RESULT
3134    converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3135    of the expression but are now not needed.
3136 
3137    If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3138    If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3139    evaluated before OMITTED2.  Otherwise, if neither has side effects,
3140    just do the conversion of RESULT to TYPE.  */
3141 
3142 tree
omit_two_operands_loc(location_t loc,tree type,tree result,tree omitted1,tree omitted2)3143 omit_two_operands_loc (location_t loc, tree type, tree result,
3144 		       tree omitted1, tree omitted2)
3145 {
3146   tree t = fold_convert_loc (loc, type, result);
3147 
3148   if (TREE_SIDE_EFFECTS (omitted2))
3149     t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3150   if (TREE_SIDE_EFFECTS (omitted1))
3151     t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3152 
3153   return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3154 }
3155 
3156 
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      /* With -ftrapping-math, inverting an ordered FP comparison into
	 an unordered one (or vice versa) could change trapping behavior,
	 so refuse except for the codes known to be safe.  */
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;

      return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
			 TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      /* De Morgan: !(a && b) == !a || !b.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      /* De Morgan: !(a || b) == !a && !b.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2_loc (loc, TRUTH_XOR_EXPR, type,
			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
			   TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      /* De Morgan for short-circuit forms.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      /* Double negation cancels.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
			   VOID_TYPE_P (TREE_TYPE (arg1))
			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
			   VOID_TYPE_P (TREE_TYPE (arg2))
			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      /* Only the value operand needs inverting.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
			 TREE_OPERAND (arg, 0),
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* ... fall through ...  */

    case FLOAT_EXPR:
      /* Push the negation inside the conversion.  */
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      /* !(x & 1) == ((x & 1) == 0).  */
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      /* Anything else cannot be simplified here.  */
      return NULL_TREE;
    }
}
3298 
3299 /* Fold the truth-negation of ARG.  This never alters ARG itself.  We
3300    assume that ARG is an operation that returns a truth value (0 or 1
3301    for scalars, 0 or -1 for vectors).  Return the folded expression if
3302    folding is successful.  Otherwise, return NULL_TREE.  */
3303 
3304 static tree
fold_invert_truthvalue(location_t loc,tree arg)3305 fold_invert_truthvalue (location_t loc, tree arg)
3306 {
3307   tree type = TREE_TYPE (arg);
3308   return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3309 			      ? BIT_NOT_EXPR
3310 			      : TRUTH_NOT_EXPR,
3311 			 type, arg);
3312 }
3313 
3314 /* Return a simplified tree node for the truth-negation of ARG.  This
3315    never alters ARG itself.  We assume that ARG is an operation that
3316    returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */
3317 
3318 tree
invert_truthvalue_loc(location_t loc,tree arg)3319 invert_truthvalue_loc (location_t loc, tree arg)
3320 {
3321   if (TREE_CODE (arg) == ERROR_MARK)
3322     return arg;
3323 
3324   tree type = TREE_TYPE (arg);
3325   return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3326 			       ? BIT_NOT_EXPR
3327 			       : TRUTH_NOT_EXPR,
3328 			  type, arg);
3329 }
3330 
3331 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3332    operands are another bit-wise operation with a common input.  If so,
3333    distribute the bit operations to save an operation and possibly two if
3334    constants are involved.  For example, convert
3335 	(A | B) & (A | C) into A | (B & C)
3336    Further simplification will occur if B and C are constants.
3337 
3338    If this optimization cannot be done, 0 will be returned.  */
3339 
3340 static tree
distribute_bit_expr(location_t loc,enum tree_code code,tree type,tree arg0,tree arg1)3341 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3342 		     tree arg0, tree arg1)
3343 {
3344   tree common;
3345   tree left, right;
3346 
3347   if (TREE_CODE (arg0) != TREE_CODE (arg1)
3348       || TREE_CODE (arg0) == code
3349       || (TREE_CODE (arg0) != BIT_AND_EXPR
3350 	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
3351     return 0;
3352 
3353   if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3354     {
3355       common = TREE_OPERAND (arg0, 0);
3356       left = TREE_OPERAND (arg0, 1);
3357       right = TREE_OPERAND (arg1, 1);
3358     }
3359   else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3360     {
3361       common = TREE_OPERAND (arg0, 0);
3362       left = TREE_OPERAND (arg0, 1);
3363       right = TREE_OPERAND (arg1, 0);
3364     }
3365   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3366     {
3367       common = TREE_OPERAND (arg0, 1);
3368       left = TREE_OPERAND (arg0, 0);
3369       right = TREE_OPERAND (arg1, 1);
3370     }
3371   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3372     {
3373       common = TREE_OPERAND (arg0, 1);
3374       left = TREE_OPERAND (arg0, 0);
3375       right = TREE_OPERAND (arg1, 0);
3376     }
3377   else
3378     return 0;
3379 
3380   common = fold_convert_loc (loc, type, common);
3381   left = fold_convert_loc (loc, type, left);
3382   right = fold_convert_loc (loc, type, right);
3383   return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3384 		      fold_build2_loc (loc, code, type, left, right));
3385 }
3386 
/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */
static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
		       TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
			fold_build2_loc (loc, code, type,
				     TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0)),
			TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      /* For a division, fold the constant into its reciprocal so both
	 sides are of the form A * constant.  */
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      /* Combine the two constants with CODE and multiply A by the result.  */
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
			  TREE_OPERAND (arg0, 0),
			  build_real (type, r0));
    }

  /* No simplification found.  */
  return NULL_TREE;
}
3427 
3428 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3429    starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */
3430 
3431 static tree
make_bit_field_ref(location_t loc,tree inner,tree type,HOST_WIDE_INT bitsize,HOST_WIDE_INT bitpos,int unsignedp)3432 make_bit_field_ref (location_t loc, tree inner, tree type,
3433 		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3434 {
3435   tree result, bftype;
3436 
3437   if (bitpos == 0)
3438     {
3439       tree size = TYPE_SIZE (TREE_TYPE (inner));
3440       if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3441 	   || POINTER_TYPE_P (TREE_TYPE (inner)))
3442 	  && tree_fits_shwi_p (size)
3443 	  && tree_to_shwi (size) == bitsize)
3444 	return fold_convert_loc (loc, type, inner);
3445     }
3446 
3447   bftype = type;
3448   if (TYPE_PRECISION (bftype) != bitsize
3449       || TYPE_UNSIGNED (bftype) == !unsignedp)
3450     bftype = build_nonstandard_integer_type (bitsize, 0);
3451 
3452   result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3453 		       size_int (bitsize), bitsize_int (bitpos));
3454 
3455   if (bftype != type)
3456     result = fold_convert_loc (loc, type, result);
3457 
3458   return result;
3459 }
3460 
3461 /* Optimize a bit-field compare.
3462 
3463    There are two cases:  First is a compare against a constant and the
3464    second is a comparison of two items where the fields are at the same
3465    bit position relative to the start of a chunk (byte, halfword, word)
3466    large enough to contain it.  In these cases we can avoid the shift
3467    implicit in bitfield extractions.
3468 
3469    For constants, we emit a compare of the shifted constant with the
3470    BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3471    compared.  For two fields at the same position, we do the ANDs with the
3472    similar mask and compare the result of the ANDs.
3473 
3474    CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3475    COMPARE_TYPE is the type of the comparison, and LHS and RHS
3476    are the left and right operands of the comparison, respectively.
3477 
3478    If the optimization described above can be done, we return the resulting
3479    tree.  Otherwise we return zero.  */
3480 
3481 static tree
optimize_bit_field_compare(location_t loc,enum tree_code code,tree compare_type,tree lhs,tree rhs)3482 optimize_bit_field_compare (location_t loc, enum tree_code code,
3483 			    tree compare_type, tree lhs, tree rhs)
3484 {
3485   HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3486   tree type = TREE_TYPE (lhs);
3487   tree signed_type, unsigned_type;
3488   int const_p = TREE_CODE (rhs) == INTEGER_CST;
3489   enum machine_mode lmode, rmode, nmode;
3490   int lunsignedp, runsignedp;
3491   int lvolatilep = 0, rvolatilep = 0;
3492   tree linner, rinner = NULL_TREE;
3493   tree mask;
3494   tree offset;
3495 
3496   /* Get all the information about the extractions being done.  If the bit size
3497      if the same as the size of the underlying object, we aren't doing an
3498      extraction at all and so can do nothing.  We also don't want to
3499      do anything if the inner expression is a PLACEHOLDER_EXPR since we
3500      then will no longer be able to replace it.  */
3501   linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3502 				&lunsignedp, &lvolatilep, false);
3503   if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3504       || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3505     return 0;
3506 
3507  if (!const_p)
3508    {
3509      /* If this is not a constant, we can only do something if bit positions,
3510 	sizes, and signedness are the same.  */
3511      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3512 				   &runsignedp, &rvolatilep, false);
3513 
3514      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3515 	 || lunsignedp != runsignedp || offset != 0
3516 	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3517        return 0;
3518    }
3519 
3520   /* See if we can find a mode to refer to this field.  We should be able to,
3521      but fail if we can't.  */
3522   nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3523 			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3524 			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3525 				TYPE_ALIGN (TREE_TYPE (rinner))),
3526 			 word_mode, false);
3527   if (nmode == VOIDmode)
3528     return 0;
3529 
3530   /* Set signed and unsigned types of the precision of this mode for the
3531      shifts below.  */
3532   signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3533   unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3534 
3535   /* Compute the bit position and size for the new reference and our offset
3536      within it. If the new reference is the same size as the original, we
3537      won't optimize anything, so return zero.  */
3538   nbitsize = GET_MODE_BITSIZE (nmode);
3539   nbitpos = lbitpos & ~ (nbitsize - 1);
3540   lbitpos -= nbitpos;
3541   if (nbitsize == lbitsize)
3542     return 0;
3543 
3544   if (BYTES_BIG_ENDIAN)
3545     lbitpos = nbitsize - lbitsize - lbitpos;
3546 
3547   /* Make the mask to be used against the extracted field.  */
3548   mask = build_int_cst_type (unsigned_type, -1);
3549   mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3550   mask = const_binop (RSHIFT_EXPR, mask,
3551 		      size_int (nbitsize - lbitsize - lbitpos));
3552 
3553   if (! const_p)
3554     /* If not comparing with constant, just rework the comparison
3555        and return.  */
3556     return fold_build2_loc (loc, code, compare_type,
3557 			fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3558 				     make_bit_field_ref (loc, linner,
3559 							 unsigned_type,
3560 							 nbitsize, nbitpos,
3561 							 1),
3562 				     mask),
3563 			fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3564 				     make_bit_field_ref (loc, rinner,
3565 							 unsigned_type,
3566 							 nbitsize, nbitpos,
3567 							 1),
3568 				     mask));
3569 
3570   /* Otherwise, we are handling the constant case. See if the constant is too
3571      big for the field.  Warn and return a tree of for 0 (false) if so.  We do
3572      this not only for its own sake, but to avoid having to test for this
3573      error case below.  If we didn't, we might generate wrong code.
3574 
3575      For unsigned fields, the constant shifted right by the field length should
3576      be all zero.  For signed fields, the high-order bits should agree with
3577      the sign bit.  */
3578 
3579   if (lunsignedp)
3580     {
3581       if (! integer_zerop (const_binop (RSHIFT_EXPR,
3582 					fold_convert_loc (loc,
3583 							  unsigned_type, rhs),
3584 					size_int (lbitsize))))
3585 	{
3586 	  warning (0, "comparison is always %d due to width of bit-field",
3587 		   code == NE_EXPR);
3588 	  return constant_boolean_node (code == NE_EXPR, compare_type);
3589 	}
3590     }
3591   else
3592     {
3593       tree tem = const_binop (RSHIFT_EXPR,
3594 			      fold_convert_loc (loc, signed_type, rhs),
3595 			      size_int (lbitsize - 1));
3596       if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3597 	{
3598 	  warning (0, "comparison is always %d due to width of bit-field",
3599 		   code == NE_EXPR);
3600 	  return constant_boolean_node (code == NE_EXPR, compare_type);
3601 	}
3602     }
3603 
3604   /* Single-bit compares should always be against zero.  */
3605   if (lbitsize == 1 && ! integer_zerop (rhs))
3606     {
3607       code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3608       rhs = build_int_cst (type, 0);
3609     }
3610 
3611   /* Make a new bitfield reference, shift the constant over the
3612      appropriate number of bits and mask it with the computed mask
3613      (in case this was a signed field).  If we changed it, make a new one.  */
3614   lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3615 
3616   rhs = const_binop (BIT_AND_EXPR,
3617 		     const_binop (LSHIFT_EXPR,
3618 				  fold_convert_loc (loc, unsigned_type, rhs),
3619 				  size_int (lbitpos)),
3620 		     mask);
3621 
3622   lhs = build2_loc (loc, code, compare_type,
3623 		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3624   return lhs;
3625 }
3626 
3627 /* Subroutine for fold_truth_andor_1: decode a field reference.
3628 
3629    If EXP is a comparison reference, we return the innermost reference.
3630 
3631    *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3632    set to the starting bit number.
3633 
3634    If the innermost field can be completely contained in a mode-sized
3635    unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
3636 
   *PVOLATILEP is set to 1 if any expression encountered is volatile;
3638    otherwise it is not changed.
3639 
3640    *PUNSIGNEDP is set to the signedness of the field.
3641 
3642    *PMASK is set to the mask used.  This is either contained in a
3643    BIT_AND_EXPR or derived from the width of the field.
3644 
3645    *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3646 
3647    Return 0 if this is not a component reference or is one that we can't
3648    do anything with.  */
3649 
static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  /* Peel off an explicit BIT_AND_EXPR; only a constant mask is useful
     to the callers, so give up on a variable one.  */
  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  /* Fail if nothing was stripped and there is no mask to merge, if the
     bit size is unknown/negative, if there is a variable offset, or if
     the base object is a PLACEHOLDER_EXPR.  */
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness. Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield: all ones shifted so only
     the low *PBITSIZE bits survive.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
			fold_convert_loc (loc, unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
3716 
3717 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3718    bit positions.  */
3719 
3720 static int
all_ones_mask_p(const_tree mask,int size)3721 all_ones_mask_p (const_tree mask, int size)
3722 {
3723   tree type = TREE_TYPE (mask);
3724   unsigned int precision = TYPE_PRECISION (type);
3725   tree tmask;
3726 
3727   tmask = build_int_cst_type (signed_type_for (type), -1);
3728 
3729   return
3730     tree_int_cst_equal (mask,
3731 			const_binop (RSHIFT_EXPR,
3732 				     const_binop (LSHIFT_EXPR, tmask,
3733 						  size_int (precision - size)),
3734 				     size_int (precision - size)));
3735 }
3736 
3737 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3738    represents the sign bit of EXP's type.  If EXP represents a sign
3739    or zero extension, also test VAL against the unextended type.
3740    The return value is the (sub)expression whose sign bit is VAL,
3741    or NULL_TREE otherwise.  */
3742 
3743 static tree
sign_bit_p(tree exp,const_tree val)3744 sign_bit_p (tree exp, const_tree val)
3745 {
3746   unsigned HOST_WIDE_INT mask_lo, lo;
3747   HOST_WIDE_INT mask_hi, hi;
3748   int width;
3749   tree t;
3750 
3751   /* Tree EXP must have an integral type.  */
3752   t = TREE_TYPE (exp);
3753   if (! INTEGRAL_TYPE_P (t))
3754     return NULL_TREE;
3755 
3756   /* Tree VAL must be an integer constant.  */
3757   if (TREE_CODE (val) != INTEGER_CST
3758       || TREE_OVERFLOW (val))
3759     return NULL_TREE;
3760 
3761   width = TYPE_PRECISION (t);
3762   if (width > HOST_BITS_PER_WIDE_INT)
3763     {
3764       hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3765       lo = 0;
3766 
3767       mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
3768       mask_lo = -1;
3769     }
3770   else
3771     {
3772       hi = 0;
3773       lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3774 
3775       mask_hi = 0;
3776       mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
3777     }
3778 
3779   /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3780      treat VAL as if it were unsigned.  */
3781   if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3782       && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3783     return exp;
3784 
3785   /* Handle extension from a narrower type.  */
3786   if (TREE_CODE (exp) == NOP_EXPR
3787       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3788     return sign_bit_p (TREE_OPERAND (exp, 0), val);
3789 
3790   return NULL_TREE;
3791 }
3792 
3793 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3794    to be evaluated unconditionally.  */
3795 
3796 static int
simple_operand_p(const_tree exp)3797 simple_operand_p (const_tree exp)
3798 {
3799   /* Strip any conversions that don't change the machine mode.  */
3800   STRIP_NOPS (exp);
3801 
3802   return (CONSTANT_CLASS_P (exp)
3803   	  || TREE_CODE (exp) == SSA_NAME
3804 	  || (DECL_P (exp)
3805 	      && ! TREE_ADDRESSABLE (exp)
3806 	      && ! TREE_THIS_VOLATILE (exp)
3807 	      && ! DECL_NONLOCAL (exp)
3808 	      /* Don't regard global variables as simple.  They may be
3809 		 allocated in ways unknown to the compiler (shared memory,
3810 		 #pragma weak, etc).  */
3811 	      && ! TREE_PUBLIC (exp)
3812 	      && ! DECL_EXTERNAL (exp)
3813 	      /* Weakrefs are not safe to be read, since they can be NULL.
3814  		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3815 		 have DECL_WEAK flag set.  */
3816 	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3817 	      /* Loading a static variable is unduly expensive, but global
3818 		 registers aren't expensive.  */
3819 	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3820 }
3821 
3822 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3823    to be evaluated unconditionally.
3824    I addition to simple_operand_p, we assume that comparisons, conversions,
3825    and logic-not operations are simple, if their operands are simple, too.  */
3826 
3827 static bool
simple_operand_p_2(tree exp)3828 simple_operand_p_2 (tree exp)
3829 {
3830   enum tree_code code;
3831 
3832   if (TREE_SIDE_EFFECTS (exp)
3833       || tree_could_trap_p (exp))
3834     return false;
3835 
3836   while (CONVERT_EXPR_P (exp))
3837     exp = TREE_OPERAND (exp, 0);
3838 
3839   code = TREE_CODE (exp);
3840 
3841   if (TREE_CODE_CLASS (code) == tcc_comparison)
3842     return (simple_operand_p (TREE_OPERAND (exp, 0))
3843 	    && simple_operand_p (TREE_OPERAND (exp, 1)));
3844 
3845   if (code == TRUTH_NOT_EXPR)
3846       return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3847 
3848   return simple_operand_p (exp);
3849 }
3850 
3851 
3852 /* The following functions are subroutines to fold_range_test and allow it to
3853    try to change a logical combination of comparisons into a range test.
3854 
3855    For example, both
3856 	X == 2 || X == 3 || X == 4 || X == 5
3857    and
3858 	X >= 2 && X <= 5
3859    are converted to
3860 	(unsigned) (X - 2) <= 3
3861 
3862    We describe each set of comparisons as being either inside or outside
3863    a range, using a variable named like IN_P, and then describe the
3864    range with a lower and upper bound.  If one of the bounds is omitted,
3865    it represents either the highest or lowest value of the type.
3866 
3867    In the comments below, we represent a range by two numbers in brackets
3868    preceded by a "+" to designate being inside that range, or a "-" to
3869    designate being outside that range, so the condition can be inverted by
3870    flipping the prefix.  An omitted bound is represented by a "-".  For
3871    example, "- [-, 10]" means being outside the range starting at the lowest
3872    possible value and ending at 10, in other words, being greater than 10.
3873    The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3874    always false.
3875 
3876    We set up things so that the missing bounds are handled in a consistent
3877    manner so neither a missing bound nor "true" and "false" need to be
3878    handled using a special case.  */
3879 
3880 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3881    of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3882    and UPPER1_P are nonzero if the respective argument is an upper bound
3883    and zero for a lower.  TYPE, if nonzero, is the type of the result; it
3884    must be specified for a comparison.  ARG1 will be converted to ARG0's
3885    type if both are specified.  */
3886 
3887 static tree
range_binop(enum tree_code code,tree type,tree arg0,int upper0_p,tree arg1,int upper1_p)3888 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3889 	     tree arg1, int upper1_p)
3890 {
3891   tree tem;
3892   int result;
3893   int sgn0, sgn1;
3894 
3895   /* If neither arg represents infinity, do the normal operation.
3896      Else, if not a comparison, return infinity.  Else handle the special
3897      comparison rules. Note that most of the cases below won't occur, but
3898      are handled for consistency.  */
3899 
3900   if (arg0 != 0 && arg1 != 0)
3901     {
3902       tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3903 			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3904       STRIP_NOPS (tem);
3905       return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3906     }
3907 
3908   if (TREE_CODE_CLASS (code) != tcc_comparison)
3909     return 0;
3910 
3911   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3912      for neither.  In real maths, we cannot assume open ended ranges are
3913      the same. But, this is computer arithmetic, where numbers are finite.
3914      We can therefore make the transformation of any unbounded range with
3915      the value Z, Z being greater than any representable number. This permits
3916      us to treat unbounded ranges as equal.  */
3917   sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3918   sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3919   switch (code)
3920     {
3921     case EQ_EXPR:
3922       result = sgn0 == sgn1;
3923       break;
3924     case NE_EXPR:
3925       result = sgn0 != sgn1;
3926       break;
3927     case LT_EXPR:
3928       result = sgn0 < sgn1;
3929       break;
3930     case LE_EXPR:
3931       result = sgn0 <= sgn1;
3932       break;
3933     case GT_EXPR:
3934       result = sgn0 > sgn1;
3935       break;
3936     case GE_EXPR:
3937       result = sgn0 >= sgn1;
3938       break;
3939     default:
3940       gcc_unreachable ();
3941     }
3942 
3943   return constant_boolean_node (result, type);
3944 }
3945 
3946 /* Helper routine for make_range.  Perform one step for it, return
3947    new expression if the loop should continue or NULL_TREE if it should
3948    stop.  */
3949 
tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
		 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high))
	return NULL_TREE;
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
	 and if the second operand is an integer constant.  Note that
	 saying something is "in" the range we make is done by
	 complementing IN_P since it will set in the initial case of
	 being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high)
	  || TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      /* Translate the comparison into a range in the "+/- [lo, hi]"
	 notation described above fold_range_test.  */
      switch (code)
	{
	case NE_EXPR:  /* - [c, c]  */
	  low = high = arg1;
	  break;
	case EQ_EXPR:  /* + [c, c]  */
	  in_p = ! in_p, low = high = arg1;
	  break;
	case GT_EXPR:  /* - [-, c] */
	  low = 0, high = arg1;
	  break;
	case GE_EXPR:  /* + [c, -] */
	  in_p = ! in_p, low = arg1, high = 0;
	  break;
	case LT_EXPR:  /* - [c, -] */
	  low = arg1, high = 0;
	  break;
	case LE_EXPR:  /* + [-, c] */
	  in_p = ! in_p, low = 0, high = arg1;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* If this is an unsigned comparison, we also know that EXP is
	 greater than or equal to zero.  We base the range tests we make
	 on that fact, so we record it here so we can parse existing
	 range tests.  We test arg0_type since often the return type
	 of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	{
	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
			      in_p, low, high, 1,
			      build_int_cst (arg0_type, 0),
			      NULL_TREE))
	    return NULL_TREE;

	  in_p = n_in_p, low = n_low, high = n_high;

	  /* If the high bound is missing, but we have a nonzero low
	     bound, reverse the range so it goes from zero to the low bound
	     minus 1.  */
	  if (high == 0 && low && ! integer_zerop (low))
	    {
	      in_p = ! in_p;
	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				  integer_one_node, 0);
	      low = build_int_cst (arg0_type, 0);
	    }
	}

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
	 low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	{
	  if (low == NULL_TREE)
	    low = TYPE_MIN_VALUE (arg0_type);
	  if (high == NULL_TREE)
	    high = TYPE_MAX_VALUE (arg0_type);
	}

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
			   build_int_cst (exp_type, 0),
			   0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
			    build_int_cst (exp_type, 0),
			    0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
	return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
			 build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	 move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
	 so we don't worry about it so long as our computations on
	 the bounds don't overflow.  For unsigned, overflow is defined
	 and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			   arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			    arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
	return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	*strict_overflow_p = true;

      /* Reached both by falling through from PLUS_EXPR/MINUS_EXPR and
	 via "goto normalize" from NEGATE_EXPR above.  */
      normalize:
	/* Check for an unsigned range which has wrapped around the maximum
	   value thus making n_high < n_low, and normalize it.  */
	if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	  {
	    low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
			       integer_one_node, 0);
	    high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
				integer_one_node, 0);

	    /* If the range is of the form +/- [ x+1, x ], we won't
	       be able to normalize it.  But then, it represents the
	       whole range or the empty set, so make it
	       +/- [ -, - ].  */
	    if (tree_int_cst_equal (n_low, low)
		&& tree_int_cst_equal (n_high, high))
	      low = high = 0;
	    else
	      in_p = ! in_p;
	  }
	else
	  low = n_low, high = n_high;

	*p_low = low;
	*p_high = high;
	*p_in_p = in_p;
	return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      /* A widening conversion cannot be looked through; a narrowing or
	 same-width one can, provided the bounds fit the inner type.  */
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
	n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
	n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type, to exp,
	 a signed type,  we will be doing the comparison as unsigned.
	 The tests above have already verified that LOW and HIGH
	 are both positive.

	 So we have to ensure that we will handle large unsigned
	 values the same way that the current signed bounds treat
	 negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	{
	  tree high_positive;
	  tree equiv_type;
	  /* For fixed-point modes, we need to pass the saturating flag
	     as the 2nd parameter.  */
	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
						TYPE_SATURATING (arg0_type));
	  else
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

	  /* A range without an upper bound is, naturally, unbounded.
	     Since convert would have cropped a very large value, use
	     the max value for the destination type.  */
	  high_positive
	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
	      : TYPE_MAX_VALUE (arg0_type);

	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
					     fold_convert_loc (loc, arg0_type,
							       high_positive),
					     build_int_cst (arg0_type, 1));

	  /* If the low bound is specified, "and" the range with the
	     range for which the original unsigned value will be
	     positive.  */
	  if (low != 0)
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (n_in_p == in_p);
	    }
	  else
	    {
	      /* Otherwise, "or" the range with the range of the input
		 that will be interpreted as negative.  */
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (in_p != n_in_p);
	    }
	}

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
4209 
4210 /* Given EXP, a logical expression, set the range it is testing into
4211    variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
4212    actually being tested.  *PLOW and *PHIGH will be made of the same
4213    type as the returned expression.  If EXP is not a comparison, we
4214    will most likely not be returning a useful value and range.  Set
4215    *STRICT_OVERFLOW_P to true if the return value is only valid
4216    because signed overflow is undefined; otherwise, do not change
4217    *STRICT_OVERFLOW_P.  */
4218 
4219 tree
make_range(tree exp,int * pin_p,tree * plow,tree * phigh,bool * strict_overflow_p)4220 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4221 	    bool *strict_overflow_p)
4222 {
4223   enum tree_code code;
4224   tree arg0, arg1 = NULL_TREE;
4225   tree exp_type, nexp;
4226   int in_p;
4227   tree low, high;
4228   location_t loc = EXPR_LOCATION (exp);
4229 
4230   /* Start with simply saying "EXP != 0" and then look at the code of EXP
4231      and see if we can refine the range.  Some of the cases below may not
4232      happen, but it doesn't seem worth worrying about this.  We "continue"
4233      the outer loop when we've changed something; otherwise we "break"
4234      the switch, which will "break" the while.  */
4235 
4236   in_p = 0;
4237   low = high = build_int_cst (TREE_TYPE (exp), 0);
4238 
4239   while (1)
4240     {
4241       code = TREE_CODE (exp);
4242       exp_type = TREE_TYPE (exp);
4243       arg0 = NULL_TREE;
4244 
4245       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4246 	{
4247 	  if (TREE_OPERAND_LENGTH (exp) > 0)
4248 	    arg0 = TREE_OPERAND (exp, 0);
4249 	  if (TREE_CODE_CLASS (code) == tcc_binary
4250 	      || TREE_CODE_CLASS (code) == tcc_comparison
4251 	      || (TREE_CODE_CLASS (code) == tcc_expression
4252 		  && TREE_OPERAND_LENGTH (exp) > 1))
4253 	    arg1 = TREE_OPERAND (exp, 1);
4254 	}
4255       if (arg0 == NULL_TREE)
4256 	break;
4257 
4258       nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4259 			      &high, &in_p, strict_overflow_p);
4260       if (nexp == NULL_TREE)
4261 	break;
4262       exp = nexp;
4263     }
4264 
4265   /* If EXP is a constant, we can evaluate whether this is true or false.  */
4266   if (TREE_CODE (exp) == INTEGER_CST)
4267     {
4268       in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4269 						 exp, 0, low, 0))
4270 		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
4271 						    exp, 1, high, 1)));
4272       low = high = 0;
4273       exp = 0;
4274     }
4275 
4276   *pin_p = in_p, *plow = low, *phigh = high;
4277   return exp;
4278 }
4279 
4280 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4281    type, TYPE, return an expression to test if EXP is in (or out of, depending
4282    on IN_P) the range.  Return 0 if the test couldn't be created.  */
4283 
tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  /* An "out of range" test is the inversion of the "in range" test.  */
  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue_loc (loc, value);

      return 0;
    }

  /* A fully unbounded range is always true; keep EXP only for its
     side effects.  */
  if (low == 0 && high == 0)
    return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);

  /* Half-bounded ranges reduce to a single comparison.  */
  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			fold_convert_loc (loc, etype, low));

  /* A single-value range is an equality test.  */
  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			fold_convert_loc (loc, etype, low));

  /* [0, high]: do the comparison unsigned so a single <= suffices.  */
  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      /* Compute the double-word image of the signed maximum of a type
	 with ETYPE's precision, to compare against HIGH.  */
      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = HOST_WIDE_INT_M1U;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      /* Make sure the signed type really has the same precision;
		 build a fresh integer type if it does not.  */
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
			      build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.
     First make sure that arithmetics in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					    TYPE_UNSIGNED (etype));

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  /* VALUE is the width of the range, HIGH - LOW.  */
  value = const_binop (MINUS_EXPR, high, low);


  /* For pointers, shift the range to start at zero with a pointer-plus
     of -LOW and recurse on [0, VALUE].  */
  if (POINTER_TYPE_P (etype))
    {
      if (value != 0 && !TREE_OVERFLOW (value))
	{
	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
          return build_range_check (loc, type,
			     	    fold_build_pointer_plus_loc (loc, exp, low),
			            1, build_int_cst (etype, 0), value);
	}
      return 0;
    }

  /* Likewise for integers: recurse on EXP - LOW in [0, VALUE].  */
  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
4423 
4424 /* Return the predecessor of VAL in its type, handling the infinite case.  */
4425 
4426 static tree
range_predecessor(tree val)4427 range_predecessor (tree val)
4428 {
4429   tree type = TREE_TYPE (val);
4430 
4431   if (INTEGRAL_TYPE_P (type)
4432       && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4433     return 0;
4434   else
4435     return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4436 }
4437 
4438 /* Return the successor of VAL in its type, handling the infinite case.  */
4439 
4440 static tree
range_successor(tree val)4441 range_successor (tree val)
4442 {
4443   tree type = TREE_TYPE (val);
4444 
4445   if (INTEGRAL_TYPE_P (type)
4446       && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4447     return 0;
4448   else
4449     return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4450 }
4451 
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.

   IN0_P/LOW0/HIGH0 and IN1_P/LOW1/HIGH1 describe the two input ranges:
   IN*_P is nonzero for an inclusive range ("value is in [low, high]") and
   zero for an exclusive one ("value is NOT in [low, high]"); a NULL low
   or high bound stands for minus resp. plus infinity.  On success,
   *PIN_P, *PLOW and *PHIGH receive the merged range in the same
   representation.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  /* Whether the two ranges share the same lower / upper bound.  A missing
     bound (NULL, i.e. infinity) only matches another missing bound.  */
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
	         minimum values of the given type.  For
	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
	         return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
	        {
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
4662 
4663 
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   LOC is the location of the original expression and TYPE is the
   type of the COND_EXPR being folded.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
	        have already been folded to Y-X, check for that. */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return pedantic_non_lvalue_loc (loc,
				    fold_convert_loc (loc, type,
						  negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert_loc (loc, signed_type_for
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert_loc (loc, type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
      && operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				   comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					  fold_convert_loc (loc, type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				   comp_op1, comp_op0);
	      return pedantic_non_lvalue_loc (loc,
					  fold_convert_loc (loc, type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue_loc (loc,
					fold_convert_loc (loc, type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	if (TREE_CODE (arg1) == INTEGER_CST)
	  break;
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert_loc (loc, type, arg01);
	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
	   MIN_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2), with the same care
	   as above.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc,
					    fold_convert_loc (loc, type, tem));
	  }
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
	   MAX_EXPR, to preserve the signedness of the comparison.  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1)),
				OEP_ONLY_CONST))
	  {
	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
				   fold_convert_loc (loc, TREE_TYPE (arg00),
						     arg2));
	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	  }
	break;
      case NE_EXPR:
	break;
      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
4972 
4973 
4974 
/* Nonzero when it is worth evaluating both arms of a logical operation
   unconditionally: used below (e.g. by fold_range_test) to decide whether
   a short-circuit &&/|| may be rewritten into a non-short-circuit form.
   Targets may override this; the default keys off BRANCH_COST.  */
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif
4980 
4981 /* EXP is some logical combination of boolean tests.  See if we can
4982    merge it into some range test.  Return the new tree if so.  */
4983 
4984 static tree
fold_range_test(location_t loc,enum tree_code code,tree type,tree op0,tree op1)4985 fold_range_test (location_t loc, enum tree_code code, tree type,
4986 		 tree op0, tree op1)
4987 {
4988   int or_op = (code == TRUTH_ORIF_EXPR
4989 	       || code == TRUTH_OR_EXPR);
4990   int in0_p, in1_p, in_p;
4991   tree low0, low1, low, high0, high1, high;
4992   bool strict_overflow_p = false;
4993   tree tem, lhs, rhs;
4994   const char * const warnmsg = G_("assuming signed overflow does not occur "
4995 				  "when simplifying range test");
4996 
4997   if (!INTEGRAL_TYPE_P (type))
4998     return 0;
4999 
5000   lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5001   rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5002 
5003   /* If this is an OR operation, invert both sides; we will invert
5004      again at the end.  */
5005   if (or_op)
5006     in0_p = ! in0_p, in1_p = ! in1_p;
5007 
5008   /* If both expressions are the same, if we can merge the ranges, and we
5009      can build the range test, return it or it inverted.  If one of the
5010      ranges is always true or always false, consider it to be the same
5011      expression as the other.  */
5012   if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5013       && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5014 		       in1_p, low1, high1)
5015       && 0 != (tem = (build_range_check (loc, type,
5016 					 lhs != 0 ? lhs
5017 					 : rhs != 0 ? rhs : integer_zero_node,
5018 					 in_p, low, high))))
5019     {
5020       if (strict_overflow_p)
5021 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5022       return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5023     }
5024 
5025   /* On machines where the branch cost is expensive, if this is a
5026      short-circuited branch and the underlying object on both sides
5027      is the same, make a non-short-circuit operation.  */
5028   else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5029 	   && lhs != 0 && rhs != 0
5030 	   && (code == TRUTH_ANDIF_EXPR
5031 	       || code == TRUTH_ORIF_EXPR)
5032 	   && operand_equal_p (lhs, rhs, 0))
5033     {
5034       /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
5035 	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5036 	 which cases we can't do this.  */
5037       if (simple_operand_p (lhs))
5038 	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5039 			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5040 			   type, op0, op1);
5041 
5042       else if (!lang_hooks.decls.global_bindings_p ()
5043 	       && !CONTAINS_PLACEHOLDER_P (lhs))
5044 	{
5045 	  tree common = save_expr (lhs);
5046 
5047 	  if (0 != (lhs = build_range_check (loc, type, common,
5048 					     or_op ? ! in0_p : in0_p,
5049 					     low0, high0))
5050 	      && (0 != (rhs = build_range_check (loc, type, common,
5051 						 or_op ? ! in1_p : in1_p,
5052 						 low1, high1))))
5053 	    {
5054 	      if (strict_overflow_p)
5055 		fold_overflow_warning (warnmsg,
5056 				       WARN_STRICT_OVERFLOW_COMPARISON);
5057 	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5058 				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5059 				 type, lhs, rhs);
5060 	    }
5061 	}
5062     }
5063 
5064   return 0;
5065 }
5066 
5067 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5068    bit value.  Arrange things so the extra bits will be set to zero if and
5069    only if C is signed-extended to its full width.  If MASK is nonzero,
5070    it is an INTEGER_CST that should be AND'ed with the extra bits.  */
5071 
5072 static tree
unextend(tree c,int p,int unsignedp,tree mask)5073 unextend (tree c, int p, int unsignedp, tree mask)
5074 {
5075   tree type = TREE_TYPE (c);
5076   int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5077   tree temp;
5078 
5079   if (p == modesize || unsignedp)
5080     return c;
5081 
5082   /* We work by getting just the sign bit into the low-order bit, then
5083      into the high-order bit, then sign-extend.  We then XOR that value
5084      with C.  */
5085   temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5086   temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5087 
5088   /* We must use a signed type in order to get an arithmetic right shift.
5089      However, we must also avoid introducing accidental overflows, so that
5090      a subsequent call to integer_zerop will work.  Hence we must
5091      do the type conversion here.  At this point, the constant is either
5092      zero or one, and the conversion to a signed type can never overflow.
5093      We could get an overflow if this conversion is done anywhere else.  */
5094   if (TYPE_UNSIGNED (type))
5095     temp = fold_convert (signed_type_for (type), temp);
5096 
5097   temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5098   temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5099   if (mask != 0)
5100     temp = const_binop (BIT_AND_EXPR, temp,
5101 			fold_convert (TREE_TYPE (c), mask));
5102   /* If necessary, convert the type back to match the type of C.  */
5103   if (TYPE_UNSIGNED (type))
5104     temp = fold_convert (type, temp);
5105 
5106   return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5107 }
5108 
5109 /* For an expression that has the form
5110      (A && B) || ~B
5111    or
5112      (A || B) && ~B,
5113    we can drop one of the inner expressions and simplify to
5114      A || ~B
5115    or
5116      A && ~B
5117    LOC is the location of the resulting expression.  OP is the inner
5118    logical operation; the left-hand side in the examples above, while CMPOP
5119    is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
5120    removing a condition that guards another, as in
5121      (A != NULL && A->...) || A == NULL
5122    which we must not transform.  If RHS_ONLY is true, only eliminate the
5123    right-most operand of the inner logical operation.  */
5124 
5125 static tree
merge_truthop_with_opposite_arm(location_t loc,tree op,tree cmpop,bool rhs_only)5126 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5127 				 bool rhs_only)
5128 {
5129   tree type = TREE_TYPE (cmpop);
5130   enum tree_code code = TREE_CODE (cmpop);
5131   enum tree_code truthop_code = TREE_CODE (op);
5132   tree lhs = TREE_OPERAND (op, 0);
5133   tree rhs = TREE_OPERAND (op, 1);
5134   tree orig_lhs = lhs, orig_rhs = rhs;
5135   enum tree_code rhs_code = TREE_CODE (rhs);
5136   enum tree_code lhs_code = TREE_CODE (lhs);
5137   enum tree_code inv_code;
5138 
5139   if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5140     return NULL_TREE;
5141 
5142   if (TREE_CODE_CLASS (code) != tcc_comparison)
5143     return NULL_TREE;
5144 
5145   if (rhs_code == truthop_code)
5146     {
5147       tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5148       if (newrhs != NULL_TREE)
5149 	{
5150 	  rhs = newrhs;
5151 	  rhs_code = TREE_CODE (rhs);
5152 	}
5153     }
5154   if (lhs_code == truthop_code && !rhs_only)
5155     {
5156       tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5157       if (newlhs != NULL_TREE)
5158 	{
5159 	  lhs = newlhs;
5160 	  lhs_code = TREE_CODE (lhs);
5161 	}
5162     }
5163 
5164   inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5165   if (inv_code == rhs_code
5166       && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5167       && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5168     return lhs;
5169   if (!rhs_only && inv_code == lhs_code
5170       && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5171       && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5172     return rhs;
5173   if (rhs != orig_rhs || lhs != orig_lhs)
5174     return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5175 			    lhs, rhs);
5176   return NULL_TREE;
5177 }
5178 
5179 /* Find ways of folding logical expressions of LHS and RHS:
5180    Try to merge two comparisons to the same innermost item.
5181    Look for range tests like "ch >= '0' && ch <= '9'".
5182    Look for combinations of simple terms on machines with expensive branches
5183    and evaluate the RHS unconditionally.
5184 
5185    For example, if we have p->a == 2 && p->b == 4 and we can make an
5186    object large enough to span both A and B, we can do this with a comparison
5187    against the object ANDed with the a mask.
5188 
5189    If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5190    operations to do this with one comparison.
5191 
5192    We check for both normal comparisons and the BIT_AND_EXPRs made this by
5193    function and the one above.
5194 
5195    CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
5196    TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5197 
5198    TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5199    two operands.
5200 
5201    We return the simplified tree or 0 if no optimization is possible.  */
5202 
static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  enum tree_code wanted_code;
  enum tree_code lcode, rcode;
  /* Naming convention for the variables below: the first letter says which
     comparison the value belongs to ("l" = LHS, "r" = RHS of the logical
     operation), the second letter says which operand of that comparison
     ("l" = left, "r" = right).  E.g. rl_arg is the left operand of the
     right-hand comparison.  An "x" prefix marks a bit position re-expressed
     relative to the start of the wider field chosen by get_best_mode.  */
  tree ll_arg, lr_arg, rl_arg, rr_arg;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
  tree l_const, r_const;
  tree lntype, rntype, result;
  HOST_WIDE_INT first_bit, end_bit;
  int volatilep;

  /* Start by getting the comparison codes.  Fail if anything is volatile.
     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
     it were surrounded with a NE_EXPR.  */

  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);

  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
    {
      lhs = build2 (NE_EXPR, truth_type, lhs,
		    build_int_cst (TREE_TYPE (lhs), 0));
      lcode = NE_EXPR;
    }

  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
    {
      rhs = build2 (NE_EXPR, truth_type, rhs,
		    build_int_cst (TREE_TYPE (rhs), 0));
      rcode = NE_EXPR;
    }

  if (TREE_CODE_CLASS (lcode) != tcc_comparison
      || TREE_CODE_CLASS (rcode) != tcc_comparison)
    return 0;

  ll_arg = TREE_OPERAND (lhs, 0);
  lr_arg = TREE_OPERAND (lhs, 1);
  rl_arg = TREE_OPERAND (rhs, 0);
  rr_arg = TREE_OPERAND (rhs, 1);

  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
  if (simple_operand_p (ll_arg)
      && simple_operand_p (lr_arg))
    {
      if (operand_equal_p (ll_arg, rl_arg, 0)
          && operand_equal_p (lr_arg, rr_arg, 0))
	{
          result = combine_comparisons (loc, code, lcode, rcode,
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
      else if (operand_equal_p (ll_arg, rr_arg, 0)
               && operand_equal_p (lr_arg, rl_arg, 0))
	{
          result = combine_comparisons (loc, code, lcode,
					swap_tree_comparison (rcode),
					truth_type, ll_arg, lr_arg);
	  if (result)
	    return result;
	}
    }

  /* From here on the short-circuit forms are treated like their
     unconditional counterparts.  */
  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);

  /* If the RHS can be evaluated unconditionally and its operands are
     simple, it wins to evaluate the RHS unconditionally on machines
     with expensive branches.  In this case, this isn't a comparison
     that can be merged.  */

  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
		   false) >= 2
      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
      && simple_operand_p (rl_arg)
      && simple_operand_p (rr_arg))
    {
      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
      if (code == TRUTH_OR_EXPR
	  && lcode == NE_EXPR && integer_zerop (lr_arg)
	  && rcode == NE_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, NE_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));

      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
      if (code == TRUTH_AND_EXPR
	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
	return build2_loc (loc, EQ_EXPR, truth_type,
			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
				   ll_arg, rl_arg),
			   build_int_cst (TREE_TYPE (ll_arg), 0));
    }

  /* See if the comparisons can be merged.  Then get all the parameters for
     each side.  */

  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR))
    return 0;

  volatilep = 0;
  ll_inner = decode_field_reference (loc, ll_arg,
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask,
				     &ll_and_mask);
  lr_inner = decode_field_reference (loc, lr_arg,
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask,
				     &lr_and_mask);
  rl_inner = decode_field_reference (loc, rl_arg,
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask,
				     &rl_and_mask);
  rr_inner = decode_field_reference (loc, rr_arg,
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask,
				     &rr_and_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (lr_arg) == INTEGER_CST
      && TREE_CODE (rr_arg) == INTEGER_CST)
    l_const = lr_arg, r_const = rr_arg;
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;
  else
    l_const = r_const = 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */

  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	{
	  /* Make the left operand unsigned, since we are only interested
	     in the value of one bit.  Otherwise we are doing the wrong
	     thing below.  */
	  ll_unsignedp = 1;
	  l_const = ll_mask;
	}
      else
	return 0;
    }

  /* This is analogous to the code for l_const above.  */
  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	{
	  rl_unsignedp = 1;
	  r_const = rl_mask;
	}
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  /* Round the start of the combined field down to a mode-size boundary.  */
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

  /* Bit positions so far count from the low-order end; mirror them within
     the combined field for big-endian targets.  */
  if (BYTES_BIG_ENDIAN)
    {
      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
    }

  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
			 size_int (xrl_bitpos));

  /* Shift each constant into its field position; if it then has bits set
     outside its own mask, the comparison's outcome is already known.  */
  if (l_const)
    {
      l_const = fold_convert_loc (loc, lntype, l_const);
      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
						     lntype, ll_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }
  if (r_const)
    {
      r_const = fold_convert_loc (loc, lntype, r_const);
      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
					fold_build1_loc (loc, BIT_NOT_EXPR,
						     lntype, rl_mask))))
	{
	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);

	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
	}
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
	  /* Make sure the two fields on the right
	     correspond to the left without being swapped.  */
	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

      if (BYTES_BIG_ENDIAN)
	{
	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
	}

      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
							    rntype, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the operands are the
	 same size and the bits being compared are in the same position
	 then we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
	{
	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);

	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  if (! all_ones_mask_p (lr_mask, rnbitsize))
	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.

	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
	 the mask must be shifted to account for the shift done by
	 make_bit_field_ref.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	{
	  tree type;

	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
				    ll_bitsize + rl_bitsize,
				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
				    lr_bitsize + rr_bitsize,
				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);

	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
				 size_int (MIN (xll_bitpos, xrl_bitpos)));
	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
				 size_int (MIN (xlr_bitpos, xrr_bitpos)));

	  /* Convert to the smaller type before masking out unwanted bits.  */
	  type = lntype;
	  if (lntype != rntype)
	    {
	      if (lnbitsize > rnbitsize)
		{
		  lhs = fold_convert_loc (loc, rntype, lhs);
		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
		  type = rntype;
		}
	      else if (lnbitsize < rnbitsize)
		{
		  rhs = fold_convert_loc (loc, lntype, rhs);
		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
		  type = lntype;
		}
	    }

	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);

	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);

	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
	}

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
	  return constant_boolean_node (true, truth_type);
	}
      else
	{
	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
	  return constant_boolean_node (false, truth_type);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);

  return build2_loc (loc, wanted_code, truth_type, result,
		     const_binop (BIT_IOR_EXPR, l_const, r_const));
}
5607 
5608 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5609    constant.  */
5610 
5611 static tree
optimize_minmax_comparison(location_t loc,enum tree_code code,tree type,tree op0,tree op1)5612 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5613 			    tree op0, tree op1)
5614 {
5615   tree arg0 = op0;
5616   enum tree_code op_code;
5617   tree comp_const;
5618   tree minmax_const;
5619   int consts_equal, consts_lt;
5620   tree inner;
5621 
5622   STRIP_SIGN_NOPS (arg0);
5623 
5624   op_code = TREE_CODE (arg0);
5625   minmax_const = TREE_OPERAND (arg0, 1);
5626   comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5627   consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5628   consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5629   inner = TREE_OPERAND (arg0, 0);
5630 
5631   /* If something does not permit us to optimize, return the original tree.  */
5632   if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5633       || TREE_CODE (comp_const) != INTEGER_CST
5634       || TREE_OVERFLOW (comp_const)
5635       || TREE_CODE (minmax_const) != INTEGER_CST
5636       || TREE_OVERFLOW (minmax_const))
5637     return NULL_TREE;
5638 
5639   /* Now handle all the various comparison codes.  We only handle EQ_EXPR
5640      and GT_EXPR, doing the rest with recursive calls using logical
5641      simplifications.  */
5642   switch (code)
5643     {
5644     case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
5645       {
5646 	tree tem
5647 	  = optimize_minmax_comparison (loc,
5648 					invert_tree_comparison (code, false),
5649 					type, op0, op1);
5650 	if (tem)
5651 	  return invert_truthvalue_loc (loc, tem);
5652 	return NULL_TREE;
5653       }
5654 
5655     case GE_EXPR:
5656       return
5657 	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5658 		     optimize_minmax_comparison
5659 		     (loc, EQ_EXPR, type, arg0, comp_const),
5660 		     optimize_minmax_comparison
5661 		     (loc, GT_EXPR, type, arg0, comp_const));
5662 
5663     case EQ_EXPR:
5664       if (op_code == MAX_EXPR && consts_equal)
5665 	/* MAX (X, 0) == 0  ->  X <= 0  */
5666 	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5667 
5668       else if (op_code == MAX_EXPR && consts_lt)
5669 	/* MAX (X, 0) == 5  ->  X == 5   */
5670 	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5671 
5672       else if (op_code == MAX_EXPR)
5673 	/* MAX (X, 0) == -1  ->  false  */
5674 	return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5675 
5676       else if (consts_equal)
5677 	/* MIN (X, 0) == 0  ->  X >= 0  */
5678 	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5679 
5680       else if (consts_lt)
5681 	/* MIN (X, 0) == 5  ->  false  */
5682 	return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5683 
5684       else
5685 	/* MIN (X, 0) == -1  ->  X == -1  */
5686 	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5687 
5688     case GT_EXPR:
5689       if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5690 	/* MAX (X, 0) > 0  ->  X > 0
5691 	   MAX (X, 0) > 5  ->  X > 5  */
5692 	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5693 
5694       else if (op_code == MAX_EXPR)
5695 	/* MAX (X, 0) > -1  ->  true  */
5696 	return omit_one_operand_loc (loc, type, integer_one_node, inner);
5697 
5698       else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5699 	/* MIN (X, 0) > 0  ->  false
5700 	   MIN (X, 0) > 5  ->  false  */
5701 	return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5702 
5703       else
5704 	/* MIN (X, 0) > -1  ->  X > -1  */
5705 	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5706 
5707     default:
5708       return NULL_TREE;
5709     }
5710 }
5711 
5712 /* T is an integer expression that is being multiplied, divided, or taken a
5713    modulus (CODE says which and what kind of divide or modulus) by a
5714    constant C.  See if we can eliminate that operation by folding it with
5715    other operations already in T.  WIDE_TYPE, if non-null, is a type that
5716    should be used for the computation if wider than our type.
5717 
5718    For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5719    (X * 2) + (Y * 4).  We must, however, be assured that either the original
5720    expression would not overflow or that overflow is undefined for the type
5721    in the language in question.
5722 
5723    If we return a non-null expression, it is an equivalent form of the
5724    original computation, but need not be in the original type.
5725 
5726    We set *STRICT_OVERFLOW_P to true if the return values depends on
5727    signed overflow being undefined.  Otherwise we do not change
5728    *STRICT_OVERFLOW_P.  */
5729 
5730 static tree
extract_muldiv(tree t,tree c,enum tree_code code,tree wide_type,bool * strict_overflow_p)5731 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5732 		bool *strict_overflow_p)
5733 {
5734   /* To avoid exponential search depth, refuse to allow recursion past
5735      three levels.  Beyond that (1) it's highly unlikely that we'll find
5736      something interesting and (2) we've probably processed it before
5737      when we built the inner expression.  */
5738 
5739   static int depth;
5740   tree ret;
5741 
5742   if (depth > 3)
5743     return NULL;
5744 
5745   depth++;
5746   ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5747   depth--;
5748 
5749   return ret;
5750 }
5751 
5752 static tree
extract_muldiv_1(tree t,tree c,enum tree_code code,tree wide_type,bool * strict_overflow_p)5753 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5754 		  bool *strict_overflow_p)
5755 {
5756   tree type = TREE_TYPE (t);
5757   enum tree_code tcode = TREE_CODE (t);
5758   tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5759 				   > GET_MODE_SIZE (TYPE_MODE (type)))
5760 		? wide_type : type);
5761   tree t1, t2;
5762   int same_p = tcode == code;
5763   tree op0 = NULL_TREE, op1 = NULL_TREE;
5764   bool sub_strict_overflow_p;
5765 
5766   /* Don't deal with constants of zero here; they confuse the code below.  */
5767   if (integer_zerop (c))
5768     return NULL_TREE;
5769 
5770   if (TREE_CODE_CLASS (tcode) == tcc_unary)
5771     op0 = TREE_OPERAND (t, 0);
5772 
5773   if (TREE_CODE_CLASS (tcode) == tcc_binary)
5774     op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5775 
5776   /* Note that we need not handle conditional operations here since fold
5777      already handles those cases.  So just do arithmetic here.  */
5778   switch (tcode)
5779     {
5780     case INTEGER_CST:
5781       /* For a constant, we can always simplify if we are a multiply
5782 	 or (for divide and modulus) if it is a multiple of our constant.  */
5783       if (code == MULT_EXPR
5784 	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5785 	return const_binop (code, fold_convert (ctype, t),
5786 			    fold_convert (ctype, c));
5787       break;
5788 
5789     CASE_CONVERT: case NON_LVALUE_EXPR:
5790       /* If op0 is an expression ...  */
5791       if ((COMPARISON_CLASS_P (op0)
5792 	   || UNARY_CLASS_P (op0)
5793 	   || BINARY_CLASS_P (op0)
5794 	   || VL_EXP_CLASS_P (op0)
5795 	   || EXPRESSION_CLASS_P (op0))
5796 	  /* ... and has wrapping overflow, and its type is smaller
5797 	     than ctype, then we cannot pass through as widening.  */
5798 	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5799 	       && (TYPE_PRECISION (ctype)
5800 	           > TYPE_PRECISION (TREE_TYPE (op0))))
5801 	      /* ... or this is a truncation (t is narrower than op0),
5802 		 then we cannot pass through this narrowing.  */
5803 	      || (TYPE_PRECISION (type)
5804 		  < TYPE_PRECISION (TREE_TYPE (op0)))
5805 	      /* ... or signedness changes for division or modulus,
5806 		 then we cannot pass through this conversion.  */
5807 	      || (code != MULT_EXPR
5808 		  && (TYPE_UNSIGNED (ctype)
5809 		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
5810 	      /* ... or has undefined overflow while the converted to
5811 		 type has not, we cannot do the operation in the inner type
5812 		 as that would introduce undefined overflow.  */
5813 	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5814 		  && !TYPE_OVERFLOW_UNDEFINED (type))))
5815 	break;
5816 
5817       /* Pass the constant down and see if we can make a simplification.  If
5818 	 we can, replace this expression with the inner simplification for
5819 	 possible later conversion to our or some other type.  */
5820       if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5821 	  && TREE_CODE (t2) == INTEGER_CST
5822 	  && !TREE_OVERFLOW (t2)
5823 	  && (0 != (t1 = extract_muldiv (op0, t2, code,
5824 					 code == MULT_EXPR
5825 					 ? ctype : NULL_TREE,
5826 					 strict_overflow_p))))
5827 	return t1;
5828       break;
5829 
5830     case ABS_EXPR:
5831       /* If widening the type changes it from signed to unsigned, then we
5832          must avoid building ABS_EXPR itself as unsigned.  */
5833       if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5834         {
5835           tree cstype = (*signed_type_for) (ctype);
5836           if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5837 	      != 0)
5838             {
5839               t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5840               return fold_convert (ctype, t1);
5841             }
5842           break;
5843         }
5844       /* If the constant is negative, we cannot simplify this.  */
5845       if (tree_int_cst_sgn (c) == -1)
5846         break;
5847       /* FALLTHROUGH */
5848     case NEGATE_EXPR:
5849       /* For division and modulus, type can't be unsigned, as e.g.
5850 	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5851 	 For signed types, even with wrapping overflow, this is fine.  */
5852       if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5853 	break;
5854       if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5855 	  != 0)
5856 	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5857       break;
5858 
5859     case MIN_EXPR:  case MAX_EXPR:
5860       /* If widening the type changes the signedness, then we can't perform
5861 	 this optimization as that changes the result.  */
5862       if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5863 	break;
5864 
5865       /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
5866       sub_strict_overflow_p = false;
5867       if ((t1 = extract_muldiv (op0, c, code, wide_type,
5868 				&sub_strict_overflow_p)) != 0
5869 	  && (t2 = extract_muldiv (op1, c, code, wide_type,
5870 				   &sub_strict_overflow_p)) != 0)
5871 	{
5872 	  if (tree_int_cst_sgn (c) < 0)
5873 	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5874 	  if (sub_strict_overflow_p)
5875 	    *strict_overflow_p = true;
5876 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5877 			      fold_convert (ctype, t2));
5878 	}
5879       break;
5880 
5881     case LSHIFT_EXPR:  case RSHIFT_EXPR:
5882       /* If the second operand is constant, this is a multiplication
5883 	 or floor division, by a power of two, so we can treat it that
5884 	 way unless the multiplier or divisor overflows.  Signed
5885 	 left-shift overflow is implementation-defined rather than
5886 	 undefined in C90, so do not convert signed left shift into
5887 	 multiplication.  */
5888       if (TREE_CODE (op1) == INTEGER_CST
5889 	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5890 	  /* const_binop may not detect overflow correctly,
5891 	     so check for it explicitly here.  */
5892 	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5893 	  && TREE_INT_CST_HIGH (op1) == 0
5894 	  && 0 != (t1 = fold_convert (ctype,
5895 				      const_binop (LSHIFT_EXPR,
5896 						   size_one_node,
5897 						   op1)))
5898 	  && !TREE_OVERFLOW (t1))
5899 	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5900 				       ? MULT_EXPR : FLOOR_DIV_EXPR,
5901 				       ctype,
5902 				       fold_convert (ctype, op0),
5903 				       t1),
5904 			       c, code, wide_type, strict_overflow_p);
5905       break;
5906 
5907     case PLUS_EXPR:  case MINUS_EXPR:
5908       /* See if we can eliminate the operation on both sides.  If we can, we
5909 	 can return a new PLUS or MINUS.  If we can't, the only remaining
5910 	 cases where we can do anything are if the second operand is a
5911 	 constant.  */
5912       sub_strict_overflow_p = false;
5913       t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5914       t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5915       if (t1 != 0 && t2 != 0
5916 	  && (code == MULT_EXPR
5917 	      /* If not multiplication, we can only do this if both operands
5918 		 are divisible by c.  */
5919 	      || (multiple_of_p (ctype, op0, c)
5920 	          && multiple_of_p (ctype, op1, c))))
5921 	{
5922 	  if (sub_strict_overflow_p)
5923 	    *strict_overflow_p = true;
5924 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5925 			      fold_convert (ctype, t2));
5926 	}
5927 
5928       /* If this was a subtraction, negate OP1 and set it to be an addition.
5929 	 This simplifies the logic below.  */
5930       if (tcode == MINUS_EXPR)
5931 	{
5932 	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
5933 	  /* If OP1 was not easily negatable, the constant may be OP0.  */
5934 	  if (TREE_CODE (op0) == INTEGER_CST)
5935 	    {
5936 	      tree tem = op0;
5937 	      op0 = op1;
5938 	      op1 = tem;
5939 	      tem = t1;
5940 	      t1 = t2;
5941 	      t2 = tem;
5942 	    }
5943 	}
5944 
5945       if (TREE_CODE (op1) != INTEGER_CST)
5946 	break;
5947 
5948       /* If either OP1 or C are negative, this optimization is not safe for
5949 	 some of the division and remainder types while for others we need
5950 	 to change the code.  */
5951       if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5952 	{
5953 	  if (code == CEIL_DIV_EXPR)
5954 	    code = FLOOR_DIV_EXPR;
5955 	  else if (code == FLOOR_DIV_EXPR)
5956 	    code = CEIL_DIV_EXPR;
5957 	  else if (code != MULT_EXPR
5958 		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5959 	    break;
5960 	}
5961 
5962       /* If it's a multiply or a division/modulus operation of a multiple
5963          of our constant, do the operation and verify it doesn't overflow.  */
5964       if (code == MULT_EXPR
5965 	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5966 	{
5967 	  op1 = const_binop (code, fold_convert (ctype, op1),
5968 			     fold_convert (ctype, c));
5969 	  /* We allow the constant to overflow with wrapping semantics.  */
5970 	  if (op1 == 0
5971 	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5972 	    break;
5973 	}
5974       else
5975 	break;
5976 
5977       /* If we have an unsigned type, we cannot widen the operation since it
5978 	 will change the result if the original computation overflowed.  */
5979       if (TYPE_UNSIGNED (ctype) && ctype != type)
5980 	break;
5981 
5982       /* If we were able to eliminate our operation from the first side,
5983 	 apply our operation to the second side and reform the PLUS.  */
5984       if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5985 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5986 
5987       /* The last case is if we are a multiply.  In that case, we can
5988 	 apply the distributive law to commute the multiply and addition
5989 	 if the multiplication of the constants doesn't overflow
5990 	 and overflow is defined.  With undefined overflow
5991 	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
5992       if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5993 	return fold_build2 (tcode, ctype,
5994 			    fold_build2 (code, ctype,
5995 					 fold_convert (ctype, op0),
5996 					 fold_convert (ctype, c)),
5997 			    op1);
5998 
5999       break;
6000 
6001     case MULT_EXPR:
6002       /* We have a special case here if we are doing something like
6003 	 (C * 8) % 4 since we know that's zero.  */
6004       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6005 	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6006 	  /* If the multiplication can overflow we cannot optimize this.  */
6007 	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6008 	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6009 	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6010 	{
6011 	  *strict_overflow_p = true;
6012 	  return omit_one_operand (type, integer_zero_node, op0);
6013 	}
6014 
6015       /* ... fall through ...  */
6016 
6017     case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
6018     case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
6019       /* If we can extract our operation from the LHS, do so and return a
6020 	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
6021 	 do something only if the second operand is a constant.  */
6022       if (same_p
6023 	  && (t1 = extract_muldiv (op0, c, code, wide_type,
6024 				   strict_overflow_p)) != 0)
6025 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6026 			    fold_convert (ctype, op1));
6027       else if (tcode == MULT_EXPR && code == MULT_EXPR
6028 	       && (t1 = extract_muldiv (op1, c, code, wide_type,
6029 					strict_overflow_p)) != 0)
6030 	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6031 			    fold_convert (ctype, t1));
6032       else if (TREE_CODE (op1) != INTEGER_CST)
6033 	return 0;
6034 
6035       /* If these are the same operation types, we can associate them
6036 	 assuming no overflow.  */
6037       if (tcode == code)
6038 	{
6039 	  double_int mul;
6040 	  bool overflow_p;
6041 	  unsigned prec = TYPE_PRECISION (ctype);
6042 	  bool uns = TYPE_UNSIGNED (ctype);
6043 	  double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
6044 	  double_int dic = tree_to_double_int (c).ext (prec, uns);
6045 	  mul = diop1.mul_with_sign (dic, false, &overflow_p);
6046 	  overflow_p = ((!uns && overflow_p)
6047 			| TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
6048 	  if (!double_int_fits_to_tree_p (ctype, mul)
6049 	      && ((uns && tcode != MULT_EXPR) || !uns))
6050 	    overflow_p = 1;
6051 	  if (!overflow_p)
6052 	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6053 				double_int_to_tree (ctype, mul));
6054 	}
6055 
6056       /* If these operations "cancel" each other, we have the main
6057 	 optimizations of this pass, which occur when either constant is a
6058 	 multiple of the other, in which case we replace this with either an
6059 	 operation or CODE or TCODE.
6060 
6061 	 If we have an unsigned type, we cannot do this since it will change
6062 	 the result if the original computation overflowed.  */
6063       if (TYPE_OVERFLOW_UNDEFINED (ctype)
6064 	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6065 	      || (tcode == MULT_EXPR
6066 		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6067 		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6068 		  && code != MULT_EXPR)))
6069 	{
6070 	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6071 	    {
6072 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6073 		*strict_overflow_p = true;
6074 	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6075 				  fold_convert (ctype,
6076 						const_binop (TRUNC_DIV_EXPR,
6077 							     op1, c)));
6078 	    }
6079 	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6080 	    {
6081 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6082 		*strict_overflow_p = true;
6083 	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
6084 				  fold_convert (ctype,
6085 						const_binop (TRUNC_DIV_EXPR,
6086 							     c, op1)));
6087 	    }
6088 	}
6089       break;
6090 
6091     default:
6092       break;
6093     }
6094 
6095   return 0;
6096 }
6097 
6098 /* Return a node which has the indicated constant VALUE (either 0 or
6099    1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6100    and is of the indicated TYPE.  */
6101 
6102 tree
constant_boolean_node(bool value,tree type)6103 constant_boolean_node (bool value, tree type)
6104 {
6105   if (type == integer_type_node)
6106     return value ? integer_one_node : integer_zero_node;
6107   else if (type == boolean_type_node)
6108     return value ? boolean_true_node : boolean_false_node;
6109   else if (TREE_CODE (type) == VECTOR_TYPE)
6110     return build_vector_from_val (type,
6111 				  build_int_cst (TREE_TYPE (type),
6112 						 value ? -1 : 0));
6113   else
6114     return fold_convert (type, value ? integer_one_node : integer_zero_node);
6115 }
6116 
6117 
6118 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6119    Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6120    CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6121    expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
6122    COND is the first argument to CODE; otherwise (as in the example
6123    given here), it is the second argument.  TYPE is the type of the
6124    original expression.  Return NULL_TREE if no simplification is
6125    possible.  */
6126 
static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  /* Type of the conditional operand and of the non-conditional operand,
     as seen by the binary operation CODE.  */
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;
  enum tree_code cond_code = COND_EXPR;

  if (TREE_CODE (cond) == COND_EXPR
      || TREE_CODE (cond) == VEC_COND_EXPR)
    {
      /* COND is a full conditional: split it into its three operands.  */
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If an arm has void type (e.g. it throws), it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it; keep that arm unchanged.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      /* COND is a bare comparison such as `x < y'; treat it as
	 `x < y ? true : false' of its own type.  */
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  /* Vector conditions must be rebuilt as VEC_COND_EXPR.  */
  if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
    cond_code = VEC_COND_EXPR;

  /* This transformation is only worthwhile if we don't have to wrap ARG
     in a SAVE_EXPR and the operation can be simplified without recursing
     on at least one of the branches once its pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg)
      && (TREE_SIDE_EFFECTS (arg)
	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
    return NULL_TREE;

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      /* Build CODE applied to ARG and the true arm, respecting which
	 side the conditional originally occupied.  */
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      /* Likewise for the false arm.  */
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  /* Check that we have simplified at least one of the branches.  */
  if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
    return NULL_TREE;

  return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
}
6198 
6199 
6200 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6201 
6202    If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6203    TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
6204    ADDEND is the same as X.
6205 
6206    X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6207    and finite.  The problematic cases are when X is zero, and its mode
6208    has signed zeros.  In the case of rounding towards -infinity,
6209    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
6210    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
6211 
6212 bool
fold_real_zero_addition_p(const_tree type,const_tree addend,int negate)6213 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6214 {
6215   if (!real_zerop (addend))
6216     return false;
6217 
6218   /* Don't allow the fold with -fsignaling-nans.  */
6219   if (HONOR_SNANS (TYPE_MODE (type)))
6220     return false;
6221 
6222   /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
6223   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6224     return true;
6225 
6226   /* In a vector or complex, we would need to check the sign of all zeros.  */
6227   if (TREE_CODE (addend) != REAL_CST)
6228     return false;
6229 
6230   /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
6231   if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6232     negate = !negate;
6233 
6234   /* The mode has signed zeros, and we have to honor their sign.
6235      In this situation, there is only one case we can return true for.
6236      X - 0 is the same as X unless rounding towards -infinity is
6237      supported.  */
6238   return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6239 }
6240 
6241 /* Subroutine of fold() that checks comparisons of built-in math
6242    functions against real constants.
6243 
6244    FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6245    operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
6246    is the type of the result and ARG0 and ARG1 are the operands of the
6247    comparison.  ARG1 must be a TREE_REAL_CST.
6248 
6249    The function returns the constant folded tree if a simplification
6250    can be made, and NULL_TREE otherwise.  */
6251 
static tree
fold_mathfn_compare (location_t loc,
		     enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      /* ARG0 is sqrt(arg); compare against the real constant ARG1.  */
      tree arg = CALL_EXPR_ARG (arg0, 0);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are all false
	     if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

	  /* sqrt(x) != y and sqrt(x) > y are always true, if y is
	     negative and we don't care about NaNs, i.e. negative
	     values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2_loc (loc, GE_EXPR, type, arg,
			      build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  /* Square the bound, rounded in the target mode.  */
	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, EQ_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2_loc (loc, code, type, arg,
			      build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  /* Square the bound, rounded in the target mode.  */
	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand_loc (loc, type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2_loc (loc, NE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, GE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.
	         ARG is evaluated twice, so wrap it in a SAVE_EXPR.  */
	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				  fold_build2_loc (loc, GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2_loc (loc, NE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2_loc (loc, code, type, arg,
				build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  arg = save_expr (arg);
	  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				  fold_build2_loc (loc, GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2_loc (loc, code, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	}
    }

  return NULL_TREE;
}
6359 
6360 /* Subroutine of fold() that optimizes comparisons against Infinities,
6361    either +Inf or -Inf.
6362 
6363    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6364    GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
6365    are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.
6366 
6367    The function returns the constant folded tree if a simplification
6368    can be made, and NULL_TREE otherwise.  */
6369 
static tree
fold_inf_compare (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).
         ARG0 is evaluated twice, so wrap it in a SAVE_EXPR.  */
      arg0 = save_expr (arg0);
      return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX
         (for -Inf: x < -DBL_MAX, via NEG and the swapped code).  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			    arg0, build_real (TREE_TYPE (arg0), max));

      /* With NaNs, x <= DBL_MAX is not the negation of x > DBL_MAX,
	 so build the explicit logical NOT instead.  */
      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
6433 
6434 /* Subroutine of fold() that optimizes comparisons of a division by
6435    a nonzero integer constant against an integer constant, i.e.
6436    X/C1 op C2.
6437 
   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6441 
6442    The function returns the constant folded tree if a simplification
6443    can be made, and NULL_TREE otherwise.  */
6444 
static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  /* ARG0 is X / C1; compute the range [LO, HI] of X values for which
     X / C1 == C2, then turn the comparison into a range check.  */
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  double_int val;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  bool overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
  val = TREE_INT_CST (arg01)
	.mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
  prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      /* Unsigned division truncates toward zero: the matching X are
	 [C1*C2, C1*C2 + (C1-1)].  */
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1));
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
      val = TREE_INT_CST (prod)
	    .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
      hi = force_fit_type_double (TREE_TYPE (arg00), val,
				  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      /* Positive divisor: the interval direction depends on the sign
	 of C2 because division truncates toward zero.  */
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  /* Negative quotient: range is [C1*C2 - (C1-1), C1*C2].  */
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	case  0:
	  /* Zero quotient: range is [-(C1-1), C1-1].  */
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case  1:
	  /* Positive quotient: range is [C1*C2, C1*C2 + (C1-1)].  */
          hi = int_const_binop (PLUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1));
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  /* Negative quotient: range is [C1*C2, C1*C2 - (C1+1)].  */
	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
	  lo = prod;
	  break;

	case  0:
	  /* Zero quotient: range is [C1+1, -(C1+1)].  */
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case  1:
	  /* Positive quotient: range is [C1*C2 + (C1+1), C1*C2].  */
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* TREE_OVERFLOW on LO/HI means the true bound fell outside the type;
     each case below degenerates the range check accordingly.  */
  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  /* NEG_OVERFLOW tells which direction the bound escaped,
	     hence whether the comparison is always false or true.  */
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
6590 
6591 
6592 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6593    equality/inequality test, then return a simplified form of the test
6594    using a sign testing.  Otherwise return NULL.  TYPE is the desired
6595    result type.  */
6596 
6597 static tree
fold_single_bit_test_into_sign_test(location_t loc,enum tree_code code,tree arg0,tree arg1,tree result_type)6598 fold_single_bit_test_into_sign_test (location_t loc,
6599 				     enum tree_code code, tree arg0, tree arg1,
6600 				     tree result_type)
6601 {
6602   /* If this is testing a single bit, we can optimize the test.  */
6603   if ((code == NE_EXPR || code == EQ_EXPR)
6604       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6605       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6606     {
6607       /* If we have (A & C) != 0 where C is the sign bit of A, convert
6608 	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
6609       tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6610 
6611       if (arg00 != NULL_TREE
6612 	  /* This is only a win if casting to a signed type is cheap,
6613 	     i.e. when arg00's type is not a partial mode.  */
6614 	  && TYPE_PRECISION (TREE_TYPE (arg00))
6615 	     == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6616 	{
6617 	  tree stype = signed_type_for (TREE_TYPE (arg00));
6618 	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6619 			      result_type,
6620 			      fold_convert_loc (loc, stype, arg00),
6621 			      build_int_cst (stype, 0));
6622 	}
6623     }
6624 
6625   return NULL_TREE;
6626 }
6627 
6628 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6629    equality/inequality test, then return a simplified form of
6630    the test using shifts and logical operations.  Otherwise return
6631    NULL.  TYPE is the desired result type.  */
6632 
tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      /* BITNUM is log2 of the single-bit mask.  */
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  This folds the two
	 shifts into one.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && tree_fits_uhwi_p (TREE_OPERAND (inner, 1))
	  && bitnum < TYPE_PRECISION (type)
	  && (tree_to_uhwi (TREE_OPERAND (inner, 1))
	      < (unsigned) (TYPE_PRECISION (type) - bitnum)))
	{
	  bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      /* Bring the tested bit down to position 0.  */
      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      /* For == 0, invert the bit before masking so the result flips.  */
      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
6708 
6709 /* Check whether we are allowed to reorder operands arg0 and arg1,
6710    such that the evaluation of arg1 occurs before arg0.  */
6711 
6712 static bool
reorder_operands_p(const_tree arg0,const_tree arg1)6713 reorder_operands_p (const_tree arg0, const_tree arg1)
6714 {
6715   if (! flag_evaluation_order)
6716       return true;
6717   if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6718     return true;
6719   return ! TREE_SIDE_EFFECTS (arg0)
6720 	 && ! TREE_SIDE_EFFECTS (arg1);
6721 }
6722 
/* Test whether it is preferable to swap two operands, ARG0 and
6724    ARG1, for example because ARG0 is an integer constant and ARG1
6725    isn't.  If REORDER is true, only recommend swapping if we can
6726    evaluate the operands in reverse order.  */
6727 
6728 bool
tree_swap_operands_p(const_tree arg0,const_tree arg1,bool reorder)6729 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6730 {
6731   STRIP_SIGN_NOPS (arg0);
6732   STRIP_SIGN_NOPS (arg1);
6733 
6734   if (TREE_CODE (arg1) == INTEGER_CST)
6735     return 0;
6736   if (TREE_CODE (arg0) == INTEGER_CST)
6737     return 1;
6738 
6739   if (TREE_CODE (arg1) == REAL_CST)
6740     return 0;
6741   if (TREE_CODE (arg0) == REAL_CST)
6742     return 1;
6743 
6744   if (TREE_CODE (arg1) == FIXED_CST)
6745     return 0;
6746   if (TREE_CODE (arg0) == FIXED_CST)
6747     return 1;
6748 
6749   if (TREE_CODE (arg1) == COMPLEX_CST)
6750     return 0;
6751   if (TREE_CODE (arg0) == COMPLEX_CST)
6752     return 1;
6753 
6754   if (TREE_CONSTANT (arg1))
6755     return 0;
6756   if (TREE_CONSTANT (arg0))
6757     return 1;
6758 
6759   if (optimize_function_for_size_p (cfun))
6760     return 0;
6761 
6762   if (reorder && flag_evaluation_order
6763       && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6764     return 0;
6765 
6766   /* It is preferable to swap two SSA_NAME to ensure a canonical form
6767      for commutative and comparison operators.  Ensuring a canonical
6768      form allows the optimizers to find additional redundancies without
6769      having to explicitly check for both orderings.  */
6770   if (TREE_CODE (arg0) == SSA_NAME
6771       && TREE_CODE (arg1) == SSA_NAME
6772       && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6773     return 1;
6774 
6775   /* Put SSA_NAMEs last.  */
6776   if (TREE_CODE (arg1) == SSA_NAME)
6777     return 0;
6778   if (TREE_CODE (arg0) == SSA_NAME)
6779     return 1;
6780 
6781   /* Put variables last.  */
6782   if (DECL_P (arg1))
6783     return 0;
6784   if (DECL_P (arg0))
6785     return 1;
6786 
6787   return 0;
6788 }
6789 
6790 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6791    ARG0 is extended to a wider type.  */
6792 
6793 static tree
fold_widened_comparison(location_t loc,enum tree_code code,tree type,tree arg0,tree arg1)6794 fold_widened_comparison (location_t loc, enum tree_code code,
6795 			 tree type, tree arg0, tree arg1)
6796 {
6797   tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6798   tree arg1_unw;
6799   tree shorter_type, outer_type;
6800   tree min, max;
6801   bool above, below;
6802 
6803   if (arg0_unw == arg0)
6804     return NULL_TREE;
6805   shorter_type = TREE_TYPE (arg0_unw);
6806 
6807 #ifdef HAVE_canonicalize_funcptr_for_compare
6808   /* Disable this optimization if we're casting a function pointer
6809      type on targets that require function pointer canonicalization.  */
6810   if (HAVE_canonicalize_funcptr_for_compare
6811       && TREE_CODE (shorter_type) == POINTER_TYPE
6812       && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6813     return NULL_TREE;
6814 #endif
6815 
6816   if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6817     return NULL_TREE;
6818 
6819   arg1_unw = get_unwidened (arg1, NULL_TREE);
6820 
6821   /* If possible, express the comparison in the shorter mode.  */
6822   if ((code == EQ_EXPR || code == NE_EXPR
6823        || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6824       && (TREE_TYPE (arg1_unw) == shorter_type
6825 	  || ((TYPE_PRECISION (shorter_type)
6826 	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6827 	      && (TYPE_UNSIGNED (shorter_type)
6828 		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6829 	  || (TREE_CODE (arg1_unw) == INTEGER_CST
6830 	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
6831 		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6832 	      && int_fits_type_p (arg1_unw, shorter_type))))
6833     return fold_build2_loc (loc, code, type, arg0_unw,
6834 			fold_convert_loc (loc, shorter_type, arg1_unw));
6835 
6836   if (TREE_CODE (arg1_unw) != INTEGER_CST
6837       || TREE_CODE (shorter_type) != INTEGER_TYPE
6838       || !int_fits_type_p (arg1_unw, shorter_type))
6839     return NULL_TREE;
6840 
6841   /* If we are comparing with the integer that does not fit into the range
6842      of the shorter type, the result is known.  */
6843   outer_type = TREE_TYPE (arg1_unw);
6844   min = lower_bound_in_type (outer_type, shorter_type);
6845   max = upper_bound_in_type (outer_type, shorter_type);
6846 
6847   above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6848 						   max, arg1_unw));
6849   below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6850 						   arg1_unw, min));
6851 
6852   switch (code)
6853     {
6854     case EQ_EXPR:
6855       if (above || below)
6856 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6857       break;
6858 
6859     case NE_EXPR:
6860       if (above || below)
6861 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6862       break;
6863 
6864     case LT_EXPR:
6865     case LE_EXPR:
6866       if (above)
6867 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6868       else if (below)
6869 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6870 
6871     case GT_EXPR:
6872     case GE_EXPR:
6873       if (above)
6874 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6875       else if (below)
6876 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6877 
6878     default:
6879       break;
6880     }
6881 
6882   return NULL_TREE;
6883 }
6884 
6885 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6886    ARG0 just the signedness is changed.  */
6887 
6888 static tree
fold_sign_changed_comparison(location_t loc,enum tree_code code,tree type,tree arg0,tree arg1)6889 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6890 			      tree arg0, tree arg1)
6891 {
6892   tree arg0_inner;
6893   tree inner_type, outer_type;
6894 
6895   if (!CONVERT_EXPR_P (arg0))
6896     return NULL_TREE;
6897 
6898   outer_type = TREE_TYPE (arg0);
6899   arg0_inner = TREE_OPERAND (arg0, 0);
6900   inner_type = TREE_TYPE (arg0_inner);
6901 
6902 #ifdef HAVE_canonicalize_funcptr_for_compare
6903   /* Disable this optimization if we're casting a function pointer
6904      type on targets that require function pointer canonicalization.  */
6905   if (HAVE_canonicalize_funcptr_for_compare
6906       && TREE_CODE (inner_type) == POINTER_TYPE
6907       && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6908     return NULL_TREE;
6909 #endif
6910 
6911   if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6912     return NULL_TREE;
6913 
6914   if (TREE_CODE (arg1) != INTEGER_CST
6915       && !(CONVERT_EXPR_P (arg1)
6916 	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6917     return NULL_TREE;
6918 
6919   if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6920       && code != NE_EXPR
6921       && code != EQ_EXPR)
6922     return NULL_TREE;
6923 
6924   if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6925     return NULL_TREE;
6926 
6927   if (TREE_CODE (arg1) == INTEGER_CST)
6928     arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6929 				  0, TREE_OVERFLOW (arg1));
6930   else
6931     arg1 = fold_convert_loc (loc, inner_type, arg1);
6932 
6933   return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6934 }
6935 
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address. MULT is the multiplicative expression.  If the
   function succeeds, the new address expression is returned.
   Otherwise NULL_TREE is returned.  LOC is the location of the
   resulting expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /*  Strip the nops that might be added when converting op1 to sizetype. */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  A NULL_TREE s below means OP1 is a plain
     constant byte offset whose factoring by the step is deferred.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Handle &x.array the same as we would handle &x.array[0].  */
  if (TREE_CODE (ref) == COMPONENT_REF
      && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
    {
      tree domain;

      /* Remember if this was a multi-dimensional array.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	mdim = true;

      domain = TYPE_DOMAIN (TREE_TYPE (ref));
      if (! domain)
	goto cont;
      itype = TREE_TYPE (domain);

      /* The array step is the size of one element.  */
      step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
      if (TREE_CODE (step) != INTEGER_CST)
	goto cont;

      if (s)
	{
	  /* The reconstructed S must match the array step exactly.  */
	  if (! tree_int_cst_equal (step, s))
	    goto cont;
	}
      else
	{
	  /* Try if delta is a multiple of step.  */
	  tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	  if (! tmp)
	    goto cont;
	  delta = tmp;
	}

      /* Only fold here if we can verify we do not overflow one
	 dimension of a multi-dimensional array.  */
      if (mdim)
	{
	  tree tmp;

	  if (!TYPE_MIN_VALUE (domain)
	      || !TYPE_MAX_VALUE (domain)
	      || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
	    goto cont;

	  /* The new index is min + delta (we act as if indexing
	     element 0); it must stay within the domain.  */
	  tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				 fold_convert_loc (loc, itype,
						   TYPE_MIN_VALUE (domain)),
				 fold_convert_loc (loc, itype, delta));
	  if (TREE_CODE (tmp) != INTEGER_CST
	      || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
	    goto cont;
	}

      /* We found a suitable component reference.  Build
	 &x.array[min + delta] from &x.array.  */

      pref = TREE_OPERAND (addr, 0);
      ret = copy_node (pref);
      SET_EXPR_LOCATION (ret, loc);

      ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
			fold_build2_loc
			  (loc, PLUS_EXPR, itype,
			   fold_convert_loc (loc, itype,
					     TYPE_MIN_VALUE
					       (TYPE_DOMAIN (TREE_TYPE (ref)))),
			   fold_convert_loc (loc, itype, delta)),
			NULL_TREE, NULL_TREE);
      return build_fold_addr_expr_loc (loc, ret);
    }

cont:

  /* Walk down the reference chain looking for an ARRAY_REF whose
     element size matches S (or divides DELTA).  */
  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  tree domain;

	  /* Remember if this was a multi-dimensional array.  */
	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	    mdim = true;

	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! domain)
	    continue;
	  itype = TREE_TYPE (domain);

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
                continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  /* Only fold here if we can verify we do not overflow one
	     dimension of a multi-dimensional array.  */
	  if (mdim)
	    {
	      tree tmp;

	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
		  || !TYPE_MAX_VALUE (domain)
		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
		continue;

	      /* The adjusted index is old index + delta; it must not
		 exceed the domain's maximum.  */
	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				     fold_convert_loc (loc, itype,
						       TREE_OPERAND (ref, 1)),
				     fold_convert_loc (loc, itype, delta));
	      if (!tmp
		  || TREE_CODE (tmp) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
		continue;
	    }

	  break;
	}
      else
	mdim = false;

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  /* Duplicate the chain of component references down to REF so the
     original expression is left untouched.  */
  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  /* Replace the index of the innermost copied ARRAY_REF with
     old index + delta.  */
  TREE_OPERAND (pos, 1)
    = fold_build2_loc (loc, PLUS_EXPR, itype,
		       fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
		       fold_convert_loc (loc, itype, delta));
  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
7146 
7147 
7148 /* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
7149    means A >= Y && A != MAX, but in this case we know that
7150    A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
7151 
7152 static tree
fold_to_nonsharp_ineq_using_bound(location_t loc,tree ineq,tree bound)7153 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7154 {
7155   tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7156 
7157   if (TREE_CODE (bound) == LT_EXPR)
7158     a = TREE_OPERAND (bound, 0);
7159   else if (TREE_CODE (bound) == GT_EXPR)
7160     a = TREE_OPERAND (bound, 1);
7161   else
7162     return NULL_TREE;
7163 
7164   typea = TREE_TYPE (a);
7165   if (!INTEGRAL_TYPE_P (typea)
7166       && !POINTER_TYPE_P (typea))
7167     return NULL_TREE;
7168 
7169   if (TREE_CODE (ineq) == LT_EXPR)
7170     {
7171       a1 = TREE_OPERAND (ineq, 1);
7172       y = TREE_OPERAND (ineq, 0);
7173     }
7174   else if (TREE_CODE (ineq) == GT_EXPR)
7175     {
7176       a1 = TREE_OPERAND (ineq, 0);
7177       y = TREE_OPERAND (ineq, 1);
7178     }
7179   else
7180     return NULL_TREE;
7181 
7182   if (TREE_TYPE (a1) != typea)
7183     return NULL_TREE;
7184 
7185   if (POINTER_TYPE_P (typea))
7186     {
7187       /* Convert the pointer types into integer before taking the difference.  */
7188       tree ta = fold_convert_loc (loc, ssizetype, a);
7189       tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7190       diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7191     }
7192   else
7193     diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7194 
7195   if (!diff || !integer_onep (diff))
7196    return NULL_TREE;
7197 
7198   return fold_build2_loc (loc, GE_EXPR, type, a, y);
7199 }
7200 
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  /* Split ARG0 into a product ARG00 * ARG01, treating a lone operand
     as a multiplication by one.  */
  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  /* Likewise split ARG1 into ARG10 * ARG11.  */
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2 get rid of that sign for
	 the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  /* Look for a multiplicand shared between both products; SAME gets
     the common factor, ALT0/ALT1 the remaining multiplicands.  */
  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (tree_fits_shwi_p (arg01)
	   && tree_fits_shwi_p (arg11))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = tree_to_shwi (arg01);
      int11 = tree_to_shwi (arg11);

      /* Move min of absolute values to int11.  */
      if (absu_hwi (int01) < absu_hwi (int11))
        {
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = arg01;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
	     increased the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
        {
	  /* Factor INT11 out of the larger constant, keeping the
	     quotient as the new multiplicand of the first product.  */
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
			      build_int_cst (TREE_TYPE (arg00),
					     int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  /* Rebuild the expression as (ALT0 +- ALT1) * SAME.  */
  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			fold_build2_loc (loc, code, type,
				     fold_convert_loc (loc, type, alt0),
				     fold_convert_loc (loc, type, alt1)),
			fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
7322 
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  /* Fail if the constant does not fit into the caller's buffer.  */
  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  /* Walk the value byte by byte, from the least significant bits of
     the host representation upward.  */
  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      /* The constant's bits are split across two HOST_WIDE_INTs;
	 pick the byte out of the appropriate half.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  /* Multi-word value: map the logical byte index to the
	     target layout, honoring word endianness first, then byte
	     endianness within the word.  */
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	/* Single-word value: only byte endianness matters.  */
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
7366 
7367 
7368 /* Subroutine of native_encode_expr.  Encode the FIXED_CST
7369    specified by EXPR into the buffer PTR of length LEN bytes.
7370    Return the number of bytes placed in the buffer, or zero
7371    upon failure.  */
7372 
7373 static int
native_encode_fixed(const_tree expr,unsigned char * ptr,int len)7374 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7375 {
7376   tree type = TREE_TYPE (expr);
7377   enum machine_mode mode = TYPE_MODE (type);
7378   int total_bytes = GET_MODE_SIZE (mode);
7379   FIXED_VALUE_TYPE value;
7380   tree i_value, i_type;
7381 
7382   if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7383     return 0;
7384 
7385   i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7386 
7387   if (NULL_TREE == i_type
7388       || TYPE_PRECISION (i_type) != total_bytes)
7389     return 0;
7390 
7391   value = TREE_FIXED_CST (expr);
7392   i_value = double_int_to_tree (i_type, value.data);
7393 
7394   return native_encode_int (i_value, ptr, len);
7395 }
7396 
7397 
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the hosts long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  /* Number of target words per 32-bit group; only meaningful on
     sub-word targets (UNITS_PER_WORD < 4).  */
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  /* Let the target back end render the value as 32-bit chunks.  */
  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      /* Byte index within the current 32-bit group.  */
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  /* Sub-word targets: honor word endianness, then byte
	     endianness within the word.  */
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      /* Place the byte at its offset within its 32-bit group.  */
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}
7445 
7446 /* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7447    specified by EXPR into the buffer PTR of length LEN bytes.
7448    Return the number of bytes placed in the buffer, or zero
7449    upon failure.  */
7450 
7451 static int
native_encode_complex(const_tree expr,unsigned char * ptr,int len)7452 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7453 {
7454   int rsize, isize;
7455   tree part;
7456 
7457   part = TREE_REALPART (expr);
7458   rsize = native_encode_expr (part, ptr, len);
7459   if (rsize == 0)
7460     return 0;
7461   part = TREE_IMAGPART (expr);
7462   isize = native_encode_expr (part, ptr+rsize, len-rsize);
7463   if (isize != rsize)
7464     return 0;
7465   return rsize + isize;
7466 }
7467 
7468 
7469 /* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7470    specified by EXPR into the buffer PTR of length LEN bytes.
7471    Return the number of bytes placed in the buffer, or zero
7472    upon failure.  */
7473 
7474 static int
native_encode_vector(const_tree expr,unsigned char * ptr,int len)7475 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7476 {
7477   unsigned i, count;
7478   int size, offset;
7479   tree itype, elem;
7480 
7481   offset = 0;
7482   count = VECTOR_CST_NELTS (expr);
7483   itype = TREE_TYPE (TREE_TYPE (expr));
7484   size = GET_MODE_SIZE (TYPE_MODE (itype));
7485   for (i = 0; i < count; i++)
7486     {
7487       elem = VECTOR_CST_ELT (expr, i);
7488       if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7489 	return 0;
7490       offset += size;
7491     }
7492   return offset;
7493 }
7494 
7495 
7496 /* Subroutine of native_encode_expr.  Encode the STRING_CST
7497    specified by EXPR into the buffer PTR of length LEN bytes.
7498    Return the number of bytes placed in the buffer, or zero
7499    upon failure.  */
7500 
7501 static int
native_encode_string(const_tree expr,unsigned char * ptr,int len)7502 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7503 {
7504   tree type = TREE_TYPE (expr);
7505   HOST_WIDE_INT total_bytes;
7506 
7507   if (TREE_CODE (type) != ARRAY_TYPE
7508       || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7509       || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7510       || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7511     return 0;
7512   total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7513   if (total_bytes > len)
7514     return 0;
7515   if (TREE_STRING_LENGTH (expr) < total_bytes)
7516     {
7517       memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7518       memset (ptr + TREE_STRING_LENGTH (expr), 0,
7519 	      total_bytes - TREE_STRING_LENGTH (expr));
7520     }
7521   else
7522     memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7523   return total_bytes;
7524 }
7525 
7526 
7527 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7528    REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7529    buffer PTR of length LEN bytes.  Return the number of bytes
7530    placed in the buffer, or zero upon failure.  */
7531 
7532 int
native_encode_expr(const_tree expr,unsigned char * ptr,int len)7533 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7534 {
7535   switch (TREE_CODE (expr))
7536     {
7537     case INTEGER_CST:
7538       return native_encode_int (expr, ptr, len);
7539 
7540     case REAL_CST:
7541       return native_encode_real (expr, ptr, len);
7542 
7543     case FIXED_CST:
7544       return native_encode_fixed (expr, ptr, len);
7545 
7546     case COMPLEX_CST:
7547       return native_encode_complex (expr, ptr, len);
7548 
7549     case VECTOR_CST:
7550       return native_encode_vector (expr, ptr, len);
7551 
7552     case STRING_CST:
7553       return native_encode_string (expr, ptr, len);
7554 
7555     default:
7556       return 0;
7557     }
7558 }
7559 
7560 
7561 /* Subroutine of native_interpret_expr.  Interpret the contents of
7562    the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7563    If the buffer cannot be interpreted, return NULL_TREE.  */
7564 
7565 static tree
native_interpret_int(tree type,const unsigned char * ptr,int len)7566 native_interpret_int (tree type, const unsigned char *ptr, int len)
7567 {
7568   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7569   double_int result;
7570 
7571   if (total_bytes > len
7572       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7573     return NULL_TREE;
7574 
7575   result = double_int::from_buffer (ptr, total_bytes);
7576 
7577   return double_int_to_tree (type, result);
7578 }
7579 
7580 
7581 /* Subroutine of native_interpret_expr.  Interpret the contents of
7582    the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7583    If the buffer cannot be interpreted, return NULL_TREE.  */
7584 
7585 static tree
native_interpret_fixed(tree type,const unsigned char * ptr,int len)7586 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7587 {
7588   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7589   double_int result;
7590   FIXED_VALUE_TYPE fixed_value;
7591 
7592   if (total_bytes > len
7593       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7594     return NULL_TREE;
7595 
7596   result = double_int::from_buffer (ptr, total_bytes);
7597   fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7598 
7599   return build_fixed (type, fixed_value);
7600 }
7601 
7602 
/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the hosts long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  /* 24 bytes is the 192-bit limit imposed by TMP above.  */
  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
  /* Number of target words per 32-bit group; only meaningful on
     sub-word targets (UNITS_PER_WORD < 4).  */
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  /* Gather the bytes back into 32-bit chunks, inverting the layout
     produced by native_encode_real.  */
  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      /* Byte index within the current 32-bit group.  */
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  /* Sub-word targets: honor word endianness, then byte
	     endianness within the word.  */
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  /* Let the target back end decode the chunks into a real value.  */
  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}
7651 
7652 
7653 /* Subroutine of native_interpret_expr.  Interpret the contents of
7654    the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7655    If the buffer cannot be interpreted, return NULL_TREE.  */
7656 
7657 static tree
native_interpret_complex(tree type,const unsigned char * ptr,int len)7658 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7659 {
7660   tree etype, rpart, ipart;
7661   int size;
7662 
7663   etype = TREE_TYPE (type);
7664   size = GET_MODE_SIZE (TYPE_MODE (etype));
7665   if (size * 2 > len)
7666     return NULL_TREE;
7667   rpart = native_interpret_expr (etype, ptr, size);
7668   if (!rpart)
7669     return NULL_TREE;
7670   ipart = native_interpret_expr (etype, ptr+size, size);
7671   if (!ipart)
7672     return NULL_TREE;
7673   return build_complex (type, rpart, ipart);
7674 }
7675 
7676 
7677 /* Subroutine of native_interpret_expr.  Interpret the contents of
7678    the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7679    If the buffer cannot be interpreted, return NULL_TREE.  */
7680 
7681 static tree
native_interpret_vector(tree type,const unsigned char * ptr,int len)7682 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7683 {
7684   tree etype, elem;
7685   int i, size, count;
7686   tree *elements;
7687 
7688   etype = TREE_TYPE (type);
7689   size = GET_MODE_SIZE (TYPE_MODE (etype));
7690   count = TYPE_VECTOR_SUBPARTS (type);
7691   if (size * count > len)
7692     return NULL_TREE;
7693 
7694   elements = XALLOCAVEC (tree, count);
7695   for (i = count - 1; i >= 0; i--)
7696     {
7697       elem = native_interpret_expr (etype, ptr+(i*size), size);
7698       if (!elem)
7699 	return NULL_TREE;
7700       elements[i] = elem;
7701     }
7702   return build_vector (type, elements);
7703 }
7704 
7705 
7706 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
7707    the buffer PTR of length LEN as a constant of type TYPE.  For
7708    INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7709    we return a REAL_CST, etc...  If the buffer cannot be interpreted,
7710    return NULL_TREE.  */
7711 
7712 tree
native_interpret_expr(tree type,const unsigned char * ptr,int len)7713 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7714 {
7715   switch (TREE_CODE (type))
7716     {
7717     case INTEGER_TYPE:
7718     case ENUMERAL_TYPE:
7719     case BOOLEAN_TYPE:
7720     case POINTER_TYPE:
7721     case REFERENCE_TYPE:
7722       return native_interpret_int (type, ptr, len);
7723 
7724     case REAL_TYPE:
7725       return native_interpret_real (type, ptr, len);
7726 
7727     case FIXED_POINT_TYPE:
7728       return native_interpret_fixed (type, ptr, len);
7729 
7730     case COMPLEX_TYPE:
7731       return native_interpret_complex (type, ptr, len);
7732 
7733     case VECTOR_TYPE:
7734       return native_interpret_vector (type, ptr, len);
7735 
7736     default:
7737       return NULL_TREE;
7738     }
7739 }
7740 
7741 /* Returns true if we can interpret the contents of a native encoding
7742    as TYPE.  */
7743 
7744 static bool
can_native_interpret_type_p(tree type)7745 can_native_interpret_type_p (tree type)
7746 {
7747   switch (TREE_CODE (type))
7748     {
7749     case INTEGER_TYPE:
7750     case ENUMERAL_TYPE:
7751     case BOOLEAN_TYPE:
7752     case POINTER_TYPE:
7753     case REFERENCE_TYPE:
7754     case FIXED_POINT_TYPE:
7755     case REAL_TYPE:
7756     case COMPLEX_TYPE:
7757     case VECTOR_TYPE:
7758       return true;
7759     default:
7760       return false;
7761     }
7762 }
7763 
7764 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7765    TYPE at compile-time.  If we're unable to perform the conversion
7766    return NULL_TREE.  */
7767 
7768 static tree
fold_view_convert_expr(tree type,tree expr)7769 fold_view_convert_expr (tree type, tree expr)
7770 {
7771   /* We support up to 512-bit values (for V8DFmode).  */
7772   unsigned char buffer[64];
7773   int len;
7774 
7775   /* Check that the host and target are sane.  */
7776   if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7777     return NULL_TREE;
7778 
7779   len = native_encode_expr (expr, buffer, sizeof (buffer));
7780   if (len == 0)
7781     return NULL_TREE;
7782 
7783   return native_interpret_expr (type, buffer, len);
7784 }
7785 
/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      /* &*p folds to p, converted to the requested pointer type if
	 necessary.  */
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
    }
  else if (TREE_CODE (t) == MEM_REF
	   && integer_zerop (TREE_OPERAND (t, 1)))
    /* &MEM[p, 0] is just p.  */
    return TREE_OPERAND (t, 0);
  else if (TREE_CODE (t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
    /* &MEM[cst, off] folds to the constant address cst p+ off.  */
    return fold_binary (POINTER_PLUS_EXPR, ptrtype,
			TREE_OPERAND (t, 0),
			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      /* The address of a view-converted object is the address of the
	 underlying object, converted to the requested pointer type.  */
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    t = build1_loc (loc, ADDR_EXPR, ptrtype, t);

  return t;
}
7823 
7824 /* Build an expression for the address of T.  */
7825 
7826 tree
build_fold_addr_expr_loc(location_t loc,tree t)7827 build_fold_addr_expr_loc (location_t loc, tree t)
7828 {
7829   tree ptrtype = build_pointer_type (TREE_TYPE (t));
7830 
7831   return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7832 }
7833 
7834 static bool vec_cst_ctor_to_array (tree, tree *);
7835 
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (CONVERT_EXPR_CODE_P (code)
	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
	{
	  /* Don't use STRIP_NOPS, because signedness of argument type
	     matters.  */
	  STRIP_SIGN_NOPS (arg0);
	}
      else
	{
	  /* Strip any conversions that don't change the mode.  This
	     is safe for every expression, except for a comparison
	     expression because its signedness is derived from its
	     operands.

	     Note that this is done as an internal manipulation within
	     the constant folder, in order to find the simplest
	     representation of the arguments so that their form can be
	     studied.  In any cases, the appropriate type conversions
	     should be put back in the tree that will get out of the
	     constant folder.  */
	  STRIP_NOPS (arg0);
	}
    }

  /* For any unary operation, distribute the operation into the arms of
     a COMPOUND_EXPR or COND_EXPR operand, so it applies to the
     value-producing subexpressions.  */
  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build1_loc (loc, code, type,
				    fold_convert_loc (loc, TREE_TYPE (op0),
						      TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg02 = TREE_OPERAND (arg0, 2);
	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
	    arg01 = fold_build1_loc (loc, code, type,
				 fold_convert_loc (loc,
						   TREE_TYPE (op0), arg01));
	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
	    arg02 = fold_build1_loc (loc, code, type,
				 fold_convert_loc (loc,
						   TREE_TYPE (op0), arg02));
	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
			     arg01, arg02);

	  /* If this was a conversion, and all we did was to move into
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */

	  if ((CONVERT_EXPR_CODE_P (code)
	       || code == NON_LVALUE_EXPR)
	      && TREE_CODE (tem) == COND_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
		     && (INTEGRAL_TYPE_P
			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
		  || flag_syntax_only))
	    tem = build1_loc (loc, code, type,
			      build3 (COND_EXPR,
				      TREE_TYPE (TREE_OPERAND
						 (TREE_OPERAND (tem, 1), 0)),
				      TREE_OPERAND (tem, 0),
				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
				      TREE_OPERAND (TREE_OPERAND (tem, 2),
						    0)));
	  return tem;
	}
   }

  switch (code)
    {
    case PAREN_EXPR:
      /* Re-association barriers around constants and other re-association
	 barriers can be removed.  */
      if (CONSTANT_CLASS_P (op0)
	  || TREE_CODE (op0) == PAREN_EXPR)
	return fold_convert_loc (loc, type, op0);
      return NULL_TREE;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      /* A conversion to the operand's own type is a no-op.  */
      if (TREE_TYPE (op0) == type)
	return op0;

      if (COMPARISON_CLASS_P (op0))
	{
	  /* If we have (type) (a CMP b) and type is an integral type, return
	     new expression involving the new type.  Canonicalize
	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
	     non-integral type.
	     Do not fold the result as that would not simplify further, also
	     folding again results in recursions.  */
	  if (TREE_CODE (type) == BOOLEAN_TYPE)
	    return build2_loc (loc, TREE_CODE (op0), type,
			       TREE_OPERAND (op0, 0),
			       TREE_OPERAND (op0, 1));
	  else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
		   && TREE_CODE (type) != VECTOR_TYPE)
	    return build3_loc (loc, COND_EXPR, type, op0,
			       constant_boolean_node (true, type),
			       constant_boolean_node (false, type));
	}

      /* Handle cases of two conversions in a row.  */
      if (CONVERT_EXPR_P (op0))
	{
	  tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inter_type = TREE_TYPE (op0);
	  int inside_int = INTEGRAL_TYPE_P (inside_type);
	  int inside_ptr = POINTER_TYPE_P (inside_type);
	  int inside_float = FLOAT_TYPE_P (inside_type);
	  int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
	  int inter_int = INTEGRAL_TYPE_P (inter_type);
	  int inter_ptr = POINTER_TYPE_P (inter_type);
	  int inter_float = FLOAT_TYPE_P (inter_type);
	  int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
	  int final_int = INTEGRAL_TYPE_P (type);
	  int final_ptr = POINTER_TYPE_P (type);
	  int final_float = FLOAT_TYPE_P (type);
	  int final_vec = TREE_CODE (type) == VECTOR_TYPE;
	  unsigned int final_prec = TYPE_PRECISION (type);
	  int final_unsignedp = TYPE_UNSIGNED (type);

	  /* In addition to the cases of two conversions in a row
	     handled below, if we are converting something to its own
	     type via an object of identical or wider precision, neither
	     conversion is needed.  */
	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
	      && (((inter_int || inter_ptr) && final_int)
		  || (inter_float && final_float))
	      && inter_prec >= final_prec)
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* Likewise, if the intermediate and initial types are either both
	     float or both integer, we don't need the middle conversion if the
	     former is wider than the latter and doesn't change the signedness
	     (for integers).  Avoid this if the final type is a pointer since
	     then we sometimes need the middle conversion.  Likewise if the
	     final type has a precision not equal to the size of its mode.  */
	  if (((inter_int && inside_int)
	       || (inter_float && inside_float)
	       || (inter_vec && inside_vec))
	      && inter_prec >= inside_prec
	      && (inter_float || inter_vec
		  || inter_unsignedp == inside_unsignedp)
	      && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr
	      && (! final_vec || inter_prec == inside_prec))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* If we have a sign-extension of a zero-extended value, we can
	     replace that by a single zero-extension.  Likewise if the
	     final conversion does not change precision we can drop the
	     intermediate conversion.  */
	  if (inside_int && inter_int && final_int
	      && ((inside_prec < inter_prec && inter_prec < final_prec
		   && inside_unsignedp && !inter_unsignedp)
		  || final_prec == inter_prec))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* Two conversions in a row are not needed unless:
	     - some conversion is floating-point (overstrict for now), or
	     - some conversion is a vector (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
	     - the intermediate type and innermost type differ in signedness,
	       and the outermost type is wider than the intermediate, or
	     - the initial type is a pointer type and the precisions of the
	       intermediate and final types differ, or
	     - the final type is a pointer type and the precisions of the
	       initial and intermediate types differ.  */
	  if (! inside_float && ! inter_float && ! final_float
	      && ! inside_vec && ! inter_vec && ! final_vec
	      && (inter_prec >= inside_prec || inter_prec >= final_prec)
	      && ! (inside_int && inter_int
		    && inter_unsignedp != inside_unsignedp
		    && inter_prec < final_prec)
	      && ((inter_unsignedp && inter_prec > inside_prec)
		  == (final_unsignedp && final_prec > inter_prec))
	      && ! (inside_ptr && inter_prec != final_prec)
	      && ! (final_ptr && inside_prec != inter_prec)
	      && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type)))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
	}

      /* Handle (T *)&A.B.C for A being of type T and B and C
	 living at offset zero.  This occurs frequently in
	 C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && POINTER_TYPE_P (type)
	  && handled_component_p (TREE_OPERAND (op0, 0)))
        {
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;
          tree base = TREE_OPERAND (op0, 0);
	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  /* If the reference was to a (constant) zero offset, we can use
	     the address of the base if it has the same base type
	     as the result type and the pointer type is unqualified.  */
	  if (! offset && bitpos == 0
	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
	    return fold_convert_loc (loc, type,
				     build_fold_addr_expr_loc (loc, base));
        }

      if (TREE_CODE (op0) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD
	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
	  /* First do the assignment, then return converted constant.  */
	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
	  TREE_NO_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  return tem;
	}

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constants (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.
	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
	 very likely don't have maximal range for their precision and this
	 transformation effectively doesn't preserve non-maximal ranges.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and_expr = op0;
	  tree and0 = TREE_OPERAND (and_expr, 0);
	  tree and1 = TREE_OPERAND (and_expr, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && tree_fits_uhwi_p (and1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      /* The transform is valid when the sign bit (and everything
		 above it) of the mask constant is clear.  */
	      cst = tree_to_uhwi (and1);
	      cst &= HOST_WIDE_INT_M1U
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = unsigned_type_for (TREE_TYPE (and0));
		  and0 = fold_convert_loc (loc, uns, and0);
		  and1 = fold_convert_loc (loc, uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = force_fit_type_double (type, tree_to_double_int (and1),
					   0, TREE_OVERFLOW (and1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, and0), tem);
	    }
	}

      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
         when one of the new casts will fold away. Conservatively we assume
	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
      if (POINTER_TYPE_P (type)
	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);

	  return fold_build_pointer_plus_loc
		   (loc, fold_convert_loc (loc, type, arg00), arg01);
	}

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is an integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				fold_convert_loc (loc, type, tem));
	}

      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == MULT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  /* Be careful not to introduce new overflows.  */
	  tree mult_type;
          if (TYPE_OVERFLOW_WRAPS (type))
	    mult_type = type;
	  else
	    mult_type = unsigned_type_for (type);

	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
	    {
	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
				 fold_convert_loc (loc, mult_type,
						   TREE_OPERAND (op0, 0)),
				 fold_convert_loc (loc, mult_type,
						   TREE_OPERAND (op0, 1)));
	      return fold_convert_loc (loc, type, tem);
	    }
	}

      /* Fall back to constant folding of the conversion itself.  */
      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;

    case ADDR_SPACE_CONVERT_EXPR:
      /* Only a literal zero is folded here, via fold_convert_const.  */
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      return NULL_TREE;

    case FIXED_CONVERT_EXPR:
      /* Fixed-point conversions fold only for constant operands.  */
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;

    case VIEW_CONVERT_EXPR:
      /* A view-conversion to the same type is a no-op, and nested
	 view-conversions collapse to a single one.  */
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
			    type, TREE_OPERAND (op0, 0));
      if (TREE_CODE (op0) == MEM_REF)
	return fold_build2_loc (loc, MEM_REF, type,
				TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));

      /* For integral conversions with the same precision or pointer
	 conversions use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
	   || POINTER_TYPE_P (type))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (op0))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
			    type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);

    case NEGATE_EXPR:
      /* fold_negate_expr returns NULL_TREE when the negation cannot be
	 simplified.  */
      tem = fold_negate_expr (loc, arg0);
      if (tem)
	return fold_convert_loc (loc, type, tem);
      return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						  TREE_TYPE (targ0),
						  targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1_loc (loc, ABS_EXPR, type,
				fold_convert_loc (loc, type, tem));
	}
      return NULL_TREE;

    case CONJ_EXPR:
      /* Conjugation of a non-complex value is the value itself.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
			      negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
        return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
			    fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
			    build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1_loc (loc, NEGATE_EXPR, type,
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
			       	     fold_convert_loc (loc, type,
						       TREE_OPERAND (arg0, 0)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
			       	     fold_convert_loc (loc, type,
						       TREE_OPERAND (arg0, 1)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  /* Only build the result if every element folded.  */
	  if (i == count)
	    return build_vector (type, elements);
	}
      else if (COMPARISON_CLASS_P (arg0)
	       && (VECTOR_TYPE_P (type)
		   || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
	{
	  /* ~(a CMP b) becomes the inverted comparison, when one exists.  */
	  tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
	  enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
				     HONOR_NANS (TYPE_MODE (op_type)));
	  if (subcode != ERROR_MARK)
	    return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
			       TREE_OPERAND (arg0, 1));
	}


      return NULL_TREE;

    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				 TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      /* Distribute REALPART over complex addition/subtraction.  */
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
			     fold_build1_loc (loc, REALPART_EXPR, itype,
					  TREE_OPERAND (arg0, 0)),
			     fold_build1_loc (loc, REALPART_EXPR, itype,
					  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
			     TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  /* real(cexpi(x)) folds to cos(x).  */
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
	        fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
	          return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return build_zero_cst (type);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
				 TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      /* Distribute IMAGPART over complex addition/subtraction.  */
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
			     fold_build1_loc (loc, IMAGPART_EXPR, itype,
					  TREE_OPERAND (arg0, 0)),
			     fold_build1_loc (loc, IMAGPART_EXPR, itype,
					  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  /* imag(cexpi(x)) folds to sin(x).  */
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
	        fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
	          return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;

    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;

    /* Constant-fold vector unpacking: take the low or high half of a
       VECTOR_CST and convert each element (NOP_EXPR for the integer
       variants, FLOAT_EXPR for the float variants).  */
    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	/* Select which half of the source vector to use, accounting
	   for target endianness.  */
	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    /* Constant-fold vector reductions: accumulate all elements into
       element 0 with the corresponding scalar operation and zero the
       remaining elements.  */
    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (op0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (op0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	    elts[i] = build_zero_cst (TREE_TYPE (type));
	  }

	return build_vector (type, elts);
      }

    default:
      return NULL_TREE;
    } /* switch (code) */
}
8561 
8562 
8563 /* If the operation was a conversion do _not_ mark a resulting constant
8564    with TREE_OVERFLOW if the original constant was not.  These conversions
8565    have implementation defined behavior and retaining the TREE_OVERFLOW
8566    flag here would confuse later passes such as VRP.  */
8567 tree
fold_unary_ignore_overflow_loc(location_t loc,enum tree_code code,tree type,tree op0)8568 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8569 				tree type, tree op0)
8570 {
8571   tree res = fold_unary_loc (loc, code, type, op0);
8572   if (res
8573       && TREE_CODE (res) == INTEGER_CST
8574       && TREE_CODE (op0) == INTEGER_CST
8575       && CONVERT_EXPR_CODE_P (code))
8576     TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8577 
8578   return res;
8579 }
8580 
/* Fold a binary bitwise/truth expression of code CODE and type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting expression.
   ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */
static tree
fold_truth_andor (location_t loc, enum tree_code code, tree type,
		  tree arg0, tree arg1, tree op0, tree op1)
{
  tree tem;

  /* We only do these simplifications if we are optimizing.  */
  if (!optimize)
    return NULL_TREE;

  /* Check for things like (A || B) && (A || C).  We can convert this
     to A || (B && C).  Note that either operator can be any of the four
     truth and/or operations and the transformation will still be
     valid.   Also note that we only care about order for the
     ANDIF and ORIF operators.  If B contains side effects, this
     might change the truth-value of A.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
      && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
    {
      tree a00 = TREE_OPERAND (arg0, 0);
      tree a01 = TREE_OPERAND (arg0, 1);
      tree a10 = TREE_OPERAND (arg1, 0);
      tree a11 = TREE_OPERAND (arg1, 1);
      /* Operands may be freely re-matched only when both the inner and
	 the outer operator are the fully commutative (non-short-circuit)
	 AND/OR forms.  */
      int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
			 && (code == TRUTH_AND_EXPR
			     || code == TRUTH_OR_EXPR));

      /* (A op B) code (A op C) -> A op (B code C).  */
      if (operand_equal_p (a00, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
			    fold_build2_loc (loc, code, type, a01, a11));
      /* (A op B) code (C op A) -> A op (B code C).  */
      else if (commutative && operand_equal_p (a00, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
			    fold_build2_loc (loc, code, type, a01, a10));
      /* (B op A) code (A op C) -> A op (B code C).  */
      else if (commutative && operand_equal_p (a01, a10, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
			    fold_build2_loc (loc, code, type, a00, a11));

      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */

      else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
	       && operand_equal_p (a01, a11, 0))
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
			    fold_build2_loc (loc, code, type, a00, a10),
			    a01);
    }

  /* See if we can build a range comparison.  */
  if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
    return tem;

  /* A short-circuit operation whose left arm is the opposite kind may
     allow merging the opposite arm with the right operand.  */
  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
      if (tem)
	return fold_build2_loc (loc, code, type, tem, arg1);
    }

  if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
      || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
    {
      tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
      if (tem)
	return fold_build2_loc (loc, code, type, arg0, tem);
    }

  /* Check for the possibility of merging component references.  If our
     lhs is another similar operation, try to merge its rhs with our
     rhs.  Then try to merge our lhs and rhs.  */
  if (TREE_CODE (arg0) == code
      && 0 != (tem = fold_truth_andor_1 (loc, code, type,
					 TREE_OPERAND (arg0, 1), arg1)))
    return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

  if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
    return tem;

  if (LOGICAL_OP_NON_SHORT_CIRCUIT
      && (code == TRUTH_AND_EXPR
          || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR
          || code == TRUTH_ORIF_EXPR))
    {
      enum tree_code ncode, icode;

      /* NCODE is the non-short-circuit form of CODE, ICODE the
	 short-circuit (IF) form.  */
      ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
      icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;

      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
	 We don't want to pack more than two leafs to a non-IF AND/OR
	 expression.
	 If tree-code of left-hand operand isn't an AND/OR-IF code and not
	 equal to IF-CODE, then we don't want to add right-hand operand.
	 If the inner right-hand side of left-hand operand has
	 side-effects, or isn't simple, then we can't add to it,
	 as otherwise we might destroy if-sequence.  */
      if (TREE_CODE (arg0) == icode
	  && simple_operand_p_2 (arg1)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
	{
	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
				 arg1);
	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
				  tem);
	}
	/* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	   or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C).  */
      else if (TREE_CODE (arg1) == icode
	  && simple_operand_p_2 (arg0)
	  /* Needed for sequence points to handle trappings, and
	     side-effects.  */
	  && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_build2_loc (loc, ncode, type,
				 arg0, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, icode, type, tem,
				  TREE_OPERAND (arg1, 1));
	}
      /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
	 into (A OR B).
	 For sequence point consistency, we need to check for trapping,
	 and side-effects.  */
      else if (code == icode && simple_operand_p_2 (arg0)
               && simple_operand_p_2 (arg1))
	return fold_build2_loc (loc, ncode, type, arg0, arg1);
    }

  return NULL_TREE;
}
8725 
8726 /* Fold a binary expression of code CODE and type TYPE with operands
8727    OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8728    Return the folded expression if folding is successful.  Otherwise,
8729    return NULL_TREE.  */
8730 
8731 static tree
fold_minmax(location_t loc,enum tree_code code,tree type,tree op0,tree op1)8732 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8733 {
8734   enum tree_code compl_code;
8735 
8736   if (code == MIN_EXPR)
8737     compl_code = MAX_EXPR;
8738   else if (code == MAX_EXPR)
8739     compl_code = MIN_EXPR;
8740   else
8741     gcc_unreachable ();
8742 
8743   /* MIN (MAX (a, b), b) == b.  */
8744   if (TREE_CODE (op0) == compl_code
8745       && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8746     return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8747 
8748   /* MIN (MAX (b, a), b) == b.  */
8749   if (TREE_CODE (op0) == compl_code
8750       && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8751       && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8752     return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8753 
8754   /* MIN (a, MAX (a, b)) == a.  */
8755   if (TREE_CODE (op1) == compl_code
8756       && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8757       && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8758     return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8759 
8760   /* MIN (a, MAX (b, a)) == a.  */
8761   if (TREE_CODE (op1) == compl_code
8762       && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8763       && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8764     return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8765 
8766   return NULL_TREE;
8767 }
8768 
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
         && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  Each transformation below
     adjusts CODE so that subtracting one from |CST0| preserves the
     comparison's meaning.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
        return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* These forms rely on A +- CST not wrapping, hence the
	 *strict_overflow_p note below.  */
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  /* Step CST0 one unit toward zero and rebuild ARG0 around it.  */
  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
  t = fold_convert (TREE_TYPE (arg1), t);

  /* If swapping might yield to a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}
8880 
8881 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8882    overflow further.  Try to decrease the magnitude of constants involved
8883    by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8884    and put sole constants at the second argument position.
8885    Returns the canonicalized tree if changed, otherwise NULL_TREE.  */
8886 
8887 static tree
maybe_canonicalize_comparison(location_t loc,enum tree_code code,tree type,tree arg0,tree arg1)8888 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8889 			       tree arg0, tree arg1)
8890 {
8891   tree t;
8892   bool strict_overflow_p;
8893   const char * const warnmsg = G_("assuming signed overflow does not occur "
8894 				  "when reducing constant in comparison");
8895 
8896   /* Try canonicalization by simplifying arg0.  */
8897   strict_overflow_p = false;
8898   t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8899 				       &strict_overflow_p);
8900   if (t)
8901     {
8902       if (strict_overflow_p)
8903 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8904       return t;
8905     }
8906 
8907   /* Try canonicalization by simplifying arg1 using the swapped
8908      comparison.  */
8909   code = swap_tree_comparison (code);
8910   strict_overflow_p = false;
8911   t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8912 				       &strict_overflow_p);
8913   if (t && strict_overflow_p)
8914     fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8915   return t;
8916 }
8917 
/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which can not wrap.  Conservatively returns
   true whenever wrapping cannot be ruled out.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  double_int di_offset, total;

  /* Only pointer-typed bases can be analyzed here.  */
  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  /* A negative bit position is not handled; assume it may wrap.  */
  if (bitpos < 0)
    return true;

  /* OFFSET must be absent or a valid (non-overflowed) integer constant.  */
  if (offset == NULL_TREE)
    di_offset = double_int_zero;
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    di_offset = TREE_INT_CST (offset);

  bool overflow;
  /* Convert BITPOS to whole bytes and add it to the offset, tracking
     overflow of the double_int addition.  */
  double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
  total = di_offset.add_with_sign (units, true, &overflow);
  if (overflow)
    return true;

  /* The total byte offset must fit in the low HOST_WIDE_INT word for
     the size comparison below to be meaningful.  */
  if (total.high != 0)
    return true;

  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  /* Wrapping is only possible if the offset runs past the end of the
     pointed-to object.  */
  return total.low > (unsigned HOST_WIDE_INT) size;
}
8966 
8967 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8968    kind INTEGER_CST.  This makes sure to properly sign-extend the
8969    constant.  */
8970 
8971 static HOST_WIDE_INT
size_low_cst(const_tree t)8972 size_low_cst (const_tree t)
8973 {
8974   double_int d = tree_to_double_int (t);
8975   return d.sext (TYPE_PRECISION (TREE_TYPE (t))).low;
8976 }
8977 
8978 /* Subroutine of fold_binary.  This routine performs all of the
8979    transformations that are common to the equality/inequality
8980    operators (EQ_EXPR and NE_EXPR) and the ordering operators
8981    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
8982    fold_binary should call fold_binary.  Fold a comparison with
8983    tree code CODE and type TYPE with operands OP0 and OP1.  Return
8984    the folded comparison or NULL_TREE.  */
8985 
8986 static tree
fold_comparison(location_t loc,enum tree_code code,tree type,tree op0,tree op1)8987 fold_comparison (location_t loc, enum tree_code code, tree type,
8988 		 tree op0, tree op1)
8989 {
8990   tree arg0, arg1, tem;
8991 
8992   arg0 = op0;
8993   arg1 = op1;
8994 
8995   STRIP_SIGN_NOPS (arg0);
8996   STRIP_SIGN_NOPS (arg1);
8997 
8998   tem = fold_relational_const (code, type, arg0, arg1);
8999   if (tem != NULL_TREE)
9000     return tem;
9001 
9002   /* If one arg is a real or integer constant, put it last.  */
9003   if (tree_swap_operands_p (arg0, arg1, true))
9004     return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9005 
9006   /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
9007   if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9008       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9009 	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9010 	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
9011       && (TREE_CODE (arg1) == INTEGER_CST
9012 	  && !TREE_OVERFLOW (arg1)))
9013     {
9014       tree const1 = TREE_OPERAND (arg0, 1);
9015       tree const2 = arg1;
9016       tree variable = TREE_OPERAND (arg0, 0);
9017       tree lhs;
9018       int lhs_add;
9019       lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9020 
9021       lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9022 			 TREE_TYPE (arg1), const2, const1);
9023 
9024       /* If the constant operation overflowed this can be
9025 	 simplified as a comparison against INT_MAX/INT_MIN.  */
9026       if (TREE_CODE (lhs) == INTEGER_CST
9027 	  && TREE_OVERFLOW (lhs))
9028 	{
9029 	  int const1_sgn = tree_int_cst_sgn (const1);
9030 	  enum tree_code code2 = code;
9031 
9032 	  /* Get the sign of the constant on the lhs if the
9033 	     operation were VARIABLE + CONST1.  */
9034 	  if (TREE_CODE (arg0) == MINUS_EXPR)
9035 	    const1_sgn = -const1_sgn;
9036 
9037 	  /* The sign of the constant determines if we overflowed
9038 	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9039 	     Canonicalize to the INT_MIN overflow by swapping the comparison
9040 	     if necessary.  */
9041 	  if (const1_sgn == -1)
9042 	    code2 = swap_tree_comparison (code);
9043 
9044 	  /* We now can look at the canonicalized case
9045 	       VARIABLE + 1  CODE2  INT_MIN
9046 	     and decide on the result.  */
9047 	  if (code2 == LT_EXPR
9048 	      || code2 == LE_EXPR
9049 	      || code2 == EQ_EXPR)
9050 	    return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9051 	  else if (code2 == NE_EXPR
9052 		   || code2 == GE_EXPR
9053 		   || code2 == GT_EXPR)
9054 	    return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9055 	}
9056 
9057       if (TREE_CODE (lhs) == TREE_CODE (arg1)
9058 	  && (TREE_CODE (lhs) != INTEGER_CST
9059 	      || !TREE_OVERFLOW (lhs)))
9060 	{
9061 	  if (code != EQ_EXPR && code != NE_EXPR)
9062 	    fold_overflow_warning ("assuming signed overflow does not occur "
9063 				   "when changing X +- C1 cmp C2 to "
9064 				   "X cmp C1 +- C2",
9065 				   WARN_STRICT_OVERFLOW_COMPARISON);
9066 	  return fold_build2_loc (loc, code, type, variable, lhs);
9067 	}
9068     }
9069 
9070   /* For comparisons of pointers we can decompose it to a compile time
9071      comparison of the base objects and the offsets into the object.
9072      This requires at least one operand being an ADDR_EXPR or a
9073      POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
9074   if (POINTER_TYPE_P (TREE_TYPE (arg0))
9075       && (TREE_CODE (arg0) == ADDR_EXPR
9076 	  || TREE_CODE (arg1) == ADDR_EXPR
9077 	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9078 	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9079     {
9080       tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9081       HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9082       enum machine_mode mode;
9083       int volatilep, unsignedp;
9084       bool indirect_base0 = false, indirect_base1 = false;
9085 
9086       /* Get base and offset for the access.  Strip ADDR_EXPR for
9087 	 get_inner_reference, but put it back by stripping INDIRECT_REF
9088 	 off the base object if possible.  indirect_baseN will be true
9089 	 if baseN is not an address but refers to the object itself.  */
9090       base0 = arg0;
9091       if (TREE_CODE (arg0) == ADDR_EXPR)
9092 	{
9093 	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9094 				       &bitsize, &bitpos0, &offset0, &mode,
9095 				       &unsignedp, &volatilep, false);
9096 	  if (TREE_CODE (base0) == INDIRECT_REF)
9097 	    base0 = TREE_OPERAND (base0, 0);
9098 	  else
9099 	    indirect_base0 = true;
9100 	}
9101       else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9102 	{
9103 	  base0 = TREE_OPERAND (arg0, 0);
9104 	  STRIP_SIGN_NOPS (base0);
9105 	  if (TREE_CODE (base0) == ADDR_EXPR)
9106 	    {
9107 	      base0 = TREE_OPERAND (base0, 0);
9108 	      indirect_base0 = true;
9109 	    }
9110 	  offset0 = TREE_OPERAND (arg0, 1);
9111 	  if (tree_fits_shwi_p (offset0))
9112 	    {
9113 	      HOST_WIDE_INT off = size_low_cst (offset0);
9114 	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9115 				   * BITS_PER_UNIT)
9116 		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9117 		{
9118 		  bitpos0 = off * BITS_PER_UNIT;
9119 		  offset0 = NULL_TREE;
9120 		}
9121 	    }
9122 	}
9123 
9124       base1 = arg1;
9125       if (TREE_CODE (arg1) == ADDR_EXPR)
9126 	{
9127 	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9128 				       &bitsize, &bitpos1, &offset1, &mode,
9129 				       &unsignedp, &volatilep, false);
9130 	  if (TREE_CODE (base1) == INDIRECT_REF)
9131 	    base1 = TREE_OPERAND (base1, 0);
9132 	  else
9133 	    indirect_base1 = true;
9134 	}
9135       else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9136 	{
9137 	  base1 = TREE_OPERAND (arg1, 0);
9138 	  STRIP_SIGN_NOPS (base1);
9139 	  if (TREE_CODE (base1) == ADDR_EXPR)
9140 	    {
9141 	      base1 = TREE_OPERAND (base1, 0);
9142 	      indirect_base1 = true;
9143 	    }
9144 	  offset1 = TREE_OPERAND (arg1, 1);
9145 	  if (tree_fits_shwi_p (offset1))
9146 	    {
9147 	      HOST_WIDE_INT off = size_low_cst (offset1);
9148 	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9149 				   * BITS_PER_UNIT)
9150 		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9151 		{
9152 		  bitpos1 = off * BITS_PER_UNIT;
9153 		  offset1 = NULL_TREE;
9154 		}
9155 	    }
9156 	}
9157 
9158       /* A local variable can never be pointed to by
9159          the default SSA name of an incoming parameter.  */
9160       if ((TREE_CODE (arg0) == ADDR_EXPR
9161            && indirect_base0
9162            && TREE_CODE (base0) == VAR_DECL
9163            && auto_var_in_fn_p (base0, current_function_decl)
9164            && !indirect_base1
9165            && TREE_CODE (base1) == SSA_NAME
9166            && SSA_NAME_IS_DEFAULT_DEF (base1)
9167 	   && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9168           || (TREE_CODE (arg1) == ADDR_EXPR
9169               && indirect_base1
9170               && TREE_CODE (base1) == VAR_DECL
9171               && auto_var_in_fn_p (base1, current_function_decl)
9172               && !indirect_base0
9173               && TREE_CODE (base0) == SSA_NAME
9174               && SSA_NAME_IS_DEFAULT_DEF (base0)
9175 	      && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9176         {
9177           if (code == NE_EXPR)
9178             return constant_boolean_node (1, type);
9179           else if (code == EQ_EXPR)
9180             return constant_boolean_node (0, type);
9181         }
9182       /* If we have equivalent bases we might be able to simplify.  */
9183       else if (indirect_base0 == indirect_base1
9184                && operand_equal_p (base0, base1, 0))
9185 	{
9186 	  /* We can fold this expression to a constant if the non-constant
9187 	     offset parts are equal.  */
9188 	  if ((offset0 == offset1
9189 	       || (offset0 && offset1
9190 		   && operand_equal_p (offset0, offset1, 0)))
9191 	      && (code == EQ_EXPR
9192 		  || code == NE_EXPR
9193 		  || (indirect_base0 && DECL_P (base0))
9194 		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
9195 
9196 	    {
9197 	      if (code != EQ_EXPR
9198 		  && code != NE_EXPR
9199 		  && bitpos0 != bitpos1
9200 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
9201 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
9202 		fold_overflow_warning (("assuming pointer wraparound does not "
9203 					"occur when comparing P +- C1 with "
9204 					"P +- C2"),
9205 				       WARN_STRICT_OVERFLOW_CONDITIONAL);
9206 
9207 	      switch (code)
9208 		{
9209 		case EQ_EXPR:
9210 		  return constant_boolean_node (bitpos0 == bitpos1, type);
9211 		case NE_EXPR:
9212 		  return constant_boolean_node (bitpos0 != bitpos1, type);
9213 		case LT_EXPR:
9214 		  return constant_boolean_node (bitpos0 < bitpos1, type);
9215 		case LE_EXPR:
9216 		  return constant_boolean_node (bitpos0 <= bitpos1, type);
9217 		case GE_EXPR:
9218 		  return constant_boolean_node (bitpos0 >= bitpos1, type);
9219 		case GT_EXPR:
9220 		  return constant_boolean_node (bitpos0 > bitpos1, type);
9221 		default:;
9222 		}
9223 	    }
9224 	  /* We can simplify the comparison to a comparison of the variable
9225 	     offset parts if the constant offset parts are equal.
9226 	     Be careful to use signed sizetype here because otherwise we
9227 	     mess with array offsets in the wrong way.  This is possible
9228 	     because pointer arithmetic is restricted to retain within an
9229 	     object and overflow on pointer differences is undefined as of
9230 	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
9231 	  else if (bitpos0 == bitpos1
9232 		   && ((code == EQ_EXPR || code == NE_EXPR)
9233 		       || (indirect_base0 && DECL_P (base0))
9234 		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
9235 	    {
9236 	      /* By converting to signed sizetype we cover middle-end pointer
9237 	         arithmetic which operates on unsigned pointer types of size
9238 	         type size and ARRAY_REF offsets which are properly sign or
9239 	         zero extended from their type in case it is narrower than
9240 	         sizetype.  */
9241 	      if (offset0 == NULL_TREE)
9242 		offset0 = build_int_cst (ssizetype, 0);
9243 	      else
9244 		offset0 = fold_convert_loc (loc, ssizetype, offset0);
9245 	      if (offset1 == NULL_TREE)
9246 		offset1 = build_int_cst (ssizetype, 0);
9247 	      else
9248 		offset1 = fold_convert_loc (loc, ssizetype, offset1);
9249 
9250 	      if (code != EQ_EXPR
9251 		  && code != NE_EXPR
9252 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
9253 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
9254 		fold_overflow_warning (("assuming pointer wraparound does not "
9255 					"occur when comparing P +- C1 with "
9256 					"P +- C2"),
9257 				       WARN_STRICT_OVERFLOW_COMPARISON);
9258 
9259 	      return fold_build2_loc (loc, code, type, offset0, offset1);
9260 	    }
9261 	}
9262       /* For non-equal bases we can simplify if they are addresses
9263 	 of local binding decls or constants.  */
9264       else if (indirect_base0 && indirect_base1
9265 	       /* We know that !operand_equal_p (base0, base1, 0)
9266 		  because the if condition was false.  But make
9267 		  sure two decls are not the same.  */
9268 	       && base0 != base1
9269 	       && TREE_CODE (arg0) == ADDR_EXPR
9270 	       && TREE_CODE (arg1) == ADDR_EXPR
9271 	       && (((TREE_CODE (base0) == VAR_DECL
9272 		     || TREE_CODE (base0) == PARM_DECL)
9273 		    && (targetm.binds_local_p (base0)
9274 			|| CONSTANT_CLASS_P (base1)))
9275 		   || CONSTANT_CLASS_P (base0))
9276 	       && (((TREE_CODE (base1) == VAR_DECL
9277 		     || TREE_CODE (base1) == PARM_DECL)
9278 		    && (targetm.binds_local_p (base1)
9279 			|| CONSTANT_CLASS_P (base0)))
9280 		   || CONSTANT_CLASS_P (base1)))
9281 	{
9282 	  if (code == EQ_EXPR)
9283 	    return omit_two_operands_loc (loc, type, boolean_false_node,
9284 				      arg0, arg1);
9285 	  else if (code == NE_EXPR)
9286 	    return omit_two_operands_loc (loc, type, boolean_true_node,
9287 				      arg0, arg1);
9288 	}
9289       /* For equal offsets we can simplify to a comparison of the
9290 	 base addresses.  */
9291       else if (bitpos0 == bitpos1
9292 	       && (indirect_base0
9293 		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9294 	       && (indirect_base1
9295 		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9296 	       && ((offset0 == offset1)
9297 		   || (offset0 && offset1
9298 		       && operand_equal_p (offset0, offset1, 0))))
9299 	{
9300 	  if (indirect_base0)
9301 	    base0 = build_fold_addr_expr_loc (loc, base0);
9302 	  if (indirect_base1)
9303 	    base1 = build_fold_addr_expr_loc (loc, base1);
9304 	  return fold_build2_loc (loc, code, type, base0, base1);
9305 	}
9306     }
9307 
9308   /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9309      X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
9310      the resulting offset is smaller in absolute value than the
9311      original one and has the same sign.  */
9312   if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9313       && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9314       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9315 	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9316       && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9317       && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9318 	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9319     {
9320       tree const1 = TREE_OPERAND (arg0, 1);
9321       tree const2 = TREE_OPERAND (arg1, 1);
9322       tree variable1 = TREE_OPERAND (arg0, 0);
9323       tree variable2 = TREE_OPERAND (arg1, 0);
9324       tree cst;
9325       const char * const warnmsg = G_("assuming signed overflow does not "
9326 				      "occur when combining constants around "
9327 				      "a comparison");
9328 
9329       /* Put the constant on the side where it doesn't overflow and is
9330 	 of lower absolute value and of same sign than before.  */
9331       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9332 			     ? MINUS_EXPR : PLUS_EXPR,
9333 			     const2, const1);
9334       if (!TREE_OVERFLOW (cst)
9335 	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9336 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9337 	{
9338 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9339 	  return fold_build2_loc (loc, code, type,
9340 				  variable1,
9341 				  fold_build2_loc (loc, TREE_CODE (arg1),
9342 						   TREE_TYPE (arg1),
9343 						   variable2, cst));
9344 	}
9345 
9346       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9347 			     ? MINUS_EXPR : PLUS_EXPR,
9348 			     const1, const2);
9349       if (!TREE_OVERFLOW (cst)
9350 	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9351 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9352 	{
9353 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9354 	  return fold_build2_loc (loc, code, type,
9355 				  fold_build2_loc (loc, TREE_CODE (arg0),
9356 						   TREE_TYPE (arg0),
9357 						   variable1, cst),
9358 				  variable2);
9359 	}
9360     }
9361 
9362   /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9363      signed arithmetic case.  That form is created by the compiler
9364      often enough for folding it to be of value.  One example is in
9365      computing loop trip counts after Operator Strength Reduction.  */
9366   if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9367       && TREE_CODE (arg0) == MULT_EXPR
9368       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9369           && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9370       && integer_zerop (arg1))
9371     {
9372       tree const1 = TREE_OPERAND (arg0, 1);
9373       tree const2 = arg1;                       /* zero */
9374       tree variable1 = TREE_OPERAND (arg0, 0);
9375       enum tree_code cmp_code = code;
9376 
9377       /* Handle unfolded multiplication by zero.  */
9378       if (integer_zerop (const1))
9379 	return fold_build2_loc (loc, cmp_code, type, const1, const2);
9380 
9381       fold_overflow_warning (("assuming signed overflow does not occur when "
9382 			      "eliminating multiplication in comparison "
9383 			      "with zero"),
9384 			     WARN_STRICT_OVERFLOW_COMPARISON);
9385 
9386       /* If const1 is negative we swap the sense of the comparison.  */
9387       if (tree_int_cst_sgn (const1) < 0)
9388         cmp_code = swap_tree_comparison (cmp_code);
9389 
9390       return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9391     }
9392 
9393   tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9394   if (tem)
9395     return tem;
9396 
9397   if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9398     {
9399       tree targ0 = strip_float_extensions (arg0);
9400       tree targ1 = strip_float_extensions (arg1);
9401       tree newtype = TREE_TYPE (targ0);
9402 
9403       if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9404 	newtype = TREE_TYPE (targ1);
9405 
9406       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
9407       if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9408 	return fold_build2_loc (loc, code, type,
9409 			    fold_convert_loc (loc, newtype, targ0),
9410 			    fold_convert_loc (loc, newtype, targ1));
9411 
9412       /* (-a) CMP (-b) -> b CMP a  */
9413       if (TREE_CODE (arg0) == NEGATE_EXPR
9414 	  && TREE_CODE (arg1) == NEGATE_EXPR)
9415 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9416 			    TREE_OPERAND (arg0, 0));
9417 
9418       if (TREE_CODE (arg1) == REAL_CST)
9419 	{
9420 	  REAL_VALUE_TYPE cst;
9421 	  cst = TREE_REAL_CST (arg1);
9422 
9423 	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
9424 	  if (TREE_CODE (arg0) == NEGATE_EXPR)
9425 	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
9426 				TREE_OPERAND (arg0, 0),
9427 				build_real (TREE_TYPE (arg1),
9428 					    real_value_negate (&cst)));
9429 
9430 	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
9431 	  /* a CMP (-0) -> a CMP 0  */
9432 	  if (REAL_VALUE_MINUS_ZERO (cst))
9433 	    return fold_build2_loc (loc, code, type, arg0,
9434 				build_real (TREE_TYPE (arg1), dconst0));
9435 
9436 	  /* x != NaN is always true, other ops are always false.  */
9437 	  if (REAL_VALUE_ISNAN (cst)
9438 	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9439 	    {
9440 	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9441 	      return omit_one_operand_loc (loc, type, tem, arg0);
9442 	    }
9443 
9444 	  /* Fold comparisons against infinity.  */
9445 	  if (REAL_VALUE_ISINF (cst)
9446 	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9447 	    {
9448 	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
9449 	      if (tem != NULL_TREE)
9450 		return tem;
9451 	    }
9452 	}
9453 
9454       /* If this is a comparison of a real constant with a PLUS_EXPR
9455 	 or a MINUS_EXPR of a real constant, we can convert it into a
9456 	 comparison with a revised real constant as long as no overflow
9457 	 occurs when unsafe_math_optimizations are enabled.  */
9458       if (flag_unsafe_math_optimizations
9459 	  && TREE_CODE (arg1) == REAL_CST
9460 	  && (TREE_CODE (arg0) == PLUS_EXPR
9461 	      || TREE_CODE (arg0) == MINUS_EXPR)
9462 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9463 	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9464 				      ? MINUS_EXPR : PLUS_EXPR,
9465 				      arg1, TREE_OPERAND (arg0, 1)))
9466 	  && !TREE_OVERFLOW (tem))
9467 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9468 
9469       /* Likewise, we can simplify a comparison of a real constant with
9470          a MINUS_EXPR whose first operand is also a real constant, i.e.
9471          (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
9472          floating-point types only if -fassociative-math is set.  */
9473       if (flag_associative_math
9474 	  && TREE_CODE (arg1) == REAL_CST
9475 	  && TREE_CODE (arg0) == MINUS_EXPR
9476 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9477 	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9478 				      arg1))
9479 	  && !TREE_OVERFLOW (tem))
9480 	return fold_build2_loc (loc, swap_tree_comparison (code), type,
9481 			    TREE_OPERAND (arg0, 1), tem);
9482 
9483       /* Fold comparisons against built-in math functions.  */
9484       if (TREE_CODE (arg1) == REAL_CST
9485 	  && flag_unsafe_math_optimizations
9486 	  && ! flag_errno_math)
9487 	{
9488 	  enum built_in_function fcode = builtin_mathfn_code (arg0);
9489 
9490 	  if (fcode != END_BUILTINS)
9491 	    {
9492 	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9493 	      if (tem != NULL_TREE)
9494 		return tem;
9495 	    }
9496 	}
9497     }
9498 
9499   if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9500       && CONVERT_EXPR_P (arg0))
9501     {
9502       /* If we are widening one operand of an integer comparison,
9503 	 see if the other operand is similarly being widened.  Perhaps we
9504 	 can do the comparison in the narrower type.  */
9505       tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9506       if (tem)
9507 	return tem;
9508 
9509       /* Or if we are changing signedness.  */
9510       tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9511       if (tem)
9512 	return tem;
9513     }
9514 
9515   /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9516      constant, we can simplify it.  */
9517   if (TREE_CODE (arg1) == INTEGER_CST
9518       && (TREE_CODE (arg0) == MIN_EXPR
9519 	  || TREE_CODE (arg0) == MAX_EXPR)
9520       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9521     {
9522       tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9523       if (tem)
9524 	return tem;
9525     }
9526 
9527   /* Simplify comparison of something with itself.  (For IEEE
9528      floating-point, we can only do some of these simplifications.)  */
9529   if (operand_equal_p (arg0, arg1, 0))
9530     {
9531       switch (code)
9532 	{
9533 	case EQ_EXPR:
9534 	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9535 	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9536 	    return constant_boolean_node (1, type);
9537 	  break;
9538 
9539 	case GE_EXPR:
9540 	case LE_EXPR:
9541 	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9542 	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9543 	    return constant_boolean_node (1, type);
9544 	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9545 
9546 	case NE_EXPR:
9547 	  /* For NE, we can only do this simplification if integer
9548 	     or we don't honor IEEE floating point NaNs.  */
9549 	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9550 	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9551 	    break;
9552 	  /* ... fall through ...  */
9553 	case GT_EXPR:
9554 	case LT_EXPR:
9555 	  return constant_boolean_node (0, type);
9556 	default:
9557 	  gcc_unreachable ();
9558 	}
9559     }
9560 
9561   /* If we are comparing an expression that just has comparisons
9562      of two integer values, arithmetic expressions of those comparisons,
9563      and constants, we can simplify it.  There are only three cases
9564      to check: the two values can either be equal, the first can be
9565      greater, or the second can be greater.  Fold the expression for
9566      those three values.  Since each value must be 0 or 1, we have
9567      eight possibilities, each of which corresponds to the constant 0
9568      or 1 or one of the six possible comparisons.
9569 
9570      This handles common cases like (a > b) == 0 but also handles
9571      expressions like  ((x > y) - (y > x)) > 0, which supposedly
9572      occur in macroized code.  */
9573 
9574   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9575     {
9576       tree cval1 = 0, cval2 = 0;
9577       int save_p = 0;
9578 
9579       if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9580 	  /* Don't handle degenerate cases here; they should already
9581 	     have been handled anyway.  */
9582 	  && cval1 != 0 && cval2 != 0
9583 	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9584 	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9585 	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9586 	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9587 	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9588 	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9589 				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9590 	{
9591 	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9592 	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9593 
9594 	  /* We can't just pass T to eval_subst in case cval1 or cval2
9595 	     was the same as ARG1.  */
9596 
9597 	  tree high_result
9598 		= fold_build2_loc (loc, code, type,
9599 			       eval_subst (loc, arg0, cval1, maxval,
9600 					   cval2, minval),
9601 			       arg1);
9602 	  tree equal_result
9603 		= fold_build2_loc (loc, code, type,
9604 			       eval_subst (loc, arg0, cval1, maxval,
9605 					   cval2, maxval),
9606 			       arg1);
9607 	  tree low_result
9608 		= fold_build2_loc (loc, code, type,
9609 			       eval_subst (loc, arg0, cval1, minval,
9610 					   cval2, maxval),
9611 			       arg1);
9612 
9613 	  /* All three of these results should be 0 or 1.  Confirm they are.
9614 	     Then use those values to select the proper code to use.  */
9615 
9616 	  if (TREE_CODE (high_result) == INTEGER_CST
9617 	      && TREE_CODE (equal_result) == INTEGER_CST
9618 	      && TREE_CODE (low_result) == INTEGER_CST)
9619 	    {
9620 	      /* Make a 3-bit mask with the high-order bit being the
9621 		 value for `>', the next for '=', and the low for '<'.  */
9622 	      switch ((integer_onep (high_result) * 4)
9623 		      + (integer_onep (equal_result) * 2)
9624 		      + integer_onep (low_result))
9625 		{
9626 		case 0:
9627 		  /* Always false.  */
9628 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9629 		case 1:
9630 		  code = LT_EXPR;
9631 		  break;
9632 		case 2:
9633 		  code = EQ_EXPR;
9634 		  break;
9635 		case 3:
9636 		  code = LE_EXPR;
9637 		  break;
9638 		case 4:
9639 		  code = GT_EXPR;
9640 		  break;
9641 		case 5:
9642 		  code = NE_EXPR;
9643 		  break;
9644 		case 6:
9645 		  code = GE_EXPR;
9646 		  break;
9647 		case 7:
9648 		  /* Always true.  */
9649 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9650 		}
9651 
9652 	      if (save_p)
9653 		{
9654 		  tem = save_expr (build2 (code, type, cval1, cval2));
9655 		  SET_EXPR_LOCATION (tem, loc);
9656 		  return tem;
9657 		}
9658 	      return fold_build2_loc (loc, code, type, cval1, cval2);
9659 	    }
9660 	}
9661     }
9662 
9663   /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9664      into a single range test.  */
9665   if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9666        || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9667       && TREE_CODE (arg1) == INTEGER_CST
9668       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9669       && !integer_zerop (TREE_OPERAND (arg0, 1))
9670       && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9671       && !TREE_OVERFLOW (arg1))
9672     {
9673       tem = fold_div_compare (loc, code, type, arg0, arg1);
9674       if (tem != NULL_TREE)
9675 	return tem;
9676     }
9677 
9678   /* Fold ~X op ~Y as Y op X.  */
9679   if (TREE_CODE (arg0) == BIT_NOT_EXPR
9680       && TREE_CODE (arg1) == BIT_NOT_EXPR)
9681     {
9682       tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9683       return fold_build2_loc (loc, code, type,
9684 			  fold_convert_loc (loc, cmp_type,
9685 					    TREE_OPERAND (arg1, 0)),
9686 			  TREE_OPERAND (arg0, 0));
9687     }
9688 
9689   /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
9690   if (TREE_CODE (arg0) == BIT_NOT_EXPR
9691       && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9692     {
9693       tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9694       return fold_build2_loc (loc, swap_tree_comparison (code), type,
9695 			  TREE_OPERAND (arg0, 0),
9696 			  fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9697 				       fold_convert_loc (loc, cmp_type, arg1)));
9698     }
9699 
9700   return NULL_TREE;
9701 }
9702 
9703 
9704 /* Subroutine of fold_binary.  Optimize complex multiplications of the
9705    form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
9706    argument EXPR represents the expression "z" of type TYPE.  */
9707 
9708 static tree
fold_mult_zconjz(location_t loc,tree type,tree expr)9709 fold_mult_zconjz (location_t loc, tree type, tree expr)
9710 {
9711   tree itype = TREE_TYPE (type);
9712   tree rpart, ipart, tem;
9713 
9714   if (TREE_CODE (expr) == COMPLEX_EXPR)
9715     {
9716       rpart = TREE_OPERAND (expr, 0);
9717       ipart = TREE_OPERAND (expr, 1);
9718     }
9719   else if (TREE_CODE (expr) == COMPLEX_CST)
9720     {
9721       rpart = TREE_REALPART (expr);
9722       ipart = TREE_IMAGPART (expr);
9723     }
9724   else
9725     {
9726       expr = save_expr (expr);
9727       rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9728       ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9729     }
9730 
9731   rpart = save_expr (rpart);
9732   ipart = save_expr (ipart);
9733   tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9734 		     fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9735 		     fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9736   return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9737 			  build_zero_cst (itype));
9738 }
9739 
9740 
/* Subroutine of fold_binary.  If P is the value of EXPR, computes
   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
   guarantees that P and N have the same least significant log2(M) bits.
   N is not otherwise constrained.  In particular, N is not normalized to
   0 <= N < M as is common.  In general, the precise value of P is unknown.
   M is chosen as large as possible such that constant N can be determined.

   Returns M and sets *RESIDUE to N.

   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
   account.  This is not always possible due to PR 35705.
 */

static unsigned HOST_WIDE_INT
get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
				 bool allow_func_align)
{
  enum tree_code code;

  *residue = 0;

  code = TREE_CODE (expr);
  if (code == ADDR_EXPR)
    {
      /* &OBJECT: the object's known alignment gives the modulus and
	 its byte offset within that alignment the residue.  */
      unsigned int bitalign;
      get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
      *residue /= BITS_PER_UNIT;
      return bitalign / BITS_PER_UNIT;
    }
  else if (code == POINTER_PLUS_EXPR)
    {
      /* BASE p+ OFFSET: start from what is known about BASE, then
	 refine with the offset.  ALLOW_FUNC_ALIGN is only threaded
	 through to the recursive query on the base pointer.  */
      tree op0, op1;
      unsigned HOST_WIDE_INT modulus;
      enum tree_code inner_code;

      op0 = TREE_OPERAND (expr, 0);
      STRIP_NOPS (op0);
      modulus = get_pointer_modulus_and_residue (op0, residue,
						 allow_func_align);

      op1 = TREE_OPERAND (expr, 1);
      STRIP_NOPS (op1);
      inner_code = TREE_CODE (op1);
      if (inner_code == INTEGER_CST)
	{
	  /* A constant offset shifts the residue but leaves the
	     modulus unchanged.  */
	  *residue += TREE_INT_CST_LOW (op1);
	  return modulus;
	}
      else if (inner_code == MULT_EXPR)
	{
	  op1 = TREE_OPERAND (op1, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    {
	      unsigned HOST_WIDE_INT align;

	      /* Compute the greatest power-of-2 divisor of op1.  */
	      align = TREE_INT_CST_LOW (op1);
	      align &= -align;

	      /* If align is non-zero and less than *modulus, replace
		 *modulus with align.  If align is 0, then either op1 is 0
		 or the greatest power-of-2 divisor of op1 doesn't fit in an
		 unsigned HOST_WIDE_INT.  In either case, no additional
		 constraint is imposed.  */
	      if (align)
		modulus = MIN (modulus, align);

	      return modulus;
	    }
	}
    }

  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
}
9817 
/* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
   CONSTRUCTOR ARG into array ELTS and return true if successful.
   ELTS must have room for TYPE_VECTOR_SUBPARTS (TREE_TYPE (ARG))
   entries; positions not supplied by ARG are filled with zeros.  */

static bool
vec_cst_ctor_to_array (tree arg, tree *elts)
{
  unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;

  if (TREE_CODE (arg) == VECTOR_CST)
    {
      for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
	elts[i] = VECTOR_CST_ELT (arg, i);
    }
  else if (TREE_CODE (arg) == CONSTRUCTOR)
    {
      constructor_elt *elt;

      /* Fail on excess initializers and on sub-vector initializers,
	 which would need to be flattened.  */
      FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
	  return false;
	else
	  elts[i] = elt->value;
    }
  else
    return false;
  /* I is left at the count of elements actually stored above; pad the
     remainder of ELTS with zero constants of the element type.  */
  for (; i < nelts; i++)
    elts[i]
      = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
  return true;
}
9848 
9849 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9850    selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9851    NULL_TREE otherwise.  */
9852 
9853 static tree
fold_vec_perm(tree type,tree arg0,tree arg1,const unsigned char * sel)9854 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9855 {
9856   unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9857   tree *elts;
9858   bool need_ctor = false;
9859 
9860   gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9861 	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9862   if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9863       || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9864     return NULL_TREE;
9865 
9866   elts = XALLOCAVEC (tree, nelts * 3);
9867   if (!vec_cst_ctor_to_array (arg0, elts)
9868       || !vec_cst_ctor_to_array (arg1, elts + nelts))
9869     return NULL_TREE;
9870 
9871   for (i = 0; i < nelts; i++)
9872     {
9873       if (!CONSTANT_CLASS_P (elts[sel[i]]))
9874 	need_ctor = true;
9875       elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9876     }
9877 
9878   if (need_ctor)
9879     {
9880       vec<constructor_elt, va_gc> *v;
9881       vec_alloc (v, nelts);
9882       for (i = 0; i < nelts; i++)
9883 	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9884       return build_constructor (type, v);
9885     }
9886   else
9887     return build_vector (type, &elts[2 * nelts]);
9888 }
9889 
/* Try to fold a pointer difference of type TYPE of two address expressions
   of array references AREF0 and AREF1 using location LOC.  Return a
   simplified expression for the difference or NULL_TREE.  */

static tree
fold_addr_of_array_ref_difference (location_t loc, tree type,
				   tree aref0, tree aref1)
{
  tree base0 = TREE_OPERAND (aref0, 0);
  tree base1 = TREE_OPERAND (aref1, 0);
  tree base_offset = build_int_cst (type, 0);

  /* If the bases are array references as well, recurse.  If the bases
     are pointer indirections compute the difference of the pointers.
     If the bases are equal, we are set.  Note the first two alternatives
     set BASE_OFFSET as a side effect of the test succeeding.  */
  if ((TREE_CODE (base0) == ARRAY_REF
       && TREE_CODE (base1) == ARRAY_REF
       && (base_offset
	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
      || (INDIRECT_REF_P (base0)
	  && INDIRECT_REF_P (base1)
	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
					     TREE_OPERAND (base0, 0),
					     TREE_OPERAND (base1, 0))))
      || operand_equal_p (base0, base1, 0))
    {
      /* Result is the base difference plus the difference of the index
	 expressions scaled by the element size.  */
      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
      tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
      tree diff = build2 (MINUS_EXPR, type, op0, op1);
      return fold_build2_loc (loc, PLUS_EXPR, type,
			      base_offset,
			      fold_build2_loc (loc, MULT_EXPR, type,
					       diff, esz));
    }
  return NULL_TREE;
}
9927 
9928 /* If the real or vector real constant CST of type TYPE has an exact
9929    inverse, return it, else return NULL.  */
9930 
9931 static tree
exact_inverse(tree type,tree cst)9932 exact_inverse (tree type, tree cst)
9933 {
9934   REAL_VALUE_TYPE r;
9935   tree unit_type, *elts;
9936   enum machine_mode mode;
9937   unsigned vec_nelts, i;
9938 
9939   switch (TREE_CODE (cst))
9940     {
9941     case REAL_CST:
9942       r = TREE_REAL_CST (cst);
9943 
9944       if (exact_real_inverse (TYPE_MODE (type), &r))
9945 	return build_real (type, r);
9946 
9947       return NULL_TREE;
9948 
9949     case VECTOR_CST:
9950       vec_nelts = VECTOR_CST_NELTS (cst);
9951       elts = XALLOCAVEC (tree, vec_nelts);
9952       unit_type = TREE_TYPE (type);
9953       mode = TYPE_MODE (unit_type);
9954 
9955       for (i = 0; i < vec_nelts; i++)
9956 	{
9957 	  r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9958 	  if (!exact_real_inverse (mode, &r))
9959 	    return NULL_TREE;
9960 	  elts[i] = build_real (unit_type, r);
9961 	}
9962 
9963       return build_vector (type, elts);
9964 
9965     default:
9966       return NULL_TREE;
9967     }
9968 }
9969 
9970 /*  Mask out the tz least significant bits of X of type TYPE where
9971     tz is the number of trailing zeroes in Y.  */
9972 static double_int
mask_with_tz(tree type,double_int x,double_int y)9973 mask_with_tz (tree type, double_int x, double_int y)
9974 {
9975   int tz = y.trailing_zeros ();
9976 
9977   if (tz > 0)
9978     {
9979       double_int mask;
9980 
9981       mask = ~double_int::mask (tz);
9982       mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
9983       return mask & x;
9984     }
9985   return x;
9986 }
9987 
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

static bool
tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  tree type = TREE_TYPE (t);
  enum tree_code code;

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  code = TREE_CODE (t);
  /* First dispatch on the code class, which routes the bulk of the
     expression kinds to the shared nonzero helpers.  */
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_unary:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					      strict_overflow_p);
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonzero_warnv_p (code, type,
					       TREE_OPERAND (t, 0),
					       TREE_OPERAND (t, 1),
					       strict_overflow_p);
    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    default:
      break;
    }

  /* Then handle individual codes whose class is not specific enough.  */
  switch (code)
    {
    case TRUTH_NOT_EXPR:
      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
					      strict_overflow_p);

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonzero_warnv_p (code, type,
					       TREE_OPERAND (t, 0),
					       TREE_OPERAND (t, 1),
					       strict_overflow_p);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonzero_warnv_p (t, strict_overflow_p);

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      /* The value of these expressions is their second operand.  */
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
					strict_overflow_p);

    case SAVE_EXPR:
      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
					strict_overflow_p);

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	if (!fndecl) return false;
	/* A throwing operator new never returns NULL (unless -fcheck-new
	   or keeping null-pointer checks is requested).  */
	if (flag_delete_null_pointer_checks && !flag_check_new
	    && DECL_IS_OPERATOR_NEW (fndecl)
	    && !TREE_NOTHROW (fndecl))
	  return true;
	/* Likewise for callees declared returns_nonnull, and for
	   alloca, which never returns NULL.  */
	if (flag_delete_null_pointer_checks
	    && lookup_attribute ("returns_nonnull",
		 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
	  return true;
	return alloca_call_p (t);
      }

    default:
      break;
    }
  return false;
}
10080 
10081 /* Return true when T is an address and is known to be nonzero.
10082    Handle warnings about undefined signed overflow.  */
10083 
10084 static bool
tree_expr_nonzero_p(tree t)10085 tree_expr_nonzero_p (tree t)
10086 {
10087   bool ret, strict_overflow_p;
10088 
10089   strict_overflow_p = false;
10090   ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10091   if (strict_overflow_p)
10092     fold_overflow_warning (("assuming signed overflow does not occur when "
10093 			    "determining that expression is always "
10094 			    "non-zero"),
10095 			   WARN_STRICT_OVERFLOW_MISC);
10096   return ret;
10097 }
10098 
10099 /* Fold a binary expression of code CODE and type TYPE with operands
10100    OP0 and OP1.  LOC is the location of the resulting expression.
10101    Return the folded expression if folding is successful.  Otherwise,
10102    return NULL_TREE.  */
10103 
10104 tree
fold_binary_loc(location_t loc,enum tree_code code,tree type,tree op0,tree op1)10105 fold_binary_loc (location_t loc,
10106 	     enum tree_code code, tree type, tree op0, tree op1)
10107 {
10108   enum tree_code_class kind = TREE_CODE_CLASS (code);
10109   tree arg0, arg1, tem;
10110   tree t1 = NULL_TREE;
10111   bool strict_overflow_p;
10112   unsigned int prec;
10113 
10114   gcc_assert (IS_EXPR_CODE_CLASS (kind)
10115 	      && TREE_CODE_LENGTH (code) == 2
10116 	      && op0 != NULL_TREE
10117 	      && op1 != NULL_TREE);
10118 
10119   arg0 = op0;
10120   arg1 = op1;
10121 
10122   /* Strip any conversions that don't change the mode.  This is
10123      safe for every expression, except for a comparison expression
10124      because its signedness is derived from its operands.  So, in
10125      the latter case, only strip conversions that don't change the
10126      signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
10127      preserved.
10128 
10129      Note that this is done as an internal manipulation within the
10130      constant folder, in order to find the simplest representation
10131      of the arguments so that their form can be studied.  In any
10132      cases, the appropriate type conversions should be put back in
10133      the tree that will get out of the constant folder.  */
10134 
10135   if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10136     {
10137       STRIP_SIGN_NOPS (arg0);
10138       STRIP_SIGN_NOPS (arg1);
10139     }
10140   else
10141     {
10142       STRIP_NOPS (arg0);
10143       STRIP_NOPS (arg1);
10144     }
10145 
10146   /* Note that TREE_CONSTANT isn't enough: static var addresses are
10147      constant but we can't do arithmetic on them.  */
10148   if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10149       || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10150       || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10151       || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10152       || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10153       || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10154       || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10155     {
10156       if (kind == tcc_binary)
10157 	{
10158 	  /* Make sure type and arg0 have the same saturating flag.  */
10159 	  gcc_assert (TYPE_SATURATING (type)
10160 		      == TYPE_SATURATING (TREE_TYPE (arg0)));
10161 	  tem = const_binop (code, arg0, arg1);
10162 	}
10163       else if (kind == tcc_comparison)
10164 	tem = fold_relational_const (code, type, arg0, arg1);
10165       else
10166 	tem = NULL_TREE;
10167 
10168       if (tem != NULL_TREE)
10169 	{
10170 	  if (TREE_TYPE (tem) != type)
10171 	    tem = fold_convert_loc (loc, type, tem);
10172 	  return tem;
10173 	}
10174     }
10175 
10176   /* If this is a commutative operation, and ARG0 is a constant, move it
10177      to ARG1 to reduce the number of tests below.  */
10178   if (commutative_tree_code (code)
10179       && tree_swap_operands_p (arg0, arg1, true))
10180     return fold_build2_loc (loc, code, type, op1, op0);
10181 
10182   /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10183 
10184      First check for cases where an arithmetic operation is applied to a
10185      compound, conditional, or comparison operation.  Push the arithmetic
10186      operation inside the compound or conditional to see if any folding
10187      can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
10189      expand_expr.
10190 
10191      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10192      one of the operands is a comparison and the other is a comparison, a
10193      BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
10194      code below would make the expression more complex.  Change it to a
10195      TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
10196      TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
10197 
10198   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10199        || code == EQ_EXPR || code == NE_EXPR)
10200       && TREE_CODE (type) != VECTOR_TYPE
10201       && ((truth_value_p (TREE_CODE (arg0))
10202 	   && (truth_value_p (TREE_CODE (arg1))
10203 	       || (TREE_CODE (arg1) == BIT_AND_EXPR
10204 		   && integer_onep (TREE_OPERAND (arg1, 1)))))
10205 	  || (truth_value_p (TREE_CODE (arg1))
10206 	      && (truth_value_p (TREE_CODE (arg0))
10207 		  || (TREE_CODE (arg0) == BIT_AND_EXPR
10208 		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
10209     {
10210       tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10211 			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10212 			 : TRUTH_XOR_EXPR,
10213 			 boolean_type_node,
10214 			 fold_convert_loc (loc, boolean_type_node, arg0),
10215 			 fold_convert_loc (loc, boolean_type_node, arg1));
10216 
10217       if (code == EQ_EXPR)
10218 	tem = invert_truthvalue_loc (loc, tem);
10219 
10220       return fold_convert_loc (loc, type, tem);
10221     }
10222 
10223   if (TREE_CODE_CLASS (code) == tcc_binary
10224       || TREE_CODE_CLASS (code) == tcc_comparison)
10225     {
10226       if (TREE_CODE (arg0) == COMPOUND_EXPR)
10227 	{
10228 	  tem = fold_build2_loc (loc, code, type,
10229 			     fold_convert_loc (loc, TREE_TYPE (op0),
10230 					       TREE_OPERAND (arg0, 1)), op1);
10231 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10232 			     tem);
10233 	}
10234       if (TREE_CODE (arg1) == COMPOUND_EXPR
10235 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10236 	{
10237 	  tem = fold_build2_loc (loc, code, type, op0,
10238 			     fold_convert_loc (loc, TREE_TYPE (op1),
10239 					       TREE_OPERAND (arg1, 1)));
10240 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10241 			     tem);
10242 	}
10243 
10244       if (TREE_CODE (arg0) == COND_EXPR
10245 	  || TREE_CODE (arg0) == VEC_COND_EXPR
10246 	  || COMPARISON_CLASS_P (arg0))
10247 	{
10248 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10249 						     arg0, arg1,
10250 						     /*cond_first_p=*/1);
10251 	  if (tem != NULL_TREE)
10252 	    return tem;
10253 	}
10254 
10255       if (TREE_CODE (arg1) == COND_EXPR
10256 	  || TREE_CODE (arg1) == VEC_COND_EXPR
10257 	  || COMPARISON_CLASS_P (arg1))
10258 	{
10259 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10260 						     arg1, arg0,
10261 					             /*cond_first_p=*/0);
10262 	  if (tem != NULL_TREE)
10263 	    return tem;
10264 	}
10265     }
10266 
10267   switch (code)
10268     {
10269     case MEM_REF:
10270       /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
10271       if (TREE_CODE (arg0) == ADDR_EXPR
10272 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10273 	{
10274 	  tree iref = TREE_OPERAND (arg0, 0);
10275 	  return fold_build2 (MEM_REF, type,
10276 			      TREE_OPERAND (iref, 0),
10277 			      int_const_binop (PLUS_EXPR, arg1,
10278 					       TREE_OPERAND (iref, 1)));
10279 	}
10280 
10281       /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
10282       if (TREE_CODE (arg0) == ADDR_EXPR
10283 	  && handled_component_p (TREE_OPERAND (arg0, 0)))
10284 	{
10285 	  tree base;
10286 	  HOST_WIDE_INT coffset;
10287 	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10288 						&coffset);
10289 	  if (!base)
10290 	    return NULL_TREE;
10291 	  return fold_build2 (MEM_REF, type,
10292 			      build_fold_addr_expr (base),
10293 			      int_const_binop (PLUS_EXPR, arg1,
10294 					       size_int (coffset)));
10295 	}
10296 
10297       return NULL_TREE;
10298 
10299     case POINTER_PLUS_EXPR:
10300       /* 0 +p index -> (type)index */
10301       if (integer_zerop (arg0))
10302 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10303 
10304       /* PTR +p 0 -> PTR */
10305       if (integer_zerop (arg1))
10306 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10307 
10308       /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
10309       if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10310 	   && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10311         return fold_convert_loc (loc, type,
10312 				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10313 					      fold_convert_loc (loc, sizetype,
10314 								arg1),
10315 					      fold_convert_loc (loc, sizetype,
10316 								arg0)));
10317 
10318       /* (PTR +p B) +p A -> PTR +p (B + A) */
10319       if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10320 	{
10321 	  tree inner;
10322 	  tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10323 	  tree arg00 = TREE_OPERAND (arg0, 0);
10324 	  inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10325 			       arg01, fold_convert_loc (loc, sizetype, arg1));
10326 	  return fold_convert_loc (loc, type,
10327 				   fold_build_pointer_plus_loc (loc,
10328 								arg00, inner));
10329 	}
10330 
10331       /* PTR_CST +p CST -> CST1 */
10332       if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10333 	return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10334 			    fold_convert_loc (loc, type, arg1));
10335 
10336      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10337 	of the array.  The loop optimizer sometimes produces this type of
10338 	expression.  */
10339       if (TREE_CODE (arg0) == ADDR_EXPR)
10340 	{
10341 	  tem = try_move_mult_to_index (loc, arg0,
10342 					fold_convert_loc (loc,
10343 							  ssizetype, arg1));
10344 	  if (tem)
10345 	    return fold_convert_loc (loc, type, tem);
10346 	}
10347 
10348       return NULL_TREE;
10349 
10350     case PLUS_EXPR:
10351       /* A + (-B) -> A - B */
10352       if (TREE_CODE (arg1) == NEGATE_EXPR
10353 	  && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10354 	return fold_build2_loc (loc, MINUS_EXPR, type,
10355 			    fold_convert_loc (loc, type, arg0),
10356 			    fold_convert_loc (loc, type,
10357 					      TREE_OPERAND (arg1, 0)));
10358       /* (-A) + B -> B - A */
10359       if (TREE_CODE (arg0) == NEGATE_EXPR
10360 	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)
10361 	  && (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
10362 	return fold_build2_loc (loc, MINUS_EXPR, type,
10363 			    fold_convert_loc (loc, type, arg1),
10364 			    fold_convert_loc (loc, type,
10365 					      TREE_OPERAND (arg0, 0)));
10366 
10367       if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10368 	{
10369 	  /* Convert ~A + 1 to -A.  */
10370 	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
10371 	      && integer_onep (arg1))
10372 	    return fold_build1_loc (loc, NEGATE_EXPR, type,
10373 				fold_convert_loc (loc, type,
10374 						  TREE_OPERAND (arg0, 0)));
10375 
10376 	  /* ~X + X is -1.  */
10377 	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
10378 	      && !TYPE_OVERFLOW_TRAPS (type))
10379 	    {
10380 	      tree tem = TREE_OPERAND (arg0, 0);
10381 
10382 	      STRIP_NOPS (tem);
10383 	      if (operand_equal_p (tem, arg1, 0))
10384 		{
10385 		  t1 = build_all_ones_cst (type);
10386 		  return omit_one_operand_loc (loc, type, t1, arg1);
10387 		}
10388 	    }
10389 
10390 	  /* X + ~X is -1.  */
10391 	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
10392 	      && !TYPE_OVERFLOW_TRAPS (type))
10393 	    {
10394 	      tree tem = TREE_OPERAND (arg1, 0);
10395 
10396 	      STRIP_NOPS (tem);
10397 	      if (operand_equal_p (arg0, tem, 0))
10398 		{
10399 		  t1 = build_all_ones_cst (type);
10400 		  return omit_one_operand_loc (loc, type, t1, arg0);
10401 		}
10402 	    }
10403 
10404 	  /* X + (X / CST) * -CST is X % CST.  */
10405 	  if (TREE_CODE (arg1) == MULT_EXPR
10406 	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10407 	      && operand_equal_p (arg0,
10408 				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10409 	    {
10410 	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10411 	      tree cst1 = TREE_OPERAND (arg1, 1);
10412 	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10413 				      cst1, cst0);
10414 	      if (sum && integer_zerop (sum))
10415 		return fold_convert_loc (loc, type,
10416 					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10417 						      TREE_TYPE (arg0), arg0,
10418 						      cst0));
10419 	    }
10420 	}
10421 
10422       /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10423 	 one.  Make sure the type is not saturating and has the signedness of
10424 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10425 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
10426       if ((TREE_CODE (arg0) == MULT_EXPR
10427 	   || TREE_CODE (arg1) == MULT_EXPR)
10428 	  && !TYPE_SATURATING (type)
10429 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10430 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10431 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
10432         {
10433 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10434 	  if (tem)
10435 	    return tem;
10436 	}
10437 
10438       if (! FLOAT_TYPE_P (type))
10439 	{
10440 	  if (integer_zerop (arg1))
10441 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10442 
10443 	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10444 	     with a constant, and the two constants have no bits in common,
10445 	     we should treat this as a BIT_IOR_EXPR since this may produce more
10446 	     simplifications.  */
10447 	  if (TREE_CODE (arg0) == BIT_AND_EXPR
10448 	      && TREE_CODE (arg1) == BIT_AND_EXPR
10449 	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10450 	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10451 	      && integer_zerop (const_binop (BIT_AND_EXPR,
10452 					     TREE_OPERAND (arg0, 1),
10453 					     TREE_OPERAND (arg1, 1))))
10454 	    {
10455 	      code = BIT_IOR_EXPR;
10456 	      goto bit_ior;
10457 	    }
10458 
10459 	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10460 	     (plus (plus (mult) (mult)) (foo)) so that we can
10461 	     take advantage of the factoring cases below.  */
10462 	  if (TYPE_OVERFLOW_WRAPS (type)
10463 	      && (((TREE_CODE (arg0) == PLUS_EXPR
10464 		    || TREE_CODE (arg0) == MINUS_EXPR)
10465 		   && TREE_CODE (arg1) == MULT_EXPR)
10466 		  || ((TREE_CODE (arg1) == PLUS_EXPR
10467 		       || TREE_CODE (arg1) == MINUS_EXPR)
10468 		      && TREE_CODE (arg0) == MULT_EXPR)))
10469 	    {
10470 	      tree parg0, parg1, parg, marg;
10471 	      enum tree_code pcode;
10472 
10473 	      if (TREE_CODE (arg1) == MULT_EXPR)
10474 		parg = arg0, marg = arg1;
10475 	      else
10476 		parg = arg1, marg = arg0;
10477 	      pcode = TREE_CODE (parg);
10478 	      parg0 = TREE_OPERAND (parg, 0);
10479 	      parg1 = TREE_OPERAND (parg, 1);
10480 	      STRIP_NOPS (parg0);
10481 	      STRIP_NOPS (parg1);
10482 
10483 	      if (TREE_CODE (parg0) == MULT_EXPR
10484 		  && TREE_CODE (parg1) != MULT_EXPR)
10485 		return fold_build2_loc (loc, pcode, type,
10486 				    fold_build2_loc (loc, PLUS_EXPR, type,
10487 						 fold_convert_loc (loc, type,
10488 								   parg0),
10489 						 fold_convert_loc (loc, type,
10490 								   marg)),
10491 				    fold_convert_loc (loc, type, parg1));
10492 	      if (TREE_CODE (parg0) != MULT_EXPR
10493 		  && TREE_CODE (parg1) == MULT_EXPR)
10494 		return
10495 		  fold_build2_loc (loc, PLUS_EXPR, type,
10496 			       fold_convert_loc (loc, type, parg0),
10497 			       fold_build2_loc (loc, pcode, type,
10498 					    fold_convert_loc (loc, type, marg),
10499 					    fold_convert_loc (loc, type,
10500 							      parg1)));
10501 	    }
10502 	}
10503       else
10504 	{
10505 	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
10506 	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10507 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10508 
10509 	  /* Likewise if the operands are reversed.  */
10510 	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10511 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10512 
10513 	  /* Convert X + -C into X - C.  */
10514 	  if (TREE_CODE (arg1) == REAL_CST
10515 	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10516 	    {
10517 	      tem = fold_negate_const (arg1, type);
10518 	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10519 		return fold_build2_loc (loc, MINUS_EXPR, type,
10520 				    fold_convert_loc (loc, type, arg0),
10521 				    fold_convert_loc (loc, type, tem));
10522 	    }
10523 
10524 	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10525 	     to __complex__ ( x, y ).  This is not the same for SNaNs or
10526 	     if signed zeros are involved.  */
10527 	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10528               && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10529 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10530 	    {
10531 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10532 	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10533 	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10534 	      bool arg0rz = false, arg0iz = false;
10535 	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
10536 		  || (arg0i && (arg0iz = real_zerop (arg0i))))
10537 		{
10538 		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10539 		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10540 		  if (arg0rz && arg1i && real_zerop (arg1i))
10541 		    {
10542 		      tree rp = arg1r ? arg1r
10543 				  : build1 (REALPART_EXPR, rtype, arg1);
10544 		      tree ip = arg0i ? arg0i
10545 				  : build1 (IMAGPART_EXPR, rtype, arg0);
10546 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10547 		    }
10548 		  else if (arg0iz && arg1r && real_zerop (arg1r))
10549 		    {
10550 		      tree rp = arg0r ? arg0r
10551 				  : build1 (REALPART_EXPR, rtype, arg0);
10552 		      tree ip = arg1i ? arg1i
10553 				  : build1 (IMAGPART_EXPR, rtype, arg1);
10554 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10555 		    }
10556 		}
10557 	    }
10558 
10559 	  if (flag_unsafe_math_optimizations
10560 	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10561 	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10562 	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10563 	    return tem;
10564 
10565 	  /* Convert x+x into x*2.0.  */
10566 	  if (operand_equal_p (arg0, arg1, 0)
10567 	      && SCALAR_FLOAT_TYPE_P (type))
10568 	    return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10569 				build_real (type, dconst2));
10570 
10571           /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10572              We associate floats only if the user has specified
10573              -fassociative-math.  */
10574           if (flag_associative_math
10575               && TREE_CODE (arg1) == PLUS_EXPR
10576               && TREE_CODE (arg0) != MULT_EXPR)
10577             {
10578               tree tree10 = TREE_OPERAND (arg1, 0);
10579               tree tree11 = TREE_OPERAND (arg1, 1);
10580               if (TREE_CODE (tree11) == MULT_EXPR
10581 		  && TREE_CODE (tree10) == MULT_EXPR)
10582                 {
10583                   tree tree0;
10584                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10585                   return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10586                 }
10587             }
10588           /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10589              We associate floats only if the user has specified
10590              -fassociative-math.  */
10591           if (flag_associative_math
10592               && TREE_CODE (arg0) == PLUS_EXPR
10593               && TREE_CODE (arg1) != MULT_EXPR)
10594             {
10595               tree tree00 = TREE_OPERAND (arg0, 0);
10596               tree tree01 = TREE_OPERAND (arg0, 1);
10597               if (TREE_CODE (tree01) == MULT_EXPR
10598 		  && TREE_CODE (tree00) == MULT_EXPR)
10599                 {
10600                   tree tree0;
10601                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10602                   return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10603                 }
10604             }
10605 	}
10606 
10607      bit_rotate:
10608       /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10609 	 is a rotate of A by C1 bits.  */
10610       /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10611 	 is a rotate of A by B bits.  */
10612       {
10613 	enum tree_code code0, code1;
10614 	tree rtype;
10615 	code0 = TREE_CODE (arg0);
10616 	code1 = TREE_CODE (arg1);
10617 	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10618 	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10619 	    && operand_equal_p (TREE_OPERAND (arg0, 0),
10620 			        TREE_OPERAND (arg1, 0), 0)
10621 	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10622 	        TYPE_UNSIGNED (rtype))
10623 	    /* Only create rotates in complete modes.  Other cases are not
10624 	       expanded properly.  */
10625 	    && (element_precision (rtype)
10626 		== element_precision (TYPE_MODE (rtype))))
10627 	  {
10628 	    tree tree01, tree11;
10629 	    enum tree_code code01, code11;
10630 
10631 	    tree01 = TREE_OPERAND (arg0, 1);
10632 	    tree11 = TREE_OPERAND (arg1, 1);
10633 	    STRIP_NOPS (tree01);
10634 	    STRIP_NOPS (tree11);
10635 	    code01 = TREE_CODE (tree01);
10636 	    code11 = TREE_CODE (tree11);
10637 	    if (code01 == INTEGER_CST
10638 		&& code11 == INTEGER_CST
10639 		&& TREE_INT_CST_HIGH (tree01) == 0
10640 		&& TREE_INT_CST_HIGH (tree11) == 0
10641 		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10642 		    == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10643 	      {
10644 		tem = build2_loc (loc, LROTATE_EXPR,
10645 				  TREE_TYPE (TREE_OPERAND (arg0, 0)),
10646 				  TREE_OPERAND (arg0, 0),
10647 				  code0 == LSHIFT_EXPR ? tree01 : tree11);
10648 		return fold_convert_loc (loc, type, tem);
10649 	      }
10650 	    else if (code11 == MINUS_EXPR)
10651 	      {
10652 		tree tree110, tree111;
10653 		tree110 = TREE_OPERAND (tree11, 0);
10654 		tree111 = TREE_OPERAND (tree11, 1);
10655 		STRIP_NOPS (tree110);
10656 		STRIP_NOPS (tree111);
10657 		if (TREE_CODE (tree110) == INTEGER_CST
10658 		    && 0 == compare_tree_int (tree110,
10659 					      element_precision
10660 					      (TREE_TYPE (TREE_OPERAND
10661 							  (arg0, 0))))
10662 		    && operand_equal_p (tree01, tree111, 0))
10663 		  return
10664 		    fold_convert_loc (loc, type,
10665 				      build2 ((code0 == LSHIFT_EXPR
10666 					       ? LROTATE_EXPR
10667 					       : RROTATE_EXPR),
10668 					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
10669 					      TREE_OPERAND (arg0, 0), tree01));
10670 	      }
10671 	    else if (code01 == MINUS_EXPR)
10672 	      {
10673 		tree tree010, tree011;
10674 		tree010 = TREE_OPERAND (tree01, 0);
10675 		tree011 = TREE_OPERAND (tree01, 1);
10676 		STRIP_NOPS (tree010);
10677 		STRIP_NOPS (tree011);
10678 		if (TREE_CODE (tree010) == INTEGER_CST
10679 		    && 0 == compare_tree_int (tree010,
10680 					      element_precision
10681 					      (TREE_TYPE (TREE_OPERAND
10682 							  (arg0, 0))))
10683 		    && operand_equal_p (tree11, tree011, 0))
10684 		    return fold_convert_loc
10685 		      (loc, type,
10686 		       build2 ((code0 != LSHIFT_EXPR
10687 				? LROTATE_EXPR
10688 				: RROTATE_EXPR),
10689 			       TREE_TYPE (TREE_OPERAND (arg0, 0)),
10690 			       TREE_OPERAND (arg0, 0), tree11));
10691 	      }
10692 	  }
10693       }
10694 
10695     associate:
10696       /* In most languages, can't associate operations on floats through
10697 	 parentheses.  Rather than remember where the parentheses were, we
10698 	 don't associate floats at all, unless the user has specified
10699 	 -fassociative-math.
10700 	 And, we need to make sure type is not saturating.  */
10701 
10702       if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10703 	  && !TYPE_SATURATING (type))
10704 	{
10705 	  tree var0, con0, lit0, minus_lit0;
10706 	  tree var1, con1, lit1, minus_lit1;
10707 	  tree atype = type;
10708 	  bool ok = true;
10709 
10710 	  /* Split both trees into variables, constants, and literals.  Then
10711 	     associate each group together, the constants with literals,
10712 	     then the result with variables.  This increases the chances of
10713 	     literals being recombined later and of generating relocatable
10714 	     expressions for the sum of a constant and literal.  */
10715 	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10716 	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10717 			     code == MINUS_EXPR);
10718 
10719 	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
10720 	  if (code == MINUS_EXPR)
10721 	    code = PLUS_EXPR;
10722 
10723 	  /* With undefined overflow prefer doing association in a type
10724 	     which wraps on overflow, if that is one of the operand types.  */
10725 	  if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10726 	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10727 	    {
10728 	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10729 		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10730 		atype = TREE_TYPE (arg0);
10731 	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10732 		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10733 		atype = TREE_TYPE (arg1);
10734 	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10735 	    }
10736 
10737 	  /* With undefined overflow we can only associate constants with one
10738 	     variable, and constants whose association doesn't overflow.  */
10739 	  if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10740 	      || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10741 	    {
10742 	      if (var0 && var1)
10743 		{
10744 		  tree tmp0 = var0;
10745 		  tree tmp1 = var1;
10746 
10747 		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
10748 		    tmp0 = TREE_OPERAND (tmp0, 0);
10749 		  if (CONVERT_EXPR_P (tmp0)
10750 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10751 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10752 			  <= TYPE_PRECISION (atype)))
10753 		    tmp0 = TREE_OPERAND (tmp0, 0);
10754 		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
10755 		    tmp1 = TREE_OPERAND (tmp1, 0);
10756 		  if (CONVERT_EXPR_P (tmp1)
10757 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10758 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10759 			  <= TYPE_PRECISION (atype)))
10760 		    tmp1 = TREE_OPERAND (tmp1, 0);
10761 		  /* The only case we can still associate with two variables
10762 		     is if they are the same, modulo negation and bit-pattern
10763 		     preserving conversions.  */
10764 		  if (!operand_equal_p (tmp0, tmp1, 0))
10765 		    ok = false;
10766 		}
10767 	    }
10768 
10769 	  /* Only do something if we found more than two objects.  Otherwise,
10770 	     nothing has changed and we risk infinite recursion.  */
10771 	  if (ok
10772 	      && (2 < ((var0 != 0) + (var1 != 0)
10773 		       + (con0 != 0) + (con1 != 0)
10774 		       + (lit0 != 0) + (lit1 != 0)
10775 		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
10776 	    {
10777 	      bool any_overflows = false;
10778 	      if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10779 	      if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10780 	      if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10781 	      if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10782 	      var0 = associate_trees (loc, var0, var1, code, atype);
10783 	      con0 = associate_trees (loc, con0, con1, code, atype);
10784 	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
10785 	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10786 					    code, atype);
10787 
10788 	      /* Preserve the MINUS_EXPR if the negative part of the literal is
10789 		 greater than the positive part.  Otherwise, the multiplicative
10790 		 folding code (i.e extract_muldiv) may be fooled in case
10791 		 unsigned constants are subtracted, like in the following
10792 		 example: ((X*2 + 4) - 8U)/2.  */
10793 	      if (minus_lit0 && lit0)
10794 		{
10795 		  if (TREE_CODE (lit0) == INTEGER_CST
10796 		      && TREE_CODE (minus_lit0) == INTEGER_CST
10797 		      && tree_int_cst_lt (lit0, minus_lit0))
10798 		    {
10799 		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10800 						    MINUS_EXPR, atype);
10801 		      lit0 = 0;
10802 		    }
10803 		  else
10804 		    {
10805 		      lit0 = associate_trees (loc, lit0, minus_lit0,
10806 					      MINUS_EXPR, atype);
10807 		      minus_lit0 = 0;
10808 		    }
10809 		}
10810 
10811 	      /* Don't introduce overflows through reassociation.  */
10812 	      if (!any_overflows
10813 		  && ((lit0 && TREE_OVERFLOW (lit0))
10814 		      || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10815 		return NULL_TREE;
10816 
10817 	      if (minus_lit0)
10818 		{
10819 		  if (con0 == 0)
10820 		    return
10821 		      fold_convert_loc (loc, type,
10822 					associate_trees (loc, var0, minus_lit0,
10823 							 MINUS_EXPR, atype));
10824 		  else
10825 		    {
10826 		      con0 = associate_trees (loc, con0, minus_lit0,
10827 					      MINUS_EXPR, atype);
10828 		      return
10829 			fold_convert_loc (loc, type,
10830 					  associate_trees (loc, var0, con0,
10831 							   PLUS_EXPR, atype));
10832 		    }
10833 		}
10834 
10835 	      con0 = associate_trees (loc, con0, lit0, code, atype);
10836 	      return
10837 		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10838 							      code, atype));
10839 	    }
10840 	}
10841 
10842       return NULL_TREE;
10843 
10844     case MINUS_EXPR:
10845       /* Pointer simplifications for subtraction, simple reassociations. */
10846       if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10847 	{
10848 	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10849 	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10850 	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10851 	    {
10852 	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10853 	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10854 	      tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10855 	      tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10856 	      return fold_build2_loc (loc, PLUS_EXPR, type,
10857 				  fold_build2_loc (loc, MINUS_EXPR, type,
10858 					       arg00, arg10),
10859 				  fold_build2_loc (loc, MINUS_EXPR, type,
10860 					       arg01, arg11));
10861 	    }
10862 	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10863 	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10864 	    {
10865 	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10866 	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10867 	      tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10868 				      fold_convert_loc (loc, type, arg1));
10869 	      if (tmp)
10870 	        return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10871 	    }
10872 	}
10873       /* A - (-B) -> A + B */
10874       if (TREE_CODE (arg1) == NEGATE_EXPR)
10875 	return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10876 			    fold_convert_loc (loc, type,
10877 					      TREE_OPERAND (arg1, 0)));
10878       /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
10879       if (TREE_CODE (arg0) == NEGATE_EXPR
10880 	  && negate_expr_p (arg1)
10881 	  && reorder_operands_p (arg0, arg1))
10882 	return fold_build2_loc (loc, MINUS_EXPR, type,
10883 			    fold_convert_loc (loc, type,
10884 					      negate_expr (arg1)),
10885 			    fold_convert_loc (loc, type,
10886 					      TREE_OPERAND (arg0, 0)));
10887       /* Convert -A - 1 to ~A.  */
10888       if (TREE_CODE (type) != COMPLEX_TYPE
10889 	  && TREE_CODE (arg0) == NEGATE_EXPR
10890 	  && integer_onep (arg1)
10891 	  && !TYPE_OVERFLOW_TRAPS (type))
10892 	return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10893 			    fold_convert_loc (loc, type,
10894 					      TREE_OPERAND (arg0, 0)));
10895 
10896       /* Convert -1 - A to ~A.  */
10897       if (TREE_CODE (type) != COMPLEX_TYPE
10898 	  && integer_all_onesp (arg0))
10899 	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10900 
10901 
10902       /* X - (X / Y) * Y is X % Y.  */
10903       if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10904 	  && TREE_CODE (arg1) == MULT_EXPR
10905 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10906 	  && operand_equal_p (arg0,
10907 			      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10908 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10909 			      TREE_OPERAND (arg1, 1), 0))
10910 	return
10911 	  fold_convert_loc (loc, type,
10912 			    fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10913 					 arg0, TREE_OPERAND (arg1, 1)));
10914 
10915       if (! FLOAT_TYPE_P (type))
10916 	{
10917 	  if (integer_zerop (arg0))
10918 	    return negate_expr (fold_convert_loc (loc, type, arg1));
10919 	  if (integer_zerop (arg1))
10920 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10921 
10922 	  /* Fold A - (A & B) into ~B & A.  */
10923 	  if (!TREE_SIDE_EFFECTS (arg0)
10924 	      && TREE_CODE (arg1) == BIT_AND_EXPR)
10925 	    {
10926 	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10927 		{
10928 		  tree arg10 = fold_convert_loc (loc, type,
10929 						 TREE_OPERAND (arg1, 0));
10930 		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
10931 				      fold_build1_loc (loc, BIT_NOT_EXPR,
10932 						   type, arg10),
10933 				      fold_convert_loc (loc, type, arg0));
10934 		}
10935 	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10936 		{
10937 		  tree arg11 = fold_convert_loc (loc,
10938 						 type, TREE_OPERAND (arg1, 1));
10939 		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
10940 				      fold_build1_loc (loc, BIT_NOT_EXPR,
10941 						   type, arg11),
10942 				      fold_convert_loc (loc, type, arg0));
10943 		}
10944 	    }
10945 
10946 	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10947 	     any power of 2 minus 1.  */
10948 	  if (TREE_CODE (arg0) == BIT_AND_EXPR
10949 	      && TREE_CODE (arg1) == BIT_AND_EXPR
10950 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
10951 				  TREE_OPERAND (arg1, 0), 0))
10952 	    {
10953 	      tree mask0 = TREE_OPERAND (arg0, 1);
10954 	      tree mask1 = TREE_OPERAND (arg1, 1);
10955 	      tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10956 
10957 	      if (operand_equal_p (tem, mask1, 0))
10958 		{
10959 		  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10960 				     TREE_OPERAND (arg0, 0), mask1);
10961 		  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10962 		}
10963 	    }
10964 	}
10965 
10966       /* See if ARG1 is zero and X - ARG1 reduces to X.  */
10967       else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10968 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10969 
10970       /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
10971 	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10972 	 (-ARG1 + ARG0) reduces to -ARG1.  */
10973       else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10974 	return negate_expr (fold_convert_loc (loc, type, arg1));
10975 
10976       /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10977 	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
10978 	 signed zeros are involved.  */
10979       if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10980 	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10981 	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10982         {
10983 	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10984 	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10985 	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10986 	  bool arg0rz = false, arg0iz = false;
10987 	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
10988 	      || (arg0i && (arg0iz = real_zerop (arg0i))))
10989 	    {
10990 	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10991 	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10992 	      if (arg0rz && arg1i && real_zerop (arg1i))
10993 	        {
10994 		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10995 					 arg1r ? arg1r
10996 					 : build1 (REALPART_EXPR, rtype, arg1));
10997 		  tree ip = arg0i ? arg0i
10998 		    : build1 (IMAGPART_EXPR, rtype, arg0);
10999 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11000 		}
11001 	      else if (arg0iz && arg1r && real_zerop (arg1r))
11002 	        {
11003 		  tree rp = arg0r ? arg0r
11004 		    : build1 (REALPART_EXPR, rtype, arg0);
11005 		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11006 					 arg1i ? arg1i
11007 					 : build1 (IMAGPART_EXPR, rtype, arg1));
11008 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11009 		}
11010 	    }
11011 	}
11012 
11013       /* Fold &x - &x.  This can happen from &x.foo - &x.
11014 	 This is unsafe for certain floats even in non-IEEE formats.
11015 	 In IEEE, it is unsafe because it does wrong for NaNs.
11016 	 Also note that operand_equal_p is always false if an operand
11017 	 is volatile.  */
11018 
11019       if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
11020 	  && operand_equal_p (arg0, arg1, 0))
11021 	return build_zero_cst (type);
11022 
11023       /* A - B -> A + (-B) if B is easily negatable.  */
11024       if (negate_expr_p (arg1)
11025 	  && ((FLOAT_TYPE_P (type)
11026                /* Avoid this transformation if B is a positive REAL_CST.  */
11027 	       && (TREE_CODE (arg1) != REAL_CST
11028 		   ||  REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
11029 	      || INTEGRAL_TYPE_P (type)))
11030 	return fold_build2_loc (loc, PLUS_EXPR, type,
11031 			    fold_convert_loc (loc, type, arg0),
11032 			    fold_convert_loc (loc, type,
11033 					      negate_expr (arg1)));
11034 
11035       /* Try folding difference of addresses.  */
11036       {
11037 	HOST_WIDE_INT diff;
11038 
11039 	if ((TREE_CODE (arg0) == ADDR_EXPR
11040 	     || TREE_CODE (arg1) == ADDR_EXPR)
11041 	    && ptr_difference_const (arg0, arg1, &diff))
11042 	  return build_int_cst_type (type, diff);
11043       }
11044 
11045       /* Fold &a[i] - &a[j] to i-j.  */
11046       if (TREE_CODE (arg0) == ADDR_EXPR
11047 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11048 	  && TREE_CODE (arg1) == ADDR_EXPR
11049 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11050         {
11051 	  tree tem = fold_addr_of_array_ref_difference (loc, type,
11052 							TREE_OPERAND (arg0, 0),
11053 							TREE_OPERAND (arg1, 0));
11054 	  if (tem)
11055 	    return tem;
11056 	}
11057 
11058       if (FLOAT_TYPE_P (type)
11059 	  && flag_unsafe_math_optimizations
11060 	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
11061 	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
11062 	  && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
11063 	return tem;
11064 
11065       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11066 	 one.  Make sure the type is not saturating and has the signedness of
11067 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11068 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
11069       if ((TREE_CODE (arg0) == MULT_EXPR
11070 	   || TREE_CODE (arg1) == MULT_EXPR)
11071 	  && !TYPE_SATURATING (type)
11072 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11073 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11074 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
11075         {
11076 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11077 	  if (tem)
11078 	    return tem;
11079 	}
11080 
11081       goto associate;
11082 
    case MULT_EXPR:
      /* Simplifications of multiplication.  Non-float types are
	 handled first; floating-point folds follow in the "else" arm
	 below, guarded by the IEEE HONOR_* predicates and (for the
	 math-builtin folds) -funsafe-math-optimizations.  */
      /* (-A) * (-B) -> A * B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg0, 0)),
			    fold_convert_loc (loc, type,
					      negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_convert_loc (loc, type,
					      negate_expr (arg0)),
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
	{
	  /* X * 0 is 0 and X * 1 is X.  */
	  if (integer_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
	  /* Transform x * -1 into -x.  Make sure to do the negation
	     on the original operand with conversions not stripped
	     because we can only strip non-sign-changing conversions.  */
	  if (integer_minus_onep (arg1))
	    return fold_convert_loc (loc, type, negate_expr (op0));
	  /* Transform x * -C into -x * C if x is easily negatable.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && tree_int_cst_sgn (arg1) == -1
	      && negate_expr_p (arg0)
	      && (tem = negate_expr (arg1)) != arg1
	      && !TREE_OVERFLOW (tem))
	    return fold_build2_loc (loc, MULT_EXPR, type,
	    			fold_convert_loc (loc, type,
						  negate_expr (arg0)),
				tem);

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
				TREE_OPERAND (arg1, 1));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
				TREE_OPERAND (arg0, 1));

	  /* (A + A) * C -> A * 2 * C  */
	  if (TREE_CODE (arg0) == PLUS_EXPR
	      && TREE_CODE (arg1) == INTEGER_CST
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
			          TREE_OPERAND (arg0, 1), 0))
	    return fold_build2_loc (loc, MULT_EXPR, type,
				omit_one_operand_loc (loc, type,
						  TREE_OPERAND (arg0, 0),
						  TREE_OPERAND (arg0, 1)),
				fold_build2_loc (loc, MULT_EXPR, type,
					     build_int_cst (type, 2) , arg1));

	  /* ((T) (X /[ex] C)) * C cancels out if the conversion is
	     sign-changing only.  */
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg0) == EXACT_DIV_EXPR
	      && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
	    return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	  /* Let extract_muldiv reassociate multiplications by a
	     constant through the operand tree; emit a warning if the
	     fold is only valid on the assumption that signed overflow
	     does not occur.  */
	  strict_overflow_p = false;
	  if (TREE_CODE (arg1) == INTEGER_CST
	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					     &strict_overflow_p)))
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"multiplication"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_convert_loc (loc, type, tem);
	    }

	  /* Optimize z * conj(z) for integer complex numbers.  */
	  if (TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);
	}
      else
	{
	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
	     when x is NaN, since x * 0 is also NaN.  Nor are they the
	     same in modes with signed zeros, since multiplying a
	     negative value by 0 gives -0, not +0.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && real_zerop (arg1))
	    return omit_one_operand_loc (loc, type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.
	     Likewise for complex arithmetic with signed zeros.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	      && real_onep (arg1))
	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

	  /* Transform x * -1.0 into -x.  */
	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
	      && real_minus_onep (arg1))
	    return fold_convert_loc (loc, type, negate_expr (arg0));

	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
             the result for floating point types due to rounding so it is applied
             only if -fassociative-math was specified.  */
	  if (flag_associative_math
	      && TREE_CODE (arg0) == RDIV_EXPR
	      && TREE_CODE (arg1) == REAL_CST
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
	    {
	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
				      arg1);
	      if (tem)
		return fold_build2_loc (loc, RDIV_EXPR, type, tem,
				    TREE_OPERAND (arg0, 1));
	    }

          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
	  if (operand_equal_p (arg0, arg1, 0))
	    {
	      tree tem = fold_strip_sign_ops (arg0);
	      if (tem != NULL_TREE)
		{
		  tem = fold_convert_loc (loc, type, tem);
		  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
		}
	    }

	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
	     This is not the same for NaNs or if signed zeros are
	     involved.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && TREE_CODE (arg1) == COMPLEX_CST
	      && real_zerop (TREE_REALPART (arg1)))
	    {
	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
	      if (real_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
			       negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
							     rtype, arg0)),
			       fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
		return
		  fold_build2_loc (loc, COMPLEX_EXPR, type,
			       fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
			       negate_expr (fold_build1_loc (loc, REALPART_EXPR,
							     rtype, arg0)));
	    }

	  /* Optimize z * conj(z) for floating point complex numbers.
	     Guarded by flag_unsafe_math_optimizations as non-finite
	     imaginary components don't produce scalar results.  */
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg0) == CONJ_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	    return fold_mult_zconjz (loc, type, arg1);
	  if (flag_unsafe_math_optimizations
	      && TREE_CODE (arg1) == CONJ_EXPR
	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	    return fold_mult_zconjz (loc, type, arg0);

	  /* All math-builtin folds below are gated on
	     -funsafe-math-optimizations.  */
	  if (flag_unsafe_math_optimizations)
	    {
	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	      /* Optimizations of root(...)*root(...).  */
	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
		{
		  tree rootfn, arg;
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);

		  /* Optimize sqrt(x)*sqrt(x) as x.  */
		  if (BUILTIN_SQRT_P (fcode0)
		      && operand_equal_p (arg00, arg10, 0)
		      && ! HONOR_SNANS (TYPE_MODE (type)))
		    return arg00;

	          /* Optimize root(x)*root(y) as root(x*y).  */
		  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
		  return build_call_expr_loc (loc, rootfn, 1, arg);
		}

	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
		{
		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
					  CALL_EXPR_ARG (arg0, 0),
					  CALL_EXPR_ARG (arg1, 0));
		  return build_call_expr_loc (loc, expfn, 1, arg);
		}

	      /* Optimizations of pow(...)*pow(...).  */
	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);

		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
		  if (operand_equal_p (arg01, arg11, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
					      arg00, arg10);
		      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
		    }

		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
		  if (operand_equal_p (arg00, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
					      arg01, arg11);
		      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
		    }
		}

	      /* Optimize tan(x)*cos(x) as sin(x).  */
	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				      CALL_EXPR_ARG (arg1, 0), 0))
		{
		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);

		  if (sinfn != NULL_TREE)
		    return build_call_expr_loc (loc, sinfn, 1,
					    CALL_EXPR_ARG (arg0, 0));
		}

	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
	      if (fcode1 == BUILT_IN_POW
		  || fcode1 == BUILT_IN_POWF
		  || fcode1 == BUILT_IN_POWL)
		{
		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
		  if (TREE_CODE (arg11) == REAL_CST
		      && !TREE_OVERFLOW (arg11)
		      && operand_equal_p (arg0, arg10, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg11);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}

	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
	      if (fcode0 == BUILT_IN_POW
		  || fcode0 == BUILT_IN_POWF
		  || fcode0 == BUILT_IN_POWL)
		{
		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
		  if (TREE_CODE (arg01) == REAL_CST
		      && !TREE_OVERFLOW (arg01)
		      && operand_equal_p (arg1, arg00, 0))
		    {
		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		      REAL_VALUE_TYPE c;
		      tree arg;

		      c = TREE_REAL_CST (arg01);
		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
		      arg = build_real (type, c);
		      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		    }
		}

	      /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
	      if (!in_gimple_form
		  && optimize
		  && operand_equal_p (arg0, arg1, 0))
		{
		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

		  if (powfn)
		    {
		      tree arg = build_real (type, dconst2);
		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
		    }
		}
	    }
	}
      goto associate;
11397 
    case BIT_IOR_EXPR:
    bit_ior:
      /* Note: the bit_ior label is also reached from the BIT_XOR_EXPR
	 case below, after CODE has been changed to BIT_IOR_EXPR, when
	 the XOR operands have no bits in common.  */
      /* X | ~0 is ~0.  */
      if (integer_all_onesp (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* X | 0 is X.  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* X | X is X.  */
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X | X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X | ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Canonicalize (X & C1) | C2.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  double_int c1, c2, c3, msk;
	  int width = TYPE_PRECISION (type), w;

	  c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
	  c2 = tree_to_double_int (arg1);

	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
	  if ((c1 & c2) == c1)
	    return omit_one_operand_loc (loc, type, arg1,
					 TREE_OPERAND (arg0, 0));

	  msk = double_int::mask (width);

	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
	  if (msk.and_not (c1 | c2).is_zero ())
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1);

	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
	     mode which allows further optimizations.  */
	  c1 &= msk;
	  c2 &= msk;
	  c3 = c1.and_not (c2);
	  /* Search the power-of-two widths from one byte up to a host
	     wide int for a mask that covers C1|C2's low bits and all
	     of C1; if found, keep C1 as that full mask instead.  */
	  for (w = BITS_PER_UNIT;
	       w <= width && w <= HOST_BITS_PER_WIDE_INT;
	       w <<= 1)
	    {
	      unsigned HOST_WIDE_INT mask
		= HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
	      if (((c1.low | c2.low) & mask) == mask
		  && (c1.low & ~mask) == 0 && c1.high == 0)
		{
		  c3 = double_int::from_uhwi (mask);
		  break;
		}
	    }

	  if (c3 != c1)
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, type,
						     TREE_OPERAND (arg0, 0),
						     double_int_to_tree (type,
									 c3)),
				    arg1);
	}

      /* (X & Y) | Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X & Y) | X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X | (X & Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X | (Y & X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* (X & ~Y) | (~X & Y) is X ^ Y */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR)
        {
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);

	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
	}

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

	 This results in more efficient code for machines without a NAND
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NAND instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return
	    fold_build1_loc (loc, BIT_NOT_EXPR, type,
			 build2 (BIT_AND_EXPR, type,
				 fold_convert_loc (loc, type,
						   TREE_OPERAND (arg0, 0)),
				 fold_convert_loc (loc, type,
						   TREE_OPERAND (arg1, 0))));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;
11544 
    case BIT_XOR_EXPR:
      /* X ^ 0 is X.  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* X ^ ~0 is ~X.  */
      if (integer_all_onesp (arg1))
	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
      /* X ^ X is 0.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* ~X ^ X is -1.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* X ^ ~X is -1.  */
      if (TREE_CODE (arg1) == BIT_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  t1 = build_zero_cst (type);
	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
	 we should treat this as a BIT_IOR_EXPR since this may produce more
	 simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (BIT_AND_EXPR,
					 TREE_OPERAND (arg0, 1),
					 TREE_OPERAND (arg1, 1))))
	{
	  /* Retarget to the BIT_IOR_EXPR handling above.  */
	  code = BIT_IOR_EXPR;
	  goto bit_ior;
	}

      /* (X | Y) ^ X -> Y & ~ X*/
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
        {
	  tree t2 = TREE_OPERAND (arg0, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
			    arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
			    fold_convert_loc (loc, type, t2),
			    fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* (Y | X) ^ X -> Y & ~ X*/
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        {
	  tree t2 = TREE_OPERAND (arg0, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
			    arg1);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
			    fold_convert_loc (loc, type, t2),
			    fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (X | Y) -> Y & ~ X*/
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
        {
	  tree t2 = TREE_OPERAND (arg1, 1);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
			    arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
			    fold_convert_loc (loc, type, t2),
			    fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* X ^ (Y | X) -> Y & ~ X*/
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
        {
	  tree t2 = TREE_OPERAND (arg1, 0);
	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
			    arg0);
	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
			    fold_convert_loc (loc, type, t2),
			    fold_convert_loc (loc, type, t1));
	  return t1;
	}

      /* Convert ~X ^ ~Y to X ^ Y.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	return fold_build2_loc (loc, code, type,
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg0, 0)),
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg1, 0)));

      /* Convert ~X ^ C to X ^ ~C.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST)
	return fold_build2_loc (loc, code, type,
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg0, 0)),
			    fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));

      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
				build_zero_cst (TREE_TYPE (arg0)));

      /* Fold (X & Y) ^ Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
			      fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X & Y) ^ X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
			      fold_convert_loc (loc, type, arg1));
	}
      /* Fold X ^ (X & Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
			      fold_convert_loc (loc, type, arg0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X ^ (Y & X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_AND_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
			      fold_convert_loc (loc, type, arg0));
	}

      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful continue in the association code.  */
      goto bit_rotate;
11705 
    case BIT_AND_EXPR:
      /* X & -1 is X, X & 0 is 0, and X & X is X.  */
      if (integer_all_onesp (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      if (operand_equal_p (arg0, arg1, 0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
      if ((TREE_CODE (arg0) == BIT_NOT_EXPR
	   || TREE_CODE (arg0) == TRUTH_NOT_EXPR
	   || (TREE_CODE (arg0) == EQ_EXPR
	       && integer_zerop (TREE_OPERAND (arg0, 1))))
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X & ~X , X & (X == 0), and X & !X are always zero.  */
      if ((TREE_CODE (arg1) == BIT_NOT_EXPR
	   || TREE_CODE (arg1) == TRUTH_NOT_EXPR
	   || (TREE_CODE (arg1) == EQ_EXPR
	       && integer_zerop (TREE_OPERAND (arg1, 1))))
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree tmp1 = fold_convert_loc (loc, type, arg1);
	  tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
	  tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
	  return
	    fold_convert_loc (loc, type,
			      fold_build2_loc (loc, BIT_IOR_EXPR,
					   type, tmp2, tmp3));
	}

      /* (X | Y) & Y is (X, Y).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
      /* (X | Y) & X is (Y, X).  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
      /* X & (X | Y) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
      /* X & (Y | X) is (Y, X).  */
      if (TREE_CODE (arg1) == BIT_IOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));

      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				  build_zero_cst (TREE_TYPE (tem)));
	}
      /* Fold ~X & 1 as (X & 1) == 0.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tree tem2;
	  tem = TREE_OPERAND (arg0, 0);
	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
				  tem, tem2);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
				  build_zero_cst (TREE_TYPE (tem)));
	}
      /* Fold !X & 1 as X == 0.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && integer_onep (arg1))
	{
	  tem = TREE_OPERAND (arg0, 0);
	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
				  build_zero_cst (TREE_TYPE (tem)));
	}

      /* Fold (X ^ Y) & Y as ~X & Y.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
			      fold_convert_loc (loc, type, arg1));
	}
      /* Fold (X ^ Y) & X as ~Y & X.  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
			      fold_convert_loc (loc, type, arg1));
	}
      /* Fold X & (X ^ Y) as X & ~Y.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
			      fold_convert_loc (loc, type, arg0),
			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
	}
      /* Fold X & (Y ^ X) as ~Y & X.  */
      if (TREE_CODE (arg1) == BIT_XOR_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
	{
	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
			      fold_convert_loc (loc, type, arg0));
	}

      /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
         multiple of 1 << CST.  */
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  double_int cst1 = tree_to_double_int (arg1);
	  double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
					  TYPE_UNSIGNED (TREE_TYPE (arg1)));
	  /* arg1 has the form -(1 << CST) exactly when its truncated
	     negation is contained in it bit-wise.  */
	  if ((cst1 & ncst1) == ncst1
	      && multiple_of_p (type, arg0,
				double_int_to_tree (TREE_TYPE (arg1), ncst1)))
	    return fold_convert_loc (loc, type, arg0);
	}

      /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
         bits from CST2.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == MULT_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  double_int darg1 = tree_to_double_int (arg1);
	  double_int masked
	    = mask_with_tz (type, darg1,
	                    tree_to_double_int (TREE_OPERAND (arg0, 1)));

	  if (masked.is_zero ())
	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
	                                  arg0, arg1);
	  else if (masked != darg1)
	    {
	      /* Avoid the transform if arg1 is a mask of some
	         mode which allows further optimizations.  */
	      int pop = darg1.popcount ();
	      if (!(pop >= BITS_PER_UNIT
		    && exact_log2 (pop) != -1
		    && double_int::mask (pop) == darg1))
		return fold_build2_loc (loc, code, type, op0,
					double_int_to_tree (type, masked));
	    }
	}

      /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
	 ((A & N) + B) & M -> (A + B) & M
	 Similarly if (N & M) == 0,
	 ((A | N) + B) & M -> (A + B) & M
	 and for - instead of + (or unary - instead of +)
	 and/or ^ instead of |.
	 If B is constant and (B & M) == 0, fold into A & M.  */
      if (tree_fits_uhwi_p (arg1))
	{
	  unsigned HOST_WIDE_INT cst1 = tree_to_uhwi (arg1);
	  /* cst1 + 1 must be a power of two, i.e. cst1 == (1 << cst) - 1,
	     and cst1 must not be all ones.  */
	  if (~cst1 && (cst1 & (cst1 + 1)) == 0
	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      && (TREE_CODE (arg0) == PLUS_EXPR
		  || TREE_CODE (arg0) == MINUS_EXPR
		  || TREE_CODE (arg0) == NEGATE_EXPR)
	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
	    {
	      tree pmop[2];
	      int which = 0;
	      unsigned HOST_WIDE_INT cst0;

	      /* Now we know that arg0 is (C + D) or (C - D) or
		 -C and arg1 (M) is == (1LL << cst) - 1.
		 Store C into PMOP[0] and D into PMOP[1].  */
	      pmop[0] = TREE_OPERAND (arg0, 0);
	      pmop[1] = NULL;
	      if (TREE_CODE (arg0) != NEGATE_EXPR)
		{
		  pmop[1] = TREE_OPERAND (arg0, 1);
		  which = 1;
		}

	      /* If the mask isn't fully covered by the type's maximum
		 value, don't try to simplify either operand (setting
		 WHICH to -1 makes the loop below a no-op).  */
	      if (!tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
		  || (tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
		      & cst1) != cst1)
		which = -1;

	      for (; which >= 0; which--)
		switch (TREE_CODE (pmop[which]))
		  {
		  case BIT_AND_EXPR:
		  case BIT_IOR_EXPR:
		  case BIT_XOR_EXPR:
		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
			!= INTEGER_CST)
		      break;
		    /* tree_to_[su]hwi not used, because we don't care about
		       the upper bits.  */
		    cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
		    cst0 &= cst1;
		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
		      {
			if (cst0 != cst1)
			  break;
		      }
		    else if (cst0 != 0)
		      break;
		    /* If C or D is of the form (A & N) where
		       (N & M) == M, or of the form (A | N) or
		       (A ^ N) where (N & M) == 0, replace it with A.  */
		    pmop[which] = TREE_OPERAND (pmop[which], 0);
		    break;
		  case INTEGER_CST:
		    /* If C or D is a N where (N & M) == 0, it can be
		       omitted (assumed 0).  */
		    if ((TREE_CODE (arg0) == PLUS_EXPR
			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
			&& (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
		      pmop[which] = NULL;
		    break;
		  default:
		    break;
		  }

	      /* Only build anything new if we optimized one or both arguments
		 above.  */
	      if (pmop[0] != TREE_OPERAND (arg0, 0)
		  || (TREE_CODE (arg0) != NEGATE_EXPR
		      && pmop[1] != TREE_OPERAND (arg0, 1)))
		{
		  tree utype = TREE_TYPE (arg0);
		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
		    {
		      /* Perform the operations in a type that has defined
			 overflow behavior.  */
		      utype = unsigned_type_for (TREE_TYPE (arg0));
		      if (pmop[0] != NULL)
			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
		      if (pmop[1] != NULL)
			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
		    }

		  if (TREE_CODE (arg0) == NEGATE_EXPR)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
		  else if (TREE_CODE (arg0) == PLUS_EXPR)
		    {
		      if (pmop[0] != NULL && pmop[1] != NULL)
			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
					       pmop[0], pmop[1]);
		      else if (pmop[0] != NULL)
			tem = pmop[0];
		      else if (pmop[1] != NULL)
			tem = pmop[1];
		      else
			return build_int_cst (type, 0);
		    }
		  else if (pmop[0] == NULL)
		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
		  else
		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
					   pmop[0], pmop[1]);
		  /* TEM is now the new binary +, - or unary - replacement.  */
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
					 fold_convert_loc (loc, utype, arg1));
		  return fold_convert_loc (loc, type, tem);
		}
	    }
	}

      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return
	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	}

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

	 This results in more efficient code for machines without a NOR
	 instruction.  Combine will canonicalize to the first form
	 which will allow use of NOR instructions provided by the
	 backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
	{
	  return fold_build1_loc (loc, BIT_NOT_EXPR, type,
			      build2 (BIT_IOR_EXPR, type,
				      fold_convert_loc (loc, type,
							TREE_OPERAND (arg0, 0)),
				      fold_convert_loc (loc, type,
							TREE_OPERAND (arg1, 0))));
	}

      /* If arg0 is derived from the address of an object or function, we may
	 be able to fold this expression using the object or function's
	 alignment.  */
      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
	{
	  unsigned HOST_WIDE_INT modulus, residue;
	  unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);

	  modulus = get_pointer_modulus_and_residue (arg0, &residue,
						     integer_onep (arg1));

	  /* This works because modulus is a power of 2.  If this weren't the
	     case, we'd have to replace it by its greatest power-of-2
	     divisor: modulus & -modulus.  */
	  if (low < modulus)
	    return build_int_cst (type, residue & low);
	}

      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
	      (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
	 if the new mask might be further optimized.  */
      if ((TREE_CODE (arg0) == LSHIFT_EXPR
	   || TREE_CODE (arg0) == RSHIFT_EXPR)
	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
	  && TREE_CODE (arg1) == INTEGER_CST
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
	  && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
	      < TYPE_PRECISION (TREE_TYPE (arg0))))
	{
	  unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
	  unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
	  tree shift_type = TREE_TYPE (arg0);

	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
		   && TYPE_PRECISION (TREE_TYPE (arg0))
		      == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
	    {
	      prec = TYPE_PRECISION (TREE_TYPE (arg0));
	      tree arg00 = TREE_OPERAND (arg0, 0);
	      /* See if more bits can be proven as zero because of
		 zero extension.  */
	      if (TREE_CODE (arg00) == NOP_EXPR
		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
		{
		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
		  if (TYPE_PRECISION (inner_type)
		      == GET_MODE_PRECISION (TYPE_MODE (inner_type))
		      && TYPE_PRECISION (inner_type) < prec)
		    {
		      prec = TYPE_PRECISION (inner_type);
		      /* See if we can shorten the right shift.  */
		      if (shiftc < prec)
			shift_type = inner_type;
		      /* Otherwise X >> C1 is all zeros, so we'll optimize
			 it into (X, 0) later on by making sure zerobits
			 is all ones.  */
		    }
		}
	      /* For a logical right shift the top SHIFTC bits (within
		 PREC) are known zero.  */
	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
	      if (shiftc < prec)
		{
		  zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
		  zerobits <<= prec - shiftc;
		}
	      /* For arithmetic shift if sign bit could be set, zerobits
		 can contain actually sign bits, so no transformation is
		 possible, unless MASK masks them all away.  In that
		 case the shift needs to be converted into logical shift.  */
	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
		{
		  if ((mask & zerobits) == 0)
		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
		  else
		    zerobits = 0;
		}
	    }

	  /* ((X << 16) & 0xff00) is (X, 0).  */
	  if ((mask & zerobits) == mask)
	    return omit_one_operand_loc (loc, type,
					 build_int_cst (type, 0), arg0);

	  newmask = mask | zerobits;
	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
	    {
	      /* Only do the transformation if NEWMASK is some integer
		 mode's mask.  */
	      for (prec = BITS_PER_UNIT;
		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
		  break;
	      if (prec < HOST_BITS_PER_WIDE_INT
		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
		{
		  tree newmaskt;

		  if (shift_type != TREE_TYPE (arg0))
		    {
		      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
					 fold_convert_loc (loc, shift_type,
							   TREE_OPERAND (arg0, 0)),
					 TREE_OPERAND (arg0, 1));
		      tem = fold_convert_loc (loc, type, tem);
		    }
		  else
		    tem = op0;
		  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
		  if (!tree_int_cst_equal (newmaskt, arg1))
		    return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
		}
	    }
	}

      goto associate;
12151 
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
	 of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
	  && real_zerop (arg1))
	return NULL_TREE;

      /* Optimize A / A to 1.0 if we don't care about
	 NaNs or Infinities.  Skip the transformation
	 for non-real operands.  */
      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree r = build_real (TREE_TYPE (arg0), dconst1);

	  return omit_two_operands_loc (loc, type, r, arg0, arg1);
	}

      /* The complex version of the above A / A optimization.  */
      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
	  && operand_equal_p (arg0, arg1, 0))
	{
	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
	  if (! HONOR_NANS (TYPE_MODE (elem_type))
	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
	    {
	      tree r = build_real (elem_type, dconst1);
	      /* omit_two_operands will call fold_convert for us.  */
	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
	    }
	}

      /* (-A) / (-B) -> A / B  */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
			    TREE_OPERAND (arg0, 0),
			    negate_expr (arg1));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
	return fold_build2_loc (loc, RDIV_EXPR, type,
			    negate_expr (arg0),
			    TREE_OPERAND (arg1, 0));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
	  && real_minus_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type,
						  negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -freciprocal-math.  We can actually
	 always safely do it if ARG1 is a power of two, but it's hard to
	 tell if it is or not in a portable manner.  */
      if (optimize
	  && (TREE_CODE (arg1) == REAL_CST
	      || (TREE_CODE (arg1) == COMPLEX_CST
		  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
	      || (TREE_CODE (arg1) == VECTOR_CST
		  && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
	{
	  if (flag_reciprocal_math
	      && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
	    return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
	  /* Find the reciprocal if optimizing and the result is exact.
	     TODO: Complex reciprocal not implemented.  */
	  if (TREE_CODE (arg1) != COMPLEX_CST)
	    {
	      tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);

	      if (inverse)
		return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
	    }
	}
      /* Convert A/B/C to A/(B*C).  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg0) == RDIV_EXPR)
	return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
			    fold_build2_loc (loc, MULT_EXPR, type,
					 TREE_OPERAND (arg0, 1), arg1));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == RDIV_EXPR)
	return fold_build2_loc (loc, MULT_EXPR, type,
			    fold_build2_loc (loc, RDIV_EXPR, type, arg0,
					 TREE_OPERAND (arg1, 0)),
			    TREE_OPERAND (arg1, 1));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_reciprocal_math
	  && TREE_CODE (arg1) == MULT_EXPR
	  && TREE_CODE (arg0) == REAL_CST
	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
	{
	  tree tem = const_binop (RDIV_EXPR, arg0,
				  TREE_OPERAND (arg1, 1));
	  if (tem)
	    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
				TREE_OPERAND (arg1, 0));
	}

      /* The remaining transformations rewrite calls to math builtins and
	 are only valid under -funsafe-math-optimizations.  */
      if (flag_unsafe_math_optimizations)
	{
	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);

	  /* Optimize sin(x)/cos(x) as tan(x).  */
	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
	    }

	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
				  CALL_EXPR_ARG (arg1, 0), 0))
	    {
	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);

	      if (tanfn != NULL_TREE)
		{
		  tree tmp = build_call_expr_loc (loc, tanfn, 1,
					      CALL_EXPR_ARG (arg0, 0));
		  return fold_build2_loc (loc, RDIV_EXPR, type,
				      build_real (type, dconst1), tmp);
		}
	    }

 	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
	     NaNs or Infinities.  */
 	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
 	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
 	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    return build_call_expr_loc (loc, cosfn, 1, arg00);
		}
	    }

 	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
	     NaNs or Infinities.  */
 	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
 	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
 	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg1, 0);

	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
		  && operand_equal_p (arg00, arg01, 0))
		{
		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);

		  if (cosfn != NULL_TREE)
		    {
		      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
		      return fold_build2_loc (loc, RDIV_EXPR, type,
					  build_real (type, dconst1),
					  tmp);
		    }
		}
	    }

	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
	  if (fcode0 == BUILT_IN_POW
	      || fcode0 == BUILT_IN_POWF
	      || fcode0 == BUILT_IN_POWL)
	    {
	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      if (TREE_CODE (arg01) == REAL_CST
		  && !TREE_OVERFLOW (arg01)
		  && operand_equal_p (arg1, arg00, 0))
		{
		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
		  REAL_VALUE_TYPE c;
		  tree arg;

		  c = TREE_REAL_CST (arg01);
		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
		  arg = build_real (type, c);
		  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
		}
	    }

	  /* Optimize a/root(b/c) into a*root(c/b).  */
	  if (BUILTIN_ROOT_P (fcode1))
	    {
	      tree rootarg = CALL_EXPR_ARG (arg1, 0);

	      if (TREE_CODE (rootarg) == RDIV_EXPR)
		{
		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
		  tree b = TREE_OPERAND (rootarg, 0);
		  tree c = TREE_OPERAND (rootarg, 1);

		  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);

		  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
		}
	    }

	  /* Optimize x/expN(y) into x*expN(-y).  */
	  if (BUILTIN_EXPONENT_P (fcode1))
	    {
	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
	      arg1 = build_call_expr_loc (loc,
				      expfn, 1,
				      fold_convert_loc (loc, type, arg));
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }

	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
	  if (fcode1 == BUILT_IN_POW
	      || fcode1 == BUILT_IN_POWF
	      || fcode1 == BUILT_IN_POWL)
	    {
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
	      tree neg11 = fold_convert_loc (loc, type,
					     negate_expr (arg11));
	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
	    }
	}
      return NULL_TREE;
12408 
    case TRUNC_DIV_EXPR:
      /* Optimize (X & (-A)) / A where A is a power of 2,
	 to X >> log2(A) */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
	{
	  tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
				      arg1, TREE_OPERAND (arg0, 1));
	  if (sum && integer_zerop (sum)) {
	    unsigned long pow2;

	    /* ARG1 is a power of two; find its bit position from
	       whichever half of the double_int holds the set bit.  */
	    if (TREE_INT_CST_LOW (arg1))
	      pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
	    else
	      pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
		      + HOST_BITS_PER_WIDE_INT;

	    return fold_build2_loc (loc, RSHIFT_EXPR, type,
			  TREE_OPERAND (arg0, 0),
			  build_int_cst (integer_type_node, pow2));
	  }
	}

      /* Fall through */

    case FLOOR_DIV_EXPR:
      /* Simplify A / (B << N) where A and B are positive and B is
	 a power of 2, to A >> (N + log2(B)).  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == LSHIFT_EXPR
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree sval = TREE_OPERAND (arg1, 0);
	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
	    {
	      tree sh_cnt = TREE_OPERAND (arg1, 1);
	      unsigned long pow2;

	      /* SVAL is a power of two; find its bit position from
		 whichever half of the double_int holds the set bit.  */
	      if (TREE_INT_CST_LOW (sval))
		pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
	      else
		pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
		       + HOST_BITS_PER_WIDE_INT;

	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying A / (B << N)"),
				       WARN_STRICT_OVERFLOW_MISC);

	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
					sh_cnt,
					build_int_cst (TREE_TYPE (sh_cnt),
						       pow2));
	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
				  fold_convert_loc (loc, type, arg0), sh_cnt);
	    }
	}

      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_UNSIGNED (type)
	  && code == FLOOR_DIV_EXPR)
	return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);

      /* Fall through */

    case ROUND_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* X / 1 is X; X / 0 is left alone for diagnostics.  */
      if (integer_onep (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg1))
	return NULL_TREE;
      /* X / -1 is -X.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return fold_convert_loc (loc, type, negate_expr (arg0));

      /* Convert -A / -B to A / B when the type is signed and overflow is
	 undefined.  */
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg0) == NEGATE_EXPR
	  && negate_expr_p (arg1))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, type,
						TREE_OPERAND (arg0, 0)),
			      fold_convert_loc (loc, type,
						negate_expr (arg1)));
	}
      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && negate_expr_p (arg0))
	{
	  if (INTEGRAL_TYPE_P (type))
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when distributing negation across "
				    "division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, type,
						negate_expr (arg0)),
			      fold_convert_loc (loc, type,
						TREE_OPERAND (arg1, 0)));
	}

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round to changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);

      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying division"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      return NULL_TREE;
12548 
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* X % 1 is always zero, but be sure to preserve any side
	 effects in X.  */
      if (integer_onep (arg1))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % 0, return X % 0 unchanged so that we can get the
	 proper warnings and errors.  */
      if (integer_zerop (arg1))
	return NULL_TREE;

      /* 0 % X is always zero, but be sure to preserve any side
	 effects in X.  Place this after checking for X == 0.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);

      /* X % -1 is zero.  */
      if (!TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
	  && TREE_INT_CST_HIGH (arg1) == -1)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* X % -C is the same as X % C.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)
	  && TREE_INT_CST_HIGH (arg1) < 0
	  && !TYPE_OVERFLOW_TRAPS (type)
	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
	  && !sign_bit_p (arg1, arg1))
	return fold_build2_loc (loc, code, type,
			    fold_convert_loc (loc, type, arg0),
			    fold_convert_loc (loc, type,
					      negate_expr (arg1)));

      /* X % -Y is the same as X % Y.  */
      if (code == TRUNC_MOD_EXPR
	  && !TYPE_UNSIGNED (type)
	  && TREE_CODE (arg1) == NEGATE_EXPR
	  && !TYPE_OVERFLOW_TRAPS (type))
	return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg1, 0)));

      /* Let extract_muldiv fold the modulus into multiplications or
	 divisions found inside op0; warn if that assumed undefined
	 signed overflow.  */
      strict_overflow_p = false;
      if (TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
					 &strict_overflow_p)))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying modulus"),
				   WARN_STRICT_OVERFLOW_MISC);
	  return fold_convert_loc (loc, type, tem);
	}

      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
	  && (TYPE_UNSIGNED (type)
	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
	{
	  tree c = arg1;
	  /* Also optimize A % (C << N)  where C is a power of 2,
	     to A & ((C << N) - 1).  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
	    c = TREE_OPERAND (arg1, 0);

	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
	    {
	      /* The mask is arg1 - 1, built in arg1's type (covers the
		 shifted case as well since (C << N) is a power of 2).  */
	      tree mask
		= fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
				   build_int_cst (TREE_TYPE (arg1), 1));
	      if (strict_overflow_p)
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when simplifying "
					"X % (power of two)"),
				       WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				      fold_convert_loc (loc, type, arg0),
				      fold_convert_loc (loc, type, mask));
	    }
	}

      return NULL_TREE;
12639 
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* Rotating an all-ones value yields all ones whatever the count;
	 keep arg1 only for its side effects.  */
      if (integer_all_onesp (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
	  && tree_expr_nonnegative_p (arg1))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      /* X <op> 0 is X; 0 <op> X is 0 (preserving side effects of X).  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* Prefer vector1 << scalar to vector1 << vector2
	 if vector2 is uniform.  */
      if (VECTOR_TYPE_P (TREE_TYPE (arg1))
	  && (tem = uniform_vector_p (arg1)) != NULL_TREE)
	return fold_build2_loc (loc, code, type, op0, tem);

      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return NULL_TREE;

      prec = element_precision (type);

      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
      if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
	  && tree_to_uhwi (arg1) < prec
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
	{
	  unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
			      + tree_to_uhwi (arg1));

	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
	     being well defined.  */
	  if (low >= prec)
	    {
	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
	        low = low % prec;
	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
		return omit_one_operand_loc (loc, type, build_zero_cst (type),
					 TREE_OPERAND (arg0, 0));
	      else
		/* Arithmetic right shift saturates at prec - 1 (result is
		   all sign bits).  */
		low = prec - 1;
	    }

	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
				  build_int_cst (TREE_TYPE (arg1), low));
	}

      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
         into x & ((unsigned)-1 >> c) for unsigned types.  */
      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
           || (TYPE_UNSIGNED (type)
	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
	  && tree_fits_uhwi_p (arg1)
	  && tree_to_uhwi (arg1) < prec
	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
	{
	  HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
	  HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
	  tree lshift;
	  tree arg00;

	  if (low0 == low1)
	    {
	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));

	      /* Shift an all-ones mask by the same count to obtain the
		 bits that survive the round trip.  */
	      lshift = build_minus_one_cst (type);
	      lshift = const_binop (code, lshift, arg1);

	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
	    }
	}

      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  tree tem = build_int_cst (TREE_TYPE (arg1), prec);
	  tem = const_binop (MINUS_EXPR, tem, arg1);
	  return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
	}

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, TREE_CODE (arg0), type,
			    fold_build2_loc (loc, code, type,
					 TREE_OPERAND (arg0, 0), arg1),
			    fold_build2_loc (loc, code, type,
					 TREE_OPERAND (arg0, 1), arg1));

      /* Two consecutive rotates adding up to the precision of the
	 type can be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == prec))
	return TREE_OPERAND (arg0, 0);

      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
	 if the latter can be further optimized.  */
      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
	  && TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree mask = fold_build2_loc (loc, code, type,
				   fold_convert_loc (loc, type,
						     TREE_OPERAND (arg0, 1)),
				   arg1);
	  tree shift = fold_build2_loc (loc, code, type,
				    fold_convert_loc (loc, type,
						      TREE_OPERAND (arg0, 0)),
				    arg1);
	  /* Only commit to the rewrite if the AND itself folds; otherwise
	     keep the original form.  */
	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
	  if (tem)
	    return tem;
	}

      return NULL_TREE;
12782 
    case MIN_EXPR:
      /* MIN (X, X) is X; keep one operand for its side effects.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* MIN (X, TYPE_MIN) is TYPE_MIN.  */
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Try the generic minmax combiner, then treat as associative.  */
      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;
12793 
    case MAX_EXPR:
      /* MAX (X, X) is X; keep one operand for its side effects.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, arg0, arg1);
      /* MAX (X, TYPE_MAX) is TYPE_MAX.  TYPE_MAX_VALUE may be absent
	 (e.g. for some integral types), hence the extra check.  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Try the generic minmax combiner, then treat as associative.  */
      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
      if (tem)
	return tem;
      goto associate;
12805 
    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
      /* Fall through: the remaining folds are valid for both the
	 short-circuit and the non-short-circuit form.  */
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
	 must be evaluated.  */
      if (integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
	 case will be handled here.  */
      if (integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X && X is always false.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
      /* X && !X is always false.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
	 means A >= Y && A != MAX, but in this case we know that
	 A < X <= MAX.  */

      if (!TREE_SIDE_EFFECTS (arg0)
	  && !TREE_SIDE_EFFECTS (arg1))
	{
	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
	  if (tem && !operand_equal_p (tem, arg0, 0))
	    return fold_build2_loc (loc, code, type, tem, arg1);

	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
	  if (tem && !operand_equal_p (tem, arg1, 0))
	    return fold_build2_loc (loc, code, type, arg0, tem);
	}

      /* Last resort: hand the pair to the generic truth and/or
	 combiner.  */
      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;
12860 
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
	 and their values must be 0 or true.
	 ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return fold_convert_loc (loc, type, arg0);
      /* Fall through: the remaining folds are valid for both the
	 short-circuit and the non-short-circuit form.  */
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
	  /* Preserve sequence points.  */
	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If second arg is constant true, result is true, but we must
	 evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
	return omit_one_operand_loc (loc, type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
	 TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
	return omit_one_operand_loc (loc, type, arg0, arg1);

      /* !X || X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
      /* X || !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* (X && !Y) || (!X && Y) is X ^ Y */
      if (TREE_CODE (arg0) == TRUTH_AND_EXPR
	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
        {
	  tree a0, a1, l0, l1, n0, n1;

	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));

	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));

	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);

	  /* Match either operand order of the second AND against the
	     negation of the first AND's operands.  */
	  if ((operand_equal_p (n0, a0, 0)
	       && operand_equal_p (n1, a1, 0))
	      || (operand_equal_p (n0, a1, 0)
		  && operand_equal_p (n1, a0, 0)))
	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
	}

      /* Last resort: hand the pair to the generic truth and/or
	 combiner.  */
      if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
          != NULL_TREE)
        return tem;

      return NULL_TREE;
12921 
    case TRUTH_XOR_EXPR:
      /* If the second arg is constant zero, drop it.  */
      if (integer_zerop (arg1))
	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
      /* If the second arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg1))
	{
	  tem = invert_truthvalue_loc (loc, arg0);
	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
	}
      /* Identical arguments cancel to zero.  */
      if (operand_equal_p (arg0, arg1, 0))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

      /* !X ^ X is always true.  */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg1);

      /* X ^ !X is always true.  */
      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      return NULL_TREE;
12947 
    case EQ_EXPR:
    case NE_EXPR:
      /* Strip conversions that do not change the comparison, then try
	 the common comparison folder first.  */
      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* bool_var != 0 becomes bool_var. */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var == 1 becomes bool_var. */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == EQ_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* bool_var != 1 becomes !bool_var. */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
          && code == NE_EXPR)
        return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* bool_var == 0 becomes !bool_var. */
      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
          && code == EQ_EXPR)
        return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
						  TREE_TYPE (arg0), arg0));

      /* !exp != 0 becomes !exp */
      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
	  && code == NE_EXPR)
        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));

      /* If this is an equality comparison of the address of two non-weak,
	 unaliased symbols neither of which are extern (since we do not
	 have access to attributes for externs), then we know the result.  */
      if (TREE_CODE (arg0) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
	  && TREE_CODE (arg1) == ADDR_EXPR
	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
	  && ! lookup_attribute ("alias",
				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
	{
	  /* We know that we're looking at the address of two
	     non-weak, unaliased, static _DECL nodes.

	     It is both wasteful and incorrect to call operand_equal_p
	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
	     all we need to do is test pointer equality for the arguments
	     to the two ADDR_EXPR nodes.  It is incorrect to use
	     operand_equal_p as that function is NOT equivalent to a
	     C equality test.  It can in fact return false for two
	     objects which would test as equal using the C equality
	     operator.  */
	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
	  return constant_boolean_node (equal
				        ? code == EQ_EXPR : code != EQ_EXPR,
				        type);
	}

      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
	 a MINUS_EXPR of a constant, we can convert it into a comparison with
	 a revised constant as long as no overflow occurs.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1),
				      TREE_OPERAND (arg0, 1)))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a NEGATE_EXPR.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
							arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
      if (TREE_CODE (arg0) == BIT_XOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
			    fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
					 fold_convert_loc (loc,
							   TREE_TYPE (arg0),
							   arg1),
					 TREE_OPERAND (arg0, 1)));

      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR
	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	   || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									0)),
			      arg1, 0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
	{
	  tree val = TREE_OPERAND (arg0, 1);
	  return omit_two_operands_loc (loc, type,
				    fold_build2_loc (loc, code, type,
						 val,
						 build_int_cst (TREE_TYPE (val),
								0)),
				    TREE_OPERAND (arg0, 0), arg1);
	}

      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.
	 C - X == X would require C to be even, so with odd C the
	 comparison is known false (and != known true).  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
									1)),
			      arg1, 0)
	  && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
	{
	  return omit_two_operands_loc (loc, type,
				    code == NE_EXPR
				    ? boolean_true_node : boolean_false_node,
				    TREE_OPERAND (arg0, 1), arg1);
	}

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
	 for !=.  Don't do this for ordered comparisons due to overflow.  */
      if (TREE_CODE (arg0) == MINUS_EXPR
	  && integer_zerop (arg1))
	return fold_build2_loc (loc, code, type,
			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
      if (TREE_CODE (arg0) == ABS_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1)))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);

      /* If this is an EQ or NE comparison with zero and ARG0 is
	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
	 two operations, but the latter can be done in one less insn
	 on machines that have only two-operand insns or on which a
	 constant cannot be the first operand.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_zerop (arg1))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  if (TREE_CODE (arg00) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg00, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
				      arg01, TREE_OPERAND (arg00, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				 build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
				  arg1);
	    }
	  /* Mirror case: the shift is the second AND operand.  */
	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
		   && integer_onep (TREE_OPERAND (arg01, 0)))
	    {
	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
				      arg00, TREE_OPERAND (arg01, 1));
	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
				 build_int_cst (TREE_TYPE (arg0), 1));
	      return fold_build2_loc (loc, code, type,
				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
				  arg1);
	    }
	}

      /* If this is an NE or EQ comparison of zero against the result of a
	 signed MOD operation whose second operand is a power of 2, make
	 the MOD operation unsigned since it is simpler and equivalent.  */
      if (integer_zerop (arg1)
	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
	{
	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
				     fold_convert_loc (loc, newtype,
						       TREE_OPERAND (arg0, 0)),
				     fold_convert_loc (loc, newtype,
						       TREE_OPERAND (arg0, 1)));

	  return fold_build2_loc (loc, code, type, newmod,
			      fold_convert_loc (loc, newtype, arg1));
	}

      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
	 C1 is a valid shift constant, and C2 is a power of two, i.e.
	 a single bit.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
	     == INTEGER_CST
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && integer_zerop (arg1))
	{
	  tree itype = TREE_TYPE (arg0);
	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
	  prec = TYPE_PRECISION (itype);

	  /* Check for a valid shift count.  */
	  if (TREE_INT_CST_HIGH (arg001) == 0
	      && TREE_INT_CST_LOW (arg001) < prec)
	    {
	      tree arg01 = TREE_OPERAND (arg0, 1);
	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
		 can be rewritten as (X & (C2 << C1)) != 0.  */
	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
		{
		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
		  return fold_build2_loc (loc, code, type, tem,
					  fold_convert_loc (loc, itype, arg1));
		}
	      /* Otherwise, for signed (arithmetic) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
	      else if (!TYPE_UNSIGNED (itype))
		return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
				    arg000, build_int_cst (itype, 0));
	      /* Otherwise, of unsigned (logical) shifts,
		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
	      else
		return omit_one_operand_loc (loc, type,
					 code == EQ_EXPR ? integer_one_node
							 : integer_zero_node,
					 arg000);
	    }
	}

      /* If we have (A & C) == C where C is a power of 2, convert this into
	 (A & C) != 0.  Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_pow2p (TREE_OPERAND (arg0, 1))
	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
			    arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
						    integer_zero_node));

      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
	 bit, then fold the expression into A < 0 or A >= 0.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
      if (tem)
	return tem;

      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
				   TREE_OPERAND (arg0, 1));
	  tree dandnotc
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
			       notc);
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (dandnotc))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
	 Similarly for NE_EXPR.  */
      if (TREE_CODE (arg0) == BIT_IOR_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
	  tree candnotd
	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
			       TREE_OPERAND (arg0, 1),
			       fold_convert_loc (loc, TREE_TYPE (arg0), notd));
	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
	  if (integer_nonzerop (candnotd))
	    return omit_one_operand_loc (loc, type, rslt, arg0);
	}

      /* If this is a comparison of a field, we may be able to simplify it.  */
      if ((TREE_CODE (arg0) == COMPONENT_REF
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}

      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if (TREE_CODE (arg0) == CALL_EXPR
	  && integer_zerop (arg1))
	{
	  tree fndecl = get_callee_fndecl (arg0);

	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && call_expr_nargs (arg0) == 1
	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
	    {
	      tree iref = build_fold_indirect_ref_loc (loc,
						   CALL_EXPR_ARG (arg0, 0));
	      return fold_build2_loc (loc, code, type, iref,
				  build_int_cst (TREE_TYPE (iref), 0));
	    }
	}
13286 
13287       /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13288 	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
13289       if (TREE_CODE (arg0) == RSHIFT_EXPR
13290 	  && integer_zerop (arg1)
13291 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13292 	{
13293 	  tree arg00 = TREE_OPERAND (arg0, 0);
13294 	  tree arg01 = TREE_OPERAND (arg0, 1);
13295 	  tree itype = TREE_TYPE (arg00);
13296 	  if (TREE_INT_CST_HIGH (arg01) == 0
13297 	      && TREE_INT_CST_LOW (arg01)
13298 		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13299 	    {
13300 	      if (TYPE_UNSIGNED (itype))
13301 		{
13302 		  itype = signed_type_for (itype);
13303 		  arg00 = fold_convert_loc (loc, itype, arg00);
13304 		}
13305 	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13306 				  type, arg00, build_zero_cst (itype));
13307 	    }
13308 	}
13309 
13310       /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
13311       if (integer_zerop (arg1)
13312 	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
13313 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13314 			    TREE_OPERAND (arg0, 1));
13315 
13316       /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
13317       if (TREE_CODE (arg0) == BIT_XOR_EXPR
13318 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13319 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13320 				build_zero_cst (TREE_TYPE (arg0)));
13321       /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
13322       if (TREE_CODE (arg0) == BIT_XOR_EXPR
13323 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13324 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13325 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13326 				build_zero_cst (TREE_TYPE (arg0)));
13327 
13328       /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
13329       if (TREE_CODE (arg0) == BIT_XOR_EXPR
13330 	  && TREE_CODE (arg1) == INTEGER_CST
13331 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13332 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13333 			    fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13334 					 TREE_OPERAND (arg0, 1), arg1));
13335 
13336       /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13337 	 (X & C) == 0 when C is a single bit.  */
13338       if (TREE_CODE (arg0) == BIT_AND_EXPR
13339 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13340 	  && integer_zerop (arg1)
13341 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
13342 	{
13343 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13344 				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13345 				 TREE_OPERAND (arg0, 1));
13346 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13347 				  type, tem,
13348 				  fold_convert_loc (loc, TREE_TYPE (arg0),
13349 						    arg1));
13350 	}
13351 
13352       /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13353 	 constant C is a power of two, i.e. a single bit.  */
13354       if (TREE_CODE (arg0) == BIT_XOR_EXPR
13355 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13356 	  && integer_zerop (arg1)
13357 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
13358 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13359 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13360 	{
13361 	  tree arg00 = TREE_OPERAND (arg0, 0);
13362 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13363 			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
13364 	}
13365 
      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
	 when C is a power of two, i.e. a single bit.  */
13368       if (TREE_CODE (arg0) == BIT_AND_EXPR
13369 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13370 	  && integer_zerop (arg1)
13371 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
13372 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13373 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13374 	{
13375 	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13376 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13377 			     arg000, TREE_OPERAND (arg0, 1));
13378 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13379 			      tem, build_int_cst (TREE_TYPE (tem), 0));
13380 	}
13381 
13382       if (integer_zerop (arg1)
13383 	  && tree_expr_nonzero_p (arg0))
13384         {
13385 	  tree res = constant_boolean_node (code==NE_EXPR, type);
13386 	  return omit_one_operand_loc (loc, type, res, arg0);
13387 	}
13388 
13389       /* Fold -X op -Y as X op Y, where op is eq/ne.  */
13390       if (TREE_CODE (arg0) == NEGATE_EXPR
13391           && TREE_CODE (arg1) == NEGATE_EXPR)
13392 	return fold_build2_loc (loc, code, type,
13393 				TREE_OPERAND (arg0, 0),
13394 				fold_convert_loc (loc, TREE_TYPE (arg0),
13395 						  TREE_OPERAND (arg1, 0)));
13396 
      /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
13398       if (TREE_CODE (arg0) == BIT_AND_EXPR
13399 	  && TREE_CODE (arg1) == BIT_AND_EXPR)
13400 	{
13401 	  tree arg00 = TREE_OPERAND (arg0, 0);
13402 	  tree arg01 = TREE_OPERAND (arg0, 1);
13403 	  tree arg10 = TREE_OPERAND (arg1, 0);
13404 	  tree arg11 = TREE_OPERAND (arg1, 1);
13405 	  tree itype = TREE_TYPE (arg0);
13406 
13407 	  if (operand_equal_p (arg01, arg11, 0))
13408 	    return fold_build2_loc (loc, code, type,
13409 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
13410 					     fold_build2_loc (loc,
13411 							  BIT_XOR_EXPR, itype,
13412 							  arg00, arg10),
13413 					     arg01),
13414 				build_zero_cst (itype));
13415 
13416 	  if (operand_equal_p (arg01, arg10, 0))
13417 	    return fold_build2_loc (loc, code, type,
13418 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
13419 					     fold_build2_loc (loc,
13420 							  BIT_XOR_EXPR, itype,
13421 							  arg00, arg11),
13422 					     arg01),
13423 				build_zero_cst (itype));
13424 
13425 	  if (operand_equal_p (arg00, arg11, 0))
13426 	    return fold_build2_loc (loc, code, type,
13427 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
13428 					     fold_build2_loc (loc,
13429 							  BIT_XOR_EXPR, itype,
13430 							  arg01, arg10),
13431 					     arg00),
13432 				build_zero_cst (itype));
13433 
13434 	  if (operand_equal_p (arg00, arg10, 0))
13435 	    return fold_build2_loc (loc, code, type,
13436 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
13437 					     fold_build2_loc (loc,
13438 							  BIT_XOR_EXPR, itype,
13439 							  arg01, arg11),
13440 					     arg00),
13441 				build_zero_cst (itype));
13442 	}
13443 
13444       if (TREE_CODE (arg0) == BIT_XOR_EXPR
13445 	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
13446 	{
13447 	  tree arg00 = TREE_OPERAND (arg0, 0);
13448 	  tree arg01 = TREE_OPERAND (arg0, 1);
13449 	  tree arg10 = TREE_OPERAND (arg1, 0);
13450 	  tree arg11 = TREE_OPERAND (arg1, 1);
13451 	  tree itype = TREE_TYPE (arg0);
13452 
13453 	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13454 	     operand_equal_p guarantees no side-effects so we don't need
13455 	     to use omit_one_operand on Z.  */
13456 	  if (operand_equal_p (arg01, arg11, 0))
13457 	    return fold_build2_loc (loc, code, type, arg00,
13458 				    fold_convert_loc (loc, TREE_TYPE (arg00),
13459 						      arg10));
13460 	  if (operand_equal_p (arg01, arg10, 0))
13461 	    return fold_build2_loc (loc, code, type, arg00,
13462 				    fold_convert_loc (loc, TREE_TYPE (arg00),
13463 						      arg11));
13464 	  if (operand_equal_p (arg00, arg11, 0))
13465 	    return fold_build2_loc (loc, code, type, arg01,
13466 				    fold_convert_loc (loc, TREE_TYPE (arg01),
13467 						      arg10));
13468 	  if (operand_equal_p (arg00, arg10, 0))
13469 	    return fold_build2_loc (loc, code, type, arg01,
13470 				    fold_convert_loc (loc, TREE_TYPE (arg01),
13471 						      arg11));
13472 
13473 	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
13474 	  if (TREE_CODE (arg01) == INTEGER_CST
13475 	      && TREE_CODE (arg11) == INTEGER_CST)
13476 	    {
13477 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13478 				     fold_convert_loc (loc, itype, arg11));
13479 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13480 	      return fold_build2_loc (loc, code, type, tem,
13481 				      fold_convert_loc (loc, itype, arg10));
13482 	    }
13483 	}
13484 
13485       /* Attempt to simplify equality/inequality comparisons of complex
13486 	 values.  Only lower the comparison if the result is known or
13487 	 can be simplified to a single scalar comparison.  */
13488       if ((TREE_CODE (arg0) == COMPLEX_EXPR
13489 	   || TREE_CODE (arg0) == COMPLEX_CST)
13490 	  && (TREE_CODE (arg1) == COMPLEX_EXPR
13491 	      || TREE_CODE (arg1) == COMPLEX_CST))
13492 	{
13493 	  tree real0, imag0, real1, imag1;
13494 	  tree rcond, icond;
13495 
13496 	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
13497 	    {
13498 	      real0 = TREE_OPERAND (arg0, 0);
13499 	      imag0 = TREE_OPERAND (arg0, 1);
13500 	    }
13501 	  else
13502 	    {
13503 	      real0 = TREE_REALPART (arg0);
13504 	      imag0 = TREE_IMAGPART (arg0);
13505 	    }
13506 
13507 	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
13508 	    {
13509 	      real1 = TREE_OPERAND (arg1, 0);
13510 	      imag1 = TREE_OPERAND (arg1, 1);
13511 	    }
13512 	  else
13513 	    {
13514 	      real1 = TREE_REALPART (arg1);
13515 	      imag1 = TREE_IMAGPART (arg1);
13516 	    }
13517 
13518 	  rcond = fold_binary_loc (loc, code, type, real0, real1);
13519 	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13520 	    {
13521 	      if (integer_zerop (rcond))
13522 		{
13523 		  if (code == EQ_EXPR)
13524 		    return omit_two_operands_loc (loc, type, boolean_false_node,
13525 					      imag0, imag1);
13526 		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13527 		}
13528 	      else
13529 		{
13530 		  if (code == NE_EXPR)
13531 		    return omit_two_operands_loc (loc, type, boolean_true_node,
13532 					      imag0, imag1);
13533 		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13534 		}
13535 	    }
13536 
13537 	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
13538 	  if (icond && TREE_CODE (icond) == INTEGER_CST)
13539 	    {
13540 	      if (integer_zerop (icond))
13541 		{
13542 		  if (code == EQ_EXPR)
13543 		    return omit_two_operands_loc (loc, type, boolean_false_node,
13544 					      real0, real1);
13545 		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13546 		}
13547 	      else
13548 		{
13549 		  if (code == NE_EXPR)
13550 		    return omit_two_operands_loc (loc, type, boolean_true_node,
13551 					      real0, real1);
13552 		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13553 		}
13554 	    }
13555 	}
13556 
13557       return NULL_TREE;
13558 
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      /* First try the folder shared by all ordered comparisons.  */
      tem = fold_comparison (loc, code, type, op0, op1);
      if (tem != NULL_TREE)
	return tem;

      /* Transform comparisons of the form X +- C CMP X.  Only valid
	 when overflow is undefined (integers) or SNaNs need not be
	 honored (reals), as checked below.  */
      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  enum tree_code code0 = TREE_CODE (arg0);
	  int is_positive;

	  /* Sign of the constant c: -1 or +1 for reals, the usual
	     -1/0/+1 tree sign for integer constants.  */
	  if (TREE_CODE (arg01) == REAL_CST)
	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
	  else
	    is_positive = tree_int_cst_sgn (arg01);

	  /* (X - c) > X becomes false.  */
	  if (code == GT_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that (X - c) > X "
					"is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Likewise (X + c) < X becomes false.  */
	  if (code == LT_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) < X is always false"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (0, type);
	    }

	  /* Convert (X - c) <= X to true.  Not valid with NaNs, since
	     a NaN compares unordered against everything.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == LE_EXPR
	      && ((code0 == MINUS_EXPR && is_positive >= 0)
		  || (code0 == PLUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X - c) <= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* Convert (X + c) >= X to true.  Likewise invalid with NaNs.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
	      && code == GE_EXPR
	      && ((code0 == PLUS_EXPR && is_positive >= 0)
		  || (code0 == MINUS_EXPR && is_positive <= 0)))
	    {
	      if (TREE_CODE (arg01) == INTEGER_CST
		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		fold_overflow_warning (("assuming signed overflow does not "
					"occur when assuming that "
					"(X + c) >= X is always true"),
				       WARN_STRICT_OVERFLOW_ALL);
	      return constant_boolean_node (1, type);
	    }

	  /* The strict (non-zero c) variants only hold for integers.  */
	  if (TREE_CODE (arg01) == INTEGER_CST)
	    {
	      /* Convert X + c > X and X - c < X to true for integers.  */
	      if (code == GT_EXPR
	          && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) > X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      if (code == LT_EXPR
	          && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) < X is always true"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (1, type);
		}

	      /* Convert X + c <= X and X - c >= X to false for integers.  */
	      if (code == LE_EXPR
	          && ((code0 == PLUS_EXPR && is_positive > 0)
		      || (code0 == MINUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X + c) <= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}

	      if (code == GE_EXPR
	          && ((code0 == MINUS_EXPR && is_positive > 0)
		      || (code0 == PLUS_EXPR && is_positive < 0)))
		{
		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
		    fold_overflow_warning (("assuming signed overflow does "
					    "not occur when assuming that "
					    "(X - c) >= X is always false"),
					   WARN_STRICT_OVERFLOW_ALL);
		  return constant_boolean_node (0, type);
		}
	    }
	}

      /* Comparisons with the highest or lowest possible integer of
	 the specified precision will have known values.  */
      {
	tree arg1_type = TREE_TYPE (arg1);
	unsigned int width = TYPE_PRECISION (arg1_type);

	if (TREE_CODE (arg1) == INTEGER_CST
	    && width <= HOST_BITS_PER_DOUBLE_INT
	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
	  {
	    HOST_WIDE_INT signed_max_hi;
	    unsigned HOST_WIDE_INT signed_max_lo;
	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;

	    /* Compute the type's extreme values as hi/lo HOST_WIDE_INT
	       pairs; the narrow case leaves the high words trivial.  */
	    if (width <= HOST_BITS_PER_WIDE_INT)
	      {
		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		signed_max_hi = 0;
		max_hi = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_lo = 0;
		    min_hi = 0;
		  }
		else
		  {
		    max_lo = signed_max_lo;
		    min_lo = (HOST_WIDE_INT_M1U << (width - 1));
		    min_hi = -1;
		  }
	      }
	    else
	      {
		/* Wide case: only the excess bits live in the high word.  */
		width -= HOST_BITS_PER_WIDE_INT;
		signed_max_lo = -1;
		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
				- 1;
		max_lo = -1;
		min_lo = 0;

		if (TYPE_UNSIGNED (arg1_type))
		  {
		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
		    min_hi = 0;
		  }
		else
		  {
		    max_hi = signed_max_hi;
		    min_hi = (HOST_WIDE_INT_M1U << (width - 1));
		  }
	      }

	    /* Comparison against the maximum value of the type.  */
	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
		&& TREE_INT_CST_LOW (arg1) == max_lo)
	      switch (code)
		{
		case GT_EXPR:
		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

		case GE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case LE_EXPR:
		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);

		case LT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		/* The GE_EXPR and LT_EXPR cases above are not normally
		   reached because of previous transformations.  */

		default:
		  break;
		}
	    /* Comparison against the maximum value minus one.  */
	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == max_hi
		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
	      switch (code)
		{
		case GT_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, EQ_EXPR, type,
				      fold_convert_loc (loc,
							TREE_TYPE (arg1), arg0),
				      arg1);
		case LE_EXPR:
		  arg1 = const_binop (PLUS_EXPR, arg1,
				      build_int_cst (TREE_TYPE (arg1), 1));
		  return fold_build2_loc (loc, NE_EXPR, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							arg0),
				      arg1);
		default:
		  break;
		}
	    /* Comparison against the minimum value of the type.  */
	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo)
	      switch (code)
		{
		case LT_EXPR:
		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

		case LE_EXPR:
		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);

		case GE_EXPR:
		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);

		case GT_EXPR:
		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);

		default:
		  break;
		}
	    /* Comparison against the minimum value plus one.  */
	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
		     == min_hi
		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
	      switch (code)
		{
		case GE_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, NE_EXPR, type,
				      fold_convert_loc (loc,
							TREE_TYPE (arg1), arg0),
				      arg1);
		case LT_EXPR:
		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
		  return fold_build2_loc (loc, EQ_EXPR, type,
				      fold_convert_loc (loc, TREE_TYPE (arg1),
							arg0),
				      arg1);
		default:
		  break;
		}

	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
		     && TYPE_UNSIGNED (arg1_type)
		     /* We will flip the signedness of the comparison operator
			associated with the mode of arg1, so the sign bit is
			specified by this mode.  Check that arg1 is the signed
			max associated with this sign bit.  */
		     && width == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
		     /* signed_type does not work on pointer types.  */
		     && INTEGRAL_TYPE_P (arg1_type))
	      {
		/* The following case also applies to X < signed_max+1
		   and X >= signed_max+1 because of previous transformations.  */
		if (code == LE_EXPR || code == GT_EXPR)
		  {
		    tree st = signed_type_for (arg1_type);
		    return fold_build2_loc (loc,
					code == LE_EXPR ? GE_EXPR : LT_EXPR,
					type, fold_convert_loc (loc, st, arg0),
					build_int_cst (st, 0));
		  }
	      }
	  }
      }

      /* If we are comparing an ABS_EXPR with a constant, we can
	 convert all the cases into explicit comparisons, but they may
	 well not be faster than doing the ABS and one comparison.
	 But ABS (X) <= C is a range comparison, which becomes a subtraction
	 and a comparison, and is probably faster.  */
      if (code == LE_EXPR
	  && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == ABS_EXPR
	  && ! TREE_SIDE_EFFECTS (arg0)
	  && (0 != (tem = negate_expr (arg1)))
	  && TREE_CODE (tem) == INTEGER_CST
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
			    build2 (GE_EXPR, type,
				    TREE_OPERAND (arg0, 0), tem),
			    build2 (LE_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1));

      /* Convert ABS_EXPR<x> >= 0 to true.  */
      strict_overflow_p = false;
      if (code == GE_EXPR
	  && (integer_zerop (arg1)
	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
		  && real_zerop (arg1)))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (true, type),
				       arg0);
	}

      /* Convert ABS_EXPR<x> < 0 to false.  */
      strict_overflow_p = false;
      if (code == LT_EXPR
	  && (integer_zerop (arg1) || real_zerop (arg1))
	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
	{
	  if (strict_overflow_p)
	    fold_overflow_warning (("assuming signed overflow does not occur "
				    "when simplifying comparison of "
				    "absolute value and zero"),
				   WARN_STRICT_OVERFLOW_CONDITIONAL);
	  return omit_one_operand_loc (loc, type,
				       constant_boolean_node (false, type),
				       arg0);
	}

      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
	 and similarly for >= into !=.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && TREE_CODE (arg1) == LSHIFT_EXPR
	  && integer_onep (TREE_OPERAND (arg1, 0)))
	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				   TREE_OPERAND (arg1, 1)),
			   build_zero_cst (TREE_TYPE (arg0)));

      /* Similarly for X < (cast) (1 << Y).  But cast can't be narrowing,
	 otherwise Y might be >= # of bits in X's type and thus e.g.
	 (unsigned char) (1 << Y) for Y 15 might be 0.
	 If the cast is widening, then 1 << Y should have unsigned type,
	 otherwise if Y is number of bits in the signed shift type minus 1,
	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y
	 31 might be 0xffffffff80000000.  */
      if ((code == LT_EXPR || code == GE_EXPR)
	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
	  && CONVERT_EXPR_P (arg1)
	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	  && (TYPE_PRECISION (TREE_TYPE (arg1))
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
	      || (TYPE_PRECISION (TREE_TYPE (arg1))
		  == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	{
	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
			     build_zero_cst (TREE_TYPE (arg0)));
	}

      return NULL_TREE;
13950 
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      /* Both operands constant: evaluate the relation at compile time.  */
      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  t1 = fold_relational_const (code, type, arg0, arg1);
	  if (t1 != NULL_TREE)
	    return t1;
	}

      /* If the first operand is NaN, the result is constant.  */
      if (TREE_CODE (arg0) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg1);
	}

      /* If the second operand is NaN, the result is constant.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	  && (code != LTGT_EXPR || ! flag_trapping_math))
	{
	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
	       ? integer_zero_node
	       : integer_one_node;
	  return omit_one_operand_loc (loc, type, t1, arg0);
	}

      /* Simplify unordered comparison of something with itself.  */
      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (1, type);

      /* X LTGT X is false, but only when the comparison cannot trap.  */
      if (code == LTGT_EXPR
	  && !flag_trapping_math
	  && operand_equal_p (arg0, arg1, 0))
	return constant_boolean_node (0, type);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      {
	tree targ0 = strip_float_extensions (arg0);
	tree targ1 = strip_float_extensions (arg1);
	tree newtype = TREE_TYPE (targ0);

	/* Compare in the wider of the two stripped types...  */
	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	  newtype = TREE_TYPE (targ1);

	/* ...but only if that is still narrower than the original type.  */
	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	  return fold_build2_loc (loc, code, type,
			      fold_convert_loc (loc, newtype, targ0),
			      fold_convert_loc (loc, newtype, targ1));
      }

      return NULL_TREE;
14014 
    case COMPOUND_EXPR:
      /* (E1, E2) folds to E2 only when E1 has no side effects and E2
	 is not a constant (to preserve constant-expression diagnostics).
	 When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return NULL_TREE;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert_loc (loc, type, arg1);
      return pedantic_non_lvalue_loc (loc, tem);
14024 
    case COMPLEX_EXPR:
      /* COMPLEX_EXPR <a, b> with both parts constant becomes a
	 COMPLEX_CST.  */
      if ((TREE_CODE (arg0) == REAL_CST
	   && TREE_CODE (arg1) == REAL_CST)
	  || (TREE_CODE (arg0) == INTEGER_CST
	      && TREE_CODE (arg1) == INTEGER_CST))
	return build_complex (type, arg0, arg1);
      /* COMPLEX_EXPR <REALPART_EXPR <z>, IMAGPART_EXPR <z>> reassembles
	 z when both parts are taken from the same complex value.  */
      if (TREE_CODE (arg0) == REALPART_EXPR
	  && TREE_CODE (arg1) == IMAGPART_EXPR
	  && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
	  && operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0))
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0));
      return NULL_TREE;

    case ASSERT_EXPR:
      /* An ASSERT_EXPR should never be passed to fold_binary.  */
      gcc_unreachable ();
14043 
    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	/* Each input vector supplies half of the result's elements.  */
	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
	/* Only fold when both operands are vector constants.  */
	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
	  return NULL_TREE;

	/* Gather the elements of both inputs into a single array.  */
	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts)
	    || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
	  return NULL_TREE;

	/* Narrow each element to the result's element type, giving up
	   if any element does not fold to a constant.  */
	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }
14071 
14072     case VEC_WIDEN_MULT_LO_EXPR:
14073     case VEC_WIDEN_MULT_HI_EXPR:
14074     case VEC_WIDEN_MULT_EVEN_EXPR:
14075     case VEC_WIDEN_MULT_ODD_EXPR:
14076       {
14077 	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
14078 	unsigned int out, ofs, scale;
14079 	tree *elts;
14080 
14081 	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
14082 		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
14083 	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
14084 	  return NULL_TREE;
14085 
14086 	elts = XALLOCAVEC (tree, nelts * 4);
14087 	if (!vec_cst_ctor_to_array (arg0, elts)
14088 	    || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
14089 	  return NULL_TREE;
14090 
14091 	if (code == VEC_WIDEN_MULT_LO_EXPR)
14092 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
14093 	else if (code == VEC_WIDEN_MULT_HI_EXPR)
14094 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
14095 	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
14096 	  scale = 1, ofs = 0;
14097 	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
14098 	  scale = 1, ofs = 1;
14099 
14100 	for (out = 0; out < nelts; out++)
14101 	  {
14102 	    unsigned int in1 = (out << scale) + ofs;
14103 	    unsigned int in2 = in1 + nelts * 2;
14104 	    tree t1, t2;
14105 
14106 	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
14107 	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
14108 
14109 	    if (t1 == NULL_TREE || t2 == NULL_TREE)
14110 	      return NULL_TREE;
14111 	    elts[out] = const_binop (MULT_EXPR, t1, t2);
14112 	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
14113 	      return NULL_TREE;
14114 	  }
14115 
14116 	return build_vector (type, elts);
14117       }
14118 
14119     default:
14120       return NULL_TREE;
14121     } /* switch (code) */
14122 }
14123 
14124 /* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
14125    a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
14126    of GOTO_EXPR.  */
14127 
14128 static tree
contains_label_1(tree * tp,int * walk_subtrees,void * data ATTRIBUTE_UNUSED)14129 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
14130 {
14131   switch (TREE_CODE (*tp))
14132     {
14133     case LABEL_EXPR:
14134       return *tp;
14135 
14136     case GOTO_EXPR:
14137       *walk_subtrees = 0;
14138 
14139       /* ... fall through ...  */
14140 
14141     default:
14142       return NULL_TREE;
14143     }
14144 }
14145 
14146 /* Return whether the sub-tree ST contains a label which is accessible from
14147    outside the sub-tree.  */
14148 
14149 static bool
contains_label_p(tree st)14150 contains_label_p (tree st)
14151 {
14152   return
14153    (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
14154 }
14155 
14156 /* Fold a ternary expression of code CODE and type TYPE with operands
14157    OP0, OP1, and OP2.  Return the folded expression if folding is
14158    successful.  Otherwise, return NULL_TREE.  */
14159 
14160 tree
fold_ternary_loc(location_t loc,enum tree_code code,tree type,tree op0,tree op1,tree op2)14161 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
14162 		  tree op0, tree op1, tree op2)
14163 {
14164   tree tem;
14165   tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
14166   enum tree_code_class kind = TREE_CODE_CLASS (code);
14167 
14168   gcc_assert (IS_EXPR_CODE_CLASS (kind)
14169 	      && TREE_CODE_LENGTH (code) == 3);
14170 
14171   /* Strip any conversions that don't change the mode.  This is safe
14172      for every expression, except for a comparison expression because
14173      its signedness is derived from its operands.  So, in the latter
14174      case, only strip conversions that don't change the signedness.
14175 
14176      Note that this is done as an internal manipulation within the
14177      constant folder, in order to find the simplest representation of
14178      the arguments so that their form can be studied.  In any cases,
14179      the appropriate type conversions should be put back in the tree
14180      that will get out of the constant folder.  */
14181   if (op0)
14182     {
14183       arg0 = op0;
14184       STRIP_NOPS (arg0);
14185     }
14186 
14187   if (op1)
14188     {
14189       arg1 = op1;
14190       STRIP_NOPS (arg1);
14191     }
14192 
14193   if (op2)
14194     {
14195       arg2 = op2;
14196       STRIP_NOPS (arg2);
14197     }
14198 
14199   switch (code)
14200     {
14201     case COMPONENT_REF:
14202       if (TREE_CODE (arg0) == CONSTRUCTOR
14203 	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14204 	{
14205 	  unsigned HOST_WIDE_INT idx;
14206 	  tree field, value;
14207 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14208 	    if (field == arg1)
14209 	      return value;
14210 	}
14211       return NULL_TREE;
14212 
14213     case COND_EXPR:
14214     case VEC_COND_EXPR:
14215       /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14216 	 so all simple results must be passed through pedantic_non_lvalue.  */
14217       if (TREE_CODE (arg0) == INTEGER_CST)
14218 	{
14219 	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
14220 	  tem = integer_zerop (arg0) ? op2 : op1;
14221 	  /* Only optimize constant conditions when the selected branch
14222 	     has the same type as the COND_EXPR.  This avoids optimizing
14223              away "c ? x : throw", where the throw has a void type.
14224              Avoid throwing away that operand which contains label.  */
14225           if ((!TREE_SIDE_EFFECTS (unused_op)
14226                || !contains_label_p (unused_op))
14227               && (! VOID_TYPE_P (TREE_TYPE (tem))
14228                   || VOID_TYPE_P (type)))
14229 	    return pedantic_non_lvalue_loc (loc, tem);
14230 	  return NULL_TREE;
14231 	}
14232       else if (TREE_CODE (arg0) == VECTOR_CST)
14233 	{
14234 	  if (integer_all_onesp (arg0))
14235 	    return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14236 	  if (integer_zerop (arg0))
14237 	    return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14238 
14239 	  if ((TREE_CODE (arg1) == VECTOR_CST
14240 	       || TREE_CODE (arg1) == CONSTRUCTOR)
14241 	      && (TREE_CODE (arg2) == VECTOR_CST
14242 		  || TREE_CODE (arg2) == CONSTRUCTOR))
14243 	    {
14244 	      unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14245 	      unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14246 	      gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14247 	      for (i = 0; i < nelts; i++)
14248 		{
14249 		  tree val = VECTOR_CST_ELT (arg0, i);
14250 		  if (integer_all_onesp (val))
14251 		    sel[i] = i;
14252 		  else if (integer_zerop (val))
14253 		    sel[i] = nelts + i;
14254 		  else /* Currently unreachable.  */
14255 		    return NULL_TREE;
14256 		}
14257 	      tree t = fold_vec_perm (type, arg1, arg2, sel);
14258 	      if (t != NULL_TREE)
14259 		return t;
14260 	    }
14261 	}
14262 
14263       if (operand_equal_p (arg1, op2, 0))
14264 	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14265 
14266       /* If we have A op B ? A : C, we may be able to convert this to a
14267 	 simpler expression, depending on the operation and the values
14268 	 of B and C.  Signed zeros prevent all of these transformations,
14269 	 for reasons given above each one.
14270 
14271          Also try swapping the arguments and inverting the conditional.  */
14272       if (COMPARISON_CLASS_P (arg0)
14273 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14274 					     arg1, TREE_OPERAND (arg0, 1))
14275 	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14276 	{
14277 	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14278 	  if (tem)
14279 	    return tem;
14280 	}
14281 
14282       if (COMPARISON_CLASS_P (arg0)
14283 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14284 					     op2,
14285 					     TREE_OPERAND (arg0, 1))
14286 	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14287 	{
14288 	  location_t loc0 = expr_location_or (arg0, loc);
14289 	  tem = fold_invert_truthvalue (loc0, arg0);
14290 	  if (tem && COMPARISON_CLASS_P (tem))
14291 	    {
14292 	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14293 	      if (tem)
14294 		return tem;
14295 	    }
14296 	}
14297 
14298       /* If the second operand is simpler than the third, swap them
14299 	 since that produces better jump optimization results.  */
14300       if (truth_value_p (TREE_CODE (arg0))
14301 	  && tree_swap_operands_p (op1, op2, false))
14302 	{
14303 	  location_t loc0 = expr_location_or (arg0, loc);
14304 	  /* See if this can be inverted.  If it can't, possibly because
14305 	     it was a floating-point inequality comparison, don't do
14306 	     anything.  */
14307 	  tem = fold_invert_truthvalue (loc0, arg0);
14308 	  if (tem)
14309 	    return fold_build3_loc (loc, code, type, tem, op2, op1);
14310 	}
14311 
14312       /* Convert A ? 1 : 0 to simply A.  */
14313       if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14314 				 : (integer_onep (op1)
14315 				    && !VECTOR_TYPE_P (type)))
14316 	  && integer_zerop (op2)
14317 	  /* If we try to convert OP0 to our type, the
14318 	     call to fold will try to move the conversion inside
14319 	     a COND, which will recurse.  In that case, the COND_EXPR
14320 	     is probably the best choice, so leave it alone.  */
14321 	  && type == TREE_TYPE (arg0))
14322 	return pedantic_non_lvalue_loc (loc, arg0);
14323 
14324       /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
14325 	 over COND_EXPR in cases such as floating point comparisons.  */
14326       if (integer_zerop (op1)
14327 	  && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14328 				    : (integer_onep (op2)
14329 				       && !VECTOR_TYPE_P (type)))
14330 	  && truth_value_p (TREE_CODE (arg0)))
14331 	return pedantic_non_lvalue_loc (loc,
14332 				    fold_convert_loc (loc, type,
14333 					      invert_truthvalue_loc (loc,
14334 								     arg0)));
14335 
14336       /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
14337       if (TREE_CODE (arg0) == LT_EXPR
14338 	  && integer_zerop (TREE_OPERAND (arg0, 1))
14339 	  && integer_zerop (op2)
14340 	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14341 	{
14342 	  /* sign_bit_p looks through both zero and sign extensions,
14343 	     but for this optimization only sign extensions are
14344 	     usable.  */
14345 	  tree tem2 = TREE_OPERAND (arg0, 0);
14346 	  while (tem != tem2)
14347 	    {
14348 	      if (TREE_CODE (tem2) != NOP_EXPR
14349 		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14350 		{
14351 		  tem = NULL_TREE;
14352 		  break;
14353 		}
14354 	      tem2 = TREE_OPERAND (tem2, 0);
14355 	    }
14356 	  /* sign_bit_p only checks ARG1 bits within A's precision.
14357 	     If <sign bit of A> has wider type than A, bits outside
14358 	     of A's precision in <sign bit of A> need to be checked.
14359 	     If they are all 0, this optimization needs to be done
14360 	     in unsigned A's type, if they are all 1 in signed A's type,
14361 	     otherwise this can't be done.  */
14362 	  if (tem
14363 	      && TYPE_PRECISION (TREE_TYPE (tem))
14364 		 < TYPE_PRECISION (TREE_TYPE (arg1))
14365 	      && TYPE_PRECISION (TREE_TYPE (tem))
14366 		 < TYPE_PRECISION (type))
14367 	    {
14368 	      unsigned HOST_WIDE_INT mask_lo;
14369 	      HOST_WIDE_INT mask_hi;
14370 	      int inner_width, outer_width;
14371 	      tree tem_type;
14372 
14373 	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14374 	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14375 	      if (outer_width > TYPE_PRECISION (type))
14376 		outer_width = TYPE_PRECISION (type);
14377 
14378 	      if (outer_width > HOST_BITS_PER_WIDE_INT)
14379 		{
14380 		  mask_hi = (HOST_WIDE_INT_M1U
14381 			     >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14382 		  mask_lo = -1;
14383 		}
14384 	      else
14385 		{
14386 		  mask_hi = 0;
14387 		  mask_lo = (HOST_WIDE_INT_M1U
14388 			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
14389 		}
14390 	      if (inner_width > HOST_BITS_PER_WIDE_INT)
14391 		{
14392 		  mask_hi &= ~(HOST_WIDE_INT_M1U
14393 			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
14394 		  mask_lo = 0;
14395 		}
14396 	      else
14397 		mask_lo &= ~(HOST_WIDE_INT_M1U
14398 			     >> (HOST_BITS_PER_WIDE_INT - inner_width));
14399 
14400 	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14401 		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14402 		{
14403 		  tem_type = signed_type_for (TREE_TYPE (tem));
14404 		  tem = fold_convert_loc (loc, tem_type, tem);
14405 		}
14406 	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14407 		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14408 		{
14409 		  tem_type = unsigned_type_for (TREE_TYPE (tem));
14410 		  tem = fold_convert_loc (loc, tem_type, tem);
14411 		}
14412 	      else
14413 		tem = NULL;
14414 	    }
14415 
14416 	  if (tem)
14417 	    return
14418 	      fold_convert_loc (loc, type,
14419 				fold_build2_loc (loc, BIT_AND_EXPR,
14420 					     TREE_TYPE (tem), tem,
14421 					     fold_convert_loc (loc,
14422 							       TREE_TYPE (tem),
14423 							       arg1)));
14424 	}
14425 
14426       /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
14427 	 already handled above.  */
14428       if (TREE_CODE (arg0) == BIT_AND_EXPR
14429 	  && integer_onep (TREE_OPERAND (arg0, 1))
14430 	  && integer_zerop (op2)
14431 	  && integer_pow2p (arg1))
14432 	{
14433 	  tree tem = TREE_OPERAND (arg0, 0);
14434 	  STRIP_NOPS (tem);
14435 	  if (TREE_CODE (tem) == RSHIFT_EXPR
14436               && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14437               && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14438 	         TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14439 	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
14440 				TREE_OPERAND (tem, 0), arg1);
14441 	}
14442 
14443       /* A & N ? N : 0 is simply A & N if N is a power of two.  This
14444 	 is probably obsolete because the first operand should be a
14445 	 truth value (that's why we have the two cases above), but let's
14446 	 leave it in until we can confirm this for all front-ends.  */
14447       if (integer_zerop (op2)
14448 	  && TREE_CODE (arg0) == NE_EXPR
14449 	  && integer_zerop (TREE_OPERAND (arg0, 1))
14450 	  && integer_pow2p (arg1)
14451 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14452 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14453 			      arg1, OEP_ONLY_CONST))
14454 	return pedantic_non_lvalue_loc (loc,
14455 				    fold_convert_loc (loc, type,
14456 						      TREE_OPERAND (arg0, 0)));
14457 
14458       /* Disable the transformations below for vectors, since
14459 	 fold_binary_op_with_conditional_arg may undo them immediately,
14460 	 yielding an infinite loop.  */
14461       if (code == VEC_COND_EXPR)
14462 	return NULL_TREE;
14463 
14464       /* Convert A ? B : 0 into A && B if A and B are truth values.  */
14465       if (integer_zerop (op2)
14466 	  && truth_value_p (TREE_CODE (arg0))
14467 	  && truth_value_p (TREE_CODE (arg1))
14468 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14469 	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14470 							   : TRUTH_ANDIF_EXPR,
14471 				type, fold_convert_loc (loc, type, arg0), arg1);
14472 
14473       /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
14474       if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
14475 	  && truth_value_p (TREE_CODE (arg0))
14476 	  && truth_value_p (TREE_CODE (arg1))
14477 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14478 	{
14479 	  location_t loc0 = expr_location_or (arg0, loc);
14480 	  /* Only perform transformation if ARG0 is easily inverted.  */
14481 	  tem = fold_invert_truthvalue (loc0, arg0);
14482 	  if (tem)
14483 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
14484 					 ? BIT_IOR_EXPR
14485 					 : TRUTH_ORIF_EXPR,
14486 				    type, fold_convert_loc (loc, type, tem),
14487 				    arg1);
14488 	}
14489 
14490       /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
14491       if (integer_zerop (arg1)
14492 	  && truth_value_p (TREE_CODE (arg0))
14493 	  && truth_value_p (TREE_CODE (op2))
14494 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14495 	{
14496 	  location_t loc0 = expr_location_or (arg0, loc);
14497 	  /* Only perform transformation if ARG0 is easily inverted.  */
14498 	  tem = fold_invert_truthvalue (loc0, arg0);
14499 	  if (tem)
14500 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
14501 					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14502 				    type, fold_convert_loc (loc, type, tem),
14503 				    op2);
14504 	}
14505 
14506       /* Convert A ? 1 : B into A || B if A and B are truth values.  */
14507       if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
14508 	  && truth_value_p (TREE_CODE (arg0))
14509 	  && truth_value_p (TREE_CODE (op2))
14510 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14511 	return fold_build2_loc (loc, code == VEC_COND_EXPR
14512 				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14513 				type, fold_convert_loc (loc, type, arg0), op2);
14514 
14515       return NULL_TREE;
14516 
14517     case CALL_EXPR:
14518       /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
14519 	 of fold_ternary on them.  */
14520       gcc_unreachable ();
14521 
14522     case BIT_FIELD_REF:
14523       if ((TREE_CODE (arg0) == VECTOR_CST
14524 	   || (TREE_CODE (arg0) == CONSTRUCTOR
14525 	       && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14526 	  && (type == TREE_TYPE (TREE_TYPE (arg0))
14527 	      || (TREE_CODE (type) == VECTOR_TYPE
14528 		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14529 	{
14530 	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14531 	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14532 	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14533 	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14534 
14535 	  if (n != 0
14536 	      && (idx % width) == 0
14537 	      && (n % width) == 0
14538 	      && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14539 	    {
14540 	      idx = idx / width;
14541 	      n = n / width;
14542 
14543 	      if (TREE_CODE (arg0) == VECTOR_CST)
14544 		{
14545 		  if (n == 1)
14546 		    return VECTOR_CST_ELT (arg0, idx);
14547 
14548 		  tree *vals = XALLOCAVEC (tree, n);
14549 		  for (unsigned i = 0; i < n; ++i)
14550 		    vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14551 		  return build_vector (type, vals);
14552 		}
14553 
14554 	      /* Constructor elements can be subvectors.  */
14555 	      unsigned HOST_WIDE_INT k = 1;
14556 	      if (CONSTRUCTOR_NELTS (arg0) != 0)
14557 		{
14558 		  tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14559 		  if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14560 		    k = TYPE_VECTOR_SUBPARTS (cons_elem);
14561 		}
14562 
14563 	      /* We keep an exact subset of the constructor elements.  */
14564 	      if ((idx % k) == 0 && (n % k) == 0)
14565 		{
14566 		  if (CONSTRUCTOR_NELTS (arg0) == 0)
14567 		    return build_constructor (type, NULL);
14568 		  idx /= k;
14569 		  n /= k;
14570 		  if (n == 1)
14571 		    {
14572 		      if (idx < CONSTRUCTOR_NELTS (arg0))
14573 			return CONSTRUCTOR_ELT (arg0, idx)->value;
14574 		      return build_zero_cst (type);
14575 		    }
14576 
14577 		  vec<constructor_elt, va_gc> *vals;
14578 		  vec_alloc (vals, n);
14579 		  for (unsigned i = 0;
14580 		       i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14581 		       ++i)
14582 		    CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14583 					    CONSTRUCTOR_ELT
14584 					      (arg0, idx + i)->value);
14585 		  return build_constructor (type, vals);
14586 		}
14587 	      /* The bitfield references a single constructor element.  */
14588 	      else if (idx + n <= (idx / k + 1) * k)
14589 		{
14590 		  if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14591 		    return build_zero_cst (type);
14592 		  else if (n == k)
14593 		    return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14594 		  else
14595 		    return fold_build3_loc (loc, code, type,
14596 		      CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14597 		      build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14598 		}
14599 	    }
14600 	}
14601 
14602       /* A bit-field-ref that referenced the full argument can be stripped.  */
14603       if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14604 	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14605 	  && integer_zerop (op2))
14606 	return fold_convert_loc (loc, type, arg0);
14607 
14608       /* On constants we can use native encode/interpret to constant
14609          fold (nearly) all BIT_FIELD_REFs.  */
14610       if (CONSTANT_CLASS_P (arg0)
14611 	  && can_native_interpret_type_p (type)
14612 	  && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14613 	  /* This limitation should not be necessary, we just need to
14614 	     round this up to mode size.  */
14615 	  && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14616 	  /* Need bit-shifting of the buffer to relax the following.  */
14617 	  && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14618 	{
14619 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14620 	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14621 	  unsigned HOST_WIDE_INT clen;
14622 	  clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14623 	  /* ???  We cannot tell native_encode_expr to start at
14624 	     some random byte only.  So limit us to a reasonable amount
14625 	     of work.  */
14626 	  if (clen <= 4096)
14627 	    {
14628 	      unsigned char *b = XALLOCAVEC (unsigned char, clen);
14629 	      unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14630 	      if (len > 0
14631 		  && len * BITS_PER_UNIT >= bitpos + bitsize)
14632 		{
14633 		  tree v = native_interpret_expr (type,
14634 						  b + bitpos / BITS_PER_UNIT,
14635 						  bitsize / BITS_PER_UNIT);
14636 		  if (v)
14637 		    return v;
14638 		}
14639 	    }
14640 	}
14641 
14642       return NULL_TREE;
14643 
14644     case FMA_EXPR:
14645       /* For integers we can decompose the FMA if possible.  */
14646       if (TREE_CODE (arg0) == INTEGER_CST
14647 	  && TREE_CODE (arg1) == INTEGER_CST)
14648 	return fold_build2_loc (loc, PLUS_EXPR, type,
14649 				const_binop (MULT_EXPR, arg0, arg1), arg2);
14650       if (integer_zerop (arg2))
14651 	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14652 
14653       return fold_fma (loc, type, arg0, arg1, arg2);
14654 
14655     case VEC_PERM_EXPR:
14656       if (TREE_CODE (arg2) == VECTOR_CST)
14657 	{
14658 	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14659 	  unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14660 	  tree t;
14661 	  bool need_mask_canon = false;
14662 	  bool all_in_vec0 = true;
14663 	  bool all_in_vec1 = true;
14664 	  bool maybe_identity = true;
14665 	  bool single_arg = (op0 == op1);
14666 	  bool changed = false;
14667 
14668 	  mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14669 	  gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14670 	  for (i = 0; i < nelts; i++)
14671 	    {
14672 	      tree val = VECTOR_CST_ELT (arg2, i);
14673 	      if (TREE_CODE (val) != INTEGER_CST)
14674 		return NULL_TREE;
14675 
14676 	      sel[i] = TREE_INT_CST_LOW (val) & mask;
14677 	      if (TREE_INT_CST_HIGH (val)
14678 		  || ((unsigned HOST_WIDE_INT)
14679 		      TREE_INT_CST_LOW (val) != sel[i]))
14680 		need_mask_canon = true;
14681 
14682 	      if (sel[i] < nelts)
14683 		all_in_vec1 = false;
14684 	      else
14685 		all_in_vec0 = false;
14686 
14687 	      if ((sel[i] & (nelts-1)) != i)
14688 		maybe_identity = false;
14689 	    }
14690 
14691 	  if (maybe_identity)
14692 	    {
14693 	      if (all_in_vec0)
14694 		return op0;
14695 	      if (all_in_vec1)
14696 		return op1;
14697 	    }
14698 
14699 	  if (all_in_vec0)
14700 	    op1 = op0;
14701 	  else if (all_in_vec1)
14702 	    {
14703 	      op0 = op1;
14704 	      for (i = 0; i < nelts; i++)
14705 		sel[i] -= nelts;
14706 	      need_mask_canon = true;
14707 	    }
14708 
14709 	  if ((TREE_CODE (op0) == VECTOR_CST
14710 	       || TREE_CODE (op0) == CONSTRUCTOR)
14711 	      && (TREE_CODE (op1) == VECTOR_CST
14712 		  || TREE_CODE (op1) == CONSTRUCTOR))
14713 	    {
14714 	      t = fold_vec_perm (type, op0, op1, sel);
14715 	      if (t != NULL_TREE)
14716 		return t;
14717 	    }
14718 
14719 	  if (op0 == op1 && !single_arg)
14720 	    changed = true;
14721 
14722 	  if (need_mask_canon && arg2 == op2)
14723 	    {
14724 	      tree *tsel = XALLOCAVEC (tree, nelts);
14725 	      tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14726 	      for (i = 0; i < nelts; i++)
14727 		tsel[i] = build_int_cst (eltype, sel[i]);
14728 	      op2 = build_vector (TREE_TYPE (arg2), tsel);
14729 	      changed = true;
14730 	    }
14731 
14732 	  if (changed)
14733 	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14734 	}
14735       return NULL_TREE;
14736 
14737     default:
14738       return NULL_TREE;
14739     } /* switch (code) */
14740 }
14741 
14742 /* Perform constant folding and related simplification of EXPR.
14743    The related simplifications include x*1 => x, x*0 => 0, etc.,
14744    and application of the associative law.
14745    NOP_EXPR conversions may be removed freely (as long as we
14746    are careful not to change the type of the overall expression).
14747    We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14748    but we can constant-fold them if they have constant operands.  */
14749 
14750 #ifdef ENABLE_FOLD_CHECKING
14751 # define fold(x) fold_1 (x)
14752 static tree fold_1 (tree);
14753 static
14754 #endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;
  location_t loc = EXPR_LOCATION (expr);

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  /* CALL_EXPR-like objects with variable numbers of operands are
     treated specially.  */
  if (kind == tcc_vl_exp)
    {
      if (code == CALL_EXPR)
	{
	  tem = fold_call_expr (loc, expr, false);
	  return tem ? tem : expr;
	}
      return expr;
    }

  if (IS_EXPR_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      /* Dispatch on arity to the fixed-operand folders; each returns
	 NULL_TREE when no simplification applies, in which case the
	 original expression is returned unchanged.  */
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary_loc (loc, code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary_loc (loc, code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  /* Remaining special cases that are not ordinary expressions.  */
  switch (code)
    {
    case ARRAY_REF:
      {
	tree op0 = TREE_OPERAND (t, 0);
	tree op1 = TREE_OPERAND (t, 1);

	/* Constant index into a constructor: look the element up
	   directly.  */
	if (TREE_CODE (op1) == INTEGER_CST
	    && TREE_CODE (op0) == CONSTRUCTOR
	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
	  {
	    vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
	    unsigned HOST_WIDE_INT end = vec_safe_length (elts);
	    unsigned HOST_WIDE_INT begin = 0;

	    /* Find a matching index by means of a binary search.
	       Constructor indices are sorted; a RANGE_EXPR index
	       matches OP1 anywhere within [low, high].  */
	    while (begin != end)
	      {
		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
		tree index = (*elts)[middle].index;

		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_lt (index, op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == INTEGER_CST
			 && tree_int_cst_lt (op1, index))
		  end = middle;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
		  begin = middle + 1;
		else if (TREE_CODE (index) == RANGE_EXPR
			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
		  end = middle;
		else
		  return (*elts)[middle].value;
	      }
	  }

	return t;
      }

      /* Return a VECTOR_CST if possible.  */
    case CONSTRUCTOR:
      {
	tree type = TREE_TYPE (t);
	if (TREE_CODE (type) != VECTOR_TYPE)
	  return t;

	tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
	unsigned HOST_WIDE_INT idx, pos = 0;
	tree value;

	/* Flatten the constructor into VEC; elements may themselves be
	   vector constants, which are expanded element-wise.  Bail out
	   (returning T unchanged) on any non-constant element.  */
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
	  {
	    if (!CONSTANT_CLASS_P (value))
	      return t;
	    if (TREE_CODE (value) == VECTOR_CST)
	      {
		for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
		  vec[pos++] = VECTOR_CST_ELT (value, i);
	      }
	    else
	      vec[pos++] = value;
	  }
	/* Elements omitted from the constructor are implicitly zero.  */
	for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
	  vec[pos] = build_zero_cst (TREE_TYPE (type));

	return build_vector (type, vec);
      }

    case CONST_DECL:
      /* Fold the constant's initializer instead of the decl itself.  */
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
14884 
14885 #ifdef ENABLE_FOLD_CHECKING
14886 #undef fold
14887 
14888 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14889 				hash_table <pointer_hash <tree_node> >);
14890 static void fold_check_failed (const_tree, const_tree);
14891 void print_fold_checksum (const_tree);
14892 
14893 /* When --enable-checking=fold, compute a digest of expr before
14894    and after actual fold call to see if fold did not accidentally
14895    change original expr.  */
14896 
14897 tree
fold(tree expr)14898 fold (tree expr)
14899 {
14900   tree ret;
14901   struct md5_ctx ctx;
14902   unsigned char checksum_before[16], checksum_after[16];
14903   hash_table <pointer_hash <tree_node> > ht;
14904 
14905   ht.create (32);
14906   md5_init_ctx (&ctx);
14907   fold_checksum_tree (expr, &ctx, ht);
14908   md5_finish_ctx (&ctx, checksum_before);
14909   ht.empty ();
14910 
14911   ret = fold_1 (expr);
14912 
14913   md5_init_ctx (&ctx);
14914   fold_checksum_tree (expr, &ctx, ht);
14915   md5_finish_ctx (&ctx, checksum_after);
14916   ht.dispose ();
14917 
14918   if (memcmp (checksum_before, checksum_after, 16))
14919     fold_check_failed (expr, ret);
14920 
14921   return ret;
14922 }
14923 
14924 void
print_fold_checksum(const_tree expr)14925 print_fold_checksum (const_tree expr)
14926 {
14927   struct md5_ctx ctx;
14928   unsigned char checksum[16], cnt;
14929   hash_table <pointer_hash <tree_node> > ht;
14930 
14931   ht.create (32);
14932   md5_init_ctx (&ctx);
14933   fold_checksum_tree (expr, &ctx, ht);
14934   md5_finish_ctx (&ctx, checksum);
14935   ht.dispose ();
14936   for (cnt = 0; cnt < 16; ++cnt)
14937     fprintf (stderr, "%02x", checksum[cnt]);
14938   putc ('\n', stderr);
14939 }
14940 
/* Report that fold modified the tree it was given.  EXPR is the
   original operand and RET the folded result; both are unused because
   internal_error does not return.  */

static void
fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
14946 
/* Fold EXPR's bytes and those of the nodes it reaches (type, chain,
   operands, declaration and type fields) into the MD5 context CTX.
   HT records nodes already visited so each shared node is hashed at
   most once, which also terminates on cyclic structures.  Fields that
   fold is legitimately allowed to modify (DECL_ASSEMBLER_NAME and the
   type back-pointer/cache fields) are masked out by hashing a scrubbed
   stack copy of the node instead of the original.  */

static void
fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
		    hash_table <pointer_hash <tree_node> > ht)
{
  tree_node **slot;
  enum tree_code code;
  union tree_node buf;	/* scratch copy used to blank mutable fields */
  int i, len;

 recursive_label:
  if (expr == NULL)
    return;
  /* Hash each distinct node only once.  */
  slot = ht.find_slot (expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = CONST_CAST_TREE (expr);
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
      expr = (tree) &buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = tmp = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  /* Hash the node's own bytes, then recurse into its type and, for
     plain expressions, its TREE_CHAIN.  */
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
	    fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  /* Walk the list iteratively rather than recursing on the
	     chain, to bound stack depth on long lists.  */
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      /* All operand-bearing codes: hash every operand.  */
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
        fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      /* Bounds only exist for scalar numeric types.  */
      if (INTEGRAL_TYPE_P (expr)
          || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
15096 
15097 /* Helper function for outputting the checksum of a tree T.  When
15098    debugging with gdb, you can "define mynext" to be "next" followed
15099    by "call debug_fold_checksum (op0)", then just trace down till the
15100    outputs differ.  */
15101 
15102 DEBUG_FUNCTION void
debug_fold_checksum(const_tree t)15103 debug_fold_checksum (const_tree t)
15104 {
15105   int i;
15106   unsigned char checksum[16];
15107   struct md5_ctx ctx;
15108   hash_table <pointer_hash <tree_node> > ht;
15109   ht.create (32);
15110 
15111   md5_init_ctx (&ctx);
15112   fold_checksum_tree (t, &ctx, ht);
15113   md5_finish_ctx (&ctx, checksum);
15114   ht.empty ();
15115 
15116   for (i = 0; i < 16; i++)
15117     fprintf (stderr, "%d ", checksum[i]);
15118 
15119   fprintf (stderr, "\n");
15120 }
15121 
15122 #endif
15123 
15124 /* Fold a unary tree expression with code CODE of type TYPE with an
15125    operand OP0.  LOC is the location of the resulting expression.
15126    Return a folded expression if successful.  Otherwise, return a tree
15127    expression with code CODE of type TYPE with an operand OP0.  */
15128 
15129 tree
fold_build1_stat_loc(location_t loc,enum tree_code code,tree type,tree op0 MEM_STAT_DECL)15130 fold_build1_stat_loc (location_t loc,
15131 		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
15132 {
15133   tree tem;
15134 #ifdef ENABLE_FOLD_CHECKING
15135   unsigned char checksum_before[16], checksum_after[16];
15136   struct md5_ctx ctx;
15137   hash_table <pointer_hash <tree_node> > ht;
15138 
15139   ht.create (32);
15140   md5_init_ctx (&ctx);
15141   fold_checksum_tree (op0, &ctx, ht);
15142   md5_finish_ctx (&ctx, checksum_before);
15143   ht.empty ();
15144 #endif
15145 
15146   tem = fold_unary_loc (loc, code, type, op0);
15147   if (!tem)
15148     tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
15149 
15150 #ifdef ENABLE_FOLD_CHECKING
15151   md5_init_ctx (&ctx);
15152   fold_checksum_tree (op0, &ctx, ht);
15153   md5_finish_ctx (&ctx, checksum_after);
15154   ht.dispose ();
15155 
15156   if (memcmp (checksum_before, checksum_after, 16))
15157     fold_check_failed (op0, tem);
15158 #endif
15159   return tem;
15160 }
15161 
15162 /* Fold a binary tree expression with code CODE of type TYPE with
15163    operands OP0 and OP1.  LOC is the location of the resulting
15164    expression.  Return a folded expression if successful.  Otherwise,
15165    return a tree expression with code CODE of type TYPE with operands
15166    OP0 and OP1.  */
15167 
15168 tree
fold_build2_stat_loc(location_t loc,enum tree_code code,tree type,tree op0,tree op1 MEM_STAT_DECL)15169 fold_build2_stat_loc (location_t loc,
15170 		      enum tree_code code, tree type, tree op0, tree op1
15171 		      MEM_STAT_DECL)
15172 {
15173   tree tem;
15174 #ifdef ENABLE_FOLD_CHECKING
15175   unsigned char checksum_before_op0[16],
15176                 checksum_before_op1[16],
15177 		checksum_after_op0[16],
15178 		checksum_after_op1[16];
15179   struct md5_ctx ctx;
15180   hash_table <pointer_hash <tree_node> > ht;
15181 
15182   ht.create (32);
15183   md5_init_ctx (&ctx);
15184   fold_checksum_tree (op0, &ctx, ht);
15185   md5_finish_ctx (&ctx, checksum_before_op0);
15186   ht.empty ();
15187 
15188   md5_init_ctx (&ctx);
15189   fold_checksum_tree (op1, &ctx, ht);
15190   md5_finish_ctx (&ctx, checksum_before_op1);
15191   ht.empty ();
15192 #endif
15193 
15194   tem = fold_binary_loc (loc, code, type, op0, op1);
15195   if (!tem)
15196     tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15197 
15198 #ifdef ENABLE_FOLD_CHECKING
15199   md5_init_ctx (&ctx);
15200   fold_checksum_tree (op0, &ctx, ht);
15201   md5_finish_ctx (&ctx, checksum_after_op0);
15202   ht.empty ();
15203 
15204   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15205     fold_check_failed (op0, tem);
15206 
15207   md5_init_ctx (&ctx);
15208   fold_checksum_tree (op1, &ctx, ht);
15209   md5_finish_ctx (&ctx, checksum_after_op1);
15210   ht.dispose ();
15211 
15212   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15213     fold_check_failed (op1, tem);
15214 #endif
15215   return tem;
15216 }
15217 
15218 /* Fold a ternary tree expression with code CODE of type TYPE with
15219    operands OP0, OP1, and OP2.  Return a folded expression if
15220    successful.  Otherwise, return a tree expression with code CODE of
15221    type TYPE with operands OP0, OP1, and OP2.  */
15222 
15223 tree
fold_build3_stat_loc(location_t loc,enum tree_code code,tree type,tree op0,tree op1,tree op2 MEM_STAT_DECL)15224 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15225 		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
15226 {
15227   tree tem;
15228 #ifdef ENABLE_FOLD_CHECKING
15229   unsigned char checksum_before_op0[16],
15230                 checksum_before_op1[16],
15231                 checksum_before_op2[16],
15232 		checksum_after_op0[16],
15233 		checksum_after_op1[16],
15234 		checksum_after_op2[16];
15235   struct md5_ctx ctx;
15236   hash_table <pointer_hash <tree_node> > ht;
15237 
15238   ht.create (32);
15239   md5_init_ctx (&ctx);
15240   fold_checksum_tree (op0, &ctx, ht);
15241   md5_finish_ctx (&ctx, checksum_before_op0);
15242   ht.empty ();
15243 
15244   md5_init_ctx (&ctx);
15245   fold_checksum_tree (op1, &ctx, ht);
15246   md5_finish_ctx (&ctx, checksum_before_op1);
15247   ht.empty ();
15248 
15249   md5_init_ctx (&ctx);
15250   fold_checksum_tree (op2, &ctx, ht);
15251   md5_finish_ctx (&ctx, checksum_before_op2);
15252   ht.empty ();
15253 #endif
15254 
15255   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15256   tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15257   if (!tem)
15258     tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15259 
15260 #ifdef ENABLE_FOLD_CHECKING
15261   md5_init_ctx (&ctx);
15262   fold_checksum_tree (op0, &ctx, ht);
15263   md5_finish_ctx (&ctx, checksum_after_op0);
15264   ht.empty ();
15265 
15266   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15267     fold_check_failed (op0, tem);
15268 
15269   md5_init_ctx (&ctx);
15270   fold_checksum_tree (op1, &ctx, ht);
15271   md5_finish_ctx (&ctx, checksum_after_op1);
15272   ht.empty ();
15273 
15274   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15275     fold_check_failed (op1, tem);
15276 
15277   md5_init_ctx (&ctx);
15278   fold_checksum_tree (op2, &ctx, ht);
15279   md5_finish_ctx (&ctx, checksum_after_op2);
15280   ht.dispose ();
15281 
15282   if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15283     fold_check_failed (op2, tem);
15284 #endif
15285   return tem;
15286 }
15287 
15288 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
15289    arguments in ARGARRAY, and a null static chain.
15290    Return a folded expression if successful.  Otherwise, return a CALL_EXPR
15291    of type TYPE from the given operands as constructed by build_call_array.  */
15292 
15293 tree
fold_build_call_array_loc(location_t loc,tree type,tree fn,int nargs,tree * argarray)15294 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15295 			   int nargs, tree *argarray)
15296 {
15297   tree tem;
15298 #ifdef ENABLE_FOLD_CHECKING
15299   unsigned char checksum_before_fn[16],
15300                 checksum_before_arglist[16],
15301 		checksum_after_fn[16],
15302 		checksum_after_arglist[16];
15303   struct md5_ctx ctx;
15304   hash_table <pointer_hash <tree_node> > ht;
15305   int i;
15306 
15307   ht.create (32);
15308   md5_init_ctx (&ctx);
15309   fold_checksum_tree (fn, &ctx, ht);
15310   md5_finish_ctx (&ctx, checksum_before_fn);
15311   ht.empty ();
15312 
15313   md5_init_ctx (&ctx);
15314   for (i = 0; i < nargs; i++)
15315     fold_checksum_tree (argarray[i], &ctx, ht);
15316   md5_finish_ctx (&ctx, checksum_before_arglist);
15317   ht.empty ();
15318 #endif
15319 
15320   tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15321 
15322 #ifdef ENABLE_FOLD_CHECKING
15323   md5_init_ctx (&ctx);
15324   fold_checksum_tree (fn, &ctx, ht);
15325   md5_finish_ctx (&ctx, checksum_after_fn);
15326   ht.empty ();
15327 
15328   if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15329     fold_check_failed (fn, tem);
15330 
15331   md5_init_ctx (&ctx);
15332   for (i = 0; i < nargs; i++)
15333     fold_checksum_tree (argarray[i], &ctx, ht);
15334   md5_finish_ctx (&ctx, checksum_after_arglist);
15335   ht.dispose ();
15336 
15337   if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15338     fold_check_failed (NULL_TREE, tem);
15339 #endif
15340   return tem;
15341 }
15342 
15343 /* Perform constant folding and related simplification of initializer
15344    expression EXPR.  These behave identically to "fold_buildN" but ignore
15345    potential run-time traps and exceptions that fold must preserve.  */
15346 
/* Save the -f flags that make fold preserve run-time traps and
   rounding behavior, then clear them and enter initializer-folding
   mode.  Declares block-scope locals, so it must appear at the start
   of a block and be paired with END_FOLD_INIT in the same block.  */
#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

/* Restore the flags saved by START_FOLD_INIT and leave
   initializer-folding mode.  */
#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;
15365 
15366 tree
fold_build1_initializer_loc(location_t loc,enum tree_code code,tree type,tree op)15367 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15368 			     tree type, tree op)
15369 {
15370   tree result;
15371   START_FOLD_INIT;
15372 
15373   result = fold_build1_loc (loc, code, type, op);
15374 
15375   END_FOLD_INIT;
15376   return result;
15377 }
15378 
15379 tree
fold_build2_initializer_loc(location_t loc,enum tree_code code,tree type,tree op0,tree op1)15380 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15381 			     tree type, tree op0, tree op1)
15382 {
15383   tree result;
15384   START_FOLD_INIT;
15385 
15386   result = fold_build2_loc (loc, code, type, op0, op1);
15387 
15388   END_FOLD_INIT;
15389   return result;
15390 }
15391 
15392 tree
fold_build_call_array_initializer_loc(location_t loc,tree type,tree fn,int nargs,tree * argarray)15393 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15394 				       int nargs, tree *argarray)
15395 {
15396   tree result;
15397   START_FOLD_INIT;
15398 
15399   result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15400 
15401   END_FOLD_INIT;
15402   return result;
15403 }
15404 
15405 #undef START_FOLD_INIT
15406 #undef END_FOLD_INIT
15407 
15408 /* Determine if first argument is a multiple of second argument.  Return 0 if
15409    it is not, or we cannot easily determined it to be.
15410 
15411    An example of the sort of thing we care about (at this point; this routine
15412    could surely be made more general, and expanded to do what the *_DIV_EXPR's
15413    fold cases do now) is discovering that
15414 
15415      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15416 
15417    is a multiple of
15418 
15419      SAVE_EXPR (J * 8)
15420 
15421    when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15422 
15423    This code also handles discovering that
15424 
15425      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15426 
15427    is a multiple of 8 so we don't have to worry about dealing with a
15428    possible remainder.
15429 
15430    Note that we *look* inside a SAVE_EXPR only to determine how it was
15431    calculated; it is not safe for fold to do much of anything else with the
15432    internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15433    at run time.  For example, the latter example above *cannot* be implemented
15434    as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15435    evaluation time of the original SAVE_EXPR is not necessarily the same at
15436    the time the new expression is evaluated.  The only optimization of this
15437    sort that would be valid is changing
15438 
15439      SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15440 
15441    divided by 8 to
15442 
15443      SAVE_EXPR (I) * SAVE_EXPR (J)
15444 
15445    (where the same SAVE_EXPR (J) is used in the original and the
15446    transformed version).  */
15447 
15448 int
multiple_of_p(tree type,const_tree top,const_tree bottom)15449 multiple_of_p (tree type, const_tree top, const_tree bottom)
15450 {
15451   if (operand_equal_p (top, bottom, 0))
15452     return 1;
15453 
15454   if (TREE_CODE (type) != INTEGER_TYPE)
15455     return 0;
15456 
15457   switch (TREE_CODE (top))
15458     {
15459     case BIT_AND_EXPR:
15460       /* Bitwise and provides a power of two multiple.  If the mask is
15461 	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
15462       if (!integer_pow2p (bottom))
15463 	return 0;
15464       /* FALLTHRU */
15465 
15466     case MULT_EXPR:
15467       return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15468 	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15469 
15470     case PLUS_EXPR:
15471     case MINUS_EXPR:
15472       return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15473 	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15474 
15475     case LSHIFT_EXPR:
15476       if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15477 	{
15478 	  tree op1, t1;
15479 
15480 	  op1 = TREE_OPERAND (top, 1);
15481 	  /* const_binop may not detect overflow correctly,
15482 	     so check for it explicitly here.  */
15483 	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15484 	      > TREE_INT_CST_LOW (op1)
15485 	      && TREE_INT_CST_HIGH (op1) == 0
15486 	      && 0 != (t1 = fold_convert (type,
15487 					  const_binop (LSHIFT_EXPR,
15488 						       size_one_node,
15489 						       op1)))
15490 	      && !TREE_OVERFLOW (t1))
15491 	    return multiple_of_p (type, t1, bottom);
15492 	}
15493       return 0;
15494 
15495     case NOP_EXPR:
15496       /* Can't handle conversions from non-integral or wider integral type.  */
15497       if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15498 	  || (TYPE_PRECISION (type)
15499 	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15500 	return 0;
15501 
15502       /* .. fall through ...  */
15503 
15504     case SAVE_EXPR:
15505       return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15506 
15507     case COND_EXPR:
15508       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15509 	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15510 
15511     case INTEGER_CST:
15512       if (TREE_CODE (bottom) != INTEGER_CST
15513 	  || integer_zerop (bottom)
15514 	  || (TYPE_UNSIGNED (type)
15515 	      && (tree_int_cst_sgn (top) < 0
15516 		  || tree_int_cst_sgn (bottom) < 0)))
15517 	return 0;
15518       return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15519 					     top, bottom));
15520 
15521     default:
15522       return 0;
15523     }
15524 }
15525 
15526 /* Return true if CODE or TYPE is known to be non-negative. */
15527 
15528 static bool
tree_simple_nonnegative_warnv_p(enum tree_code code,tree type)15529 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15530 {
15531   if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15532       && truth_value_p (code))
15533     /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15534        have a signed:1 type (where the value is -1 and 0).  */
15535     return true;
15536   return false;
15537 }
15538 
15539 /* Return true if (CODE OP0) is known to be non-negative.  If the return
15540    value is based on the assumption that signed overflow is undefined,
15541    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15542    *STRICT_OVERFLOW_P.  */
15543 
15544 bool
tree_unary_nonnegative_warnv_p(enum tree_code code,tree type,tree op0,bool * strict_overflow_p)15545 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15546 				bool *strict_overflow_p)
15547 {
15548   if (TYPE_UNSIGNED (type))
15549     return true;
15550 
15551   switch (code)
15552     {
15553     case ABS_EXPR:
15554       /* We can't return 1 if flag_wrapv is set because
15555 	 ABS_EXPR<INT_MIN> = INT_MIN.  */
15556       if (!INTEGRAL_TYPE_P (type))
15557 	return true;
15558       if (TYPE_OVERFLOW_UNDEFINED (type))
15559 	{
15560 	  *strict_overflow_p = true;
15561 	  return true;
15562 	}
15563       break;
15564 
15565     case NON_LVALUE_EXPR:
15566     case FLOAT_EXPR:
15567     case FIX_TRUNC_EXPR:
15568       return tree_expr_nonnegative_warnv_p (op0,
15569 					    strict_overflow_p);
15570 
15571     case NOP_EXPR:
15572       {
15573 	tree inner_type = TREE_TYPE (op0);
15574 	tree outer_type = type;
15575 
15576 	if (TREE_CODE (outer_type) == REAL_TYPE)
15577 	  {
15578 	    if (TREE_CODE (inner_type) == REAL_TYPE)
15579 	      return tree_expr_nonnegative_warnv_p (op0,
15580 						    strict_overflow_p);
15581 	    if (INTEGRAL_TYPE_P (inner_type))
15582 	      {
15583 		if (TYPE_UNSIGNED (inner_type))
15584 		  return true;
15585 		return tree_expr_nonnegative_warnv_p (op0,
15586 						      strict_overflow_p);
15587 	      }
15588 	  }
15589 	else if (INTEGRAL_TYPE_P (outer_type))
15590 	  {
15591 	    if (TREE_CODE (inner_type) == REAL_TYPE)
15592 	      return tree_expr_nonnegative_warnv_p (op0,
15593 						    strict_overflow_p);
15594 	    if (INTEGRAL_TYPE_P (inner_type))
15595 	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15596 		      && TYPE_UNSIGNED (inner_type);
15597 	  }
15598       }
15599       break;
15600 
15601     default:
15602       return tree_simple_nonnegative_warnv_p (code, type);
15603     }
15604 
15605   /* We don't know sign of `t', so be conservative and return false.  */
15606   return false;
15607 }
15608 
15609 /* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
15610    value is based on the assumption that signed overflow is undefined,
15611    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15612    *STRICT_OVERFLOW_P.  */
15613 
bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				      tree op1, bool *strict_overflow_p)
{
  /* All values of an unsigned type are non-negative.  */
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      /* Floating-point addition cannot wrap, so nonneg + nonneg is
	 nonneg.  */
      if (FLOAT_TYPE_P (type))
	return (tree_expr_nonnegative_warnv_p (op0,
					       strict_overflow_p)
		&& tree_expr_nonnegative_warnv_p (op1,
						  strict_overflow_p));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      /* +1 accounts for the carry out of the addition.  */
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* x * x is always non-negative for floating point x
	     or without overflow.  */
	  if (operand_equal_p (op0, op1, 0)
	      || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
		  && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (type))
		*strict_overflow_p = true;
	      return true;
	    }
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  /* For a NOP_EXPR, look at the type being extended from;
	     for a constant, at the constant's own type.  */
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
	    : TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
	    : TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  /* A non-negative constant behaves like an unsigned value.  */
	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      /* Constants contribute only as many bits as they need.  */
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
		: TYPE_PRECISION (inner1);

	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      /* Either operand being non-negative bounds the result.  */
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      || tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      /* These require both operands to be non-negative.  */
      return (tree_expr_nonnegative_warnv_p (op0,
					     strict_overflow_p)
	      && tree_expr_nonnegative_warnv_p (op1,
						strict_overflow_p));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      /* The result of a modulus takes the sign of the dividend.  */
      return tree_expr_nonnegative_warnv_p (op0,
					    strict_overflow_p);
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
15735 
15736 /* Return true if T is known to be non-negative.  If the return
15737    value is based on the assumption that signed overflow is undefined,
15738    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15739    *STRICT_OVERFLOW_P.  */
15740 
15741 bool
tree_single_nonnegative_warnv_p(tree t,bool * strict_overflow_p)15742 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15743 {
15744   if (TYPE_UNSIGNED (TREE_TYPE (t)))
15745     return true;
15746 
15747   switch (TREE_CODE (t))
15748     {
15749     case INTEGER_CST:
15750       return tree_int_cst_sgn (t) >= 0;
15751 
15752     case REAL_CST:
15753       return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15754 
15755     case FIXED_CST:
15756       return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15757 
15758     case COND_EXPR:
15759       return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15760 					     strict_overflow_p)
15761 	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15762 						strict_overflow_p));
15763     default:
15764       return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15765 						   TREE_TYPE (t));
15766     }
15767   /* We don't know sign of `t', so be conservative and return false.  */
15768   return false;
15769 }
15770 
15771 /* Return true if T is known to be non-negative.  If the return
15772    value is based on the assumption that signed overflow is undefined,
15773    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15774    *STRICT_OVERFLOW_P.  */
15775 
bool
tree_call_nonnegative_warnv_p (tree type, tree fndecl,
			       tree arg0, tree arg1, bool *strict_overflow_p)
{
  /* Only normal built-in functions have result signs we can reason
     about here; for anything else fall through to the type-based
     answer at the bottom.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
	/* Built-ins whose result is non-negative for any argument:
	   absolute values, distances, exponentials, complementary
	   error function, and the integer bit-counting built-ins.  */
	CASE_FLT_FN (BUILT_IN_ACOS):
	CASE_FLT_FN (BUILT_IN_ACOSH):
	CASE_FLT_FN (BUILT_IN_CABS):
	CASE_FLT_FN (BUILT_IN_COSH):
	CASE_FLT_FN (BUILT_IN_ERFC):
	CASE_FLT_FN (BUILT_IN_EXP):
	CASE_FLT_FN (BUILT_IN_EXP10):
	CASE_FLT_FN (BUILT_IN_EXP2):
	CASE_FLT_FN (BUILT_IN_FABS):
	CASE_FLT_FN (BUILT_IN_FDIM):
	CASE_FLT_FN (BUILT_IN_HYPOT):
	CASE_FLT_FN (BUILT_IN_POW10):
	CASE_INT_FN (BUILT_IN_FFS):
	CASE_INT_FN (BUILT_IN_PARITY):
	CASE_INT_FN (BUILT_IN_POPCOUNT):
	CASE_INT_FN (BUILT_IN_CLZ):
	CASE_INT_FN (BUILT_IN_CLRSB):
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
	/* Always true.  */
	return true;

	CASE_FLT_FN (BUILT_IN_SQRT):
	/* sqrt(-0.0) is -0.0.  */
	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	/* Built-ins whose result is non-negative whenever their first
	   argument is: sign-preserving functions, roundings, scalings
	   and the like.  */
	CASE_FLT_FN (BUILT_IN_ASINH):
	CASE_FLT_FN (BUILT_IN_ATAN):
	CASE_FLT_FN (BUILT_IN_ATANH):
	CASE_FLT_FN (BUILT_IN_CBRT):
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_ERF):
	CASE_FLT_FN (BUILT_IN_EXPM1):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_FMOD):
	CASE_FLT_FN (BUILT_IN_FREXP):
	CASE_FLT_FN (BUILT_IN_ICEIL):
	CASE_FLT_FN (BUILT_IN_IFLOOR):
	CASE_FLT_FN (BUILT_IN_IRINT):
	CASE_FLT_FN (BUILT_IN_IROUND):
	CASE_FLT_FN (BUILT_IN_LCEIL):
	CASE_FLT_FN (BUILT_IN_LDEXP):
	CASE_FLT_FN (BUILT_IN_LFLOOR):
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	CASE_FLT_FN (BUILT_IN_LLRINT):
	CASE_FLT_FN (BUILT_IN_LLROUND):
	CASE_FLT_FN (BUILT_IN_LRINT):
	CASE_FLT_FN (BUILT_IN_LROUND):
	CASE_FLT_FN (BUILT_IN_MODF):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_SCALB):
	CASE_FLT_FN (BUILT_IN_SCALBLN):
	CASE_FLT_FN (BUILT_IN_SCALBN):
	CASE_FLT_FN (BUILT_IN_SIGNBIT):
	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
	CASE_FLT_FN (BUILT_IN_SINH):
	CASE_FLT_FN (BUILT_IN_TANH):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	/* True if the 1st argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_FMAX):
	/* True if the 1st OR 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		|| (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_FMIN):
	/* True if the 1st AND 2nd arguments are nonnegative.  */
	return (tree_expr_nonnegative_warnv_p (arg0,
					       strict_overflow_p)
		&& (tree_expr_nonnegative_warnv_p (arg1,
						   strict_overflow_p)));

	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	/* True if the 2nd argument is nonnegative.  */
	return tree_expr_nonnegative_warnv_p (arg1,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POWI):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer.  */
	if (TREE_CODE (arg1) == INTEGER_CST
	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	  return true;
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

	CASE_FLT_FN (BUILT_IN_POW):
	/* True if the 1st argument is nonnegative or the second
	   argument is an even integer valued real.  */
	if (TREE_CODE (arg1) == REAL_CST)
	  {
	    REAL_VALUE_TYPE c;
	    HOST_WIDE_INT n;

	    /* Truncate the exponent to an integer and check both that
	       it is even and that the round trip back to a real does
	       not change it, i.e. the exponent really is an even
	       integer-valued real.  */
	    c = TREE_REAL_CST (arg1);
	    n = real_to_integer (&c);
	    if ((n & 1) == 0)
	      {
		REAL_VALUE_TYPE cint;
		real_from_integer (&cint, VOIDmode, n,
				   n < 0 ? -1 : 0, 0);
		if (real_identical (&c, &cint))
		  return true;
	      }
	  }
	return tree_expr_nonnegative_warnv_p (arg0,
					      strict_overflow_p);

      default:
	break;
      }
  /* Unknown function: all we can use is the sign implied by the
     call's result type.  */
  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
					  type);
}
15907 
15908 /* Return true if T is known to be non-negative.  If the return
15909    value is based on the assumption that signed overflow is undefined,
15910    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15911    *STRICT_OVERFLOW_P.  */
15912 
static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
{
  enum tree_code code = TREE_CODE (t);
  /* A value of unsigned type is trivially non-negative.  */
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    /* Strip wrappers until we reach the last expression that
	       could be storing into the slot.  */
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	/* If that last expression assigns to the slot, the slot's sign
	   is the sign of the stored value.  */
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
						strict_overflow_p);

	return false;
      }

    case CALL_EXPR:
      {
	/* At most the first two arguments matter; that is all the
	   built-in analysis in tree_call_nonnegative_warnv_p uses.  */
	tree arg0 = call_expr_nargs (t) > 0 ?  CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_callee_fndecl (t),
					      arg0,
					      arg1,
					      strict_overflow_p);
      }
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      /* The value of these expressions is their second operand.  */
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
					    strict_overflow_p);
    case BIND_EXPR:
      /* The value of a BIND_EXPR is the last expression in its body.  */
      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
					    strict_overflow_p);
    case SAVE_EXPR:
      /* A SAVE_EXPR has the sign of the expression it caches.  */
      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
					    strict_overflow_p);

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
						   TREE_TYPE (t));
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
15984 
15985 /* Return true if T is known to be non-negative.  If the return
15986    value is based on the assumption that signed overflow is undefined,
15987    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15988    *STRICT_OVERFLOW_P.  */
15989 
15990 bool
tree_expr_nonnegative_warnv_p(tree t,bool * strict_overflow_p)15991 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15992 {
15993   enum tree_code code;
15994   if (t == error_mark_node)
15995     return false;
15996 
15997   code = TREE_CODE (t);
15998   switch (TREE_CODE_CLASS (code))
15999     {
16000     case tcc_binary:
16001     case tcc_comparison:
16002       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
16003 					      TREE_TYPE (t),
16004 					      TREE_OPERAND (t, 0),
16005 					      TREE_OPERAND (t, 1),
16006 					      strict_overflow_p);
16007 
16008     case tcc_unary:
16009       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
16010 					     TREE_TYPE (t),
16011 					     TREE_OPERAND (t, 0),
16012 					     strict_overflow_p);
16013 
16014     case tcc_constant:
16015     case tcc_declaration:
16016     case tcc_reference:
16017       return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
16018 
16019     default:
16020       break;
16021     }
16022 
16023   switch (code)
16024     {
16025     case TRUTH_AND_EXPR:
16026     case TRUTH_OR_EXPR:
16027     case TRUTH_XOR_EXPR:
16028       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
16029 					      TREE_TYPE (t),
16030 					      TREE_OPERAND (t, 0),
16031 					      TREE_OPERAND (t, 1),
16032 					      strict_overflow_p);
16033     case TRUTH_NOT_EXPR:
16034       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
16035 					     TREE_TYPE (t),
16036 					     TREE_OPERAND (t, 0),
16037 					     strict_overflow_p);
16038 
16039     case COND_EXPR:
16040     case CONSTRUCTOR:
16041     case OBJ_TYPE_REF:
16042     case ASSERT_EXPR:
16043     case ADDR_EXPR:
16044     case WITH_SIZE_EXPR:
16045     case SSA_NAME:
16046       return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
16047 
16048     default:
16049       return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
16050     }
16051 }
16052 
16053 /* Return true if `t' is known to be non-negative.  Handle warnings
16054    about undefined signed overflow.  */
16055 
16056 bool
tree_expr_nonnegative_p(tree t)16057 tree_expr_nonnegative_p (tree t)
16058 {
16059   bool ret, strict_overflow_p;
16060 
16061   strict_overflow_p = false;
16062   ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
16063   if (strict_overflow_p)
16064     fold_overflow_warning (("assuming signed overflow does not occur when "
16065 			    "determining that expression is always "
16066 			    "non-negative"),
16067 			   WARN_STRICT_OVERFLOW_MISC);
16068   return ret;
16069 }
16070 
16071 
16072 /* Return true when (CODE OP0) is an address and is known to be nonzero.
16073    For floating point we further ensure that T is not denormal.
16074    Similar logic is present in nonzero_address in rtlanal.h.
16075 
16076    If the return value is based on the assumption that signed overflow
16077    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16078    change *STRICT_OVERFLOW_P.  */
16079 
16080 bool
tree_unary_nonzero_warnv_p(enum tree_code code,tree type,tree op0,bool * strict_overflow_p)16081 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
16082 				 bool *strict_overflow_p)
16083 {
16084   switch (code)
16085     {
16086     case ABS_EXPR:
16087       return tree_expr_nonzero_warnv_p (op0,
16088 					strict_overflow_p);
16089 
16090     case NOP_EXPR:
16091       {
16092 	tree inner_type = TREE_TYPE (op0);
16093 	tree outer_type = type;
16094 
16095 	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
16096 		&& tree_expr_nonzero_warnv_p (op0,
16097 					      strict_overflow_p));
16098       }
16099       break;
16100 
16101     case NON_LVALUE_EXPR:
16102       return tree_expr_nonzero_warnv_p (op0,
16103 					strict_overflow_p);
16104 
16105     default:
16106       break;
16107   }
16108 
16109   return false;
16110 }
16111 
16112 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
16113    For floating point we further ensure that T is not denormal.
16114    Similar logic is present in nonzero_address in rtlanal.h.
16115 
16116    If the return value is based on the assumption that signed overflow
16117    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16118    change *STRICT_OVERFLOW_P.  */
16119 
16120 bool
tree_binary_nonzero_warnv_p(enum tree_code code,tree type,tree op0,tree op1,bool * strict_overflow_p)16121 tree_binary_nonzero_warnv_p (enum tree_code code,
16122 			     tree type,
16123 			     tree op0,
16124 			     tree op1, bool *strict_overflow_p)
16125 {
16126   bool sub_strict_overflow_p;
16127   switch (code)
16128     {
16129     case POINTER_PLUS_EXPR:
16130     case PLUS_EXPR:
16131       if (TYPE_OVERFLOW_UNDEFINED (type))
16132 	{
16133 	  /* With the presence of negative values it is hard
16134 	     to say something.  */
16135 	  sub_strict_overflow_p = false;
16136 	  if (!tree_expr_nonnegative_warnv_p (op0,
16137 					      &sub_strict_overflow_p)
16138 	      || !tree_expr_nonnegative_warnv_p (op1,
16139 						 &sub_strict_overflow_p))
16140 	    return false;
16141 	  /* One of operands must be positive and the other non-negative.  */
16142 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
16143 	     overflows, on a twos-complement machine the sum of two
16144 	     nonnegative numbers can never be zero.  */
16145 	  return (tree_expr_nonzero_warnv_p (op0,
16146 					     strict_overflow_p)
16147 		  || tree_expr_nonzero_warnv_p (op1,
16148 						strict_overflow_p));
16149 	}
16150       break;
16151 
16152     case MULT_EXPR:
16153       if (TYPE_OVERFLOW_UNDEFINED (type))
16154 	{
16155 	  if (tree_expr_nonzero_warnv_p (op0,
16156 					 strict_overflow_p)
16157 	      && tree_expr_nonzero_warnv_p (op1,
16158 					    strict_overflow_p))
16159 	    {
16160 	      *strict_overflow_p = true;
16161 	      return true;
16162 	    }
16163 	}
16164       break;
16165 
16166     case MIN_EXPR:
16167       sub_strict_overflow_p = false;
16168       if (tree_expr_nonzero_warnv_p (op0,
16169 				     &sub_strict_overflow_p)
16170 	  && tree_expr_nonzero_warnv_p (op1,
16171 					&sub_strict_overflow_p))
16172 	{
16173 	  if (sub_strict_overflow_p)
16174 	    *strict_overflow_p = true;
16175 	}
16176       break;
16177 
16178     case MAX_EXPR:
16179       sub_strict_overflow_p = false;
16180       if (tree_expr_nonzero_warnv_p (op0,
16181 				     &sub_strict_overflow_p))
16182 	{
16183 	  if (sub_strict_overflow_p)
16184 	    *strict_overflow_p = true;
16185 
16186 	  /* When both operands are nonzero, then MAX must be too.  */
16187 	  if (tree_expr_nonzero_warnv_p (op1,
16188 					 strict_overflow_p))
16189 	    return true;
16190 
16191 	  /* MAX where operand 0 is positive is positive.  */
16192 	  return tree_expr_nonnegative_warnv_p (op0,
16193 					       strict_overflow_p);
16194 	}
16195       /* MAX where operand 1 is positive is positive.  */
16196       else if (tree_expr_nonzero_warnv_p (op1,
16197 					  &sub_strict_overflow_p)
16198 	       && tree_expr_nonnegative_warnv_p (op1,
16199 						 &sub_strict_overflow_p))
16200 	{
16201 	  if (sub_strict_overflow_p)
16202 	    *strict_overflow_p = true;
16203 	  return true;
16204 	}
16205       break;
16206 
16207     case BIT_IOR_EXPR:
16208       return (tree_expr_nonzero_warnv_p (op1,
16209 					 strict_overflow_p)
16210 	      || tree_expr_nonzero_warnv_p (op0,
16211 					    strict_overflow_p));
16212 
16213     default:
16214       break;
16215   }
16216 
16217   return false;
16218 }
16219 
16220 /* Return true when T is an address and is known to be nonzero.
16221    For floating point we further ensure that T is not denormal.
16222    Similar logic is present in nonzero_address in rtlanal.h.
16223 
16224    If the return value is based on the assumption that signed overflow
16225    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16226    change *STRICT_OVERFLOW_P.  */
16227 
bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      /* A constant is nonzero exactly when it is not the zero value.  */
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);
	if (!DECL_P (base))
	  base = get_base_address (base);

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  Other things may also be NULL
	   so protect with -fdelete-null-pointer-checks; but not variables
	   allocated on the stack.  */
	if (DECL_P (base)
	    && (flag_delete_null_pointer_checks
		|| (DECL_CONTEXT (base)
		    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
		    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
	  return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      /* A conditional is nonzero when both of its arms are.  Only
	 propagate the strict-overflow assumption if the answer is
	 actually used (i.e. we return true).  */
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    default:
      break;
    }
  /* Be conservative for everything we cannot analyze.  */
  return false;
}
16281 
16282 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16283    attempt to fold the expression to a constant without modifying TYPE,
16284    OP0 or OP1.
16285 
16286    If the expression could be simplified to a constant, then return
16287    the constant.  If the expression would not be simplified to a
16288    constant, then return NULL_TREE.  */
16289 
16290 tree
fold_binary_to_constant(enum tree_code code,tree type,tree op0,tree op1)16291 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16292 {
16293   tree tem = fold_binary (code, type, op0, op1);
16294   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16295 }
16296 
16297 /* Given the components of a unary expression CODE, TYPE and OP0,
16298    attempt to fold the expression to a constant without modifying
16299    TYPE or OP0.
16300 
16301    If the expression could be simplified to a constant, then return
16302    the constant.  If the expression would not be simplified to a
16303    constant, then return NULL_TREE.  */
16304 
16305 tree
fold_unary_to_constant(enum tree_code code,tree type,tree op0)16306 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16307 {
16308   tree tem = fold_unary (code, type, op0);
16309   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16310 }
16311 
16312 /* If EXP represents referencing an element in a constant string
16313    (either via pointer arithmetic or array indexing), return the
16314    tree representing the value accessed, otherwise return NULL.  */
16315 
tree
fold_read_from_constant_string (tree exp)
{
  /* Only handle *p and a[i] forms whose result is an integer type
     (i.e. a character read).  */
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				 fold_convert_loc (loc, sizetype, low_bound));

	  string = exp1;
	}

      /* Only fold when we really have a STRING_CST indexed by a known
	 in-bounds constant, the access type matches the element type,
	 and the elements are single-byte integers.  */
      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
16363 
16364 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16365    an integer constant, real, or fixed-point constant.
16366 
16367    TYPE is the type of the result.  */
16368 
static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	double_int val = tree_to_double_int (arg0);
	bool overflow;
	val = val.neg_with_overflow (&overflow);
	/* Mark the result as overflowed only for signed types;
	   unsigned negation wraps and is not an overflow.  Any
	   overflow flag already on ARG0 is propagated too.  */
	t = force_fit_type_double (type, val, 1,
				   (overflow | TREE_OVERFLOW (arg0))
				   && !TYPE_UNSIGNED (type));
	break;
      }

    case REAL_CST:
      /* Real negation just flips the sign; it cannot overflow.  */
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
        FIXED_VALUE_TYPE f;
        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      /* Callers guarantee ARG0 is an integer, real or fixed constant.  */
      gcc_unreachable ();
    }

  return t;
}
16410 
16411 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16412    an integer constant or real constant.
16413 
16414    TYPE is the type of the result.  */
16415 
16416 tree
fold_abs_const(tree arg0,tree type)16417 fold_abs_const (tree arg0, tree type)
16418 {
16419   tree t = NULL_TREE;
16420 
16421   switch (TREE_CODE (arg0))
16422     {
16423     case INTEGER_CST:
16424       {
16425 	double_int val = tree_to_double_int (arg0);
16426 
16427         /* If the value is unsigned or non-negative, then the absolute value
16428 	   is the same as the ordinary value.  */
16429 	if (TYPE_UNSIGNED (type)
16430 	    || !val.is_negative ())
16431 	  t = arg0;
16432 
16433 	/* If the value is negative, then the absolute value is
16434 	   its negation.  */
16435 	else
16436 	  {
16437 	    bool overflow;
16438 	    val = val.neg_with_overflow (&overflow);
16439 	    t = force_fit_type_double (type, val, -1,
16440 				       overflow | TREE_OVERFLOW (arg0));
16441 	  }
16442       }
16443       break;
16444 
16445     case REAL_CST:
16446       if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16447 	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16448       else
16449 	t =  arg0;
16450       break;
16451 
16452     default:
16453       gcc_unreachable ();
16454     }
16455 
16456   return t;
16457 }
16458 
16459 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16460    constant.  TYPE is the type of the result.  */
16461 
16462 static tree
fold_not_const(const_tree arg0,tree type)16463 fold_not_const (const_tree arg0, tree type)
16464 {
16465   double_int val;
16466 
16467   gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16468 
16469   val = ~tree_to_double_int (arg0);
16470   return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16471 }
16472 
16473 /* Given CODE, a relational operator, the target type, TYPE and two
16474    constant operands OP0 and OP1, return the result of the
16475    relational operation.  If the result is not a compile time
16476    constant, then return NULL_TREE.  */
16477 
static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    /* NaN compares unordered, so EQ/ORDERED are false...  */
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    /* ...and NE and all the unordered comparisons are true.  */
	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
              result = 1;
	      break;

	    /* Ordered comparisons with a NaN raise an invalid-operand
	       exception, so don't fold them away when traps matter.  */
	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      /* Compare real and imaginary parts separately and combine:
	 equal iff both parts equal, unequal iff either part differs.  */
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      /* Compare element-wise; a vector comparison result uses 0 for
	 false and all-ones (-1) for true in each lane.  */
      unsigned count = VECTOR_CST_NELTS (op0);
      tree *elts =  XALLOCAVEC (tree, count);
      gcc_assert (VECTOR_CST_NELTS (op1) == count
		  && TYPE_VECTOR_SUBPARTS (type) == count);

      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  /* Bail out completely if any lane fails to fold.  */
	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
	}

      return build_vector (type, elts);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
16625 
16626 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16627    indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
16628    itself.  */
16629 
16630 tree
fold_build_cleanup_point_expr(tree type,tree expr)16631 fold_build_cleanup_point_expr (tree type, tree expr)
16632 {
16633   /* If the expression does not have side effects then we don't have to wrap
16634      it with a cleanup point expression.  */
16635   if (!TREE_SIDE_EFFECTS (expr))
16636     return expr;
16637 
16638   /* If the expression is a return, check to see if the expression inside the
16639      return has no side effects or the right hand side of the modify expression
16640      inside the return. If either don't have side effects set we don't need to
16641      wrap the expression in a cleanup point expression.  Note we don't check the
16642      left hand side of the modify because it should always be a return decl.  */
16643   if (TREE_CODE (expr) == RETURN_EXPR)
16644     {
16645       tree op = TREE_OPERAND (expr, 0);
16646       if (!op || !TREE_SIDE_EFFECTS (op))
16647         return expr;
16648       op = TREE_OPERAND (op, 1);
16649       if (!TREE_SIDE_EFFECTS (op))
16650         return expr;
16651     }
16652 
16653   return build1 (CLEANUP_POINT_EXPR, type, expr);
16654 }
16655 
16656 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16657    of an indirection through OP0, or NULL_TREE if no simplification is
16658    possible.  */
16659 
tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  /* Look through no-op conversions to find the underlying pointer.  */
  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* Only pointer-valued operands can be dereferenced.  */
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  /* Index the array at its lower bound, which need not be zero.  */
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  /* In GIMPLE, a non-constant lower bound would create invalid IL.  */
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  /* Extract the first element: width is the element size in
	     bits, bit position is zero.  */
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* Handle a dereference of &object + constant-byte-offset.  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (TREE_CODE (op00type) == VECTOR_TYPE
	      && type == TREE_TYPE (op00type))
	    {
	      /* NOTE(review): tree_to_shwi is called without a
		 tree_fits_shwi_p check; presumably OP01 is always small
		 enough here — confirm, since an out-of-range INTEGER_CST
		 would trip an assert.  */
	      HOST_WIDE_INT offset = tree_to_shwi (op01);
	      tree part_width = TYPE_SIZE (type);
	      /* Element width in bytes; byte offset converted to a bit
		 position for the BIT_FIELD_REF.  */
	      unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	      tree index = bitsize_int (indexi);

	      /* Only fold when the offset stays inside the vector.  */
	      if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
		return fold_build3_loc (loc,
					BIT_FIELD_REF, type, op00,
					part_width, index);

	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      /* The imaginary part lives exactly one element size past
		 the real part.  */
	      tree size = TYPE_SIZE_UNIT (type);
	      if (tree_int_cst_equal (size, op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      /* Convert the byte offset into an element index (the
		 division must be exact) and bias by the lower bound.  */
	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
				     TYPE_SIZE_UNIT (type));
	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
				 NULL_TREE, NULL_TREE);
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      /* Recursively fold the inner dereference, then index the array
	 at its lower bound.  */
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  /* No simplification applies.  */
  return NULL_TREE;
}
16792 
16793 /* Builds an expression for an indirection through T, simplifying some
16794    cases.  */
16795 
16796 tree
build_fold_indirect_ref_loc(location_t loc,tree t)16797 build_fold_indirect_ref_loc (location_t loc, tree t)
16798 {
16799   tree type = TREE_TYPE (TREE_TYPE (t));
16800   tree sub = fold_indirect_ref_1 (loc, type, t);
16801 
16802   if (sub)
16803     return sub;
16804 
16805   return build1_loc (loc, INDIRECT_REF, type, t);
16806 }
16807 
16808 /* Given an INDIRECT_REF T, return either T or a simplified version.  */
16809 
16810 tree
fold_indirect_ref_loc(location_t loc,tree t)16811 fold_indirect_ref_loc (location_t loc, tree t)
16812 {
16813   tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16814 
16815   if (sub)
16816     return sub;
16817   else
16818     return t;
16819 }
16820 
16821 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16822    whose result is ignored.  The type of the returned tree need not be
16823    the same as the original expression.  */
16824 
16825 tree
fold_ignored_result(tree t)16826 fold_ignored_result (tree t)
16827 {
16828   if (!TREE_SIDE_EFFECTS (t))
16829     return integer_zero_node;
16830 
16831   for (;;)
16832     switch (TREE_CODE_CLASS (TREE_CODE (t)))
16833       {
16834       case tcc_unary:
16835 	t = TREE_OPERAND (t, 0);
16836 	break;
16837 
16838       case tcc_binary:
16839       case tcc_comparison:
16840 	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16841 	  t = TREE_OPERAND (t, 0);
16842 	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16843 	  t = TREE_OPERAND (t, 1);
16844 	else
16845 	  return t;
16846 	break;
16847 
16848       case tcc_expression:
16849 	switch (TREE_CODE (t))
16850 	  {
16851 	  case COMPOUND_EXPR:
16852 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16853 	      return t;
16854 	    t = TREE_OPERAND (t, 0);
16855 	    break;
16856 
16857 	  case COND_EXPR:
16858 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16859 		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16860 	      return t;
16861 	    t = TREE_OPERAND (t, 0);
16862 	    break;
16863 
16864 	  default:
16865 	    return t;
16866 	  }
16867 	break;
16868 
16869       default:
16870 	return t;
16871       }
16872 }
16873 
16874 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16875    This can only be applied to objects of a sizetype.  */
16876 
tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  /* Everything is a multiple of 1.  */
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  /* Constant operand: round the double_int representation
	     directly at compile time.  */
	  double_int val = tree_to_double_int (value);
	  bool overflow_p;

	  /* Already aligned: nothing to do.  */
	  if ((val.low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  /* Round up by clearing the low bits and adding DIVISOR.  A
	     carry out of the low word propagates into the high word;
	     the high word wrapping to zero marks an overflow.  */
	  val.low &= ~(divisor - 1);
	  val.low += divisor;
	  if (val.low == 0)
	    {
	      val.high++;
	      if (val.high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), val,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  /* Build (VALUE + (DIVISOR - 1)) & -DIVISOR.  */
	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      /* General divisor: CEIL_DIV followed by MULT.  */
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
16942 
16943 /* Likewise, but round down.  */
16944 
16945 tree
round_down_loc(location_t loc,tree value,int divisor)16946 round_down_loc (location_t loc, tree value, int divisor)
16947 {
16948   tree div = NULL_TREE;
16949 
16950   gcc_assert (divisor > 0);
16951   if (divisor == 1)
16952     return value;
16953 
16954   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16955      have to do anything.  Only do this when we are not given a const,
16956      because in that case, this check is more expensive than just
16957      doing it.  */
16958   if (TREE_CODE (value) != INTEGER_CST)
16959     {
16960       div = build_int_cst (TREE_TYPE (value), divisor);
16961 
16962       if (multiple_of_p (TREE_TYPE (value), value, div))
16963 	return value;
16964     }
16965 
16966   /* If divisor is a power of two, simplify this to bit manipulation.  */
16967   if (divisor == (divisor & -divisor))
16968     {
16969       tree t;
16970 
16971       t = build_int_cst (TREE_TYPE (value), -divisor);
16972       value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16973     }
16974   else
16975     {
16976       if (!div)
16977 	div = build_int_cst (TREE_TYPE (value), divisor);
16978       value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16979       value = size_binop_loc (loc, MULT_EXPR, value, div);
16980     }
16981 
16982   return value;
16983 }
16984 
16985 /* Returns the pointer to the base of the object addressed by EXP and
16986    extracts the information about the offset of the access, storing it
16987    to PBITPOS and POFFSET.  */
16988 
16989 static tree
split_address_to_core_and_offset(tree exp,HOST_WIDE_INT * pbitpos,tree * poffset)16990 split_address_to_core_and_offset (tree exp,
16991 				  HOST_WIDE_INT *pbitpos, tree *poffset)
16992 {
16993   tree core;
16994   enum machine_mode mode;
16995   int unsignedp, volatilep;
16996   HOST_WIDE_INT bitsize;
16997   location_t loc = EXPR_LOCATION (exp);
16998 
16999   if (TREE_CODE (exp) == ADDR_EXPR)
17000     {
17001       core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
17002 				  poffset, &mode, &unsignedp, &volatilep,
17003 				  false);
17004       core = build_fold_addr_expr_loc (loc, core);
17005     }
17006   else
17007     {
17008       core = exp;
17009       *pbitpos = 0;
17010       *poffset = NULL_TREE;
17011     }
17012 
17013   return core;
17014 }
17015 
17016 /* Returns true if addresses of E1 and E2 differ by a constant, false
17017    otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
17018 
bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  /* Decompose each address into base object, constant bit position,
     and (possibly NULL) variable byte offset.  */
  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  /* The difference is constant only if both accesses are byte-aligned
     and refer to the same base object.  */
  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      /* Both have variable offsets: their difference must fold to a
	 constant that fits in a HOST_WIDE_INT.  */
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  /* Fold in the difference of the constant parts, in bytes.  */
  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
17058 
17059 /* Simplify the floating point expression EXP when the sign of the
17060    result is not significant.  Return NULL_TREE if no simplification
17061    is possible.  */
17062 
tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      /* abs(x) and -x equal x up to sign: drop the operation and keep
	 stripping inside the operand.  */
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      /* With sign-dependent rounding the sign of the operands affects
	 the magnitude of the result, so we must not touch them.  */
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      /* Strip sign ops from both operands; rebuild only if at least
	 one of them changed.  */
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      /* Only the second operand contributes the value; the first is
	 kept as-is for its side effects.  */
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      /* Strip sign ops from both value arms; the condition is left
	 untouched.  */
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
			    COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
			    arg0 ? arg0 : TREE_OPERAND (exp, 1),
			    arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	  /* Strip copysign function call, return the 1st argument. */
	  arg0 = CALL_EXPR_ARG (exp, 0);
	  arg1 = CALL_EXPR_ARG (exp, 1);
	  /* omit_one_operand keeps ARG1 only for its side effects.  */
	  return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	default:
	  /* Strip sign ops from the argument of "odd" math functions.  */
	  if (negate_mathfn_p (fcode))
            {
	      arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
	      if (arg0)
		return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
	    }
	  break;
	}
      }
      break;

    default:
      break;
    }
  /* No sign-only operation found to strip.  */
  return NULL_TREE;
}
17134