/* Fold a constant sub-tree into a single node for the C compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not, see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc != UNKNOWN_LOCATION ? tloc : loc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}


/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

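/* For illustration (not part of the original sources): with 8-bit
   2's complement values, 100 + 100 wraps to -56.  Both operands are
   positive, so a ^ b has a clear sign bit and ~(a ^ b) has it set,
   while a ^ sum has the sign bit set because a and sum differ in sign;
   the conjunction is negative, so overflow is detected.  For
   100 + (-100) = 0 the operands differ in sign, ~(a ^ b) has a clear
   sign bit, and no overflow is reported.  */
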
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is taken from operand two, which does
     the correct thing for POINTER_PLUS_EXPR, where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
			   tree_to_double_int (arg2),
			   uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}

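/* For illustration (hypothetical operands, not from the original
   sources): with integer constants ARG1 = 12 and ARG2 = 4,
   div_if_zero_remainder (EXACT_DIV_EXPR, arg1, arg2) yields the
   constant 3, since the remainder is zero; with ARG1 = 13 the
   remainder is 1 and the function returns NULL_TREE instead.  */
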
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero, meaning always to use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

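/* A minimal usage sketch of the deferral API (illustrative only, not
   from the original sources):

     fold_defer_overflow_warnings ();
     t = fold (expr);
     ... decide whether T is actually used ...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   Any strict-overflow warning triggered while folding EXPR is buffered
   and only emitted, at STMT's location, if USED_P is true.  */
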
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

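/* For example, BUILT_IN_SIN qualifies because sin(-x) == -sin(x),
   whereas cos(-x) == cos(x), so BUILT_IN_COS is deliberately absent
   from the list above.  The rint family is only odd when the result
   does not depend on the dynamic rounding direction, hence the
   !flag_rounding_math guard.  */
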
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

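/* For illustration: in a 32-bit signed type the only value that
   cannot be negated is INT_MIN (-2147483648), since 2147483648 is
   not representable; the comparison against 1 << (prec - 1) above
   returns false exactly for that bit pattern.  */
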
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	    break;
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
	    return true;
	}
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

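/* For illustration (hypothetical trees): for a signed A with strict
   overflow semantics, negate_expr_p on A + 5 returns true, because
   -(A + 5) can be rewritten as (-5) - A without undefined overflow;
   on an unsigned MULT_EXPR it returns false, since that case breaks
   out of the switch above.  */
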
/* Given T, an expression, return a folded tree for -T, or NULL_TREE if
   no simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
			    fold_negate_expr (loc, TREE_OPERAND (t, 0)),
			    fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
			    fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				  TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
          tem = TREE_OPERAND (t, 0);
	  if ((INTEGRAL_TYPE_P (type)
	       && (TREE_CODE (tem) == NEGATE_EXPR
		   || (TREE_CODE (tem) == INTEGER_CST
		       && may_negate_without_overflow_p (tem))))
	      || !INTEGRAL_TYPE_P (type))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

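/* For illustration of the RSHIFT_EXPR case above: on a 32-bit int,
   (int) x >> 31 evaluates to 0 or -1 depending on the sign bit, so
   its negation is 0 or 1, which is exactly (unsigned) x >> 31; the
   transformation merely flips the signedness of the shift.  */
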
/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

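/* A worked example (hypothetical trees): splitting IN = A - 3 with
   CODE == PLUS_EXPR decomposes the MINUS_EXPR, placing the literal 3
   in *MINUS_LITP (it was subtracted), leaving *CONP null and
   returning A as the variable part.  With NEGATE_P set, the literal
   moves back to *LITP and the returned variable part becomes -A.  */
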
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
			       TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
			       TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
				TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
				TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
			     &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
		  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
			     &res.low, &res.high);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    op1.low += op2.low - 1;

	  res.low = op1.low / op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
	return NULL_TREE;
      if (double_int_one_p (op2))
	{
	  res = op1;
	  break;
	}
      if (double_int_equal_p (op1, op2)
	  && ! double_int_zero_p (op1))
	{
	  res = double_int_one;
	  break;
	}
      overflow = div_and_round_double (code, uns,
				       op1.low, op1.high, op2.low, op2.high,
				       &res.low, &res.high,
				       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    op1.low += op2.low - 1;
	  res.low = op1.low % op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       op1.low, op1.high, op2.low, op2.high,
				       &tmp.low, &tmp.high,
				       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
			     ((!uns || is_sizetype) && overflow)
			     | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

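/* For illustration (hypothetical arguments): int_const_binop
   (PLUS_EXPR, build_int_cst (integer_type_node, 2),
   build_int_cst (integer_type_node, 3)) folds to the INTEGER_CST 5;
   a division by the zero constant falls through to the
   ROUND_DIV_EXPR checks above and yields NULL_TREE.  */
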
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return NULL_TREE
   if combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math is set.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

        default:
	  return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ... */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	  {
	    /* Keep this algorithm in sync with
	       tree-complex.c:expand_complex_div_straight().

	       Expand complex division to scalars, straightforward algorithm.
	       a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
	       t = br*br + bi*bi
	    */
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2),
			     const_binop (MULT_EXPR, i2, i2));
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2),
			     const_binop (MULT_EXPR, i1, i2));
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2),
			     const_binop (MULT_EXPR, r1, i2));

	    real = const_binop (code, t1, magsquared);
	    imag = const_binop (code, t2, magsquared);
	  }
	  else
	  {
	    /* Keep this algorithm in sync with
               tree-complex.c:expand_complex_div_wide().

	       Expand complex division to scalars, modified algorithm to minimize
	       overflow with wide input ranges.  */
	    tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					fold_abs_const (r2, TREE_TYPE (type)),
					fold_abs_const (i2, TREE_TYPE (type)));

	    if (integer_nonzerop (compare))
	      {
		/* In the TRUE branch, we compute
		   ratio = br/bi;
		   div = (br * ratio) + bi;
		   tr = (ar * ratio) + ai;
		   ti = (ai * ratio) - ar;
		   tr = tr / div;
		   ti = ti / div;  */
		tree ratio = const_binop (code, r2, i2);
		tree div = const_binop (PLUS_EXPR, i2,
					const_binop (MULT_EXPR, r2, ratio));
		real = const_binop (MULT_EXPR, r1, ratio);
		real = const_binop (PLUS_EXPR, real, i1);
		real = const_binop (code, real, div);

		imag = const_binop (MULT_EXPR, i1, ratio);
		imag = const_binop (MINUS_EXPR, imag, r1);
		imag = const_binop (code, imag, div);
	      }
	    else
	      {
		/* In the FALSE branch, we compute
		   ratio = bi/br;
		   div = (bi * ratio) + br;
		   tr = (ai * ratio) + ar;
		   ti = ai - (ar * ratio);
		   tr = tr / div;
		   ti = ti / div;  */
		tree ratio = const_binop (code, i2, r2);
		tree div = const_binop (PLUS_EXPR, r2,
                                        const_binop (MULT_EXPR, i2, ratio));

		real = const_binop (MULT_EXPR, i1, ratio);
		real = const_binop (PLUS_EXPR, real, r1);
		real = const_binop (code, real, div);

		imag = const_binop (MULT_EXPR, r1, ratio);
		imag = const_binop (MINUS_EXPR, i1, imag);
		imag = const_binop (code, imag, div);
	      }
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
	{
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
	}
      return build_vector (type, nreverse (list));
    }
  return NULL_TREE;
}

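/* A worked example for the flag_complex_method == 0 path above
   (illustrative values): (1 + 2i) / (3 + 4i) computes
   t = 3*3 + 4*4 = 25, t1 = 1*3 + 2*4 = 11, t2 = 2*3 - 1*4 = 2,
   giving 11/25 + (2/25)i, which matches the textbook quotient.  */
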
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

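/* For illustration: size_binop (PLUS_EXPR, size_int (4), size_int (8))
   folds immediately to the sizetype constant 12 through
   int_const_binop, while size_binop (PLUS_EXPR, size_zero_node, c)
   returns C unchanged via the fast path above when C is an integer
   constant.  */
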
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in the signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}

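/* For illustration: with sizetype constants ARG0 = 3 and ARG1 = 5,
   size_diffop computes 5 - 3 the "other way", converts the result 2
   to ssizetype and negates it, yielding the ssizetype constant -2
   without ever relying on unsigned wrap-around.  */
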
1521 /* A subroutine of fold_convert_const handling conversions of an
1522    INTEGER_CST to another integer type.  */
1523 
1524 static tree
1525 fold_convert_const_int_from_int (tree type, const_tree arg1)
1526 {
1527   tree t;
1528 
1529   /* Given an integer constant, make new constant with new type,
1530      appropriately sign-extended or truncated.  */
1531   t = force_fit_type_double (type, tree_to_double_int (arg1),
1532 			     !POINTER_TYPE_P (TREE_TYPE (arg1)),
1533 			     (TREE_INT_CST_HIGH (arg1) < 0
1534 		 	      && (TYPE_UNSIGNED (type)
1535 				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1536 			     | TREE_OVERFLOW (arg1));
1537 
1538   return t;
1539 }
1540 
1541 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1542    to an integer type.  */
1543 
1544 static tree
1545 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1546 {
1547   int overflow = 0;
1548   tree t;
1549 
1550   /* The following code implements the floating point to integer
1551      conversion rules required by the Java Language Specification,
1552      that IEEE NaNs are mapped to zero and values that overflow
1553      the target precision saturate, i.e. values greater than
1554      INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1555      are mapped to INT_MIN.  These semantics are allowed by the
1556      C and C++ standards that simply state that the behavior of
1557      FP-to-integer conversion is unspecified upon overflow.  */
1558 
1559   double_int val;
1560   REAL_VALUE_TYPE r;
1561   REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1562 
1563   switch (code)
1564     {
1565     case FIX_TRUNC_EXPR:
1566       real_trunc (&r, VOIDmode, &x);
1567       break;
1568 
1569     default:
1570       gcc_unreachable ();
1571     }
1572 
1573   /* If R is NaN, return zero and show we have an overflow.  */
1574   if (REAL_VALUE_ISNAN (r))
1575     {
1576       overflow = 1;
1577       val = double_int_zero;
1578     }
1579 
1580   /* See if R is less than the lower bound or greater than the
1581      upper bound.  */
1582 
1583   if (! overflow)
1584     {
1585       tree lt = TYPE_MIN_VALUE (type);
1586       REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1587       if (REAL_VALUES_LESS (r, l))
1588 	{
1589 	  overflow = 1;
1590 	  val = tree_to_double_int (lt);
1591 	}
1592     }
1593 
1594   if (! overflow)
1595     {
1596       tree ut = TYPE_MAX_VALUE (type);
1597       if (ut)
1598 	{
1599 	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1600 	  if (REAL_VALUES_LESS (u, r))
1601 	    {
1602 	      overflow = 1;
1603 	      val = tree_to_double_int (ut);
1604 	    }
1605 	}
1606     }
1607 
1608   if (! overflow)
1609     real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1610 
1611   t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1612   return t;
1613 }
1614 
1615 /* A subroutine of fold_convert_const handling conversions of a
1616    FIXED_CST to an integer type.  */
1617 
1618 static tree
1619 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1620 {
1621   tree t;
1622   double_int temp, temp_trunc;
1623   unsigned int mode;
1624 
1625   /* Right shift FIXED_CST to temp by fbit.  */
1626   temp = TREE_FIXED_CST (arg1).data;
1627   mode = TREE_FIXED_CST (arg1).mode;
1628   if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
1629     {
1630       temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
1631 			        HOST_BITS_PER_DOUBLE_INT,
1632 			        SIGNED_FIXED_POINT_MODE_P (mode));
1633 
1634       /* Left shift temp to temp_trunc by fbit.  */
1635       temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
1636 				      HOST_BITS_PER_DOUBLE_INT,
1637 				      SIGNED_FIXED_POINT_MODE_P (mode));
1638     }
1639   else
1640     {
1641       temp = double_int_zero;
1642       temp_trunc = double_int_zero;
1643     }
1644 
1645   /* If FIXED_CST is negative, we need to round the value toward 0.
1646      By checking if the fractional bits are not zero to add 1 to temp.  */
1647   if (SIGNED_FIXED_POINT_MODE_P (mode)
1648       && double_int_negative_p (temp_trunc)
1649       && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
1650     temp = double_int_add (temp, double_int_one);
1651 
1652   /* Given a fixed-point constant, make new constant with new type,
1653      appropriately sign-extended or truncated.  */
1654   t = force_fit_type_double (type, temp, -1,
1655 			     (double_int_negative_p (temp)
1656 		 	      && (TYPE_UNSIGNED (type)
1657 				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1658 			     | TREE_OVERFLOW (arg1));
1659 
1660   return t;
1661 }
1662 
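/* Worked example of the rounding above (illustrative): for a signed
   fixed-point constant -2.5, the arithmetic right shift by fbit
   rounds toward negative infinity and leaves -3 in temp.  Shifting
   back gives temp_trunc == -3.0, which differs from the original
   -2.5, so 1 is added and the result is -2, i.e. the conversion
   truncates toward zero as required.  */
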
1663 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1664    to another floating point type.  */
1665 
1666 static tree
1667 fold_convert_const_real_from_real (tree type, const_tree arg1)
1668 {
1669   REAL_VALUE_TYPE value;
1670   tree t;
1671 
1672   real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1673   t = build_real (type, value);
1674 
1675   /* If converting an infinity or NAN to a representation that doesn't
1676      have one, set the overflow bit so that we can produce some kind of
1677      error message at the appropriate point if necessary.  It's not the
1678      most user-friendly message, but it's better than nothing.  */
1679   if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1680       && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1681     TREE_OVERFLOW (t) = 1;
1682   else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1683 	   && !MODE_HAS_NANS (TYPE_MODE (type)))
1684     TREE_OVERFLOW (t) = 1;
1685   /* Regular overflow, conversion produced an infinity in a mode that
1686      can't represent them.  */
1687   else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1688 	   && REAL_VALUE_ISINF (value)
1689 	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1690     TREE_OVERFLOW (t) = 1;
1691   else
1692     TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1693   return t;
1694 }
1695 
1696 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1697    to a floating point type.  */
1698 
1699 static tree
1700 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1701 {
1702   REAL_VALUE_TYPE value;
1703   tree t;
1704 
1705   real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1706   t = build_real (type, value);
1707 
1708   TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1709   return t;
1710 }
1711 
1712 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1713    to another fixed-point type.  */
1714 
1715 static tree
1716 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1717 {
1718   FIXED_VALUE_TYPE value;
1719   tree t;
1720   bool overflow_p;
1721 
1722   overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1723 			      TYPE_SATURATING (type));
1724   t = build_fixed (type, value);
1725 
1726   /* Propagate overflow flags.  */
1727   if (overflow_p | TREE_OVERFLOW (arg1))
1728     TREE_OVERFLOW (t) = 1;
1729   return t;
1730 }
1731 
1732 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1733    to a fixed-point type.  */
1734 
1735 static tree
1736 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1737 {
1738   FIXED_VALUE_TYPE value;
1739   tree t;
1740   bool overflow_p;
1741 
1742   overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1743 				       TREE_INT_CST (arg1),
1744 				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
1745 				       TYPE_SATURATING (type));
1746   t = build_fixed (type, value);
1747 
1748   /* Propagate overflow flags.  */
1749   if (overflow_p | TREE_OVERFLOW (arg1))
1750     TREE_OVERFLOW (t) = 1;
1751   return t;
1752 }
1753 
1754 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1755    to a fixed-point type.  */
1756 
1757 static tree
1758 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1759 {
1760   FIXED_VALUE_TYPE value;
1761   tree t;
1762   bool overflow_p;
1763 
1764   overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1765 					&TREE_REAL_CST (arg1),
1766 					TYPE_SATURATING (type));
1767   t = build_fixed (type, value);
1768 
1769   /* Propagate overflow flags.  */
1770   if (overflow_p | TREE_OVERFLOW (arg1))
1771     TREE_OVERFLOW (t) = 1;
1772   return t;
1773 }
1774 
1775 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1776    type TYPE.  If no simplification can be done return NULL_TREE.  */
1777 
1778 static tree
1779 fold_convert_const (enum tree_code code, tree type, tree arg1)
1780 {
1781   if (TREE_TYPE (arg1) == type)
1782     return arg1;
1783 
1784   if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1785       || TREE_CODE (type) == OFFSET_TYPE)
1786     {
1787       if (TREE_CODE (arg1) == INTEGER_CST)
1788 	return fold_convert_const_int_from_int (type, arg1);
1789       else if (TREE_CODE (arg1) == REAL_CST)
1790 	return fold_convert_const_int_from_real (code, type, arg1);
1791       else if (TREE_CODE (arg1) == FIXED_CST)
1792 	return fold_convert_const_int_from_fixed (type, arg1);
1793     }
1794   else if (TREE_CODE (type) == REAL_TYPE)
1795     {
1796       if (TREE_CODE (arg1) == INTEGER_CST)
1797 	return build_real_from_int_cst (type, arg1);
1798       else if (TREE_CODE (arg1) == REAL_CST)
1799 	return fold_convert_const_real_from_real (type, arg1);
1800       else if (TREE_CODE (arg1) == FIXED_CST)
1801 	return fold_convert_const_real_from_fixed (type, arg1);
1802     }
1803   else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1804     {
1805       if (TREE_CODE (arg1) == FIXED_CST)
1806 	return fold_convert_const_fixed_from_fixed (type, arg1);
1807       else if (TREE_CODE (arg1) == INTEGER_CST)
1808 	return fold_convert_const_fixed_from_int (type, arg1);
1809       else if (TREE_CODE (arg1) == REAL_CST)
1810 	return fold_convert_const_fixed_from_real (type, arg1);
1811     }
1812   return NULL_TREE;
1813 }
1814 
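/* Example use of the dispatch above (a sketch, not from the original
   sources; build_int_cst, integer_type_node and double_type_node are
   the usual GCC tree APIs):

     tree five = build_int_cst (integer_type_node, 5);
     tree d = fold_convert_const (FLOAT_EXPR, double_type_node, five);

   Here d is a REAL_CST with value 5.0, produced by the REAL_TYPE
   branch via build_real_from_int_cst; fold_convert_const returns
   NULL_TREE whenever no constant simplification applies.  */
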
1815 /* Construct a vector of zero elements of vector type TYPE.  */
1816 
1817 static tree
1818 build_zero_vector (tree type)
1819 {
1820   tree t;
1821 
1822   t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1823   return build_vector_from_val (type, t);
1824 }
1825 
1826 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
1827 
1828 bool
1829 fold_convertible_p (const_tree type, const_tree arg)
1830 {
1831   tree orig = TREE_TYPE (arg);
1832 
1833   if (type == orig)
1834     return true;
1835 
1836   if (TREE_CODE (arg) == ERROR_MARK
1837       || TREE_CODE (type) == ERROR_MARK
1838       || TREE_CODE (orig) == ERROR_MARK)
1839     return false;
1840 
1841   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1842     return true;
1843 
1844   switch (TREE_CODE (type))
1845     {
1846     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1847     case POINTER_TYPE: case REFERENCE_TYPE:
1848     case OFFSET_TYPE:
1849       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1850 	  || TREE_CODE (orig) == OFFSET_TYPE)
1851         return true;
1852       return (TREE_CODE (orig) == VECTOR_TYPE
1853 	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1854 
1855     case REAL_TYPE:
1856     case FIXED_POINT_TYPE:
1857     case COMPLEX_TYPE:
1858     case VECTOR_TYPE:
1859     case VOID_TYPE:
1860       return TREE_CODE (type) == TREE_CODE (orig);
1861 
1862     default:
1863       return false;
1864     }
1865 }
1866 
1867 /* Convert expression ARG to type TYPE.  Used by the middle-end for
1868    simple conversions in preference to calling the front-end's convert.  */
1869 
1870 tree
1871 fold_convert_loc (location_t loc, tree type, tree arg)
1872 {
1873   tree orig = TREE_TYPE (arg);
1874   tree tem;
1875 
1876   if (type == orig)
1877     return arg;
1878 
1879   if (TREE_CODE (arg) == ERROR_MARK
1880       || TREE_CODE (type) == ERROR_MARK
1881       || TREE_CODE (orig) == ERROR_MARK)
1882     return error_mark_node;
1883 
1884   switch (TREE_CODE (type))
1885     {
1886     case POINTER_TYPE:
1887     case REFERENCE_TYPE:
1888       /* Handle conversions between pointers to different address spaces.  */
1889       if (POINTER_TYPE_P (orig)
1890 	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1891 	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1892 	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1893       /* fall through */
1894 
1895     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1896     case OFFSET_TYPE:
1897       if (TREE_CODE (arg) == INTEGER_CST)
1898 	{
1899 	  tem = fold_convert_const (NOP_EXPR, type, arg);
1900 	  if (tem != NULL_TREE)
1901 	    return tem;
1902 	}
1903       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1904 	  || TREE_CODE (orig) == OFFSET_TYPE)
1905 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
1906       if (TREE_CODE (orig) == COMPLEX_TYPE)
1907 	return fold_convert_loc (loc, type,
1908 			     fold_build1_loc (loc, REALPART_EXPR,
1909 					  TREE_TYPE (orig), arg));
1910       gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1911 		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1912       return fold_build1_loc (loc, NOP_EXPR, type, arg);
1913 
1914     case REAL_TYPE:
1915       if (TREE_CODE (arg) == INTEGER_CST)
1916 	{
1917 	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
1918 	  if (tem != NULL_TREE)
1919 	    return tem;
1920 	}
1921       else if (TREE_CODE (arg) == REAL_CST)
1922 	{
1923 	  tem = fold_convert_const (NOP_EXPR, type, arg);
1924 	  if (tem != NULL_TREE)
1925 	    return tem;
1926 	}
1927       else if (TREE_CODE (arg) == FIXED_CST)
1928 	{
1929 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1930 	  if (tem != NULL_TREE)
1931 	    return tem;
1932 	}
1933 
1934       switch (TREE_CODE (orig))
1935 	{
1936 	case INTEGER_TYPE:
1937 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1938 	case POINTER_TYPE: case REFERENCE_TYPE:
1939 	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1940 
1941 	case REAL_TYPE:
1942 	  return fold_build1_loc (loc, NOP_EXPR, type, arg);
1943 
1944 	case FIXED_POINT_TYPE:
1945 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1946 
1947 	case COMPLEX_TYPE:
1948 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1949 	  return fold_convert_loc (loc, type, tem);
1950 
1951 	default:
1952 	  gcc_unreachable ();
1953 	}
1954 
1955     case FIXED_POINT_TYPE:
1956       if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1957 	  || TREE_CODE (arg) == REAL_CST)
1958 	{
1959 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1960 	  if (tem != NULL_TREE)
1961 	    goto fold_convert_exit;
1962 	}
1963 
1964       switch (TREE_CODE (orig))
1965 	{
1966 	case FIXED_POINT_TYPE:
1967 	case INTEGER_TYPE:
1968 	case ENUMERAL_TYPE:
1969 	case BOOLEAN_TYPE:
1970 	case REAL_TYPE:
1971 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1972 
1973 	case COMPLEX_TYPE:
1974 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1975 	  return fold_convert_loc (loc, type, tem);
1976 
1977 	default:
1978 	  gcc_unreachable ();
1979 	}
1980 
1981     case COMPLEX_TYPE:
1982       switch (TREE_CODE (orig))
1983 	{
1984 	case INTEGER_TYPE:
1985 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1986 	case POINTER_TYPE: case REFERENCE_TYPE:
1987 	case REAL_TYPE:
1988 	case FIXED_POINT_TYPE:
1989 	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
1990 			      fold_convert_loc (loc, TREE_TYPE (type), arg),
1991 			      fold_convert_loc (loc, TREE_TYPE (type),
1992 					    integer_zero_node));
1993 	case COMPLEX_TYPE:
1994 	  {
1995 	    tree rpart, ipart;
1996 
1997 	    if (TREE_CODE (arg) == COMPLEX_EXPR)
1998 	      {
1999 		rpart = fold_convert_loc (loc, TREE_TYPE (type),
2000 				      TREE_OPERAND (arg, 0));
2001 		ipart = fold_convert_loc (loc, TREE_TYPE (type),
2002 				      TREE_OPERAND (arg, 1));
2003 		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2004 	      }
2005 
2006 	    arg = save_expr (arg);
2007 	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2008 	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2009 	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2010 	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2011 	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2012 	  }
2013 
2014 	default:
2015 	  gcc_unreachable ();
2016 	}
2017 
2018     case VECTOR_TYPE:
2019       if (integer_zerop (arg))
2020 	return build_zero_vector (type);
2021       gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2022       gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2023 		  || TREE_CODE (orig) == VECTOR_TYPE);
2024       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2025 
2026     case VOID_TYPE:
2027       tem = fold_ignored_result (arg);
2028       return fold_build1_loc (loc, NOP_EXPR, type, tem);
2029 
2030     default:
2031       if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2032 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2033       gcc_unreachable ();
2034     }
2035  fold_convert_exit:
2036   protected_set_expr_location_unshare (tem, loc);
2037   return tem;
2038 }
2039 
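/* Illustrative examples of the conversions above (not from the
   original sources):

     - converting _Complex double C to double keeps the real part:
       the COMPLEX_TYPE source case builds REALPART_EXPR <C> and
       converts that;
     - converting integer 0 to a vector type yields a zero vector
       via build_zero_vector;
     - converting between pointers to different address spaces uses
       ADDR_SPACE_CONVERT_EXPR instead of a plain NOP_EXPR.  */
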
2040 /* Return false if expr can be assumed not to be an lvalue, true
2041    otherwise.  */
2042 
2043 static bool
2044 maybe_lvalue_p (const_tree x)
2045 {
2046   /* We only need to wrap lvalue tree codes.  */
2047   switch (TREE_CODE (x))
2048   {
2049   case VAR_DECL:
2050   case PARM_DECL:
2051   case RESULT_DECL:
2052   case LABEL_DECL:
2053   case FUNCTION_DECL:
2054   case SSA_NAME:
2055 
2056   case COMPONENT_REF:
2057   case MEM_REF:
2058   case INDIRECT_REF:
2059   case ARRAY_REF:
2060   case ARRAY_RANGE_REF:
2061   case BIT_FIELD_REF:
2062   case OBJ_TYPE_REF:
2063 
2064   case REALPART_EXPR:
2065   case IMAGPART_EXPR:
2066   case PREINCREMENT_EXPR:
2067   case PREDECREMENT_EXPR:
2068   case SAVE_EXPR:
2069   case TRY_CATCH_EXPR:
2070   case WITH_CLEANUP_EXPR:
2071   case COMPOUND_EXPR:
2072   case MODIFY_EXPR:
2073   case TARGET_EXPR:
2074   case COND_EXPR:
2075   case BIND_EXPR:
2076     break;
2077 
2078   default:
2079     /* Assume the worst for front-end tree codes.  */
2080     if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2081       break;
2082     return false;
2083   }
2084 
2085   return true;
2086 }
2087 
2088 /* Return an expr equal to X but certainly not valid as an lvalue.  */
2089 
2090 tree
2091 non_lvalue_loc (location_t loc, tree x)
2092 {
2093   /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2094      us.  */
2095   if (in_gimple_form)
2096     return x;
2097 
2098   if (! maybe_lvalue_p (x))
2099     return x;
2100   return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2101 }
2102 
2103 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2104    Zero means allow extended lvalues.  */
2105 
2106 int pedantic_lvalues;
2107 
2108 /* When pedantic, return an expr equal to X but certainly not valid as a
2109    pedantic lvalue.  Otherwise, return X.  */
2110 
2111 static tree
2112 pedantic_non_lvalue_loc (location_t loc, tree x)
2113 {
2114   if (pedantic_lvalues)
2115     return non_lvalue_loc (loc, x);
2116 
2117   return protected_set_expr_location_unshare (x, loc);
2118 }
2119 
2120 /* Given a tree comparison code, return the code that is the logical inverse.
2121    It is generally not safe to do this for floating-point comparisons, except
2122    for EQ_EXPR and NE_EXPR, so we return ERROR_MARK in this case.  */
2123 
2124 enum tree_code
2125 invert_tree_comparison (enum tree_code code, bool honor_nans)
2126 {
2127   if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR)
2128     return ERROR_MARK;
2129 
2130   switch (code)
2131     {
2132     case EQ_EXPR:
2133       return NE_EXPR;
2134     case NE_EXPR:
2135       return EQ_EXPR;
2136     case GT_EXPR:
2137       return honor_nans ? UNLE_EXPR : LE_EXPR;
2138     case GE_EXPR:
2139       return honor_nans ? UNLT_EXPR : LT_EXPR;
2140     case LT_EXPR:
2141       return honor_nans ? UNGE_EXPR : GE_EXPR;
2142     case LE_EXPR:
2143       return honor_nans ? UNGT_EXPR : GT_EXPR;
2144     case LTGT_EXPR:
2145       return UNEQ_EXPR;
2146     case UNEQ_EXPR:
2147       return LTGT_EXPR;
2148     case UNGT_EXPR:
2149       return LE_EXPR;
2150     case UNGE_EXPR:
2151       return LT_EXPR;
2152     case UNLT_EXPR:
2153       return GE_EXPR;
2154     case UNLE_EXPR:
2155       return GT_EXPR;
2156     case ORDERED_EXPR:
2157       return UNORDERED_EXPR;
2158     case UNORDERED_EXPR:
2159       return ORDERED_EXPR;
2160     default:
2161       gcc_unreachable ();
2162     }
2163 }
2164 
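/* Worked examples (illustrative): with honor_nans set,

     invert (LT_EXPR) -> UNGE_EXPR
     invert (GT_EXPR) -> UNLE_EXPR

   since !(a < b) also holds when a and b compare unordered; without
   NaNs the plain GE_EXPR/LE_EXPR inverses are used.  When both
   honor_nans and flag_trapping_math are set, only EQ_EXPR and
   NE_EXPR are inverted; the other codes return ERROR_MARK because
   the inverse would raise exceptions on different inputs.  */
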
2165 /* Similar, but return the comparison that results if the operands are
2166    swapped.  This is safe for floating-point.  */
2167 
2168 enum tree_code
2169 swap_tree_comparison (enum tree_code code)
2170 {
2171   switch (code)
2172     {
2173     case EQ_EXPR:
2174     case NE_EXPR:
2175     case ORDERED_EXPR:
2176     case UNORDERED_EXPR:
2177     case LTGT_EXPR:
2178     case UNEQ_EXPR:
2179       return code;
2180     case GT_EXPR:
2181       return LT_EXPR;
2182     case GE_EXPR:
2183       return LE_EXPR;
2184     case LT_EXPR:
2185       return GT_EXPR;
2186     case LE_EXPR:
2187       return GE_EXPR;
2188     case UNGT_EXPR:
2189       return UNLT_EXPR;
2190     case UNGE_EXPR:
2191       return UNLE_EXPR;
2192     case UNLT_EXPR:
2193       return UNGT_EXPR;
2194     case UNLE_EXPR:
2195       return UNGE_EXPR;
2196     default:
2197       gcc_unreachable ();
2198     }
2199 }
2200 
2201 
2202 /* Convert a comparison tree code from an enum tree_code representation
2203    into a compcode bit-based encoding.  This function is the inverse of
2204    compcode_to_comparison.  */
2205 
2206 static enum comparison_code
2207 comparison_to_compcode (enum tree_code code)
2208 {
2209   switch (code)
2210     {
2211     case LT_EXPR:
2212       return COMPCODE_LT;
2213     case EQ_EXPR:
2214       return COMPCODE_EQ;
2215     case LE_EXPR:
2216       return COMPCODE_LE;
2217     case GT_EXPR:
2218       return COMPCODE_GT;
2219     case NE_EXPR:
2220       return COMPCODE_NE;
2221     case GE_EXPR:
2222       return COMPCODE_GE;
2223     case ORDERED_EXPR:
2224       return COMPCODE_ORD;
2225     case UNORDERED_EXPR:
2226       return COMPCODE_UNORD;
2227     case UNLT_EXPR:
2228       return COMPCODE_UNLT;
2229     case UNEQ_EXPR:
2230       return COMPCODE_UNEQ;
2231     case UNLE_EXPR:
2232       return COMPCODE_UNLE;
2233     case UNGT_EXPR:
2234       return COMPCODE_UNGT;
2235     case LTGT_EXPR:
2236       return COMPCODE_LTGT;
2237     case UNGE_EXPR:
2238       return COMPCODE_UNGE;
2239     default:
2240       gcc_unreachable ();
2241     }
2242 }
2243 
2244 /* Convert a compcode bit-based encoding of a comparison operator back
2245    to GCC's enum tree_code representation.  This function is the
2246    inverse of comparison_to_compcode.  */
2247 
2248 static enum tree_code
2249 compcode_to_comparison (enum comparison_code code)
2250 {
2251   switch (code)
2252     {
2253     case COMPCODE_LT:
2254       return LT_EXPR;
2255     case COMPCODE_EQ:
2256       return EQ_EXPR;
2257     case COMPCODE_LE:
2258       return LE_EXPR;
2259     case COMPCODE_GT:
2260       return GT_EXPR;
2261     case COMPCODE_NE:
2262       return NE_EXPR;
2263     case COMPCODE_GE:
2264       return GE_EXPR;
2265     case COMPCODE_ORD:
2266       return ORDERED_EXPR;
2267     case COMPCODE_UNORD:
2268       return UNORDERED_EXPR;
2269     case COMPCODE_UNLT:
2270       return UNLT_EXPR;
2271     case COMPCODE_UNEQ:
2272       return UNEQ_EXPR;
2273     case COMPCODE_UNLE:
2274       return UNLE_EXPR;
2275     case COMPCODE_UNGT:
2276       return UNGT_EXPR;
2277     case COMPCODE_LTGT:
2278       return LTGT_EXPR;
2279     case COMPCODE_UNGE:
2280       return UNGE_EXPR;
2281     default:
2282       gcc_unreachable ();
2283     }
2284 }
2285 
2286 /* Return a tree for the comparison which is the combination of
2287    doing the AND or OR (depending on CODE) of the two operations LCODE
2288    and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2289    the possibility of trapping if the mode has NaNs, and return NULL_TREE
2290    if this makes the transformation invalid.  */
2291 
2292 tree
2293 combine_comparisons (location_t loc,
2294 		     enum tree_code code, enum tree_code lcode,
2295 		     enum tree_code rcode, tree truth_type,
2296 		     tree ll_arg, tree lr_arg)
2297 {
2298   bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2299   enum comparison_code lcompcode = comparison_to_compcode (lcode);
2300   enum comparison_code rcompcode = comparison_to_compcode (rcode);
2301   int compcode;
2302 
2303   switch (code)
2304     {
2305     case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2306       compcode = lcompcode & rcompcode;
2307       break;
2308 
2309     case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2310       compcode = lcompcode | rcompcode;
2311       break;
2312 
2313     default:
2314       return NULL_TREE;
2315     }
2316 
2317   if (!honor_nans)
2318     {
2319       /* Eliminate unordered comparisons, as well as LTGT and ORD
2320 	 which are not used unless the mode has NaNs.  */
2321       compcode &= ~COMPCODE_UNORD;
2322       if (compcode == COMPCODE_LTGT)
2323 	compcode = COMPCODE_NE;
2324       else if (compcode == COMPCODE_ORD)
2325 	compcode = COMPCODE_TRUE;
2326     }
2327    else if (flag_trapping_math)
2328      {
2329 	/* Check that the original operation and the optimized ones will trap
2330 	   under the same condition.  */
2331 	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2332 		     && (lcompcode != COMPCODE_EQ)
2333 		     && (lcompcode != COMPCODE_ORD);
2334 	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2335 		     && (rcompcode != COMPCODE_EQ)
2336 		     && (rcompcode != COMPCODE_ORD);
2337 	bool trap = (compcode & COMPCODE_UNORD) == 0
2338 		    && (compcode != COMPCODE_EQ)
2339 		    && (compcode != COMPCODE_ORD);
2340 
2341         /* In a short-circuited boolean expression the LHS might be
2342 	   such that the RHS, if evaluated, will never trap.  For
2343 	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
2344 	   if neither x nor y is NaN.  (This is a mixed blessing: for
2345 	   example, the expression above will never trap, hence
2346 	   optimizing it to x < y would be invalid).  */
2347         if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2348             || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2349           rtrap = false;
2350 
2351         /* If the comparison was short-circuited, and only the RHS
2352 	   trapped, we may now generate a spurious trap.  */
2353 	if (rtrap && !ltrap
2354 	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2355 	  return NULL_TREE;
2356 
2357 	/* If we changed the conditions that cause a trap, we lose.  */
2358 	if ((ltrap || rtrap) != trap)
2359 	  return NULL_TREE;
2360       }
2361 
2362   if (compcode == COMPCODE_TRUE)
2363     return constant_boolean_node (true, truth_type);
2364   else if (compcode == COMPCODE_FALSE)
2365     return constant_boolean_node (false, truth_type);
2366   else
2367     {
2368       enum tree_code tcode;
2369 
2370       tcode = compcode_to_comparison ((enum comparison_code) compcode);
2371       return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2372     }
2373 }
2374 
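/* Worked example of the bit-based combination above (illustrative):
   for (x < y) && (x > y) on identical operands, ANDing the LT and GT
   encodings leaves no bits set, so compcode is COMPCODE_FALSE and
   the expression folds to constant false.  For (x < y) || (x == y),
   ORing the LT and EQ encodings gives exactly COMPCODE_LE, so the
   result folds to x <= y.  */
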
2375 /* Return nonzero if two operands (typically of the same tree node)
2376    are necessarily equal.  If either argument has side-effects this
2377    function returns zero.  FLAGS modifies behavior as follows:
2378 
2379    If OEP_ONLY_CONST is set, only return nonzero for constants.
2380    This function tests whether the operands are indistinguishable;
2381    it does not test whether they are equal using C's == operation.
2382    The distinction is important for IEEE floating point, because
2383    (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2384    (2) two NaNs may be indistinguishable, but NaN!=NaN.
2385 
2386    If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2387    even though it may hold multiple values during a function.
2388    This is because a GCC tree node guarantees that nothing else is
2389    executed between the evaluation of its "operands" (which may often
2390    be evaluated in arbitrary order).  Hence if the operands themselves
2391    don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2392    same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2393    unset means assuming isochronic (or instantaneous) tree equivalence.
2394    Unless comparing arbitrary expression trees, such as from different
2395    statements, this flag can usually be left unset.
2396 
2397    If OEP_PURE_SAME is set, then pure functions with identical arguments
2398    are considered the same.  It is used when the caller has other ways
2399    to ensure that global memory is unchanged in between.  */
2400 
2401 int
2402 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2403 {
2404   /* If either is ERROR_MARK, they aren't equal.  */
2405   if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2406       || TREE_TYPE (arg0) == error_mark_node
2407       || TREE_TYPE (arg1) == error_mark_node)
2408     return 0;
2409 
2410   /* Similar, if either does not have a type (like a released SSA name),
2411      they aren't equal.  */
2412   if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2413     return 0;
2414 
2415   /* Check equality of integer constants before bailing out due to
2416      precision differences.  */
2417   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2418     return tree_int_cst_equal (arg0, arg1);
2419 
2420   /* If both types don't have the same signedness, then we can't consider
2421      them equal.  We must check this before the STRIP_NOPS calls
2422      because they may change the signedness of the arguments.  As pointers
2423      strictly don't have a signedness, require either two pointers or
2424      two non-pointers as well.  */
2425   if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2426       || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2427     return 0;
2428 
2429   /* We cannot consider pointers to different address space equal.  */
2430   if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2431       && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2432 	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2433     return 0;
2434 
2435   /* If both types don't have the same precision, then it is not safe
2436      to strip NOPs.  */
2437   if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2438     return 0;
2439 
2440   STRIP_NOPS (arg0);
2441   STRIP_NOPS (arg1);
2442 
2443   /* In case both args are comparisons but with different comparison
2444      code, try to swap the comparison operands of one arg to produce
2445      a match and compare that variant.  */
2446   if (TREE_CODE (arg0) != TREE_CODE (arg1)
2447       && COMPARISON_CLASS_P (arg0)
2448       && COMPARISON_CLASS_P (arg1))
2449     {
2450       enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2451 
2452       if (TREE_CODE (arg0) == swap_code)
2453 	return operand_equal_p (TREE_OPERAND (arg0, 0),
2454 			        TREE_OPERAND (arg1, 1), flags)
2455 	       && operand_equal_p (TREE_OPERAND (arg0, 1),
2456 				   TREE_OPERAND (arg1, 0), flags);
2457     }
2458 
2459   if (TREE_CODE (arg0) != TREE_CODE (arg1)
2460       /* This is needed for conversions and for COMPONENT_REF.
2461 	 Might as well play it safe and always test this.  */
2462       || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2463       || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2464       || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2465     return 0;
2466 
2467   /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2468      We don't care about side effects in that case because the SAVE_EXPR
2469      takes care of that for us. In all other cases, two expressions are
2470      equal if they have no side effects.  If we have two identical
2471      expressions with side effects that should be treated the same due
2472      to the only side effects being identical SAVE_EXPR's, that will
2473      be detected in the recursive calls below.
2474      If we are taking an invariant address of two identical objects
2475      they are necessarily equal as well.  */
2476   if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2477       && (TREE_CODE (arg0) == SAVE_EXPR
2478 	  || (flags & OEP_CONSTANT_ADDRESS_OF)
2479 	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2480     return 1;
2481 
2482   /* Next handle constant cases, those for which we can return 1 even
2483      if ONLY_CONST is set.  */
2484   if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2485     switch (TREE_CODE (arg0))
2486       {
2487       case INTEGER_CST:
2488 	return tree_int_cst_equal (arg0, arg1);
2489 
2490       case FIXED_CST:
2491 	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2492 				       TREE_FIXED_CST (arg1));
2493 
2494       case REAL_CST:
2495 	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2496 				   TREE_REAL_CST (arg1)))
2497 	  return 1;
2498 
2499 
2500 	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2501 	  {
2502 	    /* If we do not distinguish between signed and unsigned zero,
2503 	       consider them equal.  */
2504 	    if (real_zerop (arg0) && real_zerop (arg1))
2505 	      return 1;
2506 	  }
2507 	return 0;
2508 
2509       case VECTOR_CST:
2510 	{
2511 	  tree v1, v2;
2512 
2513 	  v1 = TREE_VECTOR_CST_ELTS (arg0);
2514 	  v2 = TREE_VECTOR_CST_ELTS (arg1);
2515 	  while (v1 && v2)
2516 	    {
2517 	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2518 				    flags))
2519 		return 0;
2520 	      v1 = TREE_CHAIN (v1);
2521 	      v2 = TREE_CHAIN (v2);
2522 	    }
2523 
2524 	  return v1 == v2;
2525 	}
2526 
2527       case COMPLEX_CST:
2528 	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2529 				 flags)
2530 		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2531 				    flags));
2532 
2533       case STRING_CST:
2534 	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2535 		&& ! memcmp (TREE_STRING_POINTER (arg0),
2536 			      TREE_STRING_POINTER (arg1),
2537 			      TREE_STRING_LENGTH (arg0)));
2538 
2539       case ADDR_EXPR:
2540 	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2541 				TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2542 				? OEP_CONSTANT_ADDRESS_OF : 0);
2543       default:
2544 	break;
2545       }
2546 
2547   if (flags & OEP_ONLY_CONST)
2548     return 0;
2549 
2550 /* Define macros to test an operand from arg0 and arg1 for equality and a
2551    variant that allows null and views null as being different from any
2552    non-null value.  In the latter case, if either is null, they both
2553    must be; otherwise, do the normal comparison.  */
2554 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
2555 				    TREE_OPERAND (arg1, N), flags)
2556 
2557 #define OP_SAME_WITH_NULL(N)				\
2558   ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
2559    ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2560 
2561   switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2562     {
2563     case tcc_unary:
2564       /* Two conversions are equal only if signedness and modes match.  */
2565       switch (TREE_CODE (arg0))
2566         {
2567 	CASE_CONVERT:
2568         case FIX_TRUNC_EXPR:
2569 	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2570 	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2571 	    return 0;
2572 	  break;
2573 	default:
2574 	  break;
2575 	}
2576 
2577       return OP_SAME (0);
2578 
2579 
2580     case tcc_comparison:
2581     case tcc_binary:
2582       if (OP_SAME (0) && OP_SAME (1))
2583 	return 1;
2584 
2585       /* For commutative ops, allow the other order.  */
2586       return (commutative_tree_code (TREE_CODE (arg0))
2587 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
2588 				  TREE_OPERAND (arg1, 1), flags)
2589 	      && operand_equal_p (TREE_OPERAND (arg0, 1),
2590 				  TREE_OPERAND (arg1, 0), flags));
2591 
2592     case tcc_reference:
2593       /* If either of the pointer (or reference) expressions we are
2594 	 dereferencing contain a side effect, these cannot be equal.  */
2595       if (TREE_SIDE_EFFECTS (arg0)
2596 	  || TREE_SIDE_EFFECTS (arg1))
2597 	return 0;
2598 
2599       switch (TREE_CODE (arg0))
2600 	{
2601 	case INDIRECT_REF:
2602 	case REALPART_EXPR:
2603 	case IMAGPART_EXPR:
2604 	  return OP_SAME (0);
2605 
2606 	case MEM_REF:
2607 	  /* Require equal access sizes, and similar pointer types.
2608 	     We can have incomplete types for array references of
2609 	     variable-sized arrays from the Fortran frontend
2610 	     though.  */
2611 	  return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2612 		   || (TYPE_SIZE (TREE_TYPE (arg0))
2613 		       && TYPE_SIZE (TREE_TYPE (arg1))
2614 		       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2615 					   TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2616 		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2617 		      == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2618 		  && OP_SAME (0) && OP_SAME (1));
2619 
2620 	case ARRAY_REF:
2621 	case ARRAY_RANGE_REF:
2622 	  /* Operands 2 and 3 may be null.
2623 	     Compare the array index by value first if it is constant, as the
2624 	     indices may have different types but the same value here.  */
2625 	  return (OP_SAME (0)
2626 		  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2627 					  TREE_OPERAND (arg1, 1))
2628 		      || OP_SAME (1))
2629 		  && OP_SAME_WITH_NULL (2)
2630 		  && OP_SAME_WITH_NULL (3));
2631 
2632 	case COMPONENT_REF:
2633 	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
2634 	     may be NULL when we're called to compare MEM_EXPRs.  */
2635 	  return OP_SAME_WITH_NULL (0)
2636 		 && OP_SAME (1)
2637 		 && OP_SAME_WITH_NULL (2);
2638 
2639 	case BIT_FIELD_REF:
2640 	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2641 
2642 	default:
2643 	  return 0;
2644 	}
2645 
2646     case tcc_expression:
2647       switch (TREE_CODE (arg0))
2648 	{
2649 	case ADDR_EXPR:
2650 	case TRUTH_NOT_EXPR:
2651 	  return OP_SAME (0);
2652 
2653 	case TRUTH_ANDIF_EXPR:
2654 	case TRUTH_ORIF_EXPR:
2655 	  return OP_SAME (0) && OP_SAME (1);
2656 
2657 	case FMA_EXPR:
2658 	case WIDEN_MULT_PLUS_EXPR:
2659 	case WIDEN_MULT_MINUS_EXPR:
2660 	  if (!OP_SAME (2))
2661 	    return 0;
2662 	  /* The multiplication operands are commutative.  */
2663 	  /* FALLTHRU */
2664 
2665 	case TRUTH_AND_EXPR:
2666 	case TRUTH_OR_EXPR:
2667 	case TRUTH_XOR_EXPR:
2668 	  if (OP_SAME (0) && OP_SAME (1))
2669 	    return 1;
2670 
2671 	  /* Otherwise take into account this is a commutative operation.  */
2672 	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
2673 				   TREE_OPERAND (arg1, 1), flags)
2674 		  && operand_equal_p (TREE_OPERAND (arg0, 1),
2675 				      TREE_OPERAND (arg1, 0), flags));
2676 
2677 	case COND_EXPR:
2678 	case VEC_COND_EXPR:
2679 	case DOT_PROD_EXPR:
2680 	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2681 
2682 	default:
2683 	  return 0;
2684 	}
2685 
2686     case tcc_vl_exp:
2687       switch (TREE_CODE (arg0))
2688 	{
2689 	case CALL_EXPR:
2690 	  /* If the CALL_EXPRs call different functions, then they
2691 	     clearly cannot be equal.  */
2692 	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2693 				 flags))
2694 	    return 0;
2695 
2696 	  {
2697 	    unsigned int cef = call_expr_flags (arg0);
2698 	    if (flags & OEP_PURE_SAME)
2699 	      cef &= ECF_CONST | ECF_PURE;
2700 	    else
2701 	      cef &= ECF_CONST;
2702 	    if (!cef)
2703 	      return 0;
2704 	  }
2705 
2706 	  /* Now see if all the arguments are the same.  */
2707 	  {
2708 	    const_call_expr_arg_iterator iter0, iter1;
2709 	    const_tree a0, a1;
2710 	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
2711 		   a1 = first_const_call_expr_arg (arg1, &iter1);
2712 		 a0 && a1;
2713 		 a0 = next_const_call_expr_arg (&iter0),
2714 		   a1 = next_const_call_expr_arg (&iter1))
2715 	      if (! operand_equal_p (a0, a1, flags))
2716 		return 0;
2717 
2718 	    /* If we get here and both argument lists are exhausted
2719 	       then the CALL_EXPRs are equal.  */
2720 	    return ! (a0 || a1);
2721 	  }
2722 	default:
2723 	  return 0;
2724 	}
2725 
2726     case tcc_declaration:
2727       /* Consider __builtin_sqrt equal to sqrt.  */
2728       return (TREE_CODE (arg0) == FUNCTION_DECL
2729 	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2730 	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2731 	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2732 
2733     default:
2734       return 0;
2735     }
2736 
2737 #undef OP_SAME
2738 #undef OP_SAME_WITH_NULL
2739 }
2740 
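/* Illustrative examples (not from the original sources):

     operand_equal_p (a + b, b + a, 0)  returns 1, since PLUS_EXPR
       is commutative;
     operand_equal_p (-0.0, 0.0, 0)     returns 0 if the mode honors
       signed zeros, 1 otherwise;
     two identical calls f () compare equal only if f is ECF_CONST
       (or ECF_PURE when OEP_PURE_SAME is given).  */
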
2741 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2742    shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2743 
2744    When in doubt, return 0.  */
2745 
2746 static int
2747 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2748 {
2749   int unsignedp1, unsignedpo;
2750   tree primarg0, primarg1, primother;
2751   unsigned int correct_width;
2752 
2753   if (operand_equal_p (arg0, arg1, 0))
2754     return 1;
2755 
2756   if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2757       || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2758     return 0;
2759 
2760   /* Discard any conversions that don't change the modes of ARG0 and ARG1
2761      and see if the inner values are the same.  This removes any
2762      signedness comparison, which doesn't matter here.  */
2763   primarg0 = arg0, primarg1 = arg1;
2764   STRIP_NOPS (primarg0);
2765   STRIP_NOPS (primarg1);
2766   if (operand_equal_p (primarg0, primarg1, 0))
2767     return 1;
2768 
2769   /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2770      actual comparison operand, ARG0.
2771 
2772      First throw away any conversions to wider types
2773      already present in the operands.  */
2774 
2775   primarg1 = get_narrower (arg1, &unsignedp1);
2776   primother = get_narrower (other, &unsignedpo);
2777 
2778   correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2779   if (unsignedp1 == unsignedpo
2780       && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2781       && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2782     {
2783       tree type = TREE_TYPE (arg0);
2784 
2785       /* Make sure shorter operand is extended the right way
2786 	 to match the longer operand.  */
2787       primarg1 = fold_convert (signed_or_unsigned_type_for
2788 			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2789 
2790       if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2791 	return 1;
2792     }
2793 
2794   return 0;
2795 }
2796 
2797 /* See if ARG is an expression that is either a comparison or is performing
2798    arithmetic on comparisons.  The comparisons must only be comparing
2799    two different values, which will be stored in *CVAL1 and *CVAL2; if
2800    they are nonzero it means that some operands have already been found.
2801    No variables may be used anywhere else in the expression except in the
2802    comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
2803    the expression and save_expr needs to be called with CVAL1 and CVAL2.
2804 
2805    If this is true, return 1.  Otherwise, return zero.  */
2806 
2807 static int
2808 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2809 {
2810   enum tree_code code = TREE_CODE (arg);
2811   enum tree_code_class tclass = TREE_CODE_CLASS (code);
2812 
2813   /* We can handle some of the tcc_expression cases here.  */
2814   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2815     tclass = tcc_unary;
2816   else if (tclass == tcc_expression
2817 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2818 	       || code == COMPOUND_EXPR))
2819     tclass = tcc_binary;
2820 
2821   else if (tclass == tcc_expression && code == SAVE_EXPR
2822 	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2823     {
2824       /* If we've already found a CVAL1 or CVAL2, this expression is
2825 	 too complex to handle.  */
2826       if (*cval1 || *cval2)
2827 	return 0;
2828 
2829       tclass = tcc_unary;
2830       *save_p = 1;
2831     }
2832 
2833   switch (tclass)
2834     {
2835     case tcc_unary:
2836       return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2837 
2838     case tcc_binary:
2839       return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2840 	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
2841 				      cval1, cval2, save_p));
2842 
2843     case tcc_constant:
2844       return 1;
2845 
2846     case tcc_expression:
2847       if (code == COND_EXPR)
2848 	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2849 				     cval1, cval2, save_p)
2850 		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
2851 					cval1, cval2, save_p)
2852 		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
2853 					cval1, cval2, save_p));
2854       return 0;
2855 
2856     case tcc_comparison:
2857       /* First see if we can handle the first operand, then the second.  For
2858 	 the second operand, we know *CVAL1 can't be zero.  It must be that
2859 	 one side of the comparison is each of the values; test for the
2860 	 case where this isn't true by failing if the two operands
2861 	 are the same.  */
2862 
2863       if (operand_equal_p (TREE_OPERAND (arg, 0),
2864 			   TREE_OPERAND (arg, 1), 0))
2865 	return 0;
2866 
2867       if (*cval1 == 0)
2868 	*cval1 = TREE_OPERAND (arg, 0);
2869       else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2870 	;
2871       else if (*cval2 == 0)
2872 	*cval2 = TREE_OPERAND (arg, 0);
2873       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2874 	;
2875       else
2876 	return 0;
2877 
2878       if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2879 	;
2880       else if (*cval2 == 0)
2881 	*cval2 = TREE_OPERAND (arg, 1);
2882       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2883 	;
2884       else
2885 	return 0;
2886 
2887       return 1;
2888 
2889     default:
2890       return 0;
2891     }
2892 }
2893 
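/* Illustrative example: for ARG = (x < y) | (x == y), both
   comparisons mention only the two values x and y, so the function
   returns 1 with *CVAL1 = x and *CVAL2 = y.  For (x < y) | (x == z)
   a third value appears and the function returns 0.  */
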
2894 /* ARG is a tree that is known to contain just arithmetic operations and
2895    comparisons.  Evaluate the operations in the tree substituting NEW0 for
2896    any occurrence of OLD0 as an operand of a comparison and likewise for
2897    NEW1 and OLD1.  */
2898 
2899 static tree
2900 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2901 	    tree old1, tree new1)
2902 {
2903   tree type = TREE_TYPE (arg);
2904   enum tree_code code = TREE_CODE (arg);
2905   enum tree_code_class tclass = TREE_CODE_CLASS (code);
2906 
2907   /* We can handle some of the tcc_expression cases here.  */
2908   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2909     tclass = tcc_unary;
2910   else if (tclass == tcc_expression
2911 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2912     tclass = tcc_binary;
2913 
2914   switch (tclass)
2915     {
2916     case tcc_unary:
2917       return fold_build1_loc (loc, code, type,
2918 			  eval_subst (loc, TREE_OPERAND (arg, 0),
2919 				      old0, new0, old1, new1));
2920 
2921     case tcc_binary:
2922       return fold_build2_loc (loc, code, type,
2923 			  eval_subst (loc, TREE_OPERAND (arg, 0),
2924 				      old0, new0, old1, new1),
2925 			  eval_subst (loc, TREE_OPERAND (arg, 1),
2926 				      old0, new0, old1, new1));
2927 
2928     case tcc_expression:
2929       switch (code)
2930 	{
2931 	case SAVE_EXPR:
2932 	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2933 			     old1, new1);
2934 
2935 	case COMPOUND_EXPR:
2936 	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2937 			     old1, new1);
2938 
2939 	case COND_EXPR:
2940 	  return fold_build3_loc (loc, code, type,
2941 			      eval_subst (loc, TREE_OPERAND (arg, 0),
2942 					  old0, new0, old1, new1),
2943 			      eval_subst (loc, TREE_OPERAND (arg, 1),
2944 					  old0, new0, old1, new1),
2945 			      eval_subst (loc, TREE_OPERAND (arg, 2),
2946 					  old0, new0, old1, new1));
2947 	default:
2948 	  break;
2949 	}
2950       /* Fall through - ???  */
2951 
2952     case tcc_comparison:
2953       {
2954 	tree arg0 = TREE_OPERAND (arg, 0);
2955 	tree arg1 = TREE_OPERAND (arg, 1);
2956 
2957 	/* We need to check both for exact equality and tree equality.  The
2958 	   former will be true if the operand has a side-effect.  In that
2959 	   case, we know the operand occurred exactly once.  */
2960 
2961 	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2962 	  arg0 = new0;
2963 	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2964 	  arg0 = new1;
2965 
2966 	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2967 	  arg1 = new0;
2968 	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2969 	  arg1 = new1;
2970 
2971 	return fold_build2_loc (loc, code, type, arg0, arg1);
2972       }
2973 
2974     default:
2975       return arg;
2976     }
2977 }
2978 
2979 /* Return a tree for the case when the result of an expression is RESULT
2980    converted to TYPE and OMITTED was previously an operand of the expression
2981    but is now not needed (e.g., we folded OMITTED * 0).
2982 
2983    If OMITTED has side effects, we must evaluate it.  Otherwise, just do
2984    the conversion of RESULT to TYPE.  */
2985 
2986 tree
2987 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2988 {
2989   tree t = fold_convert_loc (loc, type, result);
2990 
2991   /* If the resulting operand is an empty statement, just return the omitted
2992      statement cast to void.  */
2993   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2994     return build1_loc (loc, NOP_EXPR, void_type_node,
2995 		       fold_ignored_result (omitted));
2996 
2997   if (TREE_SIDE_EFFECTS (omitted))
2998     return build2_loc (loc, COMPOUND_EXPR, type,
2999 		       fold_ignored_result (omitted), t);
3000 
3001   return non_lvalue_loc (loc, t);
3002 }
3003 
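/* Illustrative example: when folding f () * 0, RESULT is 0 and
   OMITTED is the call f ().  The call has side effects, so the
   result is the COMPOUND_EXPR (f (), 0), keeping the call; had
   OMITTED been side-effect free, the converted RESULT would be
   returned directly, wrapped as a non-lvalue.  */
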
3004 /* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */
3005 
3006 static tree
3007 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3008 			       tree omitted)
3009 {
3010   tree t = fold_convert_loc (loc, type, result);
3011 
3012   /* If the resulting operand is an empty statement, just return the omitted
3013      statement cast to void.  */
3014   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3015     return build1_loc (loc, NOP_EXPR, void_type_node,
3016 		       fold_ignored_result (omitted));
3017 
3018   if (TREE_SIDE_EFFECTS (omitted))
3019     return build2_loc (loc, COMPOUND_EXPR, type,
3020 		       fold_ignored_result (omitted), t);
3021 
3022   return pedantic_non_lvalue_loc (loc, t);
3023 }
3024 
3025 /* Return a tree for the case when the result of an expression is RESULT
3026    converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3027    of the expression but are now not needed.
3028 
3029    If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3030    If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3031    evaluated before OMITTED2.  Otherwise, if neither has side effects,
3032    just do the conversion of RESULT to TYPE.  */
3033 
3034 tree
3035 omit_two_operands_loc (location_t loc, tree type, tree result,
3036 		       tree omitted1, tree omitted2)
3037 {
3038   tree t = fold_convert_loc (loc, type, result);
3039 
3040   if (TREE_SIDE_EFFECTS (omitted2))
3041     t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3042   if (TREE_SIDE_EFFECTS (omitted1))
3043     t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3044 
3045   return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3046 }
3047 
3048 
3049 /* Return a simplified tree node for the truth-negation of ARG.  This
3050    never alters ARG itself.  We assume that ARG is an operation that
3051    returns a truth value (0 or 1).
3052 
3053    FIXME: one would think we would fold the result, but it causes
3054    problems with the dominator optimizer.  */
3055 
3056 tree
3057 fold_truth_not_expr (location_t loc, tree arg)
3058 {
3059   tree type = TREE_TYPE (arg);
3060   enum tree_code code = TREE_CODE (arg);
3061   location_t loc1, loc2;
3062 
3063   /* If this is a comparison, we can simply invert it, except for
3064      floating-point non-equality comparisons, in which case we just
3065      enclose a TRUTH_NOT_EXPR around what we have.  */
3066 
3067   if (TREE_CODE_CLASS (code) == tcc_comparison)
3068     {
3069       tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3070       if (FLOAT_TYPE_P (op_type)
3071 	  && flag_trapping_math
3072 	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
3073 	  && code != NE_EXPR && code != EQ_EXPR)
3074 	return NULL_TREE;
3075 
3076       code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3077       if (code == ERROR_MARK)
3078 	return NULL_TREE;
3079 
3080       return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3081 			 TREE_OPERAND (arg, 1));
3082     }
3083 
3084   switch (code)
3085     {
3086     case INTEGER_CST:
3087       return constant_boolean_node (integer_zerop (arg), type);
3088 
3089     case TRUTH_AND_EXPR:
3090       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3091       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3092       return build2_loc (loc, TRUTH_OR_EXPR, type,
3093 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3094 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3095 
3096     case TRUTH_OR_EXPR:
3097       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3098       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3099       return build2_loc (loc, TRUTH_AND_EXPR, type,
3100 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3101 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3102 
3103     case TRUTH_XOR_EXPR:
3104       /* Here we can invert either operand.  We invert the first operand
3105 	 unless the second operand is a TRUTH_NOT_EXPR in which case our
3106 	 result is the XOR of the first operand with the inside of the
3107 	 negation of the second operand.  */
3108 
3109       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3110 	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3111 			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3112       else
3113 	return build2_loc (loc, TRUTH_XOR_EXPR, type,
3114 			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3115 			   TREE_OPERAND (arg, 1));
3116 
3117     case TRUTH_ANDIF_EXPR:
3118       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3119       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3120       return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3121 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3122 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3123 
3124     case TRUTH_ORIF_EXPR:
3125       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3126       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3127       return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3128 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3129 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3130 
3131     case TRUTH_NOT_EXPR:
3132       return TREE_OPERAND (arg, 0);
3133 
3134     case COND_EXPR:
3135       {
3136 	tree arg1 = TREE_OPERAND (arg, 1);
3137 	tree arg2 = TREE_OPERAND (arg, 2);
3138 
3139 	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3140 	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3141 
3142 	/* A COND_EXPR may have a throw as one operand, which
3143 	   then has void type.  Just leave void operands
3144 	   as they are.  */
3145 	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3146 			   VOID_TYPE_P (TREE_TYPE (arg1))
3147 			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
3148 			   VOID_TYPE_P (TREE_TYPE (arg2))
3149 			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
3150       }
3151 
3152     case COMPOUND_EXPR:
3153       loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3154       return build2_loc (loc, COMPOUND_EXPR, type,
3155 			 TREE_OPERAND (arg, 0),
3156 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3157 
3158     case NON_LVALUE_EXPR:
3159       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3160       return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3161 
3162     CASE_CONVERT:
3163       if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3164 	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3165 
3166       /* ... fall through ...  */
3167 
3168     case FLOAT_EXPR:
3169       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3170       return build1_loc (loc, TREE_CODE (arg), type,
3171 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3172 
3173     case BIT_AND_EXPR:
3174       if (!integer_onep (TREE_OPERAND (arg, 1)))
3175 	return NULL_TREE;
3176       return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3177 
3178     case SAVE_EXPR:
3179       return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3180 
3181     case CLEANUP_POINT_EXPR:
3182       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3183       return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3184 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3185 
3186     default:
3187       return NULL_TREE;
3188     }
3189 }
3190 
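/* Illustrative examples of the negations above:

     !(a && b)     ->  !a || !b          (De Morgan)
     !(a ^ b)      ->  (!a) ^ b          (only one operand inverted)
     !(c ? a : b)  ->  c ? !a : !b

   For a floating-point a < b under -ftrapping-math the comparison
   cannot be inverted safely, so fold_truth_not_expr returns
   NULL_TREE and invert_truthvalue_loc wraps a TRUTH_NOT_EXPR
   instead.  */
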
3191 /* Return a simplified tree node for the truth-negation of ARG.  This
3192    never alters ARG itself.  We assume that ARG is an operation that
3193    returns a truth value (0 or 1).
3194 
3195    FIXME: one would think we would fold the result, but it causes
3196    problems with the dominator optimizer.  */
3197 
3198 tree
3199 invert_truthvalue_loc (location_t loc, tree arg)
3200 {
3201   tree tem;
3202 
3203   if (TREE_CODE (arg) == ERROR_MARK)
3204     return arg;
3205 
3206   tem = fold_truth_not_expr (loc, arg);
3207   if (!tem)
3208     tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3209 
3210   return tem;
3211 }
3212 
3213 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3214    operands are another bit-wise operation with a common input.  If so,
3215    distribute the bit operations to save an operation and possibly two if
3216    constants are involved.  For example, convert
3217 	(A | B) & (A | C) into A | (B & C)
3218    Further simplification will occur if B and C are constants.
3219 
3220    If this optimization cannot be done, 0 will be returned.  */
3221 
3222 static tree
3223 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3224 		     tree arg0, tree arg1)
3225 {
3226   tree common;
3227   tree left, right;
3228 
3229   if (TREE_CODE (arg0) != TREE_CODE (arg1)
3230       || TREE_CODE (arg0) == code
3231       || (TREE_CODE (arg0) != BIT_AND_EXPR
3232 	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
3233     return 0;
3234 
3235   if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3236     {
3237       common = TREE_OPERAND (arg0, 0);
3238       left = TREE_OPERAND (arg0, 1);
3239       right = TREE_OPERAND (arg1, 1);
3240     }
3241   else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3242     {
3243       common = TREE_OPERAND (arg0, 0);
3244       left = TREE_OPERAND (arg0, 1);
3245       right = TREE_OPERAND (arg1, 0);
3246     }
3247   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3248     {
3249       common = TREE_OPERAND (arg0, 1);
3250       left = TREE_OPERAND (arg0, 0);
3251       right = TREE_OPERAND (arg1, 1);
3252     }
3253   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3254     {
3255       common = TREE_OPERAND (arg0, 1);
3256       left = TREE_OPERAND (arg0, 0);
3257       right = TREE_OPERAND (arg1, 0);
3258     }
3259   else
3260     return 0;
3261 
3262   common = fold_convert_loc (loc, type, common);
3263   left = fold_convert_loc (loc, type, left);
3264   right = fold_convert_loc (loc, type, right);
3265   return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3266 		      fold_build2_loc (loc, code, type, left, right));
3267 }
3268 
3269 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3270    with code CODE.  This optimization is unsafe (it reassociates FP math).  */
3271 static tree
3272 distribute_real_division (location_t loc, enum tree_code code, tree type,
3273 			  tree arg0, tree arg1)
3274 {
3275   bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3276   bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3277 
3278   /* (A / C) +- (B / C) -> (A +- B) / C.  */
3279   if (mul0 == mul1
3280       && operand_equal_p (TREE_OPERAND (arg0, 1),
3281 		       TREE_OPERAND (arg1, 1), 0))
3282     return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3283 			fold_build2_loc (loc, code, type,
3284 				     TREE_OPERAND (arg0, 0),
3285 				     TREE_OPERAND (arg1, 0)),
3286 			TREE_OPERAND (arg0, 1));
3287 
3288   /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
3289   if (operand_equal_p (TREE_OPERAND (arg0, 0),
3290 		       TREE_OPERAND (arg1, 0), 0)
3291       && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3292       && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3293     {
3294       REAL_VALUE_TYPE r0, r1;
3295       r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3296       r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3297       if (!mul0)
3298 	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3299       if (!mul1)
3300         real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3301       real_arithmetic (&r0, code, &r0, &r1);
3302       return fold_build2_loc (loc, MULT_EXPR, type,
3303 			  TREE_OPERAND (arg0, 0),
3304 			  build_real (type, r0));
3305     }
3306 
3307   return NULL_TREE;
3308 }
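
/* Illustrative example (editorial, not from the original source): with
   CODE == PLUS_EXPR,

     a / 7.0 + b / 7.0   ==>  (a + b) / 7.0       (first pattern)
     a / 2.0 + a / 4.0   ==>  a * 0.75            (second pattern)

   The second line shows why this is flagged as unsafe: the rewritten
   form can round differently from the original.  */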
3309 
3310 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3311    starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */
3312 
3313 static tree
3314 make_bit_field_ref (location_t loc, tree inner, tree type,
3315 		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3316 {
3317   tree result, bftype;
3318 
3319   if (bitpos == 0)
3320     {
3321       tree size = TYPE_SIZE (TREE_TYPE (inner));
3322       if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3323 	   || POINTER_TYPE_P (TREE_TYPE (inner)))
3324 	  && host_integerp (size, 0)
3325 	  && tree_low_cst (size, 0) == bitsize)
3326 	return fold_convert_loc (loc, type, inner);
3327     }
3328 
3329   bftype = type;
3330   if (TYPE_PRECISION (bftype) != bitsize
3331       || TYPE_UNSIGNED (bftype) == !unsignedp)
3332     bftype = build_nonstandard_integer_type (bitsize, 0);
3333 
3334   result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3335 		       size_int (bitsize), bitsize_int (bitpos));
3336 
3337   if (bftype != type)
3338     result = fold_convert_loc (loc, type, result);
3339 
3340   return result;
3341 }
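
/* Illustrative example (editorial, not from the original source):
   asking for 8 bits of a 32-bit unsigned int INNER at BITPOS 8 builds

     BIT_FIELD_REF <inner, 8, 8>

   in an integer type of precision 8, converted to TYPE afterwards if
   needed; asking for all 32 bits at BITPOS 0 degenerates into a plain
   conversion of INNER to TYPE.  */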
3342 
3343 /* Optimize a bit-field compare.
3344 
3345    There are two cases:  the first is a compare against a constant and the
3346    second is a comparison of two items where the fields are at the same
3347    bit position relative to the start of a chunk (byte, halfword, word)
3348    large enough to contain it.  In these cases we can avoid the shift
3349    implicit in bitfield extractions.
3350 
3351    For constants, we emit a compare of the shifted constant with the
3352    BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3353    compared.  For two fields at the same position, we do the ANDs with the
3354    similar mask and compare the result of the ANDs.
3355 
3356    CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3357    COMPARE_TYPE is the type of the comparison, and LHS and RHS
3358    are the left and right operands of the comparison, respectively.
3359 
3360    If the optimization described above can be done, we return the resulting
3361    tree.  Otherwise we return zero.  */
3362 
3363 static tree
3364 optimize_bit_field_compare (location_t loc, enum tree_code code,
3365 			    tree compare_type, tree lhs, tree rhs)
3366 {
3367   HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3368   tree type = TREE_TYPE (lhs);
3369   tree signed_type, unsigned_type;
3370   int const_p = TREE_CODE (rhs) == INTEGER_CST;
3371   enum machine_mode lmode, rmode, nmode;
3372   int lunsignedp, runsignedp;
3373   int lvolatilep = 0, rvolatilep = 0;
3374   tree linner, rinner = NULL_TREE;
3375   tree mask;
3376   tree offset;
3377 
3378   /* Get all the information about the extractions being done.  If the bit size
3379      is the same as the size of the underlying object, we aren't doing an
3380      extraction at all and so can do nothing.  We also don't want to
3381      do anything if the inner expression is a PLACEHOLDER_EXPR since we
3382      then will no longer be able to replace it.  */
3383   linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3384 				&lunsignedp, &lvolatilep, false);
3385   if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3386       || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3387     return 0;
3388 
3389  if (!const_p)
3390    {
3391      /* If this is not a constant, we can only do something if bit positions,
3392 	sizes, and signedness are the same.  */
3393      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3394 				   &runsignedp, &rvolatilep, false);
3395 
3396      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3397 	 || lunsignedp != runsignedp || offset != 0
3398 	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3399        return 0;
3400    }
3401 
3402   /* See if we can find a mode to refer to this field.  We should be able to,
3403      but fail if we can't.  */
3404   if (lvolatilep
3405       && GET_MODE_BITSIZE (lmode) > 0
3406       && flag_strict_volatile_bitfields > 0)
3407     nmode = lmode;
3408   else
3409     nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3410 			   const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3411 			   : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3412 				  TYPE_ALIGN (TREE_TYPE (rinner))),
3413 			   word_mode, lvolatilep || rvolatilep);
3414   if (nmode == VOIDmode)
3415     return 0;
3416 
3417   /* Set signed and unsigned types of the precision of this mode for the
3418      shifts below.  */
3419   signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3420   unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3421 
3422   /* Compute the bit position and size for the new reference and our offset
3423      within it. If the new reference is the same size as the original, we
3424      won't optimize anything, so return zero.  */
3425   nbitsize = GET_MODE_BITSIZE (nmode);
3426   nbitpos = lbitpos & ~ (nbitsize - 1);
3427   lbitpos -= nbitpos;
3428   if (nbitsize == lbitsize)
3429     return 0;
3430 
3431   if (BYTES_BIG_ENDIAN)
3432     lbitpos = nbitsize - lbitsize - lbitpos;
3433 
3434   /* Make the mask to be used against the extracted field.  */
3435   mask = build_int_cst_type (unsigned_type, -1);
3436   mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3437   mask = const_binop (RSHIFT_EXPR, mask,
3438 		      size_int (nbitsize - lbitsize - lbitpos));
3439 
3440   if (! const_p)
3441     /* If not comparing with constant, just rework the comparison
3442        and return.  */
3443     return fold_build2_loc (loc, code, compare_type,
3444 			fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3445 				     make_bit_field_ref (loc, linner,
3446 							 unsigned_type,
3447 							 nbitsize, nbitpos,
3448 							 1),
3449 				     mask),
3450 			fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3451 				     make_bit_field_ref (loc, rinner,
3452 							 unsigned_type,
3453 							 nbitsize, nbitpos,
3454 							 1),
3455 				     mask));
3456 
3457   /* Otherwise, we are handling the constant case.  See if the constant is too
3458      big for the field.  Warn and return a tree for 0 (false) if so.  We do
3459      this not only for its own sake, but to avoid having to test for this
3460      error case below.  If we didn't, we might generate wrong code.
3461 
3462      For unsigned fields, the constant shifted right by the field length should
3463      be all zero.  For signed fields, the high-order bits should agree with
3464      the sign bit.  */
3465 
3466   if (lunsignedp)
3467     {
3468       if (! integer_zerop (const_binop (RSHIFT_EXPR,
3469 					fold_convert_loc (loc,
3470 							  unsigned_type, rhs),
3471 					size_int (lbitsize))))
3472 	{
3473 	  warning (0, "comparison is always %d due to width of bit-field",
3474 		   code == NE_EXPR);
3475 	  return constant_boolean_node (code == NE_EXPR, compare_type);
3476 	}
3477     }
3478   else
3479     {
3480       tree tem = const_binop (RSHIFT_EXPR,
3481 			      fold_convert_loc (loc, signed_type, rhs),
3482 			      size_int (lbitsize - 1));
3483       if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3484 	{
3485 	  warning (0, "comparison is always %d due to width of bit-field",
3486 		   code == NE_EXPR);
3487 	  return constant_boolean_node (code == NE_EXPR, compare_type);
3488 	}
3489     }
3490 
3491   /* Single-bit compares should always be against zero.  */
3492   if (lbitsize == 1 && ! integer_zerop (rhs))
3493     {
3494       code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3495       rhs = build_int_cst (type, 0);
3496     }
3497 
3498   /* Make a new bitfield reference, shift the constant over the
3499      appropriate number of bits and mask it with the computed mask
3500      (in case this was a signed field).  If we changed it, make a new one.  */
3501   lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3502   if (lvolatilep)
3503     {
3504       TREE_SIDE_EFFECTS (lhs) = 1;
3505       TREE_THIS_VOLATILE (lhs) = 1;
3506     }
3507 
3508   rhs = const_binop (BIT_AND_EXPR,
3509 		     const_binop (LSHIFT_EXPR,
3510 				  fold_convert_loc (loc, unsigned_type, rhs),
3511 				  size_int (lbitpos)),
3512 		     mask);
3513 
3514   lhs = build2_loc (loc, code, compare_type,
3515 		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3516   return lhs;
3517 }
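
/* Illustrative example (editorial, not from the original source): for

     struct s { unsigned a : 3; unsigned b : 9; } x;
     ... x.b == 5 ...

   instead of extracting the bit-field (load, shift, mask), the whole
   containing word is loaded once and compared under a mask, roughly

     (word & (0x1ff << 3)) == (5 << 3)

   assuming little-endian bit numbering; the shift implied by the
   extraction disappears.  */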
3518 
3519 /* Subroutine for fold_truth_andor_1: decode a field reference.
3520 
3521    If EXP is a comparison reference, we return the innermost reference.
3522 
3523    *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3524    set to the starting bit number.
3525 
3526    If the innermost field can be completely contained in a mode-sized
3527    unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
3528 
3529    *PVOLATILEP is set to 1 if any expression encountered is volatile;
3530    otherwise it is not changed.
3531 
3532    *PUNSIGNEDP is set to the signedness of the field.
3533 
3534    *PMASK is set to the mask used.  This is either contained in a
3535    BIT_AND_EXPR or derived from the width of the field.
3536 
3537    *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3538 
3539    Return 0 if this is not a component reference or is one that we can't
3540    do anything with.  */
3541 
3542 static tree
3543 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3544 			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3545 			int *punsignedp, int *pvolatilep,
3546 			tree *pmask, tree *pand_mask)
3547 {
3548   tree outer_type = 0;
3549   tree and_mask = 0;
3550   tree mask, inner, offset;
3551   tree unsigned_type;
3552   unsigned int precision;
3553 
3554   /* All the optimizations using this function assume integer fields.
3555      There are problems with FP fields since the type_for_size call
3556      below can fail for, e.g., XFmode.  */
3557   if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3558     return 0;
3559 
3560   /* We are interested in the bare arrangement of bits, so strip everything
3561      that doesn't affect the machine mode.  However, record the type of the
3562      outermost expression if it may matter below.  */
3563   if (CONVERT_EXPR_P (exp)
3564       || TREE_CODE (exp) == NON_LVALUE_EXPR)
3565     outer_type = TREE_TYPE (exp);
3566   STRIP_NOPS (exp);
3567 
3568   if (TREE_CODE (exp) == BIT_AND_EXPR)
3569     {
3570       and_mask = TREE_OPERAND (exp, 1);
3571       exp = TREE_OPERAND (exp, 0);
3572       STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3573       if (TREE_CODE (and_mask) != INTEGER_CST)
3574 	return 0;
3575     }
3576 
3577   inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3578 			       punsignedp, pvolatilep, false);
3579   if ((inner == exp && and_mask == 0)
3580       || *pbitsize < 0 || offset != 0
3581       || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3582     return 0;
3583 
3584   /* If the number of bits in the reference is the same as the bitsize of
3585      the outer type, then the outer type gives the signedness. Otherwise
3586      (in case of a small bitfield) the signedness is unchanged.  */
3587   if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3588     *punsignedp = TYPE_UNSIGNED (outer_type);
3589 
3590   /* Compute the mask to access the bitfield.  */
3591   unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3592   precision = TYPE_PRECISION (unsigned_type);
3593 
3594   mask = build_int_cst_type (unsigned_type, -1);
3595 
3596   mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3597   mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3598 
3599   /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
3600   if (and_mask != 0)
3601     mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3602 			fold_convert_loc (loc, unsigned_type, and_mask), mask);
3603 
3604   *pmask = mask;
3605   *pand_mask = and_mask;
3606   return inner;
3607 }
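
/* Illustrative example (editorial, not from the original source): for
   EXP of the form

     x.field & 0x3f

   the BIT_AND_EXPR is peeled off, *PAND_MASK is set to 0x3f, *PMASK to
   0x3f intersected with an all-ones mask of the field's width, and the
   containing object x is returned, with *PBITSIZE and *PBITPOS
   describing the field.  */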
3608 
3609 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3610    bit positions.  */
3611 
3612 static int
3613 all_ones_mask_p (const_tree mask, int size)
3614 {
3615   tree type = TREE_TYPE (mask);
3616   unsigned int precision = TYPE_PRECISION (type);
3617   tree tmask;
3618 
3619   tmask = build_int_cst_type (signed_type_for (type), -1);
3620 
3621   return
3622     tree_int_cst_equal (mask,
3623 			const_binop (RSHIFT_EXPR,
3624 				     const_binop (LSHIFT_EXPR, tmask,
3625 						  size_int (precision - size)),
3626 				     size_int (precision - size)));
3627 }
3628 
3629 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3630    represents the sign bit of EXP's type.  If EXP represents a sign
3631    or zero extension, also test VAL against the unextended type.
3632    The return value is the (sub)expression whose sign bit is VAL,
3633    or NULL_TREE otherwise.  */
3634 
3635 static tree
3636 sign_bit_p (tree exp, const_tree val)
3637 {
3638   unsigned HOST_WIDE_INT mask_lo, lo;
3639   HOST_WIDE_INT mask_hi, hi;
3640   int width;
3641   tree t;
3642 
3643   /* Tree EXP must have an integral type.  */
3644   t = TREE_TYPE (exp);
3645   if (! INTEGRAL_TYPE_P (t))
3646     return NULL_TREE;
3647 
3648   /* Tree VAL must be an integer constant.  */
3649   if (TREE_CODE (val) != INTEGER_CST
3650       || TREE_OVERFLOW (val))
3651     return NULL_TREE;
3652 
3653   width = TYPE_PRECISION (t);
3654   if (width > HOST_BITS_PER_WIDE_INT)
3655     {
3656       hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3657       lo = 0;
3658 
3659       mask_hi = ((unsigned HOST_WIDE_INT) -1
3660 		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3661       mask_lo = -1;
3662     }
3663   else
3664     {
3665       hi = 0;
3666       lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3667 
3668       mask_hi = 0;
3669       mask_lo = ((unsigned HOST_WIDE_INT) -1
3670 		 >> (HOST_BITS_PER_WIDE_INT - width));
3671     }
3672 
3673   /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3674      treat VAL as if it were unsigned.  */
3675   if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3676       && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3677     return exp;
3678 
3679   /* Handle extension from a narrower type.  */
3680   if (TREE_CODE (exp) == NOP_EXPR
3681       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3682     return sign_bit_p (TREE_OPERAND (exp, 0), val);
3683 
3684   return NULL_TREE;
3685 }
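
/* Illustrative example (editorial, not from the original source): for
   a 32-bit int EXP, sign_bit_p (exp, val) returns a non-null tree only
   when VAL is 0x80000000, a one in bit 31 and zeros below; and if EXP
   is (int) c for an 8-bit signed char c, VAL == 0x80 also matches via
   the recursion on the unextended operand.  */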
3686 
3687 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3688    to be evaluated unconditionally.  */
3689 
3690 static int
3691 simple_operand_p (const_tree exp)
3692 {
3693   /* Strip any conversions that don't change the machine mode.  */
3694   STRIP_NOPS (exp);
3695 
3696   return (CONSTANT_CLASS_P (exp)
3697   	  || TREE_CODE (exp) == SSA_NAME
3698 	  || (DECL_P (exp)
3699 	      && ! TREE_ADDRESSABLE (exp)
3700 	      && ! TREE_THIS_VOLATILE (exp)
3701 	      && ! DECL_NONLOCAL (exp)
3702 	      /* Don't regard global variables as simple.  They may be
3703 		 allocated in ways unknown to the compiler (shared memory,
3704 		 #pragma weak, etc).  */
3705 	      && ! TREE_PUBLIC (exp)
3706 	      && ! DECL_EXTERNAL (exp)
3707 	      /* Loading a static variable is unduly expensive, but global
3708 		 registers aren't expensive.  */
3709 	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3710 }
3711 
3712 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3713    to be evaluated unconditionally.
3714    In addition to simple_operand_p, we assume that comparisons, conversions,
3715    and logic-not operations are simple, if their operands are simple, too.  */
3716 
3717 static bool
3718 simple_operand_p_2 (tree exp)
3719 {
3720   enum tree_code code;
3721 
3722   if (TREE_SIDE_EFFECTS (exp)
3723       || tree_could_trap_p (exp))
3724     return false;
3725 
3726   while (CONVERT_EXPR_P (exp))
3727     exp = TREE_OPERAND (exp, 0);
3728 
3729   code = TREE_CODE (exp);
3730 
3731   if (TREE_CODE_CLASS (code) == tcc_comparison)
3732     return (simple_operand_p (TREE_OPERAND (exp, 0))
3733 	    && simple_operand_p (TREE_OPERAND (exp, 1)));
3734 
3735   if (code == TRUTH_NOT_EXPR)
3736       return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3737 
3738   return simple_operand_p (exp);
3739 }
3740 
3741 
3742 /* The following functions are subroutines to fold_range_test and allow it to
3743    try to change a logical combination of comparisons into a range test.
3744 
3745    For example, both
3746 	X == 2 || X == 3 || X == 4 || X == 5
3747    and
3748 	X >= 2 && X <= 5
3749    are converted to
3750 	(unsigned) (X - 2) <= 3
3751 
3752    We describe each set of comparisons as being either inside or outside
3753    a range, using a variable named like IN_P, and then describe the
3754    range with a lower and upper bound.  If one of the bounds is omitted,
3755    it represents either the highest or lowest value of the type.
3756 
3757    In the comments below, we represent a range by two numbers in brackets
3758    preceded by a "+" to designate being inside that range, or a "-" to
3759    designate being outside that range, so the condition can be inverted by
3760    flipping the prefix.  An omitted bound is represented by a "-".  For
3761    example, "- [-, 10]" means being outside the range starting at the lowest
3762    possible value and ending at 10, in other words, being greater than 10.
3763    The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3764    always false.
3765 
3766    We set up things so that the missing bounds are handled in a consistent
3767    manner so neither a missing bound nor "true" and "false" need to be
3768    handled using a special case.  */
3769 
3770 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3771    of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3772    and UPPER1_P are nonzero if the respective argument is an upper bound
3773    and zero for a lower.  TYPE, if nonzero, is the type of the result; it
3774    must be specified for a comparison.  ARG1 will be converted to ARG0's
3775    type if both are specified.  */
3776 
3777 static tree
3778 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3779 	     tree arg1, int upper1_p)
3780 {
3781   tree tem;
3782   int result;
3783   int sgn0, sgn1;
3784 
3785   /* If neither arg represents infinity, do the normal operation.
3786      Else, if not a comparison, return infinity.  Else handle the special
3787      comparison rules. Note that most of the cases below won't occur, but
3788      are handled for consistency.  */
3789 
3790   if (arg0 != 0 && arg1 != 0)
3791     {
3792       tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3793 			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3794       STRIP_NOPS (tem);
3795       return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3796     }
3797 
3798   if (TREE_CODE_CLASS (code) != tcc_comparison)
3799     return 0;
3800 
3801   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3802      for neither.  In real mathematics, open-ended ranges need not compare
3803      equal.  But this is computer arithmetic, where numbers are finite, so
3804      we can model any missing bound by a value Z greater than any
3805      representable number, which permits us to treat unbounded ranges as
3806      equal.  */
3807   sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3808   sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3809   switch (code)
3810     {
3811     case EQ_EXPR:
3812       result = sgn0 == sgn1;
3813       break;
3814     case NE_EXPR:
3815       result = sgn0 != sgn1;
3816       break;
3817     case LT_EXPR:
3818       result = sgn0 < sgn1;
3819       break;
3820     case LE_EXPR:
3821       result = sgn0 <= sgn1;
3822       break;
3823     case GT_EXPR:
3824       result = sgn0 > sgn1;
3825       break;
3826     case GE_EXPR:
3827       result = sgn0 >= sgn1;
3828       break;
3829     default:
3830       gcc_unreachable ();
3831     }
3832 
3833   return constant_boolean_node (result, type);
3834 }
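
/* Illustrative example (editorial, not from the original source): with
   ARG0 omitted as an upper bound (sgn0 = 1, "plus infinity") and ARG1
   a present constant (sgn1 = 0),

     range_binop (GT_EXPR, type, 0, 1, c, 0)

   yields true, since an omitted upper bound compares greater than any
   finite value.  Two omitted upper bounds compare equal.  */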
3835 
3836 /* Helper routine for make_range.  Perform one step for it, return
3837    new expression if the loop should continue or NULL_TREE if it should
3838    stop.  */
3839 
3840 tree
3841 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3842 		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3843 		 bool *strict_overflow_p)
3844 {
3845   tree arg0_type = TREE_TYPE (arg0);
3846   tree n_low, n_high, low = *p_low, high = *p_high;
3847   int in_p = *p_in_p, n_in_p;
3848 
3849   switch (code)
3850     {
3851     case TRUTH_NOT_EXPR:
3852       /* We can only do something if the range is testing for zero.  */
3853       if (low == NULL_TREE || high == NULL_TREE
3854 	  || ! integer_zerop (low) || ! integer_zerop (high))
3855 	return NULL_TREE;
3856       *p_in_p = ! in_p;
3857       return arg0;
3858 
3859     case EQ_EXPR: case NE_EXPR:
3860     case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3861       /* We can only do something if the range is testing for zero
3862 	 and if the second operand is an integer constant.  Note that
3863 	 saying something is "in" the range we make is done by
3864 	 complementing IN_P since it will set in the initial case of
3865 	 being not equal to zero; "out" is leaving it alone.  */
3866       if (low == NULL_TREE || high == NULL_TREE
3867 	  || ! integer_zerop (low) || ! integer_zerop (high)
3868 	  || TREE_CODE (arg1) != INTEGER_CST)
3869 	return NULL_TREE;
3870 
3871       switch (code)
3872 	{
3873 	case NE_EXPR:  /* - [c, c]  */
3874 	  low = high = arg1;
3875 	  break;
3876 	case EQ_EXPR:  /* + [c, c]  */
3877 	  in_p = ! in_p, low = high = arg1;
3878 	  break;
3879 	case GT_EXPR:  /* - [-, c] */
3880 	  low = 0, high = arg1;
3881 	  break;
3882 	case GE_EXPR:  /* + [c, -] */
3883 	  in_p = ! in_p, low = arg1, high = 0;
3884 	  break;
3885 	case LT_EXPR:  /* - [c, -] */
3886 	  low = arg1, high = 0;
3887 	  break;
3888 	case LE_EXPR:  /* + [-, c] */
3889 	  in_p = ! in_p, low = 0, high = arg1;
3890 	  break;
3891 	default:
3892 	  gcc_unreachable ();
3893 	}
3894 
3895       /* If this is an unsigned comparison, we also know that EXP is
3896 	 greater than or equal to zero.  We base the range tests we make
3897 	 on that fact, so we record it here so we can parse existing
3898 	 range tests.  We test arg0_type since often the return type
3899 	 of, e.g. EQ_EXPR, is boolean.  */
3900       if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3901 	{
3902 	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
3903 			      in_p, low, high, 1,
3904 			      build_int_cst (arg0_type, 0),
3905 			      NULL_TREE))
3906 	    return NULL_TREE;
3907 
3908 	  in_p = n_in_p, low = n_low, high = n_high;
3909 
3910 	  /* If the high bound is missing, but we have a nonzero low
3911 	     bound, reverse the range so it goes from zero to the low bound
3912 	     minus 1.  */
3913 	  if (high == 0 && low && ! integer_zerop (low))
3914 	    {
3915 	      in_p = ! in_p;
3916 	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3917 				  integer_one_node, 0);
3918 	      low = build_int_cst (arg0_type, 0);
3919 	    }
3920 	}
3921 
3922       *p_low = low;
3923       *p_high = high;
3924       *p_in_p = in_p;
3925       return arg0;
3926 
3927     case NEGATE_EXPR:
3928       /* If flag_wrapv and ARG0_TYPE is signed, make sure
3929 	 low and high are non-NULL, then normalize will DTRT.  */
3930       if (!TYPE_UNSIGNED (arg0_type)
3931 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3932 	{
3933 	  if (low == NULL_TREE)
3934 	    low = TYPE_MIN_VALUE (arg0_type);
3935 	  if (high == NULL_TREE)
3936 	    high = TYPE_MAX_VALUE (arg0_type);
3937 	}
3938 
3939       /* (-x) IN [a,b] -> x in [-b, -a]  */
3940       n_low = range_binop (MINUS_EXPR, exp_type,
3941 			   build_int_cst (exp_type, 0),
3942 			   0, high, 1);
3943       n_high = range_binop (MINUS_EXPR, exp_type,
3944 			    build_int_cst (exp_type, 0),
3945 			    0, low, 0);
3946       if (n_high != 0 && TREE_OVERFLOW (n_high))
3947 	return NULL_TREE;
3948       goto normalize;
3949 
3950     case BIT_NOT_EXPR:
3951       /* ~ X -> -X - 1  */
3952       return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3953 			 build_int_cst (exp_type, 1));
3954 
3955     case PLUS_EXPR:
3956     case MINUS_EXPR:
3957       if (TREE_CODE (arg1) != INTEGER_CST)
3958 	return NULL_TREE;
3959 
3960       /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3961 	 move a constant to the other side.  */
3962       if (!TYPE_UNSIGNED (arg0_type)
3963 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3964 	return NULL_TREE;
3965 
3966       /* If EXP is signed, any overflow in the computation is undefined,
3967 	 so we don't worry about it so long as our computations on
3968 	 the bounds don't overflow.  For unsigned, overflow is defined
3969 	 and this is exactly the right thing.  */
3970       n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3971 			   arg0_type, low, 0, arg1, 0);
3972       n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3973 			    arg0_type, high, 1, arg1, 0);
3974       if ((n_low != 0 && TREE_OVERFLOW (n_low))
3975 	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
3976 	return NULL_TREE;
3977 
3978       if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3979 	*strict_overflow_p = true;
3980 
3981       normalize:
3982 	/* Check for an unsigned range which has wrapped around the maximum
3983 	   value thus making n_high < n_low, and normalize it.  */
3984 	if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3985 	  {
3986 	    low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3987 			       integer_one_node, 0);
3988 	    high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3989 				integer_one_node, 0);
3990 
3991 	    /* If the range is of the form +/- [ x+1, x ], we won't
3992 	       be able to normalize it.  But then, it represents the
3993 	       whole range or the empty set, so make it
3994 	       +/- [ -, - ].  */
3995 	    if (tree_int_cst_equal (n_low, low)
3996 		&& tree_int_cst_equal (n_high, high))
3997 	      low = high = 0;
3998 	    else
3999 	      in_p = ! in_p;
4000 	  }
4001 	else
4002 	  low = n_low, high = n_high;
4003 
4004 	*p_low = low;
4005 	*p_high = high;
4006 	*p_in_p = in_p;
4007 	return arg0;
4008 
4009     CASE_CONVERT:
4010     case NON_LVALUE_EXPR:
4011       if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4012 	return NULL_TREE;
4013 
4014       if (! INTEGRAL_TYPE_P (arg0_type)
4015 	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
4016 	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4017 	return NULL_TREE;
4018 
4019       n_low = low, n_high = high;
4020 
4021       if (n_low != 0)
4022 	n_low = fold_convert_loc (loc, arg0_type, n_low);
4023 
4024       if (n_high != 0)
4025 	n_high = fold_convert_loc (loc, arg0_type, n_high);
4026 
4027       /* If we're converting arg0 from an unsigned type, to exp,
4028 	 a signed type,  we will be doing the comparison as unsigned.
4029 	 The tests above have already verified that LOW and HIGH
4030 	 are both positive.
4031 
4032 	 So we have to ensure that we will handle large unsigned
4033 	 values the same way that the current signed bounds treat
4034 	 negative values.  */
4035 
4036       if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4037 	{
4038 	  tree high_positive;
4039 	  tree equiv_type;
4040 	  /* For fixed-point modes, we need to pass the saturating flag
4041 	     as the 2nd parameter.  */
4042 	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4043 	    equiv_type
4044 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4045 						TYPE_SATURATING (arg0_type));
4046 	  else
4047 	    equiv_type
4048 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4049 
4050 	  /* A range without an upper bound is, naturally, unbounded.
4051 	     Since convert would have cropped a very large value, use
4052 	     the max value for the destination type.  */
4053 	  high_positive
4054 	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4055 	      : TYPE_MAX_VALUE (arg0_type);
4056 
4057 	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4058 	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4059 					     fold_convert_loc (loc, arg0_type,
4060 							       high_positive),
4061 					     build_int_cst (arg0_type, 1));
4062 
4063 	  /* If the low bound is specified, "and" the range with the
4064 	     range for which the original unsigned value will be
4065 	     positive.  */
4066 	  if (low != 0)
4067 	    {
4068 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4069 				  1, fold_convert_loc (loc, arg0_type,
4070 						       integer_zero_node),
4071 				  high_positive))
4072 		return NULL_TREE;
4073 
4074 	      in_p = (n_in_p == in_p);
4075 	    }
4076 	  else
4077 	    {
4078 	      /* Otherwise, "or" the range with the range of the input
4079 		 that will be interpreted as negative.  */
4080 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4081 				  1, fold_convert_loc (loc, arg0_type,
4082 						       integer_zero_node),
4083 				  high_positive))
4084 		return NULL_TREE;
4085 
4086 	      in_p = (in_p != n_in_p);
4087 	    }
4088 	}
4089 
4090       *p_low = n_low;
4091       *p_high = n_high;
4092       *p_in_p = in_p;
4093       return arg0;
4094 
4095     default:
4096       return NULL_TREE;
4097     }
4098 }
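
/* Illustrative example (editorial, not from the original source): for
   CODE == PLUS_EXPR with ARG1 == 2 and an incoming range + [3, 7],
   the constant moves to the bounds, leaving + [1, 5] for ARG0.  With
   unsigned ARG0 and incoming range + [1, 4], the subtraction wraps to
   [UINT_MAX, 2], which the normalize step rewrites as
   - [3, UINT_MAX - 1].  */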
4099 
4100 /* Given EXP, a logical expression, set the range it is testing into
4101    variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
4102    actually being tested.  *PLOW and *PHIGH will be made of the same
4103    type as the returned expression.  If EXP is not a comparison, we
4104    will most likely not be returning a useful value and range.  Set
4105    *STRICT_OVERFLOW_P to true if the return value is only valid
4106    because signed overflow is undefined; otherwise, do not change
4107    *STRICT_OVERFLOW_P.  */
4108 
4109 tree
4110 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4111 	    bool *strict_overflow_p)
4112 {
4113   enum tree_code code;
4114   tree arg0, arg1 = NULL_TREE;
4115   tree exp_type, nexp;
4116   int in_p;
4117   tree low, high;
4118   location_t loc = EXPR_LOCATION (exp);
4119 
4120   /* Start with simply saying "EXP != 0" and then look at the code of EXP
4121      and see if we can refine the range.  Some of the cases below may not
4122      happen, but it doesn't seem worth worrying about this.  We keep
4123      iterating while make_range_step can refine the range, and stop
4124      when it returns NULL_TREE.  */
4125 
4126   in_p = 0;
4127   low = high = build_int_cst (TREE_TYPE (exp), 0);
4128 
4129   while (1)
4130     {
4131       code = TREE_CODE (exp);
4132       exp_type = TREE_TYPE (exp);
4133       arg0 = NULL_TREE;
4134 
4135       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4136 	{
4137 	  if (TREE_OPERAND_LENGTH (exp) > 0)
4138 	    arg0 = TREE_OPERAND (exp, 0);
4139 	  if (TREE_CODE_CLASS (code) == tcc_binary
4140 	      || TREE_CODE_CLASS (code) == tcc_comparison
4141 	      || (TREE_CODE_CLASS (code) == tcc_expression
4142 		  && TREE_OPERAND_LENGTH (exp) > 1))
4143 	    arg1 = TREE_OPERAND (exp, 1);
4144 	}
4145       if (arg0 == NULL_TREE)
4146 	break;
4147 
4148       nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4149 			      &high, &in_p, strict_overflow_p);
4150       if (nexp == NULL_TREE)
4151 	break;
4152       exp = nexp;
4153     }
4154 
4155   /* If EXP is a constant, we can evaluate whether this is true or false.  */
4156   if (TREE_CODE (exp) == INTEGER_CST)
4157     {
4158       in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4159 						 exp, 0, low, 0))
4160 		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
4161 						    exp, 1, high, 1)));
4162       low = high = 0;
4163       exp = 0;
4164     }
4165 
4166   *pin_p = in_p, *plow = low, *phigh = high;
4167   return exp;
4168 }
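
/* Illustrative example (editorial, not from the original source): for
   EXP == x > 10 with signed x, make_range starts from "EXP != 0",
   i.e. - [0, 0], and the GT_EXPR step refines this to - [-, 10]: x is
   outside the range running from the type's minimum value to 10,
   which is exactly x > 10 in the notation described above.  */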
4169 
4170 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4171    type, TYPE, return an expression to test if EXP is in (or out of, depending
4172    on IN_P) the range.  Return 0 if the test couldn't be created.  */
4173 
4174 tree
4175 build_range_check (location_t loc, tree type, tree exp, int in_p,
4176 		   tree low, tree high)
4177 {
4178   tree etype = TREE_TYPE (exp), value;
4179 
4180 #ifdef HAVE_canonicalize_funcptr_for_compare
4181   /* Disable this optimization for function pointer expressions
4182      on targets that require function pointer canonicalization.  */
4183   if (HAVE_canonicalize_funcptr_for_compare
4184       && TREE_CODE (etype) == POINTER_TYPE
4185       && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4186     return NULL_TREE;
4187 #endif
4188 
4189   if (! in_p)
4190     {
4191       value = build_range_check (loc, type, exp, 1, low, high);
4192       if (value != 0)
4193         return invert_truthvalue_loc (loc, value);
4194 
4195       return 0;
4196     }
4197 
4198   if (low == 0 && high == 0)
4199     return build_int_cst (type, 1);
4200 
4201   if (low == 0)
4202     return fold_build2_loc (loc, LE_EXPR, type, exp,
4203 			fold_convert_loc (loc, etype, high));
4204 
4205   if (high == 0)
4206     return fold_build2_loc (loc, GE_EXPR, type, exp,
4207 			fold_convert_loc (loc, etype, low));
4208 
4209   if (operand_equal_p (low, high, 0))
4210     return fold_build2_loc (loc, EQ_EXPR, type, exp,
4211 			fold_convert_loc (loc, etype, low));
4212 
4213   if (integer_zerop (low))
4214     {
4215       if (! TYPE_UNSIGNED (etype))
4216 	{
4217 	  etype = unsigned_type_for (etype);
4218 	  high = fold_convert_loc (loc, etype, high);
4219 	  exp = fold_convert_loc (loc, etype, exp);
4220 	}
4221       return build_range_check (loc, type, exp, 1, 0, high);
4222     }
4223 
4224   /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
4225   if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4226     {
4227       unsigned HOST_WIDE_INT lo;
4228       HOST_WIDE_INT hi;
4229       int prec;
4230 
4231       prec = TYPE_PRECISION (etype);
4232       if (prec <= HOST_BITS_PER_WIDE_INT)
4233 	{
4234 	  hi = 0;
4235 	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4236 	}
4237       else
4238 	{
4239 	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4240 	  lo = (unsigned HOST_WIDE_INT) -1;
4241 	}
4242 
4243       if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4244 	{
4245 	  if (TYPE_UNSIGNED (etype))
4246 	    {
4247 	      tree signed_etype = signed_type_for (etype);
4248 	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4249 		etype
4250 		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4251 	      else
4252 		etype = signed_etype;
4253 	      exp = fold_convert_loc (loc, etype, exp);
4254 	    }
4255 	  return fold_build2_loc (loc, GT_EXPR, type, exp,
4256 			      build_int_cst (etype, 0));
4257 	}
4258     }
4259 
4260   /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4261      This requires wrap-around arithmetic for the type of the expression.
4262      First make sure that arithmetic in this type is valid, then make sure
4263      that it wraps around.  */
4264   if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4265     etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4266 					    TYPE_UNSIGNED (etype));
4267 
4268   if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4269     {
4270       tree utype, minv, maxv;
4271 
4272       /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4273 	 for the type in question, as we rely on this here.  */
4274       utype = unsigned_type_for (etype);
4275       maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4276       maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4277 			  integer_one_node, 1);
4278       minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4279 
4280       if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4281 				      minv, 1, maxv, 1)))
4282 	etype = utype;
4283       else
4284 	return 0;
4285     }
4286 
4287   high = fold_convert_loc (loc, etype, high);
4288   low = fold_convert_loc (loc, etype, low);
4289   exp = fold_convert_loc (loc, etype, exp);
4290 
4291   value = const_binop (MINUS_EXPR, high, low);
4292 
4294   if (POINTER_TYPE_P (etype))
4295     {
4296       if (value != 0 && !TREE_OVERFLOW (value))
4297 	{
4298 	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4299           return build_range_check (loc, type,
4300 			     	    fold_build_pointer_plus_loc (loc, exp, low),
4301 			            1, build_int_cst (etype, 0), value);
4302 	}
4303       return 0;
4304     }
4305 
4306   if (value != 0 && !TREE_OVERFLOW (value))
4307     return build_range_check (loc, type,
4308 			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4309 			      1, build_int_cst (etype, 0), value);
4310 
4311   return 0;
4312 }
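
/* Illustrative example (editorial, not from the original source): a
   range check for x in + [2, 5] in a wrapping unsigned type is built
   via the (c >= low) && (c <= high) rewrite above as

     (unsigned) x - 2 <= 3

   a single unsigned comparison instead of two.  */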
4313 
4314 /* Return the predecessor of VAL in its type, handling the infinite case.  */
4315 
4316 static tree
4317 range_predecessor (tree val)
4318 {
4319   tree type = TREE_TYPE (val);
4320 
4321   if (INTEGRAL_TYPE_P (type)
4322       && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4323     return 0;
4324   else
4325     return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4326 }
4327 
4328 /* Return the successor of VAL in its type, handling the infinite case.  */
4329 
4330 static tree
4331 range_successor (tree val)
4332 {
4333   tree type = TREE_TYPE (val);
4334 
4335   if (INTEGRAL_TYPE_P (type)
4336       && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4337     return 0;
4338   else
4339     return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4340 }
4341 
4342 /* Given two ranges, see if we can merge them into one.  Return 1 if we
4343    can, 0 if we can't.  Set the output range into the specified parameters.  */
4344 
4345 bool
4346 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4347 	      tree high0, int in1_p, tree low1, tree high1)
4348 {
4349   int no_overlap;
4350   int subset;
4351   int temp;
4352   tree tem;
4353   int in_p;
4354   tree low, high;
4355   int lowequal = ((low0 == 0 && low1 == 0)
4356 		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4357 						low0, 0, low1, 0)));
4358   int highequal = ((high0 == 0 && high1 == 0)
4359 		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4360 						 high0, 1, high1, 1)));
4361 
4362   /* Make range 0 be the range that starts first, or ends last if they
4363      start at the same value.  Swap them if it isn't.  */
4364   if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4365 				 low0, 0, low1, 0))
4366       || (lowequal
4367 	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
4368 					high1, 1, high0, 1))))
4369     {
4370       temp = in0_p, in0_p = in1_p, in1_p = temp;
4371       tem = low0, low0 = low1, low1 = tem;
4372       tem = high0, high0 = high1, high1 = tem;
4373     }
4374 
4375   /* Now flag two cases, whether the ranges are disjoint or whether the
4376      second range is totally subsumed in the first.  Note that the tests
4377      below are simplified by the ones above.  */
4378   no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4379 					  high0, 1, low1, 0));
4380   subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4381 				      high1, 1, high0, 1));
4382 
4383   /* We now have four cases, depending on whether we are including or
4384      excluding the two ranges.  */
4385   if (in0_p && in1_p)
4386     {
4387       /* If they don't overlap, the result is false.  If the second range
4388 	 is a subset it is the result.  Otherwise, the range is from the start
4389 	 of the second to the end of the first.  */
4390       if (no_overlap)
4391 	in_p = 0, low = high = 0;
4392       else if (subset)
4393 	in_p = 1, low = low1, high = high1;
4394       else
4395 	in_p = 1, low = low1, high = high0;
4396     }
4397 
4398   else if (in0_p && ! in1_p)
4399     {
4400       /* If they don't overlap, the result is the first range.  If they are
4401 	 equal, the result is false.  If the second range is a subset of the
4402 	 first, and the ranges begin at the same place, we go from just after
4403 	 the end of the second range to the end of the first.  If the second
4404 	 range is not a subset of the first, or if it is a subset and both
4405 	 ranges end at the same place, the range starts at the start of the
4406 	 first range and ends just before the second range.
4407 	 Otherwise, we can't describe this as a single range.  */
4408       if (no_overlap)
4409 	in_p = 1, low = low0, high = high0;
4410       else if (lowequal && highequal)
4411 	in_p = 0, low = high = 0;
4412       else if (subset && lowequal)
4413 	{
4414 	  low = range_successor (high1);
4415 	  high = high0;
4416 	  in_p = 1;
4417 	  if (low == 0)
4418 	    {
4419 	      /* We are in the weird situation where high0 > high1 but
4420 		 high1 has no successor.  Punt.  */
4421 	      return 0;
4422 	    }
4423 	}
4424       else if (! subset || highequal)
4425 	{
4426 	  low = low0;
4427 	  high = range_predecessor (low1);
4428 	  in_p = 1;
4429 	  if (high == 0)
4430 	    {
4431 	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
4432 	      return 0;
4433 	    }
4434 	}
4435       else
4436 	return 0;
4437     }
4438 
4439   else if (! in0_p && in1_p)
4440     {
4441       /* If they don't overlap, the result is the second range.  If the second
4442 	 is a subset of the first, the result is false.  Otherwise,
4443 	 the range starts just after the first range and ends at the
4444 	 end of the second.  */
4445       if (no_overlap)
4446 	in_p = 1, low = low1, high = high1;
4447       else if (subset || highequal)
4448 	in_p = 0, low = high = 0;
4449       else
4450 	{
4451 	  low = range_successor (high0);
4452 	  high = high1;
4453 	  in_p = 1;
4454 	  if (low == 0)
4455 	    {
4456 	      /* high1 > high0 but high0 has no successor.  Punt.  */
4457 	      return 0;
4458 	    }
4459 	}
4460     }
4461 
4462   else
4463     {
4464       /* The case where we are excluding both ranges.  Here the complex case
4465 	 is if they don't overlap.  In that case, the only time we have a
4466 	 range is if they are adjacent.  If the second is a subset of the
4467 	 first, the result is the first.  Otherwise, the range to exclude
4468 	 starts at the beginning of the first range and ends at the end of the
4469 	 second.  */
4470       if (no_overlap)
4471 	{
4472 	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4473 					 range_successor (high0),
4474 					 1, low1, 0)))
4475 	    in_p = 0, low = low0, high = high1;
4476 	  else
4477 	    {
4478 	      /* Canonicalize - [min, x] into - [-, x].  */
4479 	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
4480 		switch (TREE_CODE (TREE_TYPE (low0)))
4481 		  {
4482 		  case ENUMERAL_TYPE:
4483 		    if (TYPE_PRECISION (TREE_TYPE (low0))
4484 			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4485 		      break;
4486 		    /* FALLTHROUGH */
4487 		  case INTEGER_TYPE:
4488 		    if (tree_int_cst_equal (low0,
4489 					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
4490 		      low0 = 0;
4491 		    break;
4492 		  case POINTER_TYPE:
4493 		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
4494 			&& integer_zerop (low0))
4495 		      low0 = 0;
4496 		    break;
4497 		  default:
4498 		    break;
4499 		  }
4500 
4501 	      /* Canonicalize - [x, max] into - [x, -].  */
4502 	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
4503 		switch (TREE_CODE (TREE_TYPE (high1)))
4504 		  {
4505 		  case ENUMERAL_TYPE:
4506 		    if (TYPE_PRECISION (TREE_TYPE (high1))
4507 			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4508 		      break;
4509 		    /* FALLTHROUGH */
4510 		  case INTEGER_TYPE:
4511 		    if (tree_int_cst_equal (high1,
4512 					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
4513 		      high1 = 0;
4514 		    break;
4515 		  case POINTER_TYPE:
4516 		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
4517 			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4518 						       high1, 1,
4519 						       integer_one_node, 1)))
4520 		      high1 = 0;
4521 		    break;
4522 		  default:
4523 		    break;
4524 		  }
4525 
4526 	      /* The ranges might be also adjacent between the maximum and
4527 	         minimum values of the given type.  For
4528 	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4529 	         return + [x + 1, y - 1].  */
4530 	      if (low0 == 0 && high1 == 0)
4531 	        {
4532 		  low = range_successor (high0);
4533 		  high = range_predecessor (low1);
4534 		  if (low == 0 || high == 0)
4535 		    return 0;
4536 
4537 		  in_p = 1;
4538 		}
4539 	      else
4540 		return 0;
4541 	    }
4542 	}
4543       else if (subset)
4544 	in_p = 0, low = low0, high = high0;
4545       else
4546 	in_p = 0, low = low0, high = high1;
4547     }
4548 
4549   *pin_p = in_p, *plow = low, *phigh = high;
4550   return 1;
4551 }
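
/* Illustrative example (editorial, not from the original source):
   merging + [2, 7] and + [5, 9] with both ranges included (the
   IN0_P && IN1_P case) overlaps without subsumption, so the result is
   the intersection + [5, 7]; merging - [2, 7] with - [8, 9], where the
   ranges are adjacent, gives the single exclusion - [2, 9].  */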
4552 
4553 
4554 /* Subroutine of fold, looking inside expressions of the form
4555    A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4556    of the COND_EXPR.  This function is being used also to optimize
4557    A op B ? C : A, by reversing the comparison first.
4558 
4559    Return a folded expression whose code is not a COND_EXPR
4560    anymore, or NULL_TREE if no folding opportunity is found.  */
4561 
4562 static tree
4563 fold_cond_expr_with_comparison (location_t loc, tree type,
4564 				tree arg0, tree arg1, tree arg2)
4565 {
4566   enum tree_code comp_code = TREE_CODE (arg0);
4567   tree arg00 = TREE_OPERAND (arg0, 0);
4568   tree arg01 = TREE_OPERAND (arg0, 1);
4569   tree arg1_type = TREE_TYPE (arg1);
4570   tree tem;
4571 
4572   STRIP_NOPS (arg1);
4573   STRIP_NOPS (arg2);
4574 
4575   /* If we have A op 0 ? A : -A, consider applying the following
4576      transformations:
4577 
4578      A == 0? A : -A    same as -A
4579      A != 0? A : -A    same as A
4580      A >= 0? A : -A    same as abs (A)
4581      A > 0?  A : -A    same as abs (A)
4582      A <= 0? A : -A    same as -abs (A)
4583      A < 0?  A : -A    same as -abs (A)
4584 
4585      None of these transformations work for modes with signed
4586      zeros.  If A is +/-0, the first two transformations will
4587      change the sign of the result (from +0 to -0, or vice
4588      versa).  The last four will fix the sign of the result,
4589      even though the original expressions could be positive or
4590      negative, depending on the sign of A.
4591 
4592      Note that all these transformations are correct if A is
4593      NaN, since the two alternatives (A and -A) are also NaNs.  */
4594   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4595       && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4596 	  ? real_zerop (arg01)
4597 	  : integer_zerop (arg01))
4598       && ((TREE_CODE (arg2) == NEGATE_EXPR
4599 	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4600 	     /* In the case that A is of the form X-Y, '-A' (arg2) may
4601 	        have already been folded to Y-X; check for that.  */
4602 	  || (TREE_CODE (arg1) == MINUS_EXPR
4603 	      && TREE_CODE (arg2) == MINUS_EXPR
4604 	      && operand_equal_p (TREE_OPERAND (arg1, 0),
4605 				  TREE_OPERAND (arg2, 1), 0)
4606 	      && operand_equal_p (TREE_OPERAND (arg1, 1),
4607 				  TREE_OPERAND (arg2, 0), 0))))
4608     switch (comp_code)
4609       {
4610       case EQ_EXPR:
4611       case UNEQ_EXPR:
4612 	tem = fold_convert_loc (loc, arg1_type, arg1);
4613 	return pedantic_non_lvalue_loc (loc,
4614 				    fold_convert_loc (loc, type,
4615 						  negate_expr (tem)));
4616       case NE_EXPR:
4617       case LTGT_EXPR:
4618 	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4619       case UNGE_EXPR:
4620       case UNGT_EXPR:
4621 	if (flag_trapping_math)
4622 	  break;
4623 	/* Fall through.  */
4624       case GE_EXPR:
4625       case GT_EXPR:
4626 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4627 	  arg1 = fold_convert_loc (loc, signed_type_for
4628 			       (TREE_TYPE (arg1)), arg1);
4629 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4630 	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4631       case UNLE_EXPR:
4632       case UNLT_EXPR:
4633 	if (flag_trapping_math)
4634 	  break;
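	/* Fall through.  */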
4635       case LE_EXPR:
4636       case LT_EXPR:
4637 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4638 	  arg1 = fold_convert_loc (loc, signed_type_for
4639 			       (TREE_TYPE (arg1)), arg1);
4640 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4641 	return negate_expr (fold_convert_loc (loc, type, tem));
4642       default:
4643 	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4644 	break;
4645       }
4646 
4647   /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
4648      A == 0 ? A : 0 is always 0 unless A is -0.  Note that
4649      both transformations are correct when A is NaN: A != 0
4650      is then true, and A == 0 is false.  */
4651 
4652   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4653       && integer_zerop (arg01) && integer_zerop (arg2))
4654     {
4655       if (comp_code == NE_EXPR)
4656 	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4657       else if (comp_code == EQ_EXPR)
4658 	return build_int_cst (type, 0);
4659     }
4660 
4661   /* Try some transformations of A op B ? A : B.
4662 
4663      A == B? A : B    same as B
4664      A != B? A : B    same as A
4665      A >= B? A : B    same as max (A, B)
4666      A > B?  A : B    same as max (B, A)
4667      A <= B? A : B    same as min (A, B)
4668      A < B?  A : B    same as min (B, A)
4669 
4670      As above, these transformations don't work in the presence
4671      of signed zeros.  For example, if A and B are zeros of
4672      opposite sign, the first two transformations will change
4673      the sign of the result.  In the last four, the original
4674      expressions give different results for (A=+0, B=-0) and
4675      (A=-0, B=+0), but the transformed expressions do not.
4676 
4677      The first two transformations are correct if either A or B
4678      is a NaN.  In the first transformation, the condition will
4679      be false, and B will indeed be chosen.  In the case of the
4680      second transformation, the condition A != B will be true,
4681      and A will be chosen.
4682 
4683      The conversions to max() and min() are not correct if B is
4684      a number and A is not.  The conditions in the original
4685      expressions will be false, so all four give B.  The min()
4686      and max() versions would give a NaN instead.  */
4687   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4688       && operand_equal_for_comparison_p (arg01, arg2, arg00)
4689       /* Avoid these transformations if the COND_EXPR may be used
4690 	 as an lvalue in the C++ front-end.  PR c++/19199.  */
4691       && (in_gimple_form
4692 	  || (strcmp (lang_hooks.name, "GNU C++") != 0
4693 	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4694 	  || ! maybe_lvalue_p (arg1)
4695 	  || ! maybe_lvalue_p (arg2)))
4696     {
4697       tree comp_op0 = arg00;
4698       tree comp_op1 = arg01;
4699       tree comp_type = TREE_TYPE (comp_op0);
4700 
4701       /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
4702       if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4703 	{
4704 	  comp_type = type;
4705 	  comp_op0 = arg1;
4706 	  comp_op1 = arg2;
4707 	}
4708 
4709       switch (comp_code)
4710 	{
4711 	case EQ_EXPR:
4712 	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4713 	case NE_EXPR:
4714 	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4715 	case LE_EXPR:
4716 	case LT_EXPR:
4717 	case UNLE_EXPR:
4718 	case UNLT_EXPR:
4719 	  /* In C++ a ?: expression can be an lvalue, so put the
4720 	     operand which will be used if they are equal first
4721 	     so that we can convert this back to the
4722 	     corresponding COND_EXPR.  */
4723 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4724 	    {
4725 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4726 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4727 	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4728 		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4729 		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
4730 				   comp_op1, comp_op0);
4731 	      return pedantic_non_lvalue_loc (loc,
4732 					  fold_convert_loc (loc, type, tem));
4733 	    }
4734 	  break;
4735 	case GE_EXPR:
4736 	case GT_EXPR:
4737 	case UNGE_EXPR:
4738 	case UNGT_EXPR:
4739 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4740 	    {
4741 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4742 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4743 	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4744 		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4745 		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
4746 				   comp_op1, comp_op0);
4747 	      return pedantic_non_lvalue_loc (loc,
4748 					  fold_convert_loc (loc, type, tem));
4749 	    }
4750 	  break;
4751 	case UNEQ_EXPR:
4752 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4753 	    return pedantic_non_lvalue_loc (loc,
4754 					fold_convert_loc (loc, type, arg2));
4755 	  break;
4756 	case LTGT_EXPR:
4757 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4758 	    return pedantic_non_lvalue_loc (loc,
4759 					fold_convert_loc (loc, type, arg1));
4760 	  break;
4761 	default:
4762 	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4763 	  break;
4764 	}
4765     }
4766 
4767   /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4768      we might still be able to simplify this.  For example,
4769      if C1 is one less or one more than C2, this might have started
4770      out as a MIN or MAX and been transformed by this function.
4771      Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */
4772 
4773   if (INTEGRAL_TYPE_P (type)
4774       && TREE_CODE (arg01) == INTEGER_CST
4775       && TREE_CODE (arg2) == INTEGER_CST)
4776     switch (comp_code)
4777       {
4778       case EQ_EXPR:
4779 	if (TREE_CODE (arg1) == INTEGER_CST)
4780 	  break;
4781 	/* We can replace A with C1 in this case.  */
4782 	arg1 = fold_convert_loc (loc, type, arg01);
4783 	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4784 
4785       case LT_EXPR:
4786 	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4787 	   MIN_EXPR, to preserve the signedness of the comparison.  */
4788 	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4789 			       OEP_ONLY_CONST)
4790 	    && operand_equal_p (arg01,
4791 				const_binop (PLUS_EXPR, arg2,
4792 					     build_int_cst (type, 1)),
4793 				OEP_ONLY_CONST))
4794 	  {
4795 	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4796 				   fold_convert_loc (loc, TREE_TYPE (arg00),
4797 						     arg2));
4798 	    return pedantic_non_lvalue_loc (loc,
4799 					    fold_convert_loc (loc, type, tem));
4800 	  }
4801 	break;
4802 
4803       case LE_EXPR:
4804 	/* If C1 is C2 - 1, this is min(A, C2), with the same care
4805 	   as above.  */
4806 	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4807 			       OEP_ONLY_CONST)
4808 	    && operand_equal_p (arg01,
4809 				const_binop (MINUS_EXPR, arg2,
4810 					     build_int_cst (type, 1)),
4811 				OEP_ONLY_CONST))
4812 	  {
4813 	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4814 				   fold_convert_loc (loc, TREE_TYPE (arg00),
4815 						     arg2));
4816 	    return pedantic_non_lvalue_loc (loc,
4817 					    fold_convert_loc (loc, type, tem));
4818 	  }
4819 	break;
4820 
4821       case GT_EXPR:
4822 	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4823 	   MAX_EXPR, to preserve the signedness of the comparison.  */
4824 	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4825 			       OEP_ONLY_CONST)
4826 	    && operand_equal_p (arg01,
4827 				const_binop (MINUS_EXPR, arg2,
4828 					     build_int_cst (type, 1)),
4829 				OEP_ONLY_CONST))
4830 	  {
4831 	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4832 				   fold_convert_loc (loc, TREE_TYPE (arg00),
4833 						     arg2));
4834 	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4835 	  }
4836 	break;
4837 
4838       case GE_EXPR:
4839 	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
4840 	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4841 			       OEP_ONLY_CONST)
4842 	    && operand_equal_p (arg01,
4843 				const_binop (PLUS_EXPR, arg2,
4844 					     build_int_cst (type, 1)),
4845 				OEP_ONLY_CONST))
4846 	  {
4847 	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4848 				   fold_convert_loc (loc, TREE_TYPE (arg00),
4849 						     arg2));
4850 	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4851 	  }
4852 	break;
4853       case NE_EXPR:
4854 	break;
4855       default:
4856 	gcc_unreachable ();
4857       }
4858 
4859   return NULL_TREE;
4860 }
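
/* [Editorial illustration -- not part of the original GCC source; the
   function names are hypothetical.]  At the source level, the constant
   cases above correspond to rewrites such as these, where C1 is one
   more (resp. one less) than C2:  */

static int
cond_to_min_example (int a)
{
  return a < 8 ? a : 7;		/* folds to MIN_EXPR <a, 7> */
}

static int
cond_to_max_example (int a)
{
  return a > 8 ? a : 9;		/* folds to MAX_EXPR <a, 9> */
}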
4861 
4862 
4863 
4864 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4865 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4866   (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4867 		false) >= 2)
4868 #endif
4869 
4870 /* EXP is some logical combination of boolean tests.  See if we can
4871    merge it into some range test.  Return the new tree if so.  */
4872 
4873 static tree
4874 fold_range_test (location_t loc, enum tree_code code, tree type,
4875 		 tree op0, tree op1)
4876 {
4877   int or_op = (code == TRUTH_ORIF_EXPR
4878 	       || code == TRUTH_OR_EXPR);
4879   int in0_p, in1_p, in_p;
4880   tree low0, low1, low, high0, high1, high;
4881   bool strict_overflow_p = false;
4882   tree tem, lhs, rhs;
4883   const char * const warnmsg = G_("assuming signed overflow does not occur "
4884 				  "when simplifying range test");
4885 
4886   if (!INTEGRAL_TYPE_P (type))
4887     return 0;
4888 
4889   lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4890   rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4891 
4892   /* If this is an OR operation, invert both sides; we will invert
4893      again at the end.  */
4894   if (or_op)
4895     in0_p = ! in0_p, in1_p = ! in1_p;
4896 
4897   /* If both expressions are the same, if we can merge the ranges, and we
4898      can build the range test, return it or its inversion.  If one of the
4899      ranges is always true or always false, consider it to be the same
4900      expression as the other.  */
4901   if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4902       && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4903 		       in1_p, low1, high1)
4904       && 0 != (tem = (build_range_check (loc, type,
4905 					 lhs != 0 ? lhs
4906 					 : rhs != 0 ? rhs : integer_zero_node,
4907 					 in_p, low, high))))
4908     {
4909       if (strict_overflow_p)
4910 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4911       return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4912     }
4913 
4914   /* On machines where the branch cost is expensive, if this is a
4915      short-circuited branch and the underlying object on both sides
4916      is the same, make a non-short-circuit operation.  */
4917   else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4918 	   && lhs != 0 && rhs != 0
4919 	   && (code == TRUTH_ANDIF_EXPR
4920 	       || code == TRUTH_ORIF_EXPR)
4921 	   && operand_equal_p (lhs, rhs, 0))
4922     {
4923       /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
4924 	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4925 	 which cases we can't do this.  */
4926       if (simple_operand_p (lhs))
4927 	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4928 			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4929 			   type, op0, op1);
4930 
4931       else if (!lang_hooks.decls.global_bindings_p ()
4932 	       && !CONTAINS_PLACEHOLDER_P (lhs))
4933 	{
4934 	  tree common = save_expr (lhs);
4935 
4936 	  if (0 != (lhs = build_range_check (loc, type, common,
4937 					     or_op ? ! in0_p : in0_p,
4938 					     low0, high0))
4939 	      && (0 != (rhs = build_range_check (loc, type, common,
4940 						 or_op ? ! in1_p : in1_p,
4941 						 low1, high1))))
4942 	    {
4943 	      if (strict_overflow_p)
4944 		fold_overflow_warning (warnmsg,
4945 				       WARN_STRICT_OVERFLOW_COMPARISON);
4946 	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4947 				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4948 				 type, lhs, rhs);
4949 	    }
4950 	}
4951     }
4952 
4953   return 0;
4954 }
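
/* [Editorial illustration -- not part of the original GCC source.]
   A hypothetical instance of the merge above: the two ordered tests
   collapse into the single unsigned range check built by
   build_range_check.  */

static int
range_test_example (unsigned char ch)
{
  /* May fold to the one test (unsigned char) (ch - '0') <= 9.  */
  return ch >= '0' && ch <= '9';
}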
4955 
4956 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4957    bit value.  Arrange things so the extra bits will be set to zero if and
4958    only if C is signed-extended to its full width.  If MASK is nonzero,
4959    only if C is sign-extended to its full width.  If MASK is nonzero,
4960 
4961 static tree
4962 unextend (tree c, int p, int unsignedp, tree mask)
4963 {
4964   tree type = TREE_TYPE (c);
4965   int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4966   tree temp;
4967 
4968   if (p == modesize || unsignedp)
4969     return c;
4970 
4971   /* We work by getting just the sign bit into the low-order bit, then
4972      into the high-order bit, then sign-extend.  We then XOR that value
4973      with C.  */
4974   temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4975   temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4976 
4977   /* We must use a signed type in order to get an arithmetic right shift.
4978      However, we must also avoid introducing accidental overflows, so that
4979      a subsequent call to integer_zerop will work.  Hence we must
4980      do the type conversion here.  At this point, the constant is either
4981      zero or one, and the conversion to a signed type can never overflow.
4982      We could get an overflow if this conversion is done anywhere else.  */
4983   if (TYPE_UNSIGNED (type))
4984     temp = fold_convert (signed_type_for (type), temp);
4985 
4986   temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4987   temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4988   if (mask != 0)
4989     temp = const_binop (BIT_AND_EXPR, temp,
4990 			fold_convert (TREE_TYPE (c), mask));
4991   /* If necessary, convert the type back to match the type of C.  */
4992   if (TYPE_UNSIGNED (type))
4993     temp = fold_convert (type, temp);
4994 
4995   return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4996 }
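
/* [Editorial sketch -- not part of the original GCC source.]  The
   shift/XOR sequence above, specialized to an 8-bit mode and a 4-bit
   field (P == 4).  It assumes two's complement and an arithmetic right
   shift of negative values, which the real code guarantees by doing
   the shifts on a signed tree type.  */

static unsigned char
unextend_example (unsigned char c)
{
  signed char sign = (c >> 3) & 1;		/* sign bit of the field */
  sign = (signed char) (sign << 7) >> 3;	/* replicate into bits 4..7 */
  /* 0xfa (sign-extended -6) -> 0x0a; 0x0a (not sign-extended) -> 0xfa.  */
  return c ^ (unsigned char) sign;
}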
4997 
4998 /* For an expression that has the form
4999      (A && B) || ~B
5000    or
5001      (A || B) && ~B,
5002    we can drop one of the inner expressions and simplify to
5003      A || ~B
5004    or
5005      A && ~B
5006    LOC is the location of the resulting expression.  OP is the inner
5007    logical operation; the left-hand side in the examples above, while CMPOP
5008    is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
5009    removing a condition that guards another, as in
5010      (A != NULL && A->...) || A == NULL
5011    which we must not transform.  If RHS_ONLY is true, only eliminate the
5012    right-most operand of the inner logical operation.  */
5013 
5014 static tree
5015 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5016 				 bool rhs_only)
5017 {
5018   tree type = TREE_TYPE (cmpop);
5019   enum tree_code code = TREE_CODE (cmpop);
5020   enum tree_code truthop_code = TREE_CODE (op);
5021   tree lhs = TREE_OPERAND (op, 0);
5022   tree rhs = TREE_OPERAND (op, 1);
5023   tree orig_lhs = lhs, orig_rhs = rhs;
5024   enum tree_code rhs_code = TREE_CODE (rhs);
5025   enum tree_code lhs_code = TREE_CODE (lhs);
5026   enum tree_code inv_code;
5027 
5028   if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5029     return NULL_TREE;
5030 
5031   if (TREE_CODE_CLASS (code) != tcc_comparison)
5032     return NULL_TREE;
5033 
5034   if (rhs_code == truthop_code)
5035     {
5036       tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5037       if (newrhs != NULL_TREE)
5038 	{
5039 	  rhs = newrhs;
5040 	  rhs_code = TREE_CODE (rhs);
5041 	}
5042     }
5043   if (lhs_code == truthop_code && !rhs_only)
5044     {
5045       tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5046       if (newlhs != NULL_TREE)
5047 	{
5048 	  lhs = newlhs;
5049 	  lhs_code = TREE_CODE (lhs);
5050 	}
5051     }
5052 
5053   inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5054   if (inv_code == rhs_code
5055       && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5056       && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5057     return lhs;
5058   if (!rhs_only && inv_code == lhs_code
5059       && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5060       && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5061     return rhs;
5062   if (rhs != orig_rhs || lhs != orig_lhs)
5063     return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5064 			    lhs, rhs);
5065   return NULL_TREE;
5066 }
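
/* [Editorial illustration -- not part of the original GCC source.]
   A hypothetical source-level instance of the (A && B) || ~B rule
   described above:  */

static int
merge_opposite_example (int p, int q)
{
  /* The inner q != 0 is redundant; this folds to p != 0 || q == 0.  */
  return (p != 0 && q != 0) || q == 0;
}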
5067 
5068 /* Find ways of folding logical expressions of LHS and RHS:
5069    Try to merge two comparisons to the same innermost item.
5070    Look for range tests like "ch >= '0' && ch <= '9'".
5071    Look for combinations of simple terms on machines with expensive branches
5072    and evaluate the RHS unconditionally.
5073 
5074    For example, if we have p->a == 2 && p->b == 4 and we can make an
5075    object large enough to span both A and B, we can do this with a comparison
5076    against the object ANDed with a mask.
5077 
5078    If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5079    operations to do this with one comparison.
5080 
5081    We check for both normal comparisons and the BIT_AND_EXPRs made by this
5082    function and the one above.
5083 
5084    CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
5085    TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5086 
5087    TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5088    two operands.
5089 
5090    We return the simplified tree or 0 if no optimization is possible.  */
5091 
5092 static tree
5093 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5094 		    tree lhs, tree rhs)
5095 {
5096   /* If this is the "or" of two comparisons, we can do something if
5097      the comparisons are NE_EXPR.  If this is the "and", we can do something
5098      if the comparisons are EQ_EXPR.  I.e.,
5099 	(a->b == 2 && a->c == 4) can become (a->new == NEW).
5100 
5101      WANTED_CODE is this operation code.  For single bit fields, we can
5102      convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5103      comparison for one-bit fields.  */
5104 
5105   enum tree_code wanted_code;
5106   enum tree_code lcode, rcode;
5107   tree ll_arg, lr_arg, rl_arg, rr_arg;
5108   tree ll_inner, lr_inner, rl_inner, rr_inner;
5109   HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5110   HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5111   HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5112   HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5113   int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5114   enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5115   enum machine_mode lnmode, rnmode;
5116   tree ll_mask, lr_mask, rl_mask, rr_mask;
5117   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5118   tree l_const, r_const;
5119   tree lntype, rntype, result;
5120   HOST_WIDE_INT first_bit, end_bit;
5121   int volatilep;
5122 
5123   /* Start by getting the comparison codes.  Fail if anything is volatile.
5124      If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5125      it were surrounded with a NE_EXPR.  */
5126 
5127   if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5128     return 0;
5129 
5130   lcode = TREE_CODE (lhs);
5131   rcode = TREE_CODE (rhs);
5132 
5133   if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5134     {
5135       lhs = build2 (NE_EXPR, truth_type, lhs,
5136 		    build_int_cst (TREE_TYPE (lhs), 0));
5137       lcode = NE_EXPR;
5138     }
5139 
5140   if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5141     {
5142       rhs = build2 (NE_EXPR, truth_type, rhs,
5143 		    build_int_cst (TREE_TYPE (rhs), 0));
5144       rcode = NE_EXPR;
5145     }
5146 
5147   if (TREE_CODE_CLASS (lcode) != tcc_comparison
5148       || TREE_CODE_CLASS (rcode) != tcc_comparison)
5149     return 0;
5150 
5151   ll_arg = TREE_OPERAND (lhs, 0);
5152   lr_arg = TREE_OPERAND (lhs, 1);
5153   rl_arg = TREE_OPERAND (rhs, 0);
5154   rr_arg = TREE_OPERAND (rhs, 1);
5155 
5156   /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
5157   if (simple_operand_p (ll_arg)
5158       && simple_operand_p (lr_arg))
5159     {
5160       if (operand_equal_p (ll_arg, rl_arg, 0)
5161           && operand_equal_p (lr_arg, rr_arg, 0))
5162 	{
5163           result = combine_comparisons (loc, code, lcode, rcode,
5164 					truth_type, ll_arg, lr_arg);
5165 	  if (result)
5166 	    return result;
5167 	}
5168       else if (operand_equal_p (ll_arg, rr_arg, 0)
5169                && operand_equal_p (lr_arg, rl_arg, 0))
5170 	{
5171           result = combine_comparisons (loc, code, lcode,
5172 					swap_tree_comparison (rcode),
5173 					truth_type, ll_arg, lr_arg);
5174 	  if (result)
5175 	    return result;
5176 	}
5177     }
5178 
5179   code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5180 	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5181 
5182   /* If the RHS can be evaluated unconditionally and its operands are
5183      simple, it wins to evaluate the RHS unconditionally on machines
5184      with expensive branches.  In this case, this isn't a comparison
5185      that can be merged.  */
5186 
5187   if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5188 		   false) >= 2
5189       && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5190       && simple_operand_p (rl_arg)
5191       && simple_operand_p (rr_arg))
5192     {
5193       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
5194       if (code == TRUTH_OR_EXPR
5195 	  && lcode == NE_EXPR && integer_zerop (lr_arg)
5196 	  && rcode == NE_EXPR && integer_zerop (rr_arg)
5197 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5198 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5199 	return build2_loc (loc, NE_EXPR, truth_type,
5200 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5201 				   ll_arg, rl_arg),
5202 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5203 
5204       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
5205       if (code == TRUTH_AND_EXPR
5206 	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
5207 	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
5208 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5209 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5210 	return build2_loc (loc, EQ_EXPR, truth_type,
5211 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5212 				   ll_arg, rl_arg),
5213 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5214     }
5215 
5216   /* See if the comparisons can be merged.  Then get all the parameters for
5217      each side.  */
5218 
5219   if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5220       || (rcode != EQ_EXPR && rcode != NE_EXPR))
5221     return 0;
5222 
5223   volatilep = 0;
5224   ll_inner = decode_field_reference (loc, ll_arg,
5225 				     &ll_bitsize, &ll_bitpos, &ll_mode,
5226 				     &ll_unsignedp, &volatilep, &ll_mask,
5227 				     &ll_and_mask);
5228   lr_inner = decode_field_reference (loc, lr_arg,
5229 				     &lr_bitsize, &lr_bitpos, &lr_mode,
5230 				     &lr_unsignedp, &volatilep, &lr_mask,
5231 				     &lr_and_mask);
5232   rl_inner = decode_field_reference (loc, rl_arg,
5233 				     &rl_bitsize, &rl_bitpos, &rl_mode,
5234 				     &rl_unsignedp, &volatilep, &rl_mask,
5235 				     &rl_and_mask);
5236   rr_inner = decode_field_reference (loc, rr_arg,
5237 				     &rr_bitsize, &rr_bitpos, &rr_mode,
5238 				     &rr_unsignedp, &volatilep, &rr_mask,
5239 				     &rr_and_mask);
5240 
5241   /* The inner operation on the lhs of each comparison must be the same
5242      if we are to be able to do anything.
5243      Then see if we have constants.  If not, the same must be true for
5244      the rhs's.  */
5245   if (volatilep || ll_inner == 0 || rl_inner == 0
5246       || ! operand_equal_p (ll_inner, rl_inner, 0))
5247     return 0;
5248 
5249   if (TREE_CODE (lr_arg) == INTEGER_CST
5250       && TREE_CODE (rr_arg) == INTEGER_CST)
5251     l_const = lr_arg, r_const = rr_arg;
5252   else if (lr_inner == 0 || rr_inner == 0
5253 	   || ! operand_equal_p (lr_inner, rr_inner, 0))
5254     return 0;
5255   else
5256     l_const = r_const = 0;
5257 
5258   /* If either comparison code is not correct for our logical operation,
5259      fail.  However, we can convert a one-bit comparison against zero into
5260      the opposite comparison against that bit being set in the field.  */
5261 
5262   wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5263   if (lcode != wanted_code)
5264     {
5265       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5266 	{
5267 	  /* Make the left operand unsigned, since we are only interested
5268 	     in the value of one bit.  Otherwise we are doing the wrong
5269 	     thing below.  */
5270 	  ll_unsignedp = 1;
5271 	  l_const = ll_mask;
5272 	}
5273       else
5274 	return 0;
5275     }
5276 
5277   /* This is analogous to the code for l_const above.  */
5278   if (rcode != wanted_code)
5279     {
5280       if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5281 	{
5282 	  rl_unsignedp = 1;
5283 	  r_const = rl_mask;
5284 	}
5285       else
5286 	return 0;
5287     }
5288 
5289   /* See if we can find a mode that contains both fields being compared on
5290      the left.  If we can't, fail.  Otherwise, update all constants and masks
5291      to be relative to a field of that size.  */
5292   first_bit = MIN (ll_bitpos, rl_bitpos);
5293   end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5294   lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5295 			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5296 			  volatilep);
5297   if (lnmode == VOIDmode)
5298     return 0;
5299 
5300   lnbitsize = GET_MODE_BITSIZE (lnmode);
5301   lnbitpos = first_bit & ~ (lnbitsize - 1);
5302   lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5303   xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5304 
5305   if (BYTES_BIG_ENDIAN)
5306     {
5307       xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5308       xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5309     }
5310 
5311   ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5312 			 size_int (xll_bitpos));
5313   rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5314 			 size_int (xrl_bitpos));
5315 
5316   if (l_const)
5317     {
5318       l_const = fold_convert_loc (loc, lntype, l_const);
5319       l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5320       l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5321       if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5322 					fold_build1_loc (loc, BIT_NOT_EXPR,
5323 						     lntype, ll_mask))))
5324 	{
5325 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5326 
5327 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5328 	}
5329     }
5330   if (r_const)
5331     {
5332       r_const = fold_convert_loc (loc, lntype, r_const);
5333       r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5334       r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5335       if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5336 					fold_build1_loc (loc, BIT_NOT_EXPR,
5337 						     lntype, rl_mask))))
5338 	{
5339 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5340 
5341 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5342 	}
5343     }
5344 
5345   /* If the right sides are not constant, do the same for them.  Also,
5346      disallow this optimization if a size or signedness mismatch occurs
5347      between the left and right sides.  */
5348   if (l_const == 0)
5349     {
5350       if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5351 	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5352 	  /* Make sure the two fields on the right
5353 	     correspond to the left without being swapped.  */
5354 	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5355 	return 0;
5356 
5357       first_bit = MIN (lr_bitpos, rr_bitpos);
5358       end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5359       rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5360 			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5361 			      volatilep);
5362       if (rnmode == VOIDmode)
5363 	return 0;
5364 
5365       rnbitsize = GET_MODE_BITSIZE (rnmode);
5366       rnbitpos = first_bit & ~ (rnbitsize - 1);
5367       rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5368       xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5369 
5370       if (BYTES_BIG_ENDIAN)
5371 	{
5372 	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5373 	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5374 	}
5375 
5376       lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5377 							    rntype, lr_mask),
5378 			     size_int (xlr_bitpos));
5379       rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5380 							    rntype, rr_mask),
5381 			     size_int (xrr_bitpos));
5382 
5383       /* Make a mask that corresponds to both fields being compared.
5384 	 Do this for both items being compared.  If the operands are the
5385 	 same size and the bits being compared are in the same position
5386 	 then we can do this by masking both and comparing the masked
5387 	 results.  */
5388       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5389       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5390       if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5391 	{
5392 	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5393 				    ll_unsignedp || rl_unsignedp);
5394 	  if (! all_ones_mask_p (ll_mask, lnbitsize))
5395 	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5396 
5397 	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5398 				    lr_unsignedp || rr_unsignedp);
5399 	  if (! all_ones_mask_p (lr_mask, rnbitsize))
5400 	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5401 
5402 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5403 	}
5404 
5405       /* There is still another way we can do something:  If both pairs of
5406 	 fields being compared are adjacent, we may be able to make a wider
5407 	 field containing them both.
5408 
5409 	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
5410 	 the mask must be shifted to account for the shift done by
5411 	 make_bit_field_ref.  */
5412       if ((ll_bitsize + ll_bitpos == rl_bitpos
5413 	   && lr_bitsize + lr_bitpos == rr_bitpos)
5414 	  || (ll_bitpos == rl_bitpos + rl_bitsize
5415 	      && lr_bitpos == rr_bitpos + rr_bitsize))
5416 	{
5417 	  tree type;
5418 
5419 	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
5420 				    ll_bitsize + rl_bitsize,
5421 				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5422 	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
5423 				    lr_bitsize + rr_bitsize,
5424 				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5425 
5426 	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5427 				 size_int (MIN (xll_bitpos, xrl_bitpos)));
5428 	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5429 				 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5430 
5431 	  /* Convert to the smaller type before masking out unwanted bits.  */
5432 	  type = lntype;
5433 	  if (lntype != rntype)
5434 	    {
5435 	      if (lnbitsize > rnbitsize)
5436 		{
5437 		  lhs = fold_convert_loc (loc, rntype, lhs);
5438 		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5439 		  type = rntype;
5440 		}
5441 	      else if (lnbitsize < rnbitsize)
5442 		{
5443 		  rhs = fold_convert_loc (loc, lntype, rhs);
5444 		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5445 		  type = lntype;
5446 		}
5447 	    }
5448 
5449 	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5450 	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5451 
5452 	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5453 	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5454 
5455 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5456 	}
5457 
5458       return 0;
5459     }
5460 
5461   /* Handle the case of comparisons with constants.  If there is something in
5462      common between the masks, those bits of the constants must be the same.
5463      If not, the condition is always false.  Test for this to avoid generating
5464      incorrect code below.  */
5465   result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5466   if (! integer_zerop (result)
5467       && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5468 			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5469     {
5470       if (wanted_code == NE_EXPR)
5471 	{
5472 	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
5473 	  return constant_boolean_node (true, truth_type);
5474 	}
5475       else
5476 	{
5477 	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5478 	  return constant_boolean_node (false, truth_type);
5479 	}
5480     }
5481 
5482   /* Construct the expression we will return.  First get the component
5483      reference we will make.  Unless the mask is all ones the width of
5484      that field, perform the mask operation.  Then compare with the
5485      merged constant.  */
5486   result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5487 			       ll_unsignedp || rl_unsignedp);
5488 
5489   ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5490   if (! all_ones_mask_p (ll_mask, lnbitsize))
5491     result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5492 
5493   return build2_loc (loc, wanted_code, truth_type, result,
5494 		     const_binop (BIT_IOR_EXPR, l_const, r_const));
5495 }
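
/* [Editorial illustration -- not part of the original GCC source; the
   struct layout is hypothetical.]  A sketch of the bit-field merge
   performed above: on a typical little-endian ABI the two 4-bit
   comparisons become a single load, mask and compare of the byte
   containing both fields (here against the merged constant 0x42).  */

struct andor_example_s
{
  unsigned int a : 4;
  unsigned int b : 4;
};

static int
andor_example (const struct andor_example_s *p)
{
  return p->a == 2 && p->b == 4;
}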
5496 
5497 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5498    constant.  */
5499 
5500 static tree
5501 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5502 			    tree op0, tree op1)
5503 {
5504   tree arg0 = op0;
5505   enum tree_code op_code;
5506   tree comp_const;
5507   tree minmax_const;
5508   int consts_equal, consts_lt;
5509   tree inner;
5510 
5511   STRIP_SIGN_NOPS (arg0);
5512 
5513   op_code = TREE_CODE (arg0);
5514   minmax_const = TREE_OPERAND (arg0, 1);
5515   comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5516   consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5517   consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5518   inner = TREE_OPERAND (arg0, 0);
5519 
5520   /* If something does not permit us to optimize, return the original tree.  */
5521   if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5522       || TREE_CODE (comp_const) != INTEGER_CST
5523       || TREE_OVERFLOW (comp_const)
5524       || TREE_CODE (minmax_const) != INTEGER_CST
5525       || TREE_OVERFLOW (minmax_const))
5526     return NULL_TREE;
5527 
5528   /* Now handle all the various comparison codes.  We only handle EQ_EXPR
5529      and GT_EXPR, doing the rest with recursive calls using logical
5530      simplifications.  */
5531   switch (code)
5532     {
5533     case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
5534       {
5535 	tree tem
5536 	  = optimize_minmax_comparison (loc,
5537 					invert_tree_comparison (code, false),
5538 					type, op0, op1);
5539 	if (tem)
5540 	  return invert_truthvalue_loc (loc, tem);
5541 	return NULL_TREE;
5542       }
5543 
5544     case GE_EXPR:
5545       return
5546 	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5547 		     optimize_minmax_comparison
5548 		     (loc, EQ_EXPR, type, arg0, comp_const),
5549 		     optimize_minmax_comparison
5550 		     (loc, GT_EXPR, type, arg0, comp_const));
5551 
5552     case EQ_EXPR:
5553       if (op_code == MAX_EXPR && consts_equal)
5554 	/* MAX (X, 0) == 0  ->  X <= 0  */
5555 	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5556 
5557       else if (op_code == MAX_EXPR && consts_lt)
5558 	/* MAX (X, 0) == 5  ->  X == 5   */
5559 	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5560 
5561       else if (op_code == MAX_EXPR)
5562 	/* MAX (X, 0) == -1  ->  false  */
5563 	return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5564 
5565       else if (consts_equal)
5566 	/* MIN (X, 0) == 0  ->  X >= 0  */
5567 	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5568 
5569       else if (consts_lt)
5570 	/* MIN (X, 0) == 5  ->  false  */
5571 	return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5572 
5573       else
5574 	/* MIN (X, 0) == -1  ->  X == -1  */
5575 	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5576 
5577     case GT_EXPR:
5578       if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5579 	/* MAX (X, 0) > 0  ->  X > 0
5580 	   MAX (X, 0) > 5  ->  X > 5  */
5581 	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5582 
5583       else if (op_code == MAX_EXPR)
5584 	/* MAX (X, 0) > -1  ->  true  */
5585 	return omit_one_operand_loc (loc, type, integer_one_node, inner);
5586 
5587       else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5588 	/* MIN (X, 0) > 0  ->  false
5589 	   MIN (X, 0) > 5  ->  false  */
5590 	return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5591 
5592       else
5593 	/* MIN (X, 0) > -1  ->  X > -1  */
5594 	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5595 
5596     default:
5597       return NULL_TREE;
5598     }
5599 }
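
/* [Editorial illustration -- not part of the original GCC source.]
   The EQ_EXPR case above, written at the source level; the conditional
   is first recognized as a MAX_EXPR by the COND_EXPR folder:  */

static int
minmax_compare_example (int x)
{
  return (x > 0 ? x : 0) == 0;	/* MAX <x, 0> == 0 folds to x <= 0 */
}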
5600 
5601 /* T is an integer expression that is being multiplied or divided by, or
5602    taken modulo, a constant C (CODE says which, and what kind of divide
5603    or modulus).  See if we can eliminate that operation by folding it with
5604    other operations already in T.  WIDE_TYPE, if non-null, is a type that
5605    should be used for the computation if wider than our type.
5606 
5607    For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5608    (X * 2) + (Y * 4).  We must, however, be assured that either the original
5609    expression would not overflow or that overflow is undefined for the type
5610    in the language in question.
5611 
5612    If we return a non-null expression, it is an equivalent form of the
5613    original computation, but need not be in the original type.
5614 
5615    We set *STRICT_OVERFLOW_P to true if the return value depends on
5616    signed overflow being undefined.  Otherwise we do not change
5617    *STRICT_OVERFLOW_P.  */
5618 
5619 static tree
5620 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5621 		bool *strict_overflow_p)
5622 {
5623   /* To avoid exponential search depth, refuse to allow recursion past
5624      three levels.  Beyond that (1) it's highly unlikely that we'll find
5625      something interesting and (2) we've probably processed it before
5626      when we built the inner expression.  */
5627 
5628   static int depth;
5629   tree ret;
5630 
5631   if (depth > 3)
5632     return NULL;
5633 
5634   depth++;
5635   ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5636   depth--;
5637 
5638   return ret;
5639 }
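
/* [Editorial illustration -- not part of the original GCC source.]
   The running example from the comment above, as hypothetical user
   code.  The rewrite is only valid because signed overflow is
   undefined, which is what the *STRICT_OVERFLOW_P plumbing records.  */

static int
extract_muldiv_example (int x, int y)
{
  return (x * 8 + y * 16) / 4;	/* may fold to x * 2 + y * 4 */
}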
5640 
5641 static tree
5642 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5643 		  bool *strict_overflow_p)
5644 {
5645   tree type = TREE_TYPE (t);
5646   enum tree_code tcode = TREE_CODE (t);
5647   tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5648 				   > GET_MODE_SIZE (TYPE_MODE (type)))
5649 		? wide_type : type);
5650   tree t1, t2;
5651   int same_p = tcode == code;
5652   tree op0 = NULL_TREE, op1 = NULL_TREE;
5653   bool sub_strict_overflow_p;
5654 
5655   /* Don't deal with constants of zero here; they confuse the code below.  */
5656   if (integer_zerop (c))
5657     return NULL_TREE;
5658 
5659   if (TREE_CODE_CLASS (tcode) == tcc_unary)
5660     op0 = TREE_OPERAND (t, 0);
5661 
5662   if (TREE_CODE_CLASS (tcode) == tcc_binary)
5663     op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5664 
5665   /* Note that we need not handle conditional operations here since fold
5666      already handles those cases.  So just do arithmetic here.  */
5667   switch (tcode)
5668     {
5669     case INTEGER_CST:
5670       /* For a constant, we can always simplify if we are a multiply
5671 	 or (for divide and modulus) if it is a multiple of our constant.  */
5672       if (code == MULT_EXPR
5673 	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5674 	return const_binop (code, fold_convert (ctype, t),
5675 			    fold_convert (ctype, c));
5676       break;
5677 
5678     CASE_CONVERT: case NON_LVALUE_EXPR:
5679       /* If op0 is an expression ...  */
5680       if ((COMPARISON_CLASS_P (op0)
5681 	   || UNARY_CLASS_P (op0)
5682 	   || BINARY_CLASS_P (op0)
5683 	   || VL_EXP_CLASS_P (op0)
5684 	   || EXPRESSION_CLASS_P (op0))
5685 	  /* ... and has wrapping overflow, and its type is smaller
5686 	     than ctype, then we cannot pass through as widening.  */
5687 	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5688 	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5689 		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5690 	       && (TYPE_PRECISION (ctype)
5691 	           > TYPE_PRECISION (TREE_TYPE (op0))))
5692 	      /* ... or this is a truncation (t is narrower than op0),
5693 		 then we cannot pass through this narrowing.  */
5694 	      || (TYPE_PRECISION (type)
5695 		  < TYPE_PRECISION (TREE_TYPE (op0)))
5696 	      /* ... or signedness changes for division or modulus,
5697 		 then we cannot pass through this conversion.  */
5698 	      || (code != MULT_EXPR
5699 		  && (TYPE_UNSIGNED (ctype)
5700 		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
5701 	      /* ... or has undefined overflow while the converted to
5702 		 type has not, we cannot do the operation in the inner type
5703 		 as that would introduce undefined overflow.  */
5704 	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5705 		  && !TYPE_OVERFLOW_UNDEFINED (type))))
5706 	break;
5707 
5708       /* Pass the constant down and see if we can make a simplification.  If
5709 	 we can, replace this expression with the inner simplification for
5710 	 possible later conversion to our or some other type.  */
5711       if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5712 	  && TREE_CODE (t2) == INTEGER_CST
5713 	  && !TREE_OVERFLOW (t2)
5714 	  && (0 != (t1 = extract_muldiv (op0, t2, code,
5715 					 code == MULT_EXPR
5716 					 ? ctype : NULL_TREE,
5717 					 strict_overflow_p))))
5718 	return t1;
5719       break;
5720 
5721     case ABS_EXPR:
5722       /* If widening the type changes it from signed to unsigned, then we
5723          must avoid building ABS_EXPR itself as unsigned.  */
5724       if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5725         {
5726           tree cstype = (*signed_type_for) (ctype);
5727           if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5728 	      != 0)
5729             {
5730               t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5731               return fold_convert (ctype, t1);
5732             }
5733           break;
5734         }
5735       /* If the constant is negative, we cannot simplify this.  */
5736       if (tree_int_cst_sgn (c) == -1)
5737         break;
5738       /* FALLTHROUGH */
5739     case NEGATE_EXPR:
5740       /* For division and modulus, type can't be unsigned, as e.g.
5741 	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5742 	 For signed types, even with wrapping overflow, this is fine.  */
5743       if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5744 	break;
5745       if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5746 	  != 0)
5747 	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5748       break;
5749 
5750     case MIN_EXPR:  case MAX_EXPR:
5751       /* If widening the type changes the signedness, then we can't perform
5752 	 this optimization as that changes the result.  */
5753       if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5754 	break;
5755 
5756       /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
5757       sub_strict_overflow_p = false;
5758       if ((t1 = extract_muldiv (op0, c, code, wide_type,
5759 				&sub_strict_overflow_p)) != 0
5760 	  && (t2 = extract_muldiv (op1, c, code, wide_type,
5761 				   &sub_strict_overflow_p)) != 0)
5762 	{
5763 	  if (tree_int_cst_sgn (c) < 0)
5764 	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5765 	  if (sub_strict_overflow_p)
5766 	    *strict_overflow_p = true;
5767 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5768 			      fold_convert (ctype, t2));
5769 	}
5770       break;
5771 
5772     case LSHIFT_EXPR:  case RSHIFT_EXPR:
5773       /* If the second operand is constant, this is a multiplication
5774 	 or floor division, by a power of two, so we can treat it that
5775 	 way unless the multiplier or divisor overflows.  Signed
5776 	 left-shift overflow is implementation-defined rather than
5777 	 undefined in C90, so do not convert signed left shift into
5778 	 multiplication.  */
5779       if (TREE_CODE (op1) == INTEGER_CST
5780 	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5781 	  /* const_binop may not detect overflow correctly,
5782 	     so check for it explicitly here.  */
5783 	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5784 	  && TREE_INT_CST_HIGH (op1) == 0
5785 	  && 0 != (t1 = fold_convert (ctype,
5786 				      const_binop (LSHIFT_EXPR,
5787 						   size_one_node,
5788 						   op1)))
5789 	  && !TREE_OVERFLOW (t1))
5790 	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5791 				       ? MULT_EXPR : FLOOR_DIV_EXPR,
5792 				       ctype,
5793 				       fold_convert (ctype, op0),
5794 				       t1),
5795 			       c, code, wide_type, strict_overflow_p);
5796       break;
5797 
5798     case PLUS_EXPR:  case MINUS_EXPR:
5799       /* See if we can eliminate the operation on both sides.  If we can, we
5800 	 can return a new PLUS or MINUS.  If we can't, the only remaining
5801 	 cases where we can do anything are if the second operand is a
5802 	 constant.  */
5803       sub_strict_overflow_p = false;
5804       t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5805       t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5806       if (t1 != 0 && t2 != 0
5807 	  && (code == MULT_EXPR
5808 	      /* If not multiplication, we can only do this if both operands
5809 		 are divisible by c.  */
5810 	      || (multiple_of_p (ctype, op0, c)
5811 	          && multiple_of_p (ctype, op1, c))))
5812 	{
5813 	  if (sub_strict_overflow_p)
5814 	    *strict_overflow_p = true;
5815 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5816 			      fold_convert (ctype, t2));
5817 	}
5818 
5819       /* If this was a subtraction, negate OP1 and set it to be an addition.
5820 	 This simplifies the logic below.  */
5821       if (tcode == MINUS_EXPR)
5822 	{
5823 	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
5824 	  /* If OP1 was not easily negatable, the constant may be OP0.  */
5825 	  if (TREE_CODE (op0) == INTEGER_CST)
5826 	    {
5827 	      tree tem = op0;
5828 	      op0 = op1;
5829 	      op1 = tem;
5830 	      tem = t1;
5831 	      t1 = t2;
5832 	      t2 = tem;
5833 	    }
5834 	}
5835 
5836       if (TREE_CODE (op1) != INTEGER_CST)
5837 	break;
5838 
5839       /* If either OP1 or C are negative, this optimization is not safe for
5840 	 some of the division and remainder types while for others we need
5841 	 to change the code.  */
5842       if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5843 	{
5844 	  if (code == CEIL_DIV_EXPR)
5845 	    code = FLOOR_DIV_EXPR;
5846 	  else if (code == FLOOR_DIV_EXPR)
5847 	    code = CEIL_DIV_EXPR;
5848 	  else if (code != MULT_EXPR
5849 		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5850 	    break;
5851 	}
5852 
5853       /* If it's a multiply, or a division/modulus operation on a multiple
5854          of our constant, do the operation and verify it doesn't overflow.  */
5855       if (code == MULT_EXPR
5856 	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5857 	{
5858 	  op1 = const_binop (code, fold_convert (ctype, op1),
5859 			     fold_convert (ctype, c));
5860 	  /* We allow the constant to overflow with wrapping semantics.  */
5861 	  if (op1 == 0
5862 	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5863 	    break;
5864 	}
5865       else
5866 	break;
5867 
5868       /* If we have an unsigned type that is not a sizetype, we cannot widen
5869 	 the operation since it will change the result if the original
5870 	 computation overflowed.  */
5871       if (TYPE_UNSIGNED (ctype)
5872 	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5873 	  && ctype != type)
5874 	break;
5875 
5876       /* If we were able to eliminate our operation from the first side,
5877 	 apply our operation to the second side and reform the PLUS.  */
5878       if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5879 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5880 
5881       /* The last case is if we are a multiply.  In that case, we can
5882 	 apply the distributive law to commute the multiply and addition
5883 	 if the multiplication of the constants doesn't overflow.  */
5884       if (code == MULT_EXPR)
5885 	return fold_build2 (tcode, ctype,
5886 			    fold_build2 (code, ctype,
5887 					 fold_convert (ctype, op0),
5888 					 fold_convert (ctype, c)),
5889 			    op1);
5890 
5891       break;
5892 
5893     case MULT_EXPR:
5894       /* We have a special case here if we are doing something like
5895 	 (C * 8) % 4 since we know that's zero.  */
5896       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5897 	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5898 	  /* If the multiplication can overflow we cannot optimize this.
5899 	     ???  Until we can properly mark individual operations as
5900 	     not overflowing we need to treat sizetype specially here, as
5901 	     stor-layout relies on this optimization to make
5902 	     DECL_FIELD_BIT_OFFSET always a constant.  */
5903 	  && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5904 	      || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5905 		  && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5906 	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5907 	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5908 	{
5909 	  *strict_overflow_p = true;
5910 	  return omit_one_operand (type, integer_zero_node, op0);
5911 	}
5912 
5913       /* ... fall through ...  */
5914 
5915     case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
5916     case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
5917       /* If we can extract our operation from the LHS, do so and return a
5918 	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
5919 	 do something only if the second operand is a constant.  */
5920       if (same_p
5921 	  && (t1 = extract_muldiv (op0, c, code, wide_type,
5922 				   strict_overflow_p)) != 0)
5923 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5924 			    fold_convert (ctype, op1));
5925       else if (tcode == MULT_EXPR && code == MULT_EXPR
5926 	       && (t1 = extract_muldiv (op1, c, code, wide_type,
5927 					strict_overflow_p)) != 0)
5928 	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5929 			    fold_convert (ctype, t1));
5930       else if (TREE_CODE (op1) != INTEGER_CST)
5931 	return 0;
5932 
5933       /* If these are the same operation types, we can associate them
5934 	 assuming no overflow.  */
5935       if (tcode == code)
5936 	{
5937 	  double_int mul;
5938 	  int overflow_p;
5939 	  mul = double_int_mul_with_sign
5940 	          (double_int_ext
5941 		     (tree_to_double_int (op1),
5942 		      TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5943 		   double_int_ext
5944 		     (tree_to_double_int (c),
5945 		      TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5946 		   false, &overflow_p);
5947 	  overflow_p = (((!TYPE_UNSIGNED (ctype)
5948 			  || (TREE_CODE (ctype) == INTEGER_TYPE
5949 			      && TYPE_IS_SIZETYPE (ctype)))
5950 			 && overflow_p)
5951 			| TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5952 	  if (!double_int_fits_to_tree_p (ctype, mul)
5953 	      && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
5954 		  || !TYPE_UNSIGNED (ctype)
5955 		  || (TREE_CODE (ctype) == INTEGER_TYPE
5956 		      && TYPE_IS_SIZETYPE (ctype))))
5957 	    overflow_p = 1;
5958 	  if (!overflow_p)
5959 	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5960 				double_int_to_tree (ctype, mul));
5961 	}
5962 
5963       /* If these operations "cancel" each other, we have the main
5964 	 optimizations of this pass, which occur when either constant is a
5965 	 multiple of the other, in which case we replace this with either an
5966 	 operation of CODE or TCODE.
5967 
5968 	 If we have an unsigned type that is not a sizetype, we cannot do
5969 	 this since it will change the result if the original computation
5970 	 overflowed.  */
5971       if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5972 	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5973 	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5974 	      || (tcode == MULT_EXPR
5975 		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5976 		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5977 		  && code != MULT_EXPR)))
5978 	{
5979 	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5980 	    {
5981 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
5982 		*strict_overflow_p = true;
5983 	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5984 				  fold_convert (ctype,
5985 						const_binop (TRUNC_DIV_EXPR,
5986 							     op1, c)));
5987 	    }
5988 	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5989 	    {
5990 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
5991 		*strict_overflow_p = true;
5992 	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
5993 				  fold_convert (ctype,
5994 						const_binop (TRUNC_DIV_EXPR,
5995 							     c, op1)));
5996 	    }
5997 	}
5998       break;
5999 
6000     default:
6001       break;
6002     }
6003 
6004   return 0;
6005 }
6006 
6007 /* Return a node which has the indicated constant VALUE (either 0 or
6008    1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6009    and is of the indicated TYPE.  */
6010 
6011 tree
6012 constant_boolean_node (bool value, tree type)
6013 {
6014   if (type == integer_type_node)
6015     return value ? integer_one_node : integer_zero_node;
6016   else if (type == boolean_type_node)
6017     return value ? boolean_true_node : boolean_false_node;
6018   else if (TREE_CODE (type) == VECTOR_TYPE)
6019     return build_vector_from_val (type,
6020 				  build_int_cst (TREE_TYPE (type),
6021 						 value ? -1 : 0));
6022   else
6023     return fold_convert (type, value ? integer_one_node : integer_zero_node);
6024 }
6025 
6026 
6027 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6028    Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6029    Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6030    expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
6031    COND is the first argument to CODE; otherwise (as in the example
6032    given here), it is the second argument.  TYPE is the type of the
6033    original expression.  Return NULL_TREE if no simplification is
6034    possible.  */
6035 
6036 static tree
6037 fold_binary_op_with_conditional_arg (location_t loc,
6038 				     enum tree_code code,
6039 				     tree type, tree op0, tree op1,
6040 				     tree cond, tree arg, int cond_first_p)
6041 {
6042   tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6043   tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6044   tree test, true_value, false_value;
6045   tree lhs = NULL_TREE;
6046   tree rhs = NULL_TREE;
6047 
6048   if (TREE_CODE (cond) == COND_EXPR)
6049     {
6050       test = TREE_OPERAND (cond, 0);
6051       true_value = TREE_OPERAND (cond, 1);
6052       false_value = TREE_OPERAND (cond, 2);
6053       /* If this operand throws an exception, then it does not make
6054 	 sense to try to perform a logical or arithmetic operation
6055 	 involving it.  */
6056       if (VOID_TYPE_P (TREE_TYPE (true_value)))
6057 	lhs = true_value;
6058       if (VOID_TYPE_P (TREE_TYPE (false_value)))
6059 	rhs = false_value;
6060     }
6061   else
6062     {
6063       tree testtype = TREE_TYPE (cond);
6064       test = cond;
6065       true_value = constant_boolean_node (true, testtype);
6066       false_value = constant_boolean_node (false, testtype);
6067     }
6068 
6069   /* This transformation is only worthwhile if we don't have to wrap ARG
6070      in a SAVE_EXPR and the operation can be simplified without recursing
6071      on at least one of the branches once it's pushed inside the COND_EXPR.  */
6072   if (!TREE_CONSTANT (arg)
6073       && (TREE_SIDE_EFFECTS (arg)
6074 	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6075 	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6076     return NULL_TREE;
6077 
6078   arg = fold_convert_loc (loc, arg_type, arg);
6079   if (lhs == 0)
6080     {
6081       true_value = fold_convert_loc (loc, cond_type, true_value);
6082       if (cond_first_p)
6083 	lhs = fold_build2_loc (loc, code, type, true_value, arg);
6084       else
6085 	lhs = fold_build2_loc (loc, code, type, arg, true_value);
6086     }
6087   if (rhs == 0)
6088     {
6089       false_value = fold_convert_loc (loc, cond_type, false_value);
6090       if (cond_first_p)
6091 	rhs = fold_build2_loc (loc, code, type, false_value, arg);
6092       else
6093 	rhs = fold_build2_loc (loc, code, type, arg, false_value);
6094     }
6095 
6096   /* Check that we have simplified at least one of the branches.  */
6097   if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6098     return NULL_TREE;
6099 
6100   return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6101 }
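
/* [Editorial illustration -- not part of the original GCC source.]
   A hypothetical case where pushing the `+' inside the conditional
   pays off, because both branches then fold to constants:  */

static int
cond_arg_example (int b)
{
  return (b ? 4 : 8) + 1;	/* folds to b ? 5 : 9 */
}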
6102 
6103 
6104 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6105 
6106    If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6107    TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
6108    ADDEND is the same as X.
6109 
6110    X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6111    and finite.  The problematic cases are when X is zero, and its mode
6112    has signed zeros.  In the case of rounding towards -infinity,
6113    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
6114    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
6115 
6116 bool
6117 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6118 {
6119   if (!real_zerop (addend))
6120     return false;
6121 
6122   /* Don't allow the fold with -fsignaling-nans.  */
6123   if (HONOR_SNANS (TYPE_MODE (type)))
6124     return false;
6125 
6126   /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
6127   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6128     return true;
6129 
6130   /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
6131   if (TREE_CODE (addend) == REAL_CST
6132       && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6133     negate = !negate;
6134 
6135   /* The mode has signed zeros, and we have to honor their sign.
6136      In this situation, there is only one case we can return true for.
6137      X - 0 is the same as X unless rounding towards -infinity is
6138      supported.  */
6139   return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6140 }
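
/* [Editorial illustration -- not part of the original GCC source.]
   The one case that survives signed zeros, per the comment above:
   X - 0.0 can fold to X so long as sign-dependent rounding need not
   be honored, whereas X + 0.0 cannot, because -0.0 + 0.0 is +0.0.  */

static double
zero_sub_example (double x)
{
  return x - 0.0;	/* fold_real_zero_addition_p allows dropping - 0.0 */
}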
6141 
6142 /* Subroutine of fold() that checks comparisons of built-in math
6143    functions against real constants.
6144 
6145    FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6146    operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
6147    is the type of the result and ARG0 and ARG1 are the operands of the
6148    comparison.  ARG1 must be a TREE_REAL_CST.
6149 
6150    The function returns the constant folded tree if a simplification
6151    can be made, and NULL_TREE otherwise.  */
6152 
6153 static tree
6154 fold_mathfn_compare (location_t loc,
6155 		     enum built_in_function fcode, enum tree_code code,
6156 		     tree type, tree arg0, tree arg1)
6157 {
6158   REAL_VALUE_TYPE c;
6159 
6160   if (BUILTIN_SQRT_P (fcode))
6161     {
6162       tree arg = CALL_EXPR_ARG (arg0, 0);
6163       enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6164 
6165       c = TREE_REAL_CST (arg1);
6166       if (REAL_VALUE_NEGATIVE (c))
6167 	{
6168 	  /* sqrt(x) < y is always false, if y is negative.  */
6169 	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6170 	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6171 
6172 	  /* sqrt(x) > y is always true, if y is negative and we
6173 	     don't care about NaNs, i.e. negative values of x.  */
6174 	  if (code == NE_EXPR || !HONOR_NANS (mode))
6175 	    return omit_one_operand_loc (loc, type, integer_one_node, arg);
6176 
6177 	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
6178 	  return fold_build2_loc (loc, GE_EXPR, type, arg,
6179 			      build_real (TREE_TYPE (arg), dconst0));
6180 	}
6181       else if (code == GT_EXPR || code == GE_EXPR)
6182 	{
6183 	  REAL_VALUE_TYPE c2;
6184 
6185 	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6186 	  real_convert (&c2, mode, &c2);
6187 
6188 	  if (REAL_VALUE_ISINF (c2))
6189 	    {
6190 	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
6191 	      if (HONOR_INFINITIES (mode))
6192 		return fold_build2_loc (loc, EQ_EXPR, type, arg,
6193 				    build_real (TREE_TYPE (arg), c2));
6194 
6195 	      /* sqrt(x) > y is always false, when y is very large
6196 		 and we don't care about infinities.  */
6197 	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6198 	    }
6199 
6200 	  /* sqrt(x) > c is the same as x > c*c.  */
6201 	  return fold_build2_loc (loc, code, type, arg,
6202 			      build_real (TREE_TYPE (arg), c2));
6203 	}
6204       else if (code == LT_EXPR || code == LE_EXPR)
6205 	{
6206 	  REAL_VALUE_TYPE c2;
6207 
6208 	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6209 	  real_convert (&c2, mode, &c2);
6210 
6211 	  if (REAL_VALUE_ISINF (c2))
6212 	    {
6213 	      /* sqrt(x) < y is always true, when y is a very large
6214 		 value and we don't care about NaNs or Infinities.  */
6215 	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6216 		return omit_one_operand_loc (loc, type, integer_one_node, arg);
6217 
6218 	      /* sqrt(x) < y is x != +Inf when y is very large and we
6219 		 don't care about NaNs.  */
6220 	      if (! HONOR_NANS (mode))
6221 		return fold_build2_loc (loc, NE_EXPR, type, arg,
6222 				    build_real (TREE_TYPE (arg), c2));
6223 
6224 	      /* sqrt(x) < y is x >= 0 when y is very large and we
6225 		 don't care about Infinities.  */
6226 	      if (! HONOR_INFINITIES (mode))
6227 		return fold_build2_loc (loc, GE_EXPR, type, arg,
6228 				    build_real (TREE_TYPE (arg), dconst0));
6229 
6230 	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
6231 	      arg = save_expr (arg);
6232 	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6233 				  fold_build2_loc (loc, GE_EXPR, type, arg,
6234 					       build_real (TREE_TYPE (arg),
6235 							   dconst0)),
6236 				  fold_build2_loc (loc, NE_EXPR, type, arg,
6237 					       build_real (TREE_TYPE (arg),
6238 							   c2)));
6239 	    }
6240 
6241 	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
6242 	  if (! HONOR_NANS (mode))
6243 	    return fold_build2_loc (loc, code, type, arg,
6244 				build_real (TREE_TYPE (arg), c2));
6245 
6246 	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
6247 	  arg = save_expr (arg);
6248 	  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6249 				  fold_build2_loc (loc, GE_EXPR, type, arg,
6250 					       build_real (TREE_TYPE (arg),
6251 							   dconst0)),
6252 				  fold_build2_loc (loc, code, type, arg,
6253 					       build_real (TREE_TYPE (arg),
6254 							   c2)));
6255 	}
6256     }
6257 
6258   return NULL_TREE;
6259 }
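
/* Illustrative sketches of the sqrt folds above, assuming IEEE double,
   <math.h> in scope, and c*c exactly representable (examples only,
   not compiled):  */
#if 0
int g1 (double x) { return sqrt (x) > 4.0; }  /* -> x > 16.0  */
int g2 (double x) { return sqrt (x) < 3.0; }  /* -> x >= 0.0 && x < 9.0,
						 or just x < 9.0 with
						 -ffinite-math-only.  */
int g3 (double x) { return sqrt (x) < -1.0; } /* -> 0, always false.  */
#endif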
6260 
6261 /* Subroutine of fold() that optimizes comparisons against Infinities,
6262    either +Inf or -Inf.
6263 
6264    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6265    GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
6266    are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.
6267 
6268    The function returns the constant folded tree if a simplification
6269    can be made, and NULL_TREE otherwise.  */
6270 
6271 static tree
6272 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6273 		  tree arg0, tree arg1)
6274 {
6275   enum machine_mode mode;
6276   REAL_VALUE_TYPE max;
6277   tree temp;
6278   bool neg;
6279 
6280   mode = TYPE_MODE (TREE_TYPE (arg0));
6281 
6282   /* For negative infinity swap the sense of the comparison.  */
6283   neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6284   if (neg)
6285     code = swap_tree_comparison (code);
6286 
6287   switch (code)
6288     {
6289     case GT_EXPR:
6290       /* x > +Inf is always false, if we ignore sNaNs.  */
6291       if (HONOR_SNANS (mode))
6292         return NULL_TREE;
6293       return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6294 
6295     case LE_EXPR:
6296       /* x <= +Inf is always true, if we don't care about NaNs.  */
6297       if (! HONOR_NANS (mode))
6298 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6299 
6300       /* x <= +Inf is the same as x == x, i.e. !isnan(x).  */
6301       arg0 = save_expr (arg0);
6302       return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6303 
6304     case EQ_EXPR:
6305     case GE_EXPR:
6306       /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
6307       real_maxval (&max, neg, mode);
6308       return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6309 			  arg0, build_real (TREE_TYPE (arg0), max));
6310 
6311     case LT_EXPR:
6312       /* x < +Inf is always equal to x <= DBL_MAX.  */
6313       real_maxval (&max, neg, mode);
6314       return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6315 			  arg0, build_real (TREE_TYPE (arg0), max));
6316 
6317     case NE_EXPR:
6318       /* x != +Inf is always equal to !(x > DBL_MAX).  */
6319       real_maxval (&max, neg, mode);
6320       if (! HONOR_NANS (mode))
6321 	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6322 			    arg0, build_real (TREE_TYPE (arg0), max));
6323 
6324       temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6325 			  arg0, build_real (TREE_TYPE (arg0), max));
6326       return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6327 
6328     default:
6329       break;
6330     }
6331 
6332   return NULL_TREE;
6333 }
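
/* Illustrative sketches, assuming IEEE double where INF denotes
   __builtin_inf () (examples only, not compiled):  */
#if 0
int h1 (double x) { return x < INF; }  /* -> x <= DBL_MAX  */
int h2 (double x) { return x > INF; }  /* -> 0, unless sNaNs are honored  */
int h3 (double x) { return x <= INF; } /* -> x == x, i.e. !isnan (x),
					  when NaNs are honored  */
#endif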
6334 
6335 /* Subroutine of fold() that optimizes comparisons of a division by
6336    a nonzero integer constant against an integer constant, i.e.
6337    X/C1 op C2.
6338 
6339    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6340    GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
6341    are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6342 
6343    The function returns the constant folded tree if a simplification
6344    can be made, and NULL_TREE otherwise.  */
6345 
6346 static tree
6347 fold_div_compare (location_t loc,
6348 		  enum tree_code code, tree type, tree arg0, tree arg1)
6349 {
6350   tree prod, tmp, hi, lo;
6351   tree arg00 = TREE_OPERAND (arg0, 0);
6352   tree arg01 = TREE_OPERAND (arg0, 1);
6353   double_int val;
6354   bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6355   bool neg_overflow;
6356   int overflow;
6357 
6358   /* We have to do this the hard way to detect unsigned overflow.
6359      prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
6360   overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6361 				   TREE_INT_CST_HIGH (arg01),
6362 				   TREE_INT_CST_LOW (arg1),
6363 				   TREE_INT_CST_HIGH (arg1),
6364 				   &val.low, &val.high, unsigned_p);
6365   prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6366   neg_overflow = false;
6367 
6368   if (unsigned_p)
6369     {
6370       tmp = int_const_binop (MINUS_EXPR, arg01,
6371                              build_int_cst (TREE_TYPE (arg01), 1));
6372       lo = prod;
6373 
6374       /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
6375       overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6376 				       TREE_INT_CST_HIGH (prod),
6377 				       TREE_INT_CST_LOW (tmp),
6378 				       TREE_INT_CST_HIGH (tmp),
6379 				       &val.low, &val.high, unsigned_p);
6380       hi = force_fit_type_double (TREE_TYPE (arg00), val,
6381 				  -1, overflow | TREE_OVERFLOW (prod));
6382     }
6383   else if (tree_int_cst_sgn (arg01) >= 0)
6384     {
6385       tmp = int_const_binop (MINUS_EXPR, arg01,
6386 			     build_int_cst (TREE_TYPE (arg01), 1));
6387       switch (tree_int_cst_sgn (arg1))
6388 	{
6389 	case -1:
6390 	  neg_overflow = true;
6391 	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
6392 	  hi = prod;
6393 	  break;
6394 
6395 	case  0:
6396 	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6397 	  hi = tmp;
6398 	  break;
6399 
6400 	case  1:
6401           hi = int_const_binop (PLUS_EXPR, prod, tmp);
6402 	  lo = prod;
6403 	  break;
6404 
6405 	default:
6406 	  gcc_unreachable ();
6407 	}
6408     }
6409   else
6410     {
6411       /* A negative divisor reverses the relational operators.  */
6412       code = swap_tree_comparison (code);
6413 
6414       tmp = int_const_binop (PLUS_EXPR, arg01,
6415 			     build_int_cst (TREE_TYPE (arg01), 1));
6416       switch (tree_int_cst_sgn (arg1))
6417 	{
6418 	case -1:
6419 	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
6420 	  lo = prod;
6421 	  break;
6422 
6423 	case  0:
6424 	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6425 	  lo = tmp;
6426 	  break;
6427 
6428 	case  1:
6429 	  neg_overflow = true;
6430 	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
6431 	  hi = prod;
6432 	  break;
6433 
6434 	default:
6435 	  gcc_unreachable ();
6436 	}
6437     }
6438 
6439   switch (code)
6440     {
6441     case EQ_EXPR:
6442       if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6443 	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6444       if (TREE_OVERFLOW (hi))
6445 	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6446       if (TREE_OVERFLOW (lo))
6447 	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6448       return build_range_check (loc, type, arg00, 1, lo, hi);
6449 
6450     case NE_EXPR:
6451       if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6452 	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6453       if (TREE_OVERFLOW (hi))
6454 	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6455       if (TREE_OVERFLOW (lo))
6456 	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6457       return build_range_check (loc, type, arg00, 0, lo, hi);
6458 
6459     case LT_EXPR:
6460       if (TREE_OVERFLOW (lo))
6461 	{
6462 	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
6463 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6464 	}
6465       return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6466 
6467     case LE_EXPR:
6468       if (TREE_OVERFLOW (hi))
6469 	{
6470 	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
6471 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6472 	}
6473       return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6474 
6475     case GT_EXPR:
6476       if (TREE_OVERFLOW (hi))
6477 	{
6478 	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
6479 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6480 	}
6481       return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6482 
6483     case GE_EXPR:
6484       if (TREE_OVERFLOW (lo))
6485 	{
6486 	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
6487 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6488 	}
6489       return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6490 
6491     default:
6492       break;
6493     }
6494 
6495   return NULL_TREE;
6496 }
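
/* Illustrative sketches for a signed 32-bit int x, using truncating
   division (examples only, not compiled):  */
#if 0
int d1 (int x) { return x / 3 == 2; } /* -> range check 6 <= x && x <= 8  */
int d2 (int x) { return x / 3 > 2; }  /* -> x > 8  */
int d3 (int x) { return x / -3 < 2; } /* divisor < 0 swaps the operator:
					 -> x > -6  */
#endif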
6497 
6498 
6499 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6500    equality/inequality test, then return a simplified form of the test
6501    using a sign testing.  Otherwise return NULL.  TYPE is the desired
6502    result type.  */
6503 
6504 static tree
6505 fold_single_bit_test_into_sign_test (location_t loc,
6506 				     enum tree_code code, tree arg0, tree arg1,
6507 				     tree result_type)
6508 {
6509   /* If this is testing a single bit, we can optimize the test.  */
6510   if ((code == NE_EXPR || code == EQ_EXPR)
6511       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6512       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6513     {
6514       /* If we have (A & C) != 0 where C is the sign bit of A, convert
6515 	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
6516       tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6517 
6518       if (arg00 != NULL_TREE
6519 	  /* This is only a win if casting to a signed type is cheap,
6520 	     i.e. when arg00's type is not a partial mode.  */
6521 	  && TYPE_PRECISION (TREE_TYPE (arg00))
6522 	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6523 	{
6524 	  tree stype = signed_type_for (TREE_TYPE (arg00));
6525 	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6526 			      result_type,
6527 			      fold_convert_loc (loc, stype, arg00),
6528 			      build_int_cst (stype, 0));
6529 	}
6530     }
6531 
6532   return NULL_TREE;
6533 }
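
/* Illustrative sketch, assuming a 32-bit unsigned int so 0x80000000 is
   the sign bit (examples only, not compiled):  */
#if 0
int s1 (unsigned x) { return (x & 0x80000000u) != 0; } /* -> (int) x < 0  */
int s2 (unsigned x) { return (x & 0x80000000u) == 0; } /* -> (int) x >= 0  */
#endif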
6534 
6535 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6536    equality/inequality test, then return a simplified form of
6537    the test using shifts and logical operations.  Otherwise return
6538    NULL.  RESULT_TYPE is the desired result type.  */
6539 
6540 tree
6541 fold_single_bit_test (location_t loc, enum tree_code code,
6542 		      tree arg0, tree arg1, tree result_type)
6543 {
6544   /* If this is testing a single bit, we can optimize the test.  */
6545   if ((code == NE_EXPR || code == EQ_EXPR)
6546       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6547       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6548     {
6549       tree inner = TREE_OPERAND (arg0, 0);
6550       tree type = TREE_TYPE (arg0);
6551       int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6552       enum machine_mode operand_mode = TYPE_MODE (type);
6553       int ops_unsigned;
6554       tree signed_type, unsigned_type, intermediate_type;
6555       tree tem, one;
6556 
6557       /* First, see if we can fold the single bit test into a sign-bit
6558 	 test.  */
6559       tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6560 						 result_type);
6561       if (tem)
6562 	return tem;
6563 
6564       /* Otherwise we have (A & C) != 0 where C is a single bit,
6565 	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
6566 	 Similarly for (A & C) == 0.  */
6567 
6568       /* If INNER is a right shift by a constant and the shift count
6569 	 plus BITNUM does not overflow, adjust BITNUM and INNER.  */
6570       if (TREE_CODE (inner) == RSHIFT_EXPR
6571 	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6572 	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6573 	  && bitnum < TYPE_PRECISION (type)
6574 	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6575 				   TYPE_PRECISION (type) - bitnum))
6576 	{
6577 	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6578 	  inner = TREE_OPERAND (inner, 0);
6579 	}
6580 
6581       /* If we are going to be able to omit the AND below, we must do our
6582 	 operations as unsigned.  If we must use the AND, we have a choice.
6583 	 Normally unsigned is faster, but for some machines signed is.  */
6584 #ifdef LOAD_EXTEND_OP
6585       ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6586 		      && !flag_syntax_only) ? 0 : 1;
6587 #else
6588       ops_unsigned = 1;
6589 #endif
6590 
6591       signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6592       unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6593       intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6594       inner = fold_convert_loc (loc, intermediate_type, inner);
6595 
6596       if (bitnum != 0)
6597 	inner = build2 (RSHIFT_EXPR, intermediate_type,
6598 			inner, size_int (bitnum));
6599 
6600       one = build_int_cst (intermediate_type, 1);
6601 
6602       if (code == EQ_EXPR)
6603 	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6604 
6605       /* Put the AND last so it can combine with more things.  */
6606       inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6607 
6608       /* Make sure to return the proper type.  */
6609       inner = fold_convert_loc (loc, result_type, inner);
6610 
6611       return inner;
6612     }
6613   return NULL_TREE;
6614 }
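
/* Illustrative sketch of the shift form, for a mask bit other than the
   sign bit (examples only, not compiled):  */
#if 0
int b1 (unsigned x) { return (x & 8) != 0; } /* -> (x >> 3) & 1  */
int b2 (unsigned x) { return (x & 8) == 0; } /* -> ((x >> 3) ^ 1) & 1  */
#endif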
6615 
6616 /* Check whether we are allowed to reorder operands arg0 and arg1,
6617    such that the evaluation of arg1 occurs before arg0.  */
6618 
6619 static bool
6620 reorder_operands_p (const_tree arg0, const_tree arg1)
6621 {
6622   if (! flag_evaluation_order)
6623     return true;
6624   if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6625     return true;
6626   return ! TREE_SIDE_EFFECTS (arg0)
6627 	 && ! TREE_SIDE_EFFECTS (arg1);
6628 }
6629 
6630 /* Test whether it is preferable to swap two operands, ARG0 and
6631    ARG1, for example because ARG0 is an integer constant and ARG1
6632    isn't.  If REORDER is true, only recommend swapping if we can
6633    evaluate the operands in reverse order.  */
6634 
6635 bool
6636 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6637 {
6638   STRIP_SIGN_NOPS (arg0);
6639   STRIP_SIGN_NOPS (arg1);
6640 
6641   if (TREE_CODE (arg1) == INTEGER_CST)
6642     return 0;
6643   if (TREE_CODE (arg0) == INTEGER_CST)
6644     return 1;
6645 
6646   if (TREE_CODE (arg1) == REAL_CST)
6647     return 0;
6648   if (TREE_CODE (arg0) == REAL_CST)
6649     return 1;
6650 
6651   if (TREE_CODE (arg1) == FIXED_CST)
6652     return 0;
6653   if (TREE_CODE (arg0) == FIXED_CST)
6654     return 1;
6655 
6656   if (TREE_CODE (arg1) == COMPLEX_CST)
6657     return 0;
6658   if (TREE_CODE (arg0) == COMPLEX_CST)
6659     return 1;
6660 
6661   if (TREE_CONSTANT (arg1))
6662     return 0;
6663   if (TREE_CONSTANT (arg0))
6664     return 1;
6665 
6666   if (optimize_function_for_size_p (cfun))
6667     return 0;
6668 
6669   if (reorder && flag_evaluation_order
6670       && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6671     return 0;
6672 
6673   /* It is preferable to swap two SSA_NAME to ensure a canonical form
6674      for commutative and comparison operators.  Ensuring a canonical
6675      form allows the optimizers to find additional redundancies without
6676      having to explicitly check for both orderings.  */
6677   if (TREE_CODE (arg0) == SSA_NAME
6678       && TREE_CODE (arg1) == SSA_NAME
6679       && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6680     return 1;
6681 
6682   /* Put SSA_NAMEs last.  */
6683   if (TREE_CODE (arg1) == SSA_NAME)
6684     return 0;
6685   if (TREE_CODE (arg0) == SSA_NAME)
6686     return 1;
6687 
6688   /* Put variables last.  */
6689   if (DECL_P (arg1))
6690     return 0;
6691   if (DECL_P (arg0))
6692     return 1;
6693 
6694   return 0;
6695 }
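
/* Illustrative effect when callers act on the answer (a sketch only,
   not compiled): constants sort last, and SSA_NAMEs are ordered by
   version, so both operand orders reach one canonical form.  */
#if 0
int c1 (int x) { return 1 + x; } /* canonicalized as x + 1  */
#endif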
6696 
6697 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6698    ARG0 is extended to a wider type.  */
6699 
6700 static tree
6701 fold_widened_comparison (location_t loc, enum tree_code code,
6702 			 tree type, tree arg0, tree arg1)
6703 {
6704   tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6705   tree arg1_unw;
6706   tree shorter_type, outer_type;
6707   tree min, max;
6708   bool above, below;
6709 
6710   if (arg0_unw == arg0)
6711     return NULL_TREE;
6712   shorter_type = TREE_TYPE (arg0_unw);
6713 
6714 #ifdef HAVE_canonicalize_funcptr_for_compare
6715   /* Disable this optimization if we're casting a function pointer
6716      type on targets that require function pointer canonicalization.  */
6717   if (HAVE_canonicalize_funcptr_for_compare
6718       && TREE_CODE (shorter_type) == POINTER_TYPE
6719       && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6720     return NULL_TREE;
6721 #endif
6722 
6723   if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6724     return NULL_TREE;
6725 
6726   arg1_unw = get_unwidened (arg1, NULL_TREE);
6727 
6728   /* If possible, express the comparison in the shorter mode.  */
6729   if ((code == EQ_EXPR || code == NE_EXPR
6730        || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6731       && (TREE_TYPE (arg1_unw) == shorter_type
6732 	  || ((TYPE_PRECISION (shorter_type)
6733 	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6734 	      && (TYPE_UNSIGNED (shorter_type)
6735 		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6736 	  || (TREE_CODE (arg1_unw) == INTEGER_CST
6737 	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
6738 		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6739 	      && int_fits_type_p (arg1_unw, shorter_type))))
6740     return fold_build2_loc (loc, code, type, arg0_unw,
6741 			fold_convert_loc (loc, shorter_type, arg1_unw));
6742 
6743   if (TREE_CODE (arg1_unw) != INTEGER_CST
6744       || TREE_CODE (shorter_type) != INTEGER_TYPE
6745       || !int_fits_type_p (arg1_unw, shorter_type))
6746     return NULL_TREE;
6747 
6748   /* If we are comparing with an integer that does not fit into the
6749      range of the shorter type, the result is known.  */
6750   outer_type = TREE_TYPE (arg1_unw);
6751   min = lower_bound_in_type (outer_type, shorter_type);
6752   max = upper_bound_in_type (outer_type, shorter_type);
6753 
6754   above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6755 						   max, arg1_unw));
6756   below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6757 						   arg1_unw, min));
6758 
6759   switch (code)
6760     {
6761     case EQ_EXPR:
6762       if (above || below)
6763 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6764       break;
6765 
6766     case NE_EXPR:
6767       if (above || below)
6768 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6769       break;
6770 
6771     case LT_EXPR:
6772     case LE_EXPR:
6773       if (above)
6774 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6775       else if (below)
6776 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;
6777 
6778     case GT_EXPR:
6779     case GE_EXPR:
6780       if (above)
6781 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6782       else if (below)
6783 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;
6784 
6785     default:
6786       break;
6787     }
6788 
6789   return NULL_TREE;
6790 }
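
/* Illustrative sketches, assuming an 8-bit signed char widened to a
   32-bit int (examples only, not compiled):  */
#if 0
int w1 (signed char c) { return (int) c == 1000; } /* -> 0: 1000 is above
						      SCHAR_MAX  */
int w2 (signed char c) { return (int) c < 1000; }  /* -> 1  */
int w3 (signed char c) { return (int) c == 7; }    /* -> c == 7, compared
						      in the narrow type  */
#endif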
6791 
6792 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where ARG0
6793    is a conversion that changes only the signedness.  */
6794 
6795 static tree
6796 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6797 			      tree arg0, tree arg1)
6798 {
6799   tree arg0_inner;
6800   tree inner_type, outer_type;
6801 
6802   if (!CONVERT_EXPR_P (arg0))
6803     return NULL_TREE;
6804 
6805   outer_type = TREE_TYPE (arg0);
6806   arg0_inner = TREE_OPERAND (arg0, 0);
6807   inner_type = TREE_TYPE (arg0_inner);
6808 
6809 #ifdef HAVE_canonicalize_funcptr_for_compare
6810   /* Disable this optimization if we're casting a function pointer
6811      type on targets that require function pointer canonicalization.  */
6812   if (HAVE_canonicalize_funcptr_for_compare
6813       && TREE_CODE (inner_type) == POINTER_TYPE
6814       && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6815     return NULL_TREE;
6816 #endif
6817 
6818   if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6819     return NULL_TREE;
6820 
6821   if (TREE_CODE (arg1) != INTEGER_CST
6822       && !(CONVERT_EXPR_P (arg1)
6823 	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6824     return NULL_TREE;
6825 
6826   if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6827       && code != NE_EXPR
6828       && code != EQ_EXPR)
6829     return NULL_TREE;
6830 
6831   if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6832     return NULL_TREE;
6833 
6834   if (TREE_CODE (arg1) == INTEGER_CST)
6835     arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6836 				  0, TREE_OVERFLOW (arg1));
6837   else
6838     arg1 = fold_convert_loc (loc, inner_type, arg1);
6839 
6840   return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6841 }
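
/* Illustrative sketch, assuming a 32-bit int (examples only, not
   compiled):  */
#if 0
int n1 (int i) { return (unsigned) i == 5u; } /* -> i == 5; the cast only
						 changes signedness  */
int n2 (int i) { return (unsigned) i < 5u; }  /* not folded: ordered
						 comparisons depend on
						 signedness  */
#endif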
6842 
6843 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6844    the step of the array.  Reconstructs s and delta in the case of s *
6845    delta being an integer constant (and thus already folded).  ADDR is
6846    the address.  OP1 is the multiplicative expression.  If the
6847    function succeeds, the new address expression is returned.
6848    Otherwise NULL_TREE is returned.  LOC is the location of the
6849    resulting expression.  */
6850 
6851 static tree
6852 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6853 {
6854   tree s, delta, step;
6855   tree ref = TREE_OPERAND (addr, 0), pref;
6856   tree ret, pos;
6857   tree itype;
6858   bool mdim = false;
6859 
6860   /* Strip the nops that might be added when converting op1 to sizetype.  */
6861   STRIP_NOPS (op1);
6862 
6863   /* Canonicalize op1 into a possibly non-constant delta
6864      and an INTEGER_CST s.  */
6865   if (TREE_CODE (op1) == MULT_EXPR)
6866     {
6867       tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6868 
6869       STRIP_NOPS (arg0);
6870       STRIP_NOPS (arg1);
6871 
6872       if (TREE_CODE (arg0) == INTEGER_CST)
6873         {
6874           s = arg0;
6875           delta = arg1;
6876         }
6877       else if (TREE_CODE (arg1) == INTEGER_CST)
6878         {
6879           s = arg1;
6880           delta = arg0;
6881         }
6882       else
6883         return NULL_TREE;
6884     }
6885   else if (TREE_CODE (op1) == INTEGER_CST)
6886     {
6887       delta = op1;
6888       s = NULL_TREE;
6889     }
6890   else
6891     {
6892       /* Treat op1 as delta * 1.  */
6893       delta = op1;
6894       s = integer_one_node;
6895     }
6896 
6897   /* Handle &x.array the same as we would handle &x.array[0].  */
6898   if (TREE_CODE (ref) == COMPONENT_REF
6899       && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6900     {
6901       tree domain;
6902 
6903       /* Remember if this was a multi-dimensional array.  */
6904       if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6905 	mdim = true;
6906 
6907       domain = TYPE_DOMAIN (TREE_TYPE (ref));
6908       if (! domain)
6909 	goto cont;
6910       itype = TREE_TYPE (domain);
6911 
6912       step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6913       if (TREE_CODE (step) != INTEGER_CST)
6914 	goto cont;
6915 
6916       if (s)
6917 	{
6918 	  if (! tree_int_cst_equal (step, s))
6919 	    goto cont;
6920 	}
6921       else
6922 	{
6923 	  /* Check whether delta is a multiple of step.  */
6924 	  tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6925 	  if (! tmp)
6926 	    goto cont;
6927 	  delta = tmp;
6928 	}
6929 
6930       /* Only fold here if we can verify we do not overflow one
6931 	 dimension of a multi-dimensional array.  */
6932       if (mdim)
6933 	{
6934 	  tree tmp;
6935 
6936 	  if (!TYPE_MIN_VALUE (domain)
6937 	      || !TYPE_MAX_VALUE (domain)
6938 	      || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6939 	    goto cont;
6940 
6941 	  tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6942 				 fold_convert_loc (loc, itype,
6943 						   TYPE_MIN_VALUE (domain)),
6944 				 fold_convert_loc (loc, itype, delta));
6945 	  if (TREE_CODE (tmp) != INTEGER_CST
6946 	      || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6947 	    goto cont;
6948 	}
6949 
6950       /* We found a suitable component reference.  */
6951 
6952       pref = TREE_OPERAND (addr, 0);
6953       ret = copy_node (pref);
6954       SET_EXPR_LOCATION (ret, loc);
6955 
6956       ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6957 			fold_build2_loc
6958 			  (loc, PLUS_EXPR, itype,
6959 			   fold_convert_loc (loc, itype,
6960 					     TYPE_MIN_VALUE
6961 					       (TYPE_DOMAIN (TREE_TYPE (ref)))),
6962 			   fold_convert_loc (loc, itype, delta)),
6963 			NULL_TREE, NULL_TREE);
6964       return build_fold_addr_expr_loc (loc, ret);
6965     }
6966 
6967 cont:
6968 
6969   for (;; ref = TREE_OPERAND (ref, 0))
6970     {
6971       if (TREE_CODE (ref) == ARRAY_REF)
6972 	{
6973 	  tree domain;
6974 
6975 	  /* Remember if this was a multi-dimensional array.  */
6976 	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6977 	    mdim = true;
6978 
6979 	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6980 	  if (! domain)
6981 	    continue;
6982 	  itype = TREE_TYPE (domain);
6983 
6984 	  step = array_ref_element_size (ref);
6985 	  if (TREE_CODE (step) != INTEGER_CST)
6986 	    continue;
6987 
6988 	  if (s)
6989 	    {
6990 	      if (! tree_int_cst_equal (step, s))
6991                 continue;
6992 	    }
6993 	  else
6994 	    {
6995 	      /* Check whether delta is a multiple of step.  */
6996 	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6997 	      if (! tmp)
6998 		continue;
6999 	      delta = tmp;
7000 	    }
7001 
7002 	  /* Only fold here if we can verify we do not overflow one
7003 	     dimension of a multi-dimensional array.  */
7004 	  if (mdim)
7005 	    {
7006 	      tree tmp;
7007 
7008 	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7009 		  || !TYPE_MAX_VALUE (domain)
7010 		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7011 		continue;
7012 
7013 	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7014 				     fold_convert_loc (loc, itype,
7015 						       TREE_OPERAND (ref, 1)),
7016 				     fold_convert_loc (loc, itype, delta));
7017 	      if (!tmp
7018 		  || TREE_CODE (tmp) != INTEGER_CST
7019 		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7020 		continue;
7021 	    }
7022 
7023 	  break;
7024 	}
7025       else
7026 	mdim = false;
7027 
7028       if (!handled_component_p (ref))
7029 	return NULL_TREE;
7030     }
7031 
7032   /* We found the suitable array reference.  So copy everything up to it,
7033      and replace the index.  */
7034 
7035   pref = TREE_OPERAND (addr, 0);
7036   ret = copy_node (pref);
7037   SET_EXPR_LOCATION (ret, loc);
7038   pos = ret;
7039 
7040   while (pref != ref)
7041     {
7042       pref = TREE_OPERAND (pref, 0);
7043       TREE_OPERAND (pos, 0) = copy_node (pref);
7044       pos = TREE_OPERAND (pos, 0);
7045     }
7046 
7047   TREE_OPERAND (pos, 1)
7048     = fold_build2_loc (loc, PLUS_EXPR, itype,
7049 		       fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7050 		       fold_convert_loc (loc, itype, delta));
7051   return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7052 }
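
/* Illustrative sketch for a real array with 4-byte int elements; "p+"
   denotes the byte-offset POINTER_PLUS_EXPR (example only, not
   compiled):  */
#if 0
int a[10];
int *m1 (int i, int j)
{
  return &a[i] + j; /* lowered to &a[i] p+ (sizetype) j * 4,
		       then folded back to &a[i + j]  */
}
#endif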
7053 
7054 
7055 /* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
7056    means A >= Y && A != MAX, but in this case we know that
7057    A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
7058 
7059 static tree
7060 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7061 {
7062   tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7063 
7064   if (TREE_CODE (bound) == LT_EXPR)
7065     a = TREE_OPERAND (bound, 0);
7066   else if (TREE_CODE (bound) == GT_EXPR)
7067     a = TREE_OPERAND (bound, 1);
7068   else
7069     return NULL_TREE;
7070 
7071   typea = TREE_TYPE (a);
7072   if (!INTEGRAL_TYPE_P (typea)
7073       && !POINTER_TYPE_P (typea))
7074     return NULL_TREE;
7075 
7076   if (TREE_CODE (ineq) == LT_EXPR)
7077     {
7078       a1 = TREE_OPERAND (ineq, 1);
7079       y = TREE_OPERAND (ineq, 0);
7080     }
7081   else if (TREE_CODE (ineq) == GT_EXPR)
7082     {
7083       a1 = TREE_OPERAND (ineq, 0);
7084       y = TREE_OPERAND (ineq, 1);
7085     }
7086   else
7087     return NULL_TREE;
7088 
7089   if (TREE_TYPE (a1) != typea)
7090     return NULL_TREE;
7091 
7092   if (POINTER_TYPE_P (typea))
7093     {
7094       /* Convert the pointers to integers before taking the difference.  */
7095       tree ta = fold_convert_loc (loc, ssizetype, a);
7096       tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7097       diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7098     }
7099   else
7100     diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7101 
7102   if (!diff || !integer_onep (diff))
7103     return NULL_TREE;
7104 
7105   return fold_build2_loc (loc, GE_EXPR, type, a, y);
7106 }
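
/* Illustrative sketch for integer operands (example only, not
   compiled):  */
#if 0
int q1 (int a, int x, int y)
{
  return a < x && a + 1 > y; /* -> a < x && a >= y  */
}
#endif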
7107 
7108 /* Fold a sum or difference of at least one multiplication.
7109    Returns the folded tree or NULL if no simplification could be made.  */
7110 
7111 static tree
7112 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7113 			  tree arg0, tree arg1)
7114 {
7115   tree arg00, arg01, arg10, arg11;
7116   tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7117 
7118   /* (A * C) +- (B * C) -> (A+-B) * C.
7119      (A * C) +- A -> A * (C+-1).
7120      We are most concerned about the case where C is a constant,
7121      but other combinations show up during loop reduction.  Since
7122      it is not difficult, try all four possibilities.  */
7123 
7124   if (TREE_CODE (arg0) == MULT_EXPR)
7125     {
7126       arg00 = TREE_OPERAND (arg0, 0);
7127       arg01 = TREE_OPERAND (arg0, 1);
7128     }
7129   else if (TREE_CODE (arg0) == INTEGER_CST)
7130     {
7131       arg00 = build_one_cst (type);
7132       arg01 = arg0;
7133     }
7134   else
7135     {
7136       /* We cannot generate constant 1 for fract.  */
7137       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7138 	return NULL_TREE;
7139       arg00 = arg0;
7140       arg01 = build_one_cst (type);
7141     }
7142   if (TREE_CODE (arg1) == MULT_EXPR)
7143     {
7144       arg10 = TREE_OPERAND (arg1, 0);
7145       arg11 = TREE_OPERAND (arg1, 1);
7146     }
7147   else if (TREE_CODE (arg1) == INTEGER_CST)
7148     {
7149       arg10 = build_one_cst (type);
7150       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7151 	 the purpose of this canonicalization.  */
7152       if (TREE_INT_CST_HIGH (arg1) == -1
7153 	  && negate_expr_p (arg1)
7154 	  && code == PLUS_EXPR)
7155 	{
7156 	  arg11 = negate_expr (arg1);
7157 	  code = MINUS_EXPR;
7158 	}
7159       else
7160 	arg11 = arg1;
7161     }
7162   else
7163     {
7164       /* We cannot generate constant 1 for fract.  */
7165       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7166 	return NULL_TREE;
7167       arg10 = arg1;
7168       arg11 = build_one_cst (type);
7169     }
7170   same = NULL_TREE;
7171 
7172   if (operand_equal_p (arg01, arg11, 0))
7173     same = arg01, alt0 = arg00, alt1 = arg10;
7174   else if (operand_equal_p (arg00, arg10, 0))
7175     same = arg00, alt0 = arg01, alt1 = arg11;
7176   else if (operand_equal_p (arg00, arg11, 0))
7177     same = arg00, alt0 = arg01, alt1 = arg10;
7178   else if (operand_equal_p (arg01, arg10, 0))
7179     same = arg01, alt0 = arg00, alt1 = arg11;
7180 
7181   /* No identical multiplicands; see if we can find a common
7182      power-of-two factor in non-power-of-two multiplies.  This
7183      can help in multi-dimensional array access.  */
7184   else if (host_integerp (arg01, 0)
7185 	   && host_integerp (arg11, 0))
7186     {
7187       HOST_WIDE_INT int01, int11, tmp;
7188       bool swap = false;
7189       tree maybe_same;
7190       int01 = TREE_INT_CST_LOW (arg01);
7191       int11 = TREE_INT_CST_LOW (arg11);
7192 
7193       /* Move min of absolute values to int11.  */
7194       if (absu_hwi (int01) < absu_hwi (int11))
7195         {
7196 	  tmp = int01, int01 = int11, int11 = tmp;
7197 	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
7198 	  maybe_same = arg01;
7199 	  swap = true;
7200 	}
7201       else
7202 	maybe_same = arg11;
7203 
7204       if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7205 	  /* The remainder should not be a constant, otherwise we
7206 	     end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7207 	     increase the number of multiplications necessary.  */
7208 	  && TREE_CODE (arg10) != INTEGER_CST)
7209         {
7210 	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7211 			      build_int_cst (TREE_TYPE (arg00),
7212 					     int01 / int11));
7213 	  alt1 = arg10;
7214 	  same = maybe_same;
7215 	  if (swap)
7216 	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7217 	}
7218     }
7219 
7220   if (same)
7221     return fold_build2_loc (loc, MULT_EXPR, type,
7222 			fold_build2_loc (loc, code, type,
7223 				     fold_convert_loc (loc, type, alt0),
7224 				     fold_convert_loc (loc, type, alt1)),
7225 			fold_convert_loc (loc, type, same));
7226 
7227   return NULL_TREE;
7228 }
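
/* Illustrative sketches (examples only, not compiled):  */
#if 0
int p1 (int a, int b, int c) { return a * c + b * c; } /* -> (a + b) * c  */
int p2 (int i) { return i * 7 + i; }                   /* -> i * 8  */
int p3 (int i, int j) { return i * 12 + j * 4; } /* -> (i * 3 + j) * 4,
						    via the common
						    power-of-two factor  */
#endif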
7229 
7230 /* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7231    specified by EXPR into the buffer PTR of length LEN bytes.
7232    Return the number of bytes placed in the buffer, or zero
7233    upon failure.  */
7234 
7235 static int
7236 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7237 {
7238   tree type = TREE_TYPE (expr);
7239   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7240   int byte, offset, word, words;
7241   unsigned char value;
7242 
7243   if (total_bytes > len)
7244     return 0;
7245   words = total_bytes / UNITS_PER_WORD;
7246 
7247   for (byte = 0; byte < total_bytes; byte++)
7248     {
7249       int bitpos = byte * BITS_PER_UNIT;
7250       if (bitpos < HOST_BITS_PER_WIDE_INT)
7251 	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7252       else
7253 	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7254 				 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7255 
7256       if (total_bytes > UNITS_PER_WORD)
7257 	{
7258 	  word = byte / UNITS_PER_WORD;
7259 	  if (WORDS_BIG_ENDIAN)
7260 	    word = (words - 1) - word;
7261 	  offset = word * UNITS_PER_WORD;
7262 	  if (BYTES_BIG_ENDIAN)
7263 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7264 	  else
7265 	    offset += byte % UNITS_PER_WORD;
7266 	}
7267       else
7268 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7269       ptr[offset] = value;
7270     }
7271   return total_bytes;
7272 }
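
/* For example, assuming a 32-bit type, 8-bit units and a little-endian
   target, encoding the INTEGER_CST 0x01020304 stores the bytes
   { 0x04, 0x03, 0x02, 0x01 }; a big-endian target stores
   { 0x01, 0x02, 0x03, 0x04 }.  */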
7273 
7274 
7275 /* Subroutine of native_encode_expr.  Encode the REAL_CST
7276    specified by EXPR into the buffer PTR of length LEN bytes.
7277    Return the number of bytes placed in the buffer, or zero
7278    upon failure.  */
7279 
7280 static int
7281 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7282 {
7283   tree type = TREE_TYPE (expr);
7284   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7285   int byte, offset, word, words, bitpos;
7286   unsigned char value;
7287 
7288   /* There are always 32 bits in each long, no matter the size of
7289      the host's long.  We handle floating point representations with
7290      up to 192 bits.  */
7291   long tmp[6];
7292 
7293   if (total_bytes > len)
7294     return 0;
7295   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7296 
7297   real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7298 
7299   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7300        bitpos += BITS_PER_UNIT)
7301     {
7302       byte = (bitpos / BITS_PER_UNIT) & 3;
7303       value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7304 
7305       if (UNITS_PER_WORD < 4)
7306 	{
7307 	  word = byte / UNITS_PER_WORD;
7308 	  if (WORDS_BIG_ENDIAN)
7309 	    word = (words - 1) - word;
7310 	  offset = word * UNITS_PER_WORD;
7311 	  if (BYTES_BIG_ENDIAN)
7312 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7313 	  else
7314 	    offset += byte % UNITS_PER_WORD;
7315 	}
7316       else
7317 	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7318       ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7319     }
7320   return total_bytes;
7321 }
7322 
7323 /* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7324    specified by EXPR into the buffer PTR of length LEN bytes.
7325    Return the number of bytes placed in the buffer, or zero
7326    upon failure.  */
7327 
7328 static int
7329 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7330 {
7331   int rsize, isize;
7332   tree part;
7333 
7334   part = TREE_REALPART (expr);
7335   rsize = native_encode_expr (part, ptr, len);
7336   if (rsize == 0)
7337     return 0;
7338   part = TREE_IMAGPART (expr);
7339   isize = native_encode_expr (part, ptr+rsize, len-rsize);
7340   if (isize != rsize)
7341     return 0;
7342   return rsize + isize;
7343 }
7344 
7345 
7346 /* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7347    specified by EXPR into the buffer PTR of length LEN bytes.
7348    Return the number of bytes placed in the buffer, or zero
7349    upon failure.  */
7350 
7351 static int
7352 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7353 {
7354   int i, size, offset, count;
7355   tree itype, elem, elements;
7356 
7357   offset = 0;
7358   elements = TREE_VECTOR_CST_ELTS (expr);
7359   count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7360   itype = TREE_TYPE (TREE_TYPE (expr));
7361   size = GET_MODE_SIZE (TYPE_MODE (itype));
7362   for (i = 0; i < count; i++)
7363     {
7364       if (elements)
7365 	{
7366 	  elem = TREE_VALUE (elements);
7367 	  elements = TREE_CHAIN (elements);
7368 	}
7369       else
7370 	elem = NULL_TREE;
7371 
7372       if (elem)
7373 	{
7374 	  if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7375 	    return 0;
7376 	}
7377       else
7378 	{
7379 	  if (offset + size > len)
7380 	    return 0;
7381 	  memset (ptr+offset, 0, size);
7382 	}
7383       offset += size;
7384     }
7385   return offset;
7386 }
7387 
7388 
7389 /* Subroutine of native_encode_expr.  Encode the STRING_CST
7390    specified by EXPR into the buffer PTR of length LEN bytes.
7391    Return the number of bytes placed in the buffer, or zero
7392    upon failure.  */
7393 
7394 static int
7395 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7396 {
7397   tree type = TREE_TYPE (expr);
7398   HOST_WIDE_INT total_bytes;
7399 
7400   if (TREE_CODE (type) != ARRAY_TYPE
7401       || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7402       || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7403       || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7404     return 0;
7405   total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7406   if (total_bytes > len)
7407     return 0;
7408   if (TREE_STRING_LENGTH (expr) < total_bytes)
7409     {
7410       memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7411       memset (ptr + TREE_STRING_LENGTH (expr), 0,
7412 	      total_bytes - TREE_STRING_LENGTH (expr));
7413     }
7414   else
7415     memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7416   return total_bytes;
7417 }
7418 
7419 
7420 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7421    REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7422    buffer PTR of length LEN bytes.  Return the number of bytes
7423    placed in the buffer, or zero upon failure.  */
7424 
7425 int
7426 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7427 {
7428   switch (TREE_CODE (expr))
7429     {
7430     case INTEGER_CST:
7431       return native_encode_int (expr, ptr, len);
7432 
7433     case REAL_CST:
7434       return native_encode_real (expr, ptr, len);
7435 
7436     case COMPLEX_CST:
7437       return native_encode_complex (expr, ptr, len);
7438 
7439     case VECTOR_CST:
7440       return native_encode_vector (expr, ptr, len);
7441 
7442     case STRING_CST:
7443       return native_encode_string (expr, ptr, len);
7444 
7445     default:
7446       return 0;
7447     }
7448 }
7449 
7450 
7451 /* Subroutine of native_interpret_expr.  Interpret the contents of
7452    the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7453    If the buffer cannot be interpreted, return NULL_TREE.  */
7454 
7455 static tree
7456 native_interpret_int (tree type, const unsigned char *ptr, int len)
7457 {
7458   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7459   int byte, offset, word, words;
7460   unsigned char value;
7461   double_int result;
7462 
7463   if (total_bytes > len)
7464     return NULL_TREE;
7465   if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7466     return NULL_TREE;
7467 
7468   result = double_int_zero;
7469   words = total_bytes / UNITS_PER_WORD;
7470 
7471   for (byte = 0; byte < total_bytes; byte++)
7472     {
7473       int bitpos = byte * BITS_PER_UNIT;
7474       if (total_bytes > UNITS_PER_WORD)
7475 	{
7476 	  word = byte / UNITS_PER_WORD;
7477 	  if (WORDS_BIG_ENDIAN)
7478 	    word = (words - 1) - word;
7479 	  offset = word * UNITS_PER_WORD;
7480 	  if (BYTES_BIG_ENDIAN)
7481 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7482 	  else
7483 	    offset += byte % UNITS_PER_WORD;
7484 	}
7485       else
7486 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7487       value = ptr[offset];
7488 
7489       if (bitpos < HOST_BITS_PER_WIDE_INT)
7490 	result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7491       else
7492 	result.high |= (unsigned HOST_WIDE_INT) value
7493 		       << (bitpos - HOST_BITS_PER_WIDE_INT);
7494     }
7495 
7496   return double_int_to_tree (type, result);
7497 }
7498 
7499 
7500 /* Subroutine of native_interpret_expr.  Interpret the contents of
7501    the buffer PTR of length LEN as a REAL_CST of type TYPE.
7502    If the buffer cannot be interpreted, return NULL_TREE.  */
7503 
7504 static tree
7505 native_interpret_real (tree type, const unsigned char *ptr, int len)
7506 {
7507   enum machine_mode mode = TYPE_MODE (type);
7508   int total_bytes = GET_MODE_SIZE (mode);
7509   int byte, offset, word, words, bitpos;
7510   unsigned char value;
7511   /* There are always 32 bits in each long, no matter the size of
7512      the host's long.  We handle floating point representations with
7513      up to 192 bits.  */
7514   REAL_VALUE_TYPE r;
7515   long tmp[6];
7516 
7517   total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7518   if (total_bytes > len || total_bytes > 24)
7519     return NULL_TREE;
7520   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7521 
7522   memset (tmp, 0, sizeof (tmp));
7523   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7524        bitpos += BITS_PER_UNIT)
7525     {
7526       byte = (bitpos / BITS_PER_UNIT) & 3;
7527       if (UNITS_PER_WORD < 4)
7528 	{
7529 	  word = byte / UNITS_PER_WORD;
7530 	  if (WORDS_BIG_ENDIAN)
7531 	    word = (words - 1) - word;
7532 	  offset = word * UNITS_PER_WORD;
7533 	  if (BYTES_BIG_ENDIAN)
7534 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7535 	  else
7536 	    offset += byte % UNITS_PER_WORD;
7537 	}
7538       else
7539 	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7540       value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7541 
7542       tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7543     }
7544 
7545   real_from_target (&r, tmp, mode);
7546   return build_real (type, r);
7547 }
7548 
7549 
7550 /* Subroutine of native_interpret_expr.  Interpret the contents of
7551    the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7552    If the buffer cannot be interpreted, return NULL_TREE.  */
7553 
7554 static tree
7555 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7556 {
7557   tree etype, rpart, ipart;
7558   int size;
7559 
7560   etype = TREE_TYPE (type);
7561   size = GET_MODE_SIZE (TYPE_MODE (etype));
7562   if (size * 2 > len)
7563     return NULL_TREE;
7564   rpart = native_interpret_expr (etype, ptr, size);
7565   if (!rpart)
7566     return NULL_TREE;
7567   ipart = native_interpret_expr (etype, ptr+size, size);
7568   if (!ipart)
7569     return NULL_TREE;
7570   return build_complex (type, rpart, ipart);
7571 }
7572 
7573 
7574 /* Subroutine of native_interpret_expr.  Interpret the contents of
7575    the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7576    If the buffer cannot be interpreted, return NULL_TREE.  */
7577 
7578 static tree
7579 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7580 {
7581   tree etype, elem, elements;
7582   int i, size, count;
7583 
7584   etype = TREE_TYPE (type);
7585   size = GET_MODE_SIZE (TYPE_MODE (etype));
7586   count = TYPE_VECTOR_SUBPARTS (type);
7587   if (size * count > len)
7588     return NULL_TREE;
7589 
7590   elements = NULL_TREE;
7591   for (i = count - 1; i >= 0; i--)
7592     {
7593       elem = native_interpret_expr (etype, ptr+(i*size), size);
7594       if (!elem)
7595 	return NULL_TREE;
7596       elements = tree_cons (NULL_TREE, elem, elements);
7597     }
7598   return build_vector (type, elements);
7599 }
7600 
7601 
7602 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
7603    the buffer PTR of length LEN as a constant of type TYPE.  For
7604    INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7605    we return a REAL_CST, etc...  If the buffer cannot be interpreted,
7606    return NULL_TREE.  */
7607 
7608 tree
7609 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7610 {
7611   switch (TREE_CODE (type))
7612     {
7613     case INTEGER_TYPE:
7614     case ENUMERAL_TYPE:
7615     case BOOLEAN_TYPE:
7616       return native_interpret_int (type, ptr, len);
7617 
7618     case REAL_TYPE:
7619       return native_interpret_real (type, ptr, len);
7620 
7621     case COMPLEX_TYPE:
7622       return native_interpret_complex (type, ptr, len);
7623 
7624     case VECTOR_TYPE:
7625       return native_interpret_vector (type, ptr, len);
7626 
7627     default:
7628       return NULL_TREE;
7629     }
7630 }
7631 
7632 
7633 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7634    TYPE at compile-time.  If we're unable to perform the conversion
7635    return NULL_TREE.  */
7636 
7637 static tree
7638 fold_view_convert_expr (tree type, tree expr)
7639 {
7640   /* We support up to 512-bit values (for V8DFmode).  */
7641   unsigned char buffer[64];
7642   int len;
7643 
7644   /* Check that the host and target are sane.  */
7645   if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7646     return NULL_TREE;
7647 
7648   len = native_encode_expr (expr, buffer, sizeof (buffer));
7649   if (len == 0)
7650     return NULL_TREE;
7651 
7652   return native_interpret_expr (type, buffer, len);
7653 }
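
/* Illustrative sketch, assuming 32-bit IEEE single and 32-bit int: a
   VIEW_CONVERT_EXPR<int> applied to the REAL_CST 1.0f folds to the
   INTEGER_CST 0x3f800000, by encoding the float into the buffer and
   re-interpreting those bytes as an integer.  */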
7654 
7655 /* Build an expression for the address of T.  Folds away INDIRECT_REF
7656    to avoid confusing the gimplify process.  */
7657 
7658 tree
7659 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7660 {
7661   /* The size of the object is not relevant when talking about its address.  */
7662   if (TREE_CODE (t) == WITH_SIZE_EXPR)
7663     t = TREE_OPERAND (t, 0);
7664 
7665   if (TREE_CODE (t) == INDIRECT_REF)
7666     {
7667       t = TREE_OPERAND (t, 0);
7668 
7669       if (TREE_TYPE (t) != ptrtype)
7670 	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7671     }
7672   else if (TREE_CODE (t) == MEM_REF
7673       && integer_zerop (TREE_OPERAND (t, 1)))
7674     return TREE_OPERAND (t, 0);
7675   else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7676     {
7677       t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7678 
7679       if (TREE_TYPE (t) != ptrtype)
7680 	t = fold_convert_loc (loc, ptrtype, t);
7681     }
7682   else
7683     t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7684 
7685   return t;
7686 }
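
/* Illustrative folds performed while building the address (examples
   only, not compiled):  */
#if 0
int *a1 (int *p) { return &*p; } /* the INDIRECT_REF cancels: -> p  */
/* Likewise &MEM[p, 0] folds back to p, and taking the address of
   VIEW_CONVERT_EXPR<T> (x) becomes (T *) &x.  */
#endif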
7687 
7688 /* Build an expression for the address of T.  */
7689 
7690 tree
7691 build_fold_addr_expr_loc (location_t loc, tree t)
7692 {
7693   tree ptrtype = build_pointer_type (TREE_TYPE (t));
7694 
7695   return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7696 }
7697 
7698 static bool vec_cst_ctor_to_array (tree, tree *);
7699 
7700 /* Fold a unary expression of code CODE and type TYPE with operand
7701    OP0.  Return the folded expression if folding is successful.
7702    Otherwise, return NULL_TREE.  */
7703 
7704 tree
7705 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7706 {
7707   tree tem;
7708   tree arg0;
7709   enum tree_code_class kind = TREE_CODE_CLASS (code);
7710 
7711   gcc_assert (IS_EXPR_CODE_CLASS (kind)
7712 	      && TREE_CODE_LENGTH (code) == 1);
7713 
7714   arg0 = op0;
7715   if (arg0)
7716     {
7717       if (CONVERT_EXPR_CODE_P (code)
7718 	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7719 	{
7720 	  /* Don't use STRIP_NOPS, because signedness of argument type
7721 	     matters.  */
7722 	  STRIP_SIGN_NOPS (arg0);
7723 	}
7724       else
7725 	{
7726 	  /* Strip any conversions that don't change the mode.  This
7727 	     is safe for every expression, except for a comparison
7728 	     expression because its signedness is derived from its
7729 	     operands.
7730 
7731 	     Note that this is done as an internal manipulation within
7732 	     the constant folder, in order to find the simplest
7733 	     representation of the arguments so that their form can be
7734 	     studied.  In any cases, the appropriate type conversions
7735 	     should be put back in the tree that will get out of the
7736 	     constant folder.  */
7737 	  STRIP_NOPS (arg0);
7738 	}
7739     }
7740 
7741   if (TREE_CODE_CLASS (code) == tcc_unary)
7742     {
7743       if (TREE_CODE (arg0) == COMPOUND_EXPR)
7744 	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7745 		       fold_build1_loc (loc, code, type,
7746 				    fold_convert_loc (loc, TREE_TYPE (op0),
7747 						      TREE_OPERAND (arg0, 1))));
7748       else if (TREE_CODE (arg0) == COND_EXPR)
7749 	{
7750 	  tree arg01 = TREE_OPERAND (arg0, 1);
7751 	  tree arg02 = TREE_OPERAND (arg0, 2);
7752 	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7753 	    arg01 = fold_build1_loc (loc, code, type,
7754 				 fold_convert_loc (loc,
7755 						   TREE_TYPE (op0), arg01));
7756 	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7757 	    arg02 = fold_build1_loc (loc, code, type,
7758 				 fold_convert_loc (loc,
7759 						   TREE_TYPE (op0), arg02));
7760 	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7761 			     arg01, arg02);
7762 
7763 	  /* If this was a conversion, and all we did was to move it
7764 	     inside the COND_EXPR, bring it back out.  But leave it if
7765 	     it is a conversion from integer to integer and the
7766 	     result precision is no wider than a word since such a
7767 	     conversion is cheap and may be optimized away by combine,
7768 	     while it couldn't if it were outside the COND_EXPR.  Then return
7769 	     so we don't get into an infinite recursion loop taking the
7770 	     conversion out and then back in.  */
7771 
7772 	  if ((CONVERT_EXPR_CODE_P (code)
7773 	       || code == NON_LVALUE_EXPR)
7774 	      && TREE_CODE (tem) == COND_EXPR
7775 	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7776 	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7777 	      && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 1)))
7778 	      && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (tem, 2)))
7779 	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7780 		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7781 	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7782 		     && (INTEGRAL_TYPE_P
7783 			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7784 		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7785 		  || flag_syntax_only))
7786 	    tem = build1_loc (loc, code, type,
7787 			      build3 (COND_EXPR,
7788 				      TREE_TYPE (TREE_OPERAND
7789 						 (TREE_OPERAND (tem, 1), 0)),
7790 				      TREE_OPERAND (tem, 0),
7791 				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7792 				      TREE_OPERAND (TREE_OPERAND (tem, 2),
7793 						    0)));
7794 	  return tem;
7795 	}
7796    }
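
  /* For example (a sketch): a unary operation distributes over its
     operand's COND_EXPR or COMPOUND_EXPR, so -(a ? b : c) becomes
     a ? -b : -c, and (T) (x, y) becomes (x, (T) y).  */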
7797 
7798   switch (code)
7799     {
7800     case PAREN_EXPR:
7801       /* Re-association barriers around constants and other re-association
7802 	 barriers can be removed.  */
7803       if (CONSTANT_CLASS_P (op0)
7804 	  || TREE_CODE (op0) == PAREN_EXPR)
7805 	return fold_convert_loc (loc, type, op0);
7806       return NULL_TREE;
7807 
7808     CASE_CONVERT:
7809     case FLOAT_EXPR:
7810     case FIX_TRUNC_EXPR:
7811       if (TREE_TYPE (op0) == type)
7812 	return op0;
7813 
7814       if (COMPARISON_CLASS_P (op0))
7815 	{
7816 	  /* If we have (type) (a CMP b) and type is an integral type, return
7817 	     new expression involving the new type.  Canonicalize
7818 	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7819 	     non-integral type.
7820 	     Do not fold the result as that would not simplify further, also
7821 	     folding again results in recursions.  */
7822 	  if (TREE_CODE (type) == BOOLEAN_TYPE)
7823 	    return build2_loc (loc, TREE_CODE (op0), type,
7824 			       TREE_OPERAND (op0, 0),
7825 			       TREE_OPERAND (op0, 1));
7826 	  else if (!INTEGRAL_TYPE_P (type))
7827 	    return build3_loc (loc, COND_EXPR, type, op0,
7828 			       constant_boolean_node (true, type),
7829 			       constant_boolean_node (false, type));
7830 	}
7831 
7832       /* Handle cases of two conversions in a row.  */
7833       if (CONVERT_EXPR_P (op0))
7834 	{
7835 	  tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7836 	  tree inter_type = TREE_TYPE (op0);
7837 	  int inside_int = INTEGRAL_TYPE_P (inside_type);
7838 	  int inside_ptr = POINTER_TYPE_P (inside_type);
7839 	  int inside_float = FLOAT_TYPE_P (inside_type);
7840 	  int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7841 	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
7842 	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7843 	  int inter_int = INTEGRAL_TYPE_P (inter_type);
7844 	  int inter_ptr = POINTER_TYPE_P (inter_type);
7845 	  int inter_float = FLOAT_TYPE_P (inter_type);
7846 	  int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7847 	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
7848 	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7849 	  int final_int = INTEGRAL_TYPE_P (type);
7850 	  int final_ptr = POINTER_TYPE_P (type);
7851 	  int final_float = FLOAT_TYPE_P (type);
7852 	  int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7853 	  unsigned int final_prec = TYPE_PRECISION (type);
7854 	  int final_unsignedp = TYPE_UNSIGNED (type);
7855 
7856 	  /* In addition to the cases of two conversions in a row
7857 	     handled below, if we are converting something to its own
7858 	     type via an object of identical or wider precision, neither
7859 	     conversion is needed.  */
7860 	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7861 	      && (((inter_int || inter_ptr) && final_int)
7862 		  || (inter_float && final_float))
7863 	      && inter_prec >= final_prec)
7864 	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7865 
7866 	  /* Likewise, if the intermediate and initial types are either both
7867 	     float or both integer, we don't need the middle conversion if the
7868 	     former is wider than the latter and doesn't change the signedness
7869 	     (for integers).  Avoid this if the final type is a pointer since
7870 	     then we sometimes need the middle conversion.  Likewise if the
7871 	     final type has a precision not equal to the size of its mode.  */
7872 	  if (((inter_int && inside_int)
7873 	       || (inter_float && inside_float)
7874 	       || (inter_vec && inside_vec))
7875 	      && inter_prec >= inside_prec
7876 	      && (inter_float || inter_vec
7877 		  || inter_unsignedp == inside_unsignedp)
7878 	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7879 		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
7880 	      && ! final_ptr
7881 	      && (! final_vec || inter_prec == inside_prec))
7882 	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7883 
7884 	  /* If we have a sign-extension of a zero-extended value, we can
7885 	     replace that by a single zero-extension.  */
7886 	  if (inside_int && inter_int && final_int
7887 	      && inside_prec < inter_prec && inter_prec < final_prec
7888 	      && inside_unsignedp && !inter_unsignedp)
7889 	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
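
	  /* A minimal sketch of the case above, assuming 8-bit unsigned
	     char, 32-bit int and 64-bit long long:

	       (long long) (int) (unsigned char) x

	     first zero-extends x to a value in [0, 255]; sign-extending
	     that nonnegative value to 64 bits is itself a zero-extension,
	     so the middle conversion can be dropped.  */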
7890 
7891 	  /* Two conversions in a row are not needed unless:
7892 	     - some conversion is floating-point (overstrict for now), or
7893 	     - some conversion is a vector (overstrict for now), or
7894 	     - the intermediate type is narrower than both initial and
7895 	       final, or
7896 	     - the intermediate type and innermost type differ in signedness,
7897 	       and the outermost type is wider than the intermediate, or
7898 	     - the initial type is a pointer type and the precisions of the
7899 	       intermediate and final types differ, or
7900 	     - the final type is a pointer type and the precisions of the
7901 	       initial and intermediate types differ.  */
7902 	  if (! inside_float && ! inter_float && ! final_float
7903 	      && ! inside_vec && ! inter_vec && ! final_vec
7904 	      && (inter_prec >= inside_prec || inter_prec >= final_prec)
7905 	      && ! (inside_int && inter_int
7906 		    && inter_unsignedp != inside_unsignedp
7907 		    && inter_prec < final_prec)
7908 	      && ((inter_unsignedp && inter_prec > inside_prec)
7909 		  == (final_unsignedp && final_prec > inter_prec))
7910 	      && ! (inside_ptr && inter_prec != final_prec)
7911 	      && ! (final_ptr && inside_prec != inter_prec)
7912 	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7913 		    && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7914 	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7915 	}
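
      /* As an illustrative sketch, assuming an LP64 target: for an
	 int x, (int) (long) x folds to x by the rules above, since the
	 intermediate widening is at least as wide as both endpoints and
	 preserves signedness.  */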
7916 
7917       /* Handle (T *)&A.B.C for A being of type T and B and C
7918 	 living at offset zero.  This occurs frequently in
7919 	 C++ upcasting and then accessing the base.  */
7920       if (TREE_CODE (op0) == ADDR_EXPR
7921 	  && POINTER_TYPE_P (type)
7922 	  && handled_component_p (TREE_OPERAND (op0, 0)))
7923         {
7924 	  HOST_WIDE_INT bitsize, bitpos;
7925 	  tree offset;
7926 	  enum machine_mode mode;
7927 	  int unsignedp, volatilep;
7928           tree base = TREE_OPERAND (op0, 0);
7929 	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7930 				      &mode, &unsignedp, &volatilep, false);
7931 	  /* If the reference was to a (constant) zero offset, we can use
7932 	     the address of the base if it has the same base type
7933 	     as the result type and the pointer type is unqualified.  */
7934 	  if (! offset && bitpos == 0
7935 	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7936 		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7937 	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7938 	    return fold_convert_loc (loc, type,
7939 				     build_fold_addr_expr_loc (loc, base));
7940         }
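
      /* For instance, given

	   struct A { struct B { int i; } b; } a;

	 the expression (struct A *) &a.b.i has both components at offset
	 zero, so it folds to &a, assuming the unqualified pointer type
	 matches.  */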
7941 
7942       if (TREE_CODE (op0) == MODIFY_EXPR
7943 	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7944 	  /* Detect assigning a bitfield.  */
7945 	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7946 	       && DECL_BIT_FIELD
7947 	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7948 	{
7949 	  /* Don't leave an assignment inside a conversion
7950 	     unless assigning a bitfield.  */
7951 	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7952 	  /* First do the assignment, then return the converted constant.  */
7953 	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7954 	  TREE_NO_WARNING (tem) = 1;
7955 	  TREE_USED (tem) = 1;
7956 	  return tem;
7957 	}
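
      /* E.g. for an int x, (long) (x = 5) becomes the COMPOUND_EXPR
	 (x = 5, 5L): the assignment is performed first, then the
	 converted constant is used as the value of the expression.  */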
7958 
7959       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7960 	 constant (if x has a signed type, the sign bit cannot be set
7961 	 in c).  This folds extension into the BIT_AND_EXPR.
7962 	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7963 	 very likely don't have maximal range for their precision and this
7964 	 transformation effectively doesn't preserve non-maximal ranges.  */
7965       if (TREE_CODE (type) == INTEGER_TYPE
7966 	  && TREE_CODE (op0) == BIT_AND_EXPR
7967 	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7968 	{
7969 	  tree and_expr = op0;
7970 	  tree and0 = TREE_OPERAND (and_expr, 0);
7971 	  tree and1 = TREE_OPERAND (and_expr, 1);
7972 	  int change = 0;
7973 
7974 	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7975 	      || (TYPE_PRECISION (type)
7976 		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7977 	    change = 1;
7978 	  else if (TYPE_PRECISION (TREE_TYPE (and1))
7979 		   <= HOST_BITS_PER_WIDE_INT
7980 		   && host_integerp (and1, 1))
7981 	    {
7982 	      unsigned HOST_WIDE_INT cst;
7983 
7984 	      cst = tree_low_cst (and1, 1);
7985 	      cst &= (HOST_WIDE_INT) -1
7986 		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7987 	      change = (cst == 0);
7988 #ifdef LOAD_EXTEND_OP
7989 	      if (change
7990 		  && !flag_syntax_only
7991 		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7992 		      == ZERO_EXTEND))
7993 		{
7994 		  tree uns = unsigned_type_for (TREE_TYPE (and0));
7995 		  and0 = fold_convert_loc (loc, uns, and0);
7996 		  and1 = fold_convert_loc (loc, uns, and1);
7997 		}
7998 #endif
7999 	    }
8000 	  if (change)
8001 	    {
8002 	      tem = force_fit_type_double (type, tree_to_double_int (and1),
8003 					   0, TREE_OVERFLOW (and1));
8004 	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
8005 				  fold_convert_loc (loc, type, and0), tem);
8006 	    }
8007 	}
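
      /* A small sketch: for an int x, (long) (x & 0xff) becomes
	 (long) x & 0xffL; the mask excludes the sign bit, so widening x
	 first cannot change the result.  */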
8008 
8009       /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer types,
8010          when one of the new casts will fold away.  Conservatively we assume
8011 	 that this happens when X or Y is a NOP_EXPR or Y is an INTEGER_CST.  */
8012       if (POINTER_TYPE_P (type)
8013 	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8014 	  && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8015 	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8016 	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8017 	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8018 	{
8019 	  tree arg00 = TREE_OPERAND (arg0, 0);
8020 	  tree arg01 = TREE_OPERAND (arg0, 1);
8021 
8022 	  return fold_build_pointer_plus_loc
8023 		   (loc, fold_convert_loc (loc, type, arg00), arg01);
8024 	}
8025 
8026       /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8027 	 of the same precision, and X has an integer type not narrower than
8028 	 T1 or T2, i.e. the cast (T2)X isn't an extension.  */
8029       if (INTEGRAL_TYPE_P (type)
8030 	  && TREE_CODE (op0) == BIT_NOT_EXPR
8031 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8032 	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8033 	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8034 	{
8035 	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8036 	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8037 	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8038 	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8039 				fold_convert_loc (loc, type, tem));
8040 	}
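
      /* E.g. for an int x, (int) ~(unsigned int) x folds to ~x: both
	 types have the same precision and the inner cast is not an
	 extension, so the complement commutes with the conversions.  */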
8041 
8042       /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8043 	 type of X and Y (integer types only).  */
8044       if (INTEGRAL_TYPE_P (type)
8045 	  && TREE_CODE (op0) == MULT_EXPR
8046 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8047 	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8048 	{
8049 	  /* Be careful not to introduce new overflows.  */
8050 	  tree mult_type;
8051           if (TYPE_OVERFLOW_WRAPS (type))
8052 	    mult_type = type;
8053 	  else
8054 	    mult_type = unsigned_type_for (type);
8055 
8056 	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8057 	    {
8058 	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8059 				 fold_convert_loc (loc, mult_type,
8060 						   TREE_OPERAND (op0, 0)),
8061 				 fold_convert_loc (loc, mult_type,
8062 						   TREE_OPERAND (op0, 1)));
8063 	      return fold_convert_loc (loc, type, tem);
8064 	    }
8065 	}
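
      /* Sketch: with 16-bit short and 32-bit int, (short) (x * y) for
	 int x and y can become (short) ((unsigned short) x
	 * (unsigned short) y); doing the multiplication in the narrower
	 unsigned type cannot introduce new signed overflow.  */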
8066 
8067       tem = fold_convert_const (code, type, op0);
8068       return tem ? tem : NULL_TREE;
8069 
8070     case ADDR_SPACE_CONVERT_EXPR:
8071       if (integer_zerop (arg0))
8072 	return fold_convert_const (code, type, arg0);
8073       return NULL_TREE;
8074 
8075     case FIXED_CONVERT_EXPR:
8076       tem = fold_convert_const (code, type, arg0);
8077       return tem ? tem : NULL_TREE;
8078 
8079     case VIEW_CONVERT_EXPR:
8080       if (TREE_TYPE (op0) == type)
8081 	return op0;
8082       if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8083 	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8084 			    type, TREE_OPERAND (op0, 0));
8085       if (TREE_CODE (op0) == MEM_REF)
8086 	return fold_build2_loc (loc, MEM_REF, type,
8087 				TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8088 
8089       /* For integral conversions with the same precision, and for
8090 	 pointer conversions, use a NOP_EXPR instead.  */
8091       if ((INTEGRAL_TYPE_P (type)
8092 	   || POINTER_TYPE_P (type))
8093 	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8094 	      || POINTER_TYPE_P (TREE_TYPE (op0)))
8095 	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8096 	return fold_convert_loc (loc, type, op0);
8097 
8098       /* Strip inner integral conversions that do not change the precision.  */
8099       if (CONVERT_EXPR_P (op0)
8100 	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8101 	      || POINTER_TYPE_P (TREE_TYPE (op0)))
8102 	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8103 	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8104 	  && (TYPE_PRECISION (TREE_TYPE (op0))
8105 	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8106 	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8107 			    type, TREE_OPERAND (op0, 0));
8108 
8109       return fold_view_convert_expr (type, op0);
8110 
8111     case NEGATE_EXPR:
8112       tem = fold_negate_expr (loc, arg0);
8113       if (tem)
8114 	return fold_convert_loc (loc, type, tem);
8115       return NULL_TREE;
8116 
8117     case ABS_EXPR:
8118       if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8119 	return fold_abs_const (arg0, type);
8120       else if (TREE_CODE (arg0) == NEGATE_EXPR)
8121 	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8122       /* Convert fabs((double)float) into (double)fabsf(float).  */
8123       else if (TREE_CODE (arg0) == NOP_EXPR
8124 	       && TREE_CODE (type) == REAL_TYPE)
8125 	{
8126 	  tree targ0 = strip_float_extensions (arg0);
8127 	  if (targ0 != arg0)
8128 	    return fold_convert_loc (loc, type,
8129 				     fold_build1_loc (loc, ABS_EXPR,
8130 						  TREE_TYPE (targ0),
8131 						  targ0));
8132 	}
8133       /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
8134       else if (TREE_CODE (arg0) == ABS_EXPR)
8135 	return arg0;
8136       else if (tree_expr_nonnegative_p (arg0))
8137 	return arg0;
8138 
8139       /* Strip sign ops from argument.  */
8140       if (TREE_CODE (type) == REAL_TYPE)
8141 	{
8142 	  tem = fold_strip_sign_ops (arg0);
8143 	  if (tem)
8144 	    return fold_build1_loc (loc, ABS_EXPR, type,
8145 				fold_convert_loc (loc, type, tem));
8146 	}
8147       return NULL_TREE;
8148 
8149     case CONJ_EXPR:
8150       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8151 	return fold_convert_loc (loc, type, arg0);
8152       if (TREE_CODE (arg0) == COMPLEX_EXPR)
8153 	{
8154 	  tree itype = TREE_TYPE (type);
8155 	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8156 	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8157 	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8158 			      negate_expr (ipart));
8159 	}
8160       if (TREE_CODE (arg0) == COMPLEX_CST)
8161 	{
8162 	  tree itype = TREE_TYPE (type);
8163 	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8164 	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8165 	  return build_complex (type, rpart, negate_expr (ipart));
8166 	}
8167       if (TREE_CODE (arg0) == CONJ_EXPR)
8168 	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8169       return NULL_TREE;
8170 
8171     case BIT_NOT_EXPR:
8172       if (TREE_CODE (arg0) == INTEGER_CST)
8173         return fold_not_const (arg0, type);
8174       else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8175 	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8176       /* Convert ~ (-A) to A - 1.  */
8177       else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8178 	return fold_build2_loc (loc, MINUS_EXPR, type,
8179 			    fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8180 			    build_int_cst (type, 1));
8181       /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
8182       else if (INTEGRAL_TYPE_P (type)
8183 	       && ((TREE_CODE (arg0) == MINUS_EXPR
8184 		    && integer_onep (TREE_OPERAND (arg0, 1)))
8185 		   || (TREE_CODE (arg0) == PLUS_EXPR
8186 		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8187 	return fold_build1_loc (loc, NEGATE_EXPR, type,
8188 			    fold_convert_loc (loc, type,
8189 					      TREE_OPERAND (arg0, 0)));
8190       /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
8191       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8192 	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8193 			       	     fold_convert_loc (loc, type,
8194 						       TREE_OPERAND (arg0, 0)))))
8195 	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8196 			    fold_convert_loc (loc, type,
8197 					      TREE_OPERAND (arg0, 1)));
8198       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8199 	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8200 			       	     fold_convert_loc (loc, type,
8201 						       TREE_OPERAND (arg0, 1)))))
8202 	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8203 			    fold_convert_loc (loc, type,
8204 					      TREE_OPERAND (arg0, 0)), tem);
8205       /* Perform BIT_NOT_EXPR on each element individually.  */
8206       else if (TREE_CODE (arg0) == VECTOR_CST)
8207 	{
8208 	  tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8209 	  int count = TYPE_VECTOR_SUBPARTS (type), i;
8210 
8211 	  for (i = 0; i < count; i++)
8212 	    {
8213 	      if (elements)
8214 		{
8215 		  elem = TREE_VALUE (elements);
8216 		  elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8217 		  if (elem == NULL_TREE)
8218 		    break;
8219 		  elements = TREE_CHAIN (elements);
8220 		}
8221 	      else
8222 		elem = build_int_cst (TREE_TYPE (type), -1);
8223 	      list = tree_cons (NULL_TREE, elem, list);
8224 	    }
8225 	  if (i == count)
8226 	    return build_vector (type, nreverse (list));
8227 	}
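
      /* E.g. for a vector of four ints, ~{1, 2, 3, 4} folds
	 element-wise to {-2, -3, -4, -5}, since ~n == -n - 1 in two's
	 complement.  */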
8228 
8229       return NULL_TREE;
8230 
8231     case TRUTH_NOT_EXPR:
8232       /* The argument to invert_truthvalue must have Boolean type.  */
8233       if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8234           arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8235 
8236       /* Note that the operand of this must be an int
8237 	 and its value must be 0 or 1.
8238 	 ("true" is a fixed value perhaps depending on the language,
8239 	 but we don't handle values other than 1 correctly yet.)  */
8240       tem = fold_truth_not_expr (loc, arg0);
8241       if (!tem)
8242 	return NULL_TREE;
8243       return fold_convert_loc (loc, type, tem);
8244 
8245     case REALPART_EXPR:
8246       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8247 	return fold_convert_loc (loc, type, arg0);
8248       if (TREE_CODE (arg0) == COMPLEX_EXPR)
8249 	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8250 				 TREE_OPERAND (arg0, 1));
8251       if (TREE_CODE (arg0) == COMPLEX_CST)
8252 	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8253       if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8254 	{
8255 	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
8256 	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8257 			     fold_build1_loc (loc, REALPART_EXPR, itype,
8258 					  TREE_OPERAND (arg0, 0)),
8259 			     fold_build1_loc (loc, REALPART_EXPR, itype,
8260 					  TREE_OPERAND (arg0, 1)));
8261 	  return fold_convert_loc (loc, type, tem);
8262 	}
8263       if (TREE_CODE (arg0) == CONJ_EXPR)
8264 	{
8265 	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
8266 	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8267 			     TREE_OPERAND (arg0, 0));
8268 	  return fold_convert_loc (loc, type, tem);
8269 	}
8270       if (TREE_CODE (arg0) == CALL_EXPR)
8271 	{
8272 	  tree fn = get_callee_fndecl (arg0);
8273 	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8274 	    switch (DECL_FUNCTION_CODE (fn))
8275 	      {
8276 	      CASE_FLT_FN (BUILT_IN_CEXPI):
8277 	        fn = mathfn_built_in (type, BUILT_IN_COS);
8278 		if (fn)
8279 	          return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8280 		break;
8281 
8282 	      default:
8283 		break;
8284 	      }
8285 	}
8286       return NULL_TREE;
8287 
8288     case IMAGPART_EXPR:
8289       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8290 	return build_zero_cst (type);
8291       if (TREE_CODE (arg0) == COMPLEX_EXPR)
8292 	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8293 				 TREE_OPERAND (arg0, 0));
8294       if (TREE_CODE (arg0) == COMPLEX_CST)
8295 	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8296       if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8297 	{
8298 	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
8299 	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8300 			     fold_build1_loc (loc, IMAGPART_EXPR, itype,
8301 					  TREE_OPERAND (arg0, 0)),
8302 			     fold_build1_loc (loc, IMAGPART_EXPR, itype,
8303 					  TREE_OPERAND (arg0, 1)));
8304 	  return fold_convert_loc (loc, type, tem);
8305 	}
8306       if (TREE_CODE (arg0) == CONJ_EXPR)
8307 	{
8308 	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
8309 	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8310 	  return fold_convert_loc (loc, type, negate_expr (tem));
8311 	}
8312       if (TREE_CODE (arg0) == CALL_EXPR)
8313 	{
8314 	  tree fn = get_callee_fndecl (arg0);
8315 	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8316 	    switch (DECL_FUNCTION_CODE (fn))
8317 	      {
8318 	      CASE_FLT_FN (BUILT_IN_CEXPI):
8319 	        fn = mathfn_built_in (type, BUILT_IN_SIN);
8320 		if (fn)
8321 	          return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8322 		break;
8323 
8324 	      default:
8325 		break;
8326 	      }
8327 	}
8328       return NULL_TREE;
8329 
8330     case INDIRECT_REF:
8331       /* Fold *&X to X if X is an lvalue.  */
8332       if (TREE_CODE (op0) == ADDR_EXPR)
8333 	{
8334 	  tree op00 = TREE_OPERAND (op0, 0);
8335 	  if ((TREE_CODE (op00) == VAR_DECL
8336 	       || TREE_CODE (op00) == PARM_DECL
8337 	       || TREE_CODE (op00) == RESULT_DECL)
8338 	      && !TREE_READONLY (op00))
8339 	    return op00;
8340 	}
8341       return NULL_TREE;
8342 
8343     case VEC_UNPACK_LO_EXPR:
8344     case VEC_UNPACK_HI_EXPR:
8345     case VEC_UNPACK_FLOAT_LO_EXPR:
8346     case VEC_UNPACK_FLOAT_HI_EXPR:
8347       {
8348 	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8349 	tree *elts, vals = NULL_TREE;
8350 	enum tree_code subcode;
8351 
8352 	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8353 	if (TREE_CODE (arg0) != VECTOR_CST)
8354 	  return NULL_TREE;
8355 
8356 	elts = XALLOCAVEC (tree, nelts * 2);
8357 	if (!vec_cst_ctor_to_array (arg0, elts))
8358 	  return NULL_TREE;
8359 
8360 	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8361 				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
8362 	  elts += nelts;
8363 
8364 	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8365 	  subcode = NOP_EXPR;
8366 	else
8367 	  subcode = FLOAT_EXPR;
8368 
8369 	for (i = 0; i < nelts; i++)
8370 	  {
8371 	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8372 	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8373 	      return NULL_TREE;
8374 	  }
8375 
8376 	for (i = 0; i < nelts; i++)
8377 	  vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals);
8378 	return build_vector (type, vals);
8379       }
8380 
8381     default:
8382       return NULL_TREE;
8383     } /* switch (code) */
8384 }
8385 
8386 
8387 /* If the operation was a conversion do _not_ mark a resulting constant
8388    with TREE_OVERFLOW if the original constant was not.  These conversions
8389    have implementation defined behavior and retaining the TREE_OVERFLOW
8390    flag here would confuse later passes such as VRP.  */
8391 tree
8392 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8393 				tree type, tree op0)
8394 {
8395   tree res = fold_unary_loc (loc, code, type, op0);
8396   if (res
8397       && TREE_CODE (res) == INTEGER_CST
8398       && TREE_CODE (op0) == INTEGER_CST
8399       && CONVERT_EXPR_CODE_P (code))
8400     TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8401 
8402   return res;
8403 }
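
/* As a sketch of the intent: folding (int) 4294967295U yields -1 on a
   two's-complement target.  That conversion is implementation-defined,
   not an overflow, so this wrapper copies the (clear) TREE_OVERFLOW
   flag from the original constant rather than keeping one the fitting
   step may have set.  */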
8404 
8405 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8406    operands OP0 and OP1.  LOC is the location of the resulting expression.
8407    ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8408    Return the folded expression if folding is successful.  Otherwise,
8409    return NULL_TREE.  */
8410 static tree
8411 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8412 		  tree arg0, tree arg1, tree op0, tree op1)
8413 {
8414   tree tem;
8415 
8416   /* We only do these simplifications if we are optimizing.  */
8417   if (!optimize)
8418     return NULL_TREE;
8419 
8420   /* Check for things like (A || B) && (A || C).  We can convert this
8421      to A || (B && C).  Note that either operator can be any of the four
8422      truth and/or operations and the transformation will still be
8423      valid.   Also note that we only care about order for the
8424      ANDIF and ORIF operators.  If B contains side effects, this
8425      might change the truth-value of A.  */
8426   if (TREE_CODE (arg0) == TREE_CODE (arg1)
8427       && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8428 	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8429 	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
8430 	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8431       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8432     {
8433       tree a00 = TREE_OPERAND (arg0, 0);
8434       tree a01 = TREE_OPERAND (arg0, 1);
8435       tree a10 = TREE_OPERAND (arg1, 0);
8436       tree a11 = TREE_OPERAND (arg1, 1);
8437       int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8438 			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8439 			 && (code == TRUTH_AND_EXPR
8440 			     || code == TRUTH_OR_EXPR));
8441 
8442       if (operand_equal_p (a00, a10, 0))
8443 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8444 			    fold_build2_loc (loc, code, type, a01, a11));
8445       else if (commutative && operand_equal_p (a00, a11, 0))
8446 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8447 			    fold_build2_loc (loc, code, type, a01, a10));
8448       else if (commutative && operand_equal_p (a01, a10, 0))
8449 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8450 			    fold_build2_loc (loc, code, type, a00, a11));
8451 
8452       /* This case is tricky because we must either have commutative
8453 	 operators or else A10 must not have side-effects.  */
8454 
8455       else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8456 	       && operand_equal_p (a01, a11, 0))
8457 	return fold_build2_loc (loc, TREE_CODE (arg0), type,
8458 			    fold_build2_loc (loc, code, type, a00, a10),
8459 			    a01);
8460     }
8461 
8462   /* See if we can build a range comparison.  */
8463   if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8464     return tem;
8465 
8466   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8467       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8468     {
8469       tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8470       if (tem)
8471 	return fold_build2_loc (loc, code, type, tem, arg1);
8472     }
8473 
8474   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8475       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8476     {
8477       tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8478       if (tem)
8479 	return fold_build2_loc (loc, code, type, arg0, tem);
8480     }
8481 
8482   /* Check for the possibility of merging component references.  If our
8483      lhs is another similar operation, try to merge its rhs with our
8484      rhs.  Then try to merge our lhs and rhs.  */
8485   if (TREE_CODE (arg0) == code
8486       && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8487 					 TREE_OPERAND (arg0, 1), arg1)))
8488     return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8489 
8490   if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8491     return tem;
8492 
8493   if ((BRANCH_COST (optimize_function_for_speed_p (cfun),
8494 		    false) >= 2)
8495       && LOGICAL_OP_NON_SHORT_CIRCUIT
8496       && (code == TRUTH_AND_EXPR
8497           || code == TRUTH_ANDIF_EXPR
8498           || code == TRUTH_OR_EXPR
8499           || code == TRUTH_ORIF_EXPR))
8500     {
8501       enum tree_code ncode, icode;
8502 
8503       ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8504 	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8505       icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8506 
8507       /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8508 	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8509 	 We don't want to pack more than two leaves into a non-IF
8510 	 AND/OR expression.
8511 	 If the tree code of the left-hand operand isn't an AND/OR-IF code
8512 	 and is not equal to ICODE, we don't want to add the right-hand operand.
8513 	 If the inner right-hand side of the left-hand operand has
8514 	 side effects, or isn't simple, then we can't add to it,
8515 	 as otherwise we might destroy the if-sequence.  */
8516       if (TREE_CODE (arg0) == icode
8517 	  && simple_operand_p_2 (arg1)
8518 	  /* Needed for sequence points to handle trappings, and
8519 	     side-effects.  */
8520 	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8521 	{
8522 	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8523 				 arg1);
8524 	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8525 				  tem);
8526 	}
8527 	/* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8528 	   or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8529       else if (TREE_CODE (arg1) == icode
8530 	  && simple_operand_p_2 (arg0)
8531 	  /* Needed for sequence points to handle trappings, and
8532 	     side-effects.  */
8533 	  && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8534 	{
8535 	  tem = fold_build2_loc (loc, ncode, type,
8536 				 arg0, TREE_OPERAND (arg1, 0));
8537 	  return fold_build2_loc (loc, icode, type, tem,
8538 				  TREE_OPERAND (arg1, 1));
8539 	}
8540       /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8541 	 into (A OR B).
8542 	 For sequence point consistency, we need to check for trapping,
8543 	 and side-effects.  */
8544       else if (code == icode && simple_operand_p_2 (arg0)
8545                && simple_operand_p_2 (arg1))
8546 	return fold_build2_loc (loc, ncode, type, arg0, arg1);
8547     }
8548 
8549   return NULL_TREE;
8550 }
8551 
8552 /* Fold a binary expression of code CODE and type TYPE with operands
8553    OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8554    Return the folded expression if folding is successful.  Otherwise,
8555    return NULL_TREE.  */
8556 
8557 static tree
8558 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8559 {
8560   enum tree_code compl_code;
8561 
8562   if (code == MIN_EXPR)
8563     compl_code = MAX_EXPR;
8564   else if (code == MAX_EXPR)
8565     compl_code = MIN_EXPR;
8566   else
8567     gcc_unreachable ();
8568 
8569   /* MIN (MAX (a, b), b) == b.  */
8570   if (TREE_CODE (op0) == compl_code
8571       && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8572     return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8573 
8574   /* MIN (MAX (b, a), b) == b.  */
8575   if (TREE_CODE (op0) == compl_code
8576       && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8577       && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8578     return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8579 
8580   /* MIN (a, MAX (a, b)) == a.  */
8581   if (TREE_CODE (op1) == compl_code
8582       && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8583       && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8584     return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8585 
8586   /* MIN (a, MAX (b, a)) == a.  */
8587   if (TREE_CODE (op1) == compl_code
8588       && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8589       && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8590     return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8591 
8592   return NULL_TREE;
8593 }
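
/* To see why MIN (MAX (a, b), b) == b: when a <= b the MAX is b and
   MIN (b, b) is b; when a > b the MAX is a and MIN (a, b) is again b.
   The other identities follow symmetrically, subject to the
   reorder_operands_p checks above.  */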
8594 
8595 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8596    by changing CODE to reduce the magnitude of constants involved in
8597    ARG0 of the comparison.
8598    Returns a canonicalized comparison tree if a simplification was
8599    possible, otherwise returns NULL_TREE.
8600    Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8601    valid if signed overflow is undefined.  */
8602 
8603 static tree
8604 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8605 				 tree arg0, tree arg1,
8606 				 bool *strict_overflow_p)
8607 {
8608   enum tree_code code0 = TREE_CODE (arg0);
8609   tree t, cst0 = NULL_TREE;
8610   int sgn0;
8611   bool swap = false;
8612 
8613   /* Match A +- CST code arg1 and CST code arg1.  We can change the
8614      first form only if overflow is undefined.  */
8615   if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8616 	 /* In principle pointers also have undefined overflow behavior,
8617 	    but that causes problems elsewhere.  */
8618 	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8619 	 && (code0 == MINUS_EXPR
8620 	     || code0 == PLUS_EXPR)
8621          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8622 	|| code0 == INTEGER_CST))
8623     return NULL_TREE;
8624 
8625   /* Identify the constant in arg0 and its sign.  */
8626   if (code0 == INTEGER_CST)
8627     cst0 = arg0;
8628   else
8629     cst0 = TREE_OPERAND (arg0, 1);
8630   sgn0 = tree_int_cst_sgn (cst0);
8631 
8632   /* Overflowed constants and zero will cause problems.  */
8633   if (integer_zerop (cst0)
8634       || TREE_OVERFLOW (cst0))
8635     return NULL_TREE;
8636 
8637   /* See if we can reduce the magnitude of the constant in
8638      arg0 by changing the comparison code.  */
8639   if (code0 == INTEGER_CST)
8640     {
8641       /* CST <= arg1  ->  CST-1 < arg1.  */
8642       if (code == LE_EXPR && sgn0 == 1)
8643 	code = LT_EXPR;
8644       /* -CST < arg1  ->  -CST-1 <= arg1.  */
8645       else if (code == LT_EXPR && sgn0 == -1)
8646 	code = LE_EXPR;
8647       /* CST > arg1  ->  CST-1 >= arg1.  */
8648       else if (code == GT_EXPR && sgn0 == 1)
8649 	code = GE_EXPR;
8650       /* -CST >= arg1  ->  -CST-1 > arg1.  */
8651       else if (code == GE_EXPR && sgn0 == -1)
8652 	code = GT_EXPR;
8653       else
8654         return NULL_TREE;
8655       /* arg1 code' CST' might be more canonical.  */
8656       swap = true;
8657     }
8658   else
8659     {
8660       /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
8661       if (code == LT_EXPR
8662 	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8663 	code = LE_EXPR;
8664       /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
8665       else if (code == GT_EXPR
8666 	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8667 	code = GE_EXPR;
8668       /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
8669       else if (code == LE_EXPR
8670 	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8671 	code = LT_EXPR;
8672       /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
8673       else if (code == GE_EXPR
8674 	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8675 	code = GT_EXPR;
8676       else
8677 	return NULL_TREE;
8678       *strict_overflow_p = true;
8679     }
8680 
8681   /* Now build the constant reduced in magnitude.  But not if that
8682      would produce one outside of its type's range.  */
8683   if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8684       && ((sgn0 == 1
8685 	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8686 	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8687 	  || (sgn0 == -1
8688 	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8689 	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8690     /* We cannot swap the comparison here as that would cause us to
8691        endlessly recurse.  */
8692     return NULL_TREE;
8693 
8694   t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8695 		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
8696   if (code0 != INTEGER_CST)
8697     t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8698   t = fold_convert (TREE_TYPE (arg1), t);
8699 
8700   /* If swapping might yield a more canonical form, do so.  */
8701   if (swap)
8702     return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8703   else
8704     return fold_build2_loc (loc, code, type, t, arg1);
8705 }
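
/* Two illustrative canonicalizations, assuming signed operands with
   undefined overflow:

     3 <= y      is rewritten as  2 < y, then swapped to  y > 2;
     x + 2 > y   becomes          x + 1 >= y.

   Each step strictly reduces the magnitude of the constant involved.  */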
8706 
8707 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8708    overflow further.  Try to decrease the magnitude of constants involved
8709    by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8710    and put sole constants at the second argument position.
8711    Returns the canonicalized tree if changed, otherwise NULL_TREE.  */
8712 
8713 static tree
8714 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8715 			       tree arg0, tree arg1)
8716 {
8717   tree t;
8718   bool strict_overflow_p;
8719   const char * const warnmsg = G_("assuming signed overflow does not occur "
8720 				  "when reducing constant in comparison");
8721 
8722   /* Try canonicalization by simplifying arg0.  */
8723   strict_overflow_p = false;
8724   t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8725 				       &strict_overflow_p);
8726   if (t)
8727     {
8728       if (strict_overflow_p)
8729 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8730       return t;
8731     }
8732 
8733   /* Try canonicalization by simplifying arg1 using the swapped
8734      comparison.  */
8735   code = swap_tree_comparison (code);
8736   strict_overflow_p = false;
8737   t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8738 				       &strict_overflow_p);
8739   if (t && strict_overflow_p)
8740     fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8741   return t;
8742 }
8743 
8744 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8745    space.  This is used to avoid issuing overflow warnings for
8746    expressions like &p->x which cannot wrap.  */
8747 
8748 static bool
8749 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8750 {
8751   unsigned HOST_WIDE_INT offset_low, total_low;
8752   HOST_WIDE_INT size, offset_high, total_high;
8753 
8754   if (!POINTER_TYPE_P (TREE_TYPE (base)))
8755     return true;
8756 
8757   if (bitpos < 0)
8758     return true;
8759 
8760   if (offset == NULL_TREE)
8761     {
8762       offset_low = 0;
8763       offset_high = 0;
8764     }
8765   else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8766     return true;
8767   else
8768     {
8769       offset_low = TREE_INT_CST_LOW (offset);
8770       offset_high = TREE_INT_CST_HIGH (offset);
8771     }
8772 
8773   if (add_double_with_sign (offset_low, offset_high,
8774 			    bitpos / BITS_PER_UNIT, 0,
8775 			    &total_low, &total_high,
8776 			    true))
8777     return true;
8778 
8779   if (total_high != 0)
8780     return true;
8781 
8782   size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8783   if (size <= 0)
8784     return true;
8785 
8786   /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8787      array.  */
8788   if (TREE_CODE (base) == ADDR_EXPR)
8789     {
8790       HOST_WIDE_INT base_size;
8791 
8792       base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8793       if (base_size > 0 && size < base_size)
8794 	size = base_size;
8795     }
8796 
8797   return total_low > (unsigned HOST_WIDE_INT) size;
8798 }
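
/* For instance, &p->x adds a small constant field offset known to stay
   within the size of *p, so this returns false and no warning results;
   a non-constant offset, or a total exceeding the object size,
   conservatively makes it return true.  */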
8799 
8800 /* Subroutine of fold_binary.  This routine performs all of the
8801    transformations that are common to the equality/inequality
8802    operators (EQ_EXPR and NE_EXPR) and the ordering operators
8803    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
8804    fold_binary should call fold_binary.  Fold a comparison with
8805    tree code CODE and type TYPE with operands OP0 and OP1.  Return
8806    the folded comparison or NULL_TREE.  */
8807 
8808 static tree
8809 fold_comparison (location_t loc, enum tree_code code, tree type,
8810 		 tree op0, tree op1)
8811 {
8812   tree arg0, arg1, tem;
8813 
8814   arg0 = op0;
8815   arg1 = op1;
8816 
8817   STRIP_SIGN_NOPS (arg0);
8818   STRIP_SIGN_NOPS (arg1);
8819 
8820   tem = fold_relational_const (code, type, arg0, arg1);
8821   if (tem != NULL_TREE)
8822     return tem;
8823 
8824   /* If one arg is a real or integer constant, put it last.  */
8825   if (tree_swap_operands_p (arg0, arg1, true))
8826     return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8827 
8828   /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
8829   if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8830       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8831 	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8832 	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8833       && (TREE_CODE (arg1) == INTEGER_CST
8834 	  && !TREE_OVERFLOW (arg1)))
8835     {
8836       tree const1 = TREE_OPERAND (arg0, 1);
8837       tree const2 = arg1;
8838       tree variable = TREE_OPERAND (arg0, 0);
8839       tree lhs;
8840       int lhs_add;
8841       lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8842 
8843       lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8844 			 TREE_TYPE (arg1), const2, const1);
8845 
8846       /* If the constant operation overflowed this can be
8847 	 simplified as a comparison against INT_MAX/INT_MIN.  */
8848       if (TREE_CODE (lhs) == INTEGER_CST
8849 	  && TREE_OVERFLOW (lhs))
8850 	{
8851 	  int const1_sgn = tree_int_cst_sgn (const1);
8852 	  enum tree_code code2 = code;
8853 
8854 	  /* Get the sign of the constant on the lhs if the
8855 	     operation were VARIABLE + CONST1.  */
8856 	  if (TREE_CODE (arg0) == MINUS_EXPR)
8857 	    const1_sgn = -const1_sgn;
8858 
8859 	  /* The sign of the constant determines if we overflowed
8860 	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8861 	     Canonicalize to the INT_MIN overflow by swapping the comparison
8862 	     if necessary.  */
8863 	  if (const1_sgn == -1)
8864 	    code2 = swap_tree_comparison (code);
8865 
8866 	  /* We now can look at the canonicalized case
8867 	       VARIABLE + 1  CODE2  INT_MIN
8868 	     and decide on the result.  */
8869 	  if (code2 == LT_EXPR
8870 	      || code2 == LE_EXPR
8871 	      || code2 == EQ_EXPR)
8872 	    return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8873 	  else if (code2 == NE_EXPR
8874 		   || code2 == GE_EXPR
8875 		   || code2 == GT_EXPR)
8876 	    return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8877 	}
8878 
8879       if (TREE_CODE (lhs) == TREE_CODE (arg1)
8880 	  && (TREE_CODE (lhs) != INTEGER_CST
8881 	      || !TREE_OVERFLOW (lhs)))
8882 	{
8883 	  if (code != EQ_EXPR && code != NE_EXPR)
8884 	    fold_overflow_warning ("assuming signed overflow does not occur "
8885 				   "when changing X +- C1 cmp C2 to "
8886 				   "X cmp C1 +- C2",
8887 				   WARN_STRICT_OVERFLOW_COMPARISON);
8888 	  return fold_build2_loc (loc, code, type, variable, lhs);
8889 	}
8890     }
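
  /* E.g. for signed int x, x + 10 < 20 is rewritten as x < 10.  In the
     overflow branch, x - 1 > INT_MAX folds outright to false, since
     with undefined signed overflow x - 1 can never exceed INT_MAX.  */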
8891 
8892   /* For comparisons of pointers we can decompose it to a compile time
8893      comparison of the base objects and the offsets into the object.
8894      This requires at least one operand being an ADDR_EXPR or a
8895      POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
8896   if (POINTER_TYPE_P (TREE_TYPE (arg0))
8897       && (TREE_CODE (arg0) == ADDR_EXPR
8898 	  || TREE_CODE (arg1) == ADDR_EXPR
8899 	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8900 	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8901     {
8902       tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8903       HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8904       enum machine_mode mode;
8905       int volatilep, unsignedp;
8906       bool indirect_base0 = false, indirect_base1 = false;
8907 
8908       /* Get base and offset for the access.  Strip ADDR_EXPR for
8909 	 get_inner_reference, but put it back by stripping INDIRECT_REF
8910 	 off the base object if possible.  indirect_baseN will be true
8911 	 if baseN is not an address but refers to the object itself.  */
8912       base0 = arg0;
8913       if (TREE_CODE (arg0) == ADDR_EXPR)
8914 	{
8915 	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8916 				       &bitsize, &bitpos0, &offset0, &mode,
8917 				       &unsignedp, &volatilep, false);
8918 	  if (TREE_CODE (base0) == INDIRECT_REF)
8919 	    base0 = TREE_OPERAND (base0, 0);
8920 	  else
8921 	    indirect_base0 = true;
8922 	}
8923       else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8924 	{
8925 	  base0 = TREE_OPERAND (arg0, 0);
8926 	  STRIP_SIGN_NOPS (base0);
8927 	  if (TREE_CODE (base0) == ADDR_EXPR)
8928 	    {
8929 	      base0 = TREE_OPERAND (base0, 0);
8930 	      indirect_base0 = true;
8931 	    }
8932 	  offset0 = TREE_OPERAND (arg0, 1);
8933 	  if (host_integerp (offset0, 0))
8934 	    {
8935 	      HOST_WIDE_INT off = size_low_cst (offset0);
8936 	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8937 				   * BITS_PER_UNIT)
8938 		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8939 		{
8940 		  bitpos0 = off * BITS_PER_UNIT;
8941 		  offset0 = NULL_TREE;
8942 		}
8943 	    }
8944 	}
8945 
8946       base1 = arg1;
8947       if (TREE_CODE (arg1) == ADDR_EXPR)
8948 	{
8949 	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8950 				       &bitsize, &bitpos1, &offset1, &mode,
8951 				       &unsignedp, &volatilep, false);
8952 	  if (TREE_CODE (base1) == INDIRECT_REF)
8953 	    base1 = TREE_OPERAND (base1, 0);
8954 	  else
8955 	    indirect_base1 = true;
8956 	}
8957       else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8958 	{
8959 	  base1 = TREE_OPERAND (arg1, 0);
8960 	  STRIP_SIGN_NOPS (base1);
8961 	  if (TREE_CODE (base1) == ADDR_EXPR)
8962 	    {
8963 	      base1 = TREE_OPERAND (base1, 0);
8964 	      indirect_base1 = true;
8965 	    }
8966 	  offset1 = TREE_OPERAND (arg1, 1);
8967 	  if (host_integerp (offset1, 0))
8968 	    {
8969 	      HOST_WIDE_INT off = size_low_cst (offset1);
8970 	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8971 				   * BITS_PER_UNIT)
8972 		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8973 		{
8974 		  bitpos1 = off * BITS_PER_UNIT;
8975 		  offset1 = NULL_TREE;
8976 		}
8977 	    }
8978 	}
8979 
8980       /* A local variable can never be pointed to by
8981          the default SSA name of an incoming parameter.  */
8982       if ((TREE_CODE (arg0) == ADDR_EXPR
8983            && indirect_base0
8984            && TREE_CODE (base0) == VAR_DECL
8985            && auto_var_in_fn_p (base0, current_function_decl)
8986            && !indirect_base1
8987            && TREE_CODE (base1) == SSA_NAME
8988            && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8989            && SSA_NAME_IS_DEFAULT_DEF (base1))
8990           || (TREE_CODE (arg1) == ADDR_EXPR
8991               && indirect_base1
8992               && TREE_CODE (base1) == VAR_DECL
8993               && auto_var_in_fn_p (base1, current_function_decl)
8994               && !indirect_base0
8995               && TREE_CODE (base0) == SSA_NAME
8996               && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8997               && SSA_NAME_IS_DEFAULT_DEF (base0)))
8998         {
8999           if (code == NE_EXPR)
9000             return constant_boolean_node (1, type);
9001           else if (code == EQ_EXPR)
9002             return constant_boolean_node (0, type);
9003         }
9004       /* If we have equivalent bases we might be able to simplify.  */
9005       else if (indirect_base0 == indirect_base1
9006                && operand_equal_p (base0, base1, 0))
9007 	{
9008 	  /* We can fold this expression to a constant if the non-constant
9009 	     offset parts are equal.  */
9010 	  if ((offset0 == offset1
9011 	       || (offset0 && offset1
9012 		   && operand_equal_p (offset0, offset1, 0)))
9013 	      && (code == EQ_EXPR
9014 		  || code == NE_EXPR
9015 		  || (indirect_base0 && DECL_P (base0))
9016 		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
9017 
9018 	    {
9019 	      if (code != EQ_EXPR
9020 		  && code != NE_EXPR
9021 		  && bitpos0 != bitpos1
9022 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
9023 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
9024 		fold_overflow_warning (("assuming pointer wraparound does not "
9025 					"occur when comparing P +- C1 with "
9026 					"P +- C2"),
9027 				       WARN_STRICT_OVERFLOW_CONDITIONAL);
9028 
9029 	      switch (code)
9030 		{
9031 		case EQ_EXPR:
9032 		  return constant_boolean_node (bitpos0 == bitpos1, type);
9033 		case NE_EXPR:
9034 		  return constant_boolean_node (bitpos0 != bitpos1, type);
9035 		case LT_EXPR:
9036 		  return constant_boolean_node (bitpos0 < bitpos1, type);
9037 		case LE_EXPR:
9038 		  return constant_boolean_node (bitpos0 <= bitpos1, type);
9039 		case GE_EXPR:
9040 		  return constant_boolean_node (bitpos0 >= bitpos1, type);
9041 		case GT_EXPR:
9042 		  return constant_boolean_node (bitpos0 > bitpos1, type);
9043 		default:;
9044 		}
9045 	    }
9046 	  /* We can simplify the comparison to a comparison of the variable
9047 	     offset parts if the constant offset parts are equal.
9048 	     Be careful to use the signed size type here because otherwise we
9049 	     mess with array offsets in the wrong way.  This is possible
9050 	     because pointer arithmetic is restricted to remain within an
9051 	     object and overflow on pointer differences is undefined as of
9052 	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
9053 	  else if (bitpos0 == bitpos1
9054 		   && ((code == EQ_EXPR || code == NE_EXPR)
9055 		       || (indirect_base0 && DECL_P (base0))
9056 		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
9057 	    {
9058 	      /* By converting to the signed size type we cover middle-end
9059 	         pointer arithmetic, which operates on unsigned pointer types
9060 	         of size-type width, and ARRAY_REF offsets, which are properly
9061 	         sign- or zero-extended from their type in case it is narrower
9062 	         than the size type.  */
9063 	      if (offset0 == NULL_TREE)
9064 		offset0 = build_int_cst (ssizetype, 0);
9065 	      else
9066 		offset0 = fold_convert_loc (loc, ssizetype, offset0);
9067 	      if (offset1 == NULL_TREE)
9068 		offset1 = build_int_cst (ssizetype, 0);
9069 	      else
9070 		offset1 = fold_convert_loc (loc, ssizetype, offset1);
9071 
9072 	      if (code != EQ_EXPR
9073 		  && code != NE_EXPR
9074 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
9075 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
9076 		fold_overflow_warning (("assuming pointer wraparound does not "
9077 					"occur when comparing P +- C1 with "
9078 					"P +- C2"),
9079 				       WARN_STRICT_OVERFLOW_COMPARISON);
9080 
9081 	      return fold_build2_loc (loc, code, type, offset0, offset1);
9082 	    }
9083 	}
9084       /* For non-equal bases we can simplify if they are addresses
9085 	 of local binding decls or constants.  */
9086       else if (indirect_base0 && indirect_base1
9087 	       /* We know that !operand_equal_p (base0, base1, 0)
9088 		  because the if condition was false.  But make
9089 		  sure the two decls are not the same.  */
9090 	       && base0 != base1
9091 	       && TREE_CODE (arg0) == ADDR_EXPR
9092 	       && TREE_CODE (arg1) == ADDR_EXPR
9093 	       && (((TREE_CODE (base0) == VAR_DECL
9094 		     || TREE_CODE (base0) == PARM_DECL)
9095 		    && (targetm.binds_local_p (base0)
9096 			|| CONSTANT_CLASS_P (base1)))
9097 		   || CONSTANT_CLASS_P (base0))
9098 	       && (((TREE_CODE (base1) == VAR_DECL
9099 		     || TREE_CODE (base1) == PARM_DECL)
9100 		    && (targetm.binds_local_p (base1)
9101 			|| CONSTANT_CLASS_P (base0)))
9102 		   || CONSTANT_CLASS_P (base1)))
9103 	{
9104 	  if (code == EQ_EXPR)
9105 	    return omit_two_operands_loc (loc, type, boolean_false_node,
9106 				      arg0, arg1);
9107 	  else if (code == NE_EXPR)
9108 	    return omit_two_operands_loc (loc, type, boolean_true_node,
9109 				      arg0, arg1);
9110 	}
9111       /* For equal offsets we can simplify to a comparison of the
9112 	 base addresses.  */
9113       else if (bitpos0 == bitpos1
9114 	       && (indirect_base0
9115 		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9116 	       && (indirect_base1
9117 		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9118 	       && ((offset0 == offset1)
9119 		   || (offset0 && offset1
9120 		       && operand_equal_p (offset0, offset1, 0))))
9121 	{
9122 	  if (indirect_base0)
9123 	    base0 = build_fold_addr_expr_loc (loc, base0);
9124 	  if (indirect_base1)
9125 	    base1 = build_fold_addr_expr_loc (loc, base1);
9126 	  return fold_build2_loc (loc, code, type, base0, base1);
9127 	}
9128     }
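
  /* A concrete sketch: given  struct S { int a; int b; } s;  with
     32-bit int, &s.a < &s.b compares equal bases at constant bit
     positions 0 and 32, so it folds to true.  */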
9129 
9130   /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9131      X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
9132      the resulting offset is smaller in absolute value than the
9133      original one.  */
9134   if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9135       && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9136       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9137 	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9138       && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9139       && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9140 	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9141     {
9142       tree const1 = TREE_OPERAND (arg0, 1);
9143       tree const2 = TREE_OPERAND (arg1, 1);
9144       tree variable1 = TREE_OPERAND (arg0, 0);
9145       tree variable2 = TREE_OPERAND (arg1, 0);
9146       tree cst;
9147       const char * const warnmsg = G_("assuming signed overflow does not "
9148 				      "occur when combining constants around "
9149 				      "a comparison");
9150 
9151       /* Put the constant on the side where it doesn't overflow and is
9152 	 of lower absolute value than before.  */
9153       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9154 			     ? MINUS_EXPR : PLUS_EXPR,
9155 			     const2, const1);
9156       if (!TREE_OVERFLOW (cst)
9157 	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9158 	{
9159 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9160 	  return fold_build2_loc (loc, code, type,
9161 			      variable1,
9162 			      fold_build2_loc (loc,
9163 					   TREE_CODE (arg1), TREE_TYPE (arg1),
9164 					   variable2, cst));
9165 	}
9166 
9167       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9168 			     ? MINUS_EXPR : PLUS_EXPR,
9169 			     const1, const2);
9170       if (!TREE_OVERFLOW (cst)
9171 	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9172 	{
9173 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9174 	  return fold_build2_loc (loc, code, type,
9175 			      fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9176 					   variable1, cst),
9177 			      variable2);
9178 	}
9179     }
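
  /* For example, with signed int x and y, x + 5 < y + 9 becomes
     x < y + 4: the combined constant 4 is smaller in absolute value
     than the 9 it replaces, so no new overflow can be introduced.  */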
9180 
9181   /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9182      signed arithmetic case.  That form is created by the compiler
9183      often enough for folding it to be of value.  One example is in
9184      computing loop trip counts after Operator Strength Reduction.  */
9185   if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9186       && TREE_CODE (arg0) == MULT_EXPR
9187       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9188           && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9189       && integer_zerop (arg1))
9190     {
9191       tree const1 = TREE_OPERAND (arg0, 1);
9192       tree const2 = arg1;                       /* zero */
9193       tree variable1 = TREE_OPERAND (arg0, 0);
9194       enum tree_code cmp_code = code;
9195 
9196       /* Handle unfolded multiplication by zero.  */
9197       if (integer_zerop (const1))
9198 	return fold_build2_loc (loc, cmp_code, type, const1, const2);
9199 
9200       fold_overflow_warning (("assuming signed overflow does not occur when "
9201 			      "eliminating multiplication in comparison "
9202 			      "with zero"),
9203 			     WARN_STRICT_OVERFLOW_COMPARISON);
9204 
9205       /* If const1 is negative we swap the sense of the comparison.  */
9206       if (tree_int_cst_sgn (const1) < 0)
9207         cmp_code = swap_tree_comparison (cmp_code);
9208 
9209       return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9210     }
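
  /* E.g. for signed int x, x * 4 > 0 becomes x > 0, while x * -4 > 0
     becomes x < 0, the comparison sense being swapped for a negative
     multiplier.  */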
9211 
9212   tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9213   if (tem)
9214     return tem;
9215 
9216   if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9217     {
9218       tree targ0 = strip_float_extensions (arg0);
9219       tree targ1 = strip_float_extensions (arg1);
9220       tree newtype = TREE_TYPE (targ0);
9221 
9222       if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9223 	newtype = TREE_TYPE (targ1);
9224 
9225       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
9226       if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9227 	return fold_build2_loc (loc, code, type,
9228 			    fold_convert_loc (loc, newtype, targ0),
9229 			    fold_convert_loc (loc, newtype, targ1));
9230 
9231       /* (-a) CMP (-b) -> b CMP a  */
9232       if (TREE_CODE (arg0) == NEGATE_EXPR
9233 	  && TREE_CODE (arg1) == NEGATE_EXPR)
9234 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9235 			    TREE_OPERAND (arg0, 0));
9236 
9237       if (TREE_CODE (arg1) == REAL_CST)
9238 	{
9239 	  REAL_VALUE_TYPE cst;
9240 	  cst = TREE_REAL_CST (arg1);
9241 
9242 	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
9243 	  if (TREE_CODE (arg0) == NEGATE_EXPR)
9244 	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
9245 				TREE_OPERAND (arg0, 0),
9246 				build_real (TREE_TYPE (arg1),
9247 					    real_value_negate (&cst)));
9248 
9249 	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
9250 	  /* a CMP (-0) -> a CMP 0  */
9251 	  if (REAL_VALUE_MINUS_ZERO (cst))
9252 	    return fold_build2_loc (loc, code, type, arg0,
9253 				build_real (TREE_TYPE (arg1), dconst0));
9254 
9255 	  /* x != NaN is always true, other ops are always false.  */
9256 	  if (REAL_VALUE_ISNAN (cst)
9257 	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9258 	    {
9259 	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9260 	      return omit_one_operand_loc (loc, type, tem, arg0);
9261 	    }
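	  /* For instance, for a double x, "x != __builtin_nan ("")" folds
	     to 1 and "x < __builtin_nan ("")" folds to 0, since a quiet
	     NaN compares unordered to every value.  */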
9262 
9263 	  /* Fold comparisons against infinity.  */
9264 	  if (REAL_VALUE_ISINF (cst)
9265 	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9266 	    {
9267 	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
9268 	      if (tem != NULL_TREE)
9269 		return tem;
9270 	    }
9271 	}
9272 
9273       /* If this is a comparison of a real constant with a PLUS_EXPR
9274 	 or a MINUS_EXPR of a real constant, we can convert it into a
9275 	 comparison with a revised real constant as long as no overflow
9276 	 occurs when unsafe_math_optimizations are enabled.  */
9277       if (flag_unsafe_math_optimizations
9278 	  && TREE_CODE (arg1) == REAL_CST
9279 	  && (TREE_CODE (arg0) == PLUS_EXPR
9280 	      || TREE_CODE (arg0) == MINUS_EXPR)
9281 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9282 	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9283 				      ? MINUS_EXPR : PLUS_EXPR,
9284 				      arg1, TREE_OPERAND (arg0, 1)))
9285 	  && !TREE_OVERFLOW (tem))
9286 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9287 
9288       /* Likewise, we can simplify a comparison of a real constant with
9289          a MINUS_EXPR whose first operand is also a real constant, i.e.
9290          (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
9291          floating-point types only if -fassociative-math is set.  */
9292       if (flag_associative_math
9293 	  && TREE_CODE (arg1) == REAL_CST
9294 	  && TREE_CODE (arg0) == MINUS_EXPR
9295 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9296 	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9297 				      arg1))
9298 	  && !TREE_OVERFLOW (tem))
9299 	return fold_build2_loc (loc, swap_tree_comparison (code), type,
9300 			    TREE_OPERAND (arg0, 1), tem);
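      /* E.g. with -fassociative-math, "(10.0 - x) < 4.0" is rewritten
	 as "x > 6.0" by folding the two constants together and swapping
	 the comparison.  */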
9301 
9302       /* Fold comparisons against built-in math functions.  */
9303       if (TREE_CODE (arg1) == REAL_CST
9304 	  && flag_unsafe_math_optimizations
9305 	  && ! flag_errno_math)
9306 	{
9307 	  enum built_in_function fcode = builtin_mathfn_code (arg0);
9308 
9309 	  if (fcode != END_BUILTINS)
9310 	    {
9311 	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9312 	      if (tem != NULL_TREE)
9313 		return tem;
9314 	    }
9315 	}
9316     }
9317 
9318   if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9319       && CONVERT_EXPR_P (arg0))
9320     {
9321       /* If we are widening one operand of an integer comparison,
9322 	 see if the other operand is similarly being widened.  Perhaps we
9323 	 can do the comparison in the narrower type.  */
9324       tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9325       if (tem)
9326 	return tem;
9327 
9328       /* Or if we are changing signedness.  */
9329       tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9330       if (tem)
9331 	return tem;
9332     }
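      /* For instance, for a short s, "(int) s == 5" can be performed as
	 "s == 5" in the narrower type, as 5 is representable in short.  */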
9333 
9334   /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9335      constant, we can simplify it.  */
9336   if (TREE_CODE (arg1) == INTEGER_CST
9337       && (TREE_CODE (arg0) == MIN_EXPR
9338 	  || TREE_CODE (arg0) == MAX_EXPR)
9339       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9340     {
9341       tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9342       if (tem)
9343 	return tem;
9344     }
9345 
9346   /* Simplify comparison of something with itself.  (For IEEE
9347      floating-point, we can only do some of these simplifications.)  */
9348   if (operand_equal_p (arg0, arg1, 0))
9349     {
9350       switch (code)
9351 	{
9352 	case EQ_EXPR:
9353 	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9354 	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9355 	    return constant_boolean_node (1, type);
9356 	  break;
9357 
9358 	case GE_EXPR:
9359 	case LE_EXPR:
9360 	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9361 	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9362 	    return constant_boolean_node (1, type);
9363 	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9364 
9365 	case NE_EXPR:
9366 	  /* For NE, we can only do this simplification if integer
9367 	     or we don't honor IEEE floating point NaNs.  */
9368 	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9369 	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9370 	    break;
9371 	  /* ... fall through ...  */
9372 	case GT_EXPR:
9373 	case LT_EXPR:
9374 	  return constant_boolean_node (0, type);
9375 	default:
9376 	  gcc_unreachable ();
9377 	}
9378     }
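  /* For example, "x < x" always folds to 0, and for an integral x so
     does "x != x"; for an IEEE float x that may be a NaN, "x <= x" is
     only narrowed to "x == x".  */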
9379 
9380   /* If we are comparing an expression that just has comparisons
9381      of two integer values, arithmetic expressions of those comparisons,
9382      and constants, we can simplify it.  There are only three cases
9383      to check: the two values can either be equal, the first can be
9384      greater, or the second can be greater.  Fold the expression for
9385      those three values.  Since each value must be 0 or 1, we have
9386      eight possibilities, each of which corresponds to the constant 0
9387      or 1 or one of the six possible comparisons.
9388 
9389      This handles common cases like (a > b) == 0 but also handles
9390      expressions like  ((x > y) - (y > x)) > 0, which supposedly
9391      occur in macroized code.  */
9392 
9393   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9394     {
9395       tree cval1 = 0, cval2 = 0;
9396       int save_p = 0;
9397 
9398       if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9399 	  /* Don't handle degenerate cases here; they should already
9400 	     have been handled anyway.  */
9401 	  && cval1 != 0 && cval2 != 0
9402 	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9403 	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9404 	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9405 	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9406 	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9407 	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9408 				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9409 	{
9410 	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9411 	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9412 
9413 	  /* We can't just pass T to eval_subst in case cval1 or cval2
9414 	     was the same as ARG1.  */
9415 
9416 	  tree high_result
9417 		= fold_build2_loc (loc, code, type,
9418 			       eval_subst (loc, arg0, cval1, maxval,
9419 					   cval2, minval),
9420 			       arg1);
9421 	  tree equal_result
9422 		= fold_build2_loc (loc, code, type,
9423 			       eval_subst (loc, arg0, cval1, maxval,
9424 					   cval2, maxval),
9425 			       arg1);
9426 	  tree low_result
9427 		= fold_build2_loc (loc, code, type,
9428 			       eval_subst (loc, arg0, cval1, minval,
9429 					   cval2, maxval),
9430 			       arg1);
9431 
9432 	  /* All three of these results should be 0 or 1.  Confirm they are.
9433 	     Then use those values to select the proper code to use.  */
9434 
9435 	  if (TREE_CODE (high_result) == INTEGER_CST
9436 	      && TREE_CODE (equal_result) == INTEGER_CST
9437 	      && TREE_CODE (low_result) == INTEGER_CST)
9438 	    {
9439 	      /* Make a 3-bit mask with the high-order bit being the
9440 		 value for `>', the next for '=', and the low for '<'.  */
9441 	      switch ((integer_onep (high_result) * 4)
9442 		      + (integer_onep (equal_result) * 2)
9443 		      + integer_onep (low_result))
9444 		{
9445 		case 0:
9446 		  /* Always false.  */
9447 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9448 		case 1:
9449 		  code = LT_EXPR;
9450 		  break;
9451 		case 2:
9452 		  code = EQ_EXPR;
9453 		  break;
9454 		case 3:
9455 		  code = LE_EXPR;
9456 		  break;
9457 		case 4:
9458 		  code = GT_EXPR;
9459 		  break;
9460 		case 5:
9461 		  code = NE_EXPR;
9462 		  break;
9463 		case 6:
9464 		  code = GE_EXPR;
9465 		  break;
9466 		case 7:
9467 		  /* Always true.  */
9468 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9469 		}
9470 
9471 	      if (save_p)
9472 		{
9473 		  tem = save_expr (build2 (code, type, cval1, cval2));
9474 		  SET_EXPR_LOCATION (tem, loc);
9475 		  return tem;
9476 		}
9477 	      return fold_build2_loc (loc, code, type, cval1, cval2);
9478 	    }
9479 	}
9480     }
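  /* To illustrate the mask: for "(a > b) == 0" the three evaluations
     give high = 0, equal = 1, low = 1, i.e. mask 3, so the whole
     expression folds to "a <= b".  */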
9481 
9482   /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9483      into a single range test.  */
9484   if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9485        || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9486       && TREE_CODE (arg1) == INTEGER_CST
9487       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9488       && !integer_zerop (TREE_OPERAND (arg0, 1))
9489       && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9490       && !TREE_OVERFLOW (arg1))
9491     {
9492       tem = fold_div_compare (loc, code, type, arg0, arg1);
9493       if (tem != NULL_TREE)
9494 	return tem;
9495     }
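  /* For instance, for an unsigned x, "x / 4 == 3" holds exactly for
     12 <= x && x <= 15, so fold_div_compare can build that range
     test.  */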
9496 
9497   /* Fold ~X op ~Y as Y op X.  */
9498   if (TREE_CODE (arg0) == BIT_NOT_EXPR
9499       && TREE_CODE (arg1) == BIT_NOT_EXPR)
9500     {
9501       tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9502       return fold_build2_loc (loc, code, type,
9503 			  fold_convert_loc (loc, cmp_type,
9504 					    TREE_OPERAND (arg1, 0)),
9505 			  TREE_OPERAND (arg0, 0));
9506     }
9507 
9508   /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
9509   if (TREE_CODE (arg0) == BIT_NOT_EXPR
9510       && TREE_CODE (arg1) == INTEGER_CST)
9511     {
9512       tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9513       return fold_build2_loc (loc, swap_tree_comparison (code), type,
9514 			  TREE_OPERAND (arg0, 0),
9515 			  fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9516 				       fold_convert_loc (loc, cmp_type, arg1)));
9517     }
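  /* E.g. for a signed int x, "~x < 5" becomes "x > ~5", i.e. "x > -6",
     using the identity ~x == -x - 1.  */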
9518 
9519   return NULL_TREE;
9520 }
9521 
9522 
9523 /* Subroutine of fold_binary.  Optimize complex multiplications of the
9524    form z * conj(z) into pow(realpart(z),2) + pow(imagpart(z),2).  The
9525    argument EXPR represents the expression "z" of type TYPE.  */
9526 
9527 static tree
9528 fold_mult_zconjz (location_t loc, tree type, tree expr)
9529 {
9530   tree itype = TREE_TYPE (type);
9531   tree rpart, ipart, tem;
9532 
9533   if (TREE_CODE (expr) == COMPLEX_EXPR)
9534     {
9535       rpart = TREE_OPERAND (expr, 0);
9536       ipart = TREE_OPERAND (expr, 1);
9537     }
9538   else if (TREE_CODE (expr) == COMPLEX_CST)
9539     {
9540       rpart = TREE_REALPART (expr);
9541       ipart = TREE_IMAGPART (expr);
9542     }
9543   else
9544     {
9545       expr = save_expr (expr);
9546       rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9547       ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9548     }
9549 
9550   rpart = save_expr (rpart);
9551   ipart = save_expr (ipart);
9552   tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9553 		     fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9554 		     fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9555   return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9556 			  build_zero_cst (itype));
9557 }
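/* For example, with z = a + b*i this computes
   (a + b*i) * (a - b*i) = a*a + b*b, paired with a zero imaginary
   part.  */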
9558 
9559 
9560 /* Subroutine of fold_binary.  If P is the value of EXPR, computes
9561    power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
9562    guarantees that P and N have the same least significant log2(M) bits.
9563    N is not otherwise constrained.  In particular, N is not normalized to
9564    0 <= N < M as is common.  In general, the precise value of P is unknown.
9565    M is chosen as large as possible such that constant N can be determined.
9566 
9567    Returns M and sets *RESIDUE to N.
9568 
9569    If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9570    account.  This is not always possible due to PR 35705.
9571  */
9572 
9573 static unsigned HOST_WIDE_INT
9574 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9575 				 bool allow_func_align)
9576 {
9577   enum tree_code code;
9578 
9579   *residue = 0;
9580 
9581   code = TREE_CODE (expr);
9582   if (code == ADDR_EXPR)
9583     {
9584       unsigned int bitalign;
9585       bitalign = get_object_alignment_1 (TREE_OPERAND (expr, 0), residue);
9586       *residue /= BITS_PER_UNIT;
9587       return bitalign / BITS_PER_UNIT;
9588     }
9589   else if (code == POINTER_PLUS_EXPR)
9590     {
9591       tree op0, op1;
9592       unsigned HOST_WIDE_INT modulus;
9593       enum tree_code inner_code;
9594 
9595       op0 = TREE_OPERAND (expr, 0);
9596       STRIP_NOPS (op0);
9597       modulus = get_pointer_modulus_and_residue (op0, residue,
9598 						 allow_func_align);
9599 
9600       op1 = TREE_OPERAND (expr, 1);
9601       STRIP_NOPS (op1);
9602       inner_code = TREE_CODE (op1);
9603       if (inner_code == INTEGER_CST)
9604 	{
9605 	  *residue += TREE_INT_CST_LOW (op1);
9606 	  return modulus;
9607 	}
9608       else if (inner_code == MULT_EXPR)
9609 	{
9610 	  op1 = TREE_OPERAND (op1, 1);
9611 	  if (TREE_CODE (op1) == INTEGER_CST)
9612 	    {
9613 	      unsigned HOST_WIDE_INT align;
9614 
9615 	      /* Compute the greatest power-of-2 divisor of op1.  */
9616 	      align = TREE_INT_CST_LOW (op1);
9617 	      align &= -align;
9618 
9619 	      /* If align is non-zero and less than *modulus, replace
9620 		 *modulus with align.  If align is 0, then either op1 is 0
9621 		 or the greatest power-of-2 divisor of op1 doesn't fit in an
9622 		 unsigned HOST_WIDE_INT.  In either case, no additional
9623 		 constraint is imposed.  */
9624 	      if (align)
9625 		modulus = MIN (modulus, align);
9626 
9627 	      return modulus;
9628 	    }
9629 	}
9630     }
9631 
9632   /* If we get here, we were unable to determine anything useful about the
9633      expression.  */
9634   return 1;
9635 }
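/* As an illustration: for "&buf p+ 4" with buf known to be 16-byte
   aligned, the result is modulus 16 with *RESIDUE set to 4, i.e. the
   pointer value is known to be congruent to 4 modulo 16.  */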
9636 
9637 /* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
9638    CONSTRUCTOR ARG into array ELTS and return true if successful.  */
9639 
9640 static bool
9641 vec_cst_ctor_to_array (tree arg, tree *elts)
9642 {
9643   unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9644 
9645   if (TREE_CODE (arg) == VECTOR_CST)
9646     {
9647       tree t;
9648 
9649       for (i = 0, t = TREE_VECTOR_CST_ELTS (arg);
9650 	   i < nelts && t; i++, t = TREE_CHAIN (t))
9651 	elts[i] = TREE_VALUE (t);
9652       if (t)
9653 	return false;
9654     }
9655   else if (TREE_CODE (arg) == CONSTRUCTOR)
9656     {
9657       constructor_elt *elt;
9658 
9659       FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (arg), i, elt)
9660 	if (i >= nelts)
9661 	  return false;
9662 	else
9663 	  elts[i] = elt->value;
9664     }
9665   else
9666     return false;
9667   for (; i < nelts; i++)
9668     elts[i]
9669       = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9670   return true;
9671 }
9672 
9673 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9674    selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9675    NULL_TREE otherwise.  */
9676 
9677 static tree
9678 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9679 {
9680   unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9681   tree *elts;
9682   bool need_ctor = false;
9683 
9684   gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9685 	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9686   if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9687       || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9688     return NULL_TREE;
9689 
9690   elts = XALLOCAVEC (tree, nelts * 3);
9691   if (!vec_cst_ctor_to_array (arg0, elts)
9692       || !vec_cst_ctor_to_array (arg1, elts + nelts))
9693     return NULL_TREE;
9694 
9695   for (i = 0; i < nelts; i++)
9696     {
9697       if (!CONSTANT_CLASS_P (elts[sel[i]]))
9698 	need_ctor = true;
9699       elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9700     }
9701 
9702   if (need_ctor)
9703     {
9704       VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, nelts);
9705       for (i = 0; i < nelts; i++)
9706 	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9707       return build_constructor (type, v);
9708     }
9709   else
9710     {
9711       tree vals = NULL_TREE;
9712       for (i = 0; i < nelts; i++)
9713 	vals = tree_cons (NULL_TREE, elts[3 * nelts - i - 1], vals);
9714       return build_vector (type, vals);
9715     }
9716 }
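/* For example, with four-element vectors, SEL = { 0, 4, 1, 5 }
   interleaves the first two elements of ARG0 with the first two
   elements of ARG1, since selector values of NELTS and above index
   into ARG1.  */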
9717 
9718 /* Try to fold a pointer difference of type TYPE between two address
9719    array references AREF0 and AREF1 using location LOC.  Return a
9720    simplified expression for the difference or NULL_TREE.  */
9721 
9722 static tree
9723 fold_addr_of_array_ref_difference (location_t loc, tree type,
9724 				   tree aref0, tree aref1)
9725 {
9726   tree base0 = TREE_OPERAND (aref0, 0);
9727   tree base1 = TREE_OPERAND (aref1, 0);
9728   tree base_offset = build_int_cst (type, 0);
9729 
9730   /* If the bases are array references as well, recurse.  If the bases
9731      are pointer indirections compute the difference of the pointers.
9732      If the bases are equal, we are set.  */
9733   if ((TREE_CODE (base0) == ARRAY_REF
9734        && TREE_CODE (base1) == ARRAY_REF
9735        && (base_offset
9736 	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9737       || (INDIRECT_REF_P (base0)
9738 	  && INDIRECT_REF_P (base1)
9739 	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9740 					     TREE_OPERAND (base0, 0),
9741 					     TREE_OPERAND (base1, 0))))
9742       || operand_equal_p (base0, base1, 0))
9743     {
9744       tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9745       tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9746       tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9747       tree diff = build2 (MINUS_EXPR, type, op0, op1);
9748       return fold_build2_loc (loc, PLUS_EXPR, type,
9749 			      base_offset,
9750 			      fold_build2_loc (loc, MULT_EXPR, type,
9751 					       diff, esz));
9752     }
9753   return NULL_TREE;
9754 }
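/* For example, "&a[i] - &a[j]" has equal bases and folds to
   0 + (i - j) * sizeof (a[0]); for "&a[i][k] - &a[j][m]" the bases are
   themselves ARRAY_REFs and the function recurses on them first.  */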
9755 
9756 /* Fold a binary expression of code CODE and type TYPE with operands
9757    OP0 and OP1.  LOC is the location of the resulting expression.
9758    Return the folded expression if folding is successful.  Otherwise,
9759    return NULL_TREE.  */
9760 
9761 tree
9762 fold_binary_loc (location_t loc,
9763 	     enum tree_code code, tree type, tree op0, tree op1)
9764 {
9765   enum tree_code_class kind = TREE_CODE_CLASS (code);
9766   tree arg0, arg1, tem;
9767   tree t1 = NULL_TREE;
9768   bool strict_overflow_p;
9769 
9770   gcc_assert (IS_EXPR_CODE_CLASS (kind)
9771 	      && TREE_CODE_LENGTH (code) == 2
9772 	      && op0 != NULL_TREE
9773 	      && op1 != NULL_TREE);
9774 
9775   arg0 = op0;
9776   arg1 = op1;
9777 
9778   /* Strip any conversions that don't change the mode.  This is
9779      safe for every expression, except for a comparison expression
9780      because its signedness is derived from its operands.  So, in
9781      the latter case, only strip conversions that don't change the
9782      signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
9783      preserved.
9784 
9785      Note that this is done as an internal manipulation within the
9786      constant folder, in order to find the simplest representation
9787      of the arguments so that their form can be studied.  In any
9788      cases, the appropriate type conversions should be put back in
9789      the tree that will get out of the constant folder.  */
9790 
9791   if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9792     {
9793       STRIP_SIGN_NOPS (arg0);
9794       STRIP_SIGN_NOPS (arg1);
9795     }
9796   else
9797     {
9798       STRIP_NOPS (arg0);
9799       STRIP_NOPS (arg1);
9800     }
9801 
9802   /* Note that TREE_CONSTANT isn't enough: static var addresses are
9803      constant but we can't do arithmetic on them.  */
9804   if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9805       || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9806       || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9807       || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9808       || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9809       || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9810     {
9811       if (kind == tcc_binary)
9812 	{
9813 	  /* Make sure type and arg0 have the same saturating flag.  */
9814 	  gcc_assert (TYPE_SATURATING (type)
9815 		      == TYPE_SATURATING (TREE_TYPE (arg0)));
9816 	  tem = const_binop (code, arg0, arg1);
9817 	}
9818       else if (kind == tcc_comparison)
9819 	tem = fold_relational_const (code, type, arg0, arg1);
9820       else
9821 	tem = NULL_TREE;
9822 
9823       if (tem != NULL_TREE)
9824 	{
9825 	  if (TREE_TYPE (tem) != type)
9826 	    tem = fold_convert_loc (loc, type, tem);
9827 	  return tem;
9828 	}
9829     }
9830 
9831   /* If this is a commutative operation, and ARG0 is a constant, move it
9832      to ARG1 to reduce the number of tests below.  */
9833   if (commutative_tree_code (code)
9834       && tree_swap_operands_p (arg0, arg1, true))
9835     return fold_build2_loc (loc, code, type, op1, op0);
9836 
9837   /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9838 
9839      First check for cases where an arithmetic operation is applied to a
9840      compound, conditional, or comparison operation.  Push the arithmetic
9841      operation inside the compound or conditional to see if any folding
9842      can then be done.  Convert comparison to conditional for this purpose.
9843      This also optimizes non-constant cases that used to be done in
9844      expand_expr.
9845 
9846      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9847      where one of the operands is a comparison and the other is a comparison, a
9848      BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
9849      code below would make the expression more complex.  Change it to a
9850      TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
9851      TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
9852 
9853   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9854        || code == EQ_EXPR || code == NE_EXPR)
9855       && ((truth_value_p (TREE_CODE (arg0))
9856 	   && (truth_value_p (TREE_CODE (arg1))
9857 	       || (TREE_CODE (arg1) == BIT_AND_EXPR
9858 		   && integer_onep (TREE_OPERAND (arg1, 1)))))
9859 	  || (truth_value_p (TREE_CODE (arg1))
9860 	      && (truth_value_p (TREE_CODE (arg0))
9861 		  || (TREE_CODE (arg0) == BIT_AND_EXPR
9862 		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
9863     {
9864       tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9865 			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9866 			 : TRUTH_XOR_EXPR,
9867 			 boolean_type_node,
9868 			 fold_convert_loc (loc, boolean_type_node, arg0),
9869 			 fold_convert_loc (loc, boolean_type_node, arg1));
9870 
9871       if (code == EQ_EXPR)
9872 	tem = invert_truthvalue_loc (loc, tem);
9873 
9874       return fold_convert_loc (loc, type, tem);
9875     }
9876 
9877   if (TREE_CODE_CLASS (code) == tcc_binary
9878       || TREE_CODE_CLASS (code) == tcc_comparison)
9879     {
9880       if (TREE_CODE (arg0) == COMPOUND_EXPR)
9881 	{
9882 	  tem = fold_build2_loc (loc, code, type,
9883 			     fold_convert_loc (loc, TREE_TYPE (op0),
9884 					       TREE_OPERAND (arg0, 1)), op1);
9885 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9886 			     tem);
9887 	}
9888       if (TREE_CODE (arg1) == COMPOUND_EXPR
9889 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9890 	{
9891 	  tem = fold_build2_loc (loc, code, type, op0,
9892 			     fold_convert_loc (loc, TREE_TYPE (op1),
9893 					       TREE_OPERAND (arg1, 1)));
9894 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9895 			     tem);
9896 	}
9897 
9898       if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9899 	{
9900 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9901 						     arg0, arg1,
9902 						     /*cond_first_p=*/1);
9903 	  if (tem != NULL_TREE)
9904 	    return tem;
9905 	}
9906 
9907       if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9908 	{
9909 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9910 						     arg1, arg0,
9911 					             /*cond_first_p=*/0);
9912 	  if (tem != NULL_TREE)
9913 	    return tem;
9914 	}
9915     }
9916 
9917   switch (code)
9918     {
9919     case MEM_REF:
9920       /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
9921       if (TREE_CODE (arg0) == ADDR_EXPR
9922 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9923 	{
9924 	  tree iref = TREE_OPERAND (arg0, 0);
9925 	  return fold_build2 (MEM_REF, type,
9926 			      TREE_OPERAND (iref, 0),
9927 			      int_const_binop (PLUS_EXPR, arg1,
9928 					       TREE_OPERAND (iref, 1)));
9929 	}
9930 
9931       /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
9932       if (TREE_CODE (arg0) == ADDR_EXPR
9933 	  && handled_component_p (TREE_OPERAND (arg0, 0)))
9934 	{
9935 	  tree base;
9936 	  HOST_WIDE_INT coffset;
9937 	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9938 						&coffset);
9939 	  if (!base)
9940 	    return NULL_TREE;
9941 	  return fold_build2 (MEM_REF, type,
9942 			      build_fold_addr_expr (base),
9943 			      int_const_binop (PLUS_EXPR, arg1,
9944 					       size_int (coffset)));
9945 	}
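      /* So, e.g., MEM[&MEM[p, 4], 8] has been folded to MEM[p, 12], and
	 MEM[&a.b, 8] to MEM[&a, offsetof (a, b) + 8].  */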
9946 
9947       return NULL_TREE;
9948 
9949     case POINTER_PLUS_EXPR:
9950       /* 0 +p index -> (type)index */
9951       if (integer_zerop (arg0))
9952 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9953 
9954       /* PTR +p 0 -> PTR */
9955       if (integer_zerop (arg1))
9956 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9957 
9958       /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
9959       if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9960 	   && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9961         return fold_convert_loc (loc, type,
9962 				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9963 					      fold_convert_loc (loc, sizetype,
9964 								arg1),
9965 					      fold_convert_loc (loc, sizetype,
9966 								arg0)));
9967 
9968       /* (PTR +p B) +p A -> PTR +p (B + A) */
9969       if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9970 	{
9971 	  tree inner;
9972 	  tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9973 	  tree arg00 = TREE_OPERAND (arg0, 0);
9974 	  inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9975 			       arg01, fold_convert_loc (loc, sizetype, arg1));
9976 	  return fold_convert_loc (loc, type,
9977 				   fold_build_pointer_plus_loc (loc,
9978 								arg00, inner));
9979 	}
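      /* E.g. "(p p+ 4) p+ 8" re-associates to "p p+ 12", with the two
	 offsets added in sizetype first.  */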
9980 
9981       /* PTR_CST +p CST -> CST1, i.e. fold the two constants together.  */
9982       if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9983 	return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9984 			    fold_convert_loc (loc, type, arg1));
9985 
9986      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9987 	of the array.  The loop optimizer sometimes produces this type of
9988 	expression.  */
9989       if (TREE_CODE (arg0) == ADDR_EXPR)
9990 	{
9991 	  tem = try_move_mult_to_index (loc, arg0,
9992 					fold_convert_loc (loc, sizetype, arg1));
9993 	  if (tem)
9994 	    return fold_convert_loc (loc, type, tem);
9995 	}
9996 
9997       return NULL_TREE;
9998 
9999     case PLUS_EXPR:
10000       /* A + (-B) -> A - B */
10001       if (TREE_CODE (arg1) == NEGATE_EXPR)
10002 	return fold_build2_loc (loc, MINUS_EXPR, type,
10003 			    fold_convert_loc (loc, type, arg0),
10004 			    fold_convert_loc (loc, type,
10005 					      TREE_OPERAND (arg1, 0)));
10006       /* (-A) + B -> B - A */
10007       if (TREE_CODE (arg0) == NEGATE_EXPR
10008 	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10009 	return fold_build2_loc (loc, MINUS_EXPR, type,
10010 			    fold_convert_loc (loc, type, arg1),
10011 			    fold_convert_loc (loc, type,
10012 					      TREE_OPERAND (arg0, 0)));
10013 
10014       if (INTEGRAL_TYPE_P (type))
10015 	{
10016 	  /* Convert ~A + 1 to -A.  */
10017 	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
10018 	      && integer_onep (arg1))
10019 	    return fold_build1_loc (loc, NEGATE_EXPR, type,
10020 				fold_convert_loc (loc, type,
10021 						  TREE_OPERAND (arg0, 0)));
10022 
10023 	  /* ~X + X is -1.  */
10024 	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
10025 	      && !TYPE_OVERFLOW_TRAPS (type))
10026 	    {
10027 	      tree tem = TREE_OPERAND (arg0, 0);
10028 
10029 	      STRIP_NOPS (tem);
10030 	      if (operand_equal_p (tem, arg1, 0))
10031 		{
10032 		  t1 = build_int_cst_type (type, -1);
10033 		  return omit_one_operand_loc (loc, type, t1, arg1);
10034 		}
10035 	    }
10036 
10037 	  /* X + ~X is -1.  */
10038 	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
10039 	      && !TYPE_OVERFLOW_TRAPS (type))
10040 	    {
10041 	      tree tem = TREE_OPERAND (arg1, 0);
10042 
10043 	      STRIP_NOPS (tem);
10044 	      if (operand_equal_p (arg0, tem, 0))
10045 		{
10046 		  t1 = build_int_cst_type (type, -1);
10047 		  return omit_one_operand_loc (loc, type, t1, arg0);
10048 		}
10049 	    }
10050 
10051 	  /* X + (X / CST) * -CST is X % CST.  */
10052 	  if (TREE_CODE (arg1) == MULT_EXPR
10053 	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10054 	      && operand_equal_p (arg0,
10055 				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10056 	    {
10057 	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10058 	      tree cst1 = TREE_OPERAND (arg1, 1);
10059 	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10060 				      cst1, cst0);
10061 	      if (sum && integer_zerop (sum))
10062 		return fold_convert_loc (loc, type,
10063 					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10064 						      TREE_TYPE (arg0), arg0,
10065 						      cst0));
10066 	    }
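	  /* For instance, "x + (x / 16) * -16" folds to "x % 16", since
	     the constants sum to zero and truncating division gives
	     x % 16 == x - (x / 16) * 16.  */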
10067 	}
10068 
10069       /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10070 	 one.  Make sure the type is not saturating and has the signedness of
10071 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10072 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
10073       if ((TREE_CODE (arg0) == MULT_EXPR
10074 	   || TREE_CODE (arg1) == MULT_EXPR)
10075 	  && !TYPE_SATURATING (type)
10076 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10077 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10078 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
10079         {
10080 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10081 	  if (tem)
10082 	    return tem;
10083 	}
10084 
10085       if (! FLOAT_TYPE_P (type))
10086 	{
10087 	  if (integer_zerop (arg1))
10088 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10089 
10090 	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10091 	     with a constant, and the two constants have no bits in common,
10092 	     we should treat this as a BIT_IOR_EXPR since this may produce more
10093 	     simplifications.  */
10094 	  if (TREE_CODE (arg0) == BIT_AND_EXPR
10095 	      && TREE_CODE (arg1) == BIT_AND_EXPR
10096 	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10097 	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10098 	      && integer_zerop (const_binop (BIT_AND_EXPR,
10099 					     TREE_OPERAND (arg0, 1),
10100 					     TREE_OPERAND (arg1, 1))))
10101 	    {
10102 	      code = BIT_IOR_EXPR;
10103 	      goto bit_ior;
10104 	    }
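	  /* E.g. "(x & 0xF0) + (y & 0x0F)" can never carry between the
	     disjoint masks, so it is treated as
	     "(x & 0xF0) | (y & 0x0F)".  */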
10105 
10106 	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10107 	     (plus (plus (mult) (mult)) (foo)) so that we can
10108 	     take advantage of the factoring cases below.  */
10109 	  if (TYPE_OVERFLOW_WRAPS (type)
10110 	      && (((TREE_CODE (arg0) == PLUS_EXPR
10111 		    || TREE_CODE (arg0) == MINUS_EXPR)
10112 		   && TREE_CODE (arg1) == MULT_EXPR)
10113 		  || ((TREE_CODE (arg1) == PLUS_EXPR
10114 		       || TREE_CODE (arg1) == MINUS_EXPR)
10115 		      && TREE_CODE (arg0) == MULT_EXPR)))
10116 	    {
10117 	      tree parg0, parg1, parg, marg;
10118 	      enum tree_code pcode;
10119 
10120 	      if (TREE_CODE (arg1) == MULT_EXPR)
10121 		parg = arg0, marg = arg1;
10122 	      else
10123 		parg = arg1, marg = arg0;
10124 	      pcode = TREE_CODE (parg);
10125 	      parg0 = TREE_OPERAND (parg, 0);
10126 	      parg1 = TREE_OPERAND (parg, 1);
10127 	      STRIP_NOPS (parg0);
10128 	      STRIP_NOPS (parg1);
10129 
10130 	      if (TREE_CODE (parg0) == MULT_EXPR
10131 		  && TREE_CODE (parg1) != MULT_EXPR)
10132 		return fold_build2_loc (loc, pcode, type,
10133 				    fold_build2_loc (loc, PLUS_EXPR, type,
10134 						 fold_convert_loc (loc, type,
10135 								   parg0),
10136 						 fold_convert_loc (loc, type,
10137 								   marg)),
10138 				    fold_convert_loc (loc, type, parg1));
10139 	      if (TREE_CODE (parg0) != MULT_EXPR
10140 		  && TREE_CODE (parg1) == MULT_EXPR)
10141 		return
10142 		  fold_build2_loc (loc, PLUS_EXPR, type,
10143 			       fold_convert_loc (loc, type, parg0),
10144 			       fold_build2_loc (loc, pcode, type,
10145 					    fold_convert_loc (loc, type, marg),
10146 					    fold_convert_loc (loc, type,
10147 							      parg1)));
10148 	    }
10149 	}
10150       else
10151 	{
10152 	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
10153 	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10154 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10155 
10156 	  /* Likewise if the operands are reversed.  */
10157 	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10158 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10159 
10160 	  /* Convert X + -C into X - C.  */
10161 	  if (TREE_CODE (arg1) == REAL_CST
10162 	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10163 	    {
10164 	      tem = fold_negate_const (arg1, type);
10165 	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10166 		return fold_build2_loc (loc, MINUS_EXPR, type,
10167 				    fold_convert_loc (loc, type, arg0),
10168 				    fold_convert_loc (loc, type, tem));
10169 	    }
10170 
10171 	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10172 	     to __complex__ ( x, y ).  This is not the same for SNaNs or
10173 	     if signed zeros are involved.  */
10174 	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10175               && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10176 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10177 	    {
10178 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10179 	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10180 	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10181 	      bool arg0rz = false, arg0iz = false;
10182 	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
10183 		  || (arg0i && (arg0iz = real_zerop (arg0i))))
10184 		{
10185 		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10186 		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10187 		  if (arg0rz && arg1i && real_zerop (arg1i))
10188 		    {
10189 		      tree rp = arg1r ? arg1r
10190 				  : build1 (REALPART_EXPR, rtype, arg1);
10191 		      tree ip = arg0i ? arg0i
10192 				  : build1 (IMAGPART_EXPR, rtype, arg0);
10193 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10194 		    }
10195 		  else if (arg0iz && arg1r && real_zerop (arg1r))
10196 		    {
10197 		      tree rp = arg0r ? arg0r
10198 				  : build1 (REALPART_EXPR, rtype, arg0);
10199 		      tree ip = arg1i ? arg1i
10200 				  : build1 (IMAGPART_EXPR, rtype, arg1);
10201 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10202 		    }
10203 		}
10204 	    }
10205 
10206 	  if (flag_unsafe_math_optimizations
10207 	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10208 	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10209 	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10210 	    return tem;
10211 
10212 	  /* Convert x+x into x*2.0.  */
10213 	  if (operand_equal_p (arg0, arg1, 0)
10214 	      && SCALAR_FLOAT_TYPE_P (type))
10215 	    return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10216 				build_real (type, dconst2));
10217 
10218           /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10219              We associate floats only if the user has specified
10220              -fassociative-math.  */
10221           if (flag_associative_math
10222               && TREE_CODE (arg1) == PLUS_EXPR
10223               && TREE_CODE (arg0) != MULT_EXPR)
10224             {
10225               tree tree10 = TREE_OPERAND (arg1, 0);
10226               tree tree11 = TREE_OPERAND (arg1, 1);
10227               if (TREE_CODE (tree11) == MULT_EXPR
10228 		  && TREE_CODE (tree10) == MULT_EXPR)
10229                 {
10230                   tree tree0;
10231                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10232                   return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10233                 }
10234             }
10235           /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10236              We associate floats only if the user has specified
10237              -fassociative-math.  */
10238           if (flag_associative_math
10239               && TREE_CODE (arg0) == PLUS_EXPR
10240               && TREE_CODE (arg1) != MULT_EXPR)
10241             {
10242               tree tree00 = TREE_OPERAND (arg0, 0);
10243               tree tree01 = TREE_OPERAND (arg0, 1);
10244               if (TREE_CODE (tree01) == MULT_EXPR
10245 		  && TREE_CODE (tree00) == MULT_EXPR)
10246                 {
10247                   tree tree0;
10248                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10249                   return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10250                 }
10251             }
10252 	}
10253 
10254      bit_rotate:
10255       /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10256 	 is a rotate of A by C1 bits.  */
10257       /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10258 	 is a rotate of A by B bits.  */
10259       {
10260 	enum tree_code code0, code1;
10261 	tree rtype;
10262 	code0 = TREE_CODE (arg0);
10263 	code1 = TREE_CODE (arg1);
10264 	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10265 	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10266 	    && operand_equal_p (TREE_OPERAND (arg0, 0),
10267 			        TREE_OPERAND (arg1, 0), 0)
10268 	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10269 	        TYPE_UNSIGNED (rtype))
10270 	    /* Only create rotates in complete modes.  Other cases are not
10271 	       expanded properly.  */
10272 	    && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10273 	  {
10274 	    tree tree01, tree11;
10275 	    enum tree_code code01, code11;
10276 
10277 	    tree01 = TREE_OPERAND (arg0, 1);
10278 	    tree11 = TREE_OPERAND (arg1, 1);
10279 	    STRIP_NOPS (tree01);
10280 	    STRIP_NOPS (tree11);
10281 	    code01 = TREE_CODE (tree01);
10282 	    code11 = TREE_CODE (tree11);
10283 	    if (code01 == INTEGER_CST
10284 		&& code11 == INTEGER_CST
10285 		&& TREE_INT_CST_HIGH (tree01) == 0
10286 		&& TREE_INT_CST_HIGH (tree11) == 0
10287 		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10288 		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10289 	      {
10290 		tem = build2_loc (loc, LROTATE_EXPR,
10291 				  TREE_TYPE (TREE_OPERAND (arg0, 0)),
10292 				  TREE_OPERAND (arg0, 0),
10293 				  code0 == LSHIFT_EXPR ? tree01 : tree11);
10294 		return fold_convert_loc (loc, type, tem);
10295 	      }
10296 	    else if (code11 == MINUS_EXPR)
10297 	      {
10298 		tree tree110, tree111;
10299 		tree110 = TREE_OPERAND (tree11, 0);
10300 		tree111 = TREE_OPERAND (tree11, 1);
10301 		STRIP_NOPS (tree110);
10302 		STRIP_NOPS (tree111);
10303 		if (TREE_CODE (tree110) == INTEGER_CST
10304 		    && 0 == compare_tree_int (tree110,
10305 					      TYPE_PRECISION
10306 					      (TREE_TYPE (TREE_OPERAND
10307 							  (arg0, 0))))
10308 		    && operand_equal_p (tree01, tree111, 0))
10309 		  return
10310 		    fold_convert_loc (loc, type,
10311 				      build2 ((code0 == LSHIFT_EXPR
10312 					       ? LROTATE_EXPR
10313 					       : RROTATE_EXPR),
10314 					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
10315 					      TREE_OPERAND (arg0, 0), tree01));
10316 	      }
10317 	    else if (code01 == MINUS_EXPR)
10318 	      {
10319 		tree tree010, tree011;
10320 		tree010 = TREE_OPERAND (tree01, 0);
10321 		tree011 = TREE_OPERAND (tree01, 1);
10322 		STRIP_NOPS (tree010);
10323 		STRIP_NOPS (tree011);
10324 		if (TREE_CODE (tree010) == INTEGER_CST
10325 		    && 0 == compare_tree_int (tree010,
10326 					      TYPE_PRECISION
10327 					      (TREE_TYPE (TREE_OPERAND
10328 							  (arg0, 0))))
10329 		    && operand_equal_p (tree11, tree011, 0))
10330 		    return fold_convert_loc
10331 		      (loc, type,
10332 		       build2 ((code0 != LSHIFT_EXPR
10333 				? LROTATE_EXPR
10334 				: RROTATE_EXPR),
10335 			       TREE_TYPE (TREE_OPERAND (arg0, 0)),
10336 			       TREE_OPERAND (arg0, 0), tree11));
10337 	      }
10338 	  }
10339       }
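      /* E.g. for a 32-bit unsigned x, "(x << 3) + (x >> 29)" is a left
	 rotate of x by 3, and "(x << n) + (x >> (32 - n))" a left rotate
	 by n.  */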
10340 
10341     associate:
10342       /* In most languages, we can't associate operations on floats through
10343 	 parentheses.  Rather than remember where the parentheses were, we
10344 	 don't associate floats at all, unless the user has specified
10345 	 -fassociative-math.
10346 	 And, we need to make sure type is not saturating.  */
10347 
10348       if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10349 	  && !TYPE_SATURATING (type))
10350 	{
10351 	  tree var0, con0, lit0, minus_lit0;
10352 	  tree var1, con1, lit1, minus_lit1;
10353 	  bool ok = true;
10354 
10355 	  /* Split both trees into variables, constants, and literals.  Then
10356 	     associate each group together, the constants with literals,
10357 	     then the result with variables.  This increases the chances of
10358 	     literals being recombined later and of generating relocatable
10359 	     expressions for the sum of a constant and literal.  */
10360 	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10361 	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10362 			     code == MINUS_EXPR);
10363 
10364 	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
10365 	  if (code == MINUS_EXPR)
10366 	    code = PLUS_EXPR;
10367 
10368 	  /* With undefined overflow we can only associate constants with one
10369 	     variable, and constants whose association doesn't overflow.  */
10370 	  if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10371 	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10372 	    {
10373 	      if (var0 && var1)
10374 		{
10375 		  tree tmp0 = var0;
10376 		  tree tmp1 = var1;
10377 
10378 		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
10379 		    tmp0 = TREE_OPERAND (tmp0, 0);
10380 		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
10381 		    tmp1 = TREE_OPERAND (tmp1, 0);
10382 		  /* The only case we can still associate with two variables
10383 		     is if they are the same, modulo negation.  */
10384 		  if (!operand_equal_p (tmp0, tmp1, 0))
10385 		    ok = false;
10386 		}
10387 
10388 	      if (ok && lit0 && lit1)
10389 		{
10390 		  tree tmp0 = fold_convert (type, lit0);
10391 		  tree tmp1 = fold_convert (type, lit1);
10392 
10393 		  if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10394 		      && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10395 		    ok = false;
10396 		}
10397 	    }
10398 
10399 	  /* Only do something if we found more than two objects.  Otherwise,
10400 	     nothing has changed and we risk infinite recursion.  */
10401 	  if (ok
10402 	      && (2 < ((var0 != 0) + (var1 != 0)
10403 		       + (con0 != 0) + (con1 != 0)
10404 		       + (lit0 != 0) + (lit1 != 0)
10405 		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
10406 	    {
10407 	      var0 = associate_trees (loc, var0, var1, code, type);
10408 	      con0 = associate_trees (loc, con0, con1, code, type);
10409 	      lit0 = associate_trees (loc, lit0, lit1, code, type);
10410 	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10411 
10412 	      /* Preserve the MINUS_EXPR if the negative part of the literal is
10413 		 greater than the positive part.  Otherwise, the multiplicative
10414 		 folding code (i.e. extract_muldiv) may be fooled in case
10415 		 unsigned constants are subtracted, like in the following
10416 		 example: ((X*2 + 4) - 8U)/2.  */
10417 	      if (minus_lit0 && lit0)
10418 		{
10419 		  if (TREE_CODE (lit0) == INTEGER_CST
10420 		      && TREE_CODE (minus_lit0) == INTEGER_CST
10421 		      && tree_int_cst_lt (lit0, minus_lit0))
10422 		    {
10423 		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10424 						    MINUS_EXPR, type);
10425 		      lit0 = 0;
10426 		    }
10427 		  else
10428 		    {
10429 		      lit0 = associate_trees (loc, lit0, minus_lit0,
10430 					      MINUS_EXPR, type);
10431 		      minus_lit0 = 0;
10432 		    }
10433 		}
10434 	      if (minus_lit0)
10435 		{
10436 		  if (con0 == 0)
10437 		    return
10438 		      fold_convert_loc (loc, type,
10439 					associate_trees (loc, var0, minus_lit0,
10440 							 MINUS_EXPR, type));
10441 		  else
10442 		    {
10443 		      con0 = associate_trees (loc, con0, minus_lit0,
10444 					      MINUS_EXPR, type);
10445 		      return
10446 			fold_convert_loc (loc, type,
10447 					  associate_trees (loc, var0, con0,
10448 							   PLUS_EXPR, type));
10449 		    }
10450 		}
10451 
10452 	      con0 = associate_trees (loc, con0, lit0, code, type);
10453 	      return
10454 		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10455 							      code, type));
10456 	    }
10457 	}
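      /* For example, with unsigned x and y, "(x + 3) + (y + 5)" is split
	 into variable parts x, y and literals 3, 5 and recombined as
	 "(x + y) + 8"; with undefined signed overflow this association is
	 only done when the variable parts match modulo negation.  */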
10458 
10459       return NULL_TREE;
10460 
10461     case MINUS_EXPR:
10462       /* Pointer simplifications for subtraction, simple reassociations. */
10463       if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10464 	{
10465 	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10466 	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10467 	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10468 	    {
10469 	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10470 	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10471 	      tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10472 	      tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10473 	      return fold_build2_loc (loc, PLUS_EXPR, type,
10474 				  fold_build2_loc (loc, MINUS_EXPR, type,
10475 					       arg00, arg10),
10476 				  fold_build2_loc (loc, MINUS_EXPR, type,
10477 					       arg01, arg11));
10478 	    }
10479 	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10480 	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10481 	    {
10482 	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10483 	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10484 	      tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10485 				      fold_convert_loc (loc, type, arg1));
10486 	      if (tmp)
10487 	        return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10488 	    }
10489 	}
10490       /* A - (-B) -> A + B */
10491       if (TREE_CODE (arg1) == NEGATE_EXPR)
10492 	return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10493 			    fold_convert_loc (loc, type,
10494 					      TREE_OPERAND (arg1, 0)));
10495       /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
10496       if (TREE_CODE (arg0) == NEGATE_EXPR
10497 	  && (FLOAT_TYPE_P (type)
10498 	      || INTEGRAL_TYPE_P (type))
10499 	  && negate_expr_p (arg1)
10500 	  && reorder_operands_p (arg0, arg1))
10501 	return fold_build2_loc (loc, MINUS_EXPR, type,
10502 			    fold_convert_loc (loc, type,
10503 					      negate_expr (arg1)),
10504 			    fold_convert_loc (loc, type,
10505 					      TREE_OPERAND (arg0, 0)));
10506       /* Convert -A - 1 to ~A.  */
10507       if (INTEGRAL_TYPE_P (type)
10508 	  && TREE_CODE (arg0) == NEGATE_EXPR
10509 	  && integer_onep (arg1)
10510 	  && !TYPE_OVERFLOW_TRAPS (type))
10511 	return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10512 			    fold_convert_loc (loc, type,
10513 					      TREE_OPERAND (arg0, 0)));
10514 
10515       /* Convert -1 - A to ~A.  */
10516       if (INTEGRAL_TYPE_P (type)
10517 	  && integer_all_onesp (arg0))
10518 	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10519 
10520 
10521       /* X - (X / CST) * CST is X % CST.  */
10522       if (INTEGRAL_TYPE_P (type)
10523 	  && TREE_CODE (arg1) == MULT_EXPR
10524 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10525 	  && operand_equal_p (arg0,
10526 			      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10527 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10528 			      TREE_OPERAND (arg1, 1), 0))
10529 	return
10530 	  fold_convert_loc (loc, type,
10531 			    fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10532 					 arg0, TREE_OPERAND (arg1, 1)));
10533 
10534       if (! FLOAT_TYPE_P (type))
10535 	{
10536 	  if (integer_zerop (arg0))
10537 	    return negate_expr (fold_convert_loc (loc, type, arg1));
10538 	  if (integer_zerop (arg1))
10539 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10540 
10541 	  /* Fold A - (A & B) into ~B & A.  */
10542 	  if (!TREE_SIDE_EFFECTS (arg0)
10543 	      && TREE_CODE (arg1) == BIT_AND_EXPR)
10544 	    {
10545 	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10546 		{
10547 		  tree arg10 = fold_convert_loc (loc, type,
10548 						 TREE_OPERAND (arg1, 0));
10549 		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
10550 				      fold_build1_loc (loc, BIT_NOT_EXPR,
10551 						   type, arg10),
10552 				      fold_convert_loc (loc, type, arg0));
10553 		}
10554 	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10555 		{
10556 		  tree arg11 = fold_convert_loc (loc,
10557 						 type, TREE_OPERAND (arg1, 1));
10558 		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
10559 				      fold_build1_loc (loc, BIT_NOT_EXPR,
10560 						   type, arg11),
10561 				      fold_convert_loc (loc, type, arg0));
10562 		}
10563 	    }
10564 
10565 	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10566 	     any power of 2 minus 1.  */
10567 	  if (TREE_CODE (arg0) == BIT_AND_EXPR
10568 	      && TREE_CODE (arg1) == BIT_AND_EXPR
10569 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
10570 				  TREE_OPERAND (arg1, 0), 0))
10571 	    {
10572 	      tree mask0 = TREE_OPERAND (arg0, 1);
10573 	      tree mask1 = TREE_OPERAND (arg1, 1);
10574 	      tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10575 
10576 	      if (operand_equal_p (tem, mask1, 0))
10577 		{
10578 		  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10579 				     TREE_OPERAND (arg0, 0), mask1);
10580 		  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10581 		}
10582 	    }
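	  /* E.g. with B == 7, "(a & ~7) - (a & 7)" becomes "(a ^ 7) - 7";
	     for a == 13 both sides evaluate to 8 - 5 == 10 - 7 == 3.  */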
10583 	}
10584 
10585       /* See if ARG1 is zero and X - ARG1 reduces to X.  */
10586       else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10587 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10588 
10589       /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
10590 	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10591 	 (-ARG1 + ARG0) reduces to -ARG1.  */
10592       else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10593 	return negate_expr (fold_convert_loc (loc, type, arg1));
10594 
10595       /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10596 	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
10597 	 signed zeros are involved.  */
10598       if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10599 	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10600 	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10601         {
10602 	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10603 	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10604 	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10605 	  bool arg0rz = false, arg0iz = false;
10606 	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
10607 	      || (arg0i && (arg0iz = real_zerop (arg0i))))
10608 	    {
10609 	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10610 	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10611 	      if (arg0rz && arg1i && real_zerop (arg1i))
10612 	        {
10613 		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10614 					 arg1r ? arg1r
10615 					 : build1 (REALPART_EXPR, rtype, arg1));
10616 		  tree ip = arg0i ? arg0i
10617 		    : build1 (IMAGPART_EXPR, rtype, arg0);
10618 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10619 		}
10620 	      else if (arg0iz && arg1r && real_zerop (arg1r))
10621 	        {
10622 		  tree rp = arg0r ? arg0r
10623 		    : build1 (REALPART_EXPR, rtype, arg0);
10624 		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10625 					 arg1i ? arg1i
10626 					 : build1 (IMAGPART_EXPR, rtype, arg1));
10627 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10628 		}
10629 	    }
10630 	}
10631 
10632       /* Fold &x - &x.  This can happen from &x.foo - &x.
10633 	 This is unsafe for certain floats even in non-IEEE formats.
10634 	 In IEEE, it is unsafe because it does wrong for NaNs.
10635 	 Also note that operand_equal_p is always false if an operand
10636 	 is volatile.  */
10637 
10638       if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10639 	  && operand_equal_p (arg0, arg1, 0))
10640 	return build_zero_cst (type);
10641 
10642       /* A - B -> A + (-B) if B is easily negatable.  */
10643       if (negate_expr_p (arg1)
10644 	  && ((FLOAT_TYPE_P (type)
10645                /* Avoid this transformation if B is a positive REAL_CST.  */
10646 	       && (TREE_CODE (arg1) != REAL_CST
10647 		   ||  REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10648 	      || INTEGRAL_TYPE_P (type)))
10649 	return fold_build2_loc (loc, PLUS_EXPR, type,
10650 			    fold_convert_loc (loc, type, arg0),
10651 			    fold_convert_loc (loc, type,
10652 					      negate_expr (arg1)));
10653 
10654       /* Try folding difference of addresses.  */
10655       {
10656 	HOST_WIDE_INT diff;
10657 
10658 	if ((TREE_CODE (arg0) == ADDR_EXPR
10659 	     || TREE_CODE (arg1) == ADDR_EXPR)
10660 	    && ptr_difference_const (arg0, arg1, &diff))
10661 	  return build_int_cst_type (type, diff);
10662       }
10663 
10664       /* Fold &a[i] - &a[j] to i-j.  */
10665       if (TREE_CODE (arg0) == ADDR_EXPR
10666 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10667 	  && TREE_CODE (arg1) == ADDR_EXPR
10668 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10669         {
10670 	  tree tem = fold_addr_of_array_ref_difference (loc, type,
10671 							TREE_OPERAND (arg0, 0),
10672 							TREE_OPERAND (arg1, 0));
10673 	  if (tem)
10674 	    return tem;
10675 	}
10676 
10677       if (FLOAT_TYPE_P (type)
10678 	  && flag_unsafe_math_optimizations
10679 	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10680 	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10681 	  && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10682 	return tem;
10683 
10684       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10685 	 same, or one of them being 1.  Make sure the type is not saturating
10686 	 and has the signedness of the stripped operands, as
	 fold_plusminus_mult_expr will re-associate.
10687 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
10688       if ((TREE_CODE (arg0) == MULT_EXPR
10689 	   || TREE_CODE (arg1) == MULT_EXPR)
10690 	  && !TYPE_SATURATING (type)
10691 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10692 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10693 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
10694         {
10695 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10696 	  if (tem)
10697 	    return tem;
10698 	}
10699 
10700       goto associate;
10701 
10702     case MULT_EXPR:
10703       /* (-A) * (-B) -> A * B.  */
10704       if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10705 	return fold_build2_loc (loc, MULT_EXPR, type,
10706 			    fold_convert_loc (loc, type,
10707 					      TREE_OPERAND (arg0, 0)),
10708 			    fold_convert_loc (loc, type,
10709 					      negate_expr (arg1)));
10710       if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10711 	return fold_build2_loc (loc, MULT_EXPR, type,
10712 			    fold_convert_loc (loc, type,
10713 					      negate_expr (arg0)),
10714 			    fold_convert_loc (loc, type,
10715 					      TREE_OPERAND (arg1, 0)));
10716 
10717       if (! FLOAT_TYPE_P (type))
10718 	{
10719 	  if (integer_zerop (arg1))
10720 	    return omit_one_operand_loc (loc, type, arg1, arg0);
10721 	  if (integer_onep (arg1))
10722 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10723 	  /* Transform x * -1 into -x.  Make sure to do the negation
10724 	     on the original operand with conversions not stripped
10725 	     because we can only strip non-sign-changing conversions.  */
10726 	  if (integer_all_onesp (arg1))
10727 	    return fold_convert_loc (loc, type, negate_expr (op0));
10728 	  /* Transform x * -C into -x * C if x is easily negatable.  */
10729 	  if (TREE_CODE (arg1) == INTEGER_CST
10730 	      && tree_int_cst_sgn (arg1) == -1
10731 	      && negate_expr_p (arg0)
10732 	      && (tem = negate_expr (arg1)) != arg1
10733 	      && !TREE_OVERFLOW (tem))
10734 	    return fold_build2_loc (loc, MULT_EXPR, type,
10735 	    			fold_convert_loc (loc, type,
10736 						  negate_expr (arg0)),
10737 				tem);
10738 
10739 	  /* (a * (1 << b)) is (a << b).  */
10740 	  if (TREE_CODE (arg1) == LSHIFT_EXPR
10741 	      && integer_onep (TREE_OPERAND (arg1, 0)))
10742 	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10743 				TREE_OPERAND (arg1, 1));
10744 	  if (TREE_CODE (arg0) == LSHIFT_EXPR
10745 	      && integer_onep (TREE_OPERAND (arg0, 0)))
10746 	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10747 				TREE_OPERAND (arg0, 1));
10748 
10749 	  /* (A + A) * C -> A * 2 * C.  */
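	  /* E.g. (x + x) * 3 becomes x * 6 once the inner constant
	     product 2 * 3 is folded.  */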
10750 	  if (TREE_CODE (arg0) == PLUS_EXPR
10751 	      && TREE_CODE (arg1) == INTEGER_CST
10752 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
10753 			          TREE_OPERAND (arg0, 1), 0))
10754 	    return fold_build2_loc (loc, MULT_EXPR, type,
10755 				omit_one_operand_loc (loc, type,
10756 						  TREE_OPERAND (arg0, 0),
10757 						  TREE_OPERAND (arg0, 1)),
10758 				fold_build2_loc (loc, MULT_EXPR, type,
10759 					     build_int_cst (type, 2), arg1));
10760 
10761 	  strict_overflow_p = false;
10762 	  if (TREE_CODE (arg1) == INTEGER_CST
10763 	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10764 					     &strict_overflow_p)))
10765 	    {
10766 	      if (strict_overflow_p)
10767 		fold_overflow_warning (("assuming signed overflow does not "
10768 					"occur when simplifying "
10769 					"multiplication"),
10770 				       WARN_STRICT_OVERFLOW_MISC);
10771 	      return fold_convert_loc (loc, type, tem);
10772 	    }
10773 
10774 	  /* Optimize z * conj(z) for integer complex numbers.  */
10775 	  if (TREE_CODE (arg0) == CONJ_EXPR
10776 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10777 	    return fold_mult_zconjz (loc, type, arg1);
10778 	  if (TREE_CODE (arg1) == CONJ_EXPR
10779 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10780 	    return fold_mult_zconjz (loc, type, arg0);
10781 	}
10782       else
10783 	{
10784 	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
10785 	     when x is NaN, since x * 0 is also NaN.  Nor are they the
10786 	     same in modes with signed zeros, since multiplying a
10787 	     negative value by 0 gives -0, not +0.  */
10788 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10789 	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10790 	      && real_zerop (arg1))
10791 	    return omit_one_operand_loc (loc, type, arg1, arg0);
10792 	  /* In IEEE floating point, x*1 is not equivalent to x for snans.
10793 	     Likewise for complex arithmetic with signed zeros.  */
10794 	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10795 	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10796 		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10797 	      && real_onep (arg1))
10798 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10799 
10800 	  /* Transform x * -1.0 into -x.  */
10801 	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10802 	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10803 		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10804 	      && real_minus_onep (arg1))
10805 	    return fold_convert_loc (loc, type, negate_expr (arg0));
10806 
10807 	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
10808 	     the result for floating point types due to rounding, so it is
10809 	     applied only if -fassociative-math was specified.  */
10810 	  if (flag_associative_math
10811 	      && TREE_CODE (arg0) == RDIV_EXPR
10812 	      && TREE_CODE (arg1) == REAL_CST
10813 	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10814 	    {
10815 	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10816 				      arg1);
10817 	      if (tem)
10818 		return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10819 				    TREE_OPERAND (arg0, 1));
10820 	    }
10821 
10822           /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
10823 	  if (operand_equal_p (arg0, arg1, 0))
10824 	    {
10825 	      tree tem = fold_strip_sign_ops (arg0);
10826 	      if (tem != NULL_TREE)
10827 		{
10828 		  tem = fold_convert_loc (loc, type, tem);
10829 		  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10830 		}
10831 	    }
10832 
10833 	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10834 	     This is not the same for NaNs or if signed zeros are
10835 	     involved.  */
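	  /* E.g. (a + b*I) * I is exactly -b + a*I, so only a swap of
	     the parts and a single negation are needed.  */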
10836 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10837               && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10838 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10839 	      && TREE_CODE (arg1) == COMPLEX_CST
10840 	      && real_zerop (TREE_REALPART (arg1)))
10841 	    {
10842 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10843 	      if (real_onep (TREE_IMAGPART (arg1)))
10844 		return
10845 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
10846 			       negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10847 							     rtype, arg0)),
10848 			       fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10849 	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
10850 		return
10851 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
10852 			       fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10853 			       negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10854 							     rtype, arg0)));
10855 	    }
10856 
10857 	  /* Optimize z * conj(z) for floating point complex numbers.
10858 	     Guarded by flag_unsafe_math_optimizations as non-finite
10859 	     imaginary components don't produce scalar results.  */
10860 	  if (flag_unsafe_math_optimizations
10861 	      && TREE_CODE (arg0) == CONJ_EXPR
10862 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10863 	    return fold_mult_zconjz (loc, type, arg1);
10864 	  if (flag_unsafe_math_optimizations
10865 	      && TREE_CODE (arg1) == CONJ_EXPR
10866 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10867 	    return fold_mult_zconjz (loc, type, arg0);
10868 
10869 	  if (flag_unsafe_math_optimizations)
10870 	    {
10871 	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10872 	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10873 
10874 	      /* Optimizations of root(...)*root(...).  */
10875 	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10876 		{
10877 		  tree rootfn, arg;
10878 		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
10879 		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
10880 
10881 		  /* Optimize sqrt(x)*sqrt(x) as x.  */
10882 		  if (BUILTIN_SQRT_P (fcode0)
10883 		      && operand_equal_p (arg00, arg10, 0)
10884 		      && ! HONOR_SNANS (TYPE_MODE (type)))
10885 		    return arg00;
10886 
10887 	          /* Optimize root(x)*root(y) as root(x*y).  */
10888 		  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10889 		  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10890 		  return build_call_expr_loc (loc, rootfn, 1, arg);
10891 		}
10892 
10893 	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
10894 	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10895 		{
10896 		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10897 		  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10898 					  CALL_EXPR_ARG (arg0, 0),
10899 					  CALL_EXPR_ARG (arg1, 0));
10900 		  return build_call_expr_loc (loc, expfn, 1, arg);
10901 		}
10902 
10903 	      /* Optimizations of pow(...)*pow(...).  */
10904 	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10905 		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10906 		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10907 		{
10908 		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
10909 		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
10910 		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
10911 		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
10912 
10913 		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
10914 		  if (operand_equal_p (arg01, arg11, 0))
10915 		    {
10916 		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10917 		      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10918 					      arg00, arg10);
10919 		      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10920 		    }
10921 
10922 		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
10923 		  if (operand_equal_p (arg00, arg10, 0))
10924 		    {
10925 		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10926 		      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10927 					      arg01, arg11);
10928 		      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10929 		    }
10930 		}
10931 
10932 	      /* Optimize tan(x)*cos(x) as sin(x).  */
10933 	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10934 		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10935 		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10936 		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10937 		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10938 		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10939 		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10940 				      CALL_EXPR_ARG (arg1, 0), 0))
10941 		{
10942 		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10943 
10944 		  if (sinfn != NULL_TREE)
10945 		    return build_call_expr_loc (loc, sinfn, 1,
10946 					    CALL_EXPR_ARG (arg0, 0));
10947 		}
10948 
10949 	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
10950 	      if (fcode1 == BUILT_IN_POW
10951 		  || fcode1 == BUILT_IN_POWF
10952 		  || fcode1 == BUILT_IN_POWL)
10953 		{
10954 		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
10955 		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
10956 		  if (TREE_CODE (arg11) == REAL_CST
10957 		      && !TREE_OVERFLOW (arg11)
10958 		      && operand_equal_p (arg0, arg10, 0))
10959 		    {
10960 		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10961 		      REAL_VALUE_TYPE c;
10962 		      tree arg;
10963 
10964 		      c = TREE_REAL_CST (arg11);
10965 		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10966 		      arg = build_real (type, c);
10967 		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10968 		    }
10969 		}
10970 
10971 	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
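	      /* E.g. pow (x, 2.0) * x becomes pow (x, 3.0); the
		 mirrored x * pow (x, 2.0) case is handled above.  */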
10972 	      if (fcode0 == BUILT_IN_POW
10973 		  || fcode0 == BUILT_IN_POWF
10974 		  || fcode0 == BUILT_IN_POWL)
10975 		{
10976 		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
10977 		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
10978 		  if (TREE_CODE (arg01) == REAL_CST
10979 		      && !TREE_OVERFLOW (arg01)
10980 		      && operand_equal_p (arg1, arg00, 0))
10981 		    {
10982 		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10983 		      REAL_VALUE_TYPE c;
10984 		      tree arg;
10985 
10986 		      c = TREE_REAL_CST (arg01);
10987 		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10988 		      arg = build_real (type, c);
10989 		      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10990 		    }
10991 		}
10992 
10993 	      /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
10994 	      if (!in_gimple_form
10995 		  && optimize
10996 		  && operand_equal_p (arg0, arg1, 0))
10997 		{
10998 		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10999 
11000 		  if (powfn)
11001 		    {
11002 		      tree arg = build_real (type, dconst2);
11003 		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11004 		    }
11005 		}
11006 	    }
11007 	}
11008       goto associate;
11009 
11010     case BIT_IOR_EXPR:
11011     bit_ior:
11012       if (integer_all_onesp (arg1))
11013 	return omit_one_operand_loc (loc, type, arg1, arg0);
11014       if (integer_zerop (arg1))
11015 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11016       if (operand_equal_p (arg0, arg1, 0))
11017 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11018 
11019       /* ~X | X is -1.  */
11020       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11021 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11022 	{
11023 	  t1 = build_zero_cst (type);
11024 	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11025 	  return omit_one_operand_loc (loc, type, t1, arg1);
11026 	}
11027 
11028       /* X | ~X is -1.  */
11029       if (TREE_CODE (arg1) == BIT_NOT_EXPR
11030 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11031 	{
11032 	  t1 = build_zero_cst (type);
11033 	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11034 	  return omit_one_operand_loc (loc, type, t1, arg0);
11035 	}
11036 
11037       /* Canonicalize (X & C1) | C2.  */
11038       if (TREE_CODE (arg0) == BIT_AND_EXPR
11039 	  && TREE_CODE (arg1) == INTEGER_CST
11040 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11041 	{
11042 	  double_int c1, c2, c3, msk;
11043 	  int width = TYPE_PRECISION (type), w;
11044 	  c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11045 	  c2 = tree_to_double_int (arg1);
11046 
11047 	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
11048 	  if (double_int_equal_p (double_int_and (c1, c2), c1))
11049 	    return omit_one_operand_loc (loc, type, arg1,
11050 					 TREE_OPERAND (arg0, 0));
11051 
11052 	  msk = double_int_mask (width);
11053 
11054 	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
11055 	  if (double_int_zero_p (double_int_and_not (msk,
11056 						     double_int_ior (c1, c2))))
11057 	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11058 				    TREE_OPERAND (arg0, 0), arg1);
11059 
11060 	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11061 	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11062 	     mode which allows further optimizations.  */
11063 	  c1 = double_int_and (c1, msk);
11064 	  c2 = double_int_and (c2, msk);
11065 	  c3 = double_int_and_not (c1, c2);
11066 	  for (w = BITS_PER_UNIT;
11067 	       w <= width && w <= HOST_BITS_PER_WIDE_INT;
11068 	       w <<= 1)
11069 	    {
11070 	      unsigned HOST_WIDE_INT mask
11071 		= (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11072 	      if (((c1.low | c2.low) & mask) == mask
11073 		  && (c1.low & ~mask) == 0 && c1.high == 0)
11074 		{
11075 		  c3 = uhwi_to_double_int (mask);
11076 		  break;
11077 		}
11078 	    }
11079 	  if (!double_int_equal_p (c3, c1))
11080 	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11081 				    fold_build2_loc (loc, BIT_AND_EXPR, type,
11082 						     TREE_OPERAND (arg0, 0),
11083 						     double_int_to_tree (type,
11084 									 c3)),
11085 				    arg1);
11086 	}
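      /* Worked example of the above: (X & 0x0f) | 0x06 becomes
	 (X & 0x09) | 0x06, while (X & 0xf0) | 0x0f becomes
	 (X & 0xff) | 0x0f, since 0xff is a mode mask that enables
	 further optimizations.  */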
11087 
11088       /* (X & Y) | Y is (X, Y).  */
11089       if (TREE_CODE (arg0) == BIT_AND_EXPR
11090 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11091 	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11092       /* (X & Y) | X is (Y, X).  */
11093       if (TREE_CODE (arg0) == BIT_AND_EXPR
11094 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11095 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11096 	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11097       /* X | (X & Y) is (Y, X).  */
11098       if (TREE_CODE (arg1) == BIT_AND_EXPR
11099 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11100 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11101 	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11102       /* X | (Y & X) is (Y, X).  */
11103       if (TREE_CODE (arg1) == BIT_AND_EXPR
11104 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11105 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11106 	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11107 
11108       /* (X & ~Y) | (~X & Y) is X ^ Y.  */
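      /* This also catches (X & Y) | (~X & ~Y), which becomes X ^ ~Y,
	 i.e. ~(X ^ Y).  */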
11109       if (TREE_CODE (arg0) == BIT_AND_EXPR
11110 	  && TREE_CODE (arg1) == BIT_AND_EXPR)
11111         {
11112 	  tree a0, a1, l0, l1, n0, n1;
11113 
11114 	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11115 	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11116 
11117 	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11118 	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11119 
11120 	  n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11121 	  n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11122 
11123 	  if ((operand_equal_p (n0, a0, 0)
11124 	       && operand_equal_p (n1, a1, 0))
11125 	      || (operand_equal_p (n0, a1, 0)
11126 		  && operand_equal_p (n1, a0, 0)))
11127 	    return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11128 	}
11129 
11130       t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11131       if (t1 != NULL_TREE)
11132 	return t1;
11133 
11134       /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11135 
11136 	 This results in more efficient code for machines without a NAND
11137 	 instruction.  Combine will canonicalize to the first form,
11138 	 which will allow use of NAND instructions provided by the
11139 	 backend if they exist.  */
11140       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11141 	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
11142 	{
11143 	  return
11144 	    fold_build1_loc (loc, BIT_NOT_EXPR, type,
11145 			 build2 (BIT_AND_EXPR, type,
11146 				 fold_convert_loc (loc, type,
11147 						   TREE_OPERAND (arg0, 0)),
11148 				 fold_convert_loc (loc, type,
11149 						   TREE_OPERAND (arg1, 0))));
11150 	}
11151 
11152       /* See if this can be simplified into a rotate first.  If that
11153 	 is unsuccessful continue in the association code.  */
11154       goto bit_rotate;
11155 
11156     case BIT_XOR_EXPR:
11157       if (integer_zerop (arg1))
11158 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11159       if (integer_all_onesp (arg1))
11160 	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11161       if (operand_equal_p (arg0, arg1, 0))
11162 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11163 
11164       /* ~X ^ X is -1.  */
11165       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11166 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11167 	{
11168 	  t1 = build_zero_cst (type);
11169 	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11170 	  return omit_one_operand_loc (loc, type, t1, arg1);
11171 	}
11172 
11173       /* X ^ ~X is -1.  */
11174       if (TREE_CODE (arg1) == BIT_NOT_EXPR
11175 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11176 	{
11177 	  t1 = build_zero_cst (type);
11178 	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11179 	  return omit_one_operand_loc (loc, type, t1, arg0);
11180 	}
11181 
11182       /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11183          with a constant, and the two constants have no bits in common,
11184 	 we should treat this as a BIT_IOR_EXPR since this may produce more
11185 	 simplifications.  */
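      /* E.g. (X & 0x0f) ^ (Y & 0xf0): the masked values share no
	 bits, so the XOR is equivalent to (X & 0x0f) | (Y & 0xf0).  */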
11186       if (TREE_CODE (arg0) == BIT_AND_EXPR
11187 	  && TREE_CODE (arg1) == BIT_AND_EXPR
11188 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11189 	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11190 	  && integer_zerop (const_binop (BIT_AND_EXPR,
11191 					 TREE_OPERAND (arg0, 1),
11192 					 TREE_OPERAND (arg1, 1))))
11193 	{
11194 	  code = BIT_IOR_EXPR;
11195 	  goto bit_ior;
11196 	}
11197 
11198       /* (X | Y) ^ X -> Y & ~X.  */
11199       if (TREE_CODE (arg0) == BIT_IOR_EXPR
11200           && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11201         {
11202 	  tree t2 = TREE_OPERAND (arg0, 1);
11203 	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11204 			    arg1);
11205 	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11206 			    fold_convert_loc (loc, type, t2),
11207 			    fold_convert_loc (loc, type, t1));
11208 	  return t1;
11209 	}
11210 
11211       /* (Y | X) ^ X -> Y & ~X.  */
11212       if (TREE_CODE (arg0) == BIT_IOR_EXPR
11213           && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11214         {
11215 	  tree t2 = TREE_OPERAND (arg0, 0);
11216 	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11217 			    arg1);
11218 	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11219 			    fold_convert_loc (loc, type, t2),
11220 			    fold_convert_loc (loc, type, t1));
11221 	  return t1;
11222 	}
11223 
11224       /* X ^ (X | Y) -> Y & ~X.  */
11225       if (TREE_CODE (arg1) == BIT_IOR_EXPR
11226           && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11227         {
11228 	  tree t2 = TREE_OPERAND (arg1, 1);
11229 	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11230 			    arg0);
11231 	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11232 			    fold_convert_loc (loc, type, t2),
11233 			    fold_convert_loc (loc, type, t1));
11234 	  return t1;
11235 	}
11236 
11237       /* X ^ (Y | X) -> Y & ~X.  */
11238       if (TREE_CODE (arg1) == BIT_IOR_EXPR
11239           && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11240         {
11241 	  tree t2 = TREE_OPERAND (arg1, 0);
11242 	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11243 			    arg0);
11244 	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11245 			    fold_convert_loc (loc, type, t2),
11246 			    fold_convert_loc (loc, type, t1));
11247 	  return t1;
11248 	}
11249 
11250       /* Convert ~X ^ ~Y to X ^ Y.  */
11251       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11252 	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
11253 	return fold_build2_loc (loc, code, type,
11254 			    fold_convert_loc (loc, type,
11255 					      TREE_OPERAND (arg0, 0)),
11256 			    fold_convert_loc (loc, type,
11257 					      TREE_OPERAND (arg1, 0)));
11258 
11259       /* Convert ~X ^ C to X ^ ~C.  */
11260       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11261 	  && TREE_CODE (arg1) == INTEGER_CST)
11262 	return fold_build2_loc (loc, code, type,
11263 			    fold_convert_loc (loc, type,
11264 					      TREE_OPERAND (arg0, 0)),
11265 			    fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11266 
11267       /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
11268       if (TREE_CODE (arg0) == BIT_AND_EXPR
11269 	  && integer_onep (TREE_OPERAND (arg0, 1))
11270 	  && integer_onep (arg1))
11271 	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11272 			    build_int_cst (TREE_TYPE (arg0), 0));
11273 
11274       /* Fold (X & Y) ^ Y as ~X & Y.  */
11275       if (TREE_CODE (arg0) == BIT_AND_EXPR
11276 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11277 	{
11278 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11279 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11280 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11281 			      fold_convert_loc (loc, type, arg1));
11282 	}
11283       /* Fold (X & Y) ^ X as ~Y & X.  */
11284       if (TREE_CODE (arg0) == BIT_AND_EXPR
11285 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11286 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11287 	{
11288 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11289 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11290 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11291 			      fold_convert_loc (loc, type, arg1));
11292 	}
11293       /* Fold X ^ (X & Y) as X & ~Y.  */
11294       if (TREE_CODE (arg1) == BIT_AND_EXPR
11295 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11296 	{
11297 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11298 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11299 			      fold_convert_loc (loc, type, arg0),
11300 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11301 	}
11302       /* Fold X ^ (Y & X) as ~Y & X.  */
11303       if (TREE_CODE (arg1) == BIT_AND_EXPR
11304 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11305 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11306 	{
11307 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11308 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11309 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11310 			      fold_convert_loc (loc, type, arg0));
11311 	}
11312 
11313       /* See if this can be simplified into a rotate first.  If that
11314 	 is unsuccessful continue in the association code.  */
11315       goto bit_rotate;
11316 
11317     case BIT_AND_EXPR:
11318       if (integer_all_onesp (arg1))
11319 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11320       if (integer_zerop (arg1))
11321 	return omit_one_operand_loc (loc, type, arg1, arg0);
11322       if (operand_equal_p (arg0, arg1, 0))
11323 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11324 
11325       /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
11326       if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11327 	   || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11328 	   || (TREE_CODE (arg0) == EQ_EXPR
11329 	       && integer_zerop (TREE_OPERAND (arg0, 1))))
11330 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11331 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11332 
11333       /* X & ~X, X & (X == 0), and X & !X are always zero.  */
11334       if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11335 	   || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11336 	   || (TREE_CODE (arg1) == EQ_EXPR
11337 	       && integer_zerop (TREE_OPERAND (arg1, 1))))
11338 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11339 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11340 
11341       /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
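      /* E.g. (X | 0x0c) & 0x0a becomes (X & 0x0a) | 0x08.  */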
11342       if (TREE_CODE (arg0) == BIT_IOR_EXPR
11343 	  && TREE_CODE (arg1) == INTEGER_CST
11344 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11345 	{
11346 	  tree tmp1 = fold_convert_loc (loc, type, arg1);
11347 	  tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11348 	  tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11349 	  tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11350 	  tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11351 	  return
11352 	    fold_convert_loc (loc, type,
11353 			      fold_build2_loc (loc, BIT_IOR_EXPR,
11354 					   type, tmp2, tmp3));
11355 	}
11356 
11357       /* (X | Y) & Y is (X, Y).  */
11358       if (TREE_CODE (arg0) == BIT_IOR_EXPR
11359 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11360 	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11361       /* (X | Y) & X is (Y, X).  */
11362       if (TREE_CODE (arg0) == BIT_IOR_EXPR
11363 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11364 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11365 	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11366       /* X & (X | Y) is (Y, X).  */
11367       if (TREE_CODE (arg1) == BIT_IOR_EXPR
11368 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11369 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11370 	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11371       /* X & (Y | X) is (Y, X).  */
11372       if (TREE_CODE (arg1) == BIT_IOR_EXPR
11373 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11374 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11375 	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11376 
11377       /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
11378       if (TREE_CODE (arg0) == BIT_XOR_EXPR
11379 	  && integer_onep (TREE_OPERAND (arg0, 1))
11380 	  && integer_onep (arg1))
11381 	{
11382 	  tem = TREE_OPERAND (arg0, 0);
11383 	  return fold_build2_loc (loc, EQ_EXPR, type,
11384 			      fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11385 					   build_int_cst (TREE_TYPE (tem), 1)),
11386 			      build_int_cst (TREE_TYPE (tem), 0));
11387 	}
11388       /* Fold ~X & 1 as (X & 1) == 0.  */
11389       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11390 	  && integer_onep (arg1))
11391 	{
11392 	  tem = TREE_OPERAND (arg0, 0);
11393 	  return fold_build2_loc (loc, EQ_EXPR, type,
11394 			      fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11395 					   build_int_cst (TREE_TYPE (tem), 1)),
11396 			      build_int_cst (TREE_TYPE (tem), 0));
11397 	}
11398       /* Fold !X & 1 as X == 0.  */
11399       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11400 	  && integer_onep (arg1))
11401 	{
11402 	  tem = TREE_OPERAND (arg0, 0);
11403 	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
11404 				  build_int_cst (TREE_TYPE (tem), 0));
11405 	}
11406 
11407       /* Fold (X ^ Y) & Y as ~X & Y.  */
11408       if (TREE_CODE (arg0) == BIT_XOR_EXPR
11409 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11410 	{
11411 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11412 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11413 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11414 			      fold_convert_loc (loc, type, arg1));
11415 	}
11416       /* Fold (X ^ Y) & X as ~Y & X.  */
11417       if (TREE_CODE (arg0) == BIT_XOR_EXPR
11418 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11419 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11420 	{
11421 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11422 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11423 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11424 			      fold_convert_loc (loc, type, arg1));
11425 	}
11426       /* Fold X & (X ^ Y) as X & ~Y.  */
11427       if (TREE_CODE (arg1) == BIT_XOR_EXPR
11428 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11429 	{
11430 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11431 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11432 			      fold_convert_loc (loc, type, arg0),
11433 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11434 	}
11435       /* Fold X & (Y ^ X) as ~Y & X.  */
11436       if (TREE_CODE (arg1) == BIT_XOR_EXPR
11437 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11438 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11439 	{
11440 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11441 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11442 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11443 			      fold_convert_loc (loc, type, arg0));
11444 	}
11445 
11446       /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11447 	 ((A & N) + B) & M -> (A + B) & M
11448 	 Similarly if (N & M) == 0,
11449 	 ((A | N) + B) & M -> (A + B) & M
11450 	 and for - instead of + (or unary - instead of +)
11451 	 and/or ^ instead of |.
11452 	 If B is constant and (B & M) == 0, fold into A & M.  */
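      /* E.g. with M == 0xff, ((A & 0x1ff) + B) & 0xff folds to
	 (A + B) & 0xff because 0x1ff covers every bit of M, and
	 ((A | 0x300) + B) & 0xff folds the same way because 0x300 and
	 M share no bits.  */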
11453       if (host_integerp (arg1, 1))
11454 	{
11455 	  unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11456 	  if (~cst1 && (cst1 & (cst1 + 1)) == 0
11457 	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11458 	      && (TREE_CODE (arg0) == PLUS_EXPR
11459 		  || TREE_CODE (arg0) == MINUS_EXPR
11460 		  || TREE_CODE (arg0) == NEGATE_EXPR)
11461 	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11462 		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11463 	    {
11464 	      tree pmop[2];
11465 	      int which = 0;
11466 	      unsigned HOST_WIDE_INT cst0;
11467 
11468 	      /* Now we know that arg0 is (C + D) or (C - D) or
11469 		 -C and arg1 (M) is == (1LL << cst) - 1.
11470 		 -C, and arg1 (M) == (1LL << cst) - 1.
11471 	      pmop[0] = TREE_OPERAND (arg0, 0);
11472 	      pmop[1] = NULL;
11473 	      if (TREE_CODE (arg0) != NEGATE_EXPR)
11474 		{
11475 		  pmop[1] = TREE_OPERAND (arg0, 1);
11476 		  which = 1;
11477 		}
11478 
11479 	      if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11480 		  || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11481 		      & cst1) != cst1)
11482 		which = -1;
11483 
11484 	      for (; which >= 0; which--)
11485 		switch (TREE_CODE (pmop[which]))
11486 		  {
11487 		  case BIT_AND_EXPR:
11488 		  case BIT_IOR_EXPR:
11489 		  case BIT_XOR_EXPR:
11490 		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11491 			!= INTEGER_CST)
11492 		      break;
11493 		    /* tree_low_cst not used, because we don't care about
11494 		       the upper bits.  */
11495 		    cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11496 		    cst0 &= cst1;
11497 		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11498 		      {
11499 			if (cst0 != cst1)
11500 			  break;
11501 		      }
11502 		    else if (cst0 != 0)
11503 		      break;
11504 		    /* If C or D is of the form (A & N) where
11505 		       (N & M) == M, or of the form (A | N) or
11506 		       (A ^ N) where (N & M) == 0, replace it with A.  */
11507 		    pmop[which] = TREE_OPERAND (pmop[which], 0);
11508 		    break;
11509 		  case INTEGER_CST:
11510 		    /* If C or D is an N where (N & M) == 0, it can be
11511 		       omitted (assumed 0).  */
11512 		    if ((TREE_CODE (arg0) == PLUS_EXPR
11513 			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11514 			&& (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11515 		      pmop[which] = NULL;
11516 		    break;
11517 		  default:
11518 		    break;
11519 		  }
11520 
11521 	      /* Only build anything new if we optimized one or both arguments
11522 		 above.  */
11523 	      if (pmop[0] != TREE_OPERAND (arg0, 0)
11524 		  || (TREE_CODE (arg0) != NEGATE_EXPR
11525 		      && pmop[1] != TREE_OPERAND (arg0, 1)))
11526 		{
11527 		  tree utype = TREE_TYPE (arg0);
11528 		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11529 		    {
11530 		      /* Perform the operations in a type that has defined
11531 			 overflow behavior.  */
11532 		      utype = unsigned_type_for (TREE_TYPE (arg0));
11533 		      if (pmop[0] != NULL)
11534 			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11535 		      if (pmop[1] != NULL)
11536 			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11537 		    }
11538 
11539 		  if (TREE_CODE (arg0) == NEGATE_EXPR)
11540 		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11541 		  else if (TREE_CODE (arg0) == PLUS_EXPR)
11542 		    {
11543 		      if (pmop[0] != NULL && pmop[1] != NULL)
11544 			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11545 					       pmop[0], pmop[1]);
11546 		      else if (pmop[0] != NULL)
11547 			tem = pmop[0];
11548 		      else if (pmop[1] != NULL)
11549 			tem = pmop[1];
11550 		      else
11551 			return build_int_cst (type, 0);
11552 		    }
11553 		  else if (pmop[0] == NULL)
11554 		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11555 		  else
11556 		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11557 					   pmop[0], pmop[1]);
11558 		  /* TEM is now the new binary +, - or unary - replacement.  */
11559 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11560 					 fold_convert_loc (loc, utype, arg1));
11561 		  return fold_convert_loc (loc, type, tem);
11562 		}
11563 	    }
11564 	}
11565 
11566       t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11567       if (t1 != NULL_TREE)
11568 	return t1;
11569       /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
11570       if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11571 	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11572 	{
11573 	  unsigned int prec
11574 	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11575 
11576 	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11577 	      && (~TREE_INT_CST_LOW (arg1)
11578 		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11579 	    return
11580 	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11581 	}
11582 
11583       /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11584 
11585 	 This results in more efficient code for machines without a NOR
11586 	 instruction.  Combine will canonicalize to the first form,
11587 	 which will allow use of NOR instructions provided by the
11588 	 backend if they exist.  */
11589       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11590 	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
11591 	{
11592 	  return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11593 			      build2 (BIT_IOR_EXPR, type,
11594 				      fold_convert_loc (loc, type,
11595 							TREE_OPERAND (arg0, 0)),
11596 				      fold_convert_loc (loc, type,
11597 							TREE_OPERAND (arg1, 0))));
11598 	}
11599 
11600       /* If arg0 is derived from the address of an object or function, we may
11601 	 be able to fold this expression using the object or function's
11602 	 alignment.  */
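      /* E.g. if ARG0 is known to be the address of an object with
	 16-byte alignment (modulus 16, residue 0), then ARG0 & 15
	 folds to the constant 0.  */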
11603       if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11604 	{
11605 	  unsigned HOST_WIDE_INT modulus, residue;
11606 	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11607 
11608 	  modulus = get_pointer_modulus_and_residue (arg0, &residue,
11609 						     integer_onep (arg1));
11610 
11611 	  /* This works because modulus is a power of 2.  If this weren't the
11612 	     case, we'd have to replace it by its greatest power-of-2
11613 	     divisor: modulus & -modulus.  */
11614 	  if (low < modulus)
11615 	    return build_int_cst (type, residue & low);
11616 	}
11617 
11618       /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11619 	      (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11620 	 if the new mask might be further optimized.  */
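      /* E.g. for a 32-bit unsigned X, (X >> 24) & 0xff: the shift
	 already guarantees the upper 24 bits are zero, so the mask is
	 widened to the mode mask 0xffffffff, after which the BIT_AND
	 folds away entirely.  */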
11621       if ((TREE_CODE (arg0) == LSHIFT_EXPR
11622 	   || TREE_CODE (arg0) == RSHIFT_EXPR)
11623 	  && host_integerp (TREE_OPERAND (arg0, 1), 1)
11624 	  && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11625 	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11626 	     < TYPE_PRECISION (TREE_TYPE (arg0))
11627 	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11628 	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11629 	{
11630 	  unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11631 	  unsigned HOST_WIDE_INT mask
11632 	    = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11633 	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
11634 	  tree shift_type = TREE_TYPE (arg0);
11635 
11636 	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
11637 	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11638 	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
11639 		   && TYPE_PRECISION (TREE_TYPE (arg0))
11640 		      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11641 	    {
11642 	      unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11643 	      tree arg00 = TREE_OPERAND (arg0, 0);
11644 	      /* See if more bits can be proven as zero because of
11645 		 zero extension.  */
11646 	      if (TREE_CODE (arg00) == NOP_EXPR
11647 		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11648 		{
11649 		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11650 		  if (TYPE_PRECISION (inner_type)
11651 		      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11652 		      && TYPE_PRECISION (inner_type) < prec)
11653 		    {
11654 		      prec = TYPE_PRECISION (inner_type);
11655 		      /* See if we can shorten the right shift.  */
11656 		      if (shiftc < prec)
11657 			shift_type = inner_type;
11658 		    }
11659 		}
11660 	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
11661 	      zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11662 	      zerobits <<= prec - shiftc;
11663 	      /* For an arithmetic shift, if the sign bit could be set,
11664 		 zerobits can actually contain sign bits, so no
11665 		 transformation is possible unless MASK masks them all
11666 		 away.  In that case the shift needs to be converted
		 into a logical shift.  */
11667 	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11668 		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11669 		{
11670 		  if ((mask & zerobits) == 0)
11671 		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
11672 		  else
11673 		    zerobits = 0;
11674 		}
11675 	    }
11676 
11677 	  /* ((X << 16) & 0xff00) is (X, 0).  */
11678 	  if ((mask & zerobits) == mask)
11679 	    return omit_one_operand_loc (loc, type,
11680 				     build_int_cst (type, 0), arg0);
11681 
11682 	  newmask = mask | zerobits;
11683 	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
11684 	    {
11685 	      unsigned int prec;
11686 
11687 	      /* Only do the transformation if NEWMASK is some integer
11688 		 mode's mask.  */
11689 	      for (prec = BITS_PER_UNIT;
11690 		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11691 		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11692 		  break;
11693 	      if (prec < HOST_BITS_PER_WIDE_INT
11694 		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
11695 		{
11696 		  tree newmaskt;
11697 
11698 		  if (shift_type != TREE_TYPE (arg0))
11699 		    {
11700 		      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11701 					 fold_convert_loc (loc, shift_type,
11702 							   TREE_OPERAND (arg0, 0)),
11703 					 TREE_OPERAND (arg0, 1));
11704 		      tem = fold_convert_loc (loc, type, tem);
11705 		    }
11706 		  else
11707 		    tem = op0;
11708 		  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11709 		  if (!tree_int_cst_equal (newmaskt, arg1))
11710 		    return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11711 		}
11712 	    }
11713 	}
11714 
11715       goto associate;
11716 
11717     case RDIV_EXPR:
11718       /* Don't touch a floating-point divide by zero unless the mode
11719 	 of the constant can represent infinity.  */
11720       if (TREE_CODE (arg1) == REAL_CST
11721 	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11722 	  && real_zerop (arg1))
11723 	return NULL_TREE;
11724 
11725       /* Optimize A / A to 1.0 if we don't care about
11726 	 NaNs or Infinities.  Skip the transformation
11727 	 for non-real operands.  */
11728       if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11729 	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11730 	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11731 	  && operand_equal_p (arg0, arg1, 0))
11732 	{
11733 	  tree r = build_real (TREE_TYPE (arg0), dconst1);
11734 
11735 	  return omit_two_operands_loc (loc, type, r, arg0, arg1);
11736 	}
11737 
11738       /* The complex version of the above A / A optimization.  */
11739       if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11740 	  && operand_equal_p (arg0, arg1, 0))
11741 	{
11742 	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11743 	  if (! HONOR_NANS (TYPE_MODE (elem_type))
11744 	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11745 	    {
11746 	      tree r = build_real (elem_type, dconst1);
11747 	      /* omit_two_operands will call fold_convert for us.  */
11748 	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
11749 	    }
11750 	}
11751 
11752       /* (-A) / (-B) -> A / B.  */
11753       if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11754 	return fold_build2_loc (loc, RDIV_EXPR, type,
11755 			    TREE_OPERAND (arg0, 0),
11756 			    negate_expr (arg1));
11757       if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11758 	return fold_build2_loc (loc, RDIV_EXPR, type,
11759 			    negate_expr (arg0),
11760 			    TREE_OPERAND (arg1, 0));
11761 
11762       /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
11763       if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11764 	  && real_onep (arg1))
11765 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11766 
11767       /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
11768       if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11769 	  && real_minus_onep (arg1))
11770 	return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11771 						  negate_expr (arg0)));
11772 
11773       /* If ARG1 is a constant, we can convert this to a multiply by the
11774 	 reciprocal.  This does not have the same rounding properties,
11775 	 so only do this if -freciprocal-math.  We can actually
11776 	 always safely do it if ARG1 is a power of two, but it's hard to
11777 	 tell if it is or not in a portable manner.  */
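      /* E.g. X / 2.0 becomes X * 0.5 whenever we are optimizing,
	 since 0.5 is the exact inverse of 2.0; X / 3.0 is rewritten
	 only under -freciprocal-math, because 1.0/3.0 rounds.  */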
11778       if (TREE_CODE (arg1) == REAL_CST)
11779 	{
11780 	  if (flag_reciprocal_math
11781 	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
11782 					  arg1)))
11783 	    return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11784 	  /* Find the reciprocal if optimizing and the result is exact.  */
11785 	  if (optimize)
11786 	    {
11787 	      REAL_VALUE_TYPE r;
11788 	      r = TREE_REAL_CST (arg1);
11789 	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11790 		{
11791 		  tem = build_real (type, r);
11792 		  return fold_build2_loc (loc, MULT_EXPR, type,
11793 				      fold_convert_loc (loc, type, arg0), tem);
11794 		}
11795 	    }
11796 	}
11797       /* Convert A/B/C to A/(B*C).  */
11798       if (flag_reciprocal_math
11799 	  && TREE_CODE (arg0) == RDIV_EXPR)
11800 	return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11801 			    fold_build2_loc (loc, MULT_EXPR, type,
11802 					 TREE_OPERAND (arg0, 1), arg1));
11803 
11804       /* Convert A/(B/C) to (A/B)*C.  */
11805       if (flag_reciprocal_math
11806 	  && TREE_CODE (arg1) == RDIV_EXPR)
11807 	return fold_build2_loc (loc, MULT_EXPR, type,
11808 			    fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11809 					 TREE_OPERAND (arg1, 0)),
11810 			    TREE_OPERAND (arg1, 1));
11811 
11812       /* Convert C1/(X*C2) into (C1/C2)/X.  */
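      /* E.g. 6.0 / (X * 2.0) becomes 3.0 / X under
	 -freciprocal-math.  */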
11813       if (flag_reciprocal_math
11814 	  && TREE_CODE (arg1) == MULT_EXPR
11815 	  && TREE_CODE (arg0) == REAL_CST
11816 	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11817 	{
11818 	  tree tem = const_binop (RDIV_EXPR, arg0,
11819 				  TREE_OPERAND (arg1, 1));
11820 	  if (tem)
11821 	    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11822 				TREE_OPERAND (arg1, 0));
11823 	}
11824 
11825       if (flag_unsafe_math_optimizations)
11826 	{
11827 	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11828 	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11829 
11830 	  /* Optimize sin(x)/cos(x) as tan(x).  */
11831 	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11832 	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11833 	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11834 	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11835 				  CALL_EXPR_ARG (arg1, 0), 0))
11836 	    {
11837 	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11838 
11839 	      if (tanfn != NULL_TREE)
11840 		return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11841 	    }
11842 
11843 	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
11844 	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11845 	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11846 	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11847 	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11848 				  CALL_EXPR_ARG (arg1, 0), 0))
11849 	    {
11850 	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11851 
11852 	      if (tanfn != NULL_TREE)
11853 		{
11854 		  tree tmp = build_call_expr_loc (loc, tanfn, 1,
11855 					      CALL_EXPR_ARG (arg0, 0));
11856 		  return fold_build2_loc (loc, RDIV_EXPR, type,
11857 				      build_real (type, dconst1), tmp);
11858 		}
11859 	    }
11860 
11861  	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11862 	     NaNs or Infinities.  */
11863  	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11864  	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11865  	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11866 	    {
11867 	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
11868 	      tree arg01 = CALL_EXPR_ARG (arg1, 0);
11869 
11870 	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11871 		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11872 		  && operand_equal_p (arg00, arg01, 0))
11873 		{
11874 		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11875 
11876 		  if (cosfn != NULL_TREE)
11877 		    return build_call_expr_loc (loc, cosfn, 1, arg00);
11878 		}
11879 	    }
11880 
11881  	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11882 	     NaNs or Infinities.  */
11883  	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11884  	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11885  	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11886 	    {
11887 	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
11888 	      tree arg01 = CALL_EXPR_ARG (arg1, 0);
11889 
11890 	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11891 		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11892 		  && operand_equal_p (arg00, arg01, 0))
11893 		{
11894 		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11895 
11896 		  if (cosfn != NULL_TREE)
11897 		    {
11898 		      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11899 		      return fold_build2_loc (loc, RDIV_EXPR, type,
11900 					  build_real (type, dconst1),
11901 					  tmp);
11902 		    }
11903 		}
11904 	    }
11905 
11906 	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
11907 	  if (fcode0 == BUILT_IN_POW
11908 	      || fcode0 == BUILT_IN_POWF
11909 	      || fcode0 == BUILT_IN_POWL)
11910 	    {
11911 	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
11912 	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
11913 	      if (TREE_CODE (arg01) == REAL_CST
11914 		  && !TREE_OVERFLOW (arg01)
11915 		  && operand_equal_p (arg1, arg00, 0))
11916 		{
11917 		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11918 		  REAL_VALUE_TYPE c;
11919 		  tree arg;
11920 
11921 		  c = TREE_REAL_CST (arg01);
11922 		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11923 		  arg = build_real (type, c);
11924 		  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11925 		}
11926 	    }
11927 
11928 	  /* Optimize a/root(b/c) into a*root(c/b).  */
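	  /* E.g. a / sqrt (b / c) becomes a * sqrt (c / b), trading
	     the outer division for a multiplication.  */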
11929 	  if (BUILTIN_ROOT_P (fcode1))
11930 	    {
11931 	      tree rootarg = CALL_EXPR_ARG (arg1, 0);
11932 
11933 	      if (TREE_CODE (rootarg) == RDIV_EXPR)
11934 		{
11935 		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11936 		  tree b = TREE_OPERAND (rootarg, 0);
11937 		  tree c = TREE_OPERAND (rootarg, 1);
11938 
11939 		  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11940 
11941 		  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11942 		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11943 		}
11944 	    }
11945 
11946 	  /* Optimize x/expN(y) into x*expN(-y).  */
11947 	  if (BUILTIN_EXPONENT_P (fcode1))
11948 	    {
11949 	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11950 	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11951 	      arg1 = build_call_expr_loc (loc,
11952 				      expfn, 1,
11953 				      fold_convert_loc (loc, type, arg));
11954 	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11955 	    }
11956 
11957 	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
11958 	  if (fcode1 == BUILT_IN_POW
11959 	      || fcode1 == BUILT_IN_POWF
11960 	      || fcode1 == BUILT_IN_POWL)
11961 	    {
11962 	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11963 	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
11964 	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
11965 	      tree neg11 = fold_convert_loc (loc, type,
11966 					     negate_expr (arg11));
11967 	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11968 	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11969 	    }
11970 	}
11971       return NULL_TREE;
11972 
11973     case TRUNC_DIV_EXPR:
11974       /* Optimize (X & (-A)) / A where A is a power of 2,
11975 	 to X >> log2(A).  */
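      /* E.g. (X & -8) / 8 becomes X >> 3: the AND rounds X down to a
	 multiple of 8, which is exactly what the arithmetic right
	 shift computes.  */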
11976       if (TREE_CODE (arg0) == BIT_AND_EXPR
11977 	  && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11978 	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11979 	{
11980 	  tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11981 				      arg1, TREE_OPERAND (arg0, 1));
11982 	  if (sum && integer_zerop (sum))
	    {
11983 	      unsigned long pow2;
11984 
11985 	      if (TREE_INT_CST_LOW (arg1))
11986 		pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11987 	      else
11988 		pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11989 		       + HOST_BITS_PER_WIDE_INT;
11990 
11991 	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
11992 				      TREE_OPERAND (arg0, 0),
11993 				      build_int_cst (integer_type_node, pow2));
11994 	    }
11995 	}
11996 
11997       /* Fall thru */
11998 
11999     case FLOOR_DIV_EXPR:
12000       /* Simplify A / (B << N) where A and B are positive and B is
12001 	 a power of 2, to A >> (N + log2(B)).  */
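      /* E.g. A / (4 << N) becomes A >> (N + 2) when A is unsigned or
	 known to be nonnegative.  */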
12002       strict_overflow_p = false;
12003       if (TREE_CODE (arg1) == LSHIFT_EXPR
12004 	  && (TYPE_UNSIGNED (type)
12005 	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12006 	{
12007 	  tree sval = TREE_OPERAND (arg1, 0);
12008 	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12009 	    {
12010 	      tree sh_cnt = TREE_OPERAND (arg1, 1);
12011 	      unsigned long pow2;
12012 
12013 	      if (TREE_INT_CST_LOW (sval))
12014 		pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12015 	      else
12016 		pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12017 		       + HOST_BITS_PER_WIDE_INT;
12018 
12019 	      if (strict_overflow_p)
12020 		fold_overflow_warning (("assuming signed overflow does not "
12021 					"occur when simplifying A / (B << N)"),
12022 				       WARN_STRICT_OVERFLOW_MISC);
12023 
12024 	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12025 					sh_cnt,
12026 					build_int_cst (TREE_TYPE (sh_cnt),
12027 						       pow2));
12028 	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
12029 				  fold_convert_loc (loc, type, arg0), sh_cnt);
12030 	    }
12031 	}
12032 
12033       /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12034 	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
12035       if (INTEGRAL_TYPE_P (type)
12036 	  && TYPE_UNSIGNED (type)
12037 	  && code == FLOOR_DIV_EXPR)
12038 	return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12039 
12040       /* Fall thru */
12041 
12042     case ROUND_DIV_EXPR:
12043     case CEIL_DIV_EXPR:
12044     case EXACT_DIV_EXPR:
12045       if (integer_onep (arg1))
12046 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12047       if (integer_zerop (arg1))
12048 	return NULL_TREE;
12049       /* X / -1 is -X.  */
12050       if (!TYPE_UNSIGNED (type)
12051 	  && TREE_CODE (arg1) == INTEGER_CST
12052 	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12053 	  && TREE_INT_CST_HIGH (arg1) == -1)
12054 	return fold_convert_loc (loc, type, negate_expr (arg0));
12055 
12056       /* Convert -A / -B to A / B when the type is signed and overflow is
12057 	 undefined.  */
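      /* For instance, "-x / -y" becomes "x / y".  The overflow guard
	 matters because with wrapping semantics -INT_MIN == INT_MIN, so
	 distributing the negation could change the result.  */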
12058       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12059 	  && TREE_CODE (arg0) == NEGATE_EXPR
12060 	  && negate_expr_p (arg1))
12061 	{
12062 	  if (INTEGRAL_TYPE_P (type))
12063 	    fold_overflow_warning (("assuming signed overflow does not occur "
12064 				    "when distributing negation across "
12065 				    "division"),
12066 				   WARN_STRICT_OVERFLOW_MISC);
12067 	  return fold_build2_loc (loc, code, type,
12068 			      fold_convert_loc (loc, type,
12069 						TREE_OPERAND (arg0, 0)),
12070 			      fold_convert_loc (loc, type,
12071 						negate_expr (arg1)));
12072 	}
12073       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12074 	  && TREE_CODE (arg1) == NEGATE_EXPR
12075 	  && negate_expr_p (arg0))
12076 	{
12077 	  if (INTEGRAL_TYPE_P (type))
12078 	    fold_overflow_warning (("assuming signed overflow does not occur "
12079 				    "when distributing negation across "
12080 				    "division"),
12081 				   WARN_STRICT_OVERFLOW_MISC);
12082 	  return fold_build2_loc (loc, code, type,
12083 			      fold_convert_loc (loc, type,
12084 						negate_expr (arg0)),
12085 			      fold_convert_loc (loc, type,
12086 						TREE_OPERAND (arg1, 0)));
12087 	}
12088 
12089       /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12090 	 operation, EXACT_DIV_EXPR.
12091 
12092 	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12093 	 At one time others generated faster code, but it's not clear if they
12094 	 do after the last round of changes to the DIV code in expmed.c.  */
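      /* For instance, a FLOOR_DIV of "8 * n" by 4 is known to divide
	 exactly, so it can use the cheaper EXACT_DIV_EXPR.  */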
12095       if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12096 	  && multiple_of_p (type, arg0, arg1))
12097 	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12098 
12099       strict_overflow_p = false;
12100       if (TREE_CODE (arg1) == INTEGER_CST
12101 	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12102 					 &strict_overflow_p)))
12103 	{
12104 	  if (strict_overflow_p)
12105 	    fold_overflow_warning (("assuming signed overflow does not occur "
12106 				    "when simplifying division"),
12107 				   WARN_STRICT_OVERFLOW_MISC);
12108 	  return fold_convert_loc (loc, type, tem);
12109 	}
12110 
12111       return NULL_TREE;
12112 
12113     case CEIL_MOD_EXPR:
12114     case FLOOR_MOD_EXPR:
12115     case ROUND_MOD_EXPR:
12116     case TRUNC_MOD_EXPR:
12117       /* X % 1 is always zero, but be sure to preserve any side
12118 	 effects in X.  */
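      /* For instance, "f () % 1" must still call f; omit_one_operand_loc
	 yields 0 while keeping arg0 for its side effects.  */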
12119       if (integer_onep (arg1))
12120 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12121 
12122 	      /* For X % 0, return X % 0 unchanged so that we can get the
12123 		 proper warnings and errors.  */
12124       if (integer_zerop (arg1))
12125 	return NULL_TREE;
12126 
12127       /* 0 % X is always zero, but be sure to preserve any side
12128 	 effects in X.  Place this after checking for X == 0.  */
12129       if (integer_zerop (arg0))
12130 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12131 
12132       /* X % -1 is zero.  */
12133       if (!TYPE_UNSIGNED (type)
12134 	  && TREE_CODE (arg1) == INTEGER_CST
12135 	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12136 	  && TREE_INT_CST_HIGH (arg1) == -1)
12137 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12138 
12139       /* X % -C is the same as X % C.  */
12140       if (code == TRUNC_MOD_EXPR
12141 	  && !TYPE_UNSIGNED (type)
12142 	  && TREE_CODE (arg1) == INTEGER_CST
12143 	  && !TREE_OVERFLOW (arg1)
12144 	  && TREE_INT_CST_HIGH (arg1) < 0
12145 	  && !TYPE_OVERFLOW_TRAPS (type)
12146 	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
12147 	  && !sign_bit_p (arg1, arg1))
12148 	return fold_build2_loc (loc, code, type,
12149 			    fold_convert_loc (loc, type, arg0),
12150 			    fold_convert_loc (loc, type,
12151 					      negate_expr (arg1)));
12152 
12153       /* X % -Y is the same as X % Y.  */
12154       if (code == TRUNC_MOD_EXPR
12155 	  && !TYPE_UNSIGNED (type)
12156 	  && TREE_CODE (arg1) == NEGATE_EXPR
12157 	  && !TYPE_OVERFLOW_TRAPS (type))
12158 	return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12159 			    fold_convert_loc (loc, type,
12160 					      TREE_OPERAND (arg1, 0)));
12161 
12162       strict_overflow_p = false;
12163       if (TREE_CODE (arg1) == INTEGER_CST
12164 	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12165 					 &strict_overflow_p)))
12166 	{
12167 	  if (strict_overflow_p)
12168 	    fold_overflow_warning (("assuming signed overflow does not occur "
12169 				    "when simplifying modulus"),
12170 				   WARN_STRICT_OVERFLOW_MISC);
12171 	  return fold_convert_loc (loc, type, tem);
12172 	}
12173 
12174       /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12175          i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
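      /* For instance, unsigned "x % 8" folds to "x & 7", and
	 "x % (2 << n)" to "x & ((2 << n) - 1)".  */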
12176       if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12177 	  && (TYPE_UNSIGNED (type)
12178 	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12179 	{
12180 	  tree c = arg1;
12181 	  /* Also optimize A % (C << N)  where C is a power of 2,
12182 	     to A & ((C << N) - 1).  */
12183 	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
12184 	    c = TREE_OPERAND (arg1, 0);
12185 
12186 	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12187 	    {
12188 	      tree mask
12189 		= fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12190 				   build_int_cst (TREE_TYPE (arg1), 1));
12191 	      if (strict_overflow_p)
12192 		fold_overflow_warning (("assuming signed overflow does not "
12193 					"occur when simplifying "
12194 					"X % (power of two)"),
12195 				       WARN_STRICT_OVERFLOW_MISC);
12196 	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
12197 				      fold_convert_loc (loc, type, arg0),
12198 				      fold_convert_loc (loc, type, mask));
12199 	    }
12200 	}
12201 
12202       return NULL_TREE;
12203 
12204     case LROTATE_EXPR:
12205     case RROTATE_EXPR:
12206       if (integer_all_onesp (arg0))
12207 	return omit_one_operand_loc (loc, type, arg0, arg1);
12208       goto shift;
12209 
12210     case RSHIFT_EXPR:
12211       /* Optimize -1 >> x for arithmetic right shifts.  */
12212       if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12213 	  && tree_expr_nonnegative_p (arg1))
12214 	return omit_one_operand_loc (loc, type, arg0, arg1);
12215       /* ... fall through ...  */
12216 
12217     case LSHIFT_EXPR:
12218     shift:
12219       if (integer_zerop (arg1))
12220 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12221       if (integer_zerop (arg0))
12222 	return omit_one_operand_loc (loc, type, arg0, arg1);
12223 
12224 	      /* Since a negative shift count is not well-defined,
12225 	 don't try to compute it in the compiler.  */
12226       if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12227 	return NULL_TREE;
12228 
12229       /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
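      /* For instance, "(x << 3) << 5" becomes "x << 8"; the case where
	 c1 + c2 reaches the type precision is handled below.  */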
12230       if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12231 	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12232 	  && host_integerp (TREE_OPERAND (arg0, 1), false)
12233 	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12234 	{
12235 	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12236 			       + TREE_INT_CST_LOW (arg1));
12237 
12238 	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12239 	     being well defined.  */
12240 	  if (low >= TYPE_PRECISION (type))
12241 	    {
12242 	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12243 	        low = low % TYPE_PRECISION (type);
12244 	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12245 		return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12246 					 TREE_OPERAND (arg0, 0));
12247 	      else
12248 		low = TYPE_PRECISION (type) - 1;
12249 	    }
12250 
12251 	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12252 			      build_int_cst (type, low));
12253 	}
12254 
12255       /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12256          into x & ((unsigned)-1 >> c) for unsigned types.  */
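      /* For instance, "(x >> 4) << 4" becomes "x & -16", clearing the
	 four low bits with a single AND.  */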
12257       if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12258            || (TYPE_UNSIGNED (type)
12259 	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12260 	  && host_integerp (arg1, false)
12261 	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12262 	  && host_integerp (TREE_OPERAND (arg0, 1), false)
12263 	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12264 	{
12265 	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12266 	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12267 	  tree lshift;
12268 	  tree arg00;
12269 
12270 	  if (low0 == low1)
12271 	    {
12272 	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12273 
12274 	      lshift = build_int_cst (type, -1);
12275 	      lshift = int_const_binop (code, lshift, arg1);
12276 
12277 	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12278 	    }
12279 	}
12280 
12281       /* Rewrite an LROTATE_EXPR by a constant into an
12282 	 RROTATE_EXPR by a new constant.  */
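      /* For instance, rotating a 32-bit value left by 8 is rewritten as
	 rotating it right by 32 - 8 == 24.  */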
12283       if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12284 	{
12285 	  tree tem = build_int_cst (TREE_TYPE (arg1),
12286 				    TYPE_PRECISION (type));
12287 	  tem = const_binop (MINUS_EXPR, tem, arg1);
12288 	  return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12289 	}
12290 
12291       /* If we have a rotate of a bit operation with the rotate count and
12292 	 the second operand of the bit operation both constant,
12293 	 permute the two operations.  */
12294       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12295 	  && (TREE_CODE (arg0) == BIT_AND_EXPR
12296 	      || TREE_CODE (arg0) == BIT_IOR_EXPR
12297 	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
12298 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12299 	return fold_build2_loc (loc, TREE_CODE (arg0), type,
12300 			    fold_build2_loc (loc, code, type,
12301 					 TREE_OPERAND (arg0, 0), arg1),
12302 			    fold_build2_loc (loc, code, type,
12303 					 TREE_OPERAND (arg0, 1), arg1));
12304 
12305       /* Two consecutive rotates adding up to the precision of the
12306 	 type can be ignored.  */
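      /* For instance, a 32-bit value rotated right by 10 and then by 22
	 is unchanged.  */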
12307       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12308 	  && TREE_CODE (arg0) == RROTATE_EXPR
12309 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12310 	  && TREE_INT_CST_HIGH (arg1) == 0
12311 	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12312 	  && ((TREE_INT_CST_LOW (arg1)
12313 	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12314 	      == (unsigned int) TYPE_PRECISION (type)))
12315 	return TREE_OPERAND (arg0, 0);
12316 
12317       /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12318 	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12319 	 if the latter can be further optimized.  */
12320       if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12321 	  && TREE_CODE (arg0) == BIT_AND_EXPR
12322 	  && TREE_CODE (arg1) == INTEGER_CST
12323 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12324 	{
12325 	  tree mask = fold_build2_loc (loc, code, type,
12326 				   fold_convert_loc (loc, type,
12327 						     TREE_OPERAND (arg0, 1)),
12328 				   arg1);
12329 	  tree shift = fold_build2_loc (loc, code, type,
12330 				    fold_convert_loc (loc, type,
12331 						      TREE_OPERAND (arg0, 0)),
12332 				    arg1);
12333 	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12334 	  if (tem)
12335 	    return tem;
12336 	}
12337 
12338       return NULL_TREE;
12339 
12340     case MIN_EXPR:
12341       if (operand_equal_p (arg0, arg1, 0))
12342 	return omit_one_operand_loc (loc, type, arg0, arg1);
12343       if (INTEGRAL_TYPE_P (type)
12344 	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12345 	return omit_one_operand_loc (loc, type, arg1, arg0);
12346       tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12347       if (tem)
12348 	return tem;
12349       goto associate;
12350 
12351     case MAX_EXPR:
12352       if (operand_equal_p (arg0, arg1, 0))
12353 	return omit_one_operand_loc (loc, type, arg0, arg1);
12354       if (INTEGRAL_TYPE_P (type)
12355 	  && TYPE_MAX_VALUE (type)
12356 	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12357 	return omit_one_operand_loc (loc, type, arg1, arg0);
12358       tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12359       if (tem)
12360 	return tem;
12361       goto associate;
12362 
12363     case TRUTH_ANDIF_EXPR:
12364       /* Note that the operands of this must be ints
12365 	 and their values must be 0 or 1.
12366 	 ("true" is a fixed value perhaps depending on the language.)  */
12367       /* If first arg is constant zero, return it.  */
12368       if (integer_zerop (arg0))
12369 	return fold_convert_loc (loc, type, arg0);
12370     case TRUTH_AND_EXPR:
12371       /* If either arg is constant true, drop it.  */
12372       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12373 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12374       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12375 	  /* Preserve sequence points.  */
12376 	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12377 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12378       /* If second arg is constant zero, result is zero, but first arg
12379 	 must be evaluated.  */
12380       if (integer_zerop (arg1))
12381 	return omit_one_operand_loc (loc, type, arg1, arg0);
12382       /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12383 	 case will be handled here.  */
12384       if (integer_zerop (arg0))
12385 	return omit_one_operand_loc (loc, type, arg0, arg1);
12386 
12387       /* !X && X is always false.  */
12388       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12389 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12390 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12391       /* X && !X is always false.  */
12392       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12393 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12394 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12395 
12396       /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
12397 	 means A >= Y && A != MAX, but in this case we know that
12398 	 A < X <= MAX.  */
12399 
12400       if (!TREE_SIDE_EFFECTS (arg0)
12401 	  && !TREE_SIDE_EFFECTS (arg1))
12402 	{
12403 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12404 	  if (tem && !operand_equal_p (tem, arg0, 0))
12405 	    return fold_build2_loc (loc, code, type, tem, arg1);
12406 
12407 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12408 	  if (tem && !operand_equal_p (tem, arg1, 0))
12409 	    return fold_build2_loc (loc, code, type, arg0, tem);
12410 	}
12411 
12412       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12413           != NULL_TREE)
12414         return tem;
12415 
12416       return NULL_TREE;
12417 
12418     case TRUTH_ORIF_EXPR:
12419       /* Note that the operands of this must be ints
12420 	 and their values must be 0 or true.
12421 	 ("true" is a fixed value perhaps depending on the language.)  */
12422       /* If first arg is constant true, return it.  */
12423       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12424 	return fold_convert_loc (loc, type, arg0);
12425     case TRUTH_OR_EXPR:
12426       /* If either arg is constant zero, drop it.  */
12427       if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12428 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12429       if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12430 	  /* Preserve sequence points.  */
12431 	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12432 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12433       /* If second arg is constant true, result is true, but we must
12434 	 evaluate first arg.  */
12435       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12436 	return omit_one_operand_loc (loc, type, arg1, arg0);
12437       /* Likewise for first arg, but note this only occurs here for
12438 	 TRUTH_OR_EXPR.  */
12439       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12440 	return omit_one_operand_loc (loc, type, arg0, arg1);
12441 
12442       /* !X || X is always true.  */
12443       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12444 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12445 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12446       /* X || !X is always true.  */
12447       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12448 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12449 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12450 
12451       /* (X && !Y) || (!X && Y) is X ^ Y */
12452       if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12453 	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12454         {
12455 	  tree a0, a1, l0, l1, n0, n1;
12456 
12457 	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12458 	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12459 
12460 	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12461 	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12462 
12463 	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12464 	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12465 
12466 	  if ((operand_equal_p (n0, a0, 0)
12467 	       && operand_equal_p (n1, a1, 0))
12468 	      || (operand_equal_p (n0, a1, 0)
12469 		  && operand_equal_p (n1, a0, 0)))
12470 	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12471 	}
12472 
12473       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12474           != NULL_TREE)
12475         return tem;
12476 
12477       return NULL_TREE;
12478 
12479     case TRUTH_XOR_EXPR:
12480       /* If the second arg is constant zero, drop it.  */
12481       if (integer_zerop (arg1))
12482 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12483       /* If the second arg is constant true, this is a logical inversion.  */
12484       if (integer_onep (arg1))
12485 	{
12486 	  /* Only call invert_truthvalue if operand is a truth value.  */
12487 	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12488 	    tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12489 	  else
12490 	    tem = invert_truthvalue_loc (loc, arg0);
12491 	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12492 	}
12493       /* Identical arguments cancel to zero.  */
12494       if (operand_equal_p (arg0, arg1, 0))
12495 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12496 
12497       /* !X ^ X is always true.  */
12498       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12499 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12500 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12501 
12502       /* X ^ !X is always true.  */
12503       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12504 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12505 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12506 
12507       return NULL_TREE;
12508 
12509     case EQ_EXPR:
12510     case NE_EXPR:
12511       STRIP_NOPS (arg0);
12512       STRIP_NOPS (arg1);
12513 
12514       tem = fold_comparison (loc, code, type, op0, op1);
12515       if (tem != NULL_TREE)
12516 	return tem;
12517 
12518       /* bool_var != 0 becomes bool_var. */
12519       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12520           && code == NE_EXPR)
12521         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12522 
12523       /* bool_var == 1 becomes bool_var. */
12524       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12525           && code == EQ_EXPR)
12526         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12527 
12528       /* bool_var != 1 becomes !bool_var. */
12529       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12530           && code == NE_EXPR)
12531         return fold_convert_loc (loc, type,
12532 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12533 						  TREE_TYPE (arg0), arg0));
12534 
12535       /* bool_var == 0 becomes !bool_var. */
12536       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12537           && code == EQ_EXPR)
12538         return fold_convert_loc (loc, type,
12539 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12540 						  TREE_TYPE (arg0), arg0));
12541 
12542       /* !exp != 0 becomes !exp */
12543       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12544 	  && code == NE_EXPR)
12545         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12546 
12547       /* If this is an equality comparison of the address of two non-weak,
12548 	 unaliased symbols neither of which are extern (since we do not
12549 	 have access to attributes for externs), then we know the result.  */
12550       if (TREE_CODE (arg0) == ADDR_EXPR
12551 	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12552 	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12553 	  && ! lookup_attribute ("alias",
12554 				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12555 	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12556 	  && TREE_CODE (arg1) == ADDR_EXPR
12557 	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12558 	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12559 	  && ! lookup_attribute ("alias",
12560 				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12561 	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12562 	{
12563 	  /* We know that we're looking at the address of two
12564 	     non-weak, unaliased, static _DECL nodes.
12565 
12566 	     It is both wasteful and incorrect to call operand_equal_p
12567 	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
12568 	     all we need to do is test pointer equality for the arguments
12569 	     to the two ADDR_EXPR nodes.  It is incorrect to use
12570 	     operand_equal_p as that function is NOT equivalent to a
12571 	     C equality test.  It can in fact return false for two
12572 	     objects which would test as equal using the C equality
12573 	     operator.  */
12574 	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12575 	  return constant_boolean_node (equal
12576 				        ? code == EQ_EXPR : code != EQ_EXPR,
12577 				        type);
12578 	}
12579 
12580       /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12581 	 a MINUS_EXPR of a constant, we can convert it into a comparison with
12582 	 a revised constant as long as no overflow occurs.  */
12583       if (TREE_CODE (arg1) == INTEGER_CST
12584 	  && (TREE_CODE (arg0) == PLUS_EXPR
12585 	      || TREE_CODE (arg0) == MINUS_EXPR)
12586 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12587 	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12588 				      ? MINUS_EXPR : PLUS_EXPR,
12589 				      fold_convert_loc (loc, TREE_TYPE (arg0),
12590 							arg1),
12591 				      TREE_OPERAND (arg0, 1)))
12592 	  && !TREE_OVERFLOW (tem))
12593 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12594 
12595       /* Similarly for a NEGATE_EXPR.  */
12596       if (TREE_CODE (arg0) == NEGATE_EXPR
12597 	  && TREE_CODE (arg1) == INTEGER_CST
12598 	  && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12599 							arg1)))
12600 	  && TREE_CODE (tem) == INTEGER_CST
12601 	  && !TREE_OVERFLOW (tem))
12602 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12603 
12604       /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
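      /* For instance, "(x ^ 5) == 3" becomes "x == 6", since 5 ^ 3 == 6.  */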
12605       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12606 	  && TREE_CODE (arg1) == INTEGER_CST
12607 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12608 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12609 			    fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12610 					 fold_convert_loc (loc,
12611 							   TREE_TYPE (arg0),
12612 							   arg1),
12613 					 TREE_OPERAND (arg0, 1)));
12614 
12615       /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
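      /* For instance, "(x + y) == x" becomes "y == 0".  */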
12616       if ((TREE_CODE (arg0) == PLUS_EXPR
12617 	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12618 	   || TREE_CODE (arg0) == MINUS_EXPR)
12619 	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12620 									0)),
12621 			      arg1, 0)
12622 	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12623 	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
12624 	{
12625 	  tree val = TREE_OPERAND (arg0, 1);
12626 	  return omit_two_operands_loc (loc, type,
12627 				    fold_build2_loc (loc, code, type,
12628 						 val,
12629 						 build_int_cst (TREE_TYPE (val),
12630 								0)),
12631 				    TREE_OPERAND (arg0, 0), arg1);
12632 	}
12633 
12634       /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
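      /* For instance, "(7 - x) == x" would need 2 * x == 7, which has no
	 integer solution, so it folds to false (and != to true).  */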
12635       if (TREE_CODE (arg0) == MINUS_EXPR
12636 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12637 	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12638 									1)),
12639 			      arg1, 0)
12640 	  && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12641 	{
12642 	  return omit_two_operands_loc (loc, type,
12643 				    code == NE_EXPR
12644 				    ? boolean_true_node : boolean_false_node,
12645 				    TREE_OPERAND (arg0, 1), arg1);
12646 	}
12647 
12648       /* If we have X - Y == 0, we can convert that to X == Y and similarly
12649 	 for !=.  Don't do this for ordered comparisons due to overflow.  */
12650       if (TREE_CODE (arg0) == MINUS_EXPR
12651 	  && integer_zerop (arg1))
12652 	return fold_build2_loc (loc, code, type,
12653 			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12654 
12655       /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
12656       if (TREE_CODE (arg0) == ABS_EXPR
12657 	  && (integer_zerop (arg1) || real_zerop (arg1)))
12658 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12659 
12660       /* If this is an EQ or NE comparison with zero and ARG0 is
12661 	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
12662 	 two operations, but the latter can be done in one less insn
12663 	 on machines that have only two-operand insns or on which a
12664 	 constant cannot be the first operand.  */
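      /* For instance, "((1 << n) & flags) != 0" becomes
	 "((flags >> n) & 1) != 0".  */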
12665       if (TREE_CODE (arg0) == BIT_AND_EXPR
12666 	  && integer_zerop (arg1))
12667 	{
12668 	  tree arg00 = TREE_OPERAND (arg0, 0);
12669 	  tree arg01 = TREE_OPERAND (arg0, 1);
12670 	  if (TREE_CODE (arg00) == LSHIFT_EXPR
12671 	      && integer_onep (TREE_OPERAND (arg00, 0)))
12672 	    {
12673 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12674 				      arg01, TREE_OPERAND (arg00, 1));
12675 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12676 				 build_int_cst (TREE_TYPE (arg0), 1));
12677 	      return fold_build2_loc (loc, code, type,
12678 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12679 				  arg1);
12680 	    }
12681 	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
12682 		   && integer_onep (TREE_OPERAND (arg01, 0)))
12683 	    {
12684 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12685 				      arg00, TREE_OPERAND (arg01, 1));
12686 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12687 				 build_int_cst (TREE_TYPE (arg0), 1));
12688 	      return fold_build2_loc (loc, code, type,
12689 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12690 				  arg1);
12691 	    }
12692 	}
12693 
12694       /* If this is an NE or EQ comparison of zero against the result of a
12695 	 signed MOD operation whose second operand is a power of 2, make
12696 	 the MOD operation unsigned since it is simpler and equivalent.  */
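      /* For instance, "x % 4 == 0" with signed x becomes
	 "(unsigned) x % 4 == 0", which the power-of-two transformation
	 above can then reduce to a mask test.  */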
12697       if (integer_zerop (arg1)
12698 	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12699 	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12700 	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
12701 	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12702 	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12703 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
12704 	{
12705 	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12706 	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12707 				     fold_convert_loc (loc, newtype,
12708 						       TREE_OPERAND (arg0, 0)),
12709 				     fold_convert_loc (loc, newtype,
12710 						       TREE_OPERAND (arg0, 1)));
12711 
12712 	  return fold_build2_loc (loc, code, type, newmod,
12713 			      fold_convert_loc (loc, newtype, arg1));
12714 	}
12715 
12716       /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12717 	 C1 is a valid shift constant, and C2 is a power of two, i.e.
12718 	 a single bit.  */
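      /* For instance, "((x >> 3) & 4) != 0" becomes "(x & 32) != 0",
	 since 4 << 3 == 32 still fits in the precision.  */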
12719       if (TREE_CODE (arg0) == BIT_AND_EXPR
12720 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12721 	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12722 	     == INTEGER_CST
12723 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12724 	  && integer_zerop (arg1))
12725 	{
12726 	  tree itype = TREE_TYPE (arg0);
12727 	  unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12728 	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12729 
12730 	  /* Check for a valid shift count.  */
12731 	  if (TREE_INT_CST_HIGH (arg001) == 0
12732 	      && TREE_INT_CST_LOW (arg001) < prec)
12733 	    {
12734 	      tree arg01 = TREE_OPERAND (arg0, 1);
12735 	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12736 	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12737 	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12738 		 can be rewritten as (X & (C2 << C1)) != 0.  */
12739 	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12740 		{
12741 		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12742 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12743 		  return fold_build2_loc (loc, code, type, tem,
12744 					  fold_convert_loc (loc, itype, arg1));
12745 		}
12746 	      /* Otherwise, for signed (arithmetic) shifts,
12747 		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12748 		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
12749 	      else if (!TYPE_UNSIGNED (itype))
12750 		return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12751 				    arg000, build_int_cst (itype, 0));
12752 	      /* Otherwise, for unsigned (logical) shifts,
12753 		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12754 		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
12755 	      else
12756 		return omit_one_operand_loc (loc, type,
12757 					 code == EQ_EXPR ? integer_one_node
12758 							 : integer_zero_node,
12759 					 arg000);
12760 	    }
12761 	}
12762 
12763       /* If we have (A & C) == C where C is a power of 2, convert this into
12764 	 (A & C) != 0.  Similarly for NE_EXPR.  */
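      /* For instance, "(a & 8) == 8" becomes "(a & 8) != 0".  */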
12765       if (TREE_CODE (arg0) == BIT_AND_EXPR
12766 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12767 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12768 	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12769 			    arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12770 						    integer_zero_node));
12771 
12772       /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12773 	 bit, then fold the expression into A < 0 or A >= 0.  */
12774       tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12775       if (tem)
12776 	return tem;
12777 
12778       /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12779 	 Similarly for NE_EXPR.  */
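      /* For instance, "(a & 3) == 4" is always false, since 4 has a bit
	 set outside the mask 3.  */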
12780       if (TREE_CODE (arg0) == BIT_AND_EXPR
12781 	  && TREE_CODE (arg1) == INTEGER_CST
12782 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12783 	{
12784 	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12785 				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
12786 				   TREE_OPERAND (arg0, 1));
12787 	  tree dandnotc
12788 	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12789 			       fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12790 			       notc);
12791 	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12792 	  if (integer_nonzerop (dandnotc))
12793 	    return omit_one_operand_loc (loc, type, rslt, arg0);
12794 	}
12795 
12796       /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12797 	 Similarly for NE_EXPR.  */
12798       if (TREE_CODE (arg0) == BIT_IOR_EXPR
12799 	  && TREE_CODE (arg1) == INTEGER_CST
12800 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12801 	{
12802 	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12803 	  tree candnotd
12804 	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12805 			       TREE_OPERAND (arg0, 1),
12806 			       fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12807 	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12808 	  if (integer_nonzerop (candnotd))
12809 	    return omit_one_operand_loc (loc, type, rslt, arg0);
12810 	}
12811 
12812       /* If this is a comparison of a field, we may be able to simplify it.  */
12813       if ((TREE_CODE (arg0) == COMPONENT_REF
12814 	   || TREE_CODE (arg0) == BIT_FIELD_REF)
12815 	  /* Handle the constant case even without -O
12816 	     to make sure the warnings are given.  */
12817 	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12818 	{
12819 	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12820 	  if (t1)
12821 	    return t1;
12822 	}
12823 
12824       /* Optimize comparisons of strlen vs zero to a compare of the
12825 	 first character of the string vs zero.  To wit,
12826 		strlen(ptr) == 0   =>  *ptr == 0
12827 		strlen(ptr) != 0   =>  *ptr != 0
12828 	 Other cases should reduce to one of these two (or a constant)
12829 	 due to the return value of strlen being unsigned.  */
12830       if (TREE_CODE (arg0) == CALL_EXPR
12831 	  && integer_zerop (arg1))
12832 	{
12833 	  tree fndecl = get_callee_fndecl (arg0);
12834 
12835 	  if (fndecl
12836 	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12837 	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12838 	      && call_expr_nargs (arg0) == 1
12839 	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12840 	    {
12841 	      tree iref = build_fold_indirect_ref_loc (loc,
12842 						   CALL_EXPR_ARG (arg0, 0));
12843 	      return fold_build2_loc (loc, code, type, iref,
12844 				  build_int_cst (TREE_TYPE (iref), 0));
12845 	    }
12846 	}
12847 
12848       /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12849 	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
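      /* For instance, with 32-bit x, "(x >> 31) != 0" becomes "x < 0",
	 converting to the signed type first if x is unsigned.  */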
12850       if (TREE_CODE (arg0) == RSHIFT_EXPR
12851 	  && integer_zerop (arg1)
12852 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12853 	{
12854 	  tree arg00 = TREE_OPERAND (arg0, 0);
12855 	  tree arg01 = TREE_OPERAND (arg0, 1);
12856 	  tree itype = TREE_TYPE (arg00);
12857 	  if (TREE_INT_CST_HIGH (arg01) == 0
12858 	      && TREE_INT_CST_LOW (arg01)
12859 		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12860 	    {
12861 	      if (TYPE_UNSIGNED (itype))
12862 		{
12863 		  itype = signed_type_for (itype);
12864 		  arg00 = fold_convert_loc (loc, itype, arg00);
12865 		}
12866 	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12867 				  type, arg00, build_int_cst (itype, 0));
12868 	    }
12869 	}
12870 
12871       /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
12872       if (integer_zerop (arg1)
12873 	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
12874 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12875 			    TREE_OPERAND (arg0, 1));
12876 
12877       /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
12878       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12879 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12880 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12881 				build_zero_cst (TREE_TYPE (arg0)));
12882       /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
12883       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12884 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12885 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12886 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12887 				build_zero_cst (TREE_TYPE (arg0)));
12888 
12889       /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
12890       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12891 	  && TREE_CODE (arg1) == INTEGER_CST
12892 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12893 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12894 			    fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12895 					 TREE_OPERAND (arg0, 1), arg1));
12896 
12897       /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12898 	 (X & C) == 0 when C is a single bit.  */
12899       if (TREE_CODE (arg0) == BIT_AND_EXPR
12900 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12901 	  && integer_zerop (arg1)
12902 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
12903 	{
12904 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12905 				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12906 				 TREE_OPERAND (arg0, 1));
12907 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12908 				  type, tem,
12909 				  fold_convert_loc (loc, TREE_TYPE (arg0),
12910 						    arg1));
12911 	}
12912 
12913       /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12914 	 constant C is a power of two, i.e. a single bit.  */
12915       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12916 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12917 	  && integer_zerop (arg1)
12918 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12919 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12920 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12921 	{
12922 	  tree arg00 = TREE_OPERAND (arg0, 0);
12923 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12924 			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
12925 	}
12926 
12927       /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12928 	 when C is a power of two, i.e. a single bit.  */
12929       if (TREE_CODE (arg0) == BIT_AND_EXPR
12930 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12931 	  && integer_zerop (arg1)
12932 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12933 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12934 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12935 	{
12936 	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12937 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12938 			     arg000, TREE_OPERAND (arg0, 1));
12939 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12940 			      tem, build_int_cst (TREE_TYPE (tem), 0));
12941 	}
12942 
12943       if (integer_zerop (arg1)
12944 	  && tree_expr_nonzero_p (arg0))
12945         {
12946 	  tree res = constant_boolean_node (code == NE_EXPR, type);
12947 	  return omit_one_operand_loc (loc, type, res, arg0);
12948 	}
12949 
12950       /* Fold -X op -Y as X op Y, where op is eq/ne.  */
12951       if (TREE_CODE (arg0) == NEGATE_EXPR
12952           && TREE_CODE (arg1) == NEGATE_EXPR)
12953 	return fold_build2_loc (loc, code, type,
12954 				TREE_OPERAND (arg0, 0),
12955 				fold_convert_loc (loc, TREE_TYPE (arg0),
12956 						  TREE_OPERAND (arg1, 0)));
12957 
12958 	      /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
12959       if (TREE_CODE (arg0) == BIT_AND_EXPR
12960 	  && TREE_CODE (arg1) == BIT_AND_EXPR)
12961 	{
12962 	  tree arg00 = TREE_OPERAND (arg0, 0);
12963 	  tree arg01 = TREE_OPERAND (arg0, 1);
12964 	  tree arg10 = TREE_OPERAND (arg1, 0);
12965 	  tree arg11 = TREE_OPERAND (arg1, 1);
12966 	  tree itype = TREE_TYPE (arg0);
12967 
12968 	  if (operand_equal_p (arg01, arg11, 0))
12969 	    return fold_build2_loc (loc, code, type,
12970 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
12971 					     fold_build2_loc (loc,
12972 							  BIT_XOR_EXPR, itype,
12973 							  arg00, arg10),
12974 					     arg01),
12975 				build_zero_cst (itype));
12976 
12977 	  if (operand_equal_p (arg01, arg10, 0))
12978 	    return fold_build2_loc (loc, code, type,
12979 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
12980 					     fold_build2_loc (loc,
12981 							  BIT_XOR_EXPR, itype,
12982 							  arg00, arg11),
12983 					     arg01),
12984 				build_zero_cst (itype));
12985 
12986 	  if (operand_equal_p (arg00, arg11, 0))
12987 	    return fold_build2_loc (loc, code, type,
12988 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
12989 					     fold_build2_loc (loc,
12990 							  BIT_XOR_EXPR, itype,
12991 							  arg01, arg10),
12992 					     arg00),
12993 				build_zero_cst (itype));
12994 
12995 	  if (operand_equal_p (arg00, arg10, 0))
12996 	    return fold_build2_loc (loc, code, type,
12997 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
12998 					     fold_build2_loc (loc,
12999 							  BIT_XOR_EXPR, itype,
13000 							  arg01, arg11),
13001 					     arg00),
13002 				build_zero_cst (itype));
13003 	}
13004 
13005       if (TREE_CODE (arg0) == BIT_XOR_EXPR
13006 	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
13007 	{
13008 	  tree arg00 = TREE_OPERAND (arg0, 0);
13009 	  tree arg01 = TREE_OPERAND (arg0, 1);
13010 	  tree arg10 = TREE_OPERAND (arg1, 0);
13011 	  tree arg11 = TREE_OPERAND (arg1, 1);
13012 	  tree itype = TREE_TYPE (arg0);
13013 
13014 	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13015 	     operand_equal_p guarantees no side-effects so we don't need
13016 	     to use omit_one_operand on Z.  */
13017 	  if (operand_equal_p (arg01, arg11, 0))
13018 	    return fold_build2_loc (loc, code, type, arg00,
13019 				    fold_convert_loc (loc, TREE_TYPE (arg00),
13020 						      arg10));
13021 	  if (operand_equal_p (arg01, arg10, 0))
13022 	    return fold_build2_loc (loc, code, type, arg00,
13023 				    fold_convert_loc (loc, TREE_TYPE (arg00),
13024 						      arg11));
13025 	  if (operand_equal_p (arg00, arg11, 0))
13026 	    return fold_build2_loc (loc, code, type, arg01,
13027 				    fold_convert_loc (loc, TREE_TYPE (arg01),
13028 						      arg10));
13029 	  if (operand_equal_p (arg00, arg10, 0))
13030 	    return fold_build2_loc (loc, code, type, arg01,
13031 				    fold_convert_loc (loc, TREE_TYPE (arg01),
13032 						      arg11));
13033 
13034 	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
13035 	  if (TREE_CODE (arg01) == INTEGER_CST
13036 	      && TREE_CODE (arg11) == INTEGER_CST)
13037 	    {
13038 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13039 				     fold_convert_loc (loc, itype, arg11));
13040 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13041 	      return fold_build2_loc (loc, code, type, tem,
13042 				      fold_convert_loc (loc, itype, arg10));
13043 	    }
13044 	}
13045 
13046       /* Attempt to simplify equality/inequality comparisons of complex
13047 	 values.  Only lower the comparison if the result is known or
13048 	 can be simplified to a single scalar comparison.  */
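      /* A complex == holds iff both the real and imaginary halves are
	 equal, so once one half folds to a constant the whole test
	 reduces to the other half (or to a constant).  */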
13049       if ((TREE_CODE (arg0) == COMPLEX_EXPR
13050 	   || TREE_CODE (arg0) == COMPLEX_CST)
13051 	  && (TREE_CODE (arg1) == COMPLEX_EXPR
13052 	      || TREE_CODE (arg1) == COMPLEX_CST))
13053 	{
13054 	  tree real0, imag0, real1, imag1;
13055 	  tree rcond, icond;
13056 
13057 	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
13058 	    {
13059 	      real0 = TREE_OPERAND (arg0, 0);
13060 	      imag0 = TREE_OPERAND (arg0, 1);
13061 	    }
13062 	  else
13063 	    {
13064 	      real0 = TREE_REALPART (arg0);
13065 	      imag0 = TREE_IMAGPART (arg0);
13066 	    }
13067 
13068 	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
13069 	    {
13070 	      real1 = TREE_OPERAND (arg1, 0);
13071 	      imag1 = TREE_OPERAND (arg1, 1);
13072 	    }
13073 	  else
13074 	    {
13075 	      real1 = TREE_REALPART (arg1);
13076 	      imag1 = TREE_IMAGPART (arg1);
13077 	    }
13078 
13079 	  rcond = fold_binary_loc (loc, code, type, real0, real1);
13080 	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13081 	    {
13082 	      if (integer_zerop (rcond))
13083 		{
13084 		  if (code == EQ_EXPR)
13085 		    return omit_two_operands_loc (loc, type, boolean_false_node,
13086 					      imag0, imag1);
13087 		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13088 		}
13089 	      else
13090 		{
13091 		  if (code == NE_EXPR)
13092 		    return omit_two_operands_loc (loc, type, boolean_true_node,
13093 					      imag0, imag1);
13094 		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13095 		}
13096 	    }
13097 
13098 	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
13099 	  if (icond && TREE_CODE (icond) == INTEGER_CST)
13100 	    {
13101 	      if (integer_zerop (icond))
13102 		{
13103 		  if (code == EQ_EXPR)
13104 		    return omit_two_operands_loc (loc, type, boolean_false_node,
13105 					      real0, real1);
13106 		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13107 		}
13108 	      else
13109 		{
13110 		  if (code == NE_EXPR)
13111 		    return omit_two_operands_loc (loc, type, boolean_true_node,
13112 					      real0, real1);
13113 		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13114 		}
13115 	    }
13116 	}
13117 
13118       return NULL_TREE;
13119 
13120     case LT_EXPR:
13121     case GT_EXPR:
13122     case LE_EXPR:
13123     case GE_EXPR:
13124       tem = fold_comparison (loc, code, type, op0, op1);
13125       if (tem != NULL_TREE)
13126 	return tem;
13127 
13128       /* Transform comparisons of the form X +- C CMP X.  */
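      /* For instance, with signed x and undefined overflow, "x - 1 > x"
	 folds to false and "x + 1 > x" folds to true.  */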
13129       if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13130 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13131 	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13132 	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13133 	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13134 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13135 	{
13136 	  tree arg01 = TREE_OPERAND (arg0, 1);
13137 	  enum tree_code code0 = TREE_CODE (arg0);
13138 	  int is_positive;
13139 
13140 	  if (TREE_CODE (arg01) == REAL_CST)
13141 	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13142 	  else
13143 	    is_positive = tree_int_cst_sgn (arg01);
13144 
13145 	  /* (X - c) > X becomes false.  */
13146 	  if (code == GT_EXPR
13147 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
13148 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
13149 	    {
13150 	      if (TREE_CODE (arg01) == INTEGER_CST
13151 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13152 		fold_overflow_warning (("assuming signed overflow does not "
13153 					"occur when assuming that (X - c) > X "
13154 					"is always false"),
13155 				       WARN_STRICT_OVERFLOW_ALL);
13156 	      return constant_boolean_node (0, type);
13157 	    }
13158 
13159 	  /* Likewise (X + c) < X becomes false.  */
13160 	  if (code == LT_EXPR
13161 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
13162 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
13163 	    {
13164 	      if (TREE_CODE (arg01) == INTEGER_CST
13165 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13166 		fold_overflow_warning (("assuming signed overflow does not "
13167 					"occur when assuming that "
13168 					"(X + c) < X is always false"),
13169 				       WARN_STRICT_OVERFLOW_ALL);
13170 	      return constant_boolean_node (0, type);
13171 	    }
13172 
13173 	  /* Convert (X - c) <= X to true.  */
13174 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13175 	      && code == LE_EXPR
13176 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
13177 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
13178 	    {
13179 	      if (TREE_CODE (arg01) == INTEGER_CST
13180 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13181 		fold_overflow_warning (("assuming signed overflow does not "
13182 					"occur when assuming that "
13183 					"(X - c) <= X is always true"),
13184 				       WARN_STRICT_OVERFLOW_ALL);
13185 	      return constant_boolean_node (1, type);
13186 	    }
13187 
13188 	  /* Convert (X + c) >= X to true.  */
13189 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13190 	      && code == GE_EXPR
13191 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
13192 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
13193 	    {
13194 	      if (TREE_CODE (arg01) == INTEGER_CST
13195 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13196 		fold_overflow_warning (("assuming signed overflow does not "
13197 					"occur when assuming that "
13198 					"(X + c) >= X is always true"),
13199 				       WARN_STRICT_OVERFLOW_ALL);
13200 	      return constant_boolean_node (1, type);
13201 	    }
13202 
13203 	  if (TREE_CODE (arg01) == INTEGER_CST)
13204 	    {
13205 	      /* Convert X + c > X and X - c < X to true for integers.  */
13206 	      if (code == GT_EXPR
13207 	          && ((code0 == PLUS_EXPR && is_positive > 0)
13208 		      || (code0 == MINUS_EXPR && is_positive < 0)))
13209 		{
13210 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13211 		    fold_overflow_warning (("assuming signed overflow does "
13212 					    "not occur when assuming that "
13213 					    "(X + c) > X is always true"),
13214 					   WARN_STRICT_OVERFLOW_ALL);
13215 		  return constant_boolean_node (1, type);
13216 		}
13217 
13218 	      if (code == LT_EXPR
13219 	          && ((code0 == MINUS_EXPR && is_positive > 0)
13220 		      || (code0 == PLUS_EXPR && is_positive < 0)))
13221 		{
13222 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13223 		    fold_overflow_warning (("assuming signed overflow does "
13224 					    "not occur when assuming that "
13225 					    "(X - c) < X is always true"),
13226 					   WARN_STRICT_OVERFLOW_ALL);
13227 		  return constant_boolean_node (1, type);
13228 		}
13229 
13230 	      /* Convert X + c <= X and X - c >= X to false for integers.  */
13231 	      if (code == LE_EXPR
13232 	          && ((code0 == PLUS_EXPR && is_positive > 0)
13233 		      || (code0 == MINUS_EXPR && is_positive < 0)))
13234 		{
13235 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13236 		    fold_overflow_warning (("assuming signed overflow does "
13237 					    "not occur when assuming that "
13238 					    "(X + c) <= X is always false"),
13239 					   WARN_STRICT_OVERFLOW_ALL);
13240 		  return constant_boolean_node (0, type);
13241 		}
13242 
13243 	      if (code == GE_EXPR
13244 	          && ((code0 == MINUS_EXPR && is_positive > 0)
13245 		      || (code0 == PLUS_EXPR && is_positive < 0)))
13246 		{
13247 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13248 		    fold_overflow_warning (("assuming signed overflow does "
13249 					    "not occur when assuming that "
13250 					    "(X - c) >= X is always false"),
13251 					   WARN_STRICT_OVERFLOW_ALL);
13252 		  return constant_boolean_node (0, type);
13253 		}
13254 	    }
13255 	}
13256 
13257       /* Comparisons with the highest or lowest possible integer of
13258 	 the specified precision will have known values.  */
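      /* For instance, unsigned "u > UINT_MAX" folds to false,
	 "u <= UINT_MAX" to true, and "u > UINT_MAX - 1" becomes
	 "u == UINT_MAX".  */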
13259       {
13260 	tree arg1_type = TREE_TYPE (arg1);
13261 	unsigned int width = TYPE_PRECISION (arg1_type);
13262 
13263 	if (TREE_CODE (arg1) == INTEGER_CST
13264 	    && width <= 2 * HOST_BITS_PER_WIDE_INT
13265 	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13266 	  {
13267 	    HOST_WIDE_INT signed_max_hi;
13268 	    unsigned HOST_WIDE_INT signed_max_lo;
13269 	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13270 
13271 	    if (width <= HOST_BITS_PER_WIDE_INT)
13272 	      {
13273 		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13274 				- 1;
13275 		signed_max_hi = 0;
13276 		max_hi = 0;
13277 
13278 		if (TYPE_UNSIGNED (arg1_type))
13279 		  {
13280 		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13281 		    min_lo = 0;
13282 		    min_hi = 0;
13283 		  }
13284 		else
13285 		  {
13286 		    max_lo = signed_max_lo;
13287 		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13288 		    min_hi = -1;
13289 		  }
13290 	      }
13291 	    else
13292 	      {
13293 		width -= HOST_BITS_PER_WIDE_INT;
13294 		signed_max_lo = -1;
13295 		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13296 				- 1;
13297 		max_lo = -1;
13298 		min_lo = 0;
13299 
13300 		if (TYPE_UNSIGNED (arg1_type))
13301 		  {
13302 		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13303 		    min_hi = 0;
13304 		  }
13305 		else
13306 		  {
13307 		    max_hi = signed_max_hi;
13308 		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13309 		  }
13310 	      }
13311 
13312 	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13313 		&& TREE_INT_CST_LOW (arg1) == max_lo)
13314 	      switch (code)
13315 		{
13316 		case GT_EXPR:
13317 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13318 
13319 		case GE_EXPR:
13320 		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13321 
13322 		case LE_EXPR:
13323 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13324 
13325 		case LT_EXPR:
13326 		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13327 
13328 		/* The GE_EXPR and LT_EXPR cases above are not normally
13329 		   reached because of previous transformations.  */
13330 
13331 		default:
13332 		  break;
13333 		}
13334 	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13335 		     == max_hi
13336 		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13337 	      switch (code)
13338 		{
13339 		case GT_EXPR:
13340 		  arg1 = const_binop (PLUS_EXPR, arg1,
13341 				      build_int_cst (TREE_TYPE (arg1), 1));
13342 		  return fold_build2_loc (loc, EQ_EXPR, type,
13343 				      fold_convert_loc (loc,
13344 							TREE_TYPE (arg1), arg0),
13345 				      arg1);
13346 		case LE_EXPR:
13347 		  arg1 = const_binop (PLUS_EXPR, arg1,
13348 				      build_int_cst (TREE_TYPE (arg1), 1));
13349 		  return fold_build2_loc (loc, NE_EXPR, type,
13350 				      fold_convert_loc (loc, TREE_TYPE (arg1),
13351 							arg0),
13352 				      arg1);
13353 		default:
13354 		  break;
13355 		}
13356 	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13357 		     == min_hi
13358 		     && TREE_INT_CST_LOW (arg1) == min_lo)
13359 	      switch (code)
13360 		{
13361 		case LT_EXPR:
13362 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13363 
13364 		case LE_EXPR:
13365 		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13366 
13367 		case GE_EXPR:
13368 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13369 
13370 		case GT_EXPR:
13371 		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13372 
13373 		default:
13374 		  break;
13375 		}
13376 	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13377 		     == min_hi
13378 		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13379 	      switch (code)
13380 		{
13381 		case GE_EXPR:
13382 		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13383 		  return fold_build2_loc (loc, NE_EXPR, type,
13384 				      fold_convert_loc (loc,
13385 							TREE_TYPE (arg1), arg0),
13386 				      arg1);
13387 		case LT_EXPR:
13388 		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13389 		  return fold_build2_loc (loc, EQ_EXPR, type,
13390 				      fold_convert_loc (loc, TREE_TYPE (arg1),
13391 							arg0),
13392 				      arg1);
13393 		default:
13394 		  break;
13395 		}
13396 
13397 	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13398 		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
13399 		     && TYPE_UNSIGNED (arg1_type)
13400 		     /* We will flip the signedness of the comparison operator
13401 			associated with the mode of arg1, so the sign bit is
13402 			specified by this mode.  Check that arg1 is the signed
13403 			max associated with this sign bit.  */
13404 		     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13405 		     /* signed_type does not work on pointer types.  */
13406 		     && INTEGRAL_TYPE_P (arg1_type))
13407 	      {
13408 		/* The following case also applies to X < signed_max+1
13409 		   and X >= signed_max+1 because of previous transformations.  */
13410 		if (code == LE_EXPR || code == GT_EXPR)
13411 		  {
13412 		    tree st;
13413 		    st = signed_type_for (TREE_TYPE (arg1));
13414 		    return fold_build2_loc (loc,
13415 					code == LE_EXPR ? GE_EXPR : LT_EXPR,
13416 					type, fold_convert_loc (loc, st, arg0),
13417 					build_int_cst (st, 0));
13418 		  }
13419 	      }
13420 	  }
13421       }
13422 
13423       /* If we are comparing an ABS_EXPR with a constant, we can
13424 	 convert all the cases into explicit comparisons, but they may
13425 	 well not be faster than doing the ABS and one comparison.
13426 	 But ABS (X) <= C is a range comparison, which becomes a subtraction
13427 	 and a comparison, and is probably faster.  */
13428       if (code == LE_EXPR
13429 	  && TREE_CODE (arg1) == INTEGER_CST
13430 	  && TREE_CODE (arg0) == ABS_EXPR
13431 	  && ! TREE_SIDE_EFFECTS (arg0)
13432 	  && (0 != (tem = negate_expr (arg1)))
13433 	  && TREE_CODE (tem) == INTEGER_CST
13434 	  && !TREE_OVERFLOW (tem))
13435 	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13436 			    build2 (GE_EXPR, type,
13437 				    TREE_OPERAND (arg0, 0), tem),
13438 			    build2 (LE_EXPR, type,
13439 				    TREE_OPERAND (arg0, 0), arg1));
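      /* Illustrative example: ABS_EXPR <X> <= 3 becomes the range check
	 X >= -3 && X <= 3, built as a TRUTH_ANDIF_EXPR of the two
	 comparisons.  */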
13440 
13441       /* Convert ABS_EXPR<x> >= 0 to true.  */
13442       strict_overflow_p = false;
13443       if (code == GE_EXPR
13444 	  && (integer_zerop (arg1)
13445 	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13446 		  && real_zerop (arg1)))
13447 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13448 	{
13449 	  if (strict_overflow_p)
13450 	    fold_overflow_warning (("assuming signed overflow does not occur "
13451 				    "when simplifying comparison of "
13452 				    "absolute value and zero"),
13453 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
13454 	  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13455 	}
13456 
13457       /* Convert ABS_EXPR<x> < 0 to false.  */
13458       strict_overflow_p = false;
13459       if (code == LT_EXPR
13460 	  && (integer_zerop (arg1) || real_zerop (arg1))
13461 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13462 	{
13463 	  if (strict_overflow_p)
13464 	    fold_overflow_warning (("assuming signed overflow does not occur "
13465 				    "when simplifying comparison of "
13466 				    "absolute value and zero"),
13467 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
13468 	  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13469 	}
13470 
13471       /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13472 	 and similarly for >= into !=.  */
13473       if ((code == LT_EXPR || code == GE_EXPR)
13474 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
13475 	  && TREE_CODE (arg1) == LSHIFT_EXPR
13476 	  && integer_onep (TREE_OPERAND (arg1, 0)))
13477 	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13478 			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13479 				   TREE_OPERAND (arg1, 1)),
13480 			   build_int_cst (TREE_TYPE (arg0), 0));
13481 
13482       /* Similarly for X < (cast) (1 << Y).  But the cast can't be narrowing,
13483 	 otherwise Y might be >= # of bits in X's type and thus e.g.
13484 	 (unsigned char) (1 << Y) for Y == 15 might be 0.
13485 	 If the cast is widening, then 1 << Y should have unsigned type,
13486 	 otherwise if Y is the number of bits in the signed shift type minus 1,
13487 	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for Y == 31
13488 	 might be 0xffffffff80000000.  */
13489       if ((code == LT_EXPR || code == GE_EXPR)
13490 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
13491 	  && CONVERT_EXPR_P (arg1)
13492 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13493 	  && (TYPE_PRECISION (TREE_TYPE (arg1))
13494 	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13495 	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13496 	      || (TYPE_PRECISION (TREE_TYPE (arg1))
13497 		  == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13498 	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13499 	{
13500 	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13501 			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13502 	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13503 			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13504 			     build_int_cst (TREE_TYPE (arg0), 0));
13505 	}
13506 
13507       return NULL_TREE;
13508 
13509     case UNORDERED_EXPR:
13510     case ORDERED_EXPR:
13511     case UNLT_EXPR:
13512     case UNLE_EXPR:
13513     case UNGT_EXPR:
13514     case UNGE_EXPR:
13515     case UNEQ_EXPR:
13516     case LTGT_EXPR:
13517       if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13518 	{
13519 	  t1 = fold_relational_const (code, type, arg0, arg1);
13520 	  if (t1 != NULL_TREE)
13521 	    return t1;
13522 	}
13523 
13524       /* If the first operand is NaN, the result is constant.  */
13525       if (TREE_CODE (arg0) == REAL_CST
13526 	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13527 	  && (code != LTGT_EXPR || ! flag_trapping_math))
13528 	{
13529 	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13530 	       ? integer_zero_node
13531 	       : integer_one_node;
13532 	  return omit_one_operand_loc (loc, type, t1, arg1);
13533 	}
13534 
13535       /* If the second operand is NaN, the result is constant.  */
13536       if (TREE_CODE (arg1) == REAL_CST
13537 	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13538 	  && (code != LTGT_EXPR || ! flag_trapping_math))
13539 	{
13540 	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13541 	       ? integer_zero_node
13542 	       : integer_one_node;
13543 	  return omit_one_operand_loc (loc, type, t1, arg0);
13544 	}
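      /* Illustrative example: if either operand is a NaN constant,
	 ORDERED_EXPR and LTGT_EXPR fold to 0 while UNORDERED_EXPR,
	 UNLT_EXPR, UNLE_EXPR, UNGT_EXPR, UNGE_EXPR and UNEQ_EXPR fold to
	 1; LTGT_EXPR is only folded when -ftrapping-math is off, since it
	 may raise an exception on a NaN operand.  */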
13545 
13546       /* Simplify unordered comparison of something with itself.  */
13547       if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13548 	  && operand_equal_p (arg0, arg1, 0))
13549 	return constant_boolean_node (1, type);
13550 
13551       if (code == LTGT_EXPR
13552 	  && !flag_trapping_math
13553 	  && operand_equal_p (arg0, arg1, 0))
13554 	return constant_boolean_node (0, type);
13555 
13556       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
13557       {
13558 	tree targ0 = strip_float_extensions (arg0);
13559 	tree targ1 = strip_float_extensions (arg1);
13560 	tree newtype = TREE_TYPE (targ0);
13561 
13562 	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13563 	  newtype = TREE_TYPE (targ1);
13564 
13565 	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13566 	  return fold_build2_loc (loc, code, type,
13567 			      fold_convert_loc (loc, newtype, targ0),
13568 			      fold_convert_loc (loc, newtype, targ1));
13569       }
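      /* Illustrative example: with float F1 and F2, the comparison
	 (double) F1 CMP (double) F2 is carried out directly as F1 CMP F2,
	 whereas (double) F1 CMP D2 with a genuine double D2 is left alone,
	 because NEWTYPE is then no narrower than ARG0's type.  */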
13570 
13571       return NULL_TREE;
13572 
13573     case COMPOUND_EXPR:
13574       /* When pedantic, a compound expression can be neither an lvalue
13575 	 nor an integer constant expression.  */
13576       if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13577 	return NULL_TREE;
13578       /* Don't let (0, 0) be a null pointer constant.  */
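      /* Illustrative example (C front end): "int *p = (0, 0);" still
	 requires a diagnostic, because (0, 0) is not a null pointer
	 constant; folding it to a bare 0 would lose that distinction,
	 hence the NOP_EXPR wrapper below.  */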
13579       tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13580 				 : fold_convert_loc (loc, type, arg1);
13581       return pedantic_non_lvalue_loc (loc, tem);
13582 
13583     case COMPLEX_EXPR:
13584       if ((TREE_CODE (arg0) == REAL_CST
13585 	   && TREE_CODE (arg1) == REAL_CST)
13586 	  || (TREE_CODE (arg0) == INTEGER_CST
13587 	      && TREE_CODE (arg1) == INTEGER_CST))
13588 	return build_complex (type, arg0, arg1);
13589       if (TREE_CODE (arg0) == REALPART_EXPR
13590 	  && TREE_CODE (arg1) == IMAGPART_EXPR
13591 	  && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13592 	  && operand_equal_p (TREE_OPERAND (arg0, 0),
13593 			      TREE_OPERAND (arg1, 0), 0))
13594 	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13595 				     TREE_OPERAND (arg1, 0));
13596       return NULL_TREE;
13597 
13598     case ASSERT_EXPR:
13599       /* An ASSERT_EXPR should never be passed to fold_binary.  */
13600       gcc_unreachable ();
13601 
13602     case VEC_PACK_TRUNC_EXPR:
13603     case VEC_PACK_FIX_TRUNC_EXPR:
13604       {
13605 	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13606 	tree *elts, vals = NULL_TREE;
13607 
13608 	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13609 		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13610 	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13611 	  return NULL_TREE;
13612 
13613 	elts = XALLOCAVEC (tree, nelts);
13614 	if (!vec_cst_ctor_to_array (arg0, elts)
13615 	    || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13616 	  return NULL_TREE;
13617 
13618 	for (i = 0; i < nelts; i++)
13619 	  {
13620 	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13621 					  ? NOP_EXPR : FIX_TRUNC_EXPR,
13622 					  TREE_TYPE (type), elts[i]);
13623 	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13624 	      return NULL_TREE;
13625 	  }
13626 
13627 	for (i = 0; i < nelts; i++)
13628 	  vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals);
13629 	return build_vector (type, vals);
13630       }
13631 
13632     case VEC_WIDEN_MULT_LO_EXPR:
13633     case VEC_WIDEN_MULT_HI_EXPR:
13634       {
13635 	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13636 	tree *elts, vals = NULL_TREE;
13637 
13638 	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13639 		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13640 	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13641 	  return NULL_TREE;
13642 
13643 	elts = XALLOCAVEC (tree, nelts * 4);
13644 	if (!vec_cst_ctor_to_array (arg0, elts)
13645 	    || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13646 	  return NULL_TREE;
13647 
13648 	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_WIDEN_MULT_LO_EXPR))
13649 	  elts += nelts;
13650 
13651 	for (i = 0; i < nelts; i++)
13652 	  {
13653 	    elts[i] = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[i]);
13654 	    elts[i + nelts * 2]
13655 	      = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
13656 				    elts[i + nelts * 2]);
13657 	    if (elts[i] == NULL_TREE || elts[i + nelts * 2] == NULL_TREE)
13658 	      return NULL_TREE;
13659 	    elts[i] = const_binop (MULT_EXPR, elts[i], elts[i + nelts * 2]);
13660 	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13661 	      return NULL_TREE;
13662 	  }
13663 
13664 	for (i = 0; i < nelts; i++)
13665 	  vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals);
13666 	return build_vector (type, vals);
13667       }
13668 
13669     default:
13670       return NULL_TREE;
13671     } /* switch (code) */
13672 }
13673 
13674 /* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
13675    a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
13676    of GOTO_EXPR.  */
13677 
13678 static tree
13679 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13680 {
13681   switch (TREE_CODE (*tp))
13682     {
13683     case LABEL_EXPR:
13684       return *tp;
13685 
13686     case GOTO_EXPR:
13687       *walk_subtrees = 0;
13688 
13689       /* ... fall through ...  */
13690 
13691     default:
13692       return NULL_TREE;
13693     }
13694 }
13695 
13696 /* Return whether the sub-tree ST contains a label which is accessible from
13697    outside the sub-tree.  */
13698 
13699 static bool
13700 contains_label_p (tree st)
13701 {
13702   return
13703    (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13704 }
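/* Illustrative example (GNU C): in "c ? 0 : ({ l: 1; })" the dead arm may
   contain a label that a goto outside the expression still targets, so
   fold_ternary_loc checks contains_label_p before discarding such an
   operand.  */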
13705 
13706 /* Fold a ternary expression of code CODE and type TYPE with operands
13707    OP0, OP1, and OP2.  Return the folded expression if folding is
13708    successful.  Otherwise, return NULL_TREE.  */
13709 
13710 tree
13711 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13712 		  tree op0, tree op1, tree op2)
13713 {
13714   tree tem;
13715   tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13716   enum tree_code_class kind = TREE_CODE_CLASS (code);
13717 
13718   gcc_assert (IS_EXPR_CODE_CLASS (kind)
13719 	      && TREE_CODE_LENGTH (code) == 3);
13720 
13721   /* Strip any conversions that don't change the mode.  This is safe
13722      for every expression, except for a comparison expression because
13723      its signedness is derived from its operands.  So, in the latter
13724      case, only strip conversions that don't change the signedness.
13725 
13726      Note that this is done as an internal manipulation within the
13727      constant folder, in order to find the simplest representation of
13728      the arguments so that their form can be studied.  In any case,
13729      the appropriate type conversions should be put back in the tree
13730      that is returned from the constant folder.  */
13731   if (op0)
13732     {
13733       arg0 = op0;
13734       STRIP_NOPS (arg0);
13735     }
13736 
13737   if (op1)
13738     {
13739       arg1 = op1;
13740       STRIP_NOPS (arg1);
13741     }
13742 
13743   if (op2)
13744     {
13745       arg2 = op2;
13746       STRIP_NOPS (arg2);
13747     }
13748 
13749   switch (code)
13750     {
13751     case COMPONENT_REF:
13752       if (TREE_CODE (arg0) == CONSTRUCTOR
13753 	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13754 	{
13755 	  unsigned HOST_WIDE_INT idx;
13756 	  tree field, value;
13757 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13758 	    if (field == arg1)
13759 	      return value;
13760 	}
13761       return NULL_TREE;
13762 
13763     case COND_EXPR:
13764       /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13765 	 so all simple results must be passed through pedantic_non_lvalue.  */
13766       if (TREE_CODE (arg0) == INTEGER_CST)
13767 	{
13768 	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
13769 	  tem = integer_zerop (arg0) ? op2 : op1;
13770 	  /* Only optimize constant conditions when the selected branch
13771 	     has the same type as the COND_EXPR.  This avoids optimizing
13772              away "c ? x : throw", where the throw has a void type.
13773              Avoid throwing away an operand that contains a label.  */
13774           if ((!TREE_SIDE_EFFECTS (unused_op)
13775                || !contains_label_p (unused_op))
13776               && (! VOID_TYPE_P (TREE_TYPE (tem))
13777                   || VOID_TYPE_P (type)))
13778 	    return pedantic_non_lvalue_loc (loc, tem);
13779 	  return NULL_TREE;
13780 	}
13781       if (operand_equal_p (arg1, op2, 0))
13782 	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13783 
13784       /* If we have A op B ? A : C, we may be able to convert this to a
13785 	 simpler expression, depending on the operation and the values
13786 	 of B and C.  Signed zeros prevent all of these transformations,
13787 	 for reasons given above each one.
13788 
13789          Also try swapping the arguments and inverting the conditional.  */
13790       if (COMPARISON_CLASS_P (arg0)
13791 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13792 					     arg1, TREE_OPERAND (arg0, 1))
13793 	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13794 	{
13795 	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13796 	  if (tem)
13797 	    return tem;
13798 	}
13799 
13800       if (COMPARISON_CLASS_P (arg0)
13801 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13802 					     op2,
13803 					     TREE_OPERAND (arg0, 1))
13804 	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13805 	{
13806 	  location_t loc0 = expr_location_or (arg0, loc);
13807 	  tem = fold_truth_not_expr (loc0, arg0);
13808 	  if (tem && COMPARISON_CLASS_P (tem))
13809 	    {
13810 	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13811 	      if (tem)
13812 		return tem;
13813 	    }
13814 	}
13815 
13816       /* If the second operand is simpler than the third, swap them
13817 	 since that produces better jump optimization results.  */
13818       if (truth_value_p (TREE_CODE (arg0))
13819 	  && tree_swap_operands_p (op1, op2, false))
13820 	{
13821 	  location_t loc0 = expr_location_or (arg0, loc);
13822 	  /* See if this can be inverted.  If it can't, possibly because
13823 	     it was a floating-point inequality comparison, don't do
13824 	     anything.  */
13825 	  tem = fold_truth_not_expr (loc0, arg0);
13826 	  if (tem)
13827 	    return fold_build3_loc (loc, code, type, tem, op2, op1);
13828 	}
13829 
13830       /* Convert A ? 1 : 0 to simply A.  */
13831       if (integer_onep (op1)
13832 	  && integer_zerop (op2)
13833 	  /* If we try to convert OP0 to our type, the
13834 	     call to fold will try to move the conversion inside
13835 	     a COND, which will recurse.  In that case, the COND_EXPR
13836 	     is probably the best choice, so leave it alone.  */
13837 	  && type == TREE_TYPE (arg0))
13838 	return pedantic_non_lvalue_loc (loc, arg0);
13839 
13840       /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
13841 	 over COND_EXPR in cases such as floating point comparisons.  */
13842       if (integer_zerop (op1)
13843 	  && integer_onep (op2)
13844 	  && truth_value_p (TREE_CODE (arg0)))
13845 	return pedantic_non_lvalue_loc (loc,
13846 				    fold_convert_loc (loc, type,
13847 					      invert_truthvalue_loc (loc,
13848 								     arg0)));
13849 
13850       /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
13851       if (TREE_CODE (arg0) == LT_EXPR
13852 	  && integer_zerop (TREE_OPERAND (arg0, 1))
13853 	  && integer_zerop (op2)
13854 	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13855 	{
13856 	  /* sign_bit_p looks through both zero and sign extensions,
13857 	     but for this optimization only sign extensions are
13858 	     usable.  */
13859 	  tree tem2 = TREE_OPERAND (arg0, 0);
13860 	  while (tem != tem2)
13861 	    {
13862 	      if (TREE_CODE (tem2) != NOP_EXPR
13863 		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13864 		{
13865 		  tem = NULL_TREE;
13866 		  break;
13867 		}
13868 	      tem2 = TREE_OPERAND (tem2, 0);
13869 	    }
13870 	  /* sign_bit_p only checks ARG1 bits within A's precision.
13871 	     If <sign bit of A> has wider type than A, bits outside
13872 	     of A's precision in <sign bit of A> need to be checked.
13873 	     If they are all 0, this optimization needs to be done
13874 	     in unsigned A's type; if they are all 1, in signed A's type;
13875 	     otherwise this can't be done.  */
13876 	  if (tem
13877 	      && TYPE_PRECISION (TREE_TYPE (tem))
13878 		 < TYPE_PRECISION (TREE_TYPE (arg1))
13879 	      && TYPE_PRECISION (TREE_TYPE (tem))
13880 		 < TYPE_PRECISION (type))
13881 	    {
13882 	      unsigned HOST_WIDE_INT mask_lo;
13883 	      HOST_WIDE_INT mask_hi;
13884 	      int inner_width, outer_width;
13885 	      tree tem_type;
13886 
13887 	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13888 	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13889 	      if (outer_width > TYPE_PRECISION (type))
13890 		outer_width = TYPE_PRECISION (type);
13891 
13892 	      if (outer_width > HOST_BITS_PER_WIDE_INT)
13893 		{
13894 		  mask_hi = ((unsigned HOST_WIDE_INT) -1
13895 			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13896 		  mask_lo = -1;
13897 		}
13898 	      else
13899 		{
13900 		  mask_hi = 0;
13901 		  mask_lo = ((unsigned HOST_WIDE_INT) -1
13902 			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
13903 		}
13904 	      if (inner_width > HOST_BITS_PER_WIDE_INT)
13905 		{
13906 		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13907 			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
13908 		  mask_lo = 0;
13909 		}
13910 	      else
13911 		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13912 			     >> (HOST_BITS_PER_WIDE_INT - inner_width));
13913 
13914 	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13915 		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13916 		{
13917 		  tem_type = signed_type_for (TREE_TYPE (tem));
13918 		  tem = fold_convert_loc (loc, tem_type, tem);
13919 		}
13920 	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13921 		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13922 		{
13923 		  tem_type = unsigned_type_for (TREE_TYPE (tem));
13924 		  tem = fold_convert_loc (loc, tem_type, tem);
13925 		}
13926 	      else
13927 		tem = NULL;
13928 	    }
13929 
13930 	  if (tem)
13931 	    return
13932 	      fold_convert_loc (loc, type,
13933 				fold_build2_loc (loc, BIT_AND_EXPR,
13934 					     TREE_TYPE (tem), tem,
13935 					     fold_convert_loc (loc,
13936 							       TREE_TYPE (tem),
13937 							       arg1)));
13938 	}
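      /* Illustrative example: X < 0 ? SIGN_BIT : 0 folds to X & SIGN_BIT,
	 e.g. with SIGN_BIT == 0x80000000 for a 32-bit signed X, provided
	 only sign extensions sit between X and the comparison.  */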
13939 
13940       /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
13941 	 already handled above.  */
13942       if (TREE_CODE (arg0) == BIT_AND_EXPR
13943 	  && integer_onep (TREE_OPERAND (arg0, 1))
13944 	  && integer_zerop (op2)
13945 	  && integer_pow2p (arg1))
13946 	{
13947 	  tree tem = TREE_OPERAND (arg0, 0);
13948 	  STRIP_NOPS (tem);
13949 	  if (TREE_CODE (tem) == RSHIFT_EXPR
13950               && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13951               && ((unsigned HOST_WIDE_INT) tree_log2 (arg1)
13952 	          == TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))))
13953 	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
13954 				TREE_OPERAND (tem, 0), arg1);
13955 	}
13956 
13957       /* A & N ? N : 0 is simply A & N if N is a power of two.  This
13958 	 is probably obsolete because the first operand should be a
13959 	 truth value (that's why we have the two cases above), but let's
13960 	 leave it in until we can confirm this for all front-ends.  */
13961       if (integer_zerop (op2)
13962 	  && TREE_CODE (arg0) == NE_EXPR
13963 	  && integer_zerop (TREE_OPERAND (arg0, 1))
13964 	  && integer_pow2p (arg1)
13965 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13966 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13967 			      arg1, OEP_ONLY_CONST))
13968 	return pedantic_non_lvalue_loc (loc,
13969 				    fold_convert_loc (loc, type,
13970 						      TREE_OPERAND (arg0, 0)));
13971 
13972       /* Convert A ? B : 0 into A && B if A and B are truth values.  */
13973       if (integer_zerop (op2)
13974 	  && truth_value_p (TREE_CODE (arg0))
13975 	  && truth_value_p (TREE_CODE (arg1)))
13976 	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13977 			    fold_convert_loc (loc, type, arg0),
13978 			    arg1);
13979 
13980       /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
13981       if (integer_onep (op2)
13982 	  && truth_value_p (TREE_CODE (arg0))
13983 	  && truth_value_p (TREE_CODE (arg1)))
13984 	{
13985 	  location_t loc0 = expr_location_or (arg0, loc);
13986 	  /* Only perform transformation if ARG0 is easily inverted.  */
13987 	  tem = fold_truth_not_expr (loc0, arg0);
13988 	  if (tem)
13989 	    return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13990 				fold_convert_loc (loc, type, tem),
13991 				arg1);
13992 	}
13993 
13994       /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
13995       if (integer_zerop (arg1)
13996 	  && truth_value_p (TREE_CODE (arg0))
13997 	  && truth_value_p (TREE_CODE (op2)))
13998 	{
13999 	  location_t loc0 = expr_location_or (arg0, loc);
14000 	  /* Only perform transformation if ARG0 is easily inverted.  */
14001 	  tem = fold_truth_not_expr (loc0, arg0);
14002 	  if (tem)
14003 	    return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14004 				fold_convert_loc (loc, type, tem),
14005 				op2);
14006 	}
14007 
14008       /* Convert A ? 1 : B into A || B if A and B are truth values.  */
14009       if (integer_onep (arg1)
14010 	  && truth_value_p (TREE_CODE (arg0))
14011 	  && truth_value_p (TREE_CODE (op2)))
14012 	return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14013 			    fold_convert_loc (loc, type, arg0),
14014 			    op2);
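      /* Note: the four conversions above use TRUTH_ANDIF_EXPR and
	 TRUTH_ORIF_EXPR rather than their non-short-circuit forms, so
	 that B is still evaluated only when the original COND_EXPR would
	 have evaluated it.  */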
14015 
14016       return NULL_TREE;
14017 
14018     case CALL_EXPR:
14019       /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
14020 	 of fold_ternary on them.  */
14021       gcc_unreachable ();
14022 
14023     case BIT_FIELD_REF:
14024       if ((TREE_CODE (arg0) == VECTOR_CST
14025 	   || TREE_CODE (arg0) == CONSTRUCTOR)
14026 	  && type == TREE_TYPE (TREE_TYPE (arg0)))
14027 	{
14028 	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
14029 	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14030 
14031 	  if (width != 0
14032 	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
14033 	      && (idx % width) == 0
14034 	      && (idx = idx / width)
14035 		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14036 	    {
14037 	      if (TREE_CODE (arg0) == VECTOR_CST)
14038 		{
14039 		  tree elements = TREE_VECTOR_CST_ELTS (arg0);
14040 		  while (idx-- > 0 && elements)
14041 		    elements = TREE_CHAIN (elements);
14042 		  if (elements)
14043 		    return TREE_VALUE (elements);
14044 		}
14045 	      else if (idx < CONSTRUCTOR_NELTS (arg0))
14046 		return CONSTRUCTOR_ELT (arg0, idx)->value;
14047 	      return build_zero_cst (type);
14048 	    }
14049 	}
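      /* Illustrative example: BIT_FIELD_REF <V, 32, 64> on a V4SI
	 constant { 0, 1, 2, 3 } has width 32 and idx 64 / 32 == 2, and
	 folds to element 2 of the vector.  */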
14050 
14051       /* A bit-field-ref that references the full argument can be stripped.  */
14052       if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14053 	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14054 	  && integer_zerop (op2))
14055 	return fold_convert_loc (loc, type, arg0);
14056 
14057       return NULL_TREE;
14058 
14059     case FMA_EXPR:
14060       /* For integers we can decompose the FMA if possible.  */
14061       if (TREE_CODE (arg0) == INTEGER_CST
14062 	  && TREE_CODE (arg1) == INTEGER_CST)
14063 	return fold_build2_loc (loc, PLUS_EXPR, type,
14064 				const_binop (MULT_EXPR, arg0, arg1), arg2);
14065       if (integer_zerop (arg2))
14066 	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
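      /* Illustrative example: FMA_EXPR <3, 4, 5> folds via 3 * 4 + 5 to
	 17, and FMA_EXPR <A, B, 0> becomes the single MULT_EXPR A * B.  */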
14067 
14068       return fold_fma (loc, type, arg0, arg1, arg2);
14069 
14070     case VEC_PERM_EXPR:
14071       if (TREE_CODE (arg2) == VECTOR_CST)
14072 	{
14073 	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14074 	  unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14075 	  tree t;
14076 	  bool need_mask_canon = false;
14077 
14078 	  gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)));
14079 	  for (i = 0, t = TREE_VECTOR_CST_ELTS (arg2);
14080 	       i < nelts && t; i++, t = TREE_CHAIN (t))
14081 	    {
14082 	      if (TREE_CODE (TREE_VALUE (t)) != INTEGER_CST)
14083 		return NULL_TREE;
14084 
14085 	      sel[i] = TREE_INT_CST_LOW (TREE_VALUE (t)) & (2 * nelts - 1);
14086 	      if (TREE_INT_CST_HIGH (TREE_VALUE (t))
14087 		  || ((unsigned HOST_WIDE_INT)
14088 		      TREE_INT_CST_LOW (TREE_VALUE (t)) != sel[i]))
14089 		need_mask_canon = true;
14090 	    }
14091 	  if (t)
14092 	    return NULL_TREE;
14093 	  for (; i < nelts; i++)
14094 	    sel[i] = 0;
14095 
14096 	  if ((TREE_CODE (arg0) == VECTOR_CST
14097 	       || TREE_CODE (arg0) == CONSTRUCTOR)
14098 	      && (TREE_CODE (arg1) == VECTOR_CST
14099 		  || TREE_CODE (arg1) == CONSTRUCTOR))
14100 	    {
14101 	      t = fold_vec_perm (type, arg0, arg1, sel);
14102 	      if (t != NULL_TREE)
14103 		return t;
14104 	    }
14105 
14106 	  if (need_mask_canon && arg2 == op2)
14107 	    {
14108 	      tree list = NULL_TREE, eltype = TREE_TYPE (TREE_TYPE (arg2));
14109 	      for (i = 0; i < nelts; i++)
14110 		list = tree_cons (NULL_TREE,
14111 				  build_int_cst (eltype, sel[nelts - i - 1]),
14112 				  list);
14113 	      t = build_vector (TREE_TYPE (arg2), list);
14114 	      return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, t);
14115 	    }
14116 	}
14117       return NULL_TREE;
14118 
14119     default:
14120       return NULL_TREE;
14121     } /* switch (code) */
14122 }
14123 
14124 /* Perform constant folding and related simplification of EXPR.
14125    The related simplifications include x*1 => x, x*0 => 0, etc.,
14126    and application of the associative law.
14127    NOP_EXPR conversions may be removed freely (as long as we
14128    are careful not to change the type of the overall expression).
14129    We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14130    but we can constant-fold them if they have constant operands.  */
14131 
14132 #ifdef ENABLE_FOLD_CHECKING
14133 # define fold(x) fold_1 (x)
14134 static tree fold_1 (tree);
14135 static
14136 #endif
14137 tree
14138 fold (tree expr)
14139 {
14140   const tree t = expr;
14141   enum tree_code code = TREE_CODE (t);
14142   enum tree_code_class kind = TREE_CODE_CLASS (code);
14143   tree tem;
14144   location_t loc = EXPR_LOCATION (expr);
14145 
14146   /* Return right away if a constant.  */
14147   if (kind == tcc_constant)
14148     return t;
14149 
14150   /* CALL_EXPR-like objects with variable numbers of operands are
14151      treated specially.  */
14152   if (kind == tcc_vl_exp)
14153     {
14154       if (code == CALL_EXPR)
14155 	{
14156 	  tem = fold_call_expr (loc, expr, false);
14157 	  return tem ? tem : expr;
14158 	}
14159       return expr;
14160     }
14161 
14162   if (IS_EXPR_CODE_CLASS (kind))
14163     {
14164       tree type = TREE_TYPE (t);
14165       tree op0, op1, op2;
14166 
14167       switch (TREE_CODE_LENGTH (code))
14168 	{
14169 	case 1:
14170 	  op0 = TREE_OPERAND (t, 0);
14171 	  tem = fold_unary_loc (loc, code, type, op0);
14172 	  return tem ? tem : expr;
14173 	case 2:
14174 	  op0 = TREE_OPERAND (t, 0);
14175 	  op1 = TREE_OPERAND (t, 1);
14176 	  tem = fold_binary_loc (loc, code, type, op0, op1);
14177 	  return tem ? tem : expr;
14178 	case 3:
14179 	  op0 = TREE_OPERAND (t, 0);
14180 	  op1 = TREE_OPERAND (t, 1);
14181 	  op2 = TREE_OPERAND (t, 2);
14182 	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14183 	  return tem ? tem : expr;
14184 	default:
14185 	  break;
14186 	}
14187     }
14188 
14189   switch (code)
14190     {
14191     case ARRAY_REF:
14192       {
14193 	tree op0 = TREE_OPERAND (t, 0);
14194 	tree op1 = TREE_OPERAND (t, 1);
14195 
14196 	if (TREE_CODE (op1) == INTEGER_CST
14197 	    && TREE_CODE (op0) == CONSTRUCTOR
14198 	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14199 	  {
14200 	    VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14201 	    unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14202 	    unsigned HOST_WIDE_INT begin = 0;
14203 
14204 	    /* Find a matching index by means of a binary search.  */
14205 	    while (begin != end)
14206 	      {
14207 		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14208 		tree index = VEC_index (constructor_elt, elts, middle)->index;
14209 
14210 		if (TREE_CODE (index) == INTEGER_CST
14211 		    && tree_int_cst_lt (index, op1))
14212 		  begin = middle + 1;
14213 		else if (TREE_CODE (index) == INTEGER_CST
14214 			 && tree_int_cst_lt (op1, index))
14215 		  end = middle;
14216 		else if (TREE_CODE (index) == RANGE_EXPR
14217 			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14218 		  begin = middle + 1;
14219 		else if (TREE_CODE (index) == RANGE_EXPR
14220 			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14221 		  end = middle;
14222 		else
14223 		  return VEC_index (constructor_elt, elts, middle)->value;
14224 	      }
14225 	  }
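	/* Illustrative example: indexing { [0] = 10, [2 ... 5] = 20,
	   [7] = 30 } with the constant 4 bisects to the RANGE_EXPR entry
	   [2 ... 5] and returns 20.  */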
14226 
14227 	return t;
14228       }
14229 
14230     case CONST_DECL:
14231       return fold (DECL_INITIAL (t));
14232 
14233     default:
14234       return t;
14235     } /* switch (code) */
14236 }
14237 
14238 #ifdef ENABLE_FOLD_CHECKING
14239 #undef fold
14240 
14241 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
14242 static void fold_check_failed (const_tree, const_tree);
14243 void print_fold_checksum (const_tree);
14244 
14245 /* When --enable-checking=fold, compute a digest of EXPR before
14246    and after the actual fold call to check that fold did not
14247    accidentally change the original expression.  */
14248 
14249 tree
14250 fold (tree expr)
14251 {
14252   tree ret;
14253   struct md5_ctx ctx;
14254   unsigned char checksum_before[16], checksum_after[16];
14255   htab_t ht;
14256 
14257   ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14258   md5_init_ctx (&ctx);
14259   fold_checksum_tree (expr, &ctx, ht);
14260   md5_finish_ctx (&ctx, checksum_before);
14261   htab_empty (ht);
14262 
14263   ret = fold_1 (expr);
14264 
14265   md5_init_ctx (&ctx);
14266   fold_checksum_tree (expr, &ctx, ht);
14267   md5_finish_ctx (&ctx, checksum_after);
14268   htab_delete (ht);
14269 
14270   if (memcmp (checksum_before, checksum_after, 16))
14271     fold_check_failed (expr, ret);
14272 
14273   return ret;
14274 }
14275 
14276 void
14277 print_fold_checksum (const_tree expr)
14278 {
14279   struct md5_ctx ctx;
14280   unsigned char checksum[16], cnt;
14281   htab_t ht;
14282 
14283   ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14284   md5_init_ctx (&ctx);
14285   fold_checksum_tree (expr, &ctx, ht);
14286   md5_finish_ctx (&ctx, checksum);
14287   htab_delete (ht);
14288   for (cnt = 0; cnt < 16; ++cnt)
14289     fprintf (stderr, "%02x", checksum[cnt]);
14290   putc ('\n', stderr);
14291 }
14292 
14293 static void
14294 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14295 {
14296   internal_error ("fold check: original tree changed by fold");
14297 }
14298 
14299 static void
14300 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
14301 {
14302   void **slot;
14303   enum tree_code code;
14304   union tree_node buf;
14305   int i, len;
14306 
14307  recursive_label:
14308   if (expr == NULL)
14309     return;
14310   slot = (void **) htab_find_slot (ht, expr, INSERT);
14311   if (*slot != NULL)
14312     return;
14313   *slot = CONST_CAST_TREE (expr);
14314   code = TREE_CODE (expr);
14315   if (TREE_CODE_CLASS (code) == tcc_declaration
14316       && DECL_ASSEMBLER_NAME_SET_P (expr))
14317     {
14318       /* Allow DECL_ASSEMBLER_NAME to be modified.  */
14319       memcpy ((char *) &buf, expr, tree_size (expr));
14320       SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14321       expr = (tree) &buf;
14322     }
14323   else if (TREE_CODE_CLASS (code) == tcc_type
14324 	   && (TYPE_POINTER_TO (expr)
14325 	       || TYPE_REFERENCE_TO (expr)
14326 	       || TYPE_CACHED_VALUES_P (expr)
14327 	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14328 	       || TYPE_NEXT_VARIANT (expr)))
14329     {
14330       /* Allow these fields to be modified.  */
14331       tree tmp;
14332       memcpy ((char *) &buf, expr, tree_size (expr));
14333       expr = tmp = (tree) &buf;
14334       TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14335       TYPE_POINTER_TO (tmp) = NULL;
14336       TYPE_REFERENCE_TO (tmp) = NULL;
14337       TYPE_NEXT_VARIANT (tmp) = NULL;
14338       if (TYPE_CACHED_VALUES_P (tmp))
14339 	{
14340 	  TYPE_CACHED_VALUES_P (tmp) = 0;
14341 	  TYPE_CACHED_VALUES (tmp) = NULL;
14342 	}
14343     }
14344   md5_process_bytes (expr, tree_size (expr), ctx);
14345   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14346     fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14347   if (TREE_CODE_CLASS (code) != tcc_type
14348       && TREE_CODE_CLASS (code) != tcc_declaration
14349       && code != TREE_LIST
14350       && code != SSA_NAME
14351       && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14352     fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14353   switch (TREE_CODE_CLASS (code))
14354     {
14355     case tcc_constant:
14356       switch (code)
14357 	{
14358 	case STRING_CST:
14359 	  md5_process_bytes (TREE_STRING_POINTER (expr),
14360 			     TREE_STRING_LENGTH (expr), ctx);
14361 	  break;
14362 	case COMPLEX_CST:
14363 	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14364 	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14365 	  break;
14366 	case VECTOR_CST:
14367 	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
14368 	  break;
14369 	default:
14370 	  break;
14371 	}
14372       break;
14373     case tcc_exceptional:
14374       switch (code)
14375 	{
14376 	case TREE_LIST:
14377 	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14378 	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14379 	  expr = TREE_CHAIN (expr);
14380 	  goto recursive_label;
14381 	  break;
14382 	case TREE_VEC:
14383 	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14384 	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14385 	  break;
14386 	default:
14387 	  break;
14388 	}
14389       break;
14390     case tcc_expression:
14391     case tcc_reference:
14392     case tcc_comparison:
14393     case tcc_unary:
14394     case tcc_binary:
14395     case tcc_statement:
14396     case tcc_vl_exp:
14397       len = TREE_OPERAND_LENGTH (expr);
14398       for (i = 0; i < len; ++i)
14399 	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14400       break;
14401     case tcc_declaration:
14402       fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14403       fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14404       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14405 	{
14406 	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14407 	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14408 	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14409 	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14410 	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14411 	}
14412       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14413 	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14414 
14415       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14416 	{
14417 	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14418 	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14419 	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14420 	}
14421       break;
14422     case tcc_type:
14423       if (TREE_CODE (expr) == ENUMERAL_TYPE)
14424         fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14425       fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14426       fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14427       fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14428       fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14429       if (INTEGRAL_TYPE_P (expr)
14430           || SCALAR_FLOAT_TYPE_P (expr))
14431 	{
14432 	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14433 	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14434 	}
14435       fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14436       if (TREE_CODE (expr) == RECORD_TYPE
14437 	  || TREE_CODE (expr) == UNION_TYPE
14438 	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
14439 	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14440       fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14441       break;
14442     default:
14443       break;
14444     }
14445 }
14446 
14447 /* Helper function for outputting the checksum of a tree T.  When
14448    debugging with gdb, you can "define mynext" to be "next" followed
14449    by "call debug_fold_checksum (op0)", then just trace down till the
14450    outputs differ.  */
14451 
14452 DEBUG_FUNCTION void
14453 debug_fold_checksum (const_tree t)
14454 {
14455   int i;
14456   unsigned char checksum[16];
14457   struct md5_ctx ctx;
14458   htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14459 
14460   md5_init_ctx (&ctx);
14461   fold_checksum_tree (t, &ctx, ht);
14462   md5_finish_ctx (&ctx, checksum);
14463   htab_empty (ht);
14464 
14465   for (i = 0; i < 16; i++)
14466     fprintf (stderr, "%d ", checksum[i]);
14467 
14468   fprintf (stderr, "\n");
14469 }
14470 
14471 #endif
14472 
14473 /* Fold a unary tree expression with code CODE of type TYPE with an
14474    operand OP0.  LOC is the location of the resulting expression.
14475    Return a folded expression if successful.  Otherwise, return a tree
14476    expression with code CODE of type TYPE with an operand OP0.  */
14477 
14478 tree
14479 fold_build1_stat_loc (location_t loc,
14480 		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14481 {
14482   tree tem;
14483 #ifdef ENABLE_FOLD_CHECKING
14484   unsigned char checksum_before[16], checksum_after[16];
14485   struct md5_ctx ctx;
14486   htab_t ht;
14487 
14488   ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14489   md5_init_ctx (&ctx);
14490   fold_checksum_tree (op0, &ctx, ht);
14491   md5_finish_ctx (&ctx, checksum_before);
14492   htab_empty (ht);
14493 #endif
14494 
14495   tem = fold_unary_loc (loc, code, type, op0);
14496   if (!tem)
14497     tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14498 
14499 #ifdef ENABLE_FOLD_CHECKING
14500   md5_init_ctx (&ctx);
14501   fold_checksum_tree (op0, &ctx, ht);
14502   md5_finish_ctx (&ctx, checksum_after);
14503   htab_delete (ht);
14504 
14505   if (memcmp (checksum_before, checksum_after, 16))
14506     fold_check_failed (op0, tem);
14507 #endif
14508   return tem;
14509 }
14510 
14511 /* Fold a binary tree expression with code CODE of type TYPE with
14512    operands OP0 and OP1.  LOC is the location of the resulting
14513    expression.  Return a folded expression if successful.  Otherwise,
14514    return a tree expression with code CODE of type TYPE with operands
14515    OP0 and OP1.  */
14516 
14517 tree
14518 fold_build2_stat_loc (location_t loc,
14519 		      enum tree_code code, tree type, tree op0, tree op1
14520 		      MEM_STAT_DECL)
14521 {
14522   tree tem;
14523 #ifdef ENABLE_FOLD_CHECKING
14524   unsigned char checksum_before_op0[16],
14525                 checksum_before_op1[16],
14526 		checksum_after_op0[16],
14527 		checksum_after_op1[16];
14528   struct md5_ctx ctx;
14529   htab_t ht;
14530 
14531   ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14532   md5_init_ctx (&ctx);
14533   fold_checksum_tree (op0, &ctx, ht);
14534   md5_finish_ctx (&ctx, checksum_before_op0);
14535   htab_empty (ht);
14536 
14537   md5_init_ctx (&ctx);
14538   fold_checksum_tree (op1, &ctx, ht);
14539   md5_finish_ctx (&ctx, checksum_before_op1);
14540   htab_empty (ht);
14541 #endif
14542 
14543   tem = fold_binary_loc (loc, code, type, op0, op1);
14544   if (!tem)
14545     tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14546 
14547 #ifdef ENABLE_FOLD_CHECKING
14548   md5_init_ctx (&ctx);
14549   fold_checksum_tree (op0, &ctx, ht);
14550   md5_finish_ctx (&ctx, checksum_after_op0);
14551   htab_empty (ht);
14552 
14553   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14554     fold_check_failed (op0, tem);
14555 
14556   md5_init_ctx (&ctx);
14557   fold_checksum_tree (op1, &ctx, ht);
14558   md5_finish_ctx (&ctx, checksum_after_op1);
14559   htab_delete (ht);
14560 
14561   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14562     fold_check_failed (op1, tem);
14563 #endif
14564   return tem;
14565 }
14566 
14567 /* Fold a ternary tree expression with code CODE of type TYPE with
14568    operands OP0, OP1, and OP2.  Return a folded expression if
14569    successful.  Otherwise, return a tree expression with code CODE of
14570    type TYPE with operands OP0, OP1, and OP2.  */
14571 
14572 tree
14573 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14574 		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
14575 {
14576   tree tem;
14577 #ifdef ENABLE_FOLD_CHECKING
14578   unsigned char checksum_before_op0[16],
14579                 checksum_before_op1[16],
14580                 checksum_before_op2[16],
14581 		checksum_after_op0[16],
14582 		checksum_after_op1[16],
14583 		checksum_after_op2[16];
14584   struct md5_ctx ctx;
14585   htab_t ht;
14586 
14587   ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14588   md5_init_ctx (&ctx);
14589   fold_checksum_tree (op0, &ctx, ht);
14590   md5_finish_ctx (&ctx, checksum_before_op0);
14591   htab_empty (ht);
14592 
14593   md5_init_ctx (&ctx);
14594   fold_checksum_tree (op1, &ctx, ht);
14595   md5_finish_ctx (&ctx, checksum_before_op1);
14596   htab_empty (ht);
14597 
14598   md5_init_ctx (&ctx);
14599   fold_checksum_tree (op2, &ctx, ht);
14600   md5_finish_ctx (&ctx, checksum_before_op2);
14601   htab_empty (ht);
14602 #endif
14603 
14604   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14605   tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14606   if (!tem)
14607     tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14608 
14609 #ifdef ENABLE_FOLD_CHECKING
14610   md5_init_ctx (&ctx);
14611   fold_checksum_tree (op0, &ctx, ht);
14612   md5_finish_ctx (&ctx, checksum_after_op0);
14613   htab_empty (ht);
14614 
14615   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14616     fold_check_failed (op0, tem);
14617 
14618   md5_init_ctx (&ctx);
14619   fold_checksum_tree (op1, &ctx, ht);
14620   md5_finish_ctx (&ctx, checksum_after_op1);
14621   htab_empty (ht);
14622 
14623   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14624     fold_check_failed (op1, tem);
14625 
14626   md5_init_ctx (&ctx);
14627   fold_checksum_tree (op2, &ctx, ht);
14628   md5_finish_ctx (&ctx, checksum_after_op2);
14629   htab_delete (ht);
14630 
14631   if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14632     fold_check_failed (op2, tem);
14633 #endif
14634   return tem;
14635 }
14636 
14637 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14638    arguments in ARGARRAY, and a null static chain.
14639    Return a folded expression if successful.  Otherwise, return a CALL_EXPR
14640    of type TYPE from the given operands as constructed by build_call_array.  */
14641 
14642 tree
14643 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14644 			   int nargs, tree *argarray)
14645 {
14646   tree tem;
14647 #ifdef ENABLE_FOLD_CHECKING
14648   unsigned char checksum_before_fn[16],
14649                 checksum_before_arglist[16],
14650 		checksum_after_fn[16],
14651 		checksum_after_arglist[16];
14652   struct md5_ctx ctx;
14653   htab_t ht;
14654   int i;
14655 
14656   ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14657   md5_init_ctx (&ctx);
14658   fold_checksum_tree (fn, &ctx, ht);
14659   md5_finish_ctx (&ctx, checksum_before_fn);
14660   htab_empty (ht);
14661 
14662   md5_init_ctx (&ctx);
14663   for (i = 0; i < nargs; i++)
14664     fold_checksum_tree (argarray[i], &ctx, ht);
14665   md5_finish_ctx (&ctx, checksum_before_arglist);
14666   htab_empty (ht);
14667 #endif
14668 
14669   tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14670 
14671 #ifdef ENABLE_FOLD_CHECKING
14672   md5_init_ctx (&ctx);
14673   fold_checksum_tree (fn, &ctx, ht);
14674   md5_finish_ctx (&ctx, checksum_after_fn);
14675   htab_empty (ht);
14676 
14677   if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14678     fold_check_failed (fn, tem);
14679 
14680   md5_init_ctx (&ctx);
14681   for (i = 0; i < nargs; i++)
14682     fold_checksum_tree (argarray[i], &ctx, ht);
14683   md5_finish_ctx (&ctx, checksum_after_arglist);
14684   htab_delete (ht);
14685 
14686   if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14687     fold_check_failed (NULL_TREE, tem);
14688 #endif
14689   return tem;
14690 }
14691 
14692 /* Perform constant folding and related simplification of initializer
14693    expression EXPR.  These behave identically to "fold_buildN" but ignore
14694    potential run-time traps and exceptions that fold must preserve.  */
14695 
14696 #define START_FOLD_INIT \
14697   int saved_signaling_nans = flag_signaling_nans;\
14698   int saved_trapping_math = flag_trapping_math;\
14699   int saved_rounding_math = flag_rounding_math;\
14700   int saved_trapv = flag_trapv;\
14701   int saved_folding_initializer = folding_initializer;\
14702   flag_signaling_nans = 0;\
14703   flag_trapping_math = 0;\
14704   flag_rounding_math = 0;\
14705   flag_trapv = 0;\
14706   folding_initializer = 1;
14707 
14708 #define END_FOLD_INIT \
14709   flag_signaling_nans = saved_signaling_nans;\
14710   flag_trapping_math = saved_trapping_math;\
14711   flag_rounding_math = saved_rounding_math;\
14712   flag_trapv = saved_trapv;\
14713   folding_initializer = saved_folding_initializer;
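/* Illustrative example: with these flags cleared, a static initializer
   such as "1.0 / 3.0" is still folded to a constant even when
   -frounding-math or -ftrapping-math would otherwise make fold keep the
   division for run time.  */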
14714 
14715 tree
14716 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14717 			     tree type, tree op)
14718 {
14719   tree result;
14720   START_FOLD_INIT;
14721 
14722   result = fold_build1_loc (loc, code, type, op);
14723 
14724   END_FOLD_INIT;
14725   return result;
14726 }
14727 
14728 tree
14729 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14730 			     tree type, tree op0, tree op1)
14731 {
14732   tree result;
14733   START_FOLD_INIT;
14734 
14735   result = fold_build2_loc (loc, code, type, op0, op1);
14736 
14737   END_FOLD_INIT;
14738   return result;
14739 }
14740 
14741 tree
14742 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14743 			     tree type, tree op0, tree op1, tree op2)
14744 {
14745   tree result;
14746   START_FOLD_INIT;
14747 
14748   result = fold_build3_loc (loc, code, type, op0, op1, op2);
14749 
14750   END_FOLD_INIT;
14751   return result;
14752 }
14753 
14754 tree
14755 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14756 				       int nargs, tree *argarray)
14757 {
14758   tree result;
14759   START_FOLD_INIT;
14760 
14761   result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14762 
14763   END_FOLD_INIT;
14764   return result;
14765 }
14766 
14767 #undef START_FOLD_INIT
14768 #undef END_FOLD_INIT
14769 
14770 /* Determine if the first argument is a multiple of the second.  Return 0 if
14771    it is not, or if we cannot easily determine it to be.
14772 
14773    An example of the sort of thing we care about (at this point; this routine
14774    could surely be made more general, and expanded to do what the *_DIV_EXPR's
14775    fold cases do now) is discovering that
14776 
14777      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14778 
14779    is a multiple of
14780 
14781      SAVE_EXPR (J * 8)
14782 
14783    when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14784 
14785    This code also handles discovering that
14786 
14787      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14788 
14789    is a multiple of 8 so we don't have to worry about dealing with a
14790    possible remainder.
14791 
14792    Note that we *look* inside a SAVE_EXPR only to determine how it was
14793    calculated; it is not safe for fold to do much of anything else with the
14794    internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14795    at run time.  For example, the latter example above *cannot* be implemented
14796    as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14797    evaluation time of the original SAVE_EXPR is not necessarily the same at
14798    the time the new expression is evaluated.  The only optimization of this
14799    sort that would be valid is changing
14800 
14801      SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14802 
14803    divided by 8 to
14804 
14805      SAVE_EXPR (I) * SAVE_EXPR (J)
14806 
14807    (where the same SAVE_EXPR (J) is used in the original and the
14808    transformed version).  */
14809 
14810 int
14811 multiple_of_p (tree type, const_tree top, const_tree bottom)
14812 {
14813   if (operand_equal_p (top, bottom, 0))
14814     return 1;
14815 
14816   if (TREE_CODE (type) != INTEGER_TYPE)
14817     return 0;
14818 
14819   switch (TREE_CODE (top))
14820     {
14821     case BIT_AND_EXPR:
14822       /* Bitwise and provides a power of two multiple.  If the mask is
14823 	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
14824       if (!integer_pow2p (bottom))
14825 	return 0;
14826       /* FALLTHRU */
14827 
14828     case MULT_EXPR:
14829       return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14830 	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14831 
14832     case PLUS_EXPR:
14833     case MINUS_EXPR:
14834       return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14835 	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14836 
14837     case LSHIFT_EXPR:
14838       if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14839 	{
14840 	  tree op1, t1;
14841 
14842 	  op1 = TREE_OPERAND (top, 1);
14843 	  /* const_binop may not detect overflow correctly,
14844 	     so check for it explicitly here.  */
14845 	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14846 	      > TREE_INT_CST_LOW (op1)
14847 	      && TREE_INT_CST_HIGH (op1) == 0
14848 	      && 0 != (t1 = fold_convert (type,
14849 					  const_binop (LSHIFT_EXPR,
14850 						       size_one_node,
14851 						       op1)))
14852 	      && !TREE_OVERFLOW (t1))
14853 	    return multiple_of_p (type, t1, bottom);
14854 	}
14855       return 0;
14856 
14857     case NOP_EXPR:
14858       /* Can't handle conversions from non-integral or wider integral type.  */
14859       if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14860 	  || (TYPE_PRECISION (type)
14861 	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14862 	return 0;
14863 
14864       /* ... fall through ...  */
14865 
14866     case SAVE_EXPR:
14867       return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14868 
14869     case COND_EXPR:
14870       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14871 	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14872 
14873     case INTEGER_CST:
14874       if (TREE_CODE (bottom) != INTEGER_CST
14875 	  || integer_zerop (bottom)
14876 	  || (TYPE_UNSIGNED (type)
14877 	      && (tree_int_cst_sgn (top) < 0
14878 		  || tree_int_cst_sgn (bottom) < 0)))
14879 	return 0;
14880       return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14881 					     top, bottom));
14882 
14883     default:
14884       return 0;
14885     }
14886 }
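/* Illustrative examples: multiple_of_p (sizetype, J * 8, 8) returns 1 via
   the MULT_EXPR case, and X << 3 is a multiple of 8 because 1 << 3 folds
   to 8; but I + 4 is not known to be a multiple of 8, since PLUS_EXPR
   requires both operands to be multiples.  */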
14887 
14888 /* Return true if CODE or TYPE is known to be non-negative. */
14889 
14890 static bool
14891 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14892 {
14893   if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14894       && truth_value_p (code))
14895     /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14896        have a signed:1 type (where the values are -1 and 0).  */
14897     return true;
14898   return false;
14899 }
14900 
14901 /* Return true if (CODE OP0) is known to be non-negative.  If the return
14902    value is based on the assumption that signed overflow is undefined,
14903    set *STRICT_OVERFLOW_P to true; otherwise, don't change
14904    *STRICT_OVERFLOW_P.  */
14905 
14906 bool
14907 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14908 				bool *strict_overflow_p)
14909 {
14910   if (TYPE_UNSIGNED (type))
14911     return true;
14912 
14913   switch (code)
14914     {
14915     case ABS_EXPR:
14916       /* We can't return 1 if flag_wrapv is set because
14917 	 ABS_EXPR<INT_MIN> = INT_MIN.  */
14918       if (!INTEGRAL_TYPE_P (type))
14919 	return true;
14920       if (TYPE_OVERFLOW_UNDEFINED (type))
14921 	{
14922 	  *strict_overflow_p = true;
14923 	  return true;
14924 	}
14925       break;
14926 
14927     case NON_LVALUE_EXPR:
14928     case FLOAT_EXPR:
14929     case FIX_TRUNC_EXPR:
14930       return tree_expr_nonnegative_warnv_p (op0,
14931 					    strict_overflow_p);
14932 
14933     case NOP_EXPR:
14934       {
14935 	tree inner_type = TREE_TYPE (op0);
14936 	tree outer_type = type;
14937 
14938 	if (TREE_CODE (outer_type) == REAL_TYPE)
14939 	  {
14940 	    if (TREE_CODE (inner_type) == REAL_TYPE)
14941 	      return tree_expr_nonnegative_warnv_p (op0,
14942 						    strict_overflow_p);
14943 	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
14944 	      {
14945 		if (TYPE_UNSIGNED (inner_type))
14946 		  return true;
14947 		return tree_expr_nonnegative_warnv_p (op0,
14948 						      strict_overflow_p);
14949 	      }
14950 	  }
14951 	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14952 	  {
14953 	    if (TREE_CODE (inner_type) == REAL_TYPE)
14954 	      return tree_expr_nonnegative_warnv_p (op0,
14955 						    strict_overflow_p);
14956 	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
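	      /* A widening conversion from an unsigned type cannot
		 produce a negative value, e.g. (int) (unsigned char) x
		 always lies within [0, 255].  */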
14957 	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14958 		      && TYPE_UNSIGNED (inner_type);
14959 	  }
14960       }
14961       break;
14962 
14963     default:
14964       return tree_simple_nonnegative_warnv_p (code, type);
14965     }
14966 
14967   /* We don't know sign of `t', so be conservative and return false.  */
14968   return false;
14969 }
14970 
14971 /* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
14972    value is based on the assumption that signed overflow is undefined,
14973    set *STRICT_OVERFLOW_P to true; otherwise, don't change
14974    *STRICT_OVERFLOW_P.  */
14975 
14976 bool
14977 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14978 				      tree op1, bool *strict_overflow_p)
14979 {
14980   if (TYPE_UNSIGNED (type))
14981     return true;
14982 
14983   switch (code)
14984     {
14985     case POINTER_PLUS_EXPR:
14986     case PLUS_EXPR:
14987       if (FLOAT_TYPE_P (type))
14988 	return (tree_expr_nonnegative_warnv_p (op0,
14989 					       strict_overflow_p)
14990 		&& tree_expr_nonnegative_warnv_p (op1,
14991 						  strict_overflow_p));
14992 
14993       /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14994 	 both unsigned and at least 2 bits shorter than the result.  */
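      /* E.g. (int) (unsigned char) x + (int) (unsigned char) y is at
	 most 255 + 255 = 510, which is far below the sign bit of a
	 32-bit int.  */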
14995       if (TREE_CODE (type) == INTEGER_TYPE
14996 	  && TREE_CODE (op0) == NOP_EXPR
14997 	  && TREE_CODE (op1) == NOP_EXPR)
14998 	{
14999 	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15000 	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15001 	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15002 	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15003 	    {
15004 	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
15005 				       TYPE_PRECISION (inner2)) + 1;
15006 	      return prec < TYPE_PRECISION (type);
15007 	    }
15008 	}
15009       break;
15010 
15011     case MULT_EXPR:
15012       if (FLOAT_TYPE_P (type))
15013 	{
15014 	  /* x * x for floating point x is always non-negative.  */
15015 	  if (operand_equal_p (op0, op1, 0))
15016 	    return true;
15017 	  return (tree_expr_nonnegative_warnv_p (op0,
15018 						 strict_overflow_p)
15019 		  && tree_expr_nonnegative_warnv_p (op1,
15020 						    strict_overflow_p));
15021 	}
15022 
15023       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15024 	 both unsigned and their combined precision is less than that of the result.  */
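      /* E.g. (int) (unsigned char) x * (int) (unsigned char) y is at
	 most 255 * 255 = 65025, which fits in 16 bits and so cannot
	 set the sign bit of a 32-bit int.  */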
15025       if (TREE_CODE (type) == INTEGER_TYPE
15026 	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15027 	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15028 	{
15029 	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15030 	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
15031 	    : TREE_TYPE (op0);
15032 	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15033 	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
15034 	    : TREE_TYPE (op1);
15035 
15036 	  bool unsigned0 = TYPE_UNSIGNED (inner0);
15037 	  bool unsigned1 = TYPE_UNSIGNED (inner1);
15038 
15039 	  if (TREE_CODE (op0) == INTEGER_CST)
15040 	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15041 
15042 	  if (TREE_CODE (op1) == INTEGER_CST)
15043 	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15044 
15045 	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15046 	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15047 	    {
15048 	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15049 		? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15050 		: TYPE_PRECISION (inner0);
15051 
15052 	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15053 		? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15054 		: TYPE_PRECISION (inner1);
15055 
15056 	      return precision0 + precision1 < TYPE_PRECISION (type);
15057 	    }
15058 	}
15059       return false;
15060 
15061     case BIT_AND_EXPR:
15062     case MAX_EXPR:
15063       return (tree_expr_nonnegative_warnv_p (op0,
15064 					     strict_overflow_p)
15065 	      || tree_expr_nonnegative_warnv_p (op1,
15066 						strict_overflow_p));
15067 
15068     case BIT_IOR_EXPR:
15069     case BIT_XOR_EXPR:
15070     case MIN_EXPR:
15071     case RDIV_EXPR:
15072     case TRUNC_DIV_EXPR:
15073     case CEIL_DIV_EXPR:
15074     case FLOOR_DIV_EXPR:
15075     case ROUND_DIV_EXPR:
15076       return (tree_expr_nonnegative_warnv_p (op0,
15077 					     strict_overflow_p)
15078 	      && tree_expr_nonnegative_warnv_p (op1,
15079 						strict_overflow_p));
15080 
15081     case TRUNC_MOD_EXPR:
15082     case CEIL_MOD_EXPR:
15083     case FLOOR_MOD_EXPR:
15084     case ROUND_MOD_EXPR:
15085       return tree_expr_nonnegative_warnv_p (op0,
15086 					    strict_overflow_p);
15087     default:
15088       return tree_simple_nonnegative_warnv_p (code, type);
15089     }
15090 
15091   /* We don't know sign of `t', so be conservative and return false.  */
15092   return false;
15093 }
15094 
15095 /* Return true if T is known to be non-negative.  If the return
15096    value is based on the assumption that signed overflow is undefined,
15097    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15098    *STRICT_OVERFLOW_P.  */
15099 
15100 bool
15101 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15102 {
15103   if (TYPE_UNSIGNED (TREE_TYPE (t)))
15104     return true;
15105 
15106   switch (TREE_CODE (t))
15107     {
15108     case INTEGER_CST:
15109       return tree_int_cst_sgn (t) >= 0;
15110 
15111     case REAL_CST:
15112       return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15113 
15114     case FIXED_CST:
15115       return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15116 
15117     case COND_EXPR:
15118       return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15119 					     strict_overflow_p)
15120 	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15121 						strict_overflow_p));
15122     default:
15123       return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15124 						   TREE_TYPE (t));
15125     }
15126   /* We don't know sign of `t', so be conservative and return false.  */
15127   return false;
15128 }
15129 
15130 /* Return true if T is known to be non-negative.  If the return
15131    value is based on the assumption that signed overflow is undefined,
15132    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15133    *STRICT_OVERFLOW_P.  */
15134 
15135 bool
15136 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15137 			       tree arg0, tree arg1, bool *strict_overflow_p)
15138 {
15139   if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15140     switch (DECL_FUNCTION_CODE (fndecl))
15141       {
15142 	CASE_FLT_FN (BUILT_IN_ACOS):
15143 	CASE_FLT_FN (BUILT_IN_ACOSH):
15144 	CASE_FLT_FN (BUILT_IN_CABS):
15145 	CASE_FLT_FN (BUILT_IN_COSH):
15146 	CASE_FLT_FN (BUILT_IN_ERFC):
15147 	CASE_FLT_FN (BUILT_IN_EXP):
15148 	CASE_FLT_FN (BUILT_IN_EXP10):
15149 	CASE_FLT_FN (BUILT_IN_EXP2):
15150 	CASE_FLT_FN (BUILT_IN_FABS):
15151 	CASE_FLT_FN (BUILT_IN_FDIM):
15152 	CASE_FLT_FN (BUILT_IN_HYPOT):
15153 	CASE_FLT_FN (BUILT_IN_POW10):
15154 	CASE_INT_FN (BUILT_IN_FFS):
15155 	CASE_INT_FN (BUILT_IN_PARITY):
15156 	CASE_INT_FN (BUILT_IN_POPCOUNT):
15157       case BUILT_IN_BSWAP32:
15158       case BUILT_IN_BSWAP64:
15159 	/* Always true.  */
15160 	return true;
15161 
15162 	CASE_FLT_FN (BUILT_IN_SQRT):
15163 	/* sqrt(-0.0) is -0.0.  */
15164 	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15165 	  return true;
15166 	return tree_expr_nonnegative_warnv_p (arg0,
15167 					      strict_overflow_p);
15168 
15169 	CASE_FLT_FN (BUILT_IN_ASINH):
15170 	CASE_FLT_FN (BUILT_IN_ATAN):
15171 	CASE_FLT_FN (BUILT_IN_ATANH):
15172 	CASE_FLT_FN (BUILT_IN_CBRT):
15173 	CASE_FLT_FN (BUILT_IN_CEIL):
15174 	CASE_FLT_FN (BUILT_IN_ERF):
15175 	CASE_FLT_FN (BUILT_IN_EXPM1):
15176 	CASE_FLT_FN (BUILT_IN_FLOOR):
15177 	CASE_FLT_FN (BUILT_IN_FMOD):
15178 	CASE_FLT_FN (BUILT_IN_FREXP):
15179 	CASE_FLT_FN (BUILT_IN_ICEIL):
15180 	CASE_FLT_FN (BUILT_IN_IFLOOR):
15181 	CASE_FLT_FN (BUILT_IN_IRINT):
15182 	CASE_FLT_FN (BUILT_IN_IROUND):
15183 	CASE_FLT_FN (BUILT_IN_LCEIL):
15184 	CASE_FLT_FN (BUILT_IN_LDEXP):
15185 	CASE_FLT_FN (BUILT_IN_LFLOOR):
15186 	CASE_FLT_FN (BUILT_IN_LLCEIL):
15187 	CASE_FLT_FN (BUILT_IN_LLFLOOR):
15188 	CASE_FLT_FN (BUILT_IN_LLRINT):
15189 	CASE_FLT_FN (BUILT_IN_LLROUND):
15190 	CASE_FLT_FN (BUILT_IN_LRINT):
15191 	CASE_FLT_FN (BUILT_IN_LROUND):
15192 	CASE_FLT_FN (BUILT_IN_MODF):
15193 	CASE_FLT_FN (BUILT_IN_NEARBYINT):
15194 	CASE_FLT_FN (BUILT_IN_RINT):
15195 	CASE_FLT_FN (BUILT_IN_ROUND):
15196 	CASE_FLT_FN (BUILT_IN_SCALB):
15197 	CASE_FLT_FN (BUILT_IN_SCALBLN):
15198 	CASE_FLT_FN (BUILT_IN_SCALBN):
15199 	CASE_FLT_FN (BUILT_IN_SIGNBIT):
15200 	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15201 	CASE_FLT_FN (BUILT_IN_SINH):
15202 	CASE_FLT_FN (BUILT_IN_TANH):
15203 	CASE_FLT_FN (BUILT_IN_TRUNC):
15204 	/* True if the 1st argument is nonnegative.  */
15205 	return tree_expr_nonnegative_warnv_p (arg0,
15206 					      strict_overflow_p);
15207 
15208 	CASE_FLT_FN (BUILT_IN_FMAX):
15209 	/* True if either the 1st or the 2nd argument is nonnegative.  */
15210 	return (tree_expr_nonnegative_warnv_p (arg0,
15211 					       strict_overflow_p)
15212 		|| (tree_expr_nonnegative_warnv_p (arg1,
15213 						   strict_overflow_p)));
15214 
15215 	CASE_FLT_FN (BUILT_IN_FMIN):
15216 	/* True if the 1st AND 2nd arguments are nonnegative.  */
15217 	return (tree_expr_nonnegative_warnv_p (arg0,
15218 					       strict_overflow_p)
15219 		&& (tree_expr_nonnegative_warnv_p (arg1,
15220 						   strict_overflow_p)));
15221 
15222 	CASE_FLT_FN (BUILT_IN_COPYSIGN):
15223 	/* True if the 2nd argument is nonnegative.  */
15224 	return tree_expr_nonnegative_warnv_p (arg1,
15225 					      strict_overflow_p);
15226 
15227 	CASE_FLT_FN (BUILT_IN_POWI):
15228 	/* True if the 1st argument is nonnegative or the second
15229 	   argument is an even integer.  */
15230 	if (TREE_CODE (arg1) == INTEGER_CST
15231 	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15232 	  return true;
15233 	return tree_expr_nonnegative_warnv_p (arg0,
15234 					      strict_overflow_p);
15235 
15236 	CASE_FLT_FN (BUILT_IN_POW):
15237 	/* True if the 1st argument is nonnegative or the second
15238 	   argument is an even integer valued real.  */
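	/* E.g. pow (x, 4.0) is treated as non-negative for any x,
	   because 4.0 is an even integer-valued real.  */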
15239 	if (TREE_CODE (arg1) == REAL_CST)
15240 	  {
15241 	    REAL_VALUE_TYPE c;
15242 	    HOST_WIDE_INT n;
15243 
15244 	    c = TREE_REAL_CST (arg1);
15245 	    n = real_to_integer (&c);
15246 	    if ((n & 1) == 0)
15247 	      {
15248 		REAL_VALUE_TYPE cint;
15249 		real_from_integer (&cint, VOIDmode, n,
15250 				   n < 0 ? -1 : 0, 0);
15251 		if (real_identical (&c, &cint))
15252 		  return true;
15253 	      }
15254 	  }
15255 	return tree_expr_nonnegative_warnv_p (arg0,
15256 					      strict_overflow_p);
15257 
15258       default:
15259 	break;
15260       }
15261   return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15262 					  type);
15263 }
15264 
15265 /* Return true if T is known to be non-negative.  If the return
15266    value is based on the assumption that signed overflow is undefined,
15267    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15268    *STRICT_OVERFLOW_P.  */
15269 
15270 bool
15271 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15272 {
15273   enum tree_code code = TREE_CODE (t);
15274   if (TYPE_UNSIGNED (TREE_TYPE (t)))
15275     return true;
15276 
15277   switch (code)
15278     {
15279     case TARGET_EXPR:
15280       {
15281 	tree temp = TARGET_EXPR_SLOT (t);
15282 	t = TARGET_EXPR_INITIAL (t);
15283 
15284 	/* If the initializer is non-void, then it's a normal expression
15285 	   that will be assigned to the slot.  */
15286 	if (!VOID_TYPE_P (t))
15287 	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15288 
15289 	/* Otherwise, the initializer sets the slot in some way.  One common
15290 	   way is an assignment statement at the end of the initializer.  */
15291 	while (1)
15292 	  {
15293 	    if (TREE_CODE (t) == BIND_EXPR)
15294 	      t = expr_last (BIND_EXPR_BODY (t));
15295 	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15296 		     || TREE_CODE (t) == TRY_CATCH_EXPR)
15297 	      t = expr_last (TREE_OPERAND (t, 0));
15298 	    else if (TREE_CODE (t) == STATEMENT_LIST)
15299 	      t = expr_last (t);
15300 	    else
15301 	      break;
15302 	  }
15303 	if (TREE_CODE (t) == MODIFY_EXPR
15304 	    && TREE_OPERAND (t, 0) == temp)
15305 	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15306 						strict_overflow_p);
15307 
15308 	return false;
15309       }
15310 
15311     case CALL_EXPR:
15312       {
15313 	tree arg0 = call_expr_nargs (t) > 0 ?  CALL_EXPR_ARG (t, 0) : NULL_TREE;
15314 	tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;
15315 
15316 	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15317 					      get_callee_fndecl (t),
15318 					      arg0,
15319 					      arg1,
15320 					      strict_overflow_p);
15321       }
15322     case COMPOUND_EXPR:
15323     case MODIFY_EXPR:
15324       return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15325 					    strict_overflow_p);
15326     case BIND_EXPR:
15327       return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15328 					    strict_overflow_p);
15329     case SAVE_EXPR:
15330       return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15331 					    strict_overflow_p);
15332 
15333     default:
15334       return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15335 						   TREE_TYPE (t));
15336     }
15337 
15338   /* We don't know sign of `t', so be conservative and return false.  */
15339   return false;
15340 }
15341 
15342 /* Return true if T is known to be non-negative.  If the return
15343    value is based on the assumption that signed overflow is undefined,
15344    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15345    *STRICT_OVERFLOW_P.  */
15346 
15347 bool
15348 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15349 {
15350   enum tree_code code;
15351   if (t == error_mark_node)
15352     return false;
15353 
15354   code = TREE_CODE (t);
15355   switch (TREE_CODE_CLASS (code))
15356     {
15357     case tcc_binary:
15358     case tcc_comparison:
15359       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15360 					      TREE_TYPE (t),
15361 					      TREE_OPERAND (t, 0),
15362 					      TREE_OPERAND (t, 1),
15363 					      strict_overflow_p);
15364 
15365     case tcc_unary:
15366       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15367 					     TREE_TYPE (t),
15368 					     TREE_OPERAND (t, 0),
15369 					     strict_overflow_p);
15370 
15371     case tcc_constant:
15372     case tcc_declaration:
15373     case tcc_reference:
15374       return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15375 
15376     default:
15377       break;
15378     }
15379 
15380   switch (code)
15381     {
15382     case TRUTH_AND_EXPR:
15383     case TRUTH_OR_EXPR:
15384     case TRUTH_XOR_EXPR:
15385       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15386 					      TREE_TYPE (t),
15387 					      TREE_OPERAND (t, 0),
15388 					      TREE_OPERAND (t, 1),
15389 					      strict_overflow_p);
15390     case TRUTH_NOT_EXPR:
15391       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15392 					     TREE_TYPE (t),
15393 					     TREE_OPERAND (t, 0),
15394 					     strict_overflow_p);
15395 
15396     case COND_EXPR:
15397     case CONSTRUCTOR:
15398     case OBJ_TYPE_REF:
15399     case ASSERT_EXPR:
15400     case ADDR_EXPR:
15401     case WITH_SIZE_EXPR:
15402     case SSA_NAME:
15403       return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15404 
15405     default:
15406       return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15407     }
15408 }
15409 
15410 /* Return true if `t' is known to be non-negative.  Handle warnings
15411    about undefined signed overflow.  */
15412 
15413 bool
15414 tree_expr_nonnegative_p (tree t)
15415 {
15416   bool ret, strict_overflow_p;
15417 
15418   strict_overflow_p = false;
15419   ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15420   if (strict_overflow_p)
15421     fold_overflow_warning (("assuming signed overflow does not occur when "
15422 			    "determining that expression is always "
15423 			    "non-negative"),
15424 			   WARN_STRICT_OVERFLOW_MISC);
15425   return ret;
15426 }
15427 
15428 
15429 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15430    For floating point we further ensure that T is not denormal.
15431    Similar logic is present in nonzero_address_p in rtlanal.c.
15432 
15433    If the return value is based on the assumption that signed overflow
15434    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15435    change *STRICT_OVERFLOW_P.  */
15436 
15437 bool
15438 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15439 				 bool *strict_overflow_p)
15440 {
15441   switch (code)
15442     {
15443     case ABS_EXPR:
15444       return tree_expr_nonzero_warnv_p (op0,
15445 					strict_overflow_p);
15446 
15447     case NOP_EXPR:
15448       {
15449 	tree inner_type = TREE_TYPE (op0);
15450 	tree outer_type = type;
15451 
15452 	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15453 		&& tree_expr_nonzero_warnv_p (op0,
15454 					      strict_overflow_p));
15455       }
15456       break;
15457 
15458     case NON_LVALUE_EXPR:
15459       return tree_expr_nonzero_warnv_p (op0,
15460 					strict_overflow_p);
15461 
15462     default:
15463       break;
15464   }
15465 
15466   return false;
15467 }
15468 
15469 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15470    For floating point we further ensure that T is not denormal.
15471    Similar logic is present in nonzero_address_p in rtlanal.c.
15472 
15473    If the return value is based on the assumption that signed overflow
15474    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15475    change *STRICT_OVERFLOW_P.  */
15476 
15477 bool
15478 tree_binary_nonzero_warnv_p (enum tree_code code,
15479 			     tree type,
15480 			     tree op0,
15481 			     tree op1, bool *strict_overflow_p)
15482 {
15483   bool sub_strict_overflow_p;
15484   switch (code)
15485     {
15486     case POINTER_PLUS_EXPR:
15487     case PLUS_EXPR:
15488       if (TYPE_OVERFLOW_UNDEFINED (type))
15489 	{
15490 	  /* In the presence of negative values it is hard
15491 	     to say anything definite.  */
15492 	  sub_strict_overflow_p = false;
15493 	  if (!tree_expr_nonnegative_warnv_p (op0,
15494 					      &sub_strict_overflow_p)
15495 	      || !tree_expr_nonnegative_warnv_p (op1,
15496 						 &sub_strict_overflow_p))
15497 	    return false;
15498 	  /* One of the operands must be positive and the other non-negative.  */
15499 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
15500 	     overflows, on a twos-complement machine the sum of two
15501 	     nonnegative numbers can never be zero.  */
15502 	  return (tree_expr_nonzero_warnv_p (op0,
15503 					     strict_overflow_p)
15504 		  || tree_expr_nonzero_warnv_p (op1,
15505 						strict_overflow_p));
15506 	}
15507       break;
15508 
15509     case MULT_EXPR:
15510       if (TYPE_OVERFLOW_UNDEFINED (type))
15511 	{
15512 	  if (tree_expr_nonzero_warnv_p (op0,
15513 					 strict_overflow_p)
15514 	      && tree_expr_nonzero_warnv_p (op1,
15515 					    strict_overflow_p))
15516 	    {
15517 	      *strict_overflow_p = true;
15518 	      return true;
15519 	    }
15520 	}
15521       break;
15522 
15523     case MIN_EXPR:
15524       sub_strict_overflow_p = false;
15525       if (tree_expr_nonzero_warnv_p (op0,
15526 				     &sub_strict_overflow_p)
15527 	  && tree_expr_nonzero_warnv_p (op1,
15528 					&sub_strict_overflow_p))
15529 	{
15530 	  if (sub_strict_overflow_p)
15531 	    *strict_overflow_p = true;
15532 	}
15533       break;
15534 
15535     case MAX_EXPR:
15536       sub_strict_overflow_p = false;
15537       if (tree_expr_nonzero_warnv_p (op0,
15538 				     &sub_strict_overflow_p))
15539 	{
15540 	  if (sub_strict_overflow_p)
15541 	    *strict_overflow_p = true;
15542 
15543 	  /* When both operands are nonzero, MAX must be too.  */
15544 	  if (tree_expr_nonzero_warnv_p (op1,
15545 					 strict_overflow_p))
15546 	    return true;
15547 
15548 	  /* MAX where operand 0 is positive is positive.  */
15549 	  return tree_expr_nonnegative_warnv_p (op0,
15550 					       strict_overflow_p);
15551 	}
15552       /* MAX where operand 1 is positive is positive.  */
15553       else if (tree_expr_nonzero_warnv_p (op1,
15554 					  &sub_strict_overflow_p)
15555 	       && tree_expr_nonnegative_warnv_p (op1,
15556 						 &sub_strict_overflow_p))
15557 	{
15558 	  if (sub_strict_overflow_p)
15559 	    *strict_overflow_p = true;
15560 	  return true;
15561 	}
15562       break;
15563 
15564     case BIT_IOR_EXPR:
15565       return (tree_expr_nonzero_warnv_p (op1,
15566 					 strict_overflow_p)
15567 	      || tree_expr_nonzero_warnv_p (op0,
15568 					    strict_overflow_p));
15569 
15570     default:
15571       break;
15572   }
15573 
15574   return false;
15575 }
15576 
15577 /* Return true when T is an address and is known to be nonzero.
15578    For floating point we further ensure that T is not denormal.
15579    Similar logic is present in nonzero_address_p in rtlanal.c.
15580 
15581    If the return value is based on the assumption that signed overflow
15582    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15583    change *STRICT_OVERFLOW_P.  */
15584 
15585 bool
15586 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15587 {
15588   bool sub_strict_overflow_p;
15589   switch (TREE_CODE (t))
15590     {
15591     case INTEGER_CST:
15592       return !integer_zerop (t);
15593 
15594     case ADDR_EXPR:
15595       {
15596 	tree base = TREE_OPERAND (t, 0);
15597 	if (!DECL_P (base))
15598 	  base = get_base_address (base);
15599 
15600 	if (!base)
15601 	  return false;
15602 
15603 	/* Weak declarations may link to NULL.  Other things may also be
15604 	   NULL, so protect with -fdelete-null-pointer-checks; but this is
15605 	   not needed for variables allocated on the stack.  */
15606 	if (DECL_P (base)
15607 	    && (flag_delete_null_pointer_checks
15608 		|| (DECL_CONTEXT (base)
15609 		    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15610 		    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15611 	  return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15612 
15613 	/* Constants are never weak.  */
15614 	if (CONSTANT_CLASS_P (base))
15615 	  return true;
15616 
15617 	return false;
15618       }
15619 
15620     case COND_EXPR:
15621       sub_strict_overflow_p = false;
15622       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15623 				     &sub_strict_overflow_p)
15624 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15625 					&sub_strict_overflow_p))
15626 	{
15627 	  if (sub_strict_overflow_p)
15628 	    *strict_overflow_p = true;
15629 	  return true;
15630 	}
15631       break;
15632 
15633     default:
15634       break;
15635     }
15636   return false;
15637 }
15638 
15639 /* Return true when T is an address and is known to be nonzero.
15640    For floating point we further ensure that T is not denormal.
15641    Similar logic is present in nonzero_address_p in rtlanal.c.
15642 
15643    If the return value is based on the assumption that signed overflow
15644    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15645    change *STRICT_OVERFLOW_P.  */
15646 
15647 bool
15648 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15649 {
15650   tree type = TREE_TYPE (t);
15651   enum tree_code code;
15652 
15653   /* Doing something useful for floating point would need more work.  */
15654   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15655     return false;
15656 
15657   code = TREE_CODE (t);
15658   switch (TREE_CODE_CLASS (code))
15659     {
15660     case tcc_unary:
15661       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15662 					      strict_overflow_p);
15663     case tcc_binary:
15664     case tcc_comparison:
15665       return tree_binary_nonzero_warnv_p (code, type,
15666 					       TREE_OPERAND (t, 0),
15667 					       TREE_OPERAND (t, 1),
15668 					       strict_overflow_p);
15669     case tcc_constant:
15670     case tcc_declaration:
15671     case tcc_reference:
15672       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15673 
15674     default:
15675       break;
15676     }
15677 
15678   switch (code)
15679     {
15680     case TRUTH_NOT_EXPR:
15681       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15682 					      strict_overflow_p);
15683 
15684     case TRUTH_AND_EXPR:
15685     case TRUTH_OR_EXPR:
15686     case TRUTH_XOR_EXPR:
15687       return tree_binary_nonzero_warnv_p (code, type,
15688 					       TREE_OPERAND (t, 0),
15689 					       TREE_OPERAND (t, 1),
15690 					       strict_overflow_p);
15691 
15692     case COND_EXPR:
15693     case CONSTRUCTOR:
15694     case OBJ_TYPE_REF:
15695     case ASSERT_EXPR:
15696     case ADDR_EXPR:
15697     case WITH_SIZE_EXPR:
15698     case SSA_NAME:
15699       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15700 
15701     case COMPOUND_EXPR:
15702     case MODIFY_EXPR:
15703     case BIND_EXPR:
15704       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15705 					strict_overflow_p);
15706 
15707     case SAVE_EXPR:
15708       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15709 					strict_overflow_p);
15710 
15711     case CALL_EXPR:
15712       return alloca_call_p (t);
15713 
15714     default:
15715       break;
15716     }
15717   return false;
15718 }
15719 
15720 /* Return true when T is an address and is known to be nonzero.
15721    Handle warnings about undefined signed overflow.  */
15722 
15723 bool
15724 tree_expr_nonzero_p (tree t)
15725 {
15726   bool ret, strict_overflow_p;
15727 
15728   strict_overflow_p = false;
15729   ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15730   if (strict_overflow_p)
15731     fold_overflow_warning (("assuming signed overflow does not occur when "
15732 			    "determining that expression is always "
15733 			    "non-zero"),
15734 			   WARN_STRICT_OVERFLOW_MISC);
15735   return ret;
15736 }
15737 
15738 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15739    attempt to fold the expression to a constant without modifying TYPE,
15740    OP0 or OP1.
15741 
15742    If the expression could be simplified to a constant, then return
15743    the constant.  If the expression would not be simplified to a
15744    constant, then return NULL_TREE.  */
15745 
15746 tree
15747 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15748 {
15749   tree tem = fold_binary (code, type, op0, op1);
15750   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15751 }
15752 
15753 /* Given the components of a unary expression CODE, TYPE and OP0,
15754    attempt to fold the expression to a constant without modifying
15755    TYPE or OP0.
15756 
15757    If the expression could be simplified to a constant, then return
15758    the constant.  If the expression would not be simplified to a
15759    constant, then return NULL_TREE.  */
15760 
15761 tree
15762 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15763 {
15764   tree tem = fold_unary (code, type, op0);
15765   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15766 }
15767 
15768 /* If EXP represents referencing an element in a constant string
15769    (either via pointer arithmetic or array indexing), return the
15770    tree representing the value accessed, otherwise return NULL.  */
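/* E.g. both "abc"[1] and *("abc" + 1) fold to the character
   constant 'b'.  */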
15771 
15772 tree
15773 fold_read_from_constant_string (tree exp)
15774 {
15775   if ((TREE_CODE (exp) == INDIRECT_REF
15776        || TREE_CODE (exp) == ARRAY_REF)
15777       && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15778     {
15779       tree exp1 = TREE_OPERAND (exp, 0);
15780       tree index;
15781       tree string;
15782       location_t loc = EXPR_LOCATION (exp);
15783 
15784       if (TREE_CODE (exp) == INDIRECT_REF)
15785 	string = string_constant (exp1, &index);
15786       else
15787 	{
15788 	  tree low_bound = array_ref_low_bound (exp);
15789 	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15790 
15791 	  /* Optimize the special-case of a zero lower bound.
15792 
15793 	     We convert the low_bound to sizetype to avoid some problems
15794 	     with constant folding.  (E.g. suppose the lower bound is 1,
15795 	     and its mode is QI.  Without the conversion, (ARRAY
15796 	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15797 	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
15798 	  if (! integer_zerop (low_bound))
15799 	    index = size_diffop_loc (loc, index,
15800 				 fold_convert_loc (loc, sizetype, low_bound));
15801 
15802 	  string = exp1;
15803 	}
15804 
15805       if (string
15806 	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15807 	  && TREE_CODE (string) == STRING_CST
15808 	  && TREE_CODE (index) == INTEGER_CST
15809 	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15810 	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15811 	      == MODE_INT)
15812 	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15813 	return build_int_cst_type (TREE_TYPE (exp),
15814 				   (TREE_STRING_POINTER (string)
15815 				    [TREE_INT_CST_LOW (index)]));
15816     }
15817   return NULL;
15818 }
15819 
15820 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15821    an integer constant, real, or fixed-point constant.
15822 
15823    TYPE is the type of the result.  */
15824 
15825 static tree
15826 fold_negate_const (tree arg0, tree type)
15827 {
15828   tree t = NULL_TREE;
15829 
15830   switch (TREE_CODE (arg0))
15831     {
15832     case INTEGER_CST:
15833       {
15834 	double_int val = tree_to_double_int (arg0);
15835 	int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15836 
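	/* If negating the value overflowed (i.e. ARG0 was TYPE's most
	   negative value) or ARG0 was already marked with overflow,
	   record the overflow on the signed result.  */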
15837 	t = force_fit_type_double (type, val, 1,
15838 				   (overflow | TREE_OVERFLOW (arg0))
15839 				   && !TYPE_UNSIGNED (type));
15840 	break;
15841       }
15842 
15843     case REAL_CST:
15844       t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15845       break;
15846 
15847     case FIXED_CST:
15848       {
15849         FIXED_VALUE_TYPE f;
15850         bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15851 					    &(TREE_FIXED_CST (arg0)), NULL,
15852 					    TYPE_SATURATING (type));
15853 	t = build_fixed (type, f);
15854 	/* Propagate overflow flags.  */
15855 	if (overflow_p | TREE_OVERFLOW (arg0))
15856 	  TREE_OVERFLOW (t) = 1;
15857 	break;
15858       }
15859 
15860     default:
15861       gcc_unreachable ();
15862     }
15863 
15864   return t;
15865 }
15866 
15867 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15868    an integer constant or real constant.
15869 
15870    TYPE is the type of the result.  */
15871 
15872 tree
15873 fold_abs_const (tree arg0, tree type)
15874 {
15875   tree t = NULL_TREE;
15876 
15877   switch (TREE_CODE (arg0))
15878     {
15879     case INTEGER_CST:
15880       {
15881 	double_int val = tree_to_double_int (arg0);
15882 
15883         /* If the value is unsigned or non-negative, then the absolute value
15884 	   is the same as the ordinary value.  */
15885 	if (TYPE_UNSIGNED (type)
15886 	    || !double_int_negative_p (val))
15887 	  t = arg0;
15888 
15889 	/* If the value is negative, then the absolute value is
15890 	   its negation.  */
15891 	else
15892 	  {
15893 	    int overflow;
15894 
15895 	    overflow = neg_double (val.low, val.high, &val.low, &val.high);
15896 	    t = force_fit_type_double (type, val, -1,
15897 				       overflow | TREE_OVERFLOW (arg0));
15898 	  }
15899       }
15900       break;
15901 
15902     case REAL_CST:
15903       if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15904 	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15905       else
15906 	t = arg0;
15907       break;
15908 
15909     default:
15910       gcc_unreachable ();
15911     }
15912 
15913   return t;
15914 }
15915 
15916 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15917    constant.  TYPE is the type of the result.  */
15918 
15919 static tree
15920 fold_not_const (const_tree arg0, tree type)
15921 {
15922   double_int val;
15923 
15924   gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15925 
15926   val = double_int_not (tree_to_double_int (arg0));
15927   return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15928 }
15929 
15930 /* Given CODE, a relational operator, the target type, TYPE and two
15931    constant operands OP0 and OP1, return the result of the
15932    relational operation.  If the result is not a compile time
15933    constant, then return NULL_TREE.  */
15934 
15935 static tree
15936 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15937 {
15938   int result, invert;
15939 
15940   /* From here on, the only cases we handle are when the result is
15941      known to be a constant.  */
15942 
15943   if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15944     {
15945       const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15946       const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15947 
15948       /* Handle the cases where either operand is a NaN.  */
15949       if (real_isnan (c0) || real_isnan (c1))
15950 	{
15951 	  switch (code)
15952 	    {
15953 	    case EQ_EXPR:
15954 	    case ORDERED_EXPR:
15955 	      result = 0;
15956 	      break;
15957 
15958 	    case NE_EXPR:
15959 	    case UNORDERED_EXPR:
15960 	    case UNLT_EXPR:
15961 	    case UNLE_EXPR:
15962 	    case UNGT_EXPR:
15963 	    case UNGE_EXPR:
15964 	    case UNEQ_EXPR:
15965               result = 1;
15966 	      break;
15967 
15968 	    case LT_EXPR:
15969 	    case LE_EXPR:
15970 	    case GT_EXPR:
15971 	    case GE_EXPR:
15972 	    case LTGT_EXPR:
15973 	      if (flag_trapping_math)
15974 		return NULL_TREE;
15975 	      result = 0;
15976 	      break;
15977 
15978 	    default:
15979 	      gcc_unreachable ();
15980 	    }
15981 
15982 	  return constant_boolean_node (result, type);
15983 	}
15984 
15985       return constant_boolean_node (real_compare (code, c0, c1), type);
15986     }
15987 
15988   if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15989     {
15990       const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15991       const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15992       return constant_boolean_node (fixed_compare (code, c0, c1), type);
15993     }
15994 
15995   /* Handle equality/inequality of complex constants.  */
15996   if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15997     {
15998       tree rcond = fold_relational_const (code, type,
15999 					  TREE_REALPART (op0),
16000 					  TREE_REALPART (op1));
16001       tree icond = fold_relational_const (code, type,
16002 					  TREE_IMAGPART (op0),
16003 					  TREE_IMAGPART (op1));
16004       if (code == EQ_EXPR)
16005 	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16006       else if (code == NE_EXPR)
16007 	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16008       else
16009 	return NULL_TREE;
16010     }
16011 
16012   /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16013 
16014      To compute GT, swap the arguments and do LT.
16015      To compute GE, do LT and invert the result.
16016      To compute LE, swap the arguments, do LT and invert the result.
16017      To compute NE, do EQ and invert the result.
16018 
16019      Therefore, the code below must handle only EQ and LT.  */
16020 
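  /* E.g. to fold 5 >= 3, GE is rewritten as the inverse of LT:
     5 < 3 yields 0, and inverting that result gives 1 (true).  */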
16021   if (code == LE_EXPR || code == GT_EXPR)
16022     {
16023       tree tem = op0;
16024       op0 = op1;
16025       op1 = tem;
16026       code = swap_tree_comparison (code);
16027     }
16028 
16029   /* Note that it is safe to invert for real values here because we
16030      have already handled the one case where it matters.  */
16031 
16032   invert = 0;
16033   if (code == NE_EXPR || code == GE_EXPR)
16034     {
16035       invert = 1;
16036       code = invert_tree_comparison (code, false);
16037     }
16038 
16039   /* Compute a result for LT or EQ if args permit;
16040      otherwise return NULL_TREE.  */
16041   if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16042     {
16043       if (code == EQ_EXPR)
16044 	result = tree_int_cst_equal (op0, op1);
16045       else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16046 	result = INT_CST_LT_UNSIGNED (op0, op1);
16047       else
16048 	result = INT_CST_LT (op0, op1);
16049     }
16050   else
16051     return NULL_TREE;
16052 
16053   if (invert)
16054     result ^= 1;
16055   return constant_boolean_node (result, type);
16056 }
16057 
16058 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16059    indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
16060    itself.  */
16061 
16062 tree
16063 fold_build_cleanup_point_expr (tree type, tree expr)
16064 {
16065   /* If the expression does not have side effects then we don't have to wrap
16066      it with a cleanup point expression.  */
16067   if (!TREE_SIDE_EFFECTS (expr))
16068     return expr;
16069 
16070   /* If the expression is a return, check whether the expression inside
16071      the return, or the right-hand side of the modify expression inside
16072      the return, is free of side effects; if so, we don't need to wrap
16073      the expression in a cleanup point expression.  Note we don't check
16074      the left-hand side of the modify because it should always be a return decl.  */
16075   if (TREE_CODE (expr) == RETURN_EXPR)
16076     {
16077       tree op = TREE_OPERAND (expr, 0);
16078       if (!op || !TREE_SIDE_EFFECTS (op))
16079         return expr;
16080       op = TREE_OPERAND (op, 1);
16081       if (!TREE_SIDE_EFFECTS (op))
16082         return expr;
16083     }
16084 
16085   return build1 (CLEANUP_POINT_EXPR, type, expr);
16086 }
16087 
16088 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16089    of an indirection through OP0, or NULL_TREE if no simplification is
16090    possible.  */
16091 
16092 tree
16093 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16094 {
16095   tree sub = op0;
16096   tree subtype;
16097 
16098   STRIP_NOPS (sub);
16099   subtype = TREE_TYPE (sub);
16100   if (!POINTER_TYPE_P (subtype))
16101     return NULL_TREE;
16102 
16103   if (TREE_CODE (sub) == ADDR_EXPR)
16104     {
16105       tree op = TREE_OPERAND (sub, 0);
16106       tree optype = TREE_TYPE (op);
16107       /* *&CONST_DECL -> to the value of the const decl.  */
16108       if (TREE_CODE (op) == CONST_DECL)
16109 	return DECL_INITIAL (op);
16110       /* *&p => p;  make sure to handle *&"str"[cst] here.  */
16111       if (type == optype)
16112 	{
16113 	  tree fop = fold_read_from_constant_string (op);
16114 	  if (fop)
16115 	    return fop;
16116 	  else
16117 	    return op;
16118 	}
16119       /* *(foo *)&fooarray => fooarray[0] */
16120       else if (TREE_CODE (optype) == ARRAY_TYPE
16121 	       && type == TREE_TYPE (optype)
16122 	       && (!in_gimple_form
16123 		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16124 	{
16125 	  tree type_domain = TYPE_DOMAIN (optype);
16126 	  tree min_val = size_zero_node;
16127 	  if (type_domain && TYPE_MIN_VALUE (type_domain))
16128 	    min_val = TYPE_MIN_VALUE (type_domain);
16129 	  if (in_gimple_form
16130 	      && TREE_CODE (min_val) != INTEGER_CST)
16131 	    return NULL_TREE;
16132 	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
16133 			     NULL_TREE, NULL_TREE);
16134 	}
16135       /* *(foo *)&complexfoo => __real__ complexfoo */
16136       else if (TREE_CODE (optype) == COMPLEX_TYPE
16137 	       && type == TREE_TYPE (optype))
16138 	return fold_build1_loc (loc, REALPART_EXPR, type, op);
16139       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16140       else if (TREE_CODE (optype) == VECTOR_TYPE
16141 	       && type == TREE_TYPE (optype))
16142 	{
16143 	  tree part_width = TYPE_SIZE (type);
16144 	  tree index = bitsize_int (0);
16145 	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16146 	}
16147     }
16148 
16149   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16150       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16151     {
16152       tree op00 = TREE_OPERAND (sub, 0);
16153       tree op01 = TREE_OPERAND (sub, 1);
16154 
16155       STRIP_NOPS (op00);
16156       if (TREE_CODE (op00) == ADDR_EXPR)
16157 	{
16158 	  tree op00type;
16159 	  op00 = TREE_OPERAND (op00, 0);
16160 	  op00type = TREE_TYPE (op00);
16161 
16162 	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16163 	  if (TREE_CODE (op00type) == VECTOR_TYPE
16164 	      && type == TREE_TYPE (op00type))
16165 	    {
16166 	      HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16167 	      tree part_width = TYPE_SIZE (type);
16168 	      unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16169 	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16170 	      tree index = bitsize_int (indexi);
16171 
16172 	      if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
16173 		return fold_build3_loc (loc,
16174 					BIT_FIELD_REF, type, op00,
16175 					part_width, index);
16176 
16177 	    }
16178 	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16179 	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
16180 		   && type == TREE_TYPE (op00type))
16181 	    {
16182 	      tree size = TYPE_SIZE_UNIT (type);
16183 	      if (tree_int_cst_equal (size, op01))
16184 		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16185 	    }
16186 	  /* ((foo *)&fooarray)[1] => fooarray[1] */
16187 	  else if (TREE_CODE (op00type) == ARRAY_TYPE
16188 		   && type == TREE_TYPE (op00type))
16189 	    {
16190 	      tree type_domain = TYPE_DOMAIN (op00type);
16191 	      tree min_val = size_zero_node;
16192 	      if (type_domain && TYPE_MIN_VALUE (type_domain))
16193 		min_val = TYPE_MIN_VALUE (type_domain);
16194 	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16195 				     TYPE_SIZE_UNIT (type));
16196 	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16197 	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
16198 				 NULL_TREE, NULL_TREE);
16199 	    }
16200 	}
16201     }
16202 
16203   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16204   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16205       && type == TREE_TYPE (TREE_TYPE (subtype))
16206       && (!in_gimple_form
16207 	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16208     {
16209       tree type_domain;
16210       tree min_val = size_zero_node;
16211       sub = build_fold_indirect_ref_loc (loc, sub);
16212       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16213       if (type_domain && TYPE_MIN_VALUE (type_domain))
16214 	min_val = TYPE_MIN_VALUE (type_domain);
16215       if (in_gimple_form
16216 	  && TREE_CODE (min_val) != INTEGER_CST)
16217 	return NULL_TREE;
16218       return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16219 			 NULL_TREE);
16220     }
16221 
16222   return NULL_TREE;
16223 }
16224 
16225 /* Builds an expression for an indirection through T, simplifying some
16226    cases.  */
16227 
16228 tree
16229 build_fold_indirect_ref_loc (location_t loc, tree t)
16230 {
16231   tree type = TREE_TYPE (TREE_TYPE (t));
16232   tree sub = fold_indirect_ref_1 (loc, type, t);
16233 
16234   if (sub)
16235     return sub;
16236 
16237   return build1_loc (loc, INDIRECT_REF, type, t);
16238 }
16239 
16240 /* Given an INDIRECT_REF T, return either T or a simplified version.  */
16241 
16242 tree
16243 fold_indirect_ref_loc (location_t loc, tree t)
16244 {
16245   tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16246 
16247   if (sub)
16248     return sub;
16249   else
16250     return t;
16251 }
16252 
16253 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16254    whose result is ignored.  The type of the returned tree need not be
16255    the same as the original expression.  */
16256 
16257 tree
16258 fold_ignored_result (tree t)
16259 {
16260   if (!TREE_SIDE_EFFECTS (t))
16261     return integer_zero_node;
16262 
16263   for (;;)
16264     switch (TREE_CODE_CLASS (TREE_CODE (t)))
16265       {
16266       case tcc_unary:
16267 	t = TREE_OPERAND (t, 0);
16268 	break;
16269 
16270       case tcc_binary:
16271       case tcc_comparison:
16272 	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16273 	  t = TREE_OPERAND (t, 0);
16274 	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16275 	  t = TREE_OPERAND (t, 1);
16276 	else
16277 	  return t;
16278 	break;
16279 
16280       case tcc_expression:
16281 	switch (TREE_CODE (t))
16282 	  {
16283 	  case COMPOUND_EXPR:
16284 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16285 	      return t;
16286 	    t = TREE_OPERAND (t, 0);
16287 	    break;
16288 
16289 	  case COND_EXPR:
16290 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16291 		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16292 	      return t;
16293 	    t = TREE_OPERAND (t, 0);
16294 	    break;
16295 
16296 	  default:
16297 	    return t;
16298 	  }
16299 	break;
16300 
16301       default:
16302 	return t;
16303       }
16304 }
16305 
16306 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16307    This can only be applied to objects of a sizetype.  */
16308 
16309 tree
16310 round_up_loc (location_t loc, tree value, int divisor)
16311 {
16312   tree div = NULL_TREE;
16313 
16314   gcc_assert (divisor > 0);
16315   if (divisor == 1)
16316     return value;
16317 
16318   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16319      have to do anything.  Only do this when VALUE is not a constant,
16320      because for a constant this check is more expensive than simply
16321      performing the rounding.  */
16322   if (TREE_CODE (value) != INTEGER_CST)
16323     {
16324       div = build_int_cst (TREE_TYPE (value), divisor);
16325 
16326       if (multiple_of_p (TREE_TYPE (value), value, div))
16327 	return value;
16328     }
16329 
16330   /* If divisor is a power of two, simplify this to bit manipulation.  */
16331   if (divisor == (divisor & -divisor))
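  /* E.g. rounding up to a multiple of 8 becomes (value + 7) & -8;
     the test below works because (divisor & -divisor) isolates the
     lowest set bit, which equals divisor only for powers of two.  */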
16332     {
16333       if (TREE_CODE (value) == INTEGER_CST)
16334 	{
16335 	  double_int val = tree_to_double_int (value);
16336 	  bool overflow_p;
16337 
16338 	  if ((val.low & (divisor - 1)) == 0)
16339 	    return value;
16340 
16341 	  overflow_p = TREE_OVERFLOW (value);
16342 	  val.low &= ~(divisor - 1);
16343 	  val.low += divisor;
16344 	  if (val.low == 0)
16345 	    {
16346 	      val.high++;
16347 	      if (val.high == 0)
16348 		overflow_p = true;
16349 	    }
16350 
16351 	  return force_fit_type_double (TREE_TYPE (value), val,
16352 					-1, overflow_p);
16353 	}
16354       else
16355 	{
16356 	  tree t;
16357 
16358 	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
16359 	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
16360 	  t = build_int_cst (TREE_TYPE (value), -divisor);
16361 	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16362 	}
16363     }
16364   else
16365     {
16366       if (!div)
16367 	div = build_int_cst (TREE_TYPE (value), divisor);
16368       value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16369       value = size_binop_loc (loc, MULT_EXPR, value, div);
16370     }
16371 
16372   return value;
16373 }
16374 
16375 /* Likewise, but round down.  */
16376 
16377 tree
16378 round_down_loc (location_t loc, tree value, int divisor)
16379 {
16380   tree div = NULL_TREE;
16381 
16382   gcc_assert (divisor > 0);
16383   if (divisor == 1)
16384     return value;
16385 
16386   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16387      have to do anything.  Only do this when VALUE is not a constant,
16388      because for a constant this check is more expensive than simply
16389      performing the rounding.  */
16390   if (TREE_CODE (value) != INTEGER_CST)
16391     {
16392       div = build_int_cst (TREE_TYPE (value), divisor);
16393 
16394       if (multiple_of_p (TREE_TYPE (value), value, div))
16395 	return value;
16396     }
16397 
16398   /* If divisor is a power of two, simplify this to bit manipulation.  */
16399   if (divisor == (divisor & -divisor))
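  /* E.g. rounding down to a multiple of 8 is simply value & -8.  */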
16400     {
16401       tree t;
16402 
16403       t = build_int_cst (TREE_TYPE (value), -divisor);
16404       value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16405     }
16406   else
16407     {
16408       if (!div)
16409 	div = build_int_cst (TREE_TYPE (value), divisor);
16410       value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16411       value = size_binop_loc (loc, MULT_EXPR, value, div);
16412     }
16413 
16414   return value;
16415 }
16416 
16417 /* Returns the pointer to the base of the object addressed by EXP and
16418    extracts the information about the offset of the access, storing it
16419    to PBITPOS and POFFSET.  */
16420 
16421 static tree
16422 split_address_to_core_and_offset (tree exp,
16423 				  HOST_WIDE_INT *pbitpos, tree *poffset)
16424 {
16425   tree core;
16426   enum machine_mode mode;
16427   int unsignedp, volatilep;
16428   HOST_WIDE_INT bitsize;
16429   location_t loc = EXPR_LOCATION (exp);
16430 
16431   if (TREE_CODE (exp) == ADDR_EXPR)
16432     {
16433       core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16434 				  poffset, &mode, &unsignedp, &volatilep,
16435 				  false);
16436       core = build_fold_addr_expr_loc (loc, core);
16437     }
16438   else
16439     {
16440       core = exp;
16441       *pbitpos = 0;
16442       *poffset = NULL_TREE;
16443     }
16444 
16445   return core;
16446 }
16447 
16448 /* Returns true if addresses of E1 and E2 differ by a constant, false
16449    otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
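/* E.g. for E1 == &a[3] and E2 == &a[1] this returns true and stores
   2 * sizeof (a[0]) (the difference in bytes) in *DIFF.  */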
16450 
16451 bool
16452 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16453 {
16454   tree core1, core2;
16455   HOST_WIDE_INT bitpos1, bitpos2;
16456   tree toffset1, toffset2, tdiff, type;
16457 
16458   core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16459   core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16460 
16461   if (bitpos1 % BITS_PER_UNIT != 0
16462       || bitpos2 % BITS_PER_UNIT != 0
16463       || !operand_equal_p (core1, core2, 0))
16464     return false;
16465 
16466   if (toffset1 && toffset2)
16467     {
16468       type = TREE_TYPE (toffset1);
16469       if (type != TREE_TYPE (toffset2))
16470 	toffset2 = fold_convert (type, toffset2);
16471 
16472       tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16473       if (!cst_and_fits_in_hwi (tdiff))
16474 	return false;
16475 
16476       *diff = int_cst_value (tdiff);
16477     }
16478   else if (toffset1 || toffset2)
16479     {
16480       /* If only one of the offsets is non-constant, the difference cannot
16481 	 be a constant.  */
16482       return false;
16483     }
16484   else
16485     *diff = 0;
16486 
16487   *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16488   return true;
16489 }
16490 
16491 /* Simplify the floating point expression EXP when the sign of the
16492    result is not significant.  Return NULL_TREE if no simplification
16493    is possible.  */
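/* E.g. a caller folding fabs (-x * y) can use this to operate on
   x * y instead, since the enclosing fabs makes the intermediate
   signs irrelevant.  */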
16494 
16495 tree
16496 fold_strip_sign_ops (tree exp)
16497 {
16498   tree arg0, arg1;
16499   location_t loc = EXPR_LOCATION (exp);
16500 
16501   switch (TREE_CODE (exp))
16502     {
16503     case ABS_EXPR:
16504     case NEGATE_EXPR:
16505       arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16506       return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16507 
16508     case MULT_EXPR:
16509     case RDIV_EXPR:
16510       if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16511 	return NULL_TREE;
16512       arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16513       arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16514       if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16515 	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16516 			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
16517 			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
16518       break;
16519 
16520     case COMPOUND_EXPR:
16521       arg0 = TREE_OPERAND (exp, 0);
16522       arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16523       if (arg1)
16524 	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16525       break;
16526 
16527     case COND_EXPR:
16528       arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16529       arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16530       if (arg0 || arg1)
16531 	return fold_build3_loc (loc,
16532 			    COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16533 			    arg0 ? arg0 : TREE_OPERAND (exp, 1),
16534 			    arg1 ? arg1 : TREE_OPERAND (exp, 2));
16535       break;
16536 
16537     case CALL_EXPR:
16538       {
16539 	const enum built_in_function fcode = builtin_mathfn_code (exp);
16540 	switch (fcode)
16541 	{
16542 	CASE_FLT_FN (BUILT_IN_COPYSIGN):
16543 	  /* Strip the copysign function call and return the 1st argument.  */
16544 	  arg0 = CALL_EXPR_ARG (exp, 0);
16545 	  arg1 = CALL_EXPR_ARG (exp, 1);
16546 	  return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16547 
16548 	default:
16549 	  /* Strip sign ops from the argument of "odd" math functions.  */
16550 	  if (negate_mathfn_p (fcode))
16551             {
16552 	      arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16553 	      if (arg0)
16554 		return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16555 	    }
16556 	  break;
16557 	}
16558       }
16559       break;
16560 
16561     default:
16562       break;
16563     }
16564   return NULL_TREE;
16565 }
16566