/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
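
/* Illustration (editor's sketch, not part of the original file): the four
   low bits stand for LT, EQ, GT and UNORD, so OR-ing codes models OR-ing
   conditions.  For example:

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)                  (1|2 == 3)
     COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD) (1|4|8 == 13)
     COMPCODE_TRUE has all four bits set                         (15)

   so "a < b || a == b" can be folded by OR-ing the two compcodes and
   mapping the result (COMPCODE_LE) back to a tree code.  */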

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc != UNKNOWN_LOCATION ? tloc : loc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}


/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
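
/* Illustration (editor's sketch, not part of the original file): with
   32-bit HOST_WIDE_INT, adding INT_MAX + 1 wraps to INT_MIN, and the
   macro reports the overflow from the sign bits alone:

     a = 0x7fffffff, b = 1, sum = 0x80000000
     ~(a ^ b)  has the sign bit set (a and b share a sign),
     (a ^ sum) has the sign bit set (a and sum differ),

   so their AND is negative and OVERFLOW_SUM_SIGN (a, b, sum) is nonzero.  */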

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The signedness of the division is taken from operand two, which
     does the correct thing for POINTER_PLUS_EXPR, where we want a
     signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
			   tree_to_double_int (arg2),
			   uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
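
/* Illustration (editor's sketch, not part of the original file): only
   exact divisions are folded, e.g.

     div_if_zero_remainder (TRUNC_DIV_EXPR, <12>, <4>)  -> <3>
     div_if_zero_remainder (TRUNC_DIV_EXPR, <13>, <4>)  -> NULL_TREE

   where <N> stands for an INTEGER_CST node with value N.  */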

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
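
/* Illustration (editor's sketch, not part of the original file): a typical
   caller brackets speculative folding with the deferral API and issues the
   pending warning only when the folded result is actually used
   (RESULT_IS_USED standing for the caller's own test):

     fold_defer_overflow_warnings ();
     t = fold (expr);
     if (RESULT_IS_USED)
       fold_undefer_overflow_warnings (true, stmt, 0);
     else
       fold_undefer_and_ignore_overflow_warnings ();  */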

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
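
/* Illustration (editor's sketch, not part of the original file): in a
   signed type only the minimum value has no representable negation,
   e.g. for 32-bit int:

     may_negate_without_overflow_p (<-2147483648>)  -> false  (-INT_MIN wraps)
     may_negate_without_overflow_p (<2147483647>)   -> true

   which is exactly the "val == 1 << (prec - 1)" test above.  */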

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
			    fold_negate_expr (loc, TREE_OPERAND (t, 0)),
			    fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
			    fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				  TREE_OPERAND (t, 0), negate_expr (tem));
	    }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				  negate_expr (tem), TREE_OPERAND (t, 1));
	    }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
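
/* Illustration (editor's sketch, not part of the original file): the
   RSHIFT_EXPR case above relies on signed right shifts filling with
   copies of the sign bit, as GCC assumes.  For 32-bit int:

     (int) x >> 31       is 0 or -1  (arithmetic shift of the sign bit),
     (unsigned) x >> 31  is 0 or  1  (logical shift),

   and negating the first yields the second, so -((int)x >> 31) folds to
   (unsigned)x >> 31 without a NEGATE_EXPR.  */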

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "Constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except for a literal,
   for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
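
/* Illustration (editor's sketch, not part of the original file): for
   CODE == PLUS_EXPR and IN == "C - 5", where C has TREE_CONSTANT set
   (say, an ADDR_EXPR of a global) but is not a literal, the split is

     *litp = 0, *minus_litp = <5>, *conp = C,

   and the returned variable part is null; the subtracted literal lands
   in *MINUS_LITP rather than being wrapped in a NEGATE_EXPR.  */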

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
			       TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
			       TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
				TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
				TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
			     &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
		  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
			     &res.low, &res.high);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    op1.low += op2.low - 1;

	  res.low = op1.low / op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
	return NULL_TREE;
      if (double_int_one_p (op2))
	{
	  res = op1;
	  break;
	}
      if (double_int_equal_p (op1, op2)
	  && ! double_int_zero_p (op1))
	{
	  res = double_int_one;
	  break;
	}
      overflow = div_and_round_double (code, uns,
				       op1.low, op1.high, op2.low, op2.high,
				       &res.low, &res.high,
				       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    op1.low += op2.low - 1;
	  res.low = op1.low % op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       op1.low, op1.high, op2.low, op2.high,
				       &tmp.low, &tmp.high,
				       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
			     ((!uns || is_sizetype) && overflow)
			     | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
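
/* Illustration (editor's sketch, not part of the original file): the
   double_int arithmetic tracks overflow, and force_fit_type_double sets
   TREE_OVERFLOW only for signed (or sizetype) results, e.g.

     int_const_binop (PLUS_EXPR, <2>, <3>)        -> <5>
     int_const_binop (TRUNC_DIV_EXPR, <7>, <0>)   -> NULL_TREE
     int_const_binop (PLUS_EXPR, <INT_MAX>, <1>)  -> <INT_MIN>, with
                                                     TREE_OVERFLOW set.  */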

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

        default:
	  return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ... */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	  {
	    /* Keep this algorithm in sync with
	       tree-complex.c:expand_complex_div_straight().

	       Expand complex division to scalars, straightforward algorithm.
	       a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
	       t = br*br + bi*bi
	    */
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2),
			     const_binop (MULT_EXPR, i2, i2));
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2),
			     const_binop (MULT_EXPR, i1, i2));
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2),
			     const_binop (MULT_EXPR, r1, i2));

	    real = const_binop (code, t1, magsquared);
	    imag = const_binop (code, t2, magsquared);
	  }
	  else
	  {
	    /* Keep this algorithm in sync with
               tree-complex.c:expand_complex_div_wide().

	       Expand complex division to scalars, modified algorithm to minimize
	       overflow with wide input ranges.  */
	    tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					fold_abs_const (r2, TREE_TYPE (type)),
					fold_abs_const (i2, TREE_TYPE (type)));

	    if (integer_nonzerop (compare))
	      {
		/* In the TRUE branch, we compute
		   ratio = br/bi;
		   div = (br * ratio) + bi;
		   tr = (ar * ratio) + ai;
		   ti = (ai * ratio) - ar;
		   tr = tr / div;
		   ti = ti / div;  */
		tree ratio = const_binop (code, r2, i2);
		tree div = const_binop (PLUS_EXPR, i2,
					const_binop (MULT_EXPR, r2, ratio));
		real = const_binop (MULT_EXPR, r1, ratio);
		real = const_binop (PLUS_EXPR, real, i1);
		real = const_binop (code, real, div);

		imag = const_binop (MULT_EXPR, i1, ratio);
		imag = const_binop (MINUS_EXPR, imag, r1);
		imag = const_binop (code, imag, div);
	      }
	    else
	      {
		/* In the FALSE branch, we compute
		   ratio = bi/br;
		   div = (bi * ratio) + br;
		   tr = (ai * ratio) + ar;
		   ti = ai - (ar * ratio);
		   tr = tr / div;
		   ti = ti / div;  */
		tree ratio = const_binop (code, i2, r2);
		tree div = const_binop (PLUS_EXPR, r2,
                                        const_binop (MULT_EXPR, i2, ratio));

		real = const_binop (MULT_EXPR, i1, ratio);
		real = const_binop (PLUS_EXPR, real, r1);
		real = const_binop (code, real, div);

		imag = const_binop (MULT_EXPR, r1, ratio);
		imag = const_binop (MINUS_EXPR, i1, imag);
		imag = const_binop (code, imag, div);
	      }
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
	{
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated
	     as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
	}
      return build_vector (type, nreverse (list));
    }
  return NULL_TREE;
}
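
/* Illustration (editor's sketch, not part of the original file): the
   flag_complex_method != 0 division path above scales by the
   larger-magnitude component of the divisor to avoid overflow.  For
   (1 + 2i) / (3 + 4i), |br| < |bi|, so the TRUE branch runs:

     ratio = 3 / 4 = 0.75
     div   = 4 + 3 * 0.75 = 6.25
     tr    = (1 * 0.75 + 2) / 6.25 = 0.44
     ti    = (2 * 0.75 - 1) / 6.25 = 0.08

   which matches (1 + 2i) * (3 - 4i) / 25 = 0.44 + 0.08i.  */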

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in the signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
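
/* Illustration (editor's sketch, not part of the original file): both
   operands are unsigned (e.g. sizetype), but the difference comes back
   in the corresponding signed type so it can be negative:

     size_diffop (<4>, <8>)  ->  <-4> of type ssizetype,

   computed as 0 - (8 - 4) in the signed type, so the unsigned
   subtraction never wraps.  */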

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
			     !POINTER_TYPE_P (TREE_TYPE (arg1)),
			     (TREE_INT_CST_HIGH (arg1) < 0
		 	      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  val = tree_to_double_int (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      val = tree_to_double_int (ut);
	    }
	}
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
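
/* Illustration (editor's sketch, not part of the original file): the
   Java-style saturating semantics above give, for a 32-bit int target
   type:

     (int) 3.9   -> 3         (truncation toward zero)
     (int) -1.5  -> -1
     (int) 1e30  -> INT_MAX   with TREE_OVERFLOW set
     (int) NaN   -> 0         with TREE_OVERFLOW set.  */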

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
			        HOST_BITS_PER_DOUBLE_INT,
			        SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
				      HOST_BITS_PER_DOUBLE_INT,
				      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0:
     if any fractional bits were truncated, add 1 to TEMP.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
			     (double_int_negative_p (temp)
		 	      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
1668   /* Regular overflow: the conversion produced an infinity in a mode
1669      that can't represent it.  */
1670   else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1671 	   && REAL_VALUE_ISINF (value)
1672 	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1673     TREE_OVERFLOW (t) = 1;
1674   else
1675     TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1676   return t;
1677 }
1678 
1679 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1680    to a floating point type.  */
1681 
1682 static tree
1683 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1684 {
1685   REAL_VALUE_TYPE value;
1686   tree t;
1687 
1688   real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1689   t = build_real (type, value);
1690 
1691   TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1692   return t;
1693 }
1694 
1695 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1696    to another fixed-point type.  */
1697 
1698 static tree
1699 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1700 {
1701   FIXED_VALUE_TYPE value;
1702   tree t;
1703   bool overflow_p;
1704 
1705   overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1706 			      TYPE_SATURATING (type));
1707   t = build_fixed (type, value);
1708 
1709   /* Propagate overflow flags.  */
1710   if (overflow_p | TREE_OVERFLOW (arg1))
1711     TREE_OVERFLOW (t) = 1;
1712   return t;
1713 }
1714 
1715 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1716    to a fixed-point type.  */
1717 
1718 static tree
1719 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1720 {
1721   FIXED_VALUE_TYPE value;
1722   tree t;
1723   bool overflow_p;
1724 
1725   overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1726 				       TREE_INT_CST (arg1),
1727 				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
1728 				       TYPE_SATURATING (type));
1729   t = build_fixed (type, value);
1730 
1731   /* Propagate overflow flags.  */
1732   if (overflow_p | TREE_OVERFLOW (arg1))
1733     TREE_OVERFLOW (t) = 1;
1734   return t;
1735 }
1736 
1737 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1738    to a fixed-point type.  */
1739 
1740 static tree
1741 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1742 {
1743   FIXED_VALUE_TYPE value;
1744   tree t;
1745   bool overflow_p;
1746 
1747   overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1748 					&TREE_REAL_CST (arg1),
1749 					TYPE_SATURATING (type));
1750   t = build_fixed (type, value);
1751 
1752   /* Propagate overflow flags.  */
1753   if (overflow_p | TREE_OVERFLOW (arg1))
1754     TREE_OVERFLOW (t) = 1;
1755   return t;
1756 }
1757 
1758 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1759    type TYPE.  If no simplification can be done return NULL_TREE.  */
1760 
1761 static tree
1762 fold_convert_const (enum tree_code code, tree type, tree arg1)
1763 {
1764   if (TREE_TYPE (arg1) == type)
1765     return arg1;
1766 
1767   if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1768       || TREE_CODE (type) == OFFSET_TYPE)
1769     {
1770       if (TREE_CODE (arg1) == INTEGER_CST)
1771 	return fold_convert_const_int_from_int (type, arg1);
1772       else if (TREE_CODE (arg1) == REAL_CST)
1773 	return fold_convert_const_int_from_real (code, type, arg1);
1774       else if (TREE_CODE (arg1) == FIXED_CST)
1775 	return fold_convert_const_int_from_fixed (type, arg1);
1776     }
1777   else if (TREE_CODE (type) == REAL_TYPE)
1778     {
1779       if (TREE_CODE (arg1) == INTEGER_CST)
1780 	return build_real_from_int_cst (type, arg1);
1781       else if (TREE_CODE (arg1) == REAL_CST)
1782 	return fold_convert_const_real_from_real (type, arg1);
1783       else if (TREE_CODE (arg1) == FIXED_CST)
1784 	return fold_convert_const_real_from_fixed (type, arg1);
1785     }
1786   else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1787     {
1788       if (TREE_CODE (arg1) == FIXED_CST)
1789 	return fold_convert_const_fixed_from_fixed (type, arg1);
1790       else if (TREE_CODE (arg1) == INTEGER_CST)
1791 	return fold_convert_const_fixed_from_int (type, arg1);
1792       else if (TREE_CODE (arg1) == REAL_CST)
1793 	return fold_convert_const_fixed_from_real (type, arg1);
1794     }
1795   return NULL_TREE;
1796 }
1797 
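/* Illustrative sketch, not part of the original source: a caller in
   this file folding the constant conversion

     tree t = fold_convert_const (FIX_TRUNC_EXPR, integer_type_node,
				  build_real (double_type_node, dconst2));

   is routed to fold_convert_const_int_from_real and gets back the
   INTEGER_CST 2, while an unhandled combination simply yields
   NULL_TREE.  */
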
1798 /* Construct a vector of zero elements of vector type TYPE.  */
1799 
1800 static tree
1801 build_zero_vector (tree type)
1802 {
1803   tree t;
1804 
1805   t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1806   return build_vector_from_val (type, t);
1807 }
1808 
1809 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
1810 
1811 bool
1812 fold_convertible_p (const_tree type, const_tree arg)
1813 {
1814   tree orig = TREE_TYPE (arg);
1815 
1816   if (type == orig)
1817     return true;
1818 
1819   if (TREE_CODE (arg) == ERROR_MARK
1820       || TREE_CODE (type) == ERROR_MARK
1821       || TREE_CODE (orig) == ERROR_MARK)
1822     return false;
1823 
1824   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1825     return true;
1826 
1827   switch (TREE_CODE (type))
1828     {
1829     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1830     case POINTER_TYPE: case REFERENCE_TYPE:
1831     case OFFSET_TYPE:
1832       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1833 	  || TREE_CODE (orig) == OFFSET_TYPE)
1834         return true;
1835       return (TREE_CODE (orig) == VECTOR_TYPE
1836 	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1837 
1838     case REAL_TYPE:
1839     case FIXED_POINT_TYPE:
1840     case COMPLEX_TYPE:
1841     case VECTOR_TYPE:
1842     case VOID_TYPE:
1843       return TREE_CODE (type) == TREE_CODE (orig);
1844 
1845     default:
1846       return false;
1847     }
1848 }
1849 
1850 /* Convert expression ARG to type TYPE.  Used by the middle-end for
1851    simple conversions in preference to calling the front-end's convert.  */
1852 
1853 tree
1854 fold_convert_loc (location_t loc, tree type, tree arg)
1855 {
1856   tree orig = TREE_TYPE (arg);
1857   tree tem;
1858 
1859   if (type == orig)
1860     return arg;
1861 
1862   if (TREE_CODE (arg) == ERROR_MARK
1863       || TREE_CODE (type) == ERROR_MARK
1864       || TREE_CODE (orig) == ERROR_MARK)
1865     return error_mark_node;
1866 
1867   switch (TREE_CODE (type))
1868     {
1869     case POINTER_TYPE:
1870     case REFERENCE_TYPE:
1871       /* Handle conversions between pointers to different address spaces.  */
1872       if (POINTER_TYPE_P (orig)
1873 	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1874 	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1875 	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1876       /* fall through */
1877 
1878     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1879     case OFFSET_TYPE:
1880       if (TREE_CODE (arg) == INTEGER_CST)
1881 	{
1882 	  tem = fold_convert_const (NOP_EXPR, type, arg);
1883 	  if (tem != NULL_TREE)
1884 	    return tem;
1885 	}
1886       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1887 	  || TREE_CODE (orig) == OFFSET_TYPE)
1888 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
1889       if (TREE_CODE (orig) == COMPLEX_TYPE)
1890 	return fold_convert_loc (loc, type,
1891 			     fold_build1_loc (loc, REALPART_EXPR,
1892 					  TREE_TYPE (orig), arg));
1893       gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1894 		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1895       return fold_build1_loc (loc, NOP_EXPR, type, arg);
1896 
1897     case REAL_TYPE:
1898       if (TREE_CODE (arg) == INTEGER_CST)
1899 	{
1900 	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
1901 	  if (tem != NULL_TREE)
1902 	    return tem;
1903 	}
1904       else if (TREE_CODE (arg) == REAL_CST)
1905 	{
1906 	  tem = fold_convert_const (NOP_EXPR, type, arg);
1907 	  if (tem != NULL_TREE)
1908 	    return tem;
1909 	}
1910       else if (TREE_CODE (arg) == FIXED_CST)
1911 	{
1912 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1913 	  if (tem != NULL_TREE)
1914 	    return tem;
1915 	}
1916 
1917       switch (TREE_CODE (orig))
1918 	{
1919 	case INTEGER_TYPE:
1920 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1921 	case POINTER_TYPE: case REFERENCE_TYPE:
1922 	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1923 
1924 	case REAL_TYPE:
1925 	  return fold_build1_loc (loc, NOP_EXPR, type, arg);
1926 
1927 	case FIXED_POINT_TYPE:
1928 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1929 
1930 	case COMPLEX_TYPE:
1931 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1932 	  return fold_convert_loc (loc, type, tem);
1933 
1934 	default:
1935 	  gcc_unreachable ();
1936 	}
1937 
1938     case FIXED_POINT_TYPE:
1939       if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1940 	  || TREE_CODE (arg) == REAL_CST)
1941 	{
1942 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1943 	  if (tem != NULL_TREE)
1944 	    goto fold_convert_exit;
1945 	}
1946 
1947       switch (TREE_CODE (orig))
1948 	{
1949 	case FIXED_POINT_TYPE:
1950 	case INTEGER_TYPE:
1951 	case ENUMERAL_TYPE:
1952 	case BOOLEAN_TYPE:
1953 	case REAL_TYPE:
1954 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1955 
1956 	case COMPLEX_TYPE:
1957 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1958 	  return fold_convert_loc (loc, type, tem);
1959 
1960 	default:
1961 	  gcc_unreachable ();
1962 	}
1963 
1964     case COMPLEX_TYPE:
1965       switch (TREE_CODE (orig))
1966 	{
1967 	case INTEGER_TYPE:
1968 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1969 	case POINTER_TYPE: case REFERENCE_TYPE:
1970 	case REAL_TYPE:
1971 	case FIXED_POINT_TYPE:
1972 	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
1973 			      fold_convert_loc (loc, TREE_TYPE (type), arg),
1974 			      fold_convert_loc (loc, TREE_TYPE (type),
1975 					    integer_zero_node));
1976 	case COMPLEX_TYPE:
1977 	  {
1978 	    tree rpart, ipart;
1979 
1980 	    if (TREE_CODE (arg) == COMPLEX_EXPR)
1981 	      {
1982 		rpart = fold_convert_loc (loc, TREE_TYPE (type),
1983 				      TREE_OPERAND (arg, 0));
1984 		ipart = fold_convert_loc (loc, TREE_TYPE (type),
1985 				      TREE_OPERAND (arg, 1));
1986 		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1987 	      }
1988 
1989 	    arg = save_expr (arg);
1990 	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1991 	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1992 	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1993 	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
1994 	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1995 	  }
1996 
1997 	default:
1998 	  gcc_unreachable ();
1999 	}
2000 
2001     case VECTOR_TYPE:
2002       if (integer_zerop (arg))
2003 	return build_zero_vector (type);
2004       gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2005       gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2006 		  || TREE_CODE (orig) == VECTOR_TYPE);
2007       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2008 
2009     case VOID_TYPE:
2010       tem = fold_ignored_result (arg);
2011       return fold_build1_loc (loc, NOP_EXPR, type, tem);
2012 
2013     default:
2014       if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2015 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2016       gcc_unreachable ();
2017     }
2018  fold_convert_exit:
2019   protected_set_expr_location_unshare (tem, loc);
2020   return tem;
2021 }
2022 
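/* Illustrative sketch, not part of the original source: converting a
   REAL_TYPE expression X to a COMPLEX_TYPE through fold_convert_loc
   builds

     COMPLEX_EXPR <(T) X, (T) 0>

   where T is the element type of the complex type, while converting a
   COMPLEX_TYPE expression to a scalar type keeps only its
   REALPART_EXPR.  */
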
2023 /* Return false if expr can be assumed not to be an lvalue, true
2024    otherwise.  */
2025 
2026 static bool
2027 maybe_lvalue_p (const_tree x)
2028 {
2029   /* We only need to wrap lvalue tree codes.  */
2030   switch (TREE_CODE (x))
2031   {
2032   case VAR_DECL:
2033   case PARM_DECL:
2034   case RESULT_DECL:
2035   case LABEL_DECL:
2036   case FUNCTION_DECL:
2037   case SSA_NAME:
2038 
2039   case COMPONENT_REF:
2040   case MEM_REF:
2041   case INDIRECT_REF:
2042   case ARRAY_REF:
2043   case ARRAY_RANGE_REF:
2044   case BIT_FIELD_REF:
2045   case OBJ_TYPE_REF:
2046 
2047   case REALPART_EXPR:
2048   case IMAGPART_EXPR:
2049   case PREINCREMENT_EXPR:
2050   case PREDECREMENT_EXPR:
2051   case SAVE_EXPR:
2052   case TRY_CATCH_EXPR:
2053   case WITH_CLEANUP_EXPR:
2054   case COMPOUND_EXPR:
2055   case MODIFY_EXPR:
2056   case TARGET_EXPR:
2057   case COND_EXPR:
2058   case BIND_EXPR:
2059     break;
2060 
2061   default:
2062     /* Assume the worst for front-end tree codes.  */
2063     if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2064       break;
2065     return false;
2066   }
2067 
2068   return true;
2069 }
2070 
2071 /* Return an expr equal to X but certainly not valid as an lvalue.  */
2072 
2073 tree
2074 non_lvalue_loc (location_t loc, tree x)
2075 {
2076   /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2077      us.  */
2078   if (in_gimple_form)
2079     return x;
2080 
2081   if (! maybe_lvalue_p (x))
2082     return x;
2083   return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2084 }
2085 
2086 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2087    Zero means allow extended lvalues.  */
2088 
2089 int pedantic_lvalues;
2090 
2091 /* When pedantic, return an expr equal to X but certainly not valid as a
2092    pedantic lvalue.  Otherwise, return X.  */
2093 
2094 static tree
2095 pedantic_non_lvalue_loc (location_t loc, tree x)
2096 {
2097   if (pedantic_lvalues)
2098     return non_lvalue_loc (loc, x);
2099 
2100   return protected_set_expr_location_unshare (x, loc);
2101 }
2102 
2103 /* Given a tree comparison code, return the code that is the logical inverse.
2104    It is generally not safe to do this for floating-point comparisons, except
2105    for EQ_EXPR and NE_EXPR, so we return ERROR_MARK in this case.  */
2106 
2107 enum tree_code
2108 invert_tree_comparison (enum tree_code code, bool honor_nans)
2109 {
2110   if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR)
2111     return ERROR_MARK;
2112 
2113   switch (code)
2114     {
2115     case EQ_EXPR:
2116       return NE_EXPR;
2117     case NE_EXPR:
2118       return EQ_EXPR;
2119     case GT_EXPR:
2120       return honor_nans ? UNLE_EXPR : LE_EXPR;
2121     case GE_EXPR:
2122       return honor_nans ? UNLT_EXPR : LT_EXPR;
2123     case LT_EXPR:
2124       return honor_nans ? UNGE_EXPR : GE_EXPR;
2125     case LE_EXPR:
2126       return honor_nans ? UNGT_EXPR : GT_EXPR;
2127     case LTGT_EXPR:
2128       return UNEQ_EXPR;
2129     case UNEQ_EXPR:
2130       return LTGT_EXPR;
2131     case UNGT_EXPR:
2132       return LE_EXPR;
2133     case UNGE_EXPR:
2134       return LT_EXPR;
2135     case UNLT_EXPR:
2136       return GE_EXPR;
2137     case UNLE_EXPR:
2138       return GT_EXPR;
2139     case ORDERED_EXPR:
2140       return UNORDERED_EXPR;
2141     case UNORDERED_EXPR:
2142       return ORDERED_EXPR;
2143     default:
2144       gcc_unreachable ();
2145     }
2146 }
2147 
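/* Illustrative sketch, not part of the original source: with NaNs
   honored (and trapping math disabled, so the early return above is
   not taken), invert_tree_comparison (LT_EXPR, true) returns
   UNGE_EXPR, because !(x < y) must stay true when either operand is a
   NaN; without NaNs it returns the plain GE_EXPR.  */
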
2148 /* Similar, but return the comparison that results if the operands are
2149    swapped.  This is safe for floating-point.  */
2150 
2151 enum tree_code
2152 swap_tree_comparison (enum tree_code code)
2153 {
2154   switch (code)
2155     {
2156     case EQ_EXPR:
2157     case NE_EXPR:
2158     case ORDERED_EXPR:
2159     case UNORDERED_EXPR:
2160     case LTGT_EXPR:
2161     case UNEQ_EXPR:
2162       return code;
2163     case GT_EXPR:
2164       return LT_EXPR;
2165     case GE_EXPR:
2166       return LE_EXPR;
2167     case LT_EXPR:
2168       return GT_EXPR;
2169     case LE_EXPR:
2170       return GE_EXPR;
2171     case UNGT_EXPR:
2172       return UNLT_EXPR;
2173     case UNGE_EXPR:
2174       return UNLE_EXPR;
2175     case UNLT_EXPR:
2176       return UNGT_EXPR;
2177     case UNLE_EXPR:
2178       return UNGE_EXPR;
2179     default:
2180       gcc_unreachable ();
2181     }
2182 }
2183 
2184 
2185 /* Convert a comparison tree code from an enum tree_code representation
2186    into a compcode bit-based encoding.  This function is the inverse of
2187    compcode_to_comparison.  */
2188 
2189 static enum comparison_code
2190 comparison_to_compcode (enum tree_code code)
2191 {
2192   switch (code)
2193     {
2194     case LT_EXPR:
2195       return COMPCODE_LT;
2196     case EQ_EXPR:
2197       return COMPCODE_EQ;
2198     case LE_EXPR:
2199       return COMPCODE_LE;
2200     case GT_EXPR:
2201       return COMPCODE_GT;
2202     case NE_EXPR:
2203       return COMPCODE_NE;
2204     case GE_EXPR:
2205       return COMPCODE_GE;
2206     case ORDERED_EXPR:
2207       return COMPCODE_ORD;
2208     case UNORDERED_EXPR:
2209       return COMPCODE_UNORD;
2210     case UNLT_EXPR:
2211       return COMPCODE_UNLT;
2212     case UNEQ_EXPR:
2213       return COMPCODE_UNEQ;
2214     case UNLE_EXPR:
2215       return COMPCODE_UNLE;
2216     case UNGT_EXPR:
2217       return COMPCODE_UNGT;
2218     case LTGT_EXPR:
2219       return COMPCODE_LTGT;
2220     case UNGE_EXPR:
2221       return COMPCODE_UNGE;
2222     default:
2223       gcc_unreachable ();
2224     }
2225 }
2226 
2227 /* Convert a compcode bit-based encoding of a comparison operator back
2228    to GCC's enum tree_code representation.  This function is the
2229    inverse of comparison_to_compcode.  */
2230 
2231 static enum tree_code
2232 compcode_to_comparison (enum comparison_code code)
2233 {
2234   switch (code)
2235     {
2236     case COMPCODE_LT:
2237       return LT_EXPR;
2238     case COMPCODE_EQ:
2239       return EQ_EXPR;
2240     case COMPCODE_LE:
2241       return LE_EXPR;
2242     case COMPCODE_GT:
2243       return GT_EXPR;
2244     case COMPCODE_NE:
2245       return NE_EXPR;
2246     case COMPCODE_GE:
2247       return GE_EXPR;
2248     case COMPCODE_ORD:
2249       return ORDERED_EXPR;
2250     case COMPCODE_UNORD:
2251       return UNORDERED_EXPR;
2252     case COMPCODE_UNLT:
2253       return UNLT_EXPR;
2254     case COMPCODE_UNEQ:
2255       return UNEQ_EXPR;
2256     case COMPCODE_UNLE:
2257       return UNLE_EXPR;
2258     case COMPCODE_UNGT:
2259       return UNGT_EXPR;
2260     case COMPCODE_LTGT:
2261       return LTGT_EXPR;
2262     case COMPCODE_UNGE:
2263       return UNGE_EXPR;
2264     default:
2265       gcc_unreachable ();
2266     }
2267 }
2268 
2269 /* Return a tree for the comparison which is the combination of
2270    doing the AND or OR (depending on CODE) of the two operations LCODE
2271    and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2272    the possibility of trapping if the mode has NaNs, and return NULL_TREE
2273    if this makes the transformation invalid.  */
2274 
2275 tree
2276 combine_comparisons (location_t loc,
2277 		     enum tree_code code, enum tree_code lcode,
2278 		     enum tree_code rcode, tree truth_type,
2279 		     tree ll_arg, tree lr_arg)
2280 {
2281   bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2282   enum comparison_code lcompcode = comparison_to_compcode (lcode);
2283   enum comparison_code rcompcode = comparison_to_compcode (rcode);
2284   int compcode;
2285 
2286   switch (code)
2287     {
2288     case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2289       compcode = lcompcode & rcompcode;
2290       break;
2291 
2292     case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2293       compcode = lcompcode | rcompcode;
2294       break;
2295 
2296     default:
2297       return NULL_TREE;
2298     }
2299 
2300   if (!honor_nans)
2301     {
2302       /* Eliminate unordered comparisons, as well as LTGT and ORD
2303 	 which are not used unless the mode has NaNs.  */
2304       compcode &= ~COMPCODE_UNORD;
2305       if (compcode == COMPCODE_LTGT)
2306 	compcode = COMPCODE_NE;
2307       else if (compcode == COMPCODE_ORD)
2308 	compcode = COMPCODE_TRUE;
2309     }
2310    else if (flag_trapping_math)
2311      {
2312 	/* Check that the original operation and the optimized ones will trap
2313 	   under the same condition.  */
2314 	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2315 		     && (lcompcode != COMPCODE_EQ)
2316 		     && (lcompcode != COMPCODE_ORD);
2317 	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2318 		     && (rcompcode != COMPCODE_EQ)
2319 		     && (rcompcode != COMPCODE_ORD);
2320 	bool trap = (compcode & COMPCODE_UNORD) == 0
2321 		    && (compcode != COMPCODE_EQ)
2322 		    && (compcode != COMPCODE_ORD);
2323 
2324         /* In a short-circuited boolean expression the LHS might be
2325 	   such that the RHS, if evaluated, will never trap.  For
2326 	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
2327 	   if neither x nor y is NaN.  (This is a mixed blessing: for
2328 	   example, the expression above will never trap, hence
2329 	   optimizing it to x < y would be invalid).  */
2330         if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2331             || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2332           rtrap = false;
2333 
2334         /* If the comparison was short-circuited, and only the RHS
2335 	   trapped, we may now generate a spurious trap.  */
2336 	if (rtrap && !ltrap
2337 	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2338 	  return NULL_TREE;
2339 
2340 	/* If we changed the conditions that cause a trap, we lose.  */
2341 	if ((ltrap || rtrap) != trap)
2342 	  return NULL_TREE;
2343       }
2344 
2345   if (compcode == COMPCODE_TRUE)
2346     return constant_boolean_node (true, truth_type);
2347   else if (compcode == COMPCODE_FALSE)
2348     return constant_boolean_node (false, truth_type);
2349   else
2350     {
2351       enum tree_code tcode;
2352 
2353       tcode = compcode_to_comparison ((enum comparison_code) compcode);
2354       return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2355     }
2356 }
2357 
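/* Illustrative sketch, not part of the original source: the compcode
   encoding turns the combination above into plain bit arithmetic.
   For

     (x < y) || (x == y)

   combine_comparisons ORs COMPCODE_LT with COMPCODE_EQ, obtaining
   COMPCODE_LE, and emits the single comparison x <= y, provided the
   NaN and trap checks above do not veto the transformation.  */
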
2358 /* Return nonzero if two operands (typically of the same tree node)
2359    are necessarily equal.  If either argument has side-effects this
2360    function returns zero.  FLAGS modifies behavior as follows:
2361 
2362    If OEP_ONLY_CONST is set, only return nonzero for constants.
2363    This function tests whether the operands are indistinguishable;
2364    it does not test whether they are equal using C's == operation.
2365    The distinction is important for IEEE floating point, because
2366    (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2367    (2) two NaNs may be indistinguishable, but NaN!=NaN.
2368 
2369    If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2370    even though it may hold multiple values during a function.
2371    This is because a GCC tree node guarantees that nothing else is
2372    executed between the evaluation of its "operands" (which may often
2373    be evaluated in arbitrary order).  Hence if the operands themselves
2374    don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2375    same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2376    unset means assuming isochronic (or instantaneous) tree equivalence.
2377    Unless comparing arbitrary expression trees, such as from different
2378    statements, this flag can usually be left unset.
2379 
2380    If OEP_PURE_SAME is set, then pure functions with identical arguments
2381    are considered the same.  It is used when the caller has other ways
2382    to ensure that global memory is unchanged in between.  */
2383 
2384 int
2385 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2386 {
2387   /* If either is ERROR_MARK, they aren't equal.  */
2388   if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2389       || TREE_TYPE (arg0) == error_mark_node
2390       || TREE_TYPE (arg1) == error_mark_node)
2391     return 0;
2392 
2393   /* Similarly, if either does not have a type (like a released SSA name),
2394      they aren't equal.  */
2395   if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2396     return 0;
2397 
2398   /* Check equality of integer constants before bailing out due to
2399      precision differences.  */
2400   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2401     return tree_int_cst_equal (arg0, arg1);
2402 
2403   /* If both types don't have the same signedness, then we can't consider
2404      them equal.  We must check this before the STRIP_NOPS calls
2405      because they may change the signedness of the arguments.  As pointers
2406      strictly don't have a signedness, require either two pointers or
2407      two non-pointers as well.  */
2408   if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2409       || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2410     return 0;
2411 
2412   /* We cannot consider pointers to different address spaces equal.  */
2413   if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2414       && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2415 	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2416     return 0;
2417 
2418   /* If both types don't have the same precision, then it is not safe
2419      to strip NOPs.  */
2420   if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2421     return 0;
2422 
2423   STRIP_NOPS (arg0);
2424   STRIP_NOPS (arg1);
2425 
2426   /* In case both args are comparisons but with different comparison
2427      code, try to swap the comparison operands of one arg to produce
2428      a match and compare that variant.  */
2429   if (TREE_CODE (arg0) != TREE_CODE (arg1)
2430       && COMPARISON_CLASS_P (arg0)
2431       && COMPARISON_CLASS_P (arg1))
2432     {
2433       enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2434 
2435       if (TREE_CODE (arg0) == swap_code)
2436 	return operand_equal_p (TREE_OPERAND (arg0, 0),
2437 			        TREE_OPERAND (arg1, 1), flags)
2438 	       && operand_equal_p (TREE_OPERAND (arg0, 1),
2439 				   TREE_OPERAND (arg1, 0), flags);
2440     }
2441 
2442   if (TREE_CODE (arg0) != TREE_CODE (arg1)
2443       /* This is needed for conversions and for COMPONENT_REF.
2444 	 Might as well play it safe and always test this.  */
2445       || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2446       || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2447       || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2448     return 0;
2449 
2450   /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2451      We don't care about side effects in that case because the SAVE_EXPR
2452      takes care of that for us. In all other cases, two expressions are
2453      equal if they have no side effects.  If we have two identical
2454      expressions with side effects that should be treated the same due
2455      to the only side effects being identical SAVE_EXPR's, that will
2456      be detected in the recursive calls below.
2457      If we are taking an invariant address of two identical objects
2458      they are necessarily equal as well.  */
2459   if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2460       && (TREE_CODE (arg0) == SAVE_EXPR
2461 	  || (flags & OEP_CONSTANT_ADDRESS_OF)
2462 	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2463     return 1;
2464 
2465   /* Next handle constant cases, those for which we can return 1 even
2466      if ONLY_CONST is set.  */
2467   if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2468     switch (TREE_CODE (arg0))
2469       {
2470       case INTEGER_CST:
2471 	return tree_int_cst_equal (arg0, arg1);
2472 
2473       case FIXED_CST:
2474 	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2475 				       TREE_FIXED_CST (arg1));
2476 
2477       case REAL_CST:
2478 	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2479 				   TREE_REAL_CST (arg1)))
2480 	  return 1;
2481 
2482 
2483 	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2484 	  {
2485 	    /* If we do not distinguish between signed and unsigned zero,
2486 	       consider them equal.  */
2487 	    if (real_zerop (arg0) && real_zerop (arg1))
2488 	      return 1;
2489 	  }
2490 	return 0;
2491 
2492       case VECTOR_CST:
2493 	{
2494 	  tree v1, v2;
2495 
2496 	  v1 = TREE_VECTOR_CST_ELTS (arg0);
2497 	  v2 = TREE_VECTOR_CST_ELTS (arg1);
2498 	  while (v1 && v2)
2499 	    {
2500 	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2501 				    flags))
2502 		return 0;
2503 	      v1 = TREE_CHAIN (v1);
2504 	      v2 = TREE_CHAIN (v2);
2505 	    }
2506 
2507 	  return v1 == v2;
2508 	}
2509 
2510       case COMPLEX_CST:
2511 	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2512 				 flags)
2513 		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2514 				    flags));
2515 
2516       case STRING_CST:
2517 	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2518 		&& ! memcmp (TREE_STRING_POINTER (arg0),
2519 			      TREE_STRING_POINTER (arg1),
2520 			      TREE_STRING_LENGTH (arg0)));
2521 
2522       case ADDR_EXPR:
2523 	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2524 				TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2525 				? OEP_CONSTANT_ADDRESS_OF : 0);
2526       default:
2527 	break;
2528       }
2529 
2530   if (flags & OEP_ONLY_CONST)
2531     return 0;
2532 
2533 /* Define macros to test an operand from arg0 and arg1 for equality and a
2534    variant that allows null and views null as being different from any
2535    non-null value.  In the latter case, if either is null, then both
2536    must be; otherwise, do the normal comparison.  */
2537 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
2538 				    TREE_OPERAND (arg1, N), flags)
2539 
2540 #define OP_SAME_WITH_NULL(N)				\
2541   ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
2542    ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2543 
2544   switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2545     {
2546     case tcc_unary:
2547       /* Two conversions are equal only if signedness and modes match.  */
2548       switch (TREE_CODE (arg0))
2549         {
2550 	CASE_CONVERT:
2551         case FIX_TRUNC_EXPR:
2552 	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2553 	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2554 	    return 0;
2555 	  break;
2556 	default:
2557 	  break;
2558 	}
2559 
2560       return OP_SAME (0);
2561 
2562 
2563     case tcc_comparison:
2564     case tcc_binary:
2565       if (OP_SAME (0) && OP_SAME (1))
2566 	return 1;
2567 
2568       /* For commutative ops, allow the other order.  */
2569       return (commutative_tree_code (TREE_CODE (arg0))
2570 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
2571 				  TREE_OPERAND (arg1, 1), flags)
2572 	      && operand_equal_p (TREE_OPERAND (arg0, 1),
2573 				  TREE_OPERAND (arg1, 0), flags));
2574 
2575     case tcc_reference:
2576       /* If either of the pointer (or reference) expressions we are
2577 	 dereferencing contain a side effect, these cannot be equal.  */
2578       if (TREE_SIDE_EFFECTS (arg0)
2579 	  || TREE_SIDE_EFFECTS (arg1))
2580 	return 0;
2581 
2582       switch (TREE_CODE (arg0))
2583 	{
2584 	case INDIRECT_REF:
2585 	case REALPART_EXPR:
2586 	case IMAGPART_EXPR:
2587 	  return OP_SAME (0);
2588 
2589 	case MEM_REF:
2590 	  /* Require equal access sizes, and similar pointer types.
2591 	     We can have incomplete types for array references of
2592 	     variable-sized arrays from the Fortran frontend
2593 	     though.  */
2594 	  return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2595 		   || (TYPE_SIZE (TREE_TYPE (arg0))
2596 		       && TYPE_SIZE (TREE_TYPE (arg1))
2597 		       && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2598 					   TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2599 		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2600 		      == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2601 		  && OP_SAME (0) && OP_SAME (1));
2602 
2603 	case ARRAY_REF:
2604 	case ARRAY_RANGE_REF:
2605 	  /* Operands 2 and 3 may be null.
2606 	     Compare the array index by value first if it is constant, as
2607 	     we may have different types but the same value here.  */
2608 	  return (OP_SAME (0)
2609 		  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2610 					  TREE_OPERAND (arg1, 1))
2611 		      || OP_SAME (1))
2612 		  && OP_SAME_WITH_NULL (2)
2613 		  && OP_SAME_WITH_NULL (3));
2614 
2615 	case COMPONENT_REF:
2616 	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
2617 	     may be NULL when we're called to compare MEM_EXPRs.  */
2618 	  return OP_SAME_WITH_NULL (0)
2619 		 && OP_SAME (1)
2620 		 && OP_SAME_WITH_NULL (2);
2621 
2622 	case BIT_FIELD_REF:
2623 	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2624 
2625 	default:
2626 	  return 0;
2627 	}
2628 
2629     case tcc_expression:
2630       switch (TREE_CODE (arg0))
2631 	{
2632 	case ADDR_EXPR:
2633 	case TRUTH_NOT_EXPR:
2634 	  return OP_SAME (0);
2635 
2636 	case TRUTH_ANDIF_EXPR:
2637 	case TRUTH_ORIF_EXPR:
2638 	  return OP_SAME (0) && OP_SAME (1);
2639 
2640 	case FMA_EXPR:
2641 	case WIDEN_MULT_PLUS_EXPR:
2642 	case WIDEN_MULT_MINUS_EXPR:
2643 	  if (!OP_SAME (2))
2644 	    return 0;
2645 	  /* The multiplication operands are commutative.  */
2646 	  /* FALLTHRU */
2647 
2648 	case TRUTH_AND_EXPR:
2649 	case TRUTH_OR_EXPR:
2650 	case TRUTH_XOR_EXPR:
2651 	  if (OP_SAME (0) && OP_SAME (1))
2652 	    return 1;
2653 
2654 	  /* Otherwise take into account this is a commutative operation.  */
2655 	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
2656 				   TREE_OPERAND (arg1, 1), flags)
2657 		  && operand_equal_p (TREE_OPERAND (arg0, 1),
2658 				      TREE_OPERAND (arg1, 0), flags));
2659 
2660 	case COND_EXPR:
2661 	case VEC_COND_EXPR:
2662 	case DOT_PROD_EXPR:
2663 	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2664 
2665 	default:
2666 	  return 0;
2667 	}
2668 
2669     case tcc_vl_exp:
2670       switch (TREE_CODE (arg0))
2671 	{
2672 	case CALL_EXPR:
2673 	  /* If the CALL_EXPRs call different functions, then they
2674 	     clearly cannot be equal.  */
2675 	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2676 				 flags))
2677 	    return 0;
2678 
2679 	  {
2680 	    unsigned int cef = call_expr_flags (arg0);
2681 	    if (flags & OEP_PURE_SAME)
2682 	      cef &= ECF_CONST | ECF_PURE;
2683 	    else
2684 	      cef &= ECF_CONST;
2685 	    if (!cef)
2686 	      return 0;
2687 	  }
2688 
2689 	  /* Now see if all the arguments are the same.  */
2690 	  {
2691 	    const_call_expr_arg_iterator iter0, iter1;
2692 	    const_tree a0, a1;
2693 	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
2694 		   a1 = first_const_call_expr_arg (arg1, &iter1);
2695 		 a0 && a1;
2696 		 a0 = next_const_call_expr_arg (&iter0),
2697 		   a1 = next_const_call_expr_arg (&iter1))
2698 	      if (! operand_equal_p (a0, a1, flags))
2699 		return 0;
2700 
2701 	    /* If we get here and both argument lists are exhausted
2702 	       then the CALL_EXPRs are equal.  */
2703 	    return ! (a0 || a1);
2704 	  }
2705 	default:
2706 	  return 0;
2707 	}
2708 
2709     case tcc_declaration:
2710       /* Consider __builtin_sqrt equal to sqrt.  */
2711       return (TREE_CODE (arg0) == FUNCTION_DECL
2712 	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2713 	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2714 	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2715 
2716     default:
2717       return 0;
2718     }
2719 
2720 #undef OP_SAME
2721 #undef OP_SAME_WITH_NULL
2722 }
2723 
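/* Illustrative sketch, not part of the original source: for the
   GENERIC trees a + b and b + a, operand_equal_p returns 1 through
   the commutativity case above, whereas

     operand_equal_p (x, x, OEP_ONLY_CONST)

   returns 0 for a non-constant VAR_DECL x, since OEP_ONLY_CONST
   restricts positive answers to constants.  */
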
2724 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2725    shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2726 
2727    When in doubt, return 0.  */
2728 
2729 static int
2730 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2731 {
2732   int unsignedp1, unsignedpo;
2733   tree primarg0, primarg1, primother;
2734   unsigned int correct_width;
2735 
2736   if (operand_equal_p (arg0, arg1, 0))
2737     return 1;
2738 
2739   if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2740       || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2741     return 0;
2742 
2743   /* Discard any conversions that don't change the modes of ARG0 and ARG1
2744      and see if the inner values are the same.  This removes any
2745      signedness comparison, which doesn't matter here.  */
2746   primarg0 = arg0, primarg1 = arg1;
2747   STRIP_NOPS (primarg0);
2748   STRIP_NOPS (primarg1);
2749   if (operand_equal_p (primarg0, primarg1, 0))
2750     return 1;
2751 
2752   /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2753      actual comparison operand, ARG0.
2754 
2755      First throw away any conversions to wider types
2756      already present in the operands.  */
2757 
2758   primarg1 = get_narrower (arg1, &unsignedp1);
2759   primother = get_narrower (other, &unsignedpo);
2760 
2761   correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2762   if (unsignedp1 == unsignedpo
2763       && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2764       && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2765     {
2766       tree type = TREE_TYPE (arg0);
2767 
2768       /* Make sure shorter operand is extended the right way
2769 	 to match the longer operand.  */
2770       primarg1 = fold_convert (signed_or_unsigned_type_for
2771 			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2772 
2773       if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2774 	return 1;
2775     }
2776 
2777   return 0;
2778 }
2779 
2780 /* See if ARG is an expression that is either a comparison or is performing
2781    arithmetic on comparisons.  The comparisons must only be comparing
2782    two different values, which will be stored in *CVAL1 and *CVAL2; if
2783    they are nonzero it means that some operands have already been found.
2784    No variables may be used anywhere else in the expression except in the
2785    comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
2786    the expression and save_expr needs to be called with CVAL1 and CVAL2.
2787 
2788    If this is true, return 1.  Otherwise, return zero.  */
2789 
2790 static int
2791 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2792 {
2793   enum tree_code code = TREE_CODE (arg);
2794   enum tree_code_class tclass = TREE_CODE_CLASS (code);
2795 
2796   /* We can handle some of the tcc_expression cases here.  */
2797   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2798     tclass = tcc_unary;
2799   else if (tclass == tcc_expression
2800 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2801 	       || code == COMPOUND_EXPR))
2802     tclass = tcc_binary;
2803 
2804   else if (tclass == tcc_expression && code == SAVE_EXPR
2805 	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2806     {
2807       /* If we've already found a CVAL1 or CVAL2, this expression is
2808 	 too complex to handle.  */
2809       if (*cval1 || *cval2)
2810 	return 0;
2811 
2812       tclass = tcc_unary;
2813       *save_p = 1;
2814     }
2815 
2816   switch (tclass)
2817     {
2818     case tcc_unary:
2819       return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2820 
2821     case tcc_binary:
2822       return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2823 	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
2824 				      cval1, cval2, save_p));
2825 
2826     case tcc_constant:
2827       return 1;
2828 
2829     case tcc_expression:
2830       if (code == COND_EXPR)
2831 	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2832 				     cval1, cval2, save_p)
2833 		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
2834 					cval1, cval2, save_p)
2835 		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
2836 					cval1, cval2, save_p));
2837       return 0;
2838 
2839     case tcc_comparison:
2840       /* First see if we can handle the first operand, then the second.  For
2841 	 the second operand, we know *CVAL1 can't be zero.  It must be that
2842 	 one side of the comparison is each of the values; test for the
2843 	 case where this isn't true by failing if the two operands
2844 	 are the same.  */
2845 
2846       if (operand_equal_p (TREE_OPERAND (arg, 0),
2847 			   TREE_OPERAND (arg, 1), 0))
2848 	return 0;
2849 
2850       if (*cval1 == 0)
2851 	*cval1 = TREE_OPERAND (arg, 0);
2852       else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2853 	;
2854       else if (*cval2 == 0)
2855 	*cval2 = TREE_OPERAND (arg, 0);
2856       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2857 	;
2858       else
2859 	return 0;
2860 
2861       if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2862 	;
2863       else if (*cval2 == 0)
2864 	*cval2 = TREE_OPERAND (arg, 1);
2865       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2866 	;
2867       else
2868 	return 0;
2869 
2870       return 1;
2871 
2872     default:
2873       return 0;
2874     }
2875 }
2876 
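/* Illustrative sketch, not part of the original source: for
   ARG = (x < y) && (y != x) the walk above records *CVAL1 = x and
   *CVAL2 = y and returns 1, whereas (x < y) && (y < z) mentions a
   third value z and therefore returns 0.  */
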
2877 /* ARG is a tree that is known to contain just arithmetic operations and
2878    comparisons.  Evaluate the operations in the tree substituting NEW0 for
2879    any occurrence of OLD0 as an operand of a comparison and likewise for
2880    NEW1 and OLD1.  */
2881 
2882 static tree
2883 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2884 	    tree old1, tree new1)
2885 {
2886   tree type = TREE_TYPE (arg);
2887   enum tree_code code = TREE_CODE (arg);
2888   enum tree_code_class tclass = TREE_CODE_CLASS (code);
2889 
2890   /* We can handle some of the tcc_expression cases here.  */
2891   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2892     tclass = tcc_unary;
2893   else if (tclass == tcc_expression
2894 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2895     tclass = tcc_binary;
2896 
2897   switch (tclass)
2898     {
2899     case tcc_unary:
2900       return fold_build1_loc (loc, code, type,
2901 			  eval_subst (loc, TREE_OPERAND (arg, 0),
2902 				      old0, new0, old1, new1));
2903 
2904     case tcc_binary:
2905       return fold_build2_loc (loc, code, type,
2906 			  eval_subst (loc, TREE_OPERAND (arg, 0),
2907 				      old0, new0, old1, new1),
2908 			  eval_subst (loc, TREE_OPERAND (arg, 1),
2909 				      old0, new0, old1, new1));
2910 
2911     case tcc_expression:
2912       switch (code)
2913 	{
2914 	case SAVE_EXPR:
2915 	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2916 			     old1, new1);
2917 
2918 	case COMPOUND_EXPR:
2919 	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2920 			     old1, new1);
2921 
2922 	case COND_EXPR:
2923 	  return fold_build3_loc (loc, code, type,
2924 			      eval_subst (loc, TREE_OPERAND (arg, 0),
2925 					  old0, new0, old1, new1),
2926 			      eval_subst (loc, TREE_OPERAND (arg, 1),
2927 					  old0, new0, old1, new1),
2928 			      eval_subst (loc, TREE_OPERAND (arg, 2),
2929 					  old0, new0, old1, new1));
2930 	default:
2931 	  break;
2932 	}
2933       /* Fall through - ???  */
2934 
2935     case tcc_comparison:
2936       {
2937 	tree arg0 = TREE_OPERAND (arg, 0);
2938 	tree arg1 = TREE_OPERAND (arg, 1);
2939 
2940 	/* We need to check both for exact equality and tree equality.  The
2941 	   former will be true if the operand has a side-effect.  In that
2942 	   case, we know the operand occurred exactly once.  */
2943 
2944 	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2945 	  arg0 = new0;
2946 	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2947 	  arg0 = new1;
2948 
2949 	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2950 	  arg1 = new0;
2951 	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2952 	  arg1 = new1;
2953 
2954 	return fold_build2_loc (loc, code, type, arg0, arg1);
2955       }
2956 
2957     default:
2958       return arg;
2959     }
2960 }
2961 
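/* Illustrative sketch, not part of the original source: a call such
   as

     eval_subst (loc, arg, a, x, b, y)

   rewrites a comparison a < b inside ARG into x < y while leaving the
   surrounding truth-value arithmetic intact.  */
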
2962 /* Return a tree for the case when the result of an expression is RESULT
2963    converted to TYPE and OMITTED was previously an operand of the expression
2964    but is now not needed (e.g., we folded OMITTED * 0).
2965 
2966    If OMITTED has side effects, we must evaluate it.  Otherwise, just do
2967    the conversion of RESULT to TYPE.  */
2968 
2969 tree
2970 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2971 {
2972   tree t = fold_convert_loc (loc, type, result);
2973 
2974   /* If the resulting operand is an empty statement, just return the omitted
2975      statement cast to void.  */
2976   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2977     return build1_loc (loc, NOP_EXPR, void_type_node,
2978 		       fold_ignored_result (omitted));
2979 
2980   if (TREE_SIDE_EFFECTS (omitted))
2981     return build2_loc (loc, COMPOUND_EXPR, type,
2982 		       fold_ignored_result (omitted), t);
2983 
2984   return non_lvalue_loc (loc, t);
2985 }
2986 
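/* Illustrative sketch, not part of the original source: when folding
   f () * 0 the product is known to be 0, but the call must still be
   evaluated, so omit_one_operand_loc produces the COMPOUND_EXPR

     (f (), 0)

   whereas a side-effect-free OMITTED is dropped and RESULT is simply
   converted to TYPE.  */
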
2987 /* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */
2988 
2989 static tree
2990 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2991 			       tree omitted)
2992 {
2993   tree t = fold_convert_loc (loc, type, result);
2994 
2995   /* If the resulting operand is an empty statement, just return the omitted
2996      statement cast to void.  */
2997   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2998     return build1_loc (loc, NOP_EXPR, void_type_node,
2999 		       fold_ignored_result (omitted));
3000 
3001   if (TREE_SIDE_EFFECTS (omitted))
3002     return build2_loc (loc, COMPOUND_EXPR, type,
3003 		       fold_ignored_result (omitted), t);
3004 
3005   return pedantic_non_lvalue_loc (loc, t);
3006 }
3007 
3008 /* Return a tree for the case when the result of an expression is RESULT
3009    converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3010    of the expression but are now not needed.
3011 
3012    If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3013    If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3014    evaluated before OMITTED2.  Otherwise, if neither has side effects,
3015    just do the conversion of RESULT to TYPE.  */
3016 
3017 tree
3018 omit_two_operands_loc (location_t loc, tree type, tree result,
3019 		       tree omitted1, tree omitted2)
3020 {
3021   tree t = fold_convert_loc (loc, type, result);
3022 
3023   if (TREE_SIDE_EFFECTS (omitted2))
3024     t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3025   if (TREE_SIDE_EFFECTS (omitted1))
3026     t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3027 
3028   return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3029 }
3030 
3031 
3032 /* Return a simplified tree node for the truth-negation of ARG.  This
3033    never alters ARG itself.  We assume that ARG is an operation that
3034    returns a truth value (0 or 1).
3035 
3036    FIXME: one would think we would fold the result, but it causes
3037    problems with the dominator optimizer.  */
3038 
3039 tree
3040 fold_truth_not_expr (location_t loc, tree arg)
3041 {
3042   tree type = TREE_TYPE (arg);
3043   enum tree_code code = TREE_CODE (arg);
3044   location_t loc1, loc2;
3045 
3046   /* If this is a comparison, we can simply invert it, except for
3047      floating-point non-equality comparisons, in which case we just
3048      enclose a TRUTH_NOT_EXPR around what we have.  */
3049 
3050   if (TREE_CODE_CLASS (code) == tcc_comparison)
3051     {
3052       tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3053       if (FLOAT_TYPE_P (op_type)
3054 	  && flag_trapping_math
3055 	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
3056 	  && code != NE_EXPR && code != EQ_EXPR)
3057 	return NULL_TREE;
3058 
3059       code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3060       if (code == ERROR_MARK)
3061 	return NULL_TREE;
3062 
3063       return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3064 			 TREE_OPERAND (arg, 1));
3065     }
3066 
3067   switch (code)
3068     {
3069     case INTEGER_CST:
3070       return constant_boolean_node (integer_zerop (arg), type);
3071 
3072     case TRUTH_AND_EXPR:
3073       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3074       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3075       return build2_loc (loc, TRUTH_OR_EXPR, type,
3076 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3077 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3078 
3079     case TRUTH_OR_EXPR:
3080       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3081       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3082       return build2_loc (loc, TRUTH_AND_EXPR, type,
3083 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3084 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3085 
3086     case TRUTH_XOR_EXPR:
3087       /* Here we can invert either operand.  We invert the first operand
3088 	 unless the second operand is a TRUTH_NOT_EXPR in which case our
3089 	 result is the XOR of the first operand with the inside of the
3090 	 negation of the second operand.  */
3091 
3092       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3093 	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3094 			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3095       else
3096 	return build2_loc (loc, TRUTH_XOR_EXPR, type,
3097 			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3098 			   TREE_OPERAND (arg, 1));
3099 
3100     case TRUTH_ANDIF_EXPR:
3101       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3102       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3103       return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3104 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3105 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3106 
3107     case TRUTH_ORIF_EXPR:
3108       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3109       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3110       return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3111 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3112 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3113 
3114     case TRUTH_NOT_EXPR:
3115       return TREE_OPERAND (arg, 0);
3116 
3117     case COND_EXPR:
3118       {
3119 	tree arg1 = TREE_OPERAND (arg, 1);
3120 	tree arg2 = TREE_OPERAND (arg, 2);
3121 
3122 	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3123 	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3124 
3125 	/* A COND_EXPR may have a throw as one operand, which
3126 	   then has void type.  Just leave void operands
3127 	   as they are.  */
3128 	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3129 			   VOID_TYPE_P (TREE_TYPE (arg1))
3130 			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
3131 			   VOID_TYPE_P (TREE_TYPE (arg2))
3132 			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
3133       }
3134 
3135     case COMPOUND_EXPR:
3136       loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3137       return build2_loc (loc, COMPOUND_EXPR, type,
3138 			 TREE_OPERAND (arg, 0),
3139 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3140 
3141     case NON_LVALUE_EXPR:
3142       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3143       return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3144 
3145     CASE_CONVERT:
3146       if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3147 	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3148 
3149       /* ... fall through ...  */
3150 
3151     case FLOAT_EXPR:
3152       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3153       return build1_loc (loc, TREE_CODE (arg), type,
3154 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3155 
3156     case BIT_AND_EXPR:
3157       if (!integer_onep (TREE_OPERAND (arg, 1)))
3158 	return NULL_TREE;
3159       return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3160 
3161     case SAVE_EXPR:
3162       return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3163 
3164     case CLEANUP_POINT_EXPR:
3165       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3166       return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3167 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3168 
3169     default:
3170       return NULL_TREE;
3171     }
3172 }
3173 
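/* Illustrative sketch, not part of the original source: the cases
   above apply De Morgan's laws to trees, e.g.

     !(a && b)  -->  !a || !b
     !(a < b)   -->  a >= b   (when NaNs need not be honored)

   and return NULL_TREE for anything that cannot be inverted
   safely.  */
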
3174 /* Return a simplified tree node for the truth-negation of ARG.  This
3175    never alters ARG itself.  We assume that ARG is an operation that
3176    returns a truth value (0 or 1).
3177 
3178    FIXME: one would think we would fold the result, but it causes
3179    problems with the dominator optimizer.  */
3180 
3181 tree
3182 invert_truthvalue_loc (location_t loc, tree arg)
3183 {
3184   tree tem;
3185 
3186   if (TREE_CODE (arg) == ERROR_MARK)
3187     return arg;
3188 
3189   tem = fold_truth_not_expr (loc, arg);
3190   if (!tem)
3191     tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3192 
3193   return tem;
3194 }
3195 
3196 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3197    operands are another bit-wise operation with a common input.  If so,
3198    distribute the bit operations to save an operation and possibly two if
3199    constants are involved.  For example, convert
3200 	(A | B) & (A | C) into A | (B & C)
3201    Further simplification will occur if B and C are constants.
3202 
3203    If this optimization cannot be done, 0 will be returned.  */
3204 
3205 static tree
3206 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3207 		     tree arg0, tree arg1)
3208 {
3209   tree common;
3210   tree left, right;
3211 
3212   if (TREE_CODE (arg0) != TREE_CODE (arg1)
3213       || TREE_CODE (arg0) == code
3214       || (TREE_CODE (arg0) != BIT_AND_EXPR
3215 	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
3216     return 0;
3217 
3218   if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3219     {
3220       common = TREE_OPERAND (arg0, 0);
3221       left = TREE_OPERAND (arg0, 1);
3222       right = TREE_OPERAND (arg1, 1);
3223     }
3224   else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3225     {
3226       common = TREE_OPERAND (arg0, 0);
3227       left = TREE_OPERAND (arg0, 1);
3228       right = TREE_OPERAND (arg1, 0);
3229     }
3230   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3231     {
3232       common = TREE_OPERAND (arg0, 1);
3233       left = TREE_OPERAND (arg0, 0);
3234       right = TREE_OPERAND (arg1, 1);
3235     }
3236   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3237     {
3238       common = TREE_OPERAND (arg0, 1);
3239       left = TREE_OPERAND (arg0, 0);
3240       right = TREE_OPERAND (arg1, 0);
3241     }
3242   else
3243     return 0;
3244 
3245   common = fold_convert_loc (loc, type, common);
3246   left = fold_convert_loc (loc, type, left);
3247   right = fold_convert_loc (loc, type, right);
3248   return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3249 		      fold_build2_loc (loc, code, type, left, right));
3250 }
3251 
3252 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3253    with code CODE.  This optimization is unsafe.  */
3254 static tree
3255 distribute_real_division (location_t loc, enum tree_code code, tree type,
3256 			  tree arg0, tree arg1)
3257 {
3258   bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3259   bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3260 
3261   /* (A / C) +- (B / C) -> (A +- B) / C.  */
3262   if (mul0 == mul1
3263       && operand_equal_p (TREE_OPERAND (arg0, 1),
3264 		       TREE_OPERAND (arg1, 1), 0))
3265     return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3266 			fold_build2_loc (loc, code, type,
3267 				     TREE_OPERAND (arg0, 0),
3268 				     TREE_OPERAND (arg1, 0)),
3269 			TREE_OPERAND (arg0, 1));
3270 
3271   /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
3272   if (operand_equal_p (TREE_OPERAND (arg0, 0),
3273 		       TREE_OPERAND (arg1, 0), 0)
3274       && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3275       && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3276     {
3277       REAL_VALUE_TYPE r0, r1;
3278       r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3279       r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3280       if (!mul0)
3281 	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3282       if (!mul1)
3283         real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3284       real_arithmetic (&r0, code, &r0, &r1);
3285       return fold_build2_loc (loc, MULT_EXPR, type,
3286 			  TREE_OPERAND (arg0, 0),
3287 			  build_real (type, r0));
3288     }
3289 
3290   return NULL_TREE;
3291 }
3292 
3293 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3294    starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */
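/* For instance (editorial example): asking for 8 bits of a 32-bit
   integer INNER starting at bit 16 yields the equivalent of
   BIT_FIELD_REF <INNER, 8, 16>, converted to TYPE if the bit-field
   type had to differ; asking for all 32 bits starting at bit 0
   degenerates into a plain conversion of INNER to TYPE.  */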
3295 
3296 static tree
3297 make_bit_field_ref (location_t loc, tree inner, tree type,
3298 		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3299 {
3300   tree result, bftype;
3301 
3302   if (bitpos == 0)
3303     {
3304       tree size = TYPE_SIZE (TREE_TYPE (inner));
3305       if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3306 	   || POINTER_TYPE_P (TREE_TYPE (inner)))
3307 	  && host_integerp (size, 0)
3308 	  && tree_low_cst (size, 0) == bitsize)
3309 	return fold_convert_loc (loc, type, inner);
3310     }
3311 
3312   bftype = type;
3313   if (TYPE_PRECISION (bftype) != bitsize
3314       || TYPE_UNSIGNED (bftype) == !unsignedp)
3315     bftype = build_nonstandard_integer_type (bitsize, 0);
3316 
3317   result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3318 		       size_int (bitsize), bitsize_int (bitpos));
3319 
3320   if (bftype != type)
3321     result = fold_convert_loc (loc, type, result);
3322 
3323   return result;
3324 }
3325 
3326 /* Optimize a bit-field compare.
3327 
3328    There are two cases:  First is a compare against a constant and the
3329    second is a comparison of two items where the fields are at the same
3330    bit position relative to the start of a chunk (byte, halfword, word)
3331    large enough to contain it.  In these cases we can avoid the shift
3332    implicit in bitfield extractions.
3333 
3334    For constants, we emit a compare of the shifted constant with the
3335    BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3336    compared.  For two fields at the same position, we do the ANDs with the
3337    similar mask and compare the result of the ANDs.
3338 
3339    CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3340    COMPARE_TYPE is the type of the comparison, and LHS and RHS
3341    are the left and right operands of the comparison, respectively.
3342 
3343    If the optimization described above can be done, we return the resulting
3344    tree.  Otherwise we return zero.  */
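/* A worked example (editorial, not part of the original source):
   given

	struct s { unsigned a : 3; unsigned b : 5; } x;

   the test "x.b == 7" would normally extract X.B with a shift and a
   mask.  Instead we can load the byte containing both fields, mask
   away the bits of A, and compare against the constant shifted into
   B's position: on a typical little-endian layout this is roughly
   (byte & 0xf8) == (7 << 3).  */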
3345 
3346 static tree
3347 optimize_bit_field_compare (location_t loc, enum tree_code code,
3348 			    tree compare_type, tree lhs, tree rhs)
3349 {
3350   HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3351   tree type = TREE_TYPE (lhs);
3352   tree signed_type, unsigned_type;
3353   int const_p = TREE_CODE (rhs) == INTEGER_CST;
3354   enum machine_mode lmode, rmode, nmode;
3355   int lunsignedp, runsignedp;
3356   int lvolatilep = 0, rvolatilep = 0;
3357   tree linner, rinner = NULL_TREE;
3358   tree mask;
3359   tree offset;
3360 
3361   /* Get all the information about the extractions being done.  If the bit size
3362      is the same as the size of the underlying object, we aren't doing an
3363      extraction at all and so can do nothing.  We also don't want to
3364      do anything if the inner expression is a PLACEHOLDER_EXPR since we
3365      would then no longer be able to replace it.  */
3366   linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3367 				&lunsignedp, &lvolatilep, false);
3368   if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3369       || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3370     return 0;
3371 
3372   if (!const_p)
3373     {
3374       /* If this is not a constant, we can only do something if bit positions,
3375 	 sizes, and signedness are the same.  */
3376       rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3377 				    &runsignedp, &rvolatilep, false);
3378 
3379       if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3380 	  || lunsignedp != runsignedp || offset != 0
3381 	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3382 	return 0;
3383     }
3384 
3385   /* See if we can find a mode to refer to this field.  We should be able to,
3386      but fail if we can't.  */
3387   if (lvolatilep
3388       && GET_MODE_BITSIZE (lmode) > 0
3389       && flag_strict_volatile_bitfields > 0)
3390     nmode = lmode;
3391   else
3392     nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3393 			   const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3394 			   : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3395 				  TYPE_ALIGN (TREE_TYPE (rinner))),
3396 			   word_mode, lvolatilep || rvolatilep);
3397   if (nmode == VOIDmode)
3398     return 0;
3399 
3400   /* Set signed and unsigned types of the precision of this mode for the
3401      shifts below.  */
3402   signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3403   unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3404 
3405   /* Compute the bit position and size for the new reference and our offset
3406      within it. If the new reference is the same size as the original, we
3407      won't optimize anything, so return zero.  */
3408   nbitsize = GET_MODE_BITSIZE (nmode);
3409   nbitpos = lbitpos & ~ (nbitsize - 1);
3410   lbitpos -= nbitpos;
3411   if (nbitsize == lbitsize)
3412     return 0;
3413 
3414   if (BYTES_BIG_ENDIAN)
3415     lbitpos = nbitsize - lbitsize - lbitpos;
3416 
3417   /* Make the mask to be used against the extracted field.  */
3418   mask = build_int_cst_type (unsigned_type, -1);
3419   mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3420   mask = const_binop (RSHIFT_EXPR, mask,
3421 		      size_int (nbitsize - lbitsize - lbitpos));
3422 
3423   if (! const_p)
3424     /* If not comparing with constant, just rework the comparison
3425        and return.  */
3426     return fold_build2_loc (loc, code, compare_type,
3427 			fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3428 				     make_bit_field_ref (loc, linner,
3429 							 unsigned_type,
3430 							 nbitsize, nbitpos,
3431 							 1),
3432 				     mask),
3433 			fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3434 				     make_bit_field_ref (loc, rinner,
3435 							 unsigned_type,
3436 							 nbitsize, nbitpos,
3437 							 1),
3438 				     mask));
3439 
3440   /* Otherwise, we are handling the constant case.  See if the constant is too
3441      big for the field.  Warn and return a tree for 0 (false) if so.  We do
3442      this not only for its own sake, but to avoid having to test for this
3443      error case below.  If we didn't, we might generate wrong code.
3444 
3445      For unsigned fields, the constant shifted right by the field length should
3446      be all zero.  For signed fields, the high-order bits should agree with
3447      the sign bit.  */
3448 
3449   if (lunsignedp)
3450     {
3451       if (! integer_zerop (const_binop (RSHIFT_EXPR,
3452 					fold_convert_loc (loc,
3453 							  unsigned_type, rhs),
3454 					size_int (lbitsize))))
3455 	{
3456 	  warning (0, "comparison is always %d due to width of bit-field",
3457 		   code == NE_EXPR);
3458 	  return constant_boolean_node (code == NE_EXPR, compare_type);
3459 	}
3460     }
3461   else
3462     {
3463       tree tem = const_binop (RSHIFT_EXPR,
3464 			      fold_convert_loc (loc, signed_type, rhs),
3465 			      size_int (lbitsize - 1));
3466       if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3467 	{
3468 	  warning (0, "comparison is always %d due to width of bit-field",
3469 		   code == NE_EXPR);
3470 	  return constant_boolean_node (code == NE_EXPR, compare_type);
3471 	}
3472     }
3473 
3474   /* Single-bit compares should always be against zero.  */
3475   if (lbitsize == 1 && ! integer_zerop (rhs))
3476     {
3477       code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3478       rhs = build_int_cst (type, 0);
3479     }
3480 
3481   /* Make a new bitfield reference, shift the constant over the
3482      appropriate number of bits and mask it with the computed mask
3483      (in case this was a signed field).  If we changed it, make a new one.  */
3484   lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3485   if (lvolatilep)
3486     {
3487       TREE_SIDE_EFFECTS (lhs) = 1;
3488       TREE_THIS_VOLATILE (lhs) = 1;
3489     }
3490 
3491   rhs = const_binop (BIT_AND_EXPR,
3492 		     const_binop (LSHIFT_EXPR,
3493 				  fold_convert_loc (loc, unsigned_type, rhs),
3494 				  size_int (lbitpos)),
3495 		     mask);
3496 
3497   lhs = build2_loc (loc, code, compare_type,
3498 		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3499   return lhs;
3500 }
3501 
3502 /* Subroutine for fold_truth_andor_1: decode a field reference.
3503 
3504    If EXP is a comparison reference, we return the innermost reference.
3505 
3506    *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3507    set to the starting bit number.
3508 
3509    If the innermost field can be completely contained in a mode-sized
3510    unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
3511 
3512    *PVOLATILEP is set to 1 if any expression encountered is volatile;
3513    otherwise it is not changed.
3514 
3515    *PUNSIGNEDP is set to the signedness of the field.
3516 
3517    *PMASK is set to the mask used.  This is either contained in a
3518    BIT_AND_EXPR or derived from the width of the field.
3519 
3520    *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3521 
3522    Return 0 if this is not a component reference or is one that we can't
3523    do anything with.  */
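/* For example (editorial): for EXP of the form x.b & 3, with X.B the
   5-bit field from the sketch above, we return the underlying object
   and set *PBITSIZE to 5, *PAND_MASK to 3, and *PMASK to the AND mask
   intersected with the field-width mask of five ones, i.e. 3.  */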
3524 
3525 static tree
3526 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3527 			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3528 			int *punsignedp, int *pvolatilep,
3529 			tree *pmask, tree *pand_mask)
3530 {
3531   tree outer_type = 0;
3532   tree and_mask = 0;
3533   tree mask, inner, offset;
3534   tree unsigned_type;
3535   unsigned int precision;
3536 
3537   /* All the optimizations using this function assume integer fields.
3538      There are problems with FP fields since the type_for_size call
3539      below can fail for, e.g., XFmode.  */
3540   if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3541     return 0;
3542 
3543   /* We are interested in the bare arrangement of bits, so strip everything
3544      that doesn't affect the machine mode.  However, record the type of the
3545      outermost expression if it may matter below.  */
3546   if (CONVERT_EXPR_P (exp)
3547       || TREE_CODE (exp) == NON_LVALUE_EXPR)
3548     outer_type = TREE_TYPE (exp);
3549   STRIP_NOPS (exp);
3550 
3551   if (TREE_CODE (exp) == BIT_AND_EXPR)
3552     {
3553       and_mask = TREE_OPERAND (exp, 1);
3554       exp = TREE_OPERAND (exp, 0);
3555       STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3556       if (TREE_CODE (and_mask) != INTEGER_CST)
3557 	return 0;
3558     }
3559 
3560   inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3561 			       punsignedp, pvolatilep, false);
3562   if ((inner == exp && and_mask == 0)
3563       || *pbitsize < 0 || offset != 0
3564       || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3565     return 0;
3566 
3567   /* If the number of bits in the reference is the same as the bitsize of
3568      the outer type, then the outer type gives the signedness. Otherwise
3569      (in case of a small bitfield) the signedness is unchanged.  */
3570   if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3571     *punsignedp = TYPE_UNSIGNED (outer_type);
3572 
3573   /* Compute the mask to access the bitfield.  */
3574   unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3575   precision = TYPE_PRECISION (unsigned_type);
3576 
3577   mask = build_int_cst_type (unsigned_type, -1);
3578 
3579   mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3580   mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3581 
3582   /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
3583   if (and_mask != 0)
3584     mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3585 			fold_convert_loc (loc, unsigned_type, and_mask), mask);
3586 
3587   *pmask = mask;
3588   *pand_mask = and_mask;
3589   return inner;
3590 }
3591 
3592 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3593    bit positions.  */
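/* E.g. (editorial): for SIZE == 5, the only accepted mask value is
   0x1f, five ones in the low-order bit positions.  */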
3594 
3595 static int
3596 all_ones_mask_p (const_tree mask, int size)
3597 {
3598   tree type = TREE_TYPE (mask);
3599   unsigned int precision = TYPE_PRECISION (type);
3600   tree tmask;
3601 
3602   tmask = build_int_cst_type (signed_type_for (type), -1);
3603 
3604   return
3605     tree_int_cst_equal (mask,
3606 			const_binop (RSHIFT_EXPR,
3607 				     const_binop (LSHIFT_EXPR, tmask,
3608 						  size_int (precision - size)),
3609 				     size_int (precision - size)));
3610 }
3611 
3612 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3613    represents the sign bit of EXP's type.  If EXP represents a sign
3614    or zero extension, also test VAL against the unextended type.
3615    The return value is the (sub)expression whose sign bit is VAL,
3616    or NULL_TREE otherwise.  */
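/* For example (editorial): for a 32-bit int the sign bit constant is
   0x80000000, so sign_bit_p returns EXP for that VAL; for EXP of the
   form (int) c, with C a signed char, the recursion below also
   accepts 0x80, the sign bit of the unextended type.  */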
3617 
3618 static tree
3619 sign_bit_p (tree exp, const_tree val)
3620 {
3621   unsigned HOST_WIDE_INT mask_lo, lo;
3622   HOST_WIDE_INT mask_hi, hi;
3623   int width;
3624   tree t;
3625 
3626   /* Tree EXP must have an integral type.  */
3627   t = TREE_TYPE (exp);
3628   if (! INTEGRAL_TYPE_P (t))
3629     return NULL_TREE;
3630 
3631   /* Tree VAL must be an integer constant.  */
3632   if (TREE_CODE (val) != INTEGER_CST
3633       || TREE_OVERFLOW (val))
3634     return NULL_TREE;
3635 
3636   width = TYPE_PRECISION (t);
3637   if (width > HOST_BITS_PER_WIDE_INT)
3638     {
3639       hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3640       lo = 0;
3641 
3642       mask_hi = ((unsigned HOST_WIDE_INT) -1
3643 		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3644       mask_lo = -1;
3645     }
3646   else
3647     {
3648       hi = 0;
3649       lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3650 
3651       mask_hi = 0;
3652       mask_lo = ((unsigned HOST_WIDE_INT) -1
3653 		 >> (HOST_BITS_PER_WIDE_INT - width));
3654     }
3655 
3656   /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3657      treat VAL as if it were unsigned.  */
3658   if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3659       && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3660     return exp;
3661 
3662   /* Handle extension from a narrower type.  */
3663   if (TREE_CODE (exp) == NOP_EXPR
3664       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3665     return sign_bit_p (TREE_OPERAND (exp, 0), val);
3666 
3667   return NULL_TREE;
3668 }
3669 
3670 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3671    to be evaluated unconditionally.  */
3672 
3673 static int
3674 simple_operand_p (const_tree exp)
3675 {
3676   /* Strip any conversions that don't change the machine mode.  */
3677   STRIP_NOPS (exp);
3678 
3679   return (CONSTANT_CLASS_P (exp)
3680   	  || TREE_CODE (exp) == SSA_NAME
3681 	  || (DECL_P (exp)
3682 	      && ! TREE_ADDRESSABLE (exp)
3683 	      && ! TREE_THIS_VOLATILE (exp)
3684 	      && ! DECL_NONLOCAL (exp)
3685 	      /* Don't regard global variables as simple.  They may be
3686 		 allocated in ways unknown to the compiler (shared memory,
3687 		 #pragma weak, etc).  */
3688 	      && ! TREE_PUBLIC (exp)
3689 	      && ! DECL_EXTERNAL (exp)
3690 	      /* Loading a static variable is unduly expensive, but global
3691 		 registers aren't expensive.  */
3692 	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3693 }
3694 
3695 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3696    to be evaluated unconditionally.
3697    In addition to simple_operand_p, we assume that comparisons, conversions,
3698    and logic-not operations are simple, if their operands are simple, too.  */
3699 
3700 static bool
3701 simple_operand_p_2 (tree exp)
3702 {
3703   enum tree_code code;
3704 
3705   if (TREE_SIDE_EFFECTS (exp)
3706       || tree_could_trap_p (exp))
3707     return false;
3708 
3709   while (CONVERT_EXPR_P (exp))
3710     exp = TREE_OPERAND (exp, 0);
3711 
3712   code = TREE_CODE (exp);
3713 
3714   if (TREE_CODE_CLASS (code) == tcc_comparison)
3715     return (simple_operand_p (TREE_OPERAND (exp, 0))
3716 	    && simple_operand_p (TREE_OPERAND (exp, 1)));
3717 
3718   if (code == TRUTH_NOT_EXPR)
3719     return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3720 
3721   return simple_operand_p (exp);
3722 }
3723 
3724 
3725 /* The following functions are subroutines to fold_range_test and allow it to
3726    try to change a logical combination of comparisons into a range test.
3727 
3728    For example, both
3729 	X == 2 || X == 3 || X == 4 || X == 5
3730    and
3731 	X >= 2 && X <= 5
3732    are converted to
3733 	(unsigned) (X - 2) <= 3
3734 
3735    We describe each set of comparisons as being either inside or outside
3736    a range, using a variable named like IN_P, and then describe the
3737    range with a lower and upper bound.  If one of the bounds is omitted,
3738    it represents either the highest or lowest value of the type.
3739 
3740    In the comments below, we represent a range by two numbers in brackets
3741    preceded by a "+" to designate being inside that range, or a "-" to
3742    designate being outside that range, so the condition can be inverted by
3743    flipping the prefix.  An omitted bound is represented by a "-".  For
3744    example, "- [-, 10]" means being outside the range starting at the lowest
3745    possible value and ending at 10, in other words, being greater than 10.
3746    The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3747    always false.
3748 
3749    We set up things so that the missing bounds are handled in a consistent
3750    manner so neither a missing bound nor "true" and "false" need to be
3751    handled using a special case.  */
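/* A stand-alone illustration of the transformation above (editorial
   example, not part of the original source):  */
#if 0
#include <assert.h>
int
main (void)
{
  int x;
  for (x = -1000; x <= 1000; x++)
    {
      int by_equality = (x == 2 || x == 3 || x == 4 || x == 5);
      int by_bounds = (x >= 2 && x <= 5);
      int by_range_test = ((unsigned) (x - 2) <= 3);
      assert (by_equality == by_bounds && by_bounds == by_range_test);
    }
  return 0;
}
#endif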
3752 
3753 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3754    of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3755    and UPPER1_P are nonzero if the respective argument is an upper bound
3756    and zero for a lower.  TYPE, if nonzero, is the type of the result; it
3757    must be specified for a comparison.  ARG1 will be converted to ARG0's
3758    type if both are specified.  */
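/* For example (editorial): a missing lower bound compares less than
   any finite value, and two missing bounds of the same kind compare
   equal, so range_binop (EQ_EXPR, type, 0, 1, 0, 1) -- two omitted
   upper bounds -- yields true.  */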
3759 
3760 static tree
3761 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3762 	     tree arg1, int upper1_p)
3763 {
3764   tree tem;
3765   int result;
3766   int sgn0, sgn1;
3767 
3768   /* If neither arg represents infinity, do the normal operation.
3769      Else, if not a comparison, return 0, since the result is unbounded.
3770      Else handle the special comparison rules.  Note that most of the
3771      cases below won't occur, but are handled for consistency.  */
3772 
3773   if (arg0 != 0 && arg1 != 0)
3774     {
3775       tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3776 			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3777       STRIP_NOPS (tem);
3778       return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3779     }
3780 
3781   if (TREE_CODE_CLASS (code) != tcc_comparison)
3782     return 0;
3783 
3784   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3785      for neither.  In real mathematics, we cannot assume open-ended
3786      ranges are the same.  But this is computer arithmetic, where numbers
3787      are finite, so we may replace any unbounded range with a value Z
3788      greater than any representable number.  This permits us to treat
3789      unbounded ranges as equal.  */
3790   sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3791   sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3792   switch (code)
3793     {
3794     case EQ_EXPR:
3795       result = sgn0 == sgn1;
3796       break;
3797     case NE_EXPR:
3798       result = sgn0 != sgn1;
3799       break;
3800     case LT_EXPR:
3801       result = sgn0 < sgn1;
3802       break;
3803     case LE_EXPR:
3804       result = sgn0 <= sgn1;
3805       break;
3806     case GT_EXPR:
3807       result = sgn0 > sgn1;
3808       break;
3809     case GE_EXPR:
3810       result = sgn0 >= sgn1;
3811       break;
3812     default:
3813       gcc_unreachable ();
3814     }
3815 
3816   return constant_boolean_node (result, type);
3817 }
3818 
3819 /* Helper routine for make_range.  Perform one step for it, return
3820    new expression if the loop should continue or NULL_TREE if it should
3821    stop.  */
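/* For example (editorial): a step on CODE == PLUS_EXPR with ARG1 == 5
   rewrites "x + 5 in [3, 7]" as "x in [-2, 2]" by moving the constant
   into the bounds (when the type's overflow rules permit), while a
   step on LE_EXPR with ARG1 == 10 turns the caller's initial
   "EXP != 0" range into + [-, 10].  */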
3822 
3823 tree
3824 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3825 		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3826 		 bool *strict_overflow_p)
3827 {
3828   tree arg0_type = TREE_TYPE (arg0);
3829   tree n_low, n_high, low = *p_low, high = *p_high;
3830   int in_p = *p_in_p, n_in_p;
3831 
3832   switch (code)
3833     {
3834     case TRUTH_NOT_EXPR:
3835       *p_in_p = ! in_p;
3836       return arg0;
3837 
3838     case EQ_EXPR: case NE_EXPR:
3839     case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3840       /* We can only do something if the range is testing for zero
3841 	 and if the second operand is an integer constant.  Note that
3842 	 saying something is "in" the range we make is done by
3843 	 complementing IN_P, which arrives set for the initial case of
3844 	 being not equal to zero; "out" is leaving it alone.  */
3845       if (low == NULL_TREE || high == NULL_TREE
3846 	  || ! integer_zerop (low) || ! integer_zerop (high)
3847 	  || TREE_CODE (arg1) != INTEGER_CST)
3848 	return NULL_TREE;
3849 
3850       switch (code)
3851 	{
3852 	case NE_EXPR:  /* - [c, c]  */
3853 	  low = high = arg1;
3854 	  break;
3855 	case EQ_EXPR:  /* + [c, c]  */
3856 	  in_p = ! in_p, low = high = arg1;
3857 	  break;
3858 	case GT_EXPR:  /* - [-, c] */
3859 	  low = 0, high = arg1;
3860 	  break;
3861 	case GE_EXPR:  /* + [c, -] */
3862 	  in_p = ! in_p, low = arg1, high = 0;
3863 	  break;
3864 	case LT_EXPR:  /* - [c, -] */
3865 	  low = arg1, high = 0;
3866 	  break;
3867 	case LE_EXPR:  /* + [-, c] */
3868 	  in_p = ! in_p, low = 0, high = arg1;
3869 	  break;
3870 	default:
3871 	  gcc_unreachable ();
3872 	}
3873 
3874       /* If this is an unsigned comparison, we also know that EXP is
3875 	 greater than or equal to zero.  We base the range tests we make
3876 	 on that fact, so we record it here so we can parse existing
3877 	 range tests.  We test arg0_type since often the return type
3878 	 of, e.g. EQ_EXPR, is boolean.  */
3879       if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3880 	{
3881 	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
3882 			      in_p, low, high, 1,
3883 			      build_int_cst (arg0_type, 0),
3884 			      NULL_TREE))
3885 	    return NULL_TREE;
3886 
3887 	  in_p = n_in_p, low = n_low, high = n_high;
3888 
3889 	  /* If the high bound is missing, but we have a nonzero low
3890 	     bound, reverse the range so it goes from zero to the low bound
3891 	     minus 1.  */
3892 	  if (high == 0 && low && ! integer_zerop (low))
3893 	    {
3894 	      in_p = ! in_p;
3895 	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3896 				  integer_one_node, 0);
3897 	      low = build_int_cst (arg0_type, 0);
3898 	    }
3899 	}
3900 
3901       *p_low = low;
3902       *p_high = high;
3903       *p_in_p = in_p;
3904       return arg0;
3905 
3906     case NEGATE_EXPR:
3907       /* (-x) IN [a,b] -> x in [-b, -a]  */
3908       n_low = range_binop (MINUS_EXPR, exp_type,
3909 			   build_int_cst (exp_type, 0),
3910 			   0, high, 1);
3911       n_high = range_binop (MINUS_EXPR, exp_type,
3912 			    build_int_cst (exp_type, 0),
3913 			    0, low, 0);
3914       if (n_high != 0 && TREE_OVERFLOW (n_high))
3915 	return NULL_TREE;
3916       goto normalize;
3917 
3918     case BIT_NOT_EXPR:
3919       /* ~ X -> -X - 1  */
3920       return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3921 			 build_int_cst (exp_type, 1));
3922 
3923     case PLUS_EXPR:
3924     case MINUS_EXPR:
3925       if (TREE_CODE (arg1) != INTEGER_CST)
3926 	return NULL_TREE;
3927 
3928       /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3929 	 move a constant to the other side.  */
3930       if (!TYPE_UNSIGNED (arg0_type)
3931 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3932 	return NULL_TREE;
3933 
3934       /* If EXP is signed, any overflow in the computation is undefined,
3935 	 so we don't worry about it so long as our computations on
3936 	 the bounds don't overflow.  For unsigned, overflow is defined
3937 	 and this is exactly the right thing.  */
3938       n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3939 			   arg0_type, low, 0, arg1, 0);
3940       n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3941 			    arg0_type, high, 1, arg1, 0);
3942       if ((n_low != 0 && TREE_OVERFLOW (n_low))
3943 	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
3944 	return NULL_TREE;
3945 
3946       if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
3947 	*strict_overflow_p = true;
3948 
3949       normalize:
3950 	/* Check for an unsigned range which has wrapped around the maximum
3951 	   value thus making n_high < n_low, and normalize it.  */
3952 	if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3953 	  {
3954 	    low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3955 			       integer_one_node, 0);
3956 	    high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3957 				integer_one_node, 0);
3958 
3959 	    /* If the range is of the form +/- [ x+1, x ], we won't
3960 	       be able to normalize it.  But then, it represents the
3961 	       whole range or the empty set, so make it
3962 	       +/- [ -, - ].  */
3963 	    if (tree_int_cst_equal (n_low, low)
3964 		&& tree_int_cst_equal (n_high, high))
3965 	      low = high = 0;
3966 	    else
3967 	      in_p = ! in_p;
3968 	  }
3969 	else
3970 	  low = n_low, high = n_high;
3971 
3972 	*p_low = low;
3973 	*p_high = high;
3974 	*p_in_p = in_p;
3975 	return arg0;
3976 
3977     CASE_CONVERT:
3978     case NON_LVALUE_EXPR:
3979       if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3980 	return NULL_TREE;
3981 
3982       if (! INTEGRAL_TYPE_P (arg0_type)
3983 	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
3984 	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3985 	return NULL_TREE;
3986 
3987       n_low = low, n_high = high;
3988 
3989       if (n_low != 0)
3990 	n_low = fold_convert_loc (loc, arg0_type, n_low);
3991 
3992       if (n_high != 0)
3993 	n_high = fold_convert_loc (loc, arg0_type, n_high);
3994 
3995       /* If we're converting arg0 from an unsigned type to exp's
3996 	 signed type, we will be doing the comparison as unsigned.
3997 	 The tests above have already verified that LOW and HIGH
3998 	 are both positive.
3999 
4000 	 So we have to ensure that we will handle large unsigned
4001 	 values the same way that the current signed bounds treat
4002 	 negative values.  */
4003 
4004       if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4005 	{
4006 	  tree high_positive;
4007 	  tree equiv_type;
4008 	  /* For fixed-point modes, we need to pass the saturating flag
4009 	     as the 2nd parameter.  */
4010 	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4011 	    equiv_type
4012 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4013 						TYPE_SATURATING (arg0_type));
4014 	  else
4015 	    equiv_type
4016 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4017 
4018 	  /* A range without an upper bound is, naturally, unbounded.
4019 	     Since convert would have cropped a very large value, use
4020 	     the max value for the destination type.  */
4021 	  high_positive
4022 	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4023 	      : TYPE_MAX_VALUE (arg0_type);
4024 
4025 	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4026 	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4027 					     fold_convert_loc (loc, arg0_type,
4028 							       high_positive),
4029 					     build_int_cst (arg0_type, 1));
4030 
4031 	  /* If the low bound is specified, "and" the range with the
4032 	     range for which the original unsigned value will be
4033 	     positive.  */
4034 	  if (low != 0)
4035 	    {
4036 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4037 				  1, fold_convert_loc (loc, arg0_type,
4038 						       integer_zero_node),
4039 				  high_positive))
4040 		return NULL_TREE;
4041 
4042 	      in_p = (n_in_p == in_p);
4043 	    }
4044 	  else
4045 	    {
4046 	      /* Otherwise, "or" the range with the range of the input
4047 		 that will be interpreted as negative.  */
4048 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4049 				  1, fold_convert_loc (loc, arg0_type,
4050 						       integer_zero_node),
4051 				  high_positive))
4052 		return NULL_TREE;
4053 
4054 	      in_p = (in_p != n_in_p);
4055 	    }
4056 	}
4057 
4058       *p_low = n_low;
4059       *p_high = n_high;
4060       *p_in_p = in_p;
4061       return arg0;
4062 
4063     default:
4064       return NULL_TREE;
4065     }
4066 }
4067 
4068 /* Given EXP, a logical expression, set the range it is testing into
4069    variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
4070    actually being tested.  *PLOW and *PHIGH will be made of the same
4071    type as the returned expression.  If EXP is not a comparison, we
4072    will most likely not be returning a useful value and range.  Set
4073    *STRICT_OVERFLOW_P to true if the return value is only valid
4074    because signed overflow is undefined; otherwise, do not change
4075    *STRICT_OVERFLOW_P.  */
4076 
4077 tree
4078 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4079 	    bool *strict_overflow_p)
4080 {
4081   enum tree_code code;
4082   tree arg0, arg1 = NULL_TREE;
4083   tree exp_type, nexp;
4084   int in_p;
4085   tree low, high;
4086   location_t loc = EXPR_LOCATION (exp);
4087 
4088   /* Start with simply saying "EXP != 0" and then look at the code of EXP
4089      and see if we can refine the range.  Some of the cases below may not
4090      happen, but it doesn't seem worth worrying about this.  We keep
4091      iterating as long as make_range_step refines the expression; when
4092      it returns NULL_TREE we stop.  */
4093 
4094   in_p = 0;
4095   low = high = build_int_cst (TREE_TYPE (exp), 0);
4096 
4097   while (1)
4098     {
4099       code = TREE_CODE (exp);
4100       exp_type = TREE_TYPE (exp);
4101       arg0 = NULL_TREE;
4102 
4103       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4104 	{
4105 	  if (TREE_OPERAND_LENGTH (exp) > 0)
4106 	    arg0 = TREE_OPERAND (exp, 0);
4107 	  if (TREE_CODE_CLASS (code) == tcc_binary
4108 	      || TREE_CODE_CLASS (code) == tcc_comparison
4109 	      || (TREE_CODE_CLASS (code) == tcc_expression
4110 		  && TREE_OPERAND_LENGTH (exp) > 1))
4111 	    arg1 = TREE_OPERAND (exp, 1);
4112 	}
4113       if (arg0 == NULL_TREE)
4114 	break;
4115 
4116       nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4117 			      &high, &in_p, strict_overflow_p);
4118       if (nexp == NULL_TREE)
4119 	break;
4120       exp = nexp;
4121     }
4122 
4123   /* If EXP is a constant, we can evaluate whether this is true or false.  */
4124   if (TREE_CODE (exp) == INTEGER_CST)
4125     {
4126       in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4127 						 exp, 0, low, 0))
4128 		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
4129 						    exp, 1, high, 1)));
4130       low = high = 0;
4131       exp = 0;
4132     }
4133 
4134   *pin_p = in_p, *plow = low, *phigh = high;
4135   return exp;
4136 }
4137 
4138 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4139    type, TYPE, return an expression to test if EXP is in (or out of, depending
4140    on IN_P) the range.  Return 0 if the test couldn't be created.  */
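/* E.g. (editorial): for the range + [2, 5] over an int EXP this
   builds (unsigned) (EXP - 2) <= 3; for - [2, 5] the test is built
   and then inverted; a range with both bounds omitted folds to the
   constant 1.  */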
4141 
4142 tree
4143 build_range_check (location_t loc, tree type, tree exp, int in_p,
4144 		   tree low, tree high)
4145 {
4146   tree etype = TREE_TYPE (exp), value;
4147 
4148 #ifdef HAVE_canonicalize_funcptr_for_compare
4149   /* Disable this optimization for function pointer expressions
4150      on targets that require function pointer canonicalization.  */
4151   if (HAVE_canonicalize_funcptr_for_compare
4152       && TREE_CODE (etype) == POINTER_TYPE
4153       && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4154     return NULL_TREE;
4155 #endif
4156 
4157   if (! in_p)
4158     {
4159       value = build_range_check (loc, type, exp, 1, low, high);
4160       if (value != 0)
4161         return invert_truthvalue_loc (loc, value);
4162 
4163       return 0;
4164     }
4165 
4166   if (low == 0 && high == 0)
4167     return build_int_cst (type, 1);
4168 
4169   if (low == 0)
4170     return fold_build2_loc (loc, LE_EXPR, type, exp,
4171 			fold_convert_loc (loc, etype, high));
4172 
4173   if (high == 0)
4174     return fold_build2_loc (loc, GE_EXPR, type, exp,
4175 			fold_convert_loc (loc, etype, low));
4176 
4177   if (operand_equal_p (low, high, 0))
4178     return fold_build2_loc (loc, EQ_EXPR, type, exp,
4179 			fold_convert_loc (loc, etype, low));
4180 
4181   if (integer_zerop (low))
4182     {
4183       if (! TYPE_UNSIGNED (etype))
4184 	{
4185 	  etype = unsigned_type_for (etype);
4186 	  high = fold_convert_loc (loc, etype, high);
4187 	  exp = fold_convert_loc (loc, etype, exp);
4188 	}
4189       return build_range_check (loc, type, exp, 1, 0, high);
4190     }
4191 
4192   /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
4193   if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4194     {
4195       unsigned HOST_WIDE_INT lo;
4196       HOST_WIDE_INT hi;
4197       int prec;
4198 
4199       prec = TYPE_PRECISION (etype);
4200       if (prec <= HOST_BITS_PER_WIDE_INT)
4201 	{
4202 	  hi = 0;
4203 	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4204 	}
4205       else
4206 	{
4207 	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4208 	  lo = (unsigned HOST_WIDE_INT) -1;
4209 	}
4210 
4211       if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4212 	{
4213 	  if (TYPE_UNSIGNED (etype))
4214 	    {
4215 	      tree signed_etype = signed_type_for (etype);
4216 	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4217 		etype
4218 		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4219 	      else
4220 		etype = signed_etype;
4221 	      exp = fold_convert_loc (loc, etype, exp);
4222 	    }
4223 	  return fold_build2_loc (loc, GT_EXPR, type, exp,
4224 			      build_int_cst (etype, 0));
4225 	}
4226     }
4227 
4228   /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4229      This requires wrap-around arithmetic for the type of the expression.
4230      First make sure that arithmetic in this type is valid, then make sure
4231      that it wraps around.  */
4232   if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4233     etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4234 					    TYPE_UNSIGNED (etype));
4235 
4236   if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4237     {
4238       tree utype, minv, maxv;
4239 
4240       /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4241 	 for the type in question, as we rely on this here.  */
4242       utype = unsigned_type_for (etype);
4243       maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4244       maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4245 			  integer_one_node, 1);
4246       minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4247 
4248       if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4249 				      minv, 1, maxv, 1)))
4250 	etype = utype;
4251       else
4252 	return 0;
4253     }
4254 
4255   high = fold_convert_loc (loc, etype, high);
4256   low = fold_convert_loc (loc, etype, low);
4257   exp = fold_convert_loc (loc, etype, exp);
4258 
4259   value = const_binop (MINUS_EXPR, high, low);
4260 
4262   if (POINTER_TYPE_P (etype))
4263     {
4264       if (value != 0 && !TREE_OVERFLOW (value))
4265 	{
4266 	  low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4267           return build_range_check (loc, type,
4268 			     	    fold_build_pointer_plus_loc (loc, exp, low),
4269 			            1, build_int_cst (etype, 0), value);
4270 	}
4271       return 0;
4272     }
4273 
4274   if (value != 0 && !TREE_OVERFLOW (value))
4275     return build_range_check (loc, type,
4276 			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4277 			      1, build_int_cst (etype, 0), value);
4278 
4279   return 0;
4280 }
4281 
4282 /* Return the predecessor of VAL in its type, handling the infinite case.  */
4283 
4284 static tree
4285 range_predecessor (tree val)
4286 {
4287   tree type = TREE_TYPE (val);
4288 
4289   if (INTEGRAL_TYPE_P (type)
4290       && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4291     return 0;
4292   else
4293     return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4294 }
4295 
4296 /* Return the successor of VAL in its type, handling the infinite case.  */
4297 
4298 static tree
4299 range_successor (tree val)
4300 {
4301   tree type = TREE_TYPE (val);
4302 
4303   if (INTEGRAL_TYPE_P (type)
4304       && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4305     return 0;
4306   else
4307     return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4308 }
4309 
4310 /* Given two ranges, see if we can merge them into one.  Return 1 if we
4311    can, 0 if we can't.  Set the output range into the specified parameters.  */
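/* For example (editorial): merging + [2, 10] with + [5, 15] (both
   conditions must hold) yields the intersection + [5, 10], while
   + [2, 10] with - [4, 6] is not expressible as a single range, so
   we return 0 for that case.  */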
4312 
4313 bool
4314 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4315 	      tree high0, int in1_p, tree low1, tree high1)
4316 {
4317   int no_overlap;
4318   int subset;
4319   int temp;
4320   tree tem;
4321   int in_p;
4322   tree low, high;
4323   int lowequal = ((low0 == 0 && low1 == 0)
4324 		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4325 						low0, 0, low1, 0)));
4326   int highequal = ((high0 == 0 && high1 == 0)
4327 		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4328 						 high0, 1, high1, 1)));
4329 
4330   /* Make range 0 be the range that starts first, or ends last if they
4331      start at the same value.  Swap them if it isn't.  */
4332   if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4333 				 low0, 0, low1, 0))
4334       || (lowequal
4335 	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
4336 					high1, 1, high0, 1))))
4337     {
4338       temp = in0_p, in0_p = in1_p, in1_p = temp;
4339       tem = low0, low0 = low1, low1 = tem;
4340       tem = high0, high0 = high1, high1 = tem;
4341     }
4342 
4343   /* Now flag two cases, whether the ranges are disjoint or whether the
4344      second range is totally subsumed in the first.  Note that the tests
4345      below are simplified by the ones above.  */
4346   no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4347 					  high0, 1, low1, 0));
4348   subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4349 				      high1, 1, high0, 1));
4350 
4351   /* We now have four cases, depending on whether we are including or
4352      excluding the two ranges.  */
4353   if (in0_p && in1_p)
4354     {
4355       /* If they don't overlap, the result is false.  If the second range
4356 	 is a subset it is the result.  Otherwise, the range is from the start
4357 	 of the second to the end of the first.  */
4358       if (no_overlap)
4359 	in_p = 0, low = high = 0;
4360       else if (subset)
4361 	in_p = 1, low = low1, high = high1;
4362       else
4363 	in_p = 1, low = low1, high = high0;
4364     }
4365 
4366   else if (in0_p && ! in1_p)
4367     {
4368       /* If they don't overlap, the result is the first range.  If they are
4369 	 equal, the result is false.  If the second range is a subset of the
4370 	 first, and the ranges begin at the same place, we go from just after
4371 	 the end of the second range to the end of the first.  If the second
4372 	 range is not a subset of the first, or if it is a subset and both
4373 	 ranges end at the same place, the range starts at the start of the
4374 	 first range and ends just before the second range.
4375 	 Otherwise, we can't describe this as a single range.  */
4376       if (no_overlap)
4377 	in_p = 1, low = low0, high = high0;
4378       else if (lowequal && highequal)
4379 	in_p = 0, low = high = 0;
4380       else if (subset && lowequal)
4381 	{
4382 	  low = range_successor (high1);
4383 	  high = high0;
4384 	  in_p = 1;
4385 	  if (low == 0)
4386 	    {
4387 	      /* We are in the weird situation where high0 > high1 but
4388 		 high1 has no successor.  Punt.  */
4389 	      return 0;
4390 	    }
4391 	}
4392       else if (! subset || highequal)
4393 	{
4394 	  low = low0;
4395 	  high = range_predecessor (low1);
4396 	  in_p = 1;
4397 	  if (high == 0)
4398 	    {
4399 	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
4400 	      return 0;
4401 	    }
4402 	}
4403       else
4404 	return 0;
4405     }
4406 
4407   else if (! in0_p && in1_p)
4408     {
4409       /* If they don't overlap, the result is the second range.  If the second
4410 	 is a subset of the first, the result is false.  Otherwise,
4411 	 the range starts just after the first range and ends at the
4412 	 end of the second.  */
4413       if (no_overlap)
4414 	in_p = 1, low = low1, high = high1;
4415       else if (subset || highequal)
4416 	in_p = 0, low = high = 0;
4417       else
4418 	{
4419 	  low = range_successor (high0);
4420 	  high = high1;
4421 	  in_p = 1;
4422 	  if (low == 0)
4423 	    {
4424 	      /* high1 > high0 but high0 has no successor.  Punt.  */
4425 	      return 0;
4426 	    }
4427 	}
4428     }
4429 
4430   else
4431     {
4432       /* The case where we are excluding both ranges.  Here the complex case
4433 	 is if they don't overlap.  In that case, the only time we have a
4434 	 range is if they are adjacent.  If the second is a subset of the
4435 	 first, the result is the first.  Otherwise, the range to exclude
4436 	 starts at the beginning of the first range and ends at the end of the
4437 	 second.  */
4438       if (no_overlap)
4439 	{
4440 	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4441 					 range_successor (high0),
4442 					 1, low1, 0)))
4443 	    in_p = 0, low = low0, high = high1;
4444 	  else
4445 	    {
4446 	      /* Canonicalize - [min, x] into - [-, x].  */
4447 	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
4448 		switch (TREE_CODE (TREE_TYPE (low0)))
4449 		  {
4450 		  case ENUMERAL_TYPE:
4451 		    if (TYPE_PRECISION (TREE_TYPE (low0))
4452 			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4453 		      break;
4454 		    /* FALLTHROUGH */
4455 		  case INTEGER_TYPE:
4456 		    if (tree_int_cst_equal (low0,
4457 					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
4458 		      low0 = 0;
4459 		    break;
4460 		  case POINTER_TYPE:
4461 		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
4462 			&& integer_zerop (low0))
4463 		      low0 = 0;
4464 		    break;
4465 		  default:
4466 		    break;
4467 		  }
4468 
4469 	      /* Canonicalize - [x, max] into - [x, -].  */
4470 	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
4471 		switch (TREE_CODE (TREE_TYPE (high1)))
4472 		  {
4473 		  case ENUMERAL_TYPE:
4474 		    if (TYPE_PRECISION (TREE_TYPE (high1))
4475 			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4476 		      break;
4477 		    /* FALLTHROUGH */
4478 		  case INTEGER_TYPE:
4479 		    if (tree_int_cst_equal (high1,
4480 					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
4481 		      high1 = 0;
4482 		    break;
4483 		  case POINTER_TYPE:
4484 		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
4485 			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4486 						       high1, 1,
4487 						       integer_one_node, 1)))
4488 		      high1 = 0;
4489 		    break;
4490 		  default:
4491 		    break;
4492 		  }
4493 
4494 	      /* The ranges might also be adjacent between the maximum and
4495 	         minimum values of the given type.  For
4496 	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4497 	         return + [x + 1, y - 1].  */
4498 	      if (low0 == 0 && high1 == 0)
4499 	        {
4500 		  low = range_successor (high0);
4501 		  high = range_predecessor (low1);
4502 		  if (low == 0 || high == 0)
4503 		    return 0;
4504 
4505 		  in_p = 1;
4506 		}
4507 	      else
4508 		return 0;
4509 	    }
4510 	}
4511       else if (subset)
4512 	in_p = 0, low = low0, high = high0;
4513       else
4514 	in_p = 0, low = low0, high = high1;
4515     }
4516 
4517   *pin_p = in_p, *plow = low, *phigh = high;
4518   return 1;
4519 }
4520 
4521 
4522 /* Subroutine of fold, looking inside expressions of the form
4523    A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4524    of the COND_EXPR.  This function is being used also to optimize
4525    A op B ? C : A, by reversing the comparison first.
4526 
4527    Return a folded expression whose code is not a COND_EXPR
4528    anymore, or NULL_TREE if no folding opportunity is found.  */
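/* For example (editorial): A > 0 ? A : -A becomes ABS_EXPR <A>, and
   A < B ? A : B becomes a MIN_EXPR, provided NaNs and signed zeros
   need not be honored; the case analysis below spells out when each
   transformation is safe.  */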
4529 
4530 static tree
4531 fold_cond_expr_with_comparison (location_t loc, tree type,
4532 				tree arg0, tree arg1, tree arg2)
4533 {
4534   enum tree_code comp_code = TREE_CODE (arg0);
4535   tree arg00 = TREE_OPERAND (arg0, 0);
4536   tree arg01 = TREE_OPERAND (arg0, 1);
4537   tree arg1_type = TREE_TYPE (arg1);
4538   tree tem;
4539 
4540   STRIP_NOPS (arg1);
4541   STRIP_NOPS (arg2);
4542 
4543   /* If we have A op 0 ? A : -A, consider applying the following
4544      transformations:
4545 
4546      A == 0? A : -A    same as -A
4547      A != 0? A : -A    same as A
4548      A >= 0? A : -A    same as abs (A)
4549      A > 0?  A : -A    same as abs (A)
4550      A <= 0? A : -A    same as -abs (A)
4551      A < 0?  A : -A    same as -abs (A)
4552 
4553      None of these transformations work for modes with signed
4554      zeros.  If A is +/-0, the first two transformations will
4555      change the sign of the result (from +0 to -0, or vice
4556      versa).  The last four will fix the sign of the result,
4557      even though the original expressions could be positive or
4558      negative, depending on the sign of A.
4559 
4560      Note that all these transformations are correct if A is
4561      NaN, since the two alternatives (A and -A) are also NaNs.  */
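  /* Concretely (editorial example): with A == -0.0, "A != 0 ? A : -A"
     evaluates to -A == +0.0, because -0.0 != 0 is false, whereas the
     "equivalent" A is -0.0; hence the !HONOR_SIGNED_ZEROS guard
     below.  */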
4562   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4563       && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4564 	  ? real_zerop (arg01)
4565 	  : integer_zerop (arg01))
4566       && ((TREE_CODE (arg2) == NEGATE_EXPR
4567 	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4568 	     /* In the case that A is of the form X-Y, '-A' (arg2) may
4569 	        have already been folded to Y-X, check for that. */
4570 	  || (TREE_CODE (arg1) == MINUS_EXPR
4571 	      && TREE_CODE (arg2) == MINUS_EXPR
4572 	      && operand_equal_p (TREE_OPERAND (arg1, 0),
4573 				  TREE_OPERAND (arg2, 1), 0)
4574 	      && operand_equal_p (TREE_OPERAND (arg1, 1),
4575 				  TREE_OPERAND (arg2, 0), 0))))
4576     switch (comp_code)
4577       {
4578       case EQ_EXPR:
4579       case UNEQ_EXPR:
4580 	tem = fold_convert_loc (loc, arg1_type, arg1);
4581 	return pedantic_non_lvalue_loc (loc,
4582 				    fold_convert_loc (loc, type,
4583 						  negate_expr (tem)));
4584       case NE_EXPR:
4585       case LTGT_EXPR:
4586 	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4587       case UNGE_EXPR:
4588       case UNGT_EXPR:
4589 	if (flag_trapping_math)
4590 	  break;
4591 	/* Fall through.  */
4592       case GE_EXPR:
4593       case GT_EXPR:
4594 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4595 	  arg1 = fold_convert_loc (loc, signed_type_for
4596 			       (TREE_TYPE (arg1)), arg1);
4597 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4598 	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4599       case UNLE_EXPR:
4600       case UNLT_EXPR:
4601 	if (flag_trapping_math)
4602 	  break;
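	/* Fall through.  */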
4603       case LE_EXPR:
4604       case LT_EXPR:
4605 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4606 	  arg1 = fold_convert_loc (loc, signed_type_for
4607 			       (TREE_TYPE (arg1)), arg1);
4608 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4609 	return negate_expr (fold_convert_loc (loc, type, tem));
4610       default:
4611 	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4612 	break;
4613       }
4614 
4615   /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
4616      A == 0 ? A : 0 is always 0 unless A is -0.  Note that
4617      both transformations are correct when A is NaN: A != 0
4618      is then true, and A == 0 is false.  */
4619 
4620   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4621       && integer_zerop (arg01) && integer_zerop (arg2))
4622     {
4623       if (comp_code == NE_EXPR)
4624 	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4625       else if (comp_code == EQ_EXPR)
4626 	return build_int_cst (type, 0);
4627     }
4628 
4629   /* Try some transformations of A op B ? A : B.
4630 
4631      A == B? A : B    same as B
4632      A != B? A : B    same as A
4633      A >= B? A : B    same as max (A, B)
4634      A > B?  A : B    same as max (B, A)
4635      A <= B? A : B    same as min (A, B)
4636      A < B?  A : B    same as min (B, A)
4637 
4638      As above, these transformations don't work in the presence
4639      of signed zeros.  For example, if A and B are zeros of
4640      opposite sign, the first two transformations will change
4641      the sign of the result.  In the last four, the original
4642      expressions give different results for (A=+0, B=-0) and
4643      (A=-0, B=+0), but the transformed expressions do not.
4644 
4645      The first two transformations are correct if either A or B
4646      is a NaN.  In the first transformation, the condition will
4647      be false, and B will indeed be chosen.  In the case of the
4648      second transformation, the condition A != B will be true,
4649      and A will be chosen.
4650 
4651      The conversions to max() and min() are not correct if B is
4652      a number and A is not.  The conditions in the original
4653      expressions will be false, so all four give B.  The min()
4654      and max() versions would give a NaN instead.  */
4655   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4656       && operand_equal_for_comparison_p (arg01, arg2, arg00)
4657       /* Avoid these transformations if the COND_EXPR may be used
4658 	 as an lvalue in the C++ front-end.  PR c++/19199.  */
4659       && (in_gimple_form
4660 	  || (strcmp (lang_hooks.name, "GNU C++") != 0
4661 	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4662 	  || ! maybe_lvalue_p (arg1)
4663 	  || ! maybe_lvalue_p (arg2)))
4664     {
4665       tree comp_op0 = arg00;
4666       tree comp_op1 = arg01;
4667       tree comp_type = TREE_TYPE (comp_op0);
4668 
4669       /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
4670       if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4671 	{
4672 	  comp_type = type;
4673 	  comp_op0 = arg1;
4674 	  comp_op1 = arg2;
4675 	}
4676 
4677       switch (comp_code)
4678 	{
4679 	case EQ_EXPR:
4680 	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4681 	case NE_EXPR:
4682 	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4683 	case LE_EXPR:
4684 	case LT_EXPR:
4685 	case UNLE_EXPR:
4686 	case UNLT_EXPR:
4687 	  /* In C++ a ?: expression can be an lvalue, so put the
4688 	     operand which will be used if they are equal first
4689 	     so that we can convert this back to the
4690 	     corresponding COND_EXPR.  */
4691 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4692 	    {
4693 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4694 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4695 	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4696 		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4697 		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
4698 				   comp_op1, comp_op0);
4699 	      return pedantic_non_lvalue_loc (loc,
4700 					  fold_convert_loc (loc, type, tem));
4701 	    }
4702 	  break;
4703 	case GE_EXPR:
4704 	case GT_EXPR:
4705 	case UNGE_EXPR:
4706 	case UNGT_EXPR:
4707 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4708 	    {
4709 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4710 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4711 	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4712 		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4713 		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
4714 				   comp_op1, comp_op0);
4715 	      return pedantic_non_lvalue_loc (loc,
4716 					  fold_convert_loc (loc, type, tem));
4717 	    }
4718 	  break;
4719 	case UNEQ_EXPR:
4720 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4721 	    return pedantic_non_lvalue_loc (loc,
4722 					fold_convert_loc (loc, type, arg2));
4723 	  break;
4724 	case LTGT_EXPR:
4725 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4726 	    return pedantic_non_lvalue_loc (loc,
4727 					fold_convert_loc (loc, type, arg1));
4728 	  break;
4729 	default:
4730 	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4731 	  break;
4732 	}
4733     }
4734 
4735   /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4736      we might still be able to simplify this.  For example,
4737      if C1 is one less or one more than C2, this might have started
4738      out as a MIN or MAX and been transformed by this function.
4739      Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */
4740 
4741   if (INTEGRAL_TYPE_P (type)
4742       && TREE_CODE (arg01) == INTEGER_CST
4743       && TREE_CODE (arg2) == INTEGER_CST)
4744     switch (comp_code)
4745       {
4746       case EQ_EXPR:
4747 	if (TREE_CODE (arg1) == INTEGER_CST)
4748 	  break;
4749 	/* We can replace A with C1 in this case.  */
4750 	arg1 = fold_convert_loc (loc, type, arg01);
4751 	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4752 
4753       case LT_EXPR:
4754 	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4755 	   MIN_EXPR, to preserve the signedness of the comparison.  */
4756 	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4757 			       OEP_ONLY_CONST)
4758 	    && operand_equal_p (arg01,
4759 				const_binop (PLUS_EXPR, arg2,
4760 					     build_int_cst (type, 1)),
4761 				OEP_ONLY_CONST))
4762 	  {
4763 	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4764 				   fold_convert_loc (loc, TREE_TYPE (arg00),
4765 						     arg2));
4766 	    return pedantic_non_lvalue_loc (loc,
4767 					    fold_convert_loc (loc, type, tem));
4768 	  }
4769 	break;
4770 
4771       case LE_EXPR:
4772 	/* If C1 is C2 - 1, this is min(A, C2), with the same care
4773 	   as above.  */
4774 	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4775 			       OEP_ONLY_CONST)
4776 	    && operand_equal_p (arg01,
4777 				const_binop (MINUS_EXPR, arg2,
4778 					     build_int_cst (type, 1)),
4779 				OEP_ONLY_CONST))
4780 	  {
4781 	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4782 				   fold_convert_loc (loc, TREE_TYPE (arg00),
4783 						     arg2));
4784 	    return pedantic_non_lvalue_loc (loc,
4785 					    fold_convert_loc (loc, type, tem));
4786 	  }
4787 	break;
4788 
4789       case GT_EXPR:
4790 	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4791 	   MAX_EXPR, to preserve the signedness of the comparison.  */
4792 	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4793 			       OEP_ONLY_CONST)
4794 	    && operand_equal_p (arg01,
4795 				const_binop (MINUS_EXPR, arg2,
4796 					     build_int_cst (type, 1)),
4797 				OEP_ONLY_CONST))
4798 	  {
4799 	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4800 				   fold_convert_loc (loc, TREE_TYPE (arg00),
4801 						     arg2));
4802 	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4803 	  }
4804 	break;
4805 
4806       case GE_EXPR:
4807 	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
4808 	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4809 			       OEP_ONLY_CONST)
4810 	    && operand_equal_p (arg01,
4811 				const_binop (PLUS_EXPR, arg2,
4812 					     build_int_cst (type, 1)),
4813 				OEP_ONLY_CONST))
4814 	  {
4815 	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4816 				   fold_convert_loc (loc, TREE_TYPE (arg00),
4817 						     arg2));
4818 	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4819 	  }
4820 	break;
4821       case NE_EXPR:
4822 	break;
4823       default:
4824 	gcc_unreachable ();
4825       }
4826 
4827   return NULL_TREE;
4828 }
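
/* Illustrative sketch (hypothetical user code, not from the sources
   above): the constant cases in the switch recover a MIN from a plain
   conditional when C1 == C2 + 1, e.g.

     x < 4 ? x : 3   ->   MIN (x, 3)

   which is the LT_EXPR arm, guarded so that C2 is not the maximum
   value of the type.  */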
4829 
4830 
4831 
4832 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4833 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4834   (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4835 		false) >= 2)
4836 #endif
4837 
4838 /* EXP is some logical combination of boolean tests.  See if we can
4839    merge it into some range test.  Return the new tree if so.  */
4840 
4841 static tree
4842 fold_range_test (location_t loc, enum tree_code code, tree type,
4843 		 tree op0, tree op1)
4844 {
4845   int or_op = (code == TRUTH_ORIF_EXPR
4846 	       || code == TRUTH_OR_EXPR);
4847   int in0_p, in1_p, in_p;
4848   tree low0, low1, low, high0, high1, high;
4849   bool strict_overflow_p = false;
4850   tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4851   tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4852   tree tem;
4853   const char * const warnmsg = G_("assuming signed overflow does not occur "
4854 				  "when simplifying range test");
4855 
4856   /* If this is an OR operation, invert both sides; we will invert
4857      again at the end.  */
4858   if (or_op)
4859     in0_p = ! in0_p, in1_p = ! in1_p;
4860 
4861   /* If both expressions are the same, if we can merge the ranges, and we
4862      can build the range test, return it or its inversion.  If one of the
4863      ranges is always true or always false, consider it to be the same
4864      expression as the other.  */
4865   if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4866       && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4867 		       in1_p, low1, high1)
4868       && 0 != (tem = (build_range_check (loc, type,
4869 					 lhs != 0 ? lhs
4870 					 : rhs != 0 ? rhs : integer_zero_node,
4871 					 in_p, low, high))))
4872     {
4873       if (strict_overflow_p)
4874 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4875       return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4876     }
4877 
4878   /* On machines where the branch cost is expensive, if this is a
4879      short-circuited branch and the underlying object on both sides
4880      is the same, make a non-short-circuit operation.  */
4881   else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4882 	   && lhs != 0 && rhs != 0
4883 	   && (code == TRUTH_ANDIF_EXPR
4884 	       || code == TRUTH_ORIF_EXPR)
4885 	   && operand_equal_p (lhs, rhs, 0))
4886     {
4887       /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
4888 	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4889 	 which cases we can't do this.  */
4890       if (simple_operand_p (lhs))
4891 	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4892 			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4893 			   type, op0, op1);
4894 
4895       else if (!lang_hooks.decls.global_bindings_p ()
4896 	       && !CONTAINS_PLACEHOLDER_P (lhs))
4897 	{
4898 	  tree common = save_expr (lhs);
4899 
4900 	  if (0 != (lhs = build_range_check (loc, type, common,
4901 					     or_op ? ! in0_p : in0_p,
4902 					     low0, high0))
4903 	      && (0 != (rhs = build_range_check (loc, type, common,
4904 						 or_op ? ! in1_p : in1_p,
4905 						 low1, high1))))
4906 	    {
4907 	      if (strict_overflow_p)
4908 		fold_overflow_warning (warnmsg,
4909 				       WARN_STRICT_OVERFLOW_COMPARISON);
4910 	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4911 				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4912 				 type, lhs, rhs);
4913 	    }
4914 	}
4915     }
4916 
4917   return 0;
4918 }
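
/* Illustrative sketch of the merge above on hypothetical user code;
   `is_digit' and `is_digit_folded' are made-up names for the example.  */
static int
is_digit (int ch)
{
  return ch >= '0' && ch <= '9';	/* two compares, two branches */
}

static int
is_digit_folded (int ch)
{
  /* A single unsigned range check, equivalent to what
     build_range_check produces: subtract the low bound and compare
     against the width of the range.  */
  return (unsigned int) (ch - '0') <= 9u;
}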
4919 
4920 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4921    bit value.  Arrange things so the extra bits will be set to zero if and
4922    only if C is sign-extended to its full width.  If MASK is nonzero,
4923    it is an INTEGER_CST that should be AND'ed with the extra bits.  */
4924 
4925 static tree
4926 unextend (tree c, int p, int unsignedp, tree mask)
4927 {
4928   tree type = TREE_TYPE (c);
4929   int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4930   tree temp;
4931 
4932   if (p == modesize || unsignedp)
4933     return c;
4934 
4935   /* We work by getting just the sign bit into the low-order bit, then
4936      into the high-order bit, then sign-extend.  We then XOR that value
4937      with C.  */
4938   temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4939   temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4940 
4941   /* We must use a signed type in order to get an arithmetic right shift.
4942      However, we must also avoid introducing accidental overflows, so that
4943      a subsequent call to integer_zerop will work.  Hence we must
4944      do the type conversion here.  At this point, the constant is either
4945      zero or one, and the conversion to a signed type can never overflow.
4946      We could get an overflow if this conversion is done anywhere else.  */
4947   if (TYPE_UNSIGNED (type))
4948     temp = fold_convert (signed_type_for (type), temp);
4949 
4950   temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
4951   temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
4952   if (mask != 0)
4953     temp = const_binop (BIT_AND_EXPR, temp,
4954 			fold_convert (TREE_TYPE (c), mask));
4955   /* If necessary, convert the type back to match the type of C.  */
4956   if (TYPE_UNSIGNED (type))
4957     temp = fold_convert (type, temp);
4958 
4959   return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
4960 }
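
/* Worked example for unextend (illustrative): with P == 8 in a 32-bit
   mode and C == 0xffffffff (the 8-bit value -1 already sign-extended),
   the extracted sign bit is 1, the two shifts turn it into
   TEMP == 0xffffff00, and C ^ TEMP == 0x000000ff: the extra bits come
   out zero precisely because C was sign-extended.  Starting instead
   from the zero-extended C == 0x000000ff yields 0xffffffff, with the
   extra bits set.  */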
4961 
4962 /* For an expression that has the form
4963      (A && B) || ~B
4964    or
4965      (A || B) && ~B,
4966    we can drop one of the inner expressions and simplify to
4967      A || ~B
4968    or
4969      A && ~B
4970    LOC is the location of the resulting expression.  OP is the inner
4971    logical operation; the left-hand side in the examples above, while CMPOP
4972    is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
4973    removing a condition that guards another, as in
4974      (A != NULL && A->...) || A == NULL
4975    which we must not transform.  If RHS_ONLY is true, only eliminate the
4976    right-most operand of the inner logical operation.  */
4977 
4978 static tree
4979 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
4980 				 bool rhs_only)
4981 {
4982   tree type = TREE_TYPE (cmpop);
4983   enum tree_code code = TREE_CODE (cmpop);
4984   enum tree_code truthop_code = TREE_CODE (op);
4985   tree lhs = TREE_OPERAND (op, 0);
4986   tree rhs = TREE_OPERAND (op, 1);
4987   tree orig_lhs = lhs, orig_rhs = rhs;
4988   enum tree_code rhs_code = TREE_CODE (rhs);
4989   enum tree_code lhs_code = TREE_CODE (lhs);
4990   enum tree_code inv_code;
4991 
4992   if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
4993     return NULL_TREE;
4994 
4995   if (TREE_CODE_CLASS (code) != tcc_comparison)
4996     return NULL_TREE;
4997 
4998   if (rhs_code == truthop_code)
4999     {
5000       tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5001       if (newrhs != NULL_TREE)
5002 	{
5003 	  rhs = newrhs;
5004 	  rhs_code = TREE_CODE (rhs);
5005 	}
5006     }
5007   if (lhs_code == truthop_code && !rhs_only)
5008     {
5009       tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5010       if (newlhs != NULL_TREE)
5011 	{
5012 	  lhs = newlhs;
5013 	  lhs_code = TREE_CODE (lhs);
5014 	}
5015     }
5016 
5017   inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5018   if (inv_code == rhs_code
5019       && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5020       && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5021     return lhs;
5022   if (!rhs_only && inv_code == lhs_code
5023       && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5024       && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5025     return rhs;
5026   if (rhs != orig_rhs || lhs != orig_lhs)
5027     return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5028 			    lhs, rhs);
5029   return NULL_TREE;
5030 }
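
/* Illustrative sketch (hypothetical user code): the merge above turns

     (x < y && x != z) || x == z   ->   x < y || x == z

   because `x != z' is exactly the inverse of the guarding comparison
   `x == z'.  The RHS_ONLY mode is what keeps a pointer guard such as
   (p != NULL && p->field != 0) || p == NULL from losing `p != NULL'.  */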
5031 
5032 /* Find ways of folding logical expressions of LHS and RHS:
5033    Try to merge two comparisons to the same innermost item.
5034    Look for range tests like "ch >= '0' && ch <= '9'".
5035    Look for combinations of simple terms on machines with expensive branches
5036    and evaluate the RHS unconditionally.
5037 
5038    For example, if we have p->a == 2 && p->b == 4 and we can make an
5039    object large enough to span both A and B, we can do this with a comparison
5040    against the object ANDed with a mask.
5041 
5042    If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5043    operations to do this with one comparison.
5044 
5045    We check for both normal comparisons and the BIT_AND_EXPRs made by this
5046    function and the one above.
5047 
5048    CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
5049    TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5050 
5051    TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5052    two operands.
5053 
5054    We return the simplified tree or 0 if no optimization is possible.  */
5055 
5056 static tree
5057 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5058 		    tree lhs, tree rhs)
5059 {
5060   /* If this is the "or" of two comparisons, we can do something if
5061      the comparisons are NE_EXPR.  If this is the "and", we can do something
5062      if the comparisons are EQ_EXPR.  I.e.,
5063 	(a->b == 2 && a->c == 4) can become (a->new == NEW).
5064 
5065      WANTED_CODE is this operation code.  For single bit fields, we can
5066      convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5067      comparison for one-bit fields.  */
5068 
5069   enum tree_code wanted_code;
5070   enum tree_code lcode, rcode;
5071   tree ll_arg, lr_arg, rl_arg, rr_arg;
5072   tree ll_inner, lr_inner, rl_inner, rr_inner;
5073   HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5074   HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5075   HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5076   HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5077   int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5078   enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5079   enum machine_mode lnmode, rnmode;
5080   tree ll_mask, lr_mask, rl_mask, rr_mask;
5081   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5082   tree l_const, r_const;
5083   tree lntype, rntype, result;
5084   HOST_WIDE_INT first_bit, end_bit;
5085   int volatilep;
5086 
5087   /* Start by getting the comparison codes.  Fail if anything is volatile.
5088      If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5089      it were surrounded with a NE_EXPR.  */
5090 
5091   if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5092     return 0;
5093 
5094   lcode = TREE_CODE (lhs);
5095   rcode = TREE_CODE (rhs);
5096 
5097   if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5098     {
5099       lhs = build2 (NE_EXPR, truth_type, lhs,
5100 		    build_int_cst (TREE_TYPE (lhs), 0));
5101       lcode = NE_EXPR;
5102     }
5103 
5104   if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5105     {
5106       rhs = build2 (NE_EXPR, truth_type, rhs,
5107 		    build_int_cst (TREE_TYPE (rhs), 0));
5108       rcode = NE_EXPR;
5109     }
5110 
5111   if (TREE_CODE_CLASS (lcode) != tcc_comparison
5112       || TREE_CODE_CLASS (rcode) != tcc_comparison)
5113     return 0;
5114 
5115   ll_arg = TREE_OPERAND (lhs, 0);
5116   lr_arg = TREE_OPERAND (lhs, 1);
5117   rl_arg = TREE_OPERAND (rhs, 0);
5118   rr_arg = TREE_OPERAND (rhs, 1);
5119 
5120   /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
5121   if (simple_operand_p (ll_arg)
5122       && simple_operand_p (lr_arg))
5123     {
5124       if (operand_equal_p (ll_arg, rl_arg, 0)
5125           && operand_equal_p (lr_arg, rr_arg, 0))
5126 	{
5127           result = combine_comparisons (loc, code, lcode, rcode,
5128 					truth_type, ll_arg, lr_arg);
5129 	  if (result)
5130 	    return result;
5131 	}
5132       else if (operand_equal_p (ll_arg, rr_arg, 0)
5133                && operand_equal_p (lr_arg, rl_arg, 0))
5134 	{
5135           result = combine_comparisons (loc, code, lcode,
5136 					swap_tree_comparison (rcode),
5137 					truth_type, ll_arg, lr_arg);
5138 	  if (result)
5139 	    return result;
5140 	}
5141     }
5142 
5143   code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5144 	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5145 
5146   /* If the RHS can be evaluated unconditionally and its operands are
5147      simple, it wins to evaluate the RHS unconditionally on machines
5148      with expensive branches.  In this case, this isn't a comparison
5149      that can be merged.  */
5150 
5151   if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5152 		   false) >= 2
5153       && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5154       && simple_operand_p (rl_arg)
5155       && simple_operand_p (rr_arg))
5156     {
5157       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
5158       if (code == TRUTH_OR_EXPR
5159 	  && lcode == NE_EXPR && integer_zerop (lr_arg)
5160 	  && rcode == NE_EXPR && integer_zerop (rr_arg)
5161 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5162 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5163 	return build2_loc (loc, NE_EXPR, truth_type,
5164 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5165 				   ll_arg, rl_arg),
5166 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5167 
5168       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
5169       if (code == TRUTH_AND_EXPR
5170 	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
5171 	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
5172 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5173 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5174 	return build2_loc (loc, EQ_EXPR, truth_type,
5175 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5176 				   ll_arg, rl_arg),
5177 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5178     }
5179 
5180   /* See if the comparisons can be merged.  Then get all the parameters for
5181      each side.  */
5182 
5183   if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5184       || (rcode != EQ_EXPR && rcode != NE_EXPR))
5185     return 0;
5186 
5187   volatilep = 0;
5188   ll_inner = decode_field_reference (loc, ll_arg,
5189 				     &ll_bitsize, &ll_bitpos, &ll_mode,
5190 				     &ll_unsignedp, &volatilep, &ll_mask,
5191 				     &ll_and_mask);
5192   lr_inner = decode_field_reference (loc, lr_arg,
5193 				     &lr_bitsize, &lr_bitpos, &lr_mode,
5194 				     &lr_unsignedp, &volatilep, &lr_mask,
5195 				     &lr_and_mask);
5196   rl_inner = decode_field_reference (loc, rl_arg,
5197 				     &rl_bitsize, &rl_bitpos, &rl_mode,
5198 				     &rl_unsignedp, &volatilep, &rl_mask,
5199 				     &rl_and_mask);
5200   rr_inner = decode_field_reference (loc, rr_arg,
5201 				     &rr_bitsize, &rr_bitpos, &rr_mode,
5202 				     &rr_unsignedp, &volatilep, &rr_mask,
5203 				     &rr_and_mask);
5204 
5205   /* The inner operation on the lhs of each comparison must be the
5206      same if we are to be able to do anything.
5207      Then see if we have constants.  If not, the same must be true for
5208      the rhs's.  */
5209   if (volatilep || ll_inner == 0 || rl_inner == 0
5210       || ! operand_equal_p (ll_inner, rl_inner, 0))
5211     return 0;
5212 
5213   if (TREE_CODE (lr_arg) == INTEGER_CST
5214       && TREE_CODE (rr_arg) == INTEGER_CST)
5215     l_const = lr_arg, r_const = rr_arg;
5216   else if (lr_inner == 0 || rr_inner == 0
5217 	   || ! operand_equal_p (lr_inner, rr_inner, 0))
5218     return 0;
5219   else
5220     l_const = r_const = 0;
5221 
5222   /* If either comparison code is not correct for our logical operation,
5223      fail.  However, we can convert a one-bit comparison against zero into
5224      the opposite comparison against that bit being set in the field.  */
5225 
5226   wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5227   if (lcode != wanted_code)
5228     {
5229       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5230 	{
5231 	  /* Make the left operand unsigned, since we are only interested
5232 	     in the value of one bit.  Otherwise we are doing the wrong
5233 	     thing below.  */
5234 	  ll_unsignedp = 1;
5235 	  l_const = ll_mask;
5236 	}
5237       else
5238 	return 0;
5239     }
5240 
5241   /* This is analogous to the code for l_const above.  */
5242   if (rcode != wanted_code)
5243     {
5244       if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5245 	{
5246 	  rl_unsignedp = 1;
5247 	  r_const = rl_mask;
5248 	}
5249       else
5250 	return 0;
5251     }
5252 
5253   /* See if we can find a mode that contains both fields being compared on
5254      the left.  If we can't, fail.  Otherwise, update all constants and masks
5255      to be relative to a field of that size.  */
5256   first_bit = MIN (ll_bitpos, rl_bitpos);
5257   end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5258   lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5259 			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5260 			  volatilep);
5261   if (lnmode == VOIDmode)
5262     return 0;
5263 
5264   lnbitsize = GET_MODE_BITSIZE (lnmode);
5265   lnbitpos = first_bit & ~ (lnbitsize - 1);
5266   lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5267   xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5268 
5269   if (BYTES_BIG_ENDIAN)
5270     {
5271       xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5272       xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5273     }
5274 
5275   ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5276 			 size_int (xll_bitpos));
5277   rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5278 			 size_int (xrl_bitpos));
5279 
5280   if (l_const)
5281     {
5282       l_const = fold_convert_loc (loc, lntype, l_const);
5283       l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5284       l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5285       if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5286 					fold_build1_loc (loc, BIT_NOT_EXPR,
5287 						     lntype, ll_mask))))
5288 	{
5289 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5290 
5291 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5292 	}
5293     }
5294   if (r_const)
5295     {
5296       r_const = fold_convert_loc (loc, lntype, r_const);
5297       r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5298       r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5299       if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5300 					fold_build1_loc (loc, BIT_NOT_EXPR,
5301 						     lntype, rl_mask))))
5302 	{
5303 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5304 
5305 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5306 	}
5307     }
5308 
5309   /* If the right sides are not constant, do the same for them.  Also,
5310      disallow this optimization if a size or signedness mismatch occurs
5311      between the left and right sides.  */
5312   if (l_const == 0)
5313     {
5314       if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5315 	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5316 	  /* Make sure the two fields on the right
5317 	     correspond to the left without being swapped.  */
5318 	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5319 	return 0;
5320 
5321       first_bit = MIN (lr_bitpos, rr_bitpos);
5322       end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5323       rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5324 			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5325 			      volatilep);
5326       if (rnmode == VOIDmode)
5327 	return 0;
5328 
5329       rnbitsize = GET_MODE_BITSIZE (rnmode);
5330       rnbitpos = first_bit & ~ (rnbitsize - 1);
5331       rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5332       xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5333 
5334       if (BYTES_BIG_ENDIAN)
5335 	{
5336 	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5337 	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5338 	}
5339 
5340       lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5341 							    rntype, lr_mask),
5342 			     size_int (xlr_bitpos));
5343       rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5344 							    rntype, rr_mask),
5345 			     size_int (xrr_bitpos));
5346 
5347       /* Make a mask that corresponds to both fields being compared.
5348 	 Do this for both items being compared.  If the operands are the
5349 	 same size and the bits being compared are in the same position
5350 	 then we can do this by masking both and comparing the masked
5351 	 results.  */
5352       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5353       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5354       if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5355 	{
5356 	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5357 				    ll_unsignedp || rl_unsignedp);
5358 	  if (! all_ones_mask_p (ll_mask, lnbitsize))
5359 	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5360 
5361 	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5362 				    lr_unsignedp || rr_unsignedp);
5363 	  if (! all_ones_mask_p (lr_mask, rnbitsize))
5364 	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5365 
5366 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5367 	}
5368 
5369       /* There is still another way we can do something:  If both pairs of
5370 	 fields being compared are adjacent, we may be able to make a wider
5371 	 field containing them both.
5372 
5373 	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
5374 	 the mask must be shifted to account for the shift done by
5375 	 make_bit_field_ref.  */
5376       if ((ll_bitsize + ll_bitpos == rl_bitpos
5377 	   && lr_bitsize + lr_bitpos == rr_bitpos)
5378 	  || (ll_bitpos == rl_bitpos + rl_bitsize
5379 	      && lr_bitpos == rr_bitpos + rr_bitsize))
5380 	{
5381 	  tree type;
5382 
5383 	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
5384 				    ll_bitsize + rl_bitsize,
5385 				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5386 	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
5387 				    lr_bitsize + rr_bitsize,
5388 				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5389 
5390 	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5391 				 size_int (MIN (xll_bitpos, xrl_bitpos)));
5392 	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5393 				 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5394 
5395 	  /* Convert to the smaller type before masking out unwanted bits.  */
5396 	  type = lntype;
5397 	  if (lntype != rntype)
5398 	    {
5399 	      if (lnbitsize > rnbitsize)
5400 		{
5401 		  lhs = fold_convert_loc (loc, rntype, lhs);
5402 		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5403 		  type = rntype;
5404 		}
5405 	      else if (lnbitsize < rnbitsize)
5406 		{
5407 		  rhs = fold_convert_loc (loc, lntype, rhs);
5408 		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5409 		  type = lntype;
5410 		}
5411 	    }
5412 
5413 	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5414 	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5415 
5416 	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5417 	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5418 
5419 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5420 	}
5421 
5422       return 0;
5423     }
5424 
5425   /* Handle the case of comparisons with constants.  If there is something in
5426      common between the masks, those bits of the constants must be the same.
5427      If not, the condition is always false.  Test for this to avoid generating
5428      incorrect code below.  */
5429   result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5430   if (! integer_zerop (result)
5431       && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5432 			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5433     {
5434       if (wanted_code == NE_EXPR)
5435 	{
5436 	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
5437 	  return constant_boolean_node (true, truth_type);
5438 	}
5439       else
5440 	{
5441 	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5442 	  return constant_boolean_node (false, truth_type);
5443 	}
5444     }
5445 
5446   /* Construct the expression we will return.  First get the component
5447      reference we will make.  Unless the mask is all ones the width of
5448      that field, perform the mask operation.  Then compare with the
5449      merged constant.  */
5450   result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5451 			       ll_unsignedp || rl_unsignedp);
5452 
5453   ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5454   if (! all_ones_mask_p (ll_mask, lnbitsize))
5455     result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5456 
5457   return build2_loc (loc, wanted_code, truth_type, result,
5458 		     const_binop (BIT_IOR_EXPR, l_const, r_const));
5459 }
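
/* Illustrative sketch of the single-load merge above on hypothetical
   user code; the struct, the function names, and the little-endian
   packing are assumptions for the example only.  */
struct two_nibbles { unsigned a : 4; unsigned b : 4; };

static int
both_eq (const struct two_nibbles *p)
{
  return p->a == 2 && p->b == 4;	/* two extracts, two compares */
}

static int
both_eq_merged (const struct two_nibbles *p)
{
  /* One byte load against the merged constant: with A in the low
     nibble and B in the high nibble this is 2 | (4 << 4) == 0x42.  */
  return (*(const unsigned char *) p & 0xff) == 0x42;
}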
5460 
5461 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5462    constant.  */
5463 
5464 static tree
5465 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5466 			    tree op0, tree op1)
5467 {
5468   tree arg0 = op0;
5469   enum tree_code op_code;
5470   tree comp_const;
5471   tree minmax_const;
5472   int consts_equal, consts_lt;
5473   tree inner;
5474 
5475   STRIP_SIGN_NOPS (arg0);
5476 
5477   op_code = TREE_CODE (arg0);
5478   minmax_const = TREE_OPERAND (arg0, 1);
5479   comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5480   consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5481   consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5482   inner = TREE_OPERAND (arg0, 0);
5483 
5484   /* If something does not permit us to optimize, return the original tree.  */
5485   if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5486       || TREE_CODE (comp_const) != INTEGER_CST
5487       || TREE_OVERFLOW (comp_const)
5488       || TREE_CODE (minmax_const) != INTEGER_CST
5489       || TREE_OVERFLOW (minmax_const))
5490     return NULL_TREE;
5491 
5492   /* Now handle all the various comparison codes.  We only handle EQ_EXPR
5493      and GT_EXPR, doing the rest with recursive calls using logical
5494      simplifications.  */
5495   switch (code)
5496     {
5497     case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
5498       {
5499 	tree tem
5500 	  = optimize_minmax_comparison (loc,
5501 					invert_tree_comparison (code, false),
5502 					type, op0, op1);
5503 	if (tem)
5504 	  return invert_truthvalue_loc (loc, tem);
5505 	return NULL_TREE;
5506       }
5507 
5508     case GE_EXPR:
5509       return
5510 	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5511 		     optimize_minmax_comparison
5512 		     (loc, EQ_EXPR, type, arg0, comp_const),
5513 		     optimize_minmax_comparison
5514 		     (loc, GT_EXPR, type, arg0, comp_const));
5515 
5516     case EQ_EXPR:
5517       if (op_code == MAX_EXPR && consts_equal)
5518 	/* MAX (X, 0) == 0  ->  X <= 0  */
5519 	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5520 
5521       else if (op_code == MAX_EXPR && consts_lt)
5522 	/* MAX (X, 0) == 5  ->  X == 5   */
5523 	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5524 
5525       else if (op_code == MAX_EXPR)
5526 	/* MAX (X, 0) == -1  ->  false  */
5527 	return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5528 
5529       else if (consts_equal)
5530 	/* MIN (X, 0) == 0  ->  X >= 0  */
5531 	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5532 
5533       else if (consts_lt)
5534 	/* MIN (X, 0) == 5  ->  false  */
5535 	return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5536 
5537       else
5538 	/* MIN (X, 0) == -1  ->  X == -1  */
5539 	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5540 
5541     case GT_EXPR:
5542       if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5543 	/* MAX (X, 0) > 0  ->  X > 0
5544 	   MAX (X, 0) > 5  ->  X > 5  */
5545 	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5546 
5547       else if (op_code == MAX_EXPR)
5548 	/* MAX (X, 0) > -1  ->  true  */
5549 	return omit_one_operand_loc (loc, type, integer_one_node, inner);
5550 
5551       else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5552 	/* MIN (X, 0) > 0  ->  false
5553 	   MIN (X, 0) > 5  ->  false  */
5554 	return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5555 
5556       else
5557 	/* MIN (X, 0) > -1  ->  X > -1  */
5558 	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5559 
5560     default:
5561       return NULL_TREE;
5562     }
5563 }
5564 
5565 /* T is an integer expression that is being multiplied, divided, or taken a
5566    modulus (CODE says which and what kind of divide or modulus) by a
5567    constant C.  See if we can eliminate that operation by folding it with
5568    other operations already in T.  WIDE_TYPE, if non-null, is a type that
5569    should be used for the computation if wider than our type.
5570 
5571    For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5572    (X * 2) + (Y * 4).  We must, however, be assured that either the original
5573    expression would not overflow or that overflow is undefined for the type
5574    in the language in question.
5575 
5576    If we return a non-null expression, it is an equivalent form of the
5577    original computation, but need not be in the original type.
5578 
5579    We set *STRICT_OVERFLOW_P to true if the return value depends on
5580    signed overflow being undefined.  Otherwise we do not change
5581    *STRICT_OVERFLOW_P.  */
5582 
5583 static tree
5584 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5585 		bool *strict_overflow_p)
5586 {
5587   /* To avoid exponential search depth, refuse to allow recursion past
5588      three levels.  Beyond that (1) it's highly unlikely that we'll find
5589      something interesting and (2) we've probably processed it before
5590      when we built the inner expression.  */
5591 
5592   static int depth;
5593   tree ret;
5594 
5595   if (depth > 3)
5596     return NULL;
5597 
5598   depth++;
5599   ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5600   depth--;
5601 
5602   return ret;
5603 }
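
/* Illustrative sketch (hypothetical user code) of the rewrite that
   extract_muldiv performs, assuming signed overflow is undefined so
   the distribution is allowed:

     (x * 8 + y * 16) / 4   ->   x * 2 + y * 4

   Both multipliers are multiples of the divisor, so the division is
   folded into each term and the PLUS_EXPR is rebuilt.  */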
5604 
5605 static tree
5606 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5607 		  bool *strict_overflow_p)
5608 {
5609   tree type = TREE_TYPE (t);
5610   enum tree_code tcode = TREE_CODE (t);
5611   tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5612 				   > GET_MODE_SIZE (TYPE_MODE (type)))
5613 		? wide_type : type);
5614   tree t1, t2;
5615   int same_p = tcode == code;
5616   tree op0 = NULL_TREE, op1 = NULL_TREE;
5617   bool sub_strict_overflow_p;
5618 
5619   /* Don't deal with constants of zero here; they confuse the code below.  */
5620   if (integer_zerop (c))
5621     return NULL_TREE;
5622 
5623   if (TREE_CODE_CLASS (tcode) == tcc_unary)
5624     op0 = TREE_OPERAND (t, 0);
5625 
5626   if (TREE_CODE_CLASS (tcode) == tcc_binary)
5627     op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5628 
5629   /* Note that we need not handle conditional operations here since fold
5630      already handles those cases.  So just do arithmetic here.  */
5631   switch (tcode)
5632     {
5633     case INTEGER_CST:
5634       /* For a constant, we can always simplify if we are a multiply
5635 	 or (for divide and modulus) if it is a multiple of our constant.  */
5636       if (code == MULT_EXPR
5637 	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5638 	return const_binop (code, fold_convert (ctype, t),
5639 			    fold_convert (ctype, c));
5640       break;
5641 
5642     CASE_CONVERT: case NON_LVALUE_EXPR:
5643       /* If op0 is an expression ...  */
5644       if ((COMPARISON_CLASS_P (op0)
5645 	   || UNARY_CLASS_P (op0)
5646 	   || BINARY_CLASS_P (op0)
5647 	   || VL_EXP_CLASS_P (op0)
5648 	   || EXPRESSION_CLASS_P (op0))
5649 	  /* ... and has wrapping overflow, and its type is smaller
5650 	     than ctype, then we cannot pass through as widening.  */
5651 	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5652 	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5653 		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5654 	       && (TYPE_PRECISION (ctype)
5655 	           > TYPE_PRECISION (TREE_TYPE (op0))))
5656 	      /* ... or this is a truncation (t is narrower than op0),
5657 		 then we cannot pass through this narrowing.  */
5658 	      || (TYPE_PRECISION (type)
5659 		  < TYPE_PRECISION (TREE_TYPE (op0)))
5660 	      /* ... or signedness changes for division or modulus,
5661 		 then we cannot pass through this conversion.  */
5662 	      || (code != MULT_EXPR
5663 		  && (TYPE_UNSIGNED (ctype)
5664 		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
5665 	      /* ... or has undefined overflow while the type we convert
5666 		 to does not, we cannot do the operation in the inner type
5667 		 as that would introduce undefined overflow.  */
5668 	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5669 		  && !TYPE_OVERFLOW_UNDEFINED (type))))
5670 	break;
5671 
5672       /* Pass the constant down and see if we can make a simplification.  If
5673 	 we can, replace this expression with the inner simplification for
5674 	 possible later conversion to our or some other type.  */
5675       if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5676 	  && TREE_CODE (t2) == INTEGER_CST
5677 	  && !TREE_OVERFLOW (t2)
5678 	  && (0 != (t1 = extract_muldiv (op0, t2, code,
5679 					 code == MULT_EXPR
5680 					 ? ctype : NULL_TREE,
5681 					 strict_overflow_p))))
5682 	return t1;
5683       break;
5684 
5685     case ABS_EXPR:
5686       /* If widening the type changes it from signed to unsigned, then we
5687          must avoid building ABS_EXPR itself as unsigned.  */
5688       if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5689         {
5690           tree cstype = (*signed_type_for) (ctype);
5691           if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5692 	      != 0)
5693             {
5694               t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5695               return fold_convert (ctype, t1);
5696             }
5697           break;
5698         }
5699       /* If the constant is negative, we cannot simplify this.  */
5700       if (tree_int_cst_sgn (c) == -1)
5701         break;
5702       /* FALLTHROUGH */
5703     case NEGATE_EXPR:
5704       if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5705 	  != 0)
5706 	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5707       break;
5708 
5709     case MIN_EXPR:  case MAX_EXPR:
5710       /* If widening the type changes the signedness, then we can't perform
5711 	 this optimization as that changes the result.  */
5712       if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5713 	break;
5714 
5715       /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
5716       sub_strict_overflow_p = false;
5717       if ((t1 = extract_muldiv (op0, c, code, wide_type,
5718 				&sub_strict_overflow_p)) != 0
5719 	  && (t2 = extract_muldiv (op1, c, code, wide_type,
5720 				   &sub_strict_overflow_p)) != 0)
5721 	{
5722 	  if (tree_int_cst_sgn (c) < 0)
5723 	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5724 	  if (sub_strict_overflow_p)
5725 	    *strict_overflow_p = true;
5726 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5727 			      fold_convert (ctype, t2));
5728 	}
5729       break;
5730 
5731     case LSHIFT_EXPR:  case RSHIFT_EXPR:
5732       /* If the second operand is constant, this is a multiplication
5733 	 or floor division by a power of two, so we can treat it that
5734 	 way unless the multiplier or divisor overflows.  Signed
5735 	 left-shift overflow is implementation-defined rather than
5736 	 undefined in C90, so do not convert signed left shift into
5737 	 multiplication.  */
5738       if (TREE_CODE (op1) == INTEGER_CST
5739 	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5740 	  /* const_binop may not detect overflow correctly,
5741 	     so check for it explicitly here.  */
5742 	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5743 	  && TREE_INT_CST_HIGH (op1) == 0
5744 	  && 0 != (t1 = fold_convert (ctype,
5745 				      const_binop (LSHIFT_EXPR,
5746 						   size_one_node,
5747 						   op1)))
5748 	  && !TREE_OVERFLOW (t1))
5749 	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5750 				       ? MULT_EXPR : FLOOR_DIV_EXPR,
5751 				       ctype,
5752 				       fold_convert (ctype, op0),
5753 				       t1),
5754 			       c, code, wide_type, strict_overflow_p);
5755       break;
5756 
5757     case PLUS_EXPR:  case MINUS_EXPR:
5758       /* See if we can eliminate the operation on both sides.  If we can, we
5759 	 can return a new PLUS or MINUS.  If we can't, the only remaining
5760 	 cases where we can do anything are if the second operand is a
5761 	 constant.  */
5762       sub_strict_overflow_p = false;
5763       t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5764       t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5765       if (t1 != 0 && t2 != 0
5766 	  && (code == MULT_EXPR
5767 	      /* If not multiplication, we can only do this if both operands
5768 		 are divisible by c.  */
5769 	      || (multiple_of_p (ctype, op0, c)
5770 	          && multiple_of_p (ctype, op1, c))))
5771 	{
5772 	  if (sub_strict_overflow_p)
5773 	    *strict_overflow_p = true;
5774 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5775 			      fold_convert (ctype, t2));
5776 	}
5777 
5778       /* If this was a subtraction, negate OP1 and set it to be an addition.
5779 	 This simplifies the logic below.  */
5780       if (tcode == MINUS_EXPR)
5781 	{
5782 	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
5783 	  /* If OP1 was not easily negatable, the constant may be OP0.  */
5784 	  if (TREE_CODE (op0) == INTEGER_CST)
5785 	    {
5786 	      tree tem = op0;
5787 	      op0 = op1;
5788 	      op1 = tem;
5789 	      tem = t1;
5790 	      t1 = t2;
5791 	      t2 = tem;
5792 	    }
5793 	}
5794 
5795       if (TREE_CODE (op1) != INTEGER_CST)
5796 	break;
5797 
5798       /* If either OP1 or C is negative, this optimization is not safe for
5799 	 some of the division and remainder types while for others we need
5800 	 to change the code.  */
5801       if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5802 	{
5803 	  if (code == CEIL_DIV_EXPR)
5804 	    code = FLOOR_DIV_EXPR;
5805 	  else if (code == FLOOR_DIV_EXPR)
5806 	    code = CEIL_DIV_EXPR;
5807 	  else if (code != MULT_EXPR
5808 		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5809 	    break;
5810 	}
5811 
5812       /* If it's a multiply or a division/modulus operation of a multiple
5813          of our constant, do the operation and verify it doesn't overflow.  */
5814       if (code == MULT_EXPR
5815 	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5816 	{
5817 	  op1 = const_binop (code, fold_convert (ctype, op1),
5818 			     fold_convert (ctype, c));
5819 	  /* We allow the constant to overflow with wrapping semantics.  */
5820 	  if (op1 == 0
5821 	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5822 	    break;
5823 	}
5824       else
5825 	break;
5826 
5827       /* If we have an unsigned type that is not a sizetype, we cannot widen
5828 	 the operation since it will change the result if the original
5829 	 computation overflowed.  */
5830       if (TYPE_UNSIGNED (ctype)
5831 	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5832 	  && ctype != type)
5833 	break;
5834 
5835       /* If we were able to eliminate our operation from the first side,
5836 	 apply our operation to the second side and reform the PLUS.  */
5837       if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5838 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5839 
5840       /* The last case is if we are a multiply.  In that case, we can
5841 	 apply the distributive law to commute the multiply and addition
5842 	 if the multiplication of the constants doesn't overflow.  */
5843       if (code == MULT_EXPR)
5844 	return fold_build2 (tcode, ctype,
5845 			    fold_build2 (code, ctype,
5846 					 fold_convert (ctype, op0),
5847 					 fold_convert (ctype, c)),
5848 			    op1);
5849 
5850       break;
5851 
5852     case MULT_EXPR:
5853       /* We have a special case here if we are doing something like
5854 	 (C * 8) % 4 since we know that's zero.  */
5855       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5856 	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5857 	  /* If the multiplication can overflow we cannot optimize this.
5858 	     ???  Until we can properly mark individual operations as
5859 	     not overflowing we need to treat sizetype specially here as
5860 	     stor-layout relies on this optimization to make
5861 	     DECL_FIELD_BIT_OFFSET always a constant.  */
5862 	  && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5863 	      || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5864 		  && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5865 	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5866 	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5867 	{
5868 	  *strict_overflow_p = true;
5869 	  return omit_one_operand (type, integer_zero_node, op0);
5870 	}
5871 
5872       /* ... fall through ...  */
5873 
5874     case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
5875     case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
5876       /* If we can extract our operation from the LHS, do so and return a
5877 	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
5878 	 do something only if the second operand is a constant.  */
5879       if (same_p
5880 	  && (t1 = extract_muldiv (op0, c, code, wide_type,
5881 				   strict_overflow_p)) != 0)
5882 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5883 			    fold_convert (ctype, op1));
5884       else if (tcode == MULT_EXPR && code == MULT_EXPR
5885 	       && (t1 = extract_muldiv (op1, c, code, wide_type,
5886 					strict_overflow_p)) != 0)
5887 	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5888 			    fold_convert (ctype, t1));
5889       else if (TREE_CODE (op1) != INTEGER_CST)
5890 	return 0;
5891 
5892       /* If these are the same operation types, we can associate them
5893 	 assuming no overflow.  */
5894       if (tcode == code)
5895 	{
5896 	  double_int mul;
5897 	  int overflow_p;
5898 	  mul = double_int_mul_with_sign
5899 	          (double_int_ext
5900 		     (tree_to_double_int (op1),
5901 		      TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5902 		   double_int_ext
5903 		     (tree_to_double_int (c),
5904 		      TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
5905 		   false, &overflow_p);
5906 	  overflow_p = (((!TYPE_UNSIGNED (ctype)
5907 			  || (TREE_CODE (ctype) == INTEGER_TYPE
5908 			      && TYPE_IS_SIZETYPE (ctype)))
5909 			 && overflow_p)
5910 			| TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5911 	  if (!double_int_fits_to_tree_p (ctype, mul)
5912 	      && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
5913 		  || !TYPE_UNSIGNED (ctype)
5914 		  || (TREE_CODE (ctype) == INTEGER_TYPE
5915 		      && TYPE_IS_SIZETYPE (ctype))))
5916 	    overflow_p = 1;
5917 	  if (!overflow_p)
5918 	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5919 				double_int_to_tree (ctype, mul));
5920 	}
5921 
5922       /* If these operations "cancel" each other, we have the main
5923 	 optimizations of this pass, which occur when either constant is a
5924 	 multiple of the other, in which case we replace this with an
5925 	 operation of either CODE or TCODE.
5926 
5927 	 If we have an unsigned type that is not a sizetype, we cannot do
5928 	 this since it will change the result if the original computation
5929 	 overflowed.  */
5930       if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5931 	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5932 	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5933 	      || (tcode == MULT_EXPR
5934 		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5935 		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5936 		  && code != MULT_EXPR)))
5937 	{
5938 	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5939 	    {
5940 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
5941 		*strict_overflow_p = true;
5942 	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5943 				  fold_convert (ctype,
5944 						const_binop (TRUNC_DIV_EXPR,
5945 							     op1, c)));
5946 	    }
5947 	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5948 	    {
5949 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
5950 		*strict_overflow_p = true;
5951 	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
5952 				  fold_convert (ctype,
5953 						const_binop (TRUNC_DIV_EXPR,
5954 							     c, op1)));
5955 	    }
5956 	}
5957       break;
5958 
5959     default:
5960       break;
5961     }
5962 
5963   return 0;
5964 }
5965 
5966 /* Return a node which has the indicated constant VALUE (either 0 or
5967    1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
5968    and is of the indicated TYPE.  */
5969 
5970 tree
5971 constant_boolean_node (bool value, tree type)
5972 {
5973   if (type == integer_type_node)
5974     return value ? integer_one_node : integer_zero_node;
5975   else if (type == boolean_type_node)
5976     return value ? boolean_true_node : boolean_false_node;
5977   else if (TREE_CODE (type) == VECTOR_TYPE)
5978     return build_vector_from_val (type,
5979 				  build_int_cst (TREE_TYPE (type),
5980 						 value ? -1 : 0));
5981   else
5982     return fold_convert (type, value ? integer_one_node : integer_zero_node);
5983 }
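
/* Examples: constant_boolean_node (true, boolean_type_node) is
   boolean_true_node, while for a four-element integer vector type the
   result is the vector { -1, -1, -1, -1 }, matching the all-ones
   encoding of vector comparison results.  */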
5984 
5985 
5986 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5987    Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
5988    CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5989    expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
5990    COND is the first argument to CODE; otherwise (as in the example
5991    given here), it is the second argument.  TYPE is the type of the
5992    original expression.  Return NULL_TREE if no simplification is
5993    possible.  */
5994 
5995 static tree
5996 fold_binary_op_with_conditional_arg (location_t loc,
5997 				     enum tree_code code,
5998 				     tree type, tree op0, tree op1,
5999 				     tree cond, tree arg, int cond_first_p)
6000 {
6001   tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6002   tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6003   tree test, true_value, false_value;
6004   tree lhs = NULL_TREE;
6005   tree rhs = NULL_TREE;
6006 
6007   if (TREE_CODE (cond) == COND_EXPR)
6008     {
6009       test = TREE_OPERAND (cond, 0);
6010       true_value = TREE_OPERAND (cond, 1);
6011       false_value = TREE_OPERAND (cond, 2);
6012       /* If this operand is an expression that throws, it does not make
6013 	 sense to try to perform a logical or arithmetic operation
6014 	 involving it.  */
6015       if (VOID_TYPE_P (TREE_TYPE (true_value)))
6016 	lhs = true_value;
6017       if (VOID_TYPE_P (TREE_TYPE (false_value)))
6018 	rhs = false_value;
6019     }
6020   else
6021     {
6022       tree testtype = TREE_TYPE (cond);
6023       test = cond;
6024       true_value = constant_boolean_node (true, testtype);
6025       false_value = constant_boolean_node (false, testtype);
6026     }
6027 
6028   /* This transformation is only worthwhile if we don't have to wrap ARG
6029      in a SAVE_EXPR and the operation can be simplified on at least one
6030      of the branches once it is pushed inside the COND_EXPR.  */
6031   if (!TREE_CONSTANT (arg)
6032       && (TREE_SIDE_EFFECTS (arg)
6033 	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6034     return NULL_TREE;
6035 
6036   arg = fold_convert_loc (loc, arg_type, arg);
6037   if (lhs == 0)
6038     {
6039       true_value = fold_convert_loc (loc, cond_type, true_value);
6040       if (cond_first_p)
6041 	lhs = fold_build2_loc (loc, code, type, true_value, arg);
6042       else
6043 	lhs = fold_build2_loc (loc, code, type, arg, true_value);
6044     }
6045   if (rhs == 0)
6046     {
6047       false_value = fold_convert_loc (loc, cond_type, false_value);
6048       if (cond_first_p)
6049 	rhs = fold_build2_loc (loc, code, type, false_value, arg);
6050       else
6051 	rhs = fold_build2_loc (loc, code, type, arg, false_value);
6052     }
6053 
6054   /* Check that we have simplified at least one of the branches.  */
6055   if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6056     return NULL_TREE;
6057 
6058   return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6059 }
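
/* Illustrative sketch (hypothetical user code): pushing the operation
   into the arms pays off only when at least one arm simplifies, which
   is what the TREE_CONSTANT checks above enforce.  */
static int
add_cond (int b)
{
  return 1 + (b ? 2 : 3);	/* a + (b ? x : y) ...  */
}

static int
add_cond_folded (int b)
{
  return b ? 3 : 4;		/* ... b ? (a + x) : (a + y) */
}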
6060 
6061 
6062 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6063 
6064    If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6065    TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
6066    ADDEND is the same as X.
6067 
6068    X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6069    and finite.  The problematic cases are when X is zero, and its mode
6070    has signed zeros.  In the case of rounding towards -infinity,
6071    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
6072    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
6073 
6074 bool
6075 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6076 {
6077   if (!real_zerop (addend))
6078     return false;
6079 
6080   /* Don't allow the fold with -fsignaling-nans.  */
6081   if (HONOR_SNANS (TYPE_MODE (type)))
6082     return false;
6083 
6084   /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
6085   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6086     return true;
6087 
6088   /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
6089   if (TREE_CODE (addend) == REAL_CST
6090       && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6091     negate = !negate;
6092 
6093   /* The mode has signed zeros, and we have to honor their sign.
6094      In this situation, there is only one case we can return true for.
6095      X - 0 is the same as X unless rounding towards -infinity is
6096      supported.  */
6097   return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6098 }
6099 
6100 /* Subroutine of fold() that checks comparisons of built-in math
6101    functions against real constants.
6102 
6103    FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6104    operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
6105    is the type of the result and ARG0 and ARG1 are the operands of the
6106    comparison.  ARG1 must be a TREE_REAL_CST.
6107 
6108    The function returns the constant folded tree if a simplification
6109    can be made, and NULL_TREE otherwise.  */
6110 
6111 static tree
6112 fold_mathfn_compare (location_t loc,
6113 		     enum built_in_function fcode, enum tree_code code,
6114 		     tree type, tree arg0, tree arg1)
6115 {
6116   REAL_VALUE_TYPE c;
6117 
6118   if (BUILTIN_SQRT_P (fcode))
6119     {
6120       tree arg = CALL_EXPR_ARG (arg0, 0);
6121       enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6122 
6123       c = TREE_REAL_CST (arg1);
6124       if (REAL_VALUE_NEGATIVE (c))
6125 	{
6126 	  /* sqrt(x) == y, < y or <= y is always false, if y is negative.  */
6127 	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6128 	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6129 
6130 	  /* sqrt(x) > y is always true, if y is negative and we
6131 	     don't care about NaNs, i.e. negative values of x.  */
6132 	  if (code == NE_EXPR || !HONOR_NANS (mode))
6133 	    return omit_one_operand_loc (loc, type, integer_one_node, arg);
6134 
6135 	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
6136 	  return fold_build2_loc (loc, GE_EXPR, type, arg,
6137 			      build_real (TREE_TYPE (arg), dconst0));
6138 	}
6139       else if (code == GT_EXPR || code == GE_EXPR)
6140 	{
6141 	  REAL_VALUE_TYPE c2;
6142 
6143 	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6144 	  real_convert (&c2, mode, &c2);
6145 
6146 	  if (REAL_VALUE_ISINF (c2))
6147 	    {
6148 	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
6149 	      if (HONOR_INFINITIES (mode))
6150 		return fold_build2_loc (loc, EQ_EXPR, type, arg,
6151 				    build_real (TREE_TYPE (arg), c2));
6152 
6153 	      /* sqrt(x) > y is always false, when y is very large
6154 		 and we don't care about infinities.  */
6155 	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6156 	    }
6157 
6158 	  /* sqrt(x) > c is the same as x > c*c.  */
6159 	  return fold_build2_loc (loc, code, type, arg,
6160 			      build_real (TREE_TYPE (arg), c2));
6161 	}
6162       else if (code == LT_EXPR || code == LE_EXPR)
6163 	{
6164 	  REAL_VALUE_TYPE c2;
6165 
6166 	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6167 	  real_convert (&c2, mode, &c2);
6168 
6169 	  if (REAL_VALUE_ISINF (c2))
6170 	    {
6171 	      /* sqrt(x) < y is always true, when y is a very large
6172 		 value and we don't care about NaNs or Infinities.  */
6173 	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6174 		return omit_one_operand_loc (loc, type, integer_one_node, arg);
6175 
6176 	      /* sqrt(x) < y is x != +Inf when y is very large and we
6177 		 don't care about NaNs.  */
6178 	      if (! HONOR_NANS (mode))
6179 		return fold_build2_loc (loc, NE_EXPR, type, arg,
6180 				    build_real (TREE_TYPE (arg), c2));
6181 
6182 	      /* sqrt(x) < y is x >= 0 when y is very large and we
6183 		 don't care about Infinities.  */
6184 	      if (! HONOR_INFINITIES (mode))
6185 		return fold_build2_loc (loc, GE_EXPR, type, arg,
6186 				    build_real (TREE_TYPE (arg), dconst0));
6187 
6188 	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
6189 	      arg = save_expr (arg);
6190 	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6191 				  fold_build2_loc (loc, GE_EXPR, type, arg,
6192 					       build_real (TREE_TYPE (arg),
6193 							   dconst0)),
6194 				  fold_build2_loc (loc, NE_EXPR, type, arg,
6195 					       build_real (TREE_TYPE (arg),
6196 							   c2)));
6197 	    }
6198 
6199 	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
6200 	  if (! HONOR_NANS (mode))
6201 	    return fold_build2_loc (loc, code, type, arg,
6202 				build_real (TREE_TYPE (arg), c2));
6203 
6204 	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
6205 	  arg = save_expr (arg);
6206 	  return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6207 				  fold_build2_loc (loc, GE_EXPR, type, arg,
6208 					       build_real (TREE_TYPE (arg),
6209 							   dconst0)),
6210 				  fold_build2_loc (loc, code, type, arg,
6211 					       build_real (TREE_TYPE (arg),
6212 							   c2)));
6213 	}
6214     }
6215 
6216   return NULL_TREE;
6217 }
6218 
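/* A minimal standalone sketch (not part of this file; guarded out of
   the build) of the identities the sqrt comparison folding above
   relies on: for a nonnegative constant c whose square is exactly
   representable, sqrt(x) > c is equivalent to x > c*c, and
   sqrt(x) < c to x >= 0 && x < c*c.  Compile it separately,
   e.g. "cc demo.c -lm", to check.  */
#if 0
#include <assert.h>
#include <math.h>

int
main (void)
{
  double c = 3.0;		/* c*c == 9.0 is exact in double.  */
  double xs[] = { 0.0, 8.9, 9.0, 9.1, 100.0 };
  int i;

  for (i = 0; i < 5; i++)
    {
      double x = xs[i];
      assert ((sqrt (x) > c) == (x > c * c));
      assert ((sqrt (x) < c) == (x >= 0.0 && x < c * c));
    }
  return 0;
}
#endif
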
6219 /* Subroutine of fold() that optimizes comparisons against Infinities,
6220    either +Inf or -Inf.
6221 
6222    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6223    GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
6224    are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.
6225 
6226    The function returns the constant folded tree if a simplification
6227    can be made, and NULL_TREE otherwise.  */
6228 
6229 static tree
6230 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6231 		  tree arg0, tree arg1)
6232 {
6233   enum machine_mode mode;
6234   REAL_VALUE_TYPE max;
6235   tree temp;
6236   bool neg;
6237 
6238   mode = TYPE_MODE (TREE_TYPE (arg0));
6239 
6240   /* For negative infinity swap the sense of the comparison.  */
6241   neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6242   if (neg)
6243     code = swap_tree_comparison (code);
6244 
6245   switch (code)
6246     {
6247     case GT_EXPR:
6248       /* x > +Inf is always false, if we ignore sNaNs.  */
6249       if (HONOR_SNANS (mode))
6250         return NULL_TREE;
6251       return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6252 
6253     case LE_EXPR:
6254       /* x <= +Inf is always true, if we don't care about NaNs.  */
6255       if (! HONOR_NANS (mode))
6256 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6257 
6258       /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
6259       arg0 = save_expr (arg0);
6260       return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6261 
6262     case EQ_EXPR:
6263     case GE_EXPR:
6264       /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
6265       real_maxval (&max, neg, mode);
6266       return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6267 			  arg0, build_real (TREE_TYPE (arg0), max));
6268 
6269     case LT_EXPR:
6270       /* x < +Inf is always equal to x <= DBL_MAX.  */
6271       real_maxval (&max, neg, mode);
6272       return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6273 			  arg0, build_real (TREE_TYPE (arg0), max));
6274 
6275     case NE_EXPR:
6276       /* x != +Inf is always equal to !(x > DBL_MAX).  */
6277       real_maxval (&max, neg, mode);
6278       if (! HONOR_NANS (mode))
6279 	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6280 			    arg0, build_real (TREE_TYPE (arg0), max));
6281 
6282       temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6283 			  arg0, build_real (TREE_TYPE (arg0), max));
6284       return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6285 
6286     default:
6287       break;
6288     }
6289 
6290   return NULL_TREE;
6291 }
6292 
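/* A standalone sketch (not part of this file; guarded out of the
   build) of the equivalences used by fold_inf_compare on an IEEE
   target: x > +Inf is always false, x <= +Inf is x == x (i.e. x is
   not a NaN), and x < +Inf is x <= DBL_MAX.  */
#if 0
#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  double xs[] = { -INFINITY, -1.0, 0.0, DBL_MAX, INFINITY, NAN };
  int i;

  for (i = 0; i < 6; i++)
    {
      double x = xs[i];
      assert ((x > INFINITY) == 0);
      assert ((x <= INFINITY) == (x == x));
      assert ((x < INFINITY) == (x <= DBL_MAX));
    }
  return 0;
}
#endif
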
6293 /* Subroutine of fold() that optimizes comparisons of a division by
6294    a nonzero integer constant against an integer constant, i.e.
6295    X/C1 op C2.
6296 
6297    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6298    GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
6299    are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6300 
6301    The function returns the constant folded tree if a simplification
6302    can be made, and NULL_TREE otherwise.  */
6303 
6304 static tree
6305 fold_div_compare (location_t loc,
6306 		  enum tree_code code, tree type, tree arg0, tree arg1)
6307 {
6308   tree prod, tmp, hi, lo;
6309   tree arg00 = TREE_OPERAND (arg0, 0);
6310   tree arg01 = TREE_OPERAND (arg0, 1);
6311   double_int val;
6312   bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6313   bool neg_overflow;
6314   int overflow;
6315 
6316   /* We have to do this the hard way to detect unsigned overflow.
6317      prod = int_const_binop (MULT_EXPR, arg01, arg1);  */
6318   overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6319 				   TREE_INT_CST_HIGH (arg01),
6320 				   TREE_INT_CST_LOW (arg1),
6321 				   TREE_INT_CST_HIGH (arg1),
6322 				   &val.low, &val.high, unsigned_p);
6323   prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6324   neg_overflow = false;
6325 
6326   if (unsigned_p)
6327     {
6328       tmp = int_const_binop (MINUS_EXPR, arg01,
6329                              build_int_cst (TREE_TYPE (arg01), 1));
6330       lo = prod;
6331 
6332       /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
6333       overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6334 				       TREE_INT_CST_HIGH (prod),
6335 				       TREE_INT_CST_LOW (tmp),
6336 				       TREE_INT_CST_HIGH (tmp),
6337 				       &val.low, &val.high, unsigned_p);
6338       hi = force_fit_type_double (TREE_TYPE (arg00), val,
6339 				  -1, overflow | TREE_OVERFLOW (prod));
6340     }
6341   else if (tree_int_cst_sgn (arg01) >= 0)
6342     {
6343       tmp = int_const_binop (MINUS_EXPR, arg01,
6344 			     build_int_cst (TREE_TYPE (arg01), 1));
6345       switch (tree_int_cst_sgn (arg1))
6346 	{
6347 	case -1:
6348 	  neg_overflow = true;
6349 	  lo = int_const_binop (MINUS_EXPR, prod, tmp);
6350 	  hi = prod;
6351 	  break;
6352 
6353 	case  0:
6354 	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6355 	  hi = tmp;
6356 	  break;
6357 
6358 	case  1:
6359           hi = int_const_binop (PLUS_EXPR, prod, tmp);
6360 	  lo = prod;
6361 	  break;
6362 
6363 	default:
6364 	  gcc_unreachable ();
6365 	}
6366     }
6367   else
6368     {
6369       /* A negative divisor reverses the relational operators.  */
6370       code = swap_tree_comparison (code);
6371 
6372       tmp = int_const_binop (PLUS_EXPR, arg01,
6373 			     build_int_cst (TREE_TYPE (arg01), 1));
6374       switch (tree_int_cst_sgn (arg1))
6375 	{
6376 	case -1:
6377 	  hi = int_const_binop (MINUS_EXPR, prod, tmp);
6378 	  lo = prod;
6379 	  break;
6380 
6381 	case  0:
6382 	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6383 	  lo = tmp;
6384 	  break;
6385 
6386 	case  1:
6387 	  neg_overflow = true;
6388 	  lo = int_const_binop (PLUS_EXPR, prod, tmp);
6389 	  hi = prod;
6390 	  break;
6391 
6392 	default:
6393 	  gcc_unreachable ();
6394 	}
6395     }
6396 
6397   switch (code)
6398     {
6399     case EQ_EXPR:
6400       if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6401 	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6402       if (TREE_OVERFLOW (hi))
6403 	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6404       if (TREE_OVERFLOW (lo))
6405 	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6406       return build_range_check (loc, type, arg00, 1, lo, hi);
6407 
6408     case NE_EXPR:
6409       if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6410 	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6411       if (TREE_OVERFLOW (hi))
6412 	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6413       if (TREE_OVERFLOW (lo))
6414 	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6415       return build_range_check (loc, type, arg00, 0, lo, hi);
6416 
6417     case LT_EXPR:
6418       if (TREE_OVERFLOW (lo))
6419 	{
6420 	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
6421 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6422 	}
6423       return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6424 
6425     case LE_EXPR:
6426       if (TREE_OVERFLOW (hi))
6427 	{
6428 	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
6429 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6430 	}
6431       return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6432 
6433     case GT_EXPR:
6434       if (TREE_OVERFLOW (hi))
6435 	{
6436 	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
6437 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6438 	}
6439       return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6440 
6441     case GE_EXPR:
6442       if (TREE_OVERFLOW (lo))
6443 	{
6444 	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
6445 	  return omit_one_operand_loc (loc, type, tmp, arg00);
6446 	}
6447       return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6448 
6449     default:
6450       break;
6451     }
6452 
6453   return NULL_TREE;
6454 }
6455 
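/* A standalone sketch (not part of this file; guarded out of the
   build) of the range check fold_div_compare builds: with C's
   truncating division, x / 4 == 3 holds exactly for x in [12, 15],
   x / 4 == -3 for x in [-15, -12], and x / 4 > 3 for x > 15.  */
#if 0
#include <assert.h>

int
main (void)
{
  int x;

  for (x = -100; x <= 100; x++)
    {
      assert ((x / 4 == 3) == (x >= 12 && x <= 15));
      assert ((x / 4 == -3) == (x >= -15 && x <= -12));
      assert ((x / 4 > 3) == (x > 15));
    }
  return 0;
}
#endif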
6456 
6457 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6458    equality/inequality test, then return a simplified form of the test
6459    using a sign test.  Otherwise return NULL.  RESULT_TYPE is the
6460    desired result type.  */
6461 
6462 static tree
6463 fold_single_bit_test_into_sign_test (location_t loc,
6464 				     enum tree_code code, tree arg0, tree arg1,
6465 				     tree result_type)
6466 {
6467   /* If this is testing a single bit, we can optimize the test.  */
6468   if ((code == NE_EXPR || code == EQ_EXPR)
6469       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6470       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6471     {
6472       /* If we have (A & C) != 0 where C is the sign bit of A, convert
6473 	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
6474       tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6475 
6476       if (arg00 != NULL_TREE
6477 	  /* This is only a win if casting to a signed type is cheap,
6478 	     i.e. when arg00's type is not a partial mode.  */
6479 	  && TYPE_PRECISION (TREE_TYPE (arg00))
6480 	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6481 	{
6482 	  tree stype = signed_type_for (TREE_TYPE (arg00));
6483 	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6484 			      result_type,
6485 			      fold_convert_loc (loc, stype, arg00),
6486 			      build_int_cst (stype, 0));
6487 	}
6488     }
6489 
6490   return NULL_TREE;
6491 }
6492 
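/* A standalone sketch (not part of this file; guarded out of the
   build) of the sign-test rewrite above: when C is the sign bit of
   A's type, (A & C) != 0 is A < 0 in the corresponding signed type,
   and (A & C) == 0 is A >= 0.  Assumes the usual two's complement
   behavior for the uint32_t -> int32_t conversion.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t vals[] = { 0, 1, 0x7fffffffu, 0x80000000u, 0xffffffffu };
  int i;

  for (i = 0; i < 5; i++)
    {
      uint32_t a = vals[i];
      assert (((a & 0x80000000u) != 0) == ((int32_t) a < 0));
      assert (((a & 0x80000000u) == 0) == ((int32_t) a >= 0));
    }
  return 0;
}
#endif
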
6493 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6494    equality/inequality test, then return a simplified form of
6495    the test using shifts and logical operations.  Otherwise return
6496    NULL.  RESULT_TYPE is the desired result type.  */
6497 
6498 tree
6499 fold_single_bit_test (location_t loc, enum tree_code code,
6500 		      tree arg0, tree arg1, tree result_type)
6501 {
6502   /* If this is testing a single bit, we can optimize the test.  */
6503   if ((code == NE_EXPR || code == EQ_EXPR)
6504       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6505       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6506     {
6507       tree inner = TREE_OPERAND (arg0, 0);
6508       tree type = TREE_TYPE (arg0);
6509       int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6510       enum machine_mode operand_mode = TYPE_MODE (type);
6511       int ops_unsigned;
6512       tree signed_type, unsigned_type, intermediate_type;
6513       tree tem, one;
6514 
6515       /* First, see if we can fold the single bit test into a sign-bit
6516 	 test.  */
6517       tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6518 						 result_type);
6519       if (tem)
6520 	return tem;
6521 
6522       /* Otherwise we have (A & C) != 0 where C is a single bit,
6523 	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
6524 	 Similarly for (A & C) == 0.  */
6525 
6526       /* If INNER is a right shift by a constant and it plus BITNUM does
6527 	 not overflow, adjust BITNUM and INNER.  */
6528       if (TREE_CODE (inner) == RSHIFT_EXPR
6529 	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6530 	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6531 	  && bitnum < TYPE_PRECISION (type)
6532 	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6533 				   bitnum - TYPE_PRECISION (type)))
6534 	{
6535 	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6536 	  inner = TREE_OPERAND (inner, 0);
6537 	}
6538 
6539       /* If we are going to be able to omit the AND below, we must do our
6540 	 operations as unsigned.  If we must use the AND, we have a choice.
6541 	 Normally unsigned is faster, but for some machines signed is.  */
6542 #ifdef LOAD_EXTEND_OP
6543       ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6544 		      && !flag_syntax_only) ? 0 : 1;
6545 #else
6546       ops_unsigned = 1;
6547 #endif
6548 
6549       signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6550       unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6551       intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6552       inner = fold_convert_loc (loc, intermediate_type, inner);
6553 
6554       if (bitnum != 0)
6555 	inner = build2 (RSHIFT_EXPR, intermediate_type,
6556 			inner, size_int (bitnum));
6557 
6558       one = build_int_cst (intermediate_type, 1);
6559 
6560       if (code == EQ_EXPR)
6561 	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6562 
6563       /* Put the AND last so it can combine with more things.  */
6564       inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6565 
6566       /* Make sure to return the proper type.  */
6567       inner = fold_convert_loc (loc, result_type, inner);
6568 
6569       return inner;
6570     }
6571   return NULL_TREE;
6572 }
6573 
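/* A standalone sketch (not part of this file; guarded out of the
   build) of the shift form built by fold_single_bit_test: with
   C = 1 << 3, (A & C) != 0 becomes (A >> 3) & 1 and (A & C) == 0
   becomes ((A >> 3) ^ 1) & 1.  */
#if 0
#include <assert.h>

int
main (void)
{
  unsigned a;

  for (a = 0; a < 64; a++)
    {
      assert (((a & 8) != 0) == ((a >> 3) & 1));
      assert (((a & 8) == 0) == (((a >> 3) ^ 1) & 1));
    }
  return 0;
}
#endif
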
6574 /* Check whether we are allowed to reorder operands ARG0 and ARG1,
6575    such that the evaluation of ARG1 occurs before ARG0.  */
6576 
6577 static bool
6578 reorder_operands_p (const_tree arg0, const_tree arg1)
6579 {
6580   if (! flag_evaluation_order)
6581       return true;
6582   if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6583     return true;
6584   return ! TREE_SIDE_EFFECTS (arg0)
6585 	 && ! TREE_SIDE_EFFECTS (arg1);
6586 }
6587 
6588 /* Test whether it is preferable to swap two operands, ARG0 and
6589    ARG1, for example because ARG0 is an integer constant and ARG1
6590    isn't.  If REORDER is true, only recommend swapping if we can
6591    evaluate the operands in reverse order.  */
6592 
6593 bool
6594 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6595 {
6596   STRIP_SIGN_NOPS (arg0);
6597   STRIP_SIGN_NOPS (arg1);
6598 
6599   if (TREE_CODE (arg1) == INTEGER_CST)
6600     return 0;
6601   if (TREE_CODE (arg0) == INTEGER_CST)
6602     return 1;
6603 
6604   if (TREE_CODE (arg1) == REAL_CST)
6605     return 0;
6606   if (TREE_CODE (arg0) == REAL_CST)
6607     return 1;
6608 
6609   if (TREE_CODE (arg1) == FIXED_CST)
6610     return 0;
6611   if (TREE_CODE (arg0) == FIXED_CST)
6612     return 1;
6613 
6614   if (TREE_CODE (arg1) == COMPLEX_CST)
6615     return 0;
6616   if (TREE_CODE (arg0) == COMPLEX_CST)
6617     return 1;
6618 
6619   if (TREE_CONSTANT (arg1))
6620     return 0;
6621   if (TREE_CONSTANT (arg0))
6622     return 1;
6623 
6624   if (optimize_function_for_size_p (cfun))
6625     return 0;
6626 
6627   if (reorder && flag_evaluation_order
6628       && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6629     return 0;
6630 
6631   /* It is preferable to swap two SSA_NAME to ensure a canonical form
6632      for commutative and comparison operators.  Ensuring a canonical
6633      form allows the optimizers to find additional redundancies without
6634      having to explicitly check for both orderings.  */
6635   if (TREE_CODE (arg0) == SSA_NAME
6636       && TREE_CODE (arg1) == SSA_NAME
6637       && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6638     return 1;
6639 
6640   /* Put SSA_NAMEs last.  */
6641   if (TREE_CODE (arg1) == SSA_NAME)
6642     return 0;
6643   if (TREE_CODE (arg0) == SSA_NAME)
6644     return 1;
6645 
6646   /* Put variables last.  */
6647   if (DECL_P (arg1))
6648     return 0;
6649   if (DECL_P (arg0))
6650     return 1;
6651 
6652   return 0;
6653 }
6654 
6655 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6656    ARG0 is extended to a wider type.  */
6657 
6658 static tree
6659 fold_widened_comparison (location_t loc, enum tree_code code,
6660 			 tree type, tree arg0, tree arg1)
6661 {
6662   tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6663   tree arg1_unw;
6664   tree shorter_type, outer_type;
6665   tree min, max;
6666   bool above, below;
6667 
6668   if (arg0_unw == arg0)
6669     return NULL_TREE;
6670   shorter_type = TREE_TYPE (arg0_unw);
6671 
6672 #ifdef HAVE_canonicalize_funcptr_for_compare
6673   /* Disable this optimization if we're casting a function pointer
6674      type on targets that require function pointer canonicalization.  */
6675   if (HAVE_canonicalize_funcptr_for_compare
6676       && TREE_CODE (shorter_type) == POINTER_TYPE
6677       && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6678     return NULL_TREE;
6679 #endif
6680 
6681   if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6682     return NULL_TREE;
6683 
6684   arg1_unw = get_unwidened (arg1, NULL_TREE);
6685 
6686   /* If possible, express the comparison in the shorter mode.  */
6687   if ((code == EQ_EXPR || code == NE_EXPR
6688        || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6689       && (TREE_TYPE (arg1_unw) == shorter_type
6690 	  || ((TYPE_PRECISION (shorter_type)
6691 	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6692 	      && (TYPE_UNSIGNED (shorter_type)
6693 		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6694 	  || (TREE_CODE (arg1_unw) == INTEGER_CST
6695 	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
6696 		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6697 	      && int_fits_type_p (arg1_unw, shorter_type))))
6698     return fold_build2_loc (loc, code, type, arg0_unw,
6699 			fold_convert_loc (loc, shorter_type, arg1_unw));
6700 
6701   if (TREE_CODE (arg1_unw) != INTEGER_CST
6702       || TREE_CODE (shorter_type) != INTEGER_TYPE
6703       || !int_fits_type_p (arg1_unw, shorter_type))
6704     return NULL_TREE;
6705 
6706   /* If we are comparing with an integer that does not fit into the range
6707      of the shorter type, the result is known.  */
6708   outer_type = TREE_TYPE (arg1_unw);
6709   min = lower_bound_in_type (outer_type, shorter_type);
6710   max = upper_bound_in_type (outer_type, shorter_type);
6711 
6712   above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6713 						   max, arg1_unw));
6714   below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6715 						   arg1_unw, min));
6716 
6717   switch (code)
6718     {
6719     case EQ_EXPR:
6720       if (above || below)
6721 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6722       break;
6723 
6724     case NE_EXPR:
6725       if (above || below)
6726 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6727       break;
6728 
6729     case LT_EXPR:
6730     case LE_EXPR:
6731       if (above)
6732 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6733       else if (below)
6734 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6735 
6736       break;
6737     case GE_EXPR:
6738       if (above)
6739 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6740       else if (below)
6741 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6742       break;
6743     default:
6744       break;
6745     }
6746 
6747   return NULL_TREE;
6748 }
6749 
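/* A standalone sketch (not part of this file; guarded out of the
   build) of what fold_widened_comparison exploits: a widened value
   compared against a constant outside the narrow type's range has a
   known result, and an in-range comparison can be done in the
   narrow type.  */
#if 0
#include <assert.h>

int
main (void)
{
  int v;

  for (v = -128; v <= 127; v++)
    {
      signed char c = (signed char) v;
      assert (((int) c == 1000) == 0);	/* 1000 does not fit in signed char.  */
      assert (((int) c < 1000) == 1);
      assert (((int) c == 5) == (c == 5));	/* compare in the narrow type */
    }
  return 0;
}
#endif
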
6750 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6751    ARG0 just the signedness is changed.  */
6752 
6753 static tree
6754 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6755 			      tree arg0, tree arg1)
6756 {
6757   tree arg0_inner;
6758   tree inner_type, outer_type;
6759 
6760   if (!CONVERT_EXPR_P (arg0))
6761     return NULL_TREE;
6762 
6763   outer_type = TREE_TYPE (arg0);
6764   arg0_inner = TREE_OPERAND (arg0, 0);
6765   inner_type = TREE_TYPE (arg0_inner);
6766 
6767 #ifdef HAVE_canonicalize_funcptr_for_compare
6768   /* Disable this optimization if we're casting a function pointer
6769      type on targets that require function pointer canonicalization.  */
6770   if (HAVE_canonicalize_funcptr_for_compare
6771       && TREE_CODE (inner_type) == POINTER_TYPE
6772       && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6773     return NULL_TREE;
6774 #endif
6775 
6776   if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6777     return NULL_TREE;
6778 
6779   if (TREE_CODE (arg1) != INTEGER_CST
6780       && !(CONVERT_EXPR_P (arg1)
6781 	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6782     return NULL_TREE;
6783 
6784   if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6785        || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6786       && code != NE_EXPR
6787       && code != EQ_EXPR)
6788     return NULL_TREE;
6789 
6790   if (TREE_CODE (arg1) == INTEGER_CST)
6791     arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6792 				  0, TREE_OVERFLOW (arg1));
6793   else
6794     arg1 = fold_convert_loc (loc, inner_type, arg1);
6795 
6796   return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6797 }
6798 
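/* A standalone sketch (not part of this file; guarded out of the
   build) of the sign-change strip performed above: a cast that only
   changes signedness (same precision) can be dropped from an
   equality comparison once the constant is reinterpreted in the
   inner type, e.g. (int32_t) u == -1 is u == 0xffffffffu.  Assumes
   two's complement for the conversion.  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t vals[] = { 0, 1, 0xfffffffeu, 0xffffffffu };
  int i;

  for (i = 0; i < 4; i++)
    {
      uint32_t u = vals[i];
      assert (((int32_t) u == -1) == (u == 0xffffffffu));
      assert (((int32_t) u != 0) == (u != 0u));
    }
  return 0;
}
#endif
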
6799 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6800    step of the array.  Reconstructs s and delta in the case of s *
6801    delta being an integer constant (and thus already folded).  ADDR is
6802    the address. MULT is the multiplicative expression.  If the
6803    function succeeds, the new address expression is returned.
6804    Otherwise NULL_TREE is returned.  LOC is the location of the
6805    resulting expression.  */
6806 
6807 static tree
6808 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6809 {
6810   tree s, delta, step;
6811   tree ref = TREE_OPERAND (addr, 0), pref;
6812   tree ret, pos;
6813   tree itype;
6814   bool mdim = false;
6815 
6816   /* Strip the nops that might be added when converting op1 to sizetype.  */
6817   STRIP_NOPS (op1);
6818 
6819   /* Canonicalize op1 into a possibly non-constant delta
6820      and an INTEGER_CST s.  */
6821   if (TREE_CODE (op1) == MULT_EXPR)
6822     {
6823       tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6824 
6825       STRIP_NOPS (arg0);
6826       STRIP_NOPS (arg1);
6827 
6828       if (TREE_CODE (arg0) == INTEGER_CST)
6829         {
6830           s = arg0;
6831           delta = arg1;
6832         }
6833       else if (TREE_CODE (arg1) == INTEGER_CST)
6834         {
6835           s = arg1;
6836           delta = arg0;
6837         }
6838       else
6839         return NULL_TREE;
6840     }
6841   else if (TREE_CODE (op1) == INTEGER_CST)
6842     {
6843       delta = op1;
6844       s = NULL_TREE;
6845     }
6846   else
6847     {
6848       /* Treat op1 as delta * 1.  */
6849       delta = op1;
6850       s = integer_one_node;
6851     }
6852 
6853   /* Handle &x.array the same as we would handle &x.array[0].  */
6854   if (TREE_CODE (ref) == COMPONENT_REF
6855       && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6856     {
6857       tree domain;
6858 
6859       /* Remember if this was a multi-dimensional array.  */
6860       if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6861 	mdim = true;
6862 
6863       domain = TYPE_DOMAIN (TREE_TYPE (ref));
6864       if (! domain)
6865 	goto cont;
6866       itype = TREE_TYPE (domain);
6867 
6868       step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6869       if (TREE_CODE (step) != INTEGER_CST)
6870 	goto cont;
6871 
6872       if (s)
6873 	{
6874 	  if (! tree_int_cst_equal (step, s))
6875 	    goto cont;
6876 	}
6877       else
6878 	{
6879 	  /* Try if delta is a multiple of step.  */
6880 	  tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6881 	  if (! tmp)
6882 	    goto cont;
6883 	  delta = tmp;
6884 	}
6885 
6886       /* Only fold here if we can verify we do not overflow one
6887 	 dimension of a multi-dimensional array.  */
6888       if (mdim)
6889 	{
6890 	  tree tmp;
6891 
6892 	  if (!TYPE_MIN_VALUE (domain)
6893 	      || !TYPE_MAX_VALUE (domain)
6894 	      || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6895 	    goto cont;
6896 
6897 	  tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6898 				 fold_convert_loc (loc, itype,
6899 						   TYPE_MIN_VALUE (domain)),
6900 				 fold_convert_loc (loc, itype, delta));
6901 	  if (TREE_CODE (tmp) != INTEGER_CST
6902 	      || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6903 	    goto cont;
6904 	}
6905 
6906       /* We found a suitable component reference.  */
6907 
6908       pref = TREE_OPERAND (addr, 0);
6909       ret = copy_node (pref);
6910       SET_EXPR_LOCATION (ret, loc);
6911 
6912       ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6913 			fold_build2_loc
6914 			  (loc, PLUS_EXPR, itype,
6915 			   fold_convert_loc (loc, itype,
6916 					     TYPE_MIN_VALUE
6917 					       (TYPE_DOMAIN (TREE_TYPE (ref)))),
6918 			   fold_convert_loc (loc, itype, delta)),
6919 			NULL_TREE, NULL_TREE);
6920       return build_fold_addr_expr_loc (loc, ret);
6921     }
6922 
6923 cont:
6924 
6925   for (;; ref = TREE_OPERAND (ref, 0))
6926     {
6927       if (TREE_CODE (ref) == ARRAY_REF)
6928 	{
6929 	  tree domain;
6930 
6931 	  /* Remember if this was a multi-dimensional array.  */
6932 	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6933 	    mdim = true;
6934 
6935 	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6936 	  if (! domain)
6937 	    continue;
6938 	  itype = TREE_TYPE (domain);
6939 
6940 	  step = array_ref_element_size (ref);
6941 	  if (TREE_CODE (step) != INTEGER_CST)
6942 	    continue;
6943 
6944 	  if (s)
6945 	    {
6946 	      if (! tree_int_cst_equal (step, s))
6947                 continue;
6948 	    }
6949 	  else
6950 	    {
6951 	      /* Check whether delta is a multiple of step.  */
6952 	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6953 	      if (! tmp)
6954 		continue;
6955 	      delta = tmp;
6956 	    }
6957 
6958 	  /* Only fold here if we can verify we do not overflow one
6959 	     dimension of a multi-dimensional array.  */
6960 	  if (mdim)
6961 	    {
6962 	      tree tmp;
6963 
6964 	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6965 		  || !TYPE_MAX_VALUE (domain)
6966 		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6967 		continue;
6968 
6969 	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6970 				     fold_convert_loc (loc, itype,
6971 						       TREE_OPERAND (ref, 1)),
6972 				     fold_convert_loc (loc, itype, delta));
6973 	      if (!tmp
6974 		  || TREE_CODE (tmp) != INTEGER_CST
6975 		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6976 		continue;
6977 	    }
6978 
6979 	  break;
6980 	}
6981       else
6982 	mdim = false;
6983 
6984       if (!handled_component_p (ref))
6985 	return NULL_TREE;
6986     }
6987 
6988   /* We found the suitable array reference.  So copy everything up to it,
6989      and replace the index.  */
6990 
6991   pref = TREE_OPERAND (addr, 0);
6992   ret = copy_node (pref);
6993   SET_EXPR_LOCATION (ret, loc);
6994   pos = ret;
6995 
6996   while (pref != ref)
6997     {
6998       pref = TREE_OPERAND (pref, 0);
6999       TREE_OPERAND (pos, 0) = copy_node (pref);
7000       pos = TREE_OPERAND (pos, 0);
7001     }
7002 
7003   TREE_OPERAND (pos, 1)
7004     = fold_build2_loc (loc, PLUS_EXPR, itype,
7005 		       fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7006 		       fold_convert_loc (loc, itype, delta));
7007   return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7008 }
7009 
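/* A standalone sketch (not part of this file; guarded out of the
   build) of the address rewrite try_move_mult_to_index performs:
   for an array whose element size is s, (char *) &a[i] + s * d is
   exactly the address &a[i + d], so the multiplication can be
   folded into the index.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a[10];
  int i = 2, d = 3;

  assert ((int *) ((char *) &a[i] + sizeof (int) * d) == &a[i + d]);
  return 0;
}
#endif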
7010 
7011 /* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
7012    means A >= Y && A != MAX, but in this case we know that
7013    A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
7014 
7015 static tree
7016 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7017 {
7018   tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7019 
7020   if (TREE_CODE (bound) == LT_EXPR)
7021     a = TREE_OPERAND (bound, 0);
7022   else if (TREE_CODE (bound) == GT_EXPR)
7023     a = TREE_OPERAND (bound, 1);
7024   else
7025     return NULL_TREE;
7026 
7027   typea = TREE_TYPE (a);
7028   if (!INTEGRAL_TYPE_P (typea)
7029       && !POINTER_TYPE_P (typea))
7030     return NULL_TREE;
7031 
7032   if (TREE_CODE (ineq) == LT_EXPR)
7033     {
7034       a1 = TREE_OPERAND (ineq, 1);
7035       y = TREE_OPERAND (ineq, 0);
7036     }
7037   else if (TREE_CODE (ineq) == GT_EXPR)
7038     {
7039       a1 = TREE_OPERAND (ineq, 0);
7040       y = TREE_OPERAND (ineq, 1);
7041     }
7042   else
7043     return NULL_TREE;
7044 
7045   if (TREE_TYPE (a1) != typea)
7046     return NULL_TREE;
7047 
7048   if (POINTER_TYPE_P (typea))
7049     {
7050       /* Convert the pointer types into integer before taking the difference.  */
7051       tree ta = fold_convert_loc (loc, ssizetype, a);
7052       tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7053       diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7054     }
7055   else
7056     diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7057 
7058   if (!diff || !integer_onep (diff))
7059    return NULL_TREE;
7060 
7061   return fold_build2_loc (loc, GE_EXPR, type, a, y);
7062 }
7063 
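/* A standalone sketch (not part of this file; guarded out of the
   build) of the identity behind fold_to_nonsharp_ineq_using_bound:
   whenever A + 1 cannot wrap (the ranges below are kept small on
   purpose), A + 1 > Y is the same as A >= Y.  */
#if 0
#include <assert.h>

int
main (void)
{
  int a, x = 50, y;

  for (a = -10; a < 10; a++)
    for (y = -10; y < 10; y++)
      assert ((a < x && a + 1 > y) == (a < x && a >= y));
  return 0;
}
#endif
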
7064 /* Fold a sum or difference of at least one multiplication.
7065    Returns the folded tree or NULL if no simplification could be made.  */
7066 
7067 static tree
7068 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7069 			  tree arg0, tree arg1)
7070 {
7071   tree arg00, arg01, arg10, arg11;
7072   tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7073 
7074   /* (A * C) +- (B * C) -> (A+-B) * C.
7075      (A * C) +- A -> A * (C+-1).
7076      We are most concerned about the case where C is a constant,
7077      but other combinations show up during loop reduction.  Since
7078      it is not difficult, try all four possibilities.  */
7079 
7080   if (TREE_CODE (arg0) == MULT_EXPR)
7081     {
7082       arg00 = TREE_OPERAND (arg0, 0);
7083       arg01 = TREE_OPERAND (arg0, 1);
7084     }
7085   else if (TREE_CODE (arg0) == INTEGER_CST)
7086     {
7087       arg00 = build_one_cst (type);
7088       arg01 = arg0;
7089     }
7090   else
7091     {
7092       /* We cannot generate constant 1 for fract.  */
7093       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7094 	return NULL_TREE;
7095       arg00 = arg0;
7096       arg01 = build_one_cst (type);
7097     }
7098   if (TREE_CODE (arg1) == MULT_EXPR)
7099     {
7100       arg10 = TREE_OPERAND (arg1, 0);
7101       arg11 = TREE_OPERAND (arg1, 1);
7102     }
7103   else if (TREE_CODE (arg1) == INTEGER_CST)
7104     {
7105       arg10 = build_one_cst (type);
7106       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7107 	 the purpose of this canonicalization.  */
7108       if (TREE_INT_CST_HIGH (arg1) == -1
7109 	  && negate_expr_p (arg1)
7110 	  && code == PLUS_EXPR)
7111 	{
7112 	  arg11 = negate_expr (arg1);
7113 	  code = MINUS_EXPR;
7114 	}
7115       else
7116 	arg11 = arg1;
7117     }
7118   else
7119     {
7120       /* We cannot generate constant 1 for fract.  */
7121       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7122 	return NULL_TREE;
7123       arg10 = arg1;
7124       arg11 = build_one_cst (type);
7125     }
7126   same = NULL_TREE;
7127 
7128   if (operand_equal_p (arg01, arg11, 0))
7129     same = arg01, alt0 = arg00, alt1 = arg10;
7130   else if (operand_equal_p (arg00, arg10, 0))
7131     same = arg00, alt0 = arg01, alt1 = arg11;
7132   else if (operand_equal_p (arg00, arg11, 0))
7133     same = arg00, alt0 = arg01, alt1 = arg10;
7134   else if (operand_equal_p (arg01, arg10, 0))
7135     same = arg01, alt0 = arg00, alt1 = arg11;
7136 
7137   /* No identical multiplicands; see if we can find a common
7138      power-of-two factor in non-power-of-two multiplies.  This
7139      can help in multi-dimensional array access.  */
7140   else if (host_integerp (arg01, 0)
7141 	   && host_integerp (arg11, 0))
7142     {
7143       HOST_WIDE_INT int01, int11, tmp;
7144       bool swap = false;
7145       tree maybe_same;
7146       int01 = TREE_INT_CST_LOW (arg01);
7147       int11 = TREE_INT_CST_LOW (arg11);
7148 
7149       /* Move min of absolute values to int11.  */
7150       if (absu_hwi (int01) < absu_hwi (int11))
7151         {
7152 	  tmp = int01, int01 = int11, int11 = tmp;
7153 	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
7154 	  maybe_same = arg01;
7155 	  swap = true;
7156 	}
7157       else
7158 	maybe_same = arg11;
7159 
7160       if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7161 	  /* The remainder should not be a constant, otherwise we
7162 	     end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7163 	     increase the number of multiplications necessary.  */
7164 	  && TREE_CODE (arg10) != INTEGER_CST)
7165         {
7166 	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7167 			      build_int_cst (TREE_TYPE (arg00),
7168 					     int01 / int11));
7169 	  alt1 = arg10;
7170 	  same = maybe_same;
7171 	  if (swap)
7172 	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7173 	}
7174     }
7175 
7176   if (same)
7177     return fold_build2_loc (loc, MULT_EXPR, type,
7178 			fold_build2_loc (loc, code, type,
7179 				     fold_convert_loc (loc, type, alt0),
7180 				     fold_convert_loc (loc, type, alt1)),
7181 			fold_convert_loc (loc, type, same));
7182 
7183   return NULL_TREE;
7184 }
7185 
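/* A standalone sketch (not part of this file; guarded out of the
   build) of the factorings fold_plusminus_mult_expr performs:
   x*c + y*c becomes (x + y)*c, and a common power-of-two factor is
   extracted from non-identical multipliers, e.g. a*12 + b*4 becomes
   (a*3 + b)*4.  */
#if 0
#include <assert.h>

int
main (void)
{
  int x = 7, y = -3, c = 5, a = 11, b = 13;

  assert (x * c + y * c == (x + y) * c);
  assert (a * 12 + b * 4 == (a * 3 + b) * 4);
  return 0;
}
#endif
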
7186 /* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7187    specified by EXPR into the buffer PTR of length LEN bytes.
7188    Return the number of bytes placed in the buffer, or zero
7189    upon failure.  */
7190 
7191 static int
7192 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7193 {
7194   tree type = TREE_TYPE (expr);
7195   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7196   int byte, offset, word, words;
7197   unsigned char value;
7198 
7199   if (total_bytes > len)
7200     return 0;
7201   words = total_bytes / UNITS_PER_WORD;
7202 
7203   for (byte = 0; byte < total_bytes; byte++)
7204     {
7205       int bitpos = byte * BITS_PER_UNIT;
7206       if (bitpos < HOST_BITS_PER_WIDE_INT)
7207 	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7208       else
7209 	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7210 				 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7211 
7212       if (total_bytes > UNITS_PER_WORD)
7213 	{
7214 	  word = byte / UNITS_PER_WORD;
7215 	  if (WORDS_BIG_ENDIAN)
7216 	    word = (words - 1) - word;
7217 	  offset = word * UNITS_PER_WORD;
7218 	  if (BYTES_BIG_ENDIAN)
7219 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7220 	  else
7221 	    offset += byte % UNITS_PER_WORD;
7222 	}
7223       else
7224 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7225       ptr[offset] = value;
7226     }
7227   return total_bytes;
7228 }
7229 
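/* A standalone sketch (not part of this file; guarded out of the
   build) of the core of the encoding loop above: byte N of the
   constant is (value >> (N * 8)) & 0xff, and only where that byte
   lands in the buffer depends on the target's byte and word order
   (little-endian placement shown).  */
#if 0
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint32_t value = 0x12345678u;
  unsigned char buf[4];
  int byte;

  for (byte = 0; byte < 4; byte++)
    buf[byte] = (unsigned char) (value >> (byte * 8));	/* little endian */

  assert (buf[0] == 0x78 && buf[1] == 0x56
	  && buf[2] == 0x34 && buf[3] == 0x12);
  return 0;
}
#endif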
7230 
7231 /* Subroutine of native_encode_expr.  Encode the REAL_CST
7232    specified by EXPR into the buffer PTR of length LEN bytes.
7233    Return the number of bytes placed in the buffer, or zero
7234    upon failure.  */
7235 
7236 static int
7237 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7238 {
7239   tree type = TREE_TYPE (expr);
7240   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7241   int byte, offset, word, words, bitpos;
7242   unsigned char value;
7243 
7244   /* There are always 32 bits in each long, no matter the size of
7245      the host's long.  We handle floating point representations with
7246      up to 192 bits.  */
7247   long tmp[6];
7248 
7249   if (total_bytes > len)
7250     return 0;
7251   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7252 
7253   real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7254 
7255   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7256        bitpos += BITS_PER_UNIT)
7257     {
7258       byte = (bitpos / BITS_PER_UNIT) & 3;
7259       value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7260 
7261       if (UNITS_PER_WORD < 4)
7262 	{
7263 	  word = byte / UNITS_PER_WORD;
7264 	  if (WORDS_BIG_ENDIAN)
7265 	    word = (words - 1) - word;
7266 	  offset = word * UNITS_PER_WORD;
7267 	  if (BYTES_BIG_ENDIAN)
7268 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7269 	  else
7270 	    offset += byte % UNITS_PER_WORD;
7271 	}
7272       else
7273 	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7274       ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7275     }
7276   return total_bytes;
7277 }
7278 
7279 /* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7280    specified by EXPR into the buffer PTR of length LEN bytes.
7281    Return the number of bytes placed in the buffer, or zero
7282    upon failure.  */
7283 
7284 static int
7285 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7286 {
7287   int rsize, isize;
7288   tree part;
7289 
7290   part = TREE_REALPART (expr);
7291   rsize = native_encode_expr (part, ptr, len);
7292   if (rsize == 0)
7293     return 0;
7294   part = TREE_IMAGPART (expr);
7295   isize = native_encode_expr (part, ptr+rsize, len-rsize);
7296   if (isize != rsize)
7297     return 0;
7298   return rsize + isize;
7299 }
7300 
7301 
7302 /* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7303    specified by EXPR into the buffer PTR of length LEN bytes.
7304    Return the number of bytes placed in the buffer, or zero
7305    upon failure.  */
7306 
7307 static int
7308 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7309 {
7310   int i, size, offset, count;
7311   tree itype, elem, elements;
7312 
7313   offset = 0;
7314   elements = TREE_VECTOR_CST_ELTS (expr);
7315   count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7316   itype = TREE_TYPE (TREE_TYPE (expr));
7317   size = GET_MODE_SIZE (TYPE_MODE (itype));
7318   for (i = 0; i < count; i++)
7319     {
7320       if (elements)
7321 	{
7322 	  elem = TREE_VALUE (elements);
7323 	  elements = TREE_CHAIN (elements);
7324 	}
7325       else
7326 	elem = NULL_TREE;
7327 
7328       if (elem)
7329 	{
7330 	  if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7331 	    return 0;
7332 	}
7333       else
7334 	{
7335 	  if (offset + size > len)
7336 	    return 0;
7337 	  memset (ptr+offset, 0, size);
7338 	}
7339       offset += size;
7340     }
7341   return offset;
7342 }
7343 
7344 
7345 /* Subroutine of native_encode_expr.  Encode the STRING_CST
7346    specified by EXPR into the buffer PTR of length LEN bytes.
7347    Return the number of bytes placed in the buffer, or zero
7348    upon failure.  */
7349 
7350 static int
7351 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7352 {
7353   tree type = TREE_TYPE (expr);
7354   HOST_WIDE_INT total_bytes;
7355 
7356   if (TREE_CODE (type) != ARRAY_TYPE
7357       || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7358       || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7359       || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7360     return 0;
7361   total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7362   if (total_bytes > len)
7363     return 0;
7364   if (TREE_STRING_LENGTH (expr) < total_bytes)
7365     {
7366       memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7367       memset (ptr + TREE_STRING_LENGTH (expr), 0,
7368 	      total_bytes - TREE_STRING_LENGTH (expr));
7369     }
7370   else
7371     memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7372   return total_bytes;
7373 }
7374 
7375 
7376 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7377    REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7378    buffer PTR of length LEN bytes.  Return the number of bytes
7379    placed in the buffer, or zero upon failure.  */
7380 
7381 int
7382 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7383 {
7384   switch (TREE_CODE (expr))
7385     {
7386     case INTEGER_CST:
7387       return native_encode_int (expr, ptr, len);
7388 
7389     case REAL_CST:
7390       return native_encode_real (expr, ptr, len);
7391 
7392     case COMPLEX_CST:
7393       return native_encode_complex (expr, ptr, len);
7394 
7395     case VECTOR_CST:
7396       return native_encode_vector (expr, ptr, len);
7397 
7398     case STRING_CST:
7399       return native_encode_string (expr, ptr, len);
7400 
7401     default:
7402       return 0;
7403     }
7404 }
7405 
7406 
7407 /* Subroutine of native_interpret_expr.  Interpret the contents of
7408    the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7409    If the buffer cannot be interpreted, return NULL_TREE.  */
7410 
7411 static tree
7412 native_interpret_int (tree type, const unsigned char *ptr, int len)
7413 {
7414   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7415   int byte, offset, word, words;
7416   unsigned char value;
7417   double_int result;
7418 
7419   if (total_bytes > len)
7420     return NULL_TREE;
7421   if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7422     return NULL_TREE;
7423 
7424   result = double_int_zero;
7425   words = total_bytes / UNITS_PER_WORD;
7426 
7427   for (byte = 0; byte < total_bytes; byte++)
7428     {
7429       int bitpos = byte * BITS_PER_UNIT;
7430       if (total_bytes > UNITS_PER_WORD)
7431 	{
7432 	  word = byte / UNITS_PER_WORD;
7433 	  if (WORDS_BIG_ENDIAN)
7434 	    word = (words - 1) - word;
7435 	  offset = word * UNITS_PER_WORD;
7436 	  if (BYTES_BIG_ENDIAN)
7437 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7438 	  else
7439 	    offset += byte % UNITS_PER_WORD;
7440 	}
7441       else
7442 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7443       value = ptr[offset];
7444 
7445       if (bitpos < HOST_BITS_PER_WIDE_INT)
7446 	result.low |= (unsigned HOST_WIDE_INT) value << bitpos;
7447       else
7448 	result.high |= (unsigned HOST_WIDE_INT) value
7449 		       << (bitpos - HOST_BITS_PER_WIDE_INT);
7450     }
7451 
7452   return double_int_to_tree (type, result);
7453 }
7454 
7455 
7456 /* Subroutine of native_interpret_expr.  Interpret the contents of
7457    the buffer PTR of length LEN as a REAL_CST of type TYPE.
7458    If the buffer cannot be interpreted, return NULL_TREE.  */
7459 
7460 static tree
7461 native_interpret_real (tree type, const unsigned char *ptr, int len)
7462 {
7463   enum machine_mode mode = TYPE_MODE (type);
7464   int total_bytes = GET_MODE_SIZE (mode);
7465   int byte, offset, word, words, bitpos;
7466   unsigned char value;
7467   /* There are always 32 bits in each long, no matter the size of
7468      the host's long.  We handle floating point representations with
7469      up to 192 bits.  */
7470   REAL_VALUE_TYPE r;
7471   long tmp[6];
7472 
7473   total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7474   if (total_bytes > len || total_bytes > 24)
7475     return NULL_TREE;
7476   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7477 
7478   memset (tmp, 0, sizeof (tmp));
7479   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7480        bitpos += BITS_PER_UNIT)
7481     {
7482       byte = (bitpos / BITS_PER_UNIT) & 3;
7483       if (UNITS_PER_WORD < 4)
7484 	{
7485 	  word = byte / UNITS_PER_WORD;
7486 	  if (WORDS_BIG_ENDIAN)
7487 	    word = (words - 1) - word;
7488 	  offset = word * UNITS_PER_WORD;
7489 	  if (BYTES_BIG_ENDIAN)
7490 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7491 	  else
7492 	    offset += byte % UNITS_PER_WORD;
7493 	}
7494       else
7495 	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7496       value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7497 
7498       tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7499     }
7500 
7501   real_from_target (&r, tmp, mode);
7502   return build_real (type, r);
7503 }
7504 
7505 
7506 /* Subroutine of native_interpret_expr.  Interpret the contents of
7507    the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7508    If the buffer cannot be interpreted, return NULL_TREE.  */
7509 
7510 static tree
7511 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7512 {
7513   tree etype, rpart, ipart;
7514   int size;
7515 
7516   etype = TREE_TYPE (type);
7517   size = GET_MODE_SIZE (TYPE_MODE (etype));
7518   if (size * 2 > len)
7519     return NULL_TREE;
7520   rpart = native_interpret_expr (etype, ptr, size);
7521   if (!rpart)
7522     return NULL_TREE;
7523   ipart = native_interpret_expr (etype, ptr+size, size);
7524   if (!ipart)
7525     return NULL_TREE;
7526   return build_complex (type, rpart, ipart);
7527 }
7528 
7529 
7530 /* Subroutine of native_interpret_expr.  Interpret the contents of
7531    the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7532    If the buffer cannot be interpreted, return NULL_TREE.  */
7533 
7534 static tree
7535 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7536 {
7537   tree etype, elem, elements;
7538   int i, size, count;
7539 
7540   etype = TREE_TYPE (type);
7541   size = GET_MODE_SIZE (TYPE_MODE (etype));
7542   count = TYPE_VECTOR_SUBPARTS (type);
7543   if (size * count > len)
7544     return NULL_TREE;
7545 
7546   elements = NULL_TREE;
7547   for (i = count - 1; i >= 0; i--)
7548     {
7549       elem = native_interpret_expr (etype, ptr+(i*size), size);
7550       if (!elem)
7551 	return NULL_TREE;
7552       elements = tree_cons (NULL_TREE, elem, elements);
7553     }
7554   return build_vector (type, elements);
7555 }
7556 
7557 
7558 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
7559    the buffer PTR of length LEN as a constant of type TYPE.  For
7560    INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7561    we return a REAL_CST, etc...  If the buffer cannot be interpreted,
7562    return NULL_TREE.  */
7563 
7564 tree
7565 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7566 {
7567   switch (TREE_CODE (type))
7568     {
7569     case INTEGER_TYPE:
7570     case ENUMERAL_TYPE:
7571     case BOOLEAN_TYPE:
7572       return native_interpret_int (type, ptr, len);
7573 
7574     case REAL_TYPE:
7575       return native_interpret_real (type, ptr, len);
7576 
7577     case COMPLEX_TYPE:
7578       return native_interpret_complex (type, ptr, len);
7579 
7580     case VECTOR_TYPE:
7581       return native_interpret_vector (type, ptr, len);
7582 
7583     default:
7584       return NULL_TREE;
7585     }
7586 }
7587 
7588 
7589 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7590    TYPE at compile-time.  If we're unable to perform the conversion
7591    return NULL_TREE.  */
7592 
7593 static tree
7594 fold_view_convert_expr (tree type, tree expr)
7595 {
7596   /* We support up to 512-bit values (for V8DFmode).  */
7597   unsigned char buffer[64];
7598   int len;
7599 
7600   /* Check that the host and target are sane.  */
7601   if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7602     return NULL_TREE;
7603 
7604   len = native_encode_expr (expr, buffer, sizeof (buffer));
7605   if (len == 0)
7606     return NULL_TREE;
7607 
7608   return native_interpret_expr (type, buffer, len);
7609 }
7610 
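/* A standalone sketch (not part of this file; guarded out of the
   build): at run time a VIEW_CONVERT_EXPR between same-sized types
   behaves like a memcpy of the bytes, which is what the
   encode-then-interpret pair above computes at compile time.
   Assumes an IEEE single-precision float.  */
#if 0
#include <assert.h>
#include <stdint.h>
#include <string.h>

int
main (void)
{
  float f = 1.0f;
  uint32_t bits;

  memcpy (&bits, &f, sizeof bits);	/* the bit reinterpretation */
  assert (bits == 0x3f800000u);		/* IEEE single for 1.0 */
  return 0;
}
#endif
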
7611 /* Build an expression for the address of T.  Folds away INDIRECT_REF
7612    to avoid confusing the gimplify process.  */
7613 
7614 tree
7615 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7616 {
7617   /* The size of the object is not relevant when talking about its address.  */
7618   if (TREE_CODE (t) == WITH_SIZE_EXPR)
7619     t = TREE_OPERAND (t, 0);
7620 
7621   if (TREE_CODE (t) == INDIRECT_REF)
7622     {
7623       t = TREE_OPERAND (t, 0);
7624 
7625       if (TREE_TYPE (t) != ptrtype)
7626 	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7627     }
7628   else if (TREE_CODE (t) == MEM_REF
7629       && integer_zerop (TREE_OPERAND (t, 1)))
7630     return TREE_OPERAND (t, 0);
7631   else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7632     {
7633       t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7634 
7635       if (TREE_TYPE (t) != ptrtype)
7636 	t = fold_convert_loc (loc, ptrtype, t);
7637     }
7638   else
7639     t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7640 
7641   return t;
7642 }
7643 
7644 /* Build an expression for the address of T.  */
7645 
7646 tree
7647 build_fold_addr_expr_loc (location_t loc, tree t)
7648 {
7649   tree ptrtype = build_pointer_type (TREE_TYPE (t));
7650 
7651   return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7652 }
7653 
7654 static bool vec_cst_ctor_to_array (tree, tree *);
7655 
7656 /* Fold a unary expression of code CODE and type TYPE with operand
7657    OP0.  Return the folded expression if folding is successful.
7658    Otherwise, return NULL_TREE.  */
7659 
7660 tree
7661 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7662 {
7663   tree tem;
7664   tree arg0;
7665   enum tree_code_class kind = TREE_CODE_CLASS (code);
7666 
7667   gcc_assert (IS_EXPR_CODE_CLASS (kind)
7668 	      && TREE_CODE_LENGTH (code) == 1);
7669 
7670   arg0 = op0;
7671   if (arg0)
7672     {
7673       if (CONVERT_EXPR_CODE_P (code)
7674 	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7675 	{
7676 	  /* Don't use STRIP_NOPS, because signedness of argument type
7677 	     matters.  */
7678 	  STRIP_SIGN_NOPS (arg0);
7679 	}
7680       else
7681 	{
7682 	  /* Strip any conversions that don't change the mode.  This
7683 	     is safe for every expression, except for a comparison
7684 	     expression because its signedness is derived from its
7685 	     operands.
7686 
7687 	     Note that this is done as an internal manipulation within
7688 	     the constant folder, in order to find the simplest
7689 	     representation of the arguments so that their form can be
7690 	     studied.  In any case, the appropriate type conversions
7691 	     should be put back in the tree that will get out of the
7692 	     constant folder.  */
7693 	  STRIP_NOPS (arg0);
7694 	}
7695     }
7696 
7697   if (TREE_CODE_CLASS (code) == tcc_unary)
7698     {
7699       if (TREE_CODE (arg0) == COMPOUND_EXPR)
7700 	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7701 		       fold_build1_loc (loc, code, type,
7702 				    fold_convert_loc (loc, TREE_TYPE (op0),
7703 						      TREE_OPERAND (arg0, 1))));
7704       else if (TREE_CODE (arg0) == COND_EXPR)
7705 	{
7706 	  tree arg01 = TREE_OPERAND (arg0, 1);
7707 	  tree arg02 = TREE_OPERAND (arg0, 2);
7708 	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7709 	    arg01 = fold_build1_loc (loc, code, type,
7710 				 fold_convert_loc (loc,
7711 						   TREE_TYPE (op0), arg01));
7712 	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7713 	    arg02 = fold_build1_loc (loc, code, type,
7714 				 fold_convert_loc (loc,
7715 						   TREE_TYPE (op0), arg02));
7716 	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7717 			     arg01, arg02);
7718 
7719 	  /* If this was a conversion, and all we did was to move it
7720 	     inside the COND_EXPR, bring it back out.  But leave it if
7721 	     it is a conversion from integer to integer and the
7722 	     result precision is no wider than a word since such a
7723 	     conversion is cheap and may be optimized away by combine,
7724 	     while it couldn't if it were outside the COND_EXPR.  Then return
7725 	     so we don't get into an infinite recursion loop taking the
7726 	     conversion out and then back in.  */
7727 
7728 	  if ((CONVERT_EXPR_CODE_P (code)
7729 	       || code == NON_LVALUE_EXPR)
7730 	      && TREE_CODE (tem) == COND_EXPR
7731 	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7732 	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7733 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7734 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7735 	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7736 		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7737 	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7738 		     && (INTEGRAL_TYPE_P
7739 			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7740 		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7741 		  || flag_syntax_only))
7742 	    tem = build1_loc (loc, code, type,
7743 			      build3 (COND_EXPR,
7744 				      TREE_TYPE (TREE_OPERAND
7745 						 (TREE_OPERAND (tem, 1), 0)),
7746 				      TREE_OPERAND (tem, 0),
7747 				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7748 				      TREE_OPERAND (TREE_OPERAND (tem, 2),
7749 						    0)));
7750 	  return tem;
7751 	}
7752    }
7753 
7754   switch (code)
7755     {
7756     case PAREN_EXPR:
7757       /* Re-association barriers around constants and other re-association
7758 	 barriers can be removed.  */
7759       if (CONSTANT_CLASS_P (op0)
7760 	  || TREE_CODE (op0) == PAREN_EXPR)
7761 	return fold_convert_loc (loc, type, op0);
7762       return NULL_TREE;
7763 
7764     CASE_CONVERT:
7765     case FLOAT_EXPR:
7766     case FIX_TRUNC_EXPR:
7767       if (TREE_TYPE (op0) == type)
7768 	return op0;
7769 
7770       if (COMPARISON_CLASS_P (op0))
7771 	{
7772 	  /* If we have (type) (a CMP b) and type is an integral type, return
7773 	     new expression involving the new type.  Canonicalize
7774 	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7775 	     non-integral type.
7776 	     Do not fold the result as that would not simplify further, also
7777 	     folding again results in recursions.  */
7778 	  if (TREE_CODE (type) == BOOLEAN_TYPE)
7779 	    return build2_loc (loc, TREE_CODE (op0), type,
7780 			       TREE_OPERAND (op0, 0),
7781 			       TREE_OPERAND (op0, 1));
7782 	  else if (!INTEGRAL_TYPE_P (type))
7783 	    return build3_loc (loc, COND_EXPR, type, op0,
7784 			       constant_boolean_node (true, type),
7785 			       constant_boolean_node (false, type));
7786 	}
7787 
7788       /* Handle cases of two conversions in a row.  */
7789       if (CONVERT_EXPR_P (op0))
7790 	{
7791 	  tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7792 	  tree inter_type = TREE_TYPE (op0);
7793 	  int inside_int = INTEGRAL_TYPE_P (inside_type);
7794 	  int inside_ptr = POINTER_TYPE_P (inside_type);
7795 	  int inside_float = FLOAT_TYPE_P (inside_type);
7796 	  int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7797 	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
7798 	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7799 	  int inter_int = INTEGRAL_TYPE_P (inter_type);
7800 	  int inter_ptr = POINTER_TYPE_P (inter_type);
7801 	  int inter_float = FLOAT_TYPE_P (inter_type);
7802 	  int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7803 	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
7804 	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7805 	  int final_int = INTEGRAL_TYPE_P (type);
7806 	  int final_ptr = POINTER_TYPE_P (type);
7807 	  int final_float = FLOAT_TYPE_P (type);
7808 	  int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7809 	  unsigned int final_prec = TYPE_PRECISION (type);
7810 	  int final_unsignedp = TYPE_UNSIGNED (type);
7811 
7812 	  /* In addition to the cases of two conversions in a row
7813 	     handled below, if we are converting something to its own
7814 	     type via an object of identical or wider precision, neither
7815 	     conversion is needed.  */
7816 	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7817 	      && (((inter_int || inter_ptr) && final_int)
7818 		  || (inter_float && final_float))
7819 	      && inter_prec >= final_prec)
7820 	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7821 
7822 	  /* Likewise, if the intermediate and initial types are either both
7823 	     float or both integer, we don't need the middle conversion if the
7824 	     former is wider than the latter and doesn't change the signedness
7825 	     (for integers).  Avoid this if the final type is a pointer since
7826 	     then we sometimes need the middle conversion.  Likewise if the
7827 	     final type has a precision not equal to the size of its mode.  */
7828 	  if (((inter_int && inside_int)
7829 	       || (inter_float && inside_float)
7830 	       || (inter_vec && inside_vec))
7831 	      && inter_prec >= inside_prec
7832 	      && (inter_float || inter_vec
7833 		  || inter_unsignedp == inside_unsignedp)
7834 	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7835 		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
7836 	      && ! final_ptr
7837 	      && (! final_vec || inter_prec == inside_prec))
7838 	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7839 
7840 	  /* If we have a sign-extension of a zero-extended value, we can
7841 	     replace that by a single zero-extension.  */
7842 	  if (inside_int && inter_int && final_int
7843 	      && inside_prec < inter_prec && inter_prec < final_prec
7844 	      && inside_unsignedp && !inter_unsignedp)
7845 	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7846 
7847 	  /* Two conversions in a row are not needed unless:
7848 	     - some conversion is floating-point (overstrict for now), or
7849 	     - some conversion is a vector (overstrict for now), or
7850 	     - the intermediate type is narrower than both initial and
7851 	       final, or
7852 	     - the intermediate type and innermost type differ in signedness,
7853 	       and the outermost type is wider than the intermediate, or
7854 	     - the initial type is a pointer type and the precisions of the
7855 	       intermediate and final types differ, or
7856 	     - the final type is a pointer type and the precisions of the
7857 	       initial and intermediate types differ.  */
7858 	  if (! inside_float && ! inter_float && ! final_float
7859 	      && ! inside_vec && ! inter_vec && ! final_vec
7860 	      && (inter_prec >= inside_prec || inter_prec >= final_prec)
7861 	      && ! (inside_int && inter_int
7862 		    && inter_unsignedp != inside_unsignedp
7863 		    && inter_prec < final_prec)
7864 	      && ((inter_unsignedp && inter_prec > inside_prec)
7865 		  == (final_unsignedp && final_prec > inter_prec))
7866 	      && ! (inside_ptr && inter_prec != final_prec)
7867 	      && ! (final_ptr && inside_prec != inter_prec)
7868 	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7869 		    && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7870 	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
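	  /* Illustrative example (editorial sketch, assuming 16-bit
	     short, 32-bit int and 64-bit long): for a long variable l,
	     (short)(int)l folds to (short)l; the intermediate
	     truncation to int discards no bits that survive the final
	     truncation to short, and none of the exceptions above
	     applies.  */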
7871 	}
7872 
7873       /* Handle (T *)&A.B.C for A being of type T and B and C
7874 	 living at offset zero.  This occurs frequently in
7875 	 C++ upcasting and then accessing the base.  */
7876       if (TREE_CODE (op0) == ADDR_EXPR
7877 	  && POINTER_TYPE_P (type)
7878 	  && handled_component_p (TREE_OPERAND (op0, 0)))
7879         {
7880 	  HOST_WIDE_INT bitsize, bitpos;
7881 	  tree offset;
7882 	  enum machine_mode mode;
7883 	  int unsignedp, volatilep;
7884           tree base = TREE_OPERAND (op0, 0);
7885 	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7886 				      &mode, &unsignedp, &volatilep, false);
7887 	  /* If the reference was to a (constant) zero offset, we can use
7888 	     the address of the base if it has the same base type
7889 	     as the result type and the pointer type is unqualified.  */
7890 	  if (! offset && bitpos == 0
7891 	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7892 		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7893 	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7894 	    return fold_convert_loc (loc, type,
7895 				     build_fold_addr_expr_loc (loc, base));
7896         }
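      /* Illustrative example (editorial sketch): given
	 struct S { struct T t; } s;, the expression (struct S *)&s.t
	 refers to offset zero within s, so it folds to &s, matching
	 the C++ upcast pattern described above.  */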
7897 
7898       if (TREE_CODE (op0) == MODIFY_EXPR
7899 	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7900 	  /* Detect assigning a bitfield.  */
7901 	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7902 	       && DECL_BIT_FIELD
7903 	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7904 	{
7905 	  /* Don't leave an assignment inside a conversion
7906 	     unless assigning a bitfield.  */
7907 	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7908 	  /* First do the assignment, then return converted constant.  */
7909 	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7910 	  TREE_NO_WARNING (tem) = 1;
7911 	  TREE_USED (tem) = 1;
7912 	  return tem;
7913 	}
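      /* Illustrative example (editorial sketch): for an int variable
	 x, (long)(x = 3) folds to the COMPOUND_EXPR (x = 3, 3L); the
	 assignment is performed first and the converted constant is
	 the value of the whole expression.  */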
7914 
7915       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7916 	 constant (if x has signed type, the sign bit cannot be set
7917 	 in c).  This folds extension into the BIT_AND_EXPR.
7918 	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7919 	 very likely don't have maximal range for their precision and this
7920 	 transformation effectively doesn't preserve non-maximal ranges.  */
7921       if (TREE_CODE (type) == INTEGER_TYPE
7922 	  && TREE_CODE (op0) == BIT_AND_EXPR
7923 	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7924 	{
7925 	  tree and_expr = op0;
7926 	  tree and0 = TREE_OPERAND (and_expr, 0);
7927 	  tree and1 = TREE_OPERAND (and_expr, 1);
7928 	  int change = 0;
7929 
7930 	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7931 	      || (TYPE_PRECISION (type)
7932 		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7933 	    change = 1;
7934 	  else if (TYPE_PRECISION (TREE_TYPE (and1))
7935 		   <= HOST_BITS_PER_WIDE_INT
7936 		   && host_integerp (and1, 1))
7937 	    {
7938 	      unsigned HOST_WIDE_INT cst;
7939 
7940 	      cst = tree_low_cst (and1, 1);
7941 	      cst &= (HOST_WIDE_INT) -1
7942 		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7943 	      change = (cst == 0);
7944 #ifdef LOAD_EXTEND_OP
7945 	      if (change
7946 		  && !flag_syntax_only
7947 		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7948 		      == ZERO_EXTEND))
7949 		{
7950 		  tree uns = unsigned_type_for (TREE_TYPE (and0));
7951 		  and0 = fold_convert_loc (loc, uns, and0);
7952 		  and1 = fold_convert_loc (loc, uns, and1);
7953 		}
7954 #endif
7955 	    }
7956 	  if (change)
7957 	    {
7958 	      tem = force_fit_type_double (type, tree_to_double_int (and1),
7959 					   0, TREE_OVERFLOW (and1));
7960 	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
7961 				  fold_convert_loc (loc, type, and0), tem);
7962 	    }
7963 	}
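      /* Illustrative example (editorial sketch): if c has type signed
	 char, converting the BIT_AND_EXPR c & 0x7f to int folds to
	 (int) c & 0x7f; the mask clears the sign bit of c, so widening
	 first cannot change the masked result.  */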
7964 
7965       /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7966          when one of the new casts will fold away. Conservatively we assume
7967 	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7968       if (POINTER_TYPE_P (type)
7969 	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7970 	  && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
7971 	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7972 	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7973 	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7974 	{
7975 	  tree arg00 = TREE_OPERAND (arg0, 0);
7976 	  tree arg01 = TREE_OPERAND (arg0, 1);
7977 
7978 	  return fold_build_pointer_plus_loc
7979 		   (loc, fold_convert_loc (loc, type, arg00), arg01);
7980 	}
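      /* Illustrative example (editorial sketch): for an int *p, the
	 conversion (char *)(p p+ 4) becomes (char *)p p+ 4; the
	 INTEGER_CST offset guarantees that moving the cast inward can
	 let it combine with other conversions on p.  */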
7981 
7982       /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7983 	 of the same precision, and X is an integer type not narrower than
7984 	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
7985       if (INTEGRAL_TYPE_P (type)
7986 	  && TREE_CODE (op0) == BIT_NOT_EXPR
7987 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7988 	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7989 	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7990 	{
7991 	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7992 	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7993 	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7994 	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7995 				fold_convert_loc (loc, type, tem));
7996 	}
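      /* Illustrative example (editorial sketch, assuming 32-bit int):
	 for an int variable i, (int)~(unsigned int)i folds to ~i; the
	 precisions match, so the complement commutes with the
	 conversions.  */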
7997 
7998       /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7999 	 type of X and Y (integer types only).  */
8000       if (INTEGRAL_TYPE_P (type)
8001 	  && TREE_CODE (op0) == MULT_EXPR
8002 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8003 	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8004 	{
8005 	  /* Be careful not to introduce new overflows.  */
8006 	  tree mult_type;
8007           if (TYPE_OVERFLOW_WRAPS (type))
8008 	    mult_type = type;
8009 	  else
8010 	    mult_type = unsigned_type_for (type);
8011 
8012 	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8013 	    {
8014 	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8015 				 fold_convert_loc (loc, mult_type,
8016 						   TREE_OPERAND (op0, 0)),
8017 				 fold_convert_loc (loc, mult_type,
8018 						   TREE_OPERAND (op0, 1)));
8019 	      return fold_convert_loc (loc, type, tem);
8020 	    }
8021 	}
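      /* Illustrative example (editorial sketch, assuming 16-bit short
	 and 32-bit int): (short)(i * j) for int i, j folds to the
	 final conversion of a 16-bit multiplication of the truncated
	 operands; the unsigned variant of short is used when short
	 overflow is undefined, so no new overflow is introduced.  */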
8022 
8023       tem = fold_convert_const (code, type, op0);
8024       return tem ? tem : NULL_TREE;
8025 
8026     case ADDR_SPACE_CONVERT_EXPR:
8027       if (integer_zerop (arg0))
8028 	return fold_convert_const (code, type, arg0);
8029       return NULL_TREE;
8030 
8031     case FIXED_CONVERT_EXPR:
8032       tem = fold_convert_const (code, type, arg0);
8033       return tem ? tem : NULL_TREE;
8034 
8035     case VIEW_CONVERT_EXPR:
8036       if (TREE_TYPE (op0) == type)
8037 	return op0;
8038       if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8039 	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8040 			    type, TREE_OPERAND (op0, 0));
8041       if (TREE_CODE (op0) == MEM_REF)
8042 	return fold_build2_loc (loc, MEM_REF, type,
8043 				TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8044 
8045       /* For integral conversions with the same precision or pointer
8046 	 conversions use a NOP_EXPR instead.  */
8047       if ((INTEGRAL_TYPE_P (type)
8048 	   || POINTER_TYPE_P (type))
8049 	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8050 	      || POINTER_TYPE_P (TREE_TYPE (op0)))
8051 	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8052 	return fold_convert_loc (loc, type, op0);
8053 
8054       /* Strip inner integral conversions that do not change the precision.  */
8055       if (CONVERT_EXPR_P (op0)
8056 	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8057 	      || POINTER_TYPE_P (TREE_TYPE (op0)))
8058 	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8059 	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8060 	  && (TYPE_PRECISION (TREE_TYPE (op0))
8061 	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8062 	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8063 			    type, TREE_OPERAND (op0, 0));
8064 
8065       return fold_view_convert_expr (type, op0);
8066 
8067     case NEGATE_EXPR:
8068       tem = fold_negate_expr (loc, arg0);
8069       if (tem)
8070 	return fold_convert_loc (loc, type, tem);
8071       return NULL_TREE;
8072 
8073     case ABS_EXPR:
8074       if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8075 	return fold_abs_const (arg0, type);
8076       else if (TREE_CODE (arg0) == NEGATE_EXPR)
8077 	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8078       /* Convert fabs((double)float) into (double)fabsf(float).  */
8079       else if (TREE_CODE (arg0) == NOP_EXPR
8080 	       && TREE_CODE (type) == REAL_TYPE)
8081 	{
8082 	  tree targ0 = strip_float_extensions (arg0);
8083 	  if (targ0 != arg0)
8084 	    return fold_convert_loc (loc, type,
8085 				     fold_build1_loc (loc, ABS_EXPR,
8086 						  TREE_TYPE (targ0),
8087 						  targ0));
8088 	}
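      /* Illustrative example (editorial sketch): for a float variable
	 f, fabs ((double) f) folds to (double) fabsf (f), performing
	 the absolute value in the narrower type.  */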
8089       /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
8090       else if (TREE_CODE (arg0) == ABS_EXPR)
8091 	return arg0;
8092       else if (tree_expr_nonnegative_p (arg0))
8093 	return arg0;
8094 
8095       /* Strip sign ops from argument.  */
8096       if (TREE_CODE (type) == REAL_TYPE)
8097 	{
8098 	  tem = fold_strip_sign_ops (arg0);
8099 	  if (tem)
8100 	    return fold_build1_loc (loc, ABS_EXPR, type,
8101 				fold_convert_loc (loc, type, tem));
8102 	}
8103       return NULL_TREE;
8104 
8105     case CONJ_EXPR:
8106       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8107 	return fold_convert_loc (loc, type, arg0);
8108       if (TREE_CODE (arg0) == COMPLEX_EXPR)
8109 	{
8110 	  tree itype = TREE_TYPE (type);
8111 	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8112 	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8113 	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8114 			      negate_expr (ipart));
8115 	}
8116       if (TREE_CODE (arg0) == COMPLEX_CST)
8117 	{
8118 	  tree itype = TREE_TYPE (type);
8119 	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8120 	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8121 	  return build_complex (type, rpart, negate_expr (ipart));
8122 	}
8123       if (TREE_CODE (arg0) == CONJ_EXPR)
8124 	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8125       return NULL_TREE;
8126 
8127     case BIT_NOT_EXPR:
8128       if (TREE_CODE (arg0) == INTEGER_CST)
8129         return fold_not_const (arg0, type);
8130       else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8131 	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8132       /* Convert ~ (-A) to A - 1.  */
8133       else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8134 	return fold_build2_loc (loc, MINUS_EXPR, type,
8135 			    fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8136 			    build_int_cst (type, 1));
8137       /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
8138       else if (INTEGRAL_TYPE_P (type)
8139 	       && ((TREE_CODE (arg0) == MINUS_EXPR
8140 		    && integer_onep (TREE_OPERAND (arg0, 1)))
8141 		   || (TREE_CODE (arg0) == PLUS_EXPR
8142 		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8143 	return fold_build1_loc (loc, NEGATE_EXPR, type,
8144 			    fold_convert_loc (loc, type,
8145 					      TREE_OPERAND (arg0, 0)));
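      /* Illustrative examples (editorial sketch): ~(-x) folds to
	 x - 1 and ~(x - 1) folds to -x; both are instances of the
	 two's complement identity ~x == -x - 1.  */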
8146       /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
8147       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8148 	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8149 			       	     fold_convert_loc (loc, type,
8150 						       TREE_OPERAND (arg0, 0)))))
8151 	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8152 			    fold_convert_loc (loc, type,
8153 					      TREE_OPERAND (arg0, 1)));
8154       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8155 	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8156 			       	     fold_convert_loc (loc, type,
8157 						       TREE_OPERAND (arg0, 1)))))
8158 	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8159 			    fold_convert_loc (loc, type,
8160 					      TREE_OPERAND (arg0, 0)), tem);
8161       /* Perform BIT_NOT_EXPR on each element individually.  */
8162       else if (TREE_CODE (arg0) == VECTOR_CST)
8163 	{
8164 	  tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8165 	  int count = TYPE_VECTOR_SUBPARTS (type), i;
8166 
8167 	  for (i = 0; i < count; i++)
8168 	    {
8169 	      if (elements)
8170 		{
8171 		  elem = TREE_VALUE (elements);
8172 		  elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8173 		  if (elem == NULL_TREE)
8174 		    break;
8175 		  elements = TREE_CHAIN (elements);
8176 		}
8177 	      else
8178 		elem = build_int_cst (TREE_TYPE (type), -1);
8179 	      list = tree_cons (NULL_TREE, elem, list);
8180 	    }
8181 	  if (i == count)
8182 	    return build_vector (type, nreverse (list));
8183 	}
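      /* Illustrative example (editorial sketch): the complement is
	 taken elementwise, so with a signed element type the vector
	 constant ~{0, 1, 2, 3} folds to {-1, -2, -3, -4}.  */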
8184 
8185       return NULL_TREE;
8186 
8187     case TRUTH_NOT_EXPR:
8188       /* The argument to invert_truthvalue must have Boolean type.  */
8189       if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8190           arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8191 
8192       /* Note that the operand of this must be an int
8193 	 and its values must be 0 or 1.
8194 	 ("true" is a fixed value perhaps depending on the language,
8195 	 but we don't handle values other than 1 correctly yet.)  */
8196       tem = fold_truth_not_expr (loc, arg0);
8197       if (!tem)
8198 	return NULL_TREE;
8199       return fold_convert_loc (loc, type, tem);
8200 
8201     case REALPART_EXPR:
8202       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8203 	return fold_convert_loc (loc, type, arg0);
8204       if (TREE_CODE (arg0) == COMPLEX_EXPR)
8205 	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8206 				 TREE_OPERAND (arg0, 1));
8207       if (TREE_CODE (arg0) == COMPLEX_CST)
8208 	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8209       if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8210 	{
8211 	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
8212 	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8213 			     fold_build1_loc (loc, REALPART_EXPR, itype,
8214 					  TREE_OPERAND (arg0, 0)),
8215 			     fold_build1_loc (loc, REALPART_EXPR, itype,
8216 					  TREE_OPERAND (arg0, 1)));
8217 	  return fold_convert_loc (loc, type, tem);
8218 	}
8219       if (TREE_CODE (arg0) == CONJ_EXPR)
8220 	{
8221 	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
8222 	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8223 			     TREE_OPERAND (arg0, 0));
8224 	  return fold_convert_loc (loc, type, tem);
8225 	}
8226       if (TREE_CODE (arg0) == CALL_EXPR)
8227 	{
8228 	  tree fn = get_callee_fndecl (arg0);
8229 	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8230 	    switch (DECL_FUNCTION_CODE (fn))
8231 	      {
8232 	      CASE_FLT_FN (BUILT_IN_CEXPI):
8233 	        fn = mathfn_built_in (type, BUILT_IN_COS);
8234 		if (fn)
8235 	          return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8236 		break;
8237 
8238 	      default:
8239 		break;
8240 	      }
8241 	}
8242       return NULL_TREE;
8243 
8244     case IMAGPART_EXPR:
8245       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8246 	return build_zero_cst (type);
8247       if (TREE_CODE (arg0) == COMPLEX_EXPR)
8248 	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8249 				 TREE_OPERAND (arg0, 0));
8250       if (TREE_CODE (arg0) == COMPLEX_CST)
8251 	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8252       if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8253 	{
8254 	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
8255 	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8256 			     fold_build1_loc (loc, IMAGPART_EXPR, itype,
8257 					  TREE_OPERAND (arg0, 0)),
8258 			     fold_build1_loc (loc, IMAGPART_EXPR, itype,
8259 					  TREE_OPERAND (arg0, 1)));
8260 	  return fold_convert_loc (loc, type, tem);
8261 	}
8262       if (TREE_CODE (arg0) == CONJ_EXPR)
8263 	{
8264 	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
8265 	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8266 	  return fold_convert_loc (loc, type, negate_expr (tem));
8267 	}
8268       if (TREE_CODE (arg0) == CALL_EXPR)
8269 	{
8270 	  tree fn = get_callee_fndecl (arg0);
8271 	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8272 	    switch (DECL_FUNCTION_CODE (fn))
8273 	      {
8274 	      CASE_FLT_FN (BUILT_IN_CEXPI):
8275 	        fn = mathfn_built_in (type, BUILT_IN_SIN);
8276 		if (fn)
8277 	          return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8278 		break;
8279 
8280 	      default:
8281 		break;
8282 	      }
8283 	}
8284       return NULL_TREE;
8285 
8286     case INDIRECT_REF:
8287       /* Fold *&X to X if X is an lvalue.  */
8288       if (TREE_CODE (op0) == ADDR_EXPR)
8289 	{
8290 	  tree op00 = TREE_OPERAND (op0, 0);
8291 	  if ((TREE_CODE (op00) == VAR_DECL
8292 	       || TREE_CODE (op00) == PARM_DECL
8293 	       || TREE_CODE (op00) == RESULT_DECL)
8294 	      && !TREE_READONLY (op00))
8295 	    return op00;
8296 	}
8297       return NULL_TREE;
8298 
8299     case VEC_UNPACK_LO_EXPR:
8300     case VEC_UNPACK_HI_EXPR:
8301     case VEC_UNPACK_FLOAT_LO_EXPR:
8302     case VEC_UNPACK_FLOAT_HI_EXPR:
8303       {
8304 	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8305 	tree *elts, vals = NULL_TREE;
8306 	enum tree_code subcode;
8307 
8308 	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8309 	if (TREE_CODE (arg0) != VECTOR_CST)
8310 	  return NULL_TREE;
8311 
8312 	elts = XALLOCAVEC (tree, nelts * 2);
8313 	if (!vec_cst_ctor_to_array (arg0, elts))
8314 	  return NULL_TREE;
8315 
8316 	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8317 				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
8318 	  elts += nelts;
8319 
8320 	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8321 	  subcode = NOP_EXPR;
8322 	else
8323 	  subcode = FLOAT_EXPR;
8324 
8325 	for (i = 0; i < nelts; i++)
8326 	  {
8327 	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8328 	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8329 	      return NULL_TREE;
8330 	  }
8331 
8332 	for (i = 0; i < nelts; i++)
8333 	  vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals);
8334 	return build_vector (type, vals);
8335       }
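      /* Illustrative example (editorial sketch, little-endian,
	 unpacking a V4HI constant to V2SI): VEC_UNPACK_LO_EXPR of
	 {1, 2, 3, 4} yields {1, 2} and VEC_UNPACK_HI_EXPR yields
	 {3, 4}, each element widened by the NOP_EXPR or FLOAT_EXPR
	 conversion above.  */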
8336 
8337     default:
8338       return NULL_TREE;
8339     } /* switch (code) */
8340 }
8341 
8342 
8343 /* If the operation was a conversion do _not_ mark a resulting constant
8344    with TREE_OVERFLOW if the original constant was not.  These conversions
8345    have implementation-defined behavior and retaining the TREE_OVERFLOW
8346    flag here would confuse later passes such as VRP.  */
8347 tree
8348 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8349 				tree type, tree op0)
8350 {
8351   tree res = fold_unary_loc (loc, code, type, op0);
8352   if (res
8353       && TREE_CODE (res) == INTEGER_CST
8354       && TREE_CODE (op0) == INTEGER_CST
8355       && CONVERT_EXPR_CODE_P (code))
8356     TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8357 
8358   return res;
8359 }
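
/* Illustrative example (editorial sketch): folding a narrowing
   conversion of a constant, such as (signed char) 200, yields -56 on
   two's complement targets; the generic folder may set TREE_OVERFLOW
   on the result, but this wrapper copies the flag from the original
   constant instead, since such conversions are implementation-defined
   rather than overflowing.  */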
8360 
8361 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8362    operands OP0 and OP1.  LOC is the location of the resulting expression.
8363    ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8364    Return the folded expression if folding is successful.  Otherwise,
8365    return NULL_TREE.  */
8366 static tree
8367 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8368 		  tree arg0, tree arg1, tree op0, tree op1)
8369 {
8370   tree tem;
8371 
8372   /* We only do these simplifications if we are optimizing.  */
8373   if (!optimize)
8374     return NULL_TREE;
8375 
8376   /* Check for things like (A || B) && (A || C).  We can convert this
8377      to A || (B && C).  Note that either operator can be any of the four
8378      truth and/or operations and the transformation will still be
8379      valid.   Also note that we only care about order for the
8380      ANDIF and ORIF operators.  If B contains side effects, this
8381      might change the truth-value of A.  */
8382   if (TREE_CODE (arg0) == TREE_CODE (arg1)
8383       && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8384 	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8385 	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
8386 	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8387       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8388     {
8389       tree a00 = TREE_OPERAND (arg0, 0);
8390       tree a01 = TREE_OPERAND (arg0, 1);
8391       tree a10 = TREE_OPERAND (arg1, 0);
8392       tree a11 = TREE_OPERAND (arg1, 1);
8393       int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8394 			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8395 			 && (code == TRUTH_AND_EXPR
8396 			     || code == TRUTH_OR_EXPR));
8397 
8398       if (operand_equal_p (a00, a10, 0))
8399 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8400 			    fold_build2_loc (loc, code, type, a01, a11));
8401       else if (commutative && operand_equal_p (a00, a11, 0))
8402 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8403 			    fold_build2_loc (loc, code, type, a01, a10));
8404       else if (commutative && operand_equal_p (a01, a10, 0))
8405 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8406 			    fold_build2_loc (loc, code, type, a00, a11));
8407 
8408 	      /* This case is tricky because we must either have commutative
8409 	 operators or else A10 must not have side-effects.  */
8410 
8411       else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8412 	       && operand_equal_p (a01, a11, 0))
8413 	return fold_build2_loc (loc, TREE_CODE (arg0), type,
8414 			    fold_build2_loc (loc, code, type, a00, a10),
8415 			    a01);
8416     }
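  /* Illustrative example (editorial sketch): (a || b) && (a || c)
     folds to a || (b && c) via the a00 == a10 case above, so that a
     is evaluated only once.  */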
8417 
8418   /* See if we can build a range comparison.  */
8419   if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8420     return tem;
8421 
8422   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8423       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8424     {
8425       tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8426       if (tem)
8427 	return fold_build2_loc (loc, code, type, tem, arg1);
8428     }
8429 
8430   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8431       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8432     {
8433       tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8434       if (tem)
8435 	return fold_build2_loc (loc, code, type, arg0, tem);
8436     }
8437 
8438   /* Check for the possibility of merging component references.  If our
8439      lhs is another similar operation, try to merge its rhs with our
8440      rhs.  Then try to merge our lhs and rhs.  */
8441   if (TREE_CODE (arg0) == code
8442       && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8443 					 TREE_OPERAND (arg0, 1), arg1)))
8444     return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8445 
8446   if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8447     return tem;
8448 
8449   if ((BRANCH_COST (optimize_function_for_speed_p (cfun),
8450 		    false) >= 2)
8451       && LOGICAL_OP_NON_SHORT_CIRCUIT
8452       && (code == TRUTH_AND_EXPR
8453           || code == TRUTH_ANDIF_EXPR
8454           || code == TRUTH_OR_EXPR
8455           || code == TRUTH_ORIF_EXPR))
8456     {
8457       enum tree_code ncode, icode;
8458 
8459       ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8460 	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8461       icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8462 
8463       /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8464 	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8465 	 We don't want to pack more than two leaves into a non-IF AND/OR
8466 	 expression.
8467 	 If the tree code of the left-hand operand isn't an AND/OR-IF code
8468 	 and is not equal to IF-CODE, then we don't want to add the
8469 	 right-hand operand.  If the inner right-hand side of the
8470 	 left-hand operand has side-effects, or isn't simple, then we
8471 	 can't add to it, as otherwise we might destroy the if-sequence.  */
8472       if (TREE_CODE (arg0) == icode
8473 	  && simple_operand_p_2 (arg1)
8474 	  /* Needed for sequence points to handle trapping operations
8475 	     and side-effects.  */
8476 	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8477 	{
8478 	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8479 				 arg1);
8480 	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8481 				  tem);
8482 	}
8483 	/* Same as above, but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8484 	   or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8485       else if (TREE_CODE (arg1) == icode
8486 	  && simple_operand_p_2 (arg0)
8487 	  /* Needed for sequence points to handle trapping operations
8488 	     and side-effects.  */
8489 	  && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8490 	{
8491 	  tem = fold_build2_loc (loc, ncode, type,
8492 				 arg0, TREE_OPERAND (arg1, 0));
8493 	  return fold_build2_loc (loc, icode, type, tem,
8494 				  TREE_OPERAND (arg1, 1));
8495 	}
8496       /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8497 	 into (A OR B).
8498 	 For sequence point consistency, we need to check for trapping
8499 	 operations and side-effects.  */
8500       else if (code == icode && simple_operand_p_2 (arg0)
8501                && simple_operand_p_2 (arg1))
8502 	return fold_build2_loc (loc, ncode, type, arg0, arg1);
8503     }
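  /* Illustrative example (editorial sketch): with a branch cost of at
     least 2, (a && b) && c can become a ANDIF (b AND c); the two
     rightmost leaves are packed into a non-short-circuit
     TRUTH_AND_EXPR, provided b and c are simple and cannot trap.  */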
8504 
8505   return NULL_TREE;
8506 }
8507 
8508 /* Fold a binary expression of code CODE and type TYPE with operands
8509    OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8510    Return the folded expression if folding is successful.  Otherwise,
8511    return NULL_TREE.  */
8512 
8513 static tree
8514 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8515 {
8516   enum tree_code compl_code;
8517 
8518   if (code == MIN_EXPR)
8519     compl_code = MAX_EXPR;
8520   else if (code == MAX_EXPR)
8521     compl_code = MIN_EXPR;
8522   else
8523     gcc_unreachable ();
8524 
8525   /* MIN (MAX (a, b), b) == b.  */
8526   if (TREE_CODE (op0) == compl_code
8527       && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8528     return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8529 
8530   /* MIN (MAX (b, a), b) == b.  */
8531   if (TREE_CODE (op0) == compl_code
8532       && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8533       && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8534     return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8535 
8536   /* MIN (a, MAX (a, b)) == a.  */
8537   if (TREE_CODE (op1) == compl_code
8538       && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8539       && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8540     return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8541 
8542   /* MIN (a, MAX (b, a)) == a.  */
8543   if (TREE_CODE (op1) == compl_code
8544       && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8545       && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8546     return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8547 
8548   return NULL_TREE;
8549 }
8550 
8551 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8552    by changing CODE to reduce the magnitude of constants involved in
8553    ARG0 of the comparison.
8554    Returns a canonicalized comparison tree if a simplification was
8555    possible, otherwise returns NULL_TREE.
8556    Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8557    valid if signed overflow is undefined.  */
8558 
8559 static tree
8560 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8561 				 tree arg0, tree arg1,
8562 				 bool *strict_overflow_p)
8563 {
8564   enum tree_code code0 = TREE_CODE (arg0);
8565   tree t, cst0 = NULL_TREE;
8566   int sgn0;
8567   bool swap = false;
8568 
8569   /* Match A +- CST code arg1 and CST code arg1.  We can change the
8570      first form only if overflow is undefined.  */
8571   if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8572 	 /* In principle pointers also have undefined overflow behavior,
8573 	    but that causes problems elsewhere.  */
8574 	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8575 	 && (code0 == MINUS_EXPR
8576 	     || code0 == PLUS_EXPR)
8577          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8578 	|| code0 == INTEGER_CST))
8579     return NULL_TREE;
8580 
8581   /* Identify the constant in arg0 and its sign.  */
8582   if (code0 == INTEGER_CST)
8583     cst0 = arg0;
8584   else
8585     cst0 = TREE_OPERAND (arg0, 1);
8586   sgn0 = tree_int_cst_sgn (cst0);
8587 
8588   /* Overflowed constants and zero will cause problems.  */
8589   if (integer_zerop (cst0)
8590       || TREE_OVERFLOW (cst0))
8591     return NULL_TREE;
8592 
8593   /* See if we can reduce the magnitude of the constant in
8594      arg0 by changing the comparison code.  */
8595   if (code0 == INTEGER_CST)
8596     {
8597       /* CST <= arg1  ->  CST-1 < arg1.  */
8598       if (code == LE_EXPR && sgn0 == 1)
8599 	code = LT_EXPR;
8600       /* -CST < arg1  ->  -CST-1 <= arg1.  */
8601       else if (code == LT_EXPR && sgn0 == -1)
8602 	code = LE_EXPR;
8603       /* CST > arg1  ->  CST-1 >= arg1.  */
8604       else if (code == GT_EXPR && sgn0 == 1)
8605 	code = GE_EXPR;
8606       /* -CST >= arg1  ->  -CST-1 > arg1.  */
8607       else if (code == GE_EXPR && sgn0 == -1)
8608 	code = GT_EXPR;
8609       else
8610         return NULL_TREE;
8611       /* arg1 code' CST' might be more canonical.  */
8612       swap = true;
8613     }
8614   else
8615     {
8616       /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
8617       if (code == LT_EXPR
8618 	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8619 	code = LE_EXPR;
8620       /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
8621       else if (code == GT_EXPR
8622 	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8623 	code = GE_EXPR;
8624       /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
8625       else if (code == LE_EXPR
8626 	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8627 	code = LT_EXPR;
8628       /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
8629       else if (code == GE_EXPR
8630 	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8631 	code = GT_EXPR;
8632       else
8633 	return NULL_TREE;
8634       *strict_overflow_p = true;
8635     }
8636 
8637   /* Now build the constant reduced in magnitude.  But not if that
8638 	     would produce one outside of its type's range.  */
8639   if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8640       && ((sgn0 == 1
8641 	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8642 	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8643 	  || (sgn0 == -1
8644 	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8645 	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8646     /* We cannot swap the comparison here as that would cause us to
8647        endlessly recurse.  */
8648     return NULL_TREE;
8649 
8650   t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8651 		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
8652   if (code0 != INTEGER_CST)
8653     t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8654   t = fold_convert (TREE_TYPE (arg1), t);
8655 
8656 	  /* If swapping might yield a more canonical form, do so.  */
8657   if (swap)
8658     return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8659   else
8660     return fold_build2_loc (loc, code, type, t, arg1);
8661 }
8662 
8663 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8664    overflow further.  Try to decrease the magnitude of constants involved
8665    by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8666    and put sole constants at the second argument position.
8667    Returns the canonicalized tree if changed, otherwise NULL_TREE.  */
8668 
8669 static tree
8670 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8671 			       tree arg0, tree arg1)
8672 {
8673   tree t;
8674   bool strict_overflow_p;
8675   const char * const warnmsg = G_("assuming signed overflow does not occur "
8676 				  "when reducing constant in comparison");
8677 
8678   /* Try canonicalization by simplifying arg0.  */
8679   strict_overflow_p = false;
8680   t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8681 				       &strict_overflow_p);
8682   if (t)
8683     {
8684       if (strict_overflow_p)
8685 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8686       return t;
8687     }
8688 
8689   /* Try canonicalization by simplifying arg1 using the swapped
8690      comparison.  */
8691   code = swap_tree_comparison (code);
8692   strict_overflow_p = false;
8693   t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8694 				       &strict_overflow_p);
8695   if (t && strict_overflow_p)
8696     fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8697   return t;
8698 }
8699 
8700 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8701    space.  This is used to avoid issuing overflow warnings for
8702 	   expressions like &p->x which cannot wrap.  */
8703 
8704 static bool
8705 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8706 {
8707   unsigned HOST_WIDE_INT offset_low, total_low;
8708   HOST_WIDE_INT size, offset_high, total_high;
8709 
8710   if (!POINTER_TYPE_P (TREE_TYPE (base)))
8711     return true;
8712 
8713   if (bitpos < 0)
8714     return true;
8715 
8716   if (offset == NULL_TREE)
8717     {
8718       offset_low = 0;
8719       offset_high = 0;
8720     }
8721   else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8722     return true;
8723   else
8724     {
8725       offset_low = TREE_INT_CST_LOW (offset);
8726       offset_high = TREE_INT_CST_HIGH (offset);
8727     }
8728 
8729   if (add_double_with_sign (offset_low, offset_high,
8730 			    bitpos / BITS_PER_UNIT, 0,
8731 			    &total_low, &total_high,
8732 			    true))
8733     return true;
8734 
8735   if (total_high != 0)
8736     return true;
8737 
8738   size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8739   if (size <= 0)
8740     return true;
8741 
8742   /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8743      array.  */
8744   if (TREE_CODE (base) == ADDR_EXPR)
8745     {
8746       HOST_WIDE_INT base_size;
8747 
8748       base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8749       if (base_size > 0 && size < base_size)
8750 	size = base_size;
8751     }
8752 
8753   return total_low > (unsigned HOST_WIDE_INT) size;
8754 }
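
/* Illustrative example (editorial sketch): for char a[8], the address
   &a p+ 4 stays within the 8-byte object, so this returns false and a
   comparison involving it can be folded without an overflow
   warning.  */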
8755 
8756 /* Subroutine of fold_binary.  This routine performs all of the
8757    transformations that are common to the equality/inequality
8758    operators (EQ_EXPR and NE_EXPR) and the ordering operators
8759    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
8760    fold_binary should call fold_binary.  Fold a comparison with
8761    tree code CODE and type TYPE with operands OP0 and OP1.  Return
8762    the folded comparison or NULL_TREE.  */
8763 
8764 static tree
8765 fold_comparison (location_t loc, enum tree_code code, tree type,
8766 		 tree op0, tree op1)
8767 {
8768   tree arg0, arg1, tem;
8769 
8770   arg0 = op0;
8771   arg1 = op1;
8772 
8773   STRIP_SIGN_NOPS (arg0);
8774   STRIP_SIGN_NOPS (arg1);
8775 
8776   tem = fold_relational_const (code, type, arg0, arg1);
8777   if (tem != NULL_TREE)
8778     return tem;
8779 
8780   /* If one arg is a real or integer constant, put it last.  */
8781   if (tree_swap_operands_p (arg0, arg1, true))
8782     return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8783 
8784   /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
8785   if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8786       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8787 	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8788 	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8789       && (TREE_CODE (arg1) == INTEGER_CST
8790 	  && !TREE_OVERFLOW (arg1)))
8791     {
8792       tree const1 = TREE_OPERAND (arg0, 1);
8793       tree const2 = arg1;
8794       tree variable = TREE_OPERAND (arg0, 0);
8795       tree lhs;
8796       int lhs_add;
8797       lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8798 
8799       lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8800 			 TREE_TYPE (arg1), const2, const1);
8801 
8802       /* If the constant operation overflowed this can be
8803 	 simplified as a comparison against INT_MAX/INT_MIN.  */
8804       if (TREE_CODE (lhs) == INTEGER_CST
8805 	  && TREE_OVERFLOW (lhs))
8806 	{
8807 	  int const1_sgn = tree_int_cst_sgn (const1);
8808 	  enum tree_code code2 = code;
8809 
8810 	  /* Get the sign of the constant on the lhs if the
8811 	     operation were VARIABLE + CONST1.  */
8812 	  if (TREE_CODE (arg0) == MINUS_EXPR)
8813 	    const1_sgn = -const1_sgn;
8814 
8815 	  /* The sign of the constant determines if we overflowed
8816 	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8817 	     Canonicalize to the INT_MIN overflow by swapping the comparison
8818 	     if necessary.  */
8819 	  if (const1_sgn == -1)
8820 	    code2 = swap_tree_comparison (code);
8821 
8822 	  /* We now can look at the canonicalized case
8823 	       VARIABLE + 1  CODE2  INT_MIN
8824 	     and decide on the result.  */
8825 	  if (code2 == LT_EXPR
8826 	      || code2 == LE_EXPR
8827 	      || code2 == EQ_EXPR)
8828 	    return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8829 	  else if (code2 == NE_EXPR
8830 		   || code2 == GE_EXPR
8831 		   || code2 == GT_EXPR)
8832 	    return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8833 	}
8834 
8835       if (TREE_CODE (lhs) == TREE_CODE (arg1)
8836 	  && (TREE_CODE (lhs) != INTEGER_CST
8837 	      || !TREE_OVERFLOW (lhs)))
8838 	{
8839 	  if (code != EQ_EXPR && code != NE_EXPR)
8840 	    fold_overflow_warning ("assuming signed overflow does not occur "
8841 				   "when changing X +- C1 cmp C2 to "
8842 				   "X cmp C1 +- C2",
8843 				   WARN_STRICT_OVERFLOW_COMPARISON);
8844 	  return fold_build2_loc (loc, code, type, variable, lhs);
8845 	}
8846     }
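  /* Illustrative example (editorial sketch, assuming signed overflow
     is undefined): for signed int x, x + 10 < 20 folds to x < 10.
     If computing C2 -+ C1 overflows, the comparison is effectively
     against INT_MIN/INT_MAX and degenerates to a constant, as handled
     above.  */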
8847 
8848   /* For comparisons of pointers we can decompose it to a compile time
8849      comparison of the base objects and the offsets into the object.
8850      This requires at least one operand being an ADDR_EXPR or a
8851      POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
8852   if (POINTER_TYPE_P (TREE_TYPE (arg0))
8853       && (TREE_CODE (arg0) == ADDR_EXPR
8854 	  || TREE_CODE (arg1) == ADDR_EXPR
8855 	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8856 	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8857     {
8858       tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8859       HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8860       enum machine_mode mode;
8861       int volatilep, unsignedp;
8862       bool indirect_base0 = false, indirect_base1 = false;
8863 
8864       /* Get base and offset for the access.  Strip ADDR_EXPR for
8865 	 get_inner_reference, but put it back by stripping INDIRECT_REF
8866 	 off the base object if possible.  indirect_baseN will be true
8867 	 if baseN is not an address but refers to the object itself.  */
8868       base0 = arg0;
8869       if (TREE_CODE (arg0) == ADDR_EXPR)
8870 	{
8871 	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8872 				       &bitsize, &bitpos0, &offset0, &mode,
8873 				       &unsignedp, &volatilep, false);
8874 	  if (TREE_CODE (base0) == INDIRECT_REF)
8875 	    base0 = TREE_OPERAND (base0, 0);
8876 	  else
8877 	    indirect_base0 = true;
8878 	}
8879       else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8880 	{
8881 	  base0 = TREE_OPERAND (arg0, 0);
8882 	  STRIP_SIGN_NOPS (base0);
8883 	  if (TREE_CODE (base0) == ADDR_EXPR)
8884 	    {
8885 	      base0 = TREE_OPERAND (base0, 0);
8886 	      indirect_base0 = true;
8887 	    }
8888 	  offset0 = TREE_OPERAND (arg0, 1);
8889 	  if (host_integerp (offset0, 0))
8890 	    {
8891 	      HOST_WIDE_INT off = size_low_cst (offset0);
8892 	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8893 				   * BITS_PER_UNIT)
8894 		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8895 		{
8896 		  bitpos0 = off * BITS_PER_UNIT;
8897 		  offset0 = NULL_TREE;
8898 		}
8899 	    }
8900 	}
8901 
8902       base1 = arg1;
8903       if (TREE_CODE (arg1) == ADDR_EXPR)
8904 	{
8905 	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8906 				       &bitsize, &bitpos1, &offset1, &mode,
8907 				       &unsignedp, &volatilep, false);
8908 	  if (TREE_CODE (base1) == INDIRECT_REF)
8909 	    base1 = TREE_OPERAND (base1, 0);
8910 	  else
8911 	    indirect_base1 = true;
8912 	}
8913       else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8914 	{
8915 	  base1 = TREE_OPERAND (arg1, 0);
8916 	  STRIP_SIGN_NOPS (base1);
8917 	  if (TREE_CODE (base1) == ADDR_EXPR)
8918 	    {
8919 	      base1 = TREE_OPERAND (base1, 0);
8920 	      indirect_base1 = true;
8921 	    }
8922 	  offset1 = TREE_OPERAND (arg1, 1);
8923 	  if (host_integerp (offset1, 0))
8924 	    {
8925 	      HOST_WIDE_INT off = size_low_cst (offset1);
8926 	      if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8927 				   * BITS_PER_UNIT)
8928 		  / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8929 		{
8930 		  bitpos1 = off * BITS_PER_UNIT;
8931 		  offset1 = NULL_TREE;
8932 		}
8933 	    }
8934 	}
8935 
8936       /* A local variable can never be pointed to by
8937          the default SSA name of an incoming parameter.  */
8938       if ((TREE_CODE (arg0) == ADDR_EXPR
8939            && indirect_base0
8940            && TREE_CODE (base0) == VAR_DECL
8941            && auto_var_in_fn_p (base0, current_function_decl)
8942            && !indirect_base1
8943            && TREE_CODE (base1) == SSA_NAME
8944            && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL
8945            && SSA_NAME_IS_DEFAULT_DEF (base1))
8946           || (TREE_CODE (arg1) == ADDR_EXPR
8947               && indirect_base1
8948               && TREE_CODE (base1) == VAR_DECL
8949               && auto_var_in_fn_p (base1, current_function_decl)
8950               && !indirect_base0
8951               && TREE_CODE (base0) == SSA_NAME
8952               && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL
8953               && SSA_NAME_IS_DEFAULT_DEF (base0)))
8954         {
8955           if (code == NE_EXPR)
8956             return constant_boolean_node (1, type);
8957           else if (code == EQ_EXPR)
8958             return constant_boolean_node (0, type);
8959         }
8960       /* If we have equivalent bases we might be able to simplify.  */
8961       else if (indirect_base0 == indirect_base1
8962                && operand_equal_p (base0, base1, 0))
8963 	{
8964 	  /* We can fold this expression to a constant if the non-constant
8965 	     offset parts are equal.  */
8966 	  if ((offset0 == offset1
8967 	       || (offset0 && offset1
8968 		   && operand_equal_p (offset0, offset1, 0)))
8969 	      && (code == EQ_EXPR
8970 		  || code == NE_EXPR
8971 		  || (indirect_base0 && DECL_P (base0))
8972 		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
8973 
8974 	    {
8975 	      if (code != EQ_EXPR
8976 		  && code != NE_EXPR
8977 		  && bitpos0 != bitpos1
8978 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
8979 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
8980 		fold_overflow_warning (("assuming pointer wraparound does not "
8981 					"occur when comparing P +- C1 with "
8982 					"P +- C2"),
8983 				       WARN_STRICT_OVERFLOW_CONDITIONAL);
8984 
8985 	      switch (code)
8986 		{
8987 		case EQ_EXPR:
8988 		  return constant_boolean_node (bitpos0 == bitpos1, type);
8989 		case NE_EXPR:
8990 		  return constant_boolean_node (bitpos0 != bitpos1, type);
8991 		case LT_EXPR:
8992 		  return constant_boolean_node (bitpos0 < bitpos1, type);
8993 		case LE_EXPR:
8994 		  return constant_boolean_node (bitpos0 <= bitpos1, type);
8995 		case GE_EXPR:
8996 		  return constant_boolean_node (bitpos0 >= bitpos1, type);
8997 		case GT_EXPR:
8998 		  return constant_boolean_node (bitpos0 > bitpos1, type);
8999 		default:;
9000 		}
9001 	    }
9002 	  /* We can simplify the comparison to a comparison of the variable
9003 	     offset parts if the constant offset parts are equal.
9004 	     Be careful to use signed size type here because otherwise we
9005 	     mess with array offsets in the wrong way.  This is possible
9006 	     because pointer arithmetic is restricted to remain within an
9007 	     object and overflow on pointer differences is undefined as of
9008 	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
9009 	  else if (bitpos0 == bitpos1
9010 		   && ((code == EQ_EXPR || code == NE_EXPR)
9011 		       || (indirect_base0 && DECL_P (base0))
9012 		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
9013 	    {
9014 	      /* By converting to signed size type we cover middle-end pointer
9015 	         arithmetic which operates on unsigned pointer types of size
9016 	         type size and ARRAY_REF offsets which are properly sign or
9017 	         zero extended from their type in case it is narrower than
9018 	         size type.  */
9019 	      if (offset0 == NULL_TREE)
9020 		offset0 = build_int_cst (ssizetype, 0);
9021 	      else
9022 		offset0 = fold_convert_loc (loc, ssizetype, offset0);
9023 	      if (offset1 == NULL_TREE)
9024 		offset1 = build_int_cst (ssizetype, 0);
9025 	      else
9026 		offset1 = fold_convert_loc (loc, ssizetype, offset1);
9027 
9028 	      if (code != EQ_EXPR
9029 		  && code != NE_EXPR
9030 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
9031 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
9032 		fold_overflow_warning (("assuming pointer wraparound does not "
9033 					"occur when comparing P +- C1 with "
9034 					"P +- C2"),
9035 				       WARN_STRICT_OVERFLOW_COMPARISON);
9036 
9037 	      return fold_build2_loc (loc, code, type, offset0, offset1);
9038 	    }
9039 	}
9040       /* For non-equal bases we can simplify if they are addresses
9041 	 of local binding decls or constants.  */
9042       else if (indirect_base0 && indirect_base1
9043 	       /* We know that !operand_equal_p (base0, base1, 0)
9044 		  because the if condition was false.  But make
9045 		  sure two decls are not the same.  */
9046 		  sure the two decls are not the same.  */
9047 	       && TREE_CODE (arg0) == ADDR_EXPR
9048 	       && TREE_CODE (arg1) == ADDR_EXPR
9049 	       && (((TREE_CODE (base0) == VAR_DECL
9050 		     || TREE_CODE (base0) == PARM_DECL)
9051 		    && (targetm.binds_local_p (base0)
9052 			|| CONSTANT_CLASS_P (base1)))
9053 		   || CONSTANT_CLASS_P (base0))
9054 	       && (((TREE_CODE (base1) == VAR_DECL
9055 		     || TREE_CODE (base1) == PARM_DECL)
9056 		    && (targetm.binds_local_p (base1)
9057 			|| CONSTANT_CLASS_P (base0)))
9058 		   || CONSTANT_CLASS_P (base1)))
9059 	{
9060 	  if (code == EQ_EXPR)
9061 	    return omit_two_operands_loc (loc, type, boolean_false_node,
9062 				      arg0, arg1);
9063 	  else if (code == NE_EXPR)
9064 	    return omit_two_operands_loc (loc, type, boolean_true_node,
9065 				      arg0, arg1);
9066 	}
9067       /* For equal offsets we can simplify to a comparison of the
9068 	 base addresses.  */
9069       else if (bitpos0 == bitpos1
9070 	       && (indirect_base0
9071 		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9072 	       && (indirect_base1
9073 		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9074 	       && ((offset0 == offset1)
9075 		   || (offset0 && offset1
9076 		       && operand_equal_p (offset0, offset1, 0))))
9077 	{
9078 	  if (indirect_base0)
9079 	    base0 = build_fold_addr_expr_loc (loc, base0);
9080 	  if (indirect_base1)
9081 	    base1 = build_fold_addr_expr_loc (loc, base1);
9082 	  return fold_build2_loc (loc, code, type, base0, base1);
9083 	}
9084     }
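  /* Illustrative example (editorial sketch): for int a[10],
     &a[2] < &a[5] has equal bases and constant offsets, so it folds
     to true via the bitpos0 < bitpos1 case above.  */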
9085 
9086   /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9087      X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
9088      the resulting offset is smaller in absolute value than the
9089      original one.  */
9090   if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9091       && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9092       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9093 	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9094       && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9095       && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9096 	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9097     {
9098       tree const1 = TREE_OPERAND (arg0, 1);
9099       tree const2 = TREE_OPERAND (arg1, 1);
9100       tree variable1 = TREE_OPERAND (arg0, 0);
9101       tree variable2 = TREE_OPERAND (arg1, 0);
9102       tree cst;
9103       const char * const warnmsg = G_("assuming signed overflow does not "
9104 				      "occur when combining constants around "
9105 				      "a comparison");
9106 
9107       /* Put the constant on the side where it doesn't overflow and is
9108 	 of lower absolute value than before.  */
9109       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9110 			     ? MINUS_EXPR : PLUS_EXPR,
9111 			     const2, const1);
9112       if (!TREE_OVERFLOW (cst)
9113 	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9114 	{
9115 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9116 	  return fold_build2_loc (loc, code, type,
9117 			      variable1,
9118 			      fold_build2_loc (loc,
9119 					   TREE_CODE (arg1), TREE_TYPE (arg1),
9120 					   variable2, cst));
9121 	}
9122 
9123       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9124 			     ? MINUS_EXPR : PLUS_EXPR,
9125 			     const1, const2);
9126       if (!TREE_OVERFLOW (cst)
9127 	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9128 	{
9129 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9130 	  return fold_build2_loc (loc, code, type,
9131 			      fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9132 					   variable1, cst),
9133 			      variable2);
9134 	}
9135     }
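  /* Illustrative example (editorial sketch): for signed x and y,
     x + 2 < y + 5 folds to x < y + 3; the combined constant 3 is
     smaller in absolute value than 5, so the rewrite cannot introduce
     a new overflow.  */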
9136 
9137   /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9138      signed arithmetic case.  That form is created by the compiler
9139      often enough for folding it to be of value.  One example is in
9140      computing loop trip counts after Operator Strength Reduction.  */
9141   if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9142       && TREE_CODE (arg0) == MULT_EXPR
9143       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9144           && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9145       && integer_zerop (arg1))
9146     {
9147       tree const1 = TREE_OPERAND (arg0, 1);
9148       tree const2 = arg1;                       /* zero */
9149       tree variable1 = TREE_OPERAND (arg0, 0);
9150       enum tree_code cmp_code = code;
9151 
9152       /* Handle unfolded multiplication by zero.  */
9153       if (integer_zerop (const1))
9154 	return fold_build2_loc (loc, cmp_code, type, const1, const2);
9155 
9156       fold_overflow_warning (("assuming signed overflow does not occur when "
9157 			      "eliminating multiplication in comparison "
9158 			      "with zero"),
9159 			     WARN_STRICT_OVERFLOW_COMPARISON);
9160 
9161       /* If const1 is negative we swap the sense of the comparison.  */
9162       if (tree_int_cst_sgn (const1) < 0)
9163         cmp_code = swap_tree_comparison (cmp_code);
9164 
9165       return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9166     }
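  /* Illustrative example (editorial sketch): for signed x, x * 4 > 0
     folds to x > 0, while x * -2 > 0 folds to x < 0; a negative
     multiplier swaps the sense of the comparison.  */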
9167 
9168   tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9169   if (tem)
9170     return tem;
9171 
9172   if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9173     {
9174       tree targ0 = strip_float_extensions (arg0);
9175       tree targ1 = strip_float_extensions (arg1);
9176       tree newtype = TREE_TYPE (targ0);
9177 
9178       if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9179 	newtype = TREE_TYPE (targ1);
9180 
9181       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
9182       if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9183 	return fold_build2_loc (loc, code, type,
9184 			    fold_convert_loc (loc, newtype, targ0),
9185 			    fold_convert_loc (loc, newtype, targ1));
9186 
9187       /* (-a) CMP (-b) -> b CMP a  */
9188       if (TREE_CODE (arg0) == NEGATE_EXPR
9189 	  && TREE_CODE (arg1) == NEGATE_EXPR)
9190 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9191 			    TREE_OPERAND (arg0, 0));
9192 
9193       if (TREE_CODE (arg1) == REAL_CST)
9194 	{
9195 	  REAL_VALUE_TYPE cst;
9196 	  cst = TREE_REAL_CST (arg1);
9197 
9198 	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
9199 	  if (TREE_CODE (arg0) == NEGATE_EXPR)
9200 	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
9201 				TREE_OPERAND (arg0, 0),
9202 				build_real (TREE_TYPE (arg1),
9203 					    real_value_negate (&cst)));
9204 
9205 	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
9206 	  /* a CMP (-0) -> a CMP 0  */
9207 	  if (REAL_VALUE_MINUS_ZERO (cst))
9208 	    return fold_build2_loc (loc, code, type, arg0,
9209 				build_real (TREE_TYPE (arg1), dconst0));
9210 
9211 	  /* x != NaN is always true, other ops are always false.  */
9212 	  if (REAL_VALUE_ISNAN (cst)
9213 	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9214 	    {
9215 	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9216 	      return omit_one_operand_loc (loc, type, tem, arg0);
9217 	    }
9218 
9219 	  /* Fold comparisons against infinity.  */
9220 	  if (REAL_VALUE_ISINF (cst)
9221 	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9222 	    {
9223 	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
9224 	      if (tem != NULL_TREE)
9225 		return tem;
9226 	    }
9227 	}
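      /* Illustrative examples (editorial sketch): x != NAN folds to 1
	 and x < NAN folds to 0 when signaling NaNs need not be
	 honored, and comparisons such as x > INFINITY can fold via
	 fold_inf_compare to a constant or to a test against the
	 largest finite value.  */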
9228 
9229       /* If this is a comparison of a real constant with a PLUS_EXPR
9230 	 or a MINUS_EXPR of a real constant, we can convert it into a
9231 	 comparison with a revised real constant as long as no overflow
9232 	 occurs when unsafe_math_optimizations are enabled.  */
9233       if (flag_unsafe_math_optimizations
9234 	  && TREE_CODE (arg1) == REAL_CST
9235 	  && (TREE_CODE (arg0) == PLUS_EXPR
9236 	      || TREE_CODE (arg0) == MINUS_EXPR)
9237 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9238 	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9239 				      ? MINUS_EXPR : PLUS_EXPR,
9240 				      arg1, TREE_OPERAND (arg0, 1)))
9241 	  && !TREE_OVERFLOW (tem))
9242 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9243 
9244       /* Likewise, we can simplify a comparison of a real constant with
9245          a MINUS_EXPR whose first operand is also a real constant, i.e.
9246          (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
9247          floating-point types only if -fassociative-math is set.  */
9248       if (flag_associative_math
9249 	  && TREE_CODE (arg1) == REAL_CST
9250 	  && TREE_CODE (arg0) == MINUS_EXPR
9251 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9252 	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9253 				      arg1))
9254 	  && !TREE_OVERFLOW (tem))
9255 	return fold_build2_loc (loc, swap_tree_comparison (code), type,
9256 			    TREE_OPERAND (arg0, 1), tem);
9257 
9258       /* Fold comparisons against built-in math functions.  */
9259       if (TREE_CODE (arg1) == REAL_CST
9260 	  && flag_unsafe_math_optimizations
9261 	  && ! flag_errno_math)
9262 	{
9263 	  enum built_in_function fcode = builtin_mathfn_code (arg0);
9264 
9265 	  if (fcode != END_BUILTINS)
9266 	    {
9267 	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9268 	      if (tem != NULL_TREE)
9269 		return tem;
9270 	    }
9271 	}
9272     }
9273 
9274   if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9275       && CONVERT_EXPR_P (arg0))
9276     {
9277       /* If we are widening one operand of an integer comparison,
9278 	 see if the other operand is similarly being widened.  Perhaps we
9279 	 can do the comparison in the narrower type.  */
9280       tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9281       if (tem)
9282 	return tem;
9283 
9284       /* Or if we are changing signedness.  */
9285       tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9286       if (tem)
9287 	return tem;
9288     }
9289 
9290   /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9291      constant, we can simplify it.  */
9292   if (TREE_CODE (arg1) == INTEGER_CST
9293       && (TREE_CODE (arg0) == MIN_EXPR
9294 	  || TREE_CODE (arg0) == MAX_EXPR)
9295       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9296     {
9297       tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9298       if (tem)
9299 	return tem;
9300     }
9301 
9302   /* Simplify comparison of something with itself.  (For IEEE
9303      floating-point, we can only do some of these simplifications.)  */
9304   if (operand_equal_p (arg0, arg1, 0))
9305     {
9306       switch (code)
9307 	{
9308 	case EQ_EXPR:
9309 	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9310 	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9311 	    return constant_boolean_node (1, type);
9312 	  break;
9313 
9314 	case GE_EXPR:
9315 	case LE_EXPR:
9316 	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9317 	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9318 	    return constant_boolean_node (1, type);
9319 	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9320 
9321 	case NE_EXPR:
9322 	  /* For NE, we can only do this simplification if the type is
9323 	     integer or we don't honor IEEE floating-point NaNs.  */
9324 	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9325 	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9326 	    break;
9327 	  /* ... fall through ...  */
9328 	case GT_EXPR:
9329 	case LT_EXPR:
9330 	  return constant_boolean_node (0, type);
9331 	default:
9332 	  gcc_unreachable ();
9333 	}
9334     }
9335 
9336   /* If we are comparing an expression that just has comparisons
9337      of two integer values, arithmetic expressions of those comparisons,
9338      and constants, we can simplify it.  There are only three cases
9339      to check: the two values can either be equal, the first can be
9340      greater, or the second can be greater.  Fold the expression for
9341      those three values.  Since each value must be 0 or 1, we have
9342      eight possibilities, each of which corresponds to the constant 0
9343      or 1 or one of the six possible comparisons.
9344 
9345      This handles common cases like (a > b) == 0 but also handles
9346      expressions like  ((x > y) - (y > x)) > 0, which supposedly
9347      occur in macroized code.  */
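  /* For instance (an illustrative walk-through, not part of the original
     source): for ((x > y) - (y > x)) > 0 the three orderings give

       x > y  : (1 - 0) > 0  ==  1
       x == y : (0 - 0) > 0  ==  0
       x < y  : (0 - 1) > 0  ==  0

     i.e. the 3-bit mask 100 built by the switch below, so the whole
     expression folds to x > y.  */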
9348 
9349   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9350     {
9351       tree cval1 = 0, cval2 = 0;
9352       int save_p = 0;
9353 
9354       if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9355 	  /* Don't handle degenerate cases here; they should already
9356 	     have been handled anyway.  */
9357 	  && cval1 != 0 && cval2 != 0
9358 	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9359 	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9360 	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9361 	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9362 	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9363 	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9364 				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9365 	{
9366 	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9367 	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9368 
9369 	  /* We can't just pass T to eval_subst in case cval1 or cval2
9370 	     was the same as ARG1.  */
9371 
9372 	  tree high_result
9373 		= fold_build2_loc (loc, code, type,
9374 			       eval_subst (loc, arg0, cval1, maxval,
9375 					   cval2, minval),
9376 			       arg1);
9377 	  tree equal_result
9378 		= fold_build2_loc (loc, code, type,
9379 			       eval_subst (loc, arg0, cval1, maxval,
9380 					   cval2, maxval),
9381 			       arg1);
9382 	  tree low_result
9383 		= fold_build2_loc (loc, code, type,
9384 			       eval_subst (loc, arg0, cval1, minval,
9385 					   cval2, maxval),
9386 			       arg1);
9387 
9388 	  /* All three of these results should be 0 or 1.  Confirm they are.
9389 	     Then use those values to select the proper code to use.  */
9390 
9391 	  if (TREE_CODE (high_result) == INTEGER_CST
9392 	      && TREE_CODE (equal_result) == INTEGER_CST
9393 	      && TREE_CODE (low_result) == INTEGER_CST)
9394 	    {
9395 	      /* Make a 3-bit mask with the high-order bit being the
9396 		 value for `>', the next for `=', and the low for `<'.  */
9397 	      switch ((integer_onep (high_result) * 4)
9398 		      + (integer_onep (equal_result) * 2)
9399 		      + integer_onep (low_result))
9400 		{
9401 		case 0:
9402 		  /* Always false.  */
9403 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9404 		case 1:
9405 		  code = LT_EXPR;
9406 		  break;
9407 		case 2:
9408 		  code = EQ_EXPR;
9409 		  break;
9410 		case 3:
9411 		  code = LE_EXPR;
9412 		  break;
9413 		case 4:
9414 		  code = GT_EXPR;
9415 		  break;
9416 		case 5:
9417 		  code = NE_EXPR;
9418 		  break;
9419 		case 6:
9420 		  code = GE_EXPR;
9421 		  break;
9422 		case 7:
9423 		  /* Always true.  */
9424 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9425 		}
9426 
9427 	      if (save_p)
9428 		{
9429 		  tem = save_expr (build2 (code, type, cval1, cval2));
9430 		  SET_EXPR_LOCATION (tem, loc);
9431 		  return tem;
9432 		}
9433 	      return fold_build2_loc (loc, code, type, cval1, cval2);
9434 	    }
9435 	}
9436     }
9437 
9438   /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9439      into a single range test.  */
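  /* E.g. (an illustrative sketch, not part of the original source): for
     signed X with truncating division, X / 4 == 2 holds exactly for X in
     [8, 11], so the comparison becomes the range test
     8 <= X && X <= 11.  */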
9440   if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9441        || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9442       && TREE_CODE (arg1) == INTEGER_CST
9443       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9444       && !integer_zerop (TREE_OPERAND (arg0, 1))
9445       && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9446       && !TREE_OVERFLOW (arg1))
9447     {
9448       tem = fold_div_compare (loc, code, type, arg0, arg1);
9449       if (tem != NULL_TREE)
9450 	return tem;
9451     }
9452 
9453   /* Fold ~X op ~Y as Y op X.  */
9454   if (TREE_CODE (arg0) == BIT_NOT_EXPR
9455       && TREE_CODE (arg1) == BIT_NOT_EXPR)
9456     {
9457       tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9458       return fold_build2_loc (loc, code, type,
9459 			  fold_convert_loc (loc, cmp_type,
9460 					    TREE_OPERAND (arg1, 0)),
9461 			  TREE_OPERAND (arg0, 0));
9462     }
9463 
9464   /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
9465   if (TREE_CODE (arg0) == BIT_NOT_EXPR
9466       && TREE_CODE (arg1) == INTEGER_CST)
9467     {
9468       tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9469       return fold_build2_loc (loc, swap_tree_comparison (code), type,
9470 			  TREE_OPERAND (arg0, 0),
9471 			  fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9472 				       fold_convert_loc (loc, cmp_type, arg1)));
9473     }
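  /* As an illustration (not part of the original source), the two BIT_NOT
     folds above give

       ~x < ~y  -->  y < x
       ~x < 5   -->  x > ~5, i.e. x > -6

     because ~v == -v - 1 reverses the order of its operand.  */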
9474 
9475   return NULL_TREE;
9476 }
9477 
9478 
9479 /* Subroutine of fold_binary.  Optimize complex multiplications of the
9480    form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
9481    argument EXPR represents the expression "z" of type TYPE.  */
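/* A worked example (illustrative, not part of the original source): for
   z = 3 + 4i,

     z * conj(z) = (3 + 4i) * (3 - 4i) = 9 + 16 = 25 + 0i,

   i.e. realpart(z)*realpart(z) + imagpart(z)*imagpart(z) paired with a
   zero imaginary part, which is exactly what is built below.  */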
9482 
9483 static tree
9484 fold_mult_zconjz (location_t loc, tree type, tree expr)
9485 {
9486   tree itype = TREE_TYPE (type);
9487   tree rpart, ipart, tem;
9488 
9489   if (TREE_CODE (expr) == COMPLEX_EXPR)
9490     {
9491       rpart = TREE_OPERAND (expr, 0);
9492       ipart = TREE_OPERAND (expr, 1);
9493     }
9494   else if (TREE_CODE (expr) == COMPLEX_CST)
9495     {
9496       rpart = TREE_REALPART (expr);
9497       ipart = TREE_IMAGPART (expr);
9498     }
9499   else
9500     {
9501       expr = save_expr (expr);
9502       rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9503       ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9504     }
9505 
9506   rpart = save_expr (rpart);
9507   ipart = save_expr (ipart);
9508   tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9509 		     fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9510 		     fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9511   return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9512 			  build_zero_cst (itype));
9513 }
9514 
9515 
9516 /* Subroutine of fold_binary.  If P is the value of EXPR, computes
9517    power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
9518    guarantees that P and N have the same least significant log2(M) bits.
9519    N is not otherwise constrained.  In particular, N is not normalized to
9520    0 <= N < M as is common.  In general, the precise value of P is unknown.
9521    M is chosen as large as possible such that constant N can be determined.
9522 
9523    Returns M and sets *RESIDUE to N.
9524 
9525    If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9526    account.  This is not always possible due to PR 35705.
9527  */
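/* A worked example (illustrative, not part of the original source): if
   BUF is known to be 16-byte aligned, then for

     EXPR = &BUF p+ 5 p+ i * 8

   the ADDR_EXPR gives modulus 16 and residue 0; the INTEGER_CST 5 raises
   the residue to 5; and the MULT_EXPR by 8 lowers the modulus to
   MIN (16, 8) = 8.  The result M = 8, N = 5 guarantees that the low
   log2(8) = 3 bits of the pointer always equal 5.  */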
9528 
9529 static unsigned HOST_WIDE_INT
9530 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9531 				 bool allow_func_align)
9532 {
9533   enum tree_code code;
9534 
9535   *residue = 0;
9536 
9537   code = TREE_CODE (expr);
9538   if (code == ADDR_EXPR)
9539     {
9540       unsigned int bitalign;
9541       bitalign = get_object_alignment_1 (TREE_OPERAND (expr, 0), residue);
9542       *residue /= BITS_PER_UNIT;
9543       return bitalign / BITS_PER_UNIT;
9544     }
9545   else if (code == POINTER_PLUS_EXPR)
9546     {
9547       tree op0, op1;
9548       unsigned HOST_WIDE_INT modulus;
9549       enum tree_code inner_code;
9550 
9551       op0 = TREE_OPERAND (expr, 0);
9552       STRIP_NOPS (op0);
9553       modulus = get_pointer_modulus_and_residue (op0, residue,
9554 						 allow_func_align);
9555 
9556       op1 = TREE_OPERAND (expr, 1);
9557       STRIP_NOPS (op1);
9558       inner_code = TREE_CODE (op1);
9559       if (inner_code == INTEGER_CST)
9560 	{
9561 	  *residue += TREE_INT_CST_LOW (op1);
9562 	  return modulus;
9563 	}
9564       else if (inner_code == MULT_EXPR)
9565 	{
9566 	  op1 = TREE_OPERAND (op1, 1);
9567 	  if (TREE_CODE (op1) == INTEGER_CST)
9568 	    {
9569 	      unsigned HOST_WIDE_INT align;
9570 
9571 	      /* Compute the greatest power-of-2 divisor of op1.  */
9572 	      align = TREE_INT_CST_LOW (op1);
9573 	      align &= -align;
9574 
9575 	      /* If align is non-zero and less than modulus, replace
9576 		 modulus with align.  If align is 0, then either op1 is 0
9577 		 or the greatest power-of-2 divisor of op1 doesn't fit in an
9578 		 unsigned HOST_WIDE_INT.  In either case, no additional
9579 		 constraint is imposed.  */
9580 	      if (align)
9581 		modulus = MIN (modulus, align);
9582 
9583 	      return modulus;
9584 	    }
9585 	}
9586     }
9587 
9588   /* If we get here, we were unable to determine anything useful about the
9589      expression.  */
9590   return 1;
9591 }
9592 
9593 /* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
9594    CONSTRUCTOR ARG into array ELTS and return true if successful.  */
9595 
9596 static bool
9597 vec_cst_ctor_to_array (tree arg, tree *elts)
9598 {
9599   unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9600 
9601   if (TREE_CODE (arg) == VECTOR_CST)
9602     {
9603       tree t;
9604 
9605       for (i = 0, t = TREE_VECTOR_CST_ELTS (arg);
9606 	   i < nelts && t; i++, t = TREE_CHAIN (t))
9607 	elts[i] = TREE_VALUE (t);
9608       if (t)
9609 	return false;
9610     }
9611   else if (TREE_CODE (arg) == CONSTRUCTOR)
9612     {
9613       constructor_elt *elt;
9614 
9615       FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (arg), i, elt)
9616 	if (i >= nelts)
9617 	  return false;
9618 	else
9619 	  elts[i] = elt->value;
9620     }
9621   else
9622     return false;
9623   for (; i < nelts; i++)
9624     elts[i]
9625       = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9626   return true;
9627 }
9628 
9629 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9630    selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9631    NULL_TREE otherwise.  */
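/* For example (illustrative, not part of the original source): with
   4-element vectors ARG0 = {a, b, c, d} and ARG1 = {e, f, g, h}, SEL
   indexes the 8-element concatenation {a, b, c, d, e, f, g, h}, so
   SEL = {0, 4, 1, 5} selects the interleaved result {a, e, b, f}.  */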
9632 
9633 static tree
9634 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9635 {
9636   unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9637   tree *elts;
9638   bool need_ctor = false;
9639 
9640   gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9641 	      && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9642   if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9643       || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9644     return NULL_TREE;
9645 
9646   elts = XALLOCAVEC (tree, nelts * 3);
9647   if (!vec_cst_ctor_to_array (arg0, elts)
9648       || !vec_cst_ctor_to_array (arg1, elts + nelts))
9649     return NULL_TREE;
9650 
9651   for (i = 0; i < nelts; i++)
9652     {
9653       if (!CONSTANT_CLASS_P (elts[sel[i]]))
9654 	need_ctor = true;
9655       elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9656     }
9657 
9658   if (need_ctor)
9659     {
9660       VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, nelts);
9661       for (i = 0; i < nelts; i++)
9662 	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9663       return build_constructor (type, v);
9664     }
9665   else
9666     {
9667       tree vals = NULL_TREE;
9668       for (i = 0; i < nelts; i++)
9669 	vals = tree_cons (NULL_TREE, elts[3 * nelts - i - 1], vals);
9670       return build_vector (type, vals);
9671     }
9672 }
9673 
9674 /* Try to fold a pointer difference of type TYPE between two address
9675    expressions of array references AREF0 and AREF1 using location LOC.  Return a
9676    simplified expression for the difference or NULL_TREE.  */
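/* E.g. (illustrative, not part of the original source): for
   &a[5] - &a[2] the bases are equal, so the difference folds to
   (5 - 2) * sizeof (a[0]), the index difference scaled by the element
   size.  */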
9677 
9678 static tree
9679 fold_addr_of_array_ref_difference (location_t loc, tree type,
9680 				   tree aref0, tree aref1)
9681 {
9682   tree base0 = TREE_OPERAND (aref0, 0);
9683   tree base1 = TREE_OPERAND (aref1, 0);
9684   tree base_offset = build_int_cst (type, 0);
9685 
9686   /* If the bases are array references as well, recurse.  If the bases
9687      are pointer indirections, compute the difference of the pointers.
9688      If the bases are equal, we are set.  */
9689   if ((TREE_CODE (base0) == ARRAY_REF
9690        && TREE_CODE (base1) == ARRAY_REF
9691        && (base_offset
9692 	   = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9693       || (INDIRECT_REF_P (base0)
9694 	  && INDIRECT_REF_P (base1)
9695 	  && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9696 					     TREE_OPERAND (base0, 0),
9697 					     TREE_OPERAND (base1, 0))))
9698       || operand_equal_p (base0, base1, 0))
9699     {
9700       tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9701       tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9702       tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9703       tree diff = build2 (MINUS_EXPR, type, op0, op1);
9704       return fold_build2_loc (loc, PLUS_EXPR, type,
9705 			      base_offset,
9706 			      fold_build2_loc (loc, MULT_EXPR, type,
9707 					       diff, esz));
9708     }
9709   return NULL_TREE;
9710 }
9711 
9712 /* Fold a binary expression of code CODE and type TYPE with operands
9713    OP0 and OP1.  LOC is the location of the resulting expression.
9714    Return the folded expression if folding is successful.  Otherwise,
9715    return NULL_TREE.  */
9716 
9717 tree
9718 fold_binary_loc (location_t loc,
9719 	     enum tree_code code, tree type, tree op0, tree op1)
9720 {
9721   enum tree_code_class kind = TREE_CODE_CLASS (code);
9722   tree arg0, arg1, tem;
9723   tree t1 = NULL_TREE;
9724   bool strict_overflow_p;
9725 
9726   gcc_assert (IS_EXPR_CODE_CLASS (kind)
9727 	      && TREE_CODE_LENGTH (code) == 2
9728 	      && op0 != NULL_TREE
9729 	      && op1 != NULL_TREE);
9730 
9731   arg0 = op0;
9732   arg1 = op1;
9733 
9734   /* Strip any conversions that don't change the mode.  This is
9735      safe for every expression, except for a comparison expression
9736      because its signedness is derived from its operands.  So, in
9737      the latter case, only strip conversions that don't change the
9738      signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
9739      preserved.
9740 
9741      Note that this is done as an internal manipulation within the
9742      constant folder, in order to find the simplest representation
9743      of the arguments so that their form can be studied.  In any
9744      case, the appropriate type conversions should be put back in
9745      the tree that will get out of the constant folder.  */
9746 
9747   if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9748     {
9749       STRIP_SIGN_NOPS (arg0);
9750       STRIP_SIGN_NOPS (arg1);
9751     }
9752   else
9753     {
9754       STRIP_NOPS (arg0);
9755       STRIP_NOPS (arg1);
9756     }
9757 
9758   /* Note that TREE_CONSTANT isn't enough: static var addresses are
9759      constant but we can't do arithmetic on them.  */
9760   if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9761       || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9762       || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9763       || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9764       || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9765       || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9766     {
9767       if (kind == tcc_binary)
9768 	{
9769 	  /* Make sure type and arg0 have the same saturating flag.  */
9770 	  gcc_assert (TYPE_SATURATING (type)
9771 		      == TYPE_SATURATING (TREE_TYPE (arg0)));
9772 	  tem = const_binop (code, arg0, arg1);
9773 	}
9774       else if (kind == tcc_comparison)
9775 	tem = fold_relational_const (code, type, arg0, arg1);
9776       else
9777 	tem = NULL_TREE;
9778 
9779       if (tem != NULL_TREE)
9780 	{
9781 	  if (TREE_TYPE (tem) != type)
9782 	    tem = fold_convert_loc (loc, type, tem);
9783 	  return tem;
9784 	}
9785     }
9786 
9787   /* If this is a commutative operation, and ARG0 is a constant, move it
9788      to ARG1 to reduce the number of tests below.  */
9789   if (commutative_tree_code (code)
9790       && tree_swap_operands_p (arg0, arg1, true))
9791     return fold_build2_loc (loc, code, type, op1, op0);
9792 
9793   /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9794 
9795      First check for cases where an arithmetic operation is applied to a
9796      compound, conditional, or comparison operation.  Push the arithmetic
9797      operation inside the compound or conditional to see if any folding
9798      can then be done.  Convert comparison to conditional for this purpose.
9799      This also optimizes non-constant cases that used to be done in
9800      expand_expr.
9801 
9802      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9803      one operand is a truth value and the other is a truth value or a
9804      BIT_AND_EXPR with the constant 1.  In that case, the
9805      code below would make the expression more complex.  Change it to a
9806      TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
9807      TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
9808 
9809   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9810        || code == EQ_EXPR || code == NE_EXPR)
9811       && ((truth_value_p (TREE_CODE (arg0))
9812 	   && (truth_value_p (TREE_CODE (arg1))
9813 	       || (TREE_CODE (arg1) == BIT_AND_EXPR
9814 		   && integer_onep (TREE_OPERAND (arg1, 1)))))
9815 	  || (truth_value_p (TREE_CODE (arg1))
9816 	      && (truth_value_p (TREE_CODE (arg0))
9817 		  || (TREE_CODE (arg0) == BIT_AND_EXPR
9818 		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
9819     {
9820       tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9821 			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9822 			 : TRUTH_XOR_EXPR,
9823 			 boolean_type_node,
9824 			 fold_convert_loc (loc, boolean_type_node, arg0),
9825 			 fold_convert_loc (loc, boolean_type_node, arg1));
9826 
9827       if (code == EQ_EXPR)
9828 	tem = invert_truthvalue_loc (loc, tem);
9829 
9830       return fold_convert_loc (loc, type, tem);
9831     }
9832 
9833   if (TREE_CODE_CLASS (code) == tcc_binary
9834       || TREE_CODE_CLASS (code) == tcc_comparison)
9835     {
9836       if (TREE_CODE (arg0) == COMPOUND_EXPR)
9837 	{
9838 	  tem = fold_build2_loc (loc, code, type,
9839 			     fold_convert_loc (loc, TREE_TYPE (op0),
9840 					       TREE_OPERAND (arg0, 1)), op1);
9841 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9842 			     tem);
9843 	}
9844       if (TREE_CODE (arg1) == COMPOUND_EXPR
9845 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9846 	{
9847 	  tem = fold_build2_loc (loc, code, type, op0,
9848 			     fold_convert_loc (loc, TREE_TYPE (op1),
9849 					       TREE_OPERAND (arg1, 1)));
9850 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9851 			     tem);
9852 	}
9853 
9854       if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9855 	{
9856 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9857 						     arg0, arg1,
9858 						     /*cond_first_p=*/1);
9859 	  if (tem != NULL_TREE)
9860 	    return tem;
9861 	}
9862 
9863       if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9864 	{
9865 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9866 						     arg1, arg0,
9867 					             /*cond_first_p=*/0);
9868 	  if (tem != NULL_TREE)
9869 	    return tem;
9870 	}
9871     }
9872 
9873   switch (code)
9874     {
9875     case MEM_REF:
9876       /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
9877       if (TREE_CODE (arg0) == ADDR_EXPR
9878 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9879 	{
9880 	  tree iref = TREE_OPERAND (arg0, 0);
9881 	  return fold_build2 (MEM_REF, type,
9882 			      TREE_OPERAND (iref, 0),
9883 			      int_const_binop (PLUS_EXPR, arg1,
9884 					       TREE_OPERAND (iref, 1)));
9885 	}
9886 
9887       /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
9888       if (TREE_CODE (arg0) == ADDR_EXPR
9889 	  && handled_component_p (TREE_OPERAND (arg0, 0)))
9890 	{
9891 	  tree base;
9892 	  HOST_WIDE_INT coffset;
9893 	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9894 						&coffset);
9895 	  if (!base)
9896 	    return NULL_TREE;
9897 	  return fold_build2 (MEM_REF, type,
9898 			      build_fold_addr_expr (base),
9899 			      int_const_binop (PLUS_EXPR, arg1,
9900 					       size_int (coffset)));
9901 	}
9902 
9903       return NULL_TREE;
9904 
9905     case POINTER_PLUS_EXPR:
9906       /* 0 +p index -> (type)index */
9907       if (integer_zerop (arg0))
9908 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
9909 
9910       /* PTR +p 0 -> PTR */
9911       if (integer_zerop (arg1))
9912 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
9913 
9914       /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
9915       if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9916 	   && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9917         return fold_convert_loc (loc, type,
9918 				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9919 					      fold_convert_loc (loc, sizetype,
9920 								arg1),
9921 					      fold_convert_loc (loc, sizetype,
9922 								arg0)));
9923 
9924       /* (PTR +p B) +p A -> PTR +p (B + A) */
9925       if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9926 	{
9927 	  tree inner;
9928 	  tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
9929 	  tree arg00 = TREE_OPERAND (arg0, 0);
9930 	  inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
9931 			       arg01, fold_convert_loc (loc, sizetype, arg1));
9932 	  return fold_convert_loc (loc, type,
9933 				   fold_build_pointer_plus_loc (loc,
9934 								arg00, inner));
9935 	}
9936 
9937       /* PTR_CST +p CST -> CST1 */
9938       if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9939 	return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
9940 			    fold_convert_loc (loc, type, arg1));
9941 
9942      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9943 	of the array.  The loop optimizer sometimes produces this type of
9944 	expression.  */
9945       if (TREE_CODE (arg0) == ADDR_EXPR)
9946 	{
9947 	  tem = try_move_mult_to_index (loc, arg0,
9948 					fold_convert_loc (loc, sizetype, arg1));
9949 	  if (tem)
9950 	    return fold_convert_loc (loc, type, tem);
9951 	}
9952 
9953       return NULL_TREE;
9954 
9955     case PLUS_EXPR:
9956       /* A + (-B) -> A - B */
9957       if (TREE_CODE (arg1) == NEGATE_EXPR)
9958 	return fold_build2_loc (loc, MINUS_EXPR, type,
9959 			    fold_convert_loc (loc, type, arg0),
9960 			    fold_convert_loc (loc, type,
9961 					      TREE_OPERAND (arg1, 0)));
9962       /* (-A) + B -> B - A */
9963       if (TREE_CODE (arg0) == NEGATE_EXPR
9964 	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9965 	return fold_build2_loc (loc, MINUS_EXPR, type,
9966 			    fold_convert_loc (loc, type, arg1),
9967 			    fold_convert_loc (loc, type,
9968 					      TREE_OPERAND (arg0, 0)));
9969 
9970       if (INTEGRAL_TYPE_P (type))
9971 	{
9972 	  /* Convert ~A + 1 to -A.  */
9973 	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
9974 	      && integer_onep (arg1))
9975 	    return fold_build1_loc (loc, NEGATE_EXPR, type,
9976 				fold_convert_loc (loc, type,
9977 						  TREE_OPERAND (arg0, 0)));
9978 
9979 	  /* ~X + X is -1.  */
9980 	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
9981 	      && !TYPE_OVERFLOW_TRAPS (type))
9982 	    {
9983 	      tree tem = TREE_OPERAND (arg0, 0);
9984 
9985 	      STRIP_NOPS (tem);
9986 	      if (operand_equal_p (tem, arg1, 0))
9987 		{
9988 		  t1 = build_int_cst_type (type, -1);
9989 		  return omit_one_operand_loc (loc, type, t1, arg1);
9990 		}
9991 	    }
9992 
9993 	  /* X + ~X is -1.  */
9994 	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
9995 	      && !TYPE_OVERFLOW_TRAPS (type))
9996 	    {
9997 	      tree tem = TREE_OPERAND (arg1, 0);
9998 
9999 	      STRIP_NOPS (tem);
10000 	      if (operand_equal_p (arg0, tem, 0))
10001 		{
10002 		  t1 = build_int_cst_type (type, -1);
10003 		  return omit_one_operand_loc (loc, type, t1, arg0);
10004 		}
10005 	    }
10006 
10007 	  /* X + (X / CST) * -CST is X % CST.  */
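	  /* E.g. (illustrative, not part of the original source):
	     x + (x / 4) * -4 folds to x % 4, since with truncating
	     division x - (x / 4) * 4 is by definition the remainder.  */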
10008 	  if (TREE_CODE (arg1) == MULT_EXPR
10009 	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10010 	      && operand_equal_p (arg0,
10011 				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10012 	    {
10013 	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10014 	      tree cst1 = TREE_OPERAND (arg1, 1);
10015 	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10016 				      cst1, cst0);
10017 	      if (sum && integer_zerop (sum))
10018 		return fold_convert_loc (loc, type,
10019 					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10020 						      TREE_TYPE (arg0), arg0,
10021 						      cst0));
10022 	    }
10023 	}
10024 
10025       /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10026 	 one.  Make sure the type is not saturating and has the signedness of
10027 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10028 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
10029       if ((TREE_CODE (arg0) == MULT_EXPR
10030 	   || TREE_CODE (arg1) == MULT_EXPR)
10031 	  && !TYPE_SATURATING (type)
10032 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10033 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10034 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
10035         {
10036 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10037 	  if (tem)
10038 	    return tem;
10039 	}
10040 
10041       if (! FLOAT_TYPE_P (type))
10042 	{
10043 	  if (integer_zerop (arg1))
10044 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10045 
10046 	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10047 	     with a constant, and the two constants have no bits in common,
10048 	     we should treat this as a BIT_IOR_EXPR since this may produce more
10049 	     simplifications.  */
10050 	  if (TREE_CODE (arg0) == BIT_AND_EXPR
10051 	      && TREE_CODE (arg1) == BIT_AND_EXPR
10052 	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10053 	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10054 	      && integer_zerop (const_binop (BIT_AND_EXPR,
10055 					     TREE_OPERAND (arg0, 1),
10056 					     TREE_OPERAND (arg1, 1))))
10057 	    {
10058 	      code = BIT_IOR_EXPR;
10059 	      goto bit_ior;
10060 	    }
10061 
10062 	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10063 	     (plus (plus (mult) (mult)) (foo)) so that we can
10064 	     take advantage of the factoring cases below.  */
10065 	  if (TYPE_OVERFLOW_WRAPS (type)
10066 	      && (((TREE_CODE (arg0) == PLUS_EXPR
10067 		    || TREE_CODE (arg0) == MINUS_EXPR)
10068 		   && TREE_CODE (arg1) == MULT_EXPR)
10069 		  || ((TREE_CODE (arg1) == PLUS_EXPR
10070 		       || TREE_CODE (arg1) == MINUS_EXPR)
10071 		      && TREE_CODE (arg0) == MULT_EXPR)))
10072 	    {
10073 	      tree parg0, parg1, parg, marg;
10074 	      enum tree_code pcode;
10075 
10076 	      if (TREE_CODE (arg1) == MULT_EXPR)
10077 		parg = arg0, marg = arg1;
10078 	      else
10079 		parg = arg1, marg = arg0;
10080 	      pcode = TREE_CODE (parg);
10081 	      parg0 = TREE_OPERAND (parg, 0);
10082 	      parg1 = TREE_OPERAND (parg, 1);
10083 	      STRIP_NOPS (parg0);
10084 	      STRIP_NOPS (parg1);
10085 
10086 	      if (TREE_CODE (parg0) == MULT_EXPR
10087 		  && TREE_CODE (parg1) != MULT_EXPR)
10088 		return fold_build2_loc (loc, pcode, type,
10089 				    fold_build2_loc (loc, PLUS_EXPR, type,
10090 						 fold_convert_loc (loc, type,
10091 								   parg0),
10092 						 fold_convert_loc (loc, type,
10093 								   marg)),
10094 				    fold_convert_loc (loc, type, parg1));
10095 	      if (TREE_CODE (parg0) != MULT_EXPR
10096 		  && TREE_CODE (parg1) == MULT_EXPR)
10097 		return
10098 		  fold_build2_loc (loc, PLUS_EXPR, type,
10099 			       fold_convert_loc (loc, type, parg0),
10100 			       fold_build2_loc (loc, pcode, type,
10101 					    fold_convert_loc (loc, type, marg),
10102 					    fold_convert_loc (loc, type,
10103 							      parg1)));
10104 	    }
10105 	}
10106       else
10107 	{
10108 	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
10109 	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10110 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10111 
10112 	  /* Likewise if the operands are reversed.  */
10113 	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10114 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10115 
10116 	  /* Convert X + -C into X - C.  */
10117 	  if (TREE_CODE (arg1) == REAL_CST
10118 	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10119 	    {
10120 	      tem = fold_negate_const (arg1, type);
10121 	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10122 		return fold_build2_loc (loc, MINUS_EXPR, type,
10123 				    fold_convert_loc (loc, type, arg0),
10124 				    fold_convert_loc (loc, type, tem));
10125 	    }
10126 
10127 	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10128 	     to __complex__ ( x, y ).  This is not the same for SNaNs or
10129 	     if signed zeros are involved.  */
10130 	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10131               && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10132 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10133 	    {
10134 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10135 	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10136 	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10137 	      bool arg0rz = false, arg0iz = false;
10138 	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
10139 		  || (arg0i && (arg0iz = real_zerop (arg0i))))
10140 		{
10141 		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10142 		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10143 		  if (arg0rz && arg1i && real_zerop (arg1i))
10144 		    {
10145 		      tree rp = arg1r ? arg1r
10146 				  : build1 (REALPART_EXPR, rtype, arg1);
10147 		      tree ip = arg0i ? arg0i
10148 				  : build1 (IMAGPART_EXPR, rtype, arg0);
10149 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10150 		    }
10151 		  else if (arg0iz && arg1r && real_zerop (arg1r))
10152 		    {
10153 		      tree rp = arg0r ? arg0r
10154 				  : build1 (REALPART_EXPR, rtype, arg0);
10155 		      tree ip = arg1i ? arg1i
10156 				  : build1 (IMAGPART_EXPR, rtype, arg1);
10157 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10158 		    }
10159 		}
10160 	    }
10161 
10162 	  if (flag_unsafe_math_optimizations
10163 	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10164 	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10165 	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10166 	    return tem;
10167 
10168 	  /* Convert x+x into x*2.0.  */
10169 	  if (operand_equal_p (arg0, arg1, 0)
10170 	      && SCALAR_FLOAT_TYPE_P (type))
10171 	    return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10172 				build_real (type, dconst2));
10173 
10174           /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10175              We associate floats only if the user has specified
10176              -fassociative-math.  */
10177           if (flag_associative_math
10178               && TREE_CODE (arg1) == PLUS_EXPR
10179               && TREE_CODE (arg0) != MULT_EXPR)
10180             {
10181               tree tree10 = TREE_OPERAND (arg1, 0);
10182               tree tree11 = TREE_OPERAND (arg1, 1);
10183               if (TREE_CODE (tree11) == MULT_EXPR
10184 		  && TREE_CODE (tree10) == MULT_EXPR)
10185                 {
10186                   tree tree0;
10187                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10188                   return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10189                 }
10190             }
10191           /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10192              We associate floats only if the user has specified
10193              -fassociative-math.  */
10194           if (flag_associative_math
10195               && TREE_CODE (arg0) == PLUS_EXPR
10196               && TREE_CODE (arg1) != MULT_EXPR)
10197             {
10198               tree tree00 = TREE_OPERAND (arg0, 0);
10199               tree tree01 = TREE_OPERAND (arg0, 1);
10200               if (TREE_CODE (tree01) == MULT_EXPR
10201 		  && TREE_CODE (tree00) == MULT_EXPR)
10202                 {
10203                   tree tree0;
10204                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10205                   return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10206                 }
10207             }
10208 	}
10209 
10210      bit_rotate:
10211       /* (A << C1) + (A >> C2), where A is unsigned and C1+C2 is the size
10212 	 of A, is a rotate of A by C1 bits.  */
10213       /* (A << B) + (A >> (Z - B)), where A is unsigned and Z is the size
10214 	 of A, is a rotate of A by B bits.  */
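      /* Concretely (illustrative, not part of the original source), for a
	 32-bit unsigned A:

	   (A << 3) + (A >> 29)        is A rotated left by 3 bits;
	   (A << B) + (A >> (32 - B))  is A rotated left by B bits.  */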
10215       {
10216 	enum tree_code code0, code1;
10217 	tree rtype;
10218 	code0 = TREE_CODE (arg0);
10219 	code1 = TREE_CODE (arg1);
10220 	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10221 	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10222 	    && operand_equal_p (TREE_OPERAND (arg0, 0),
10223 			        TREE_OPERAND (arg1, 0), 0)
10224 	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10225 	        TYPE_UNSIGNED (rtype))
10226 	    /* Only create rotates in complete modes.  Other cases are not
10227 	       expanded properly.  */
10228 	    && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10229 	  {
10230 	    tree tree01, tree11;
10231 	    enum tree_code code01, code11;
10232 
10233 	    tree01 = TREE_OPERAND (arg0, 1);
10234 	    tree11 = TREE_OPERAND (arg1, 1);
10235 	    STRIP_NOPS (tree01);
10236 	    STRIP_NOPS (tree11);
10237 	    code01 = TREE_CODE (tree01);
10238 	    code11 = TREE_CODE (tree11);
10239 	    if (code01 == INTEGER_CST
10240 		&& code11 == INTEGER_CST
10241 		&& TREE_INT_CST_HIGH (tree01) == 0
10242 		&& TREE_INT_CST_HIGH (tree11) == 0
10243 		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10244 		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10245 	      {
10246 		tem = build2_loc (loc, LROTATE_EXPR,
10247 				  TREE_TYPE (TREE_OPERAND (arg0, 0)),
10248 				  TREE_OPERAND (arg0, 0),
10249 				  code0 == LSHIFT_EXPR ? tree01 : tree11);
10250 		return fold_convert_loc (loc, type, tem);
10251 	      }
10252 	    else if (code11 == MINUS_EXPR)
10253 	      {
10254 		tree tree110, tree111;
10255 		tree110 = TREE_OPERAND (tree11, 0);
10256 		tree111 = TREE_OPERAND (tree11, 1);
10257 		STRIP_NOPS (tree110);
10258 		STRIP_NOPS (tree111);
10259 		if (TREE_CODE (tree110) == INTEGER_CST
10260 		    && 0 == compare_tree_int (tree110,
10261 					      TYPE_PRECISION
10262 					      (TREE_TYPE (TREE_OPERAND
10263 							  (arg0, 0))))
10264 		    && operand_equal_p (tree01, tree111, 0))
10265 		  return
10266 		    fold_convert_loc (loc, type,
10267 				      build2 ((code0 == LSHIFT_EXPR
10268 					       ? LROTATE_EXPR
10269 					       : RROTATE_EXPR),
10270 					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
10271 					      TREE_OPERAND (arg0, 0), tree01));
10272 	      }
10273 	    else if (code01 == MINUS_EXPR)
10274 	      {
10275 		tree tree010, tree011;
10276 		tree010 = TREE_OPERAND (tree01, 0);
10277 		tree011 = TREE_OPERAND (tree01, 1);
10278 		STRIP_NOPS (tree010);
10279 		STRIP_NOPS (tree011);
10280 		if (TREE_CODE (tree010) == INTEGER_CST
10281 		    && 0 == compare_tree_int (tree010,
10282 					      TYPE_PRECISION
10283 					      (TREE_TYPE (TREE_OPERAND
10284 							  (arg0, 0))))
10285 		    && operand_equal_p (tree11, tree011, 0))
10286 		    return fold_convert_loc
10287 		      (loc, type,
10288 		       build2 ((code0 != LSHIFT_EXPR
10289 				? LROTATE_EXPR
10290 				: RROTATE_EXPR),
10291 			       TREE_TYPE (TREE_OPERAND (arg0, 0)),
10292 			       TREE_OPERAND (arg0, 0), tree11));
10293 	      }
10294 	  }
10295       }
10296 
10297     associate:
10298       /* In most languages, we can't associate operations on floats through
10299 	 parentheses.  Rather than remember where the parentheses were, we
10300 	 don't associate floats at all, unless the user has specified
10301 	 -fassociative-math.
10302 	 And, we need to make sure type is not saturating.  */
10303 
10304       if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10305 	  && !TYPE_SATURATING (type))
10306 	{
10307 	  tree var0, con0, lit0, minus_lit0;
10308 	  tree var1, con1, lit1, minus_lit1;
10309 	  bool ok = true;
10310 
10311 	  /* Split both trees into variables, constants, and literals.  Then
10312 	     associate each group together, the constants with literals,
10313 	     then the result with variables.  This increases the chances of
10314 	     literals being recombined later and of generating relocatable
10315 	     expressions for the sum of a constant and literal.  */
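	  /* E.g. (illustrative, not part of the original source): for
	     (x + 4) + (y + 6) the variable parts are x and y and the
	     literal parts are 4 and 6; regrouping yields (x + y) + 10.  */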
10316 	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10317 	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10318 			     code == MINUS_EXPR);
10319 
10320 	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
10321 	  if (code == MINUS_EXPR)
10322 	    code = PLUS_EXPR;
10323 
10324 	  /* With undefined overflow we can only associate constants with one
10325 	     variable, and constants whose association doesn't overflow.  */
10326 	  if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10327 	      || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10328 	    {
10329 	      if (var0 && var1)
10330 		{
10331 		  tree tmp0 = var0;
10332 		  tree tmp1 = var1;
10333 
10334 		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
10335 		    tmp0 = TREE_OPERAND (tmp0, 0);
10336 		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
10337 		    tmp1 = TREE_OPERAND (tmp1, 0);
10338 		  /* The only case we can still associate with two variables
10339 		     is if they are the same, modulo negation.  */
10340 		  if (!operand_equal_p (tmp0, tmp1, 0))
10341 		    ok = false;
10342 		}
10343 
10344 	      if (ok && lit0 && lit1)
10345 		{
10346 		  tree tmp0 = fold_convert (type, lit0);
10347 		  tree tmp1 = fold_convert (type, lit1);
10348 
10349 		  if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10350 		      && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10351 		    ok = false;
10352 		}
10353 	    }
10354 
10355 	  /* Only do something if we found more than two objects.  Otherwise,
10356 	     nothing has changed and we risk infinite recursion.  */
10357 	  if (ok
10358 	      && (2 < ((var0 != 0) + (var1 != 0)
10359 		       + (con0 != 0) + (con1 != 0)
10360 		       + (lit0 != 0) + (lit1 != 0)
10361 		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
10362 	    {
10363 	      var0 = associate_trees (loc, var0, var1, code, type);
10364 	      con0 = associate_trees (loc, con0, con1, code, type);
10365 	      lit0 = associate_trees (loc, lit0, lit1, code, type);
10366 	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10367 
10368 	      /* Preserve the MINUS_EXPR if the negative part of the literal is
10369 		 greater than the positive part.  Otherwise, the multiplicative
10370 		 folding code (i.e extract_muldiv) may be fooled in case
10371 		 folding code (i.e. extract_muldiv) may be fooled when
10372 		 unsigned constants are subtracted, as in the following
10373 		 example: ((X*2 + 4) - 8U)/2.  */
10374 		{
10375 		  if (TREE_CODE (lit0) == INTEGER_CST
10376 		      && TREE_CODE (minus_lit0) == INTEGER_CST
10377 		      && tree_int_cst_lt (lit0, minus_lit0))
10378 		    {
10379 		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10380 						    MINUS_EXPR, type);
10381 		      lit0 = 0;
10382 		    }
10383 		  else
10384 		    {
10385 		      lit0 = associate_trees (loc, lit0, minus_lit0,
10386 					      MINUS_EXPR, type);
10387 		      minus_lit0 = 0;
10388 		    }
10389 		}
10390 	      if (minus_lit0)
10391 		{
10392 		  if (con0 == 0)
10393 		    return
10394 		      fold_convert_loc (loc, type,
10395 					associate_trees (loc, var0, minus_lit0,
10396 							 MINUS_EXPR, type));
10397 		  else
10398 		    {
10399 		      con0 = associate_trees (loc, con0, minus_lit0,
10400 					      MINUS_EXPR, type);
10401 		      return
10402 			fold_convert_loc (loc, type,
10403 					  associate_trees (loc, var0, con0,
10404 							   PLUS_EXPR, type));
10405 		    }
10406 		}
10407 
10408 	      con0 = associate_trees (loc, con0, lit0, code, type);
10409 	      return
10410 		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10411 							      code, type));
10412 	    }
10413 	}
10414 
10415       return NULL_TREE;
10416 
10417     case MINUS_EXPR:
10418       /* Pointer simplifications for subtraction, simple reassociations. */
10419       if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10420 	{
10421 	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10422 	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10423 	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10424 	    {
10425 	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10426 	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10427 	      tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10428 	      tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10429 	      return fold_build2_loc (loc, PLUS_EXPR, type,
10430 				  fold_build2_loc (loc, MINUS_EXPR, type,
10431 					       arg00, arg10),
10432 				  fold_build2_loc (loc, MINUS_EXPR, type,
10433 					       arg01, arg11));
10434 	    }
10435 	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10436 	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10437 	    {
10438 	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10439 	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10440 	      tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10441 				      fold_convert_loc (loc, type, arg1));
10442 	      if (tmp)
10443 	        return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10444 	    }
10445 	}
10446       /* A - (-B) -> A + B */
10447       if (TREE_CODE (arg1) == NEGATE_EXPR)
10448 	return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10449 			    fold_convert_loc (loc, type,
10450 					      TREE_OPERAND (arg1, 0)));
10451       /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
10452       if (TREE_CODE (arg0) == NEGATE_EXPR
10453 	  && (FLOAT_TYPE_P (type)
10454 	      || INTEGRAL_TYPE_P (type))
10455 	  && negate_expr_p (arg1)
10456 	  && reorder_operands_p (arg0, arg1))
10457 	return fold_build2_loc (loc, MINUS_EXPR, type,
10458 			    fold_convert_loc (loc, type,
10459 					      negate_expr (arg1)),
10460 			    fold_convert_loc (loc, type,
10461 					      TREE_OPERAND (arg0, 0)));
10462       /* Convert -A - 1 to ~A.  */
10463       if (INTEGRAL_TYPE_P (type)
10464 	  && TREE_CODE (arg0) == NEGATE_EXPR
10465 	  && integer_onep (arg1)
10466 	  && !TYPE_OVERFLOW_TRAPS (type))
10467 	return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10468 			    fold_convert_loc (loc, type,
10469 					      TREE_OPERAND (arg0, 0)));
10470 
10471       /* Convert -1 - A to ~A.  */
10472       if (INTEGRAL_TYPE_P (type)
10473 	  && integer_all_onesp (arg0))
10474 	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10475 
10476 
10477       /* X - (X / CST) * CST is X % CST.  */
10478       if (INTEGRAL_TYPE_P (type)
10479 	  && TREE_CODE (arg1) == MULT_EXPR
10480 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10481 	  && operand_equal_p (arg0,
10482 			      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10483 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10484 			      TREE_OPERAND (arg1, 1), 0))
10485 	return
10486 	  fold_convert_loc (loc, type,
10487 			    fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10488 					 arg0, TREE_OPERAND (arg1, 1)));
10489 
10490       if (! FLOAT_TYPE_P (type))
10491 	{
10492 	  if (integer_zerop (arg0))
10493 	    return negate_expr (fold_convert_loc (loc, type, arg1));
10494 	  if (integer_zerop (arg1))
10495 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10496 
10497 	  /* Fold A - (A & B) into ~B & A.  */
10498 	  if (!TREE_SIDE_EFFECTS (arg0)
10499 	      && TREE_CODE (arg1) == BIT_AND_EXPR)
10500 	    {
10501 	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10502 		{
10503 		  tree arg10 = fold_convert_loc (loc, type,
10504 						 TREE_OPERAND (arg1, 0));
10505 		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
10506 				      fold_build1_loc (loc, BIT_NOT_EXPR,
10507 						   type, arg10),
10508 				      fold_convert_loc (loc, type, arg0));
10509 		}
10510 	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10511 		{
10512 		  tree arg11 = fold_convert_loc (loc,
10513 						 type, TREE_OPERAND (arg1, 1));
10514 		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
10515 				      fold_build1_loc (loc, BIT_NOT_EXPR,
10516 						   type, arg11),
10517 				      fold_convert_loc (loc, type, arg0));
10518 		}
10519 	    }
10520 
10521 	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10522 	     any power of 2 minus 1.  */
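	  /* Sanity check (illustrative, not part of the original source):
	     with B == 7 and A == 10,

	       (10 & ~7) - (10 & 7)  ==  8 - 2   ==  6
	       (10 ^ 7) - 7          ==  13 - 7  ==  6.  */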
10523 	  if (TREE_CODE (arg0) == BIT_AND_EXPR
10524 	      && TREE_CODE (arg1) == BIT_AND_EXPR
10525 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
10526 				  TREE_OPERAND (arg1, 0), 0))
10527 	    {
10528 	      tree mask0 = TREE_OPERAND (arg0, 1);
10529 	      tree mask1 = TREE_OPERAND (arg1, 1);
10530 	      tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10531 
10532 	      if (operand_equal_p (tem, mask1, 0))
10533 		{
10534 		  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10535 				     TREE_OPERAND (arg0, 0), mask1);
10536 		  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10537 		}
10538 	    }
10539 	}
10540 
10541       /* See if ARG1 is zero and X - ARG1 reduces to X.  */
10542       else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10543 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10544 
10545       /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
10546 	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10547 	 (-ARG1 + ARG0) reduces to -ARG1.  */
10548       else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10549 	return negate_expr (fold_convert_loc (loc, type, arg1));
10550 
10551       /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10552 	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
10553 	 signed zeros are involved.  */
10554       if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10555 	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10556 	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10557         {
10558 	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10559 	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10560 	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10561 	  bool arg0rz = false, arg0iz = false;
10562 	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
10563 	      || (arg0i && (arg0iz = real_zerop (arg0i))))
10564 	    {
10565 	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10566 	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10567 	      if (arg0rz && arg1i && real_zerop (arg1i))
10568 	        {
10569 		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10570 					 arg1r ? arg1r
10571 					 : build1 (REALPART_EXPR, rtype, arg1));
10572 		  tree ip = arg0i ? arg0i
10573 		    : build1 (IMAGPART_EXPR, rtype, arg0);
10574 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10575 		}
10576 	      else if (arg0iz && arg1r && real_zerop (arg1r))
10577 	        {
10578 		  tree rp = arg0r ? arg0r
10579 		    : build1 (REALPART_EXPR, rtype, arg0);
10580 		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10581 					 arg1i ? arg1i
10582 					 : build1 (IMAGPART_EXPR, rtype, arg1));
10583 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10584 		}
10585 	    }
10586 	}
10587 
10588       /* Fold &x - &x.  This can happen from &x.foo - &x.
10589 	 This is unsafe for certain floats even in non-IEEE formats.
10590 	 In IEEE, it is unsafe because it does wrong for NaNs.
10591 	 Also note that operand_equal_p is always false if an operand
10592 	 is volatile.  */
10593 
10594       if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10595 	  && operand_equal_p (arg0, arg1, 0))
10596 	return build_zero_cst (type);
10597 
10598       /* A - B -> A + (-B) if B is easily negatable.  */
10599       if (negate_expr_p (arg1)
10600 	  && ((FLOAT_TYPE_P (type)
10601                /* Avoid this transformation if B is a positive REAL_CST.  */
10602 	       && (TREE_CODE (arg1) != REAL_CST
10603 		   ||  REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10604 	      || INTEGRAL_TYPE_P (type)))
10605 	return fold_build2_loc (loc, PLUS_EXPR, type,
10606 			    fold_convert_loc (loc, type, arg0),
10607 			    fold_convert_loc (loc, type,
10608 					      negate_expr (arg1)));
10609 
10610       /* Try folding difference of addresses.  */
10611       {
10612 	HOST_WIDE_INT diff;
10613 
10614 	if ((TREE_CODE (arg0) == ADDR_EXPR
10615 	     || TREE_CODE (arg1) == ADDR_EXPR)
10616 	    && ptr_difference_const (arg0, arg1, &diff))
10617 	  return build_int_cst_type (type, diff);
10618       }
10619 
10620       /* Fold &a[i] - &a[j] to i-j.  */
10621       if (TREE_CODE (arg0) == ADDR_EXPR
10622 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10623 	  && TREE_CODE (arg1) == ADDR_EXPR
10624 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10625         {
10626 	  tree tem = fold_addr_of_array_ref_difference (loc, type,
10627 							TREE_OPERAND (arg0, 0),
10628 							TREE_OPERAND (arg1, 0));
10629 	  if (tem)
10630 	    return tem;
10631 	}
10632 
10633       if (FLOAT_TYPE_P (type)
10634 	  && flag_unsafe_math_optimizations
10635 	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10636 	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10637 	  && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10638 	return tem;
10639 
10640       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10641 	 one.  Make sure the type is not saturating and has the signedness of
10642 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10643 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
10644       if ((TREE_CODE (arg0) == MULT_EXPR
10645 	   || TREE_CODE (arg1) == MULT_EXPR)
10646 	  && !TYPE_SATURATING (type)
10647 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10648 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10649 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
10650         {
10651 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10652 	  if (tem)
10653 	    return tem;
10654 	}
10655 
10656       goto associate;
10657 
10658     case MULT_EXPR:
10659       /* (-A) * (-B) -> A * B  */
10660       if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10661 	return fold_build2_loc (loc, MULT_EXPR, type,
10662 			    fold_convert_loc (loc, type,
10663 					      TREE_OPERAND (arg0, 0)),
10664 			    fold_convert_loc (loc, type,
10665 					      negate_expr (arg1)));
10666       if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10667 	return fold_build2_loc (loc, MULT_EXPR, type,
10668 			    fold_convert_loc (loc, type,
10669 					      negate_expr (arg0)),
10670 			    fold_convert_loc (loc, type,
10671 					      TREE_OPERAND (arg1, 0)));
10672 
10673       if (! FLOAT_TYPE_P (type))
10674 	{
10675 	  if (integer_zerop (arg1))
10676 	    return omit_one_operand_loc (loc, type, arg1, arg0);
10677 	  if (integer_onep (arg1))
10678 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10679 	  /* Transform x * -1 into -x.  Make sure to do the negation
10680 	     on the original operand with conversions not stripped
10681 	     because we can only strip non-sign-changing conversions.  */
10682 	  if (integer_all_onesp (arg1))
10683 	    return fold_convert_loc (loc, type, negate_expr (op0));
10684 	  /* Transform x * -C into -x * C if x is easily negatable.  */
10685 	  if (TREE_CODE (arg1) == INTEGER_CST
10686 	      && tree_int_cst_sgn (arg1) == -1
10687 	      && negate_expr_p (arg0)
10688 	      && (tem = negate_expr (arg1)) != arg1
10689 	      && !TREE_OVERFLOW (tem))
10690 	    return fold_build2_loc (loc, MULT_EXPR, type,
10691 	    			fold_convert_loc (loc, type,
10692 						  negate_expr (arg0)),
10693 				tem);
10694 
10695 	  /* (a * (1 << b)) is (a << b)  */
10696 	  if (TREE_CODE (arg1) == LSHIFT_EXPR
10697 	      && integer_onep (TREE_OPERAND (arg1, 0)))
10698 	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10699 				TREE_OPERAND (arg1, 1));
10700 	  if (TREE_CODE (arg0) == LSHIFT_EXPR
10701 	      && integer_onep (TREE_OPERAND (arg0, 0)))
10702 	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10703 				TREE_OPERAND (arg0, 1));
10704 
10705 	  /* (A + A) * C -> A * 2 * C  */
10706 	  if (TREE_CODE (arg0) == PLUS_EXPR
10707 	      && TREE_CODE (arg1) == INTEGER_CST
10708 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
10709 			          TREE_OPERAND (arg0, 1), 0))
10710 	    return fold_build2_loc (loc, MULT_EXPR, type,
10711 				omit_one_operand_loc (loc, type,
10712 						  TREE_OPERAND (arg0, 0),
10713 						  TREE_OPERAND (arg0, 1)),
10714 				fold_build2_loc (loc, MULT_EXPR, type,
10715 					     build_int_cst (type, 2), arg1));
10716 
10717 	  strict_overflow_p = false;
10718 	  if (TREE_CODE (arg1) == INTEGER_CST
10719 	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10720 					     &strict_overflow_p)))
10721 	    {
10722 	      if (strict_overflow_p)
10723 		fold_overflow_warning (("assuming signed overflow does not "
10724 					"occur when simplifying "
10725 					"multiplication"),
10726 				       WARN_STRICT_OVERFLOW_MISC);
10727 	      return fold_convert_loc (loc, type, tem);
10728 	    }
10729 
10730 	  /* Optimize z * conj(z) for integer complex numbers.  */
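	  /* With z = a + bi, z * conj(z) == a*a + b*b, so the folded
	     result is a complex value with zero imaginary part.  */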
10731 	  if (TREE_CODE (arg0) == CONJ_EXPR
10732 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10733 	    return fold_mult_zconjz (loc, type, arg1);
10734 	  if (TREE_CODE (arg1) == CONJ_EXPR
10735 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10736 	    return fold_mult_zconjz (loc, type, arg0);
10737 	}
10738       else
10739 	{
10740 	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
10741 	     when x is NaN, since x * 0 is also NaN.  Nor are they the
10742 	     same in modes with signed zeros, since multiplying a
10743 	     negative value by 0 gives -0, not +0.  */
10744 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10745 	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10746 	      && real_zerop (arg1))
10747 	    return omit_one_operand_loc (loc, type, arg1, arg0);
10748 	  /* In IEEE floating point, x*1 is not equivalent to x for snans.
10749 	     Likewise for complex arithmetic with signed zeros.  */
10750 	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10751 	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10752 		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10753 	      && real_onep (arg1))
10754 	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10755 
10756 	  /* Transform x * -1.0 into -x.  */
10757 	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10758 	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10759 		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10760 	      && real_minus_onep (arg1))
10761 	    return fold_convert_loc (loc, type, negate_expr (arg0));
10762 
10763 	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
10764 	     the result for floating-point types due to rounding, so it is
10765 	     applied only if -fassociative-math was specified.  */
10766 	  if (flag_associative_math
10767 	      && TREE_CODE (arg0) == RDIV_EXPR
10768 	      && TREE_CODE (arg1) == REAL_CST
10769 	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10770 	    {
10771 	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10772 				      arg1);
10773 	      if (tem)
10774 		return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10775 				    TREE_OPERAND (arg0, 1));
10776 	    }
10777 
10778           /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
10779 	  if (operand_equal_p (arg0, arg1, 0))
10780 	    {
10781 	      tree tem = fold_strip_sign_ops (arg0);
10782 	      if (tem != NULL_TREE)
10783 		{
10784 		  tem = fold_convert_loc (loc, type, tem);
10785 		  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10786 		}
10787 	    }
10788 
10789 	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10790 	     This is not the same for NaNs or if signed zeros are
10791 	     involved.  */
10792 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10793               && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10794 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10795 	      && TREE_CODE (arg1) == COMPLEX_CST
10796 	      && real_zerop (TREE_REALPART (arg1)))
10797 	    {
10798 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10799 	      if (real_onep (TREE_IMAGPART (arg1)))
10800 		return
10801 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
10802 			       negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10803 							     rtype, arg0)),
10804 			       fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10805 	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
10806 		return
10807 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
10808 			       fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10809 			       negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10810 							     rtype, arg0)));
10811 	    }
10812 
10813 	  /* Optimize z * conj(z) for floating point complex numbers.
10814 	     Guarded by flag_unsafe_math_optimizations as non-finite
10815 	     imaginary components don't produce scalar results.  */
10816 	  if (flag_unsafe_math_optimizations
10817 	      && TREE_CODE (arg0) == CONJ_EXPR
10818 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10819 	    return fold_mult_zconjz (loc, type, arg1);
10820 	  if (flag_unsafe_math_optimizations
10821 	      && TREE_CODE (arg1) == CONJ_EXPR
10822 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10823 	    return fold_mult_zconjz (loc, type, arg0);
10824 
10825 	  if (flag_unsafe_math_optimizations)
10826 	    {
10827 	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10828 	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10829 
10830 	      /* Optimizations of root(...)*root(...).  */
10831 	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10832 		{
10833 		  tree rootfn, arg;
10834 		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
10835 		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
10836 
10837 		  /* Optimize sqrt(x)*sqrt(x) as x.  */
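		  /* Not an identity for x < 0, where sqrt(x)*sqrt(x)
		     is NaN rather than x; hence the
		     flag_unsafe_math_optimizations guard on this
		     whole block.  */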
10838 		  if (BUILTIN_SQRT_P (fcode0)
10839 		      && operand_equal_p (arg00, arg10, 0)
10840 		      && ! HONOR_SNANS (TYPE_MODE (type)))
10841 		    return arg00;
10842 
10843 	          /* Optimize root(x)*root(y) as root(x*y).  */
10844 		  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10845 		  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10846 		  return build_call_expr_loc (loc, rootfn, 1, arg);
10847 		}
10848 
10849 	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
10850 	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10851 		{
10852 		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10853 		  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10854 					  CALL_EXPR_ARG (arg0, 0),
10855 					  CALL_EXPR_ARG (arg1, 0));
10856 		  return build_call_expr_loc (loc, expfn, 1, arg);
10857 		}
10858 
10859 	      /* Optimizations of pow(...)*pow(...).  */
10860 	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10861 		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10862 		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10863 		{
10864 		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
10865 		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
10866 		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
10867 		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
10868 
10869 		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
10870 		  if (operand_equal_p (arg01, arg11, 0))
10871 		    {
10872 		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10873 		      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10874 					      arg00, arg10);
10875 		      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10876 		    }
10877 
10878 		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
10879 		  if (operand_equal_p (arg00, arg10, 0))
10880 		    {
10881 		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10882 		      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10883 					      arg01, arg11);
10884 		      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10885 		    }
10886 		}
10887 
10888 	      /* Optimize tan(x)*cos(x) as sin(x).  */
10889 	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10890 		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10891 		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10892 		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10893 		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10894 		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10895 		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10896 				      CALL_EXPR_ARG (arg1, 0), 0))
10897 		{
10898 		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10899 
10900 		  if (sinfn != NULL_TREE)
10901 		    return build_call_expr_loc (loc, sinfn, 1,
10902 					    CALL_EXPR_ARG (arg0, 0));
10903 		}
10904 
10905 	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
10906 	      if (fcode1 == BUILT_IN_POW
10907 		  || fcode1 == BUILT_IN_POWF
10908 		  || fcode1 == BUILT_IN_POWL)
10909 		{
10910 		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
10911 		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
10912 		  if (TREE_CODE (arg11) == REAL_CST
10913 		      && !TREE_OVERFLOW (arg11)
10914 		      && operand_equal_p (arg0, arg10, 0))
10915 		    {
10916 		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10917 		      REAL_VALUE_TYPE c;
10918 		      tree arg;
10919 
10920 		      c = TREE_REAL_CST (arg11);
10921 		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10922 		      arg = build_real (type, c);
10923 		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10924 		    }
10925 		}
10926 
10927 	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
10928 	      if (fcode0 == BUILT_IN_POW
10929 		  || fcode0 == BUILT_IN_POWF
10930 		  || fcode0 == BUILT_IN_POWL)
10931 		{
10932 		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
10933 		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
10934 		  if (TREE_CODE (arg01) == REAL_CST
10935 		      && !TREE_OVERFLOW (arg01)
10936 		      && operand_equal_p (arg1, arg00, 0))
10937 		    {
10938 		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10939 		      REAL_VALUE_TYPE c;
10940 		      tree arg;
10941 
10942 		      c = TREE_REAL_CST (arg01);
10943 		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10944 		      arg = build_real (type, c);
10945 		      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10946 		    }
10947 		}
10948 
10949 	      /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x.  */
10950 	      if (!in_gimple_form
10951 		  && optimize
10952 		  && operand_equal_p (arg0, arg1, 0))
10953 		{
10954 		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10955 
10956 		  if (powfn)
10957 		    {
10958 		      tree arg = build_real (type, dconst2);
10959 		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10960 		    }
10961 		}
10962 	    }
10963 	}
10964       goto associate;
10965 
10966     case BIT_IOR_EXPR:
10967     bit_ior:
10968       if (integer_all_onesp (arg1))
10969 	return omit_one_operand_loc (loc, type, arg1, arg0);
10970       if (integer_zerop (arg1))
10971 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10972       if (operand_equal_p (arg0, arg1, 0))
10973 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10974 
10975       /* ~X | X is -1.  */
10976       if (TREE_CODE (arg0) == BIT_NOT_EXPR
10977 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10978 	{
10979 	  t1 = build_zero_cst (type);
10980 	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10981 	  return omit_one_operand_loc (loc, type, t1, arg1);
10982 	}
10983 
10984       /* X | ~X is -1.  */
10985       if (TREE_CODE (arg1) == BIT_NOT_EXPR
10986 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10987 	{
10988 	  t1 = build_zero_cst (type);
10989 	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10990 	  return omit_one_operand_loc (loc, type, t1, arg0);
10991 	}
10992 
10993       /* Canonicalize (X & C1) | C2.  */
10994       if (TREE_CODE (arg0) == BIT_AND_EXPR
10995 	  && TREE_CODE (arg1) == INTEGER_CST
10996 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10997 	{
10998 	  double_int c1, c2, c3, msk;
10999 	  int width = TYPE_PRECISION (type), w;
11000 	  c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11001 	  c2 = tree_to_double_int (arg1);
11002 
11003 	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
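	  /* Every bit C1 can keep is already set in C2, so the value is
	     just C2 and X is kept only for its side effects:
	     (x++ & 3) | 7 becomes (x++, 7).  */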
11004 	  if (double_int_equal_p (double_int_and (c1, c2), c1))
11005 	    return omit_one_operand_loc (loc, type, arg1,
11006 					 TREE_OPERAND (arg0, 0));
11007 
11008 	  msk = double_int_mask (width);
11009 
11010 	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
11011 	  if (double_int_zero_p (double_int_and_not (msk,
11012 						     double_int_ior (c1, c2))))
11013 	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11014 				    TREE_OPERAND (arg0, 0), arg1);
11015 
11016 	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11017 	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11018 	     mode which allows further optimizations.  */
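	  /* E.g. (X & 3) | 2 becomes (X & 1) | 2, while (X & 0xf0) | 0x0f
	     widens C1 to a byte mask: (X & 0xff) | 0x0f.  */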
11019 	  c1 = double_int_and (c1, msk);
11020 	  c2 = double_int_and (c2, msk);
11021 	  c3 = double_int_and_not (c1, c2);
11022 	  for (w = BITS_PER_UNIT;
11023 	       w <= width && w <= HOST_BITS_PER_WIDE_INT;
11024 	       w <<= 1)
11025 	    {
11026 	      unsigned HOST_WIDE_INT mask
11027 		= (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11028 	      if (((c1.low | c2.low) & mask) == mask
11029 		  && (c1.low & ~mask) == 0 && c1.high == 0)
11030 		{
11031 		  c3 = uhwi_to_double_int (mask);
11032 		  break;
11033 		}
11034 	    }
11035 	  if (!double_int_equal_p (c3, c1))
11036 	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11037 				    fold_build2_loc (loc, BIT_AND_EXPR, type,
11038 						     TREE_OPERAND (arg0, 0),
11039 						     double_int_to_tree (type,
11040 									 c3)),
11041 				    arg1);
11042 	}
11043 
11044       /* (X & Y) | Y is (X, Y).  */
11045       if (TREE_CODE (arg0) == BIT_AND_EXPR
11046 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11047 	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11048       /* (X & Y) | X is (Y, X).  */
11049       if (TREE_CODE (arg0) == BIT_AND_EXPR
11050 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11051 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11052 	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11053       /* X | (X & Y) is (Y, X).  */
11054       if (TREE_CODE (arg1) == BIT_AND_EXPR
11055 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11056 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11057 	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11058       /* X | (Y & X) is (Y, X).  */
11059       if (TREE_CODE (arg1) == BIT_AND_EXPR
11060 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11061 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11062 	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11063 
11064       /* (X & ~Y) | (~X & Y) is X ^ Y */
11065       if (TREE_CODE (arg0) == BIT_AND_EXPR
11066 	  && TREE_CODE (arg1) == BIT_AND_EXPR)
11067         {
11068 	  tree a0, a1, l0, l1, n0, n1;
11069 
11070 	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11071 	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11072 
11073 	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11074 	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11075 
11076 	  n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11077 	  n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11078 
11079 	  if ((operand_equal_p (n0, a0, 0)
11080 	       && operand_equal_p (n1, a1, 0))
11081 	      || (operand_equal_p (n0, a1, 0)
11082 		  && operand_equal_p (n1, a0, 0)))
11083 	    return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11084 	}
11085 
11086       t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11087       if (t1 != NULL_TREE)
11088 	return t1;
11089 
11090       /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11091 
11092 	 This results in more efficient code for machines without a NAND
11093 	 instruction.  Combine will canonicalize to the first form
11094 	 which will allow use of NAND instructions provided by the
11095 	 backend if they exist.  */
11096       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11097 	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
11098 	{
11099 	  return
11100 	    fold_build1_loc (loc, BIT_NOT_EXPR, type,
11101 			 build2 (BIT_AND_EXPR, type,
11102 				 fold_convert_loc (loc, type,
11103 						   TREE_OPERAND (arg0, 0)),
11104 				 fold_convert_loc (loc, type,
11105 						   TREE_OPERAND (arg1, 0))));
11106 	}
11107 
11108       /* See if this can be simplified into a rotate first.  If that
11109 	 is unsuccessful continue in the association code.  */
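      /* E.g. (X << 3) | (X >> 29) on a 32-bit unsigned X is X rotated
	 left by 3.  */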
11110       goto bit_rotate;
11111 
11112     case BIT_XOR_EXPR:
11113       if (integer_zerop (arg1))
11114 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11115       if (integer_all_onesp (arg1))
11116 	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11117       if (operand_equal_p (arg0, arg1, 0))
11118 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11119 
11120       /* ~X ^ X is -1.  */
11121       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11122 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11123 	{
11124 	  t1 = build_zero_cst (type);
11125 	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11126 	  return omit_one_operand_loc (loc, type, t1, arg1);
11127 	}
11128 
11129       /* X ^ ~X is -1.  */
11130       if (TREE_CODE (arg1) == BIT_NOT_EXPR
11131 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11132 	{
11133 	  t1 = build_zero_cst (type);
11134 	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11135 	  return omit_one_operand_loc (loc, type, t1, arg0);
11136 	}
11137 
11138       /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11139          with a constant, and the two constants have no bits in common,
11140 	 we should treat this as a BIT_IOR_EXPR since this may produce more
11141 	 simplifications.  */
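      /* E.g. (X & 0xf0) ^ (Y & 0x0f): the masked values share no bits,
	 so the XOR is equivalent to (X & 0xf0) | (Y & 0x0f).  */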
11142       if (TREE_CODE (arg0) == BIT_AND_EXPR
11143 	  && TREE_CODE (arg1) == BIT_AND_EXPR
11144 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11145 	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11146 	  && integer_zerop (const_binop (BIT_AND_EXPR,
11147 					 TREE_OPERAND (arg0, 1),
11148 					 TREE_OPERAND (arg1, 1))))
11149 	{
11150 	  code = BIT_IOR_EXPR;
11151 	  goto bit_ior;
11152 	}
11153 
11154       /* (X | Y) ^ X -> Y & ~X.  */
11155       if (TREE_CODE (arg0) == BIT_IOR_EXPR
11156           && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11157         {
11158 	  tree t2 = TREE_OPERAND (arg0, 1);
11159 	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11160 			    arg1);
11161 	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11162 			    fold_convert_loc (loc, type, t2),
11163 			    fold_convert_loc (loc, type, t1));
11164 	  return t1;
11165 	}
11166 
11167       /* (Y | X) ^ X -> Y & ~X.  */
11168       if (TREE_CODE (arg0) == BIT_IOR_EXPR
11169           && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11170         {
11171 	  tree t2 = TREE_OPERAND (arg0, 0);
11172 	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11173 			    arg1);
11174 	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11175 			    fold_convert_loc (loc, type, t2),
11176 			    fold_convert_loc (loc, type, t1));
11177 	  return t1;
11178 	}
11179 
11180       /* X ^ (X | Y) -> Y & ~X.  */
11181       if (TREE_CODE (arg1) == BIT_IOR_EXPR
11182           && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11183         {
11184 	  tree t2 = TREE_OPERAND (arg1, 1);
11185 	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11186 			    arg0);
11187 	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11188 			    fold_convert_loc (loc, type, t2),
11189 			    fold_convert_loc (loc, type, t1));
11190 	  return t1;
11191 	}
11192 
11193       /* X ^ (Y | X) -> Y & ~X.  */
11194       if (TREE_CODE (arg1) == BIT_IOR_EXPR
11195           && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11196         {
11197 	  tree t2 = TREE_OPERAND (arg1, 0);
11198 	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11199 			    arg0);
11200 	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11201 			    fold_convert_loc (loc, type, t2),
11202 			    fold_convert_loc (loc, type, t1));
11203 	  return t1;
11204 	}
11205 
11206       /* Convert ~X ^ ~Y to X ^ Y.  */
11207       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11208 	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
11209 	return fold_build2_loc (loc, code, type,
11210 			    fold_convert_loc (loc, type,
11211 					      TREE_OPERAND (arg0, 0)),
11212 			    fold_convert_loc (loc, type,
11213 					      TREE_OPERAND (arg1, 0)));
11214 
11215       /* Convert ~X ^ C to X ^ ~C.  */
11216       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11217 	  && TREE_CODE (arg1) == INTEGER_CST)
11218 	return fold_build2_loc (loc, code, type,
11219 			    fold_convert_loc (loc, type,
11220 					      TREE_OPERAND (arg0, 0)),
11221 			    fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11222 
11223       /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
11224       if (TREE_CODE (arg0) == BIT_AND_EXPR
11225 	  && integer_onep (TREE_OPERAND (arg0, 1))
11226 	  && integer_onep (arg1))
11227 	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11228 			    build_int_cst (TREE_TYPE (arg0), 0));
11229 
11230       /* Fold (X & Y) ^ Y as ~X & Y.  */
11231       if (TREE_CODE (arg0) == BIT_AND_EXPR
11232 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11233 	{
11234 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11235 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11236 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11237 			      fold_convert_loc (loc, type, arg1));
11238 	}
11239       /* Fold (X & Y) ^ X as ~Y & X.  */
11240       if (TREE_CODE (arg0) == BIT_AND_EXPR
11241 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11242 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11243 	{
11244 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11245 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11246 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11247 			      fold_convert_loc (loc, type, arg1));
11248 	}
11249       /* Fold X ^ (X & Y) as X & ~Y.  */
11250       if (TREE_CODE (arg1) == BIT_AND_EXPR
11251 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11252 	{
11253 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11254 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11255 			      fold_convert_loc (loc, type, arg0),
11256 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11257 	}
11258       /* Fold X ^ (Y & X) as ~Y & X.  */
11259       if (TREE_CODE (arg1) == BIT_AND_EXPR
11260 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11261 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11262 	{
11263 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11264 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11265 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11266 			      fold_convert_loc (loc, type, arg0));
11267 	}
11268 
11269       /* See if this can be simplified into a rotate first.  If that
11270 	 is unsuccessful continue in the association code.  */
11271       goto bit_rotate;
11272 
11273     case BIT_AND_EXPR:
11274       if (integer_all_onesp (arg1))
11275 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11276       if (integer_zerop (arg1))
11277 	return omit_one_operand_loc (loc, type, arg1, arg0);
11278       if (operand_equal_p (arg0, arg1, 0))
11279 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11280 
11281       /* ~X & X, (X == 0) & X, and !X & X are always zero.  */
11282       if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11283 	   || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11284 	   || (TREE_CODE (arg0) == EQ_EXPR
11285 	       && integer_zerop (TREE_OPERAND (arg0, 1))))
11286 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11287 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11288 
11289       /* X & ~X , X & (X == 0), and X & !X are always zero.  */
11290       if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11291 	   || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11292 	   || (TREE_CODE (arg1) == EQ_EXPR
11293 	       && integer_zerop (TREE_OPERAND (arg1, 1))))
11294 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11295 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11296 
11297       /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
11298       if (TREE_CODE (arg0) == BIT_IOR_EXPR
11299 	  && TREE_CODE (arg1) == INTEGER_CST
11300 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11301 	{
11302 	  tree tmp1 = fold_convert_loc (loc, type, arg1);
11303 	  tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11304 	  tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11305 	  tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11306 	  tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11307 	  return
11308 	    fold_convert_loc (loc, type,
11309 			      fold_build2_loc (loc, BIT_IOR_EXPR,
11310 					   type, tmp2, tmp3));
11311 	}
11312 
11313       /* (X | Y) & Y is (X, Y).  */
11314       if (TREE_CODE (arg0) == BIT_IOR_EXPR
11315 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11316 	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11317       /* (X | Y) & X is (Y, X).  */
11318       if (TREE_CODE (arg0) == BIT_IOR_EXPR
11319 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11320 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11321 	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11322       /* X & (X | Y) is (Y, X).  */
11323       if (TREE_CODE (arg1) == BIT_IOR_EXPR
11324 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11325 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11326 	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11327       /* X & (Y | X) is (Y, X).  */
11328       if (TREE_CODE (arg1) == BIT_IOR_EXPR
11329 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11330 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11331 	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11332 
11333       /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
11334       if (TREE_CODE (arg0) == BIT_XOR_EXPR
11335 	  && integer_onep (TREE_OPERAND (arg0, 1))
11336 	  && integer_onep (arg1))
11337 	{
11338 	  tem = TREE_OPERAND (arg0, 0);
11339 	  return fold_build2_loc (loc, EQ_EXPR, type,
11340 			      fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11341 					   build_int_cst (TREE_TYPE (tem), 1)),
11342 			      build_int_cst (TREE_TYPE (tem), 0));
11343 	}
11344       /* Fold ~X & 1 as (X & 1) == 0.  */
11345       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11346 	  && integer_onep (arg1))
11347 	{
11348 	  tem = TREE_OPERAND (arg0, 0);
11349 	  return fold_build2_loc (loc, EQ_EXPR, type,
11350 			      fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11351 					   build_int_cst (TREE_TYPE (tem), 1)),
11352 			      build_int_cst (TREE_TYPE (tem), 0));
11353 	}
11354       /* Fold !X & 1 as X == 0.  */
11355       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11356 	  && integer_onep (arg1))
11357 	{
11358 	  tem = TREE_OPERAND (arg0, 0);
11359 	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
11360 				  build_int_cst (TREE_TYPE (tem), 0));
11361 	}
11362 
11363       /* Fold (X ^ Y) & Y as ~X & Y.  */
11364       if (TREE_CODE (arg0) == BIT_XOR_EXPR
11365 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11366 	{
11367 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11368 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11369 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11370 			      fold_convert_loc (loc, type, arg1));
11371 	}
11372       /* Fold (X ^ Y) & X as ~Y & X.  */
11373       if (TREE_CODE (arg0) == BIT_XOR_EXPR
11374 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11375 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11376 	{
11377 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11378 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11379 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11380 			      fold_convert_loc (loc, type, arg1));
11381 	}
11382       /* Fold X & (X ^ Y) as X & ~Y.  */
11383       if (TREE_CODE (arg1) == BIT_XOR_EXPR
11384 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11385 	{
11386 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11387 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11388 			      fold_convert_loc (loc, type, arg0),
11389 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11390 	}
11391       /* Fold X & (Y ^ X) as ~Y & X.  */
11392       if (TREE_CODE (arg1) == BIT_XOR_EXPR
11393 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11394 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11395 	{
11396 	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11397 	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11398 			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11399 			      fold_convert_loc (loc, type, arg0));
11400 	}
11401 
11402       /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11403 	 ((A & N) + B) & M -> (A + B) & M
11404 	 Similarly if (N & M) == 0,
11405 	 ((A | N) + B) & M -> (A + B) & M
11406 	 and for - instead of + (or unary - instead of +)
11407 	 and/or ^ instead of |.
11408 	 If B is constant and (B & M) == 0, fold into A & M.  */
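      /* E.g. with M == 0xff, ((A & 0x1ff) + B) & 0xff becomes
	 (A + B) & 0xff: carries propagate only upward, so operand bits
	 above M can never reach the masked result.  Likewise
	 ((A | 0x100) + B) & 0xff becomes (A + B) & 0xff.  */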
11409       if (host_integerp (arg1, 1))
11410 	{
11411 	  unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11412 	  if (~cst1 && (cst1 & (cst1 + 1)) == 0
11413 	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11414 	      && (TREE_CODE (arg0) == PLUS_EXPR
11415 		  || TREE_CODE (arg0) == MINUS_EXPR
11416 		  || TREE_CODE (arg0) == NEGATE_EXPR)
11417 	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11418 		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11419 	    {
11420 	      tree pmop[2];
11421 	      int which = 0;
11422 	      unsigned HOST_WIDE_INT cst0;
11423 
11424 	      /* Now we know that arg0 is (C + D) or (C - D) or
11425 		 -C and arg1 (M) == (1LL << cst) - 1.
11426 		 Store C into PMOP[0] and D into PMOP[1].  */
11427 	      pmop[0] = TREE_OPERAND (arg0, 0);
11428 	      pmop[1] = NULL;
11429 	      if (TREE_CODE (arg0) != NEGATE_EXPR)
11430 		{
11431 		  pmop[1] = TREE_OPERAND (arg0, 1);
11432 		  which = 1;
11433 		}
11434 
11435 	      if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11436 		  || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11437 		      & cst1) != cst1)
11438 		which = -1;
11439 
11440 	      for (; which >= 0; which--)
11441 		switch (TREE_CODE (pmop[which]))
11442 		  {
11443 		  case BIT_AND_EXPR:
11444 		  case BIT_IOR_EXPR:
11445 		  case BIT_XOR_EXPR:
11446 		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11447 			!= INTEGER_CST)
11448 		      break;
11449 		    /* tree_low_cst not used, because we don't care about
11450 		       the upper bits.  */
11451 		    cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11452 		    cst0 &= cst1;
11453 		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11454 		      {
11455 			if (cst0 != cst1)
11456 			  break;
11457 		      }
11458 		    else if (cst0 != 0)
11459 		      break;
11460 		    /* If C or D is of the form (A & N) where
11461 		       (N & M) == M, or of the form (A | N) or
11462 		       (A ^ N) where (N & M) == 0, replace it with A.  */
11463 		    pmop[which] = TREE_OPERAND (pmop[which], 0);
11464 		    break;
11465 		  case INTEGER_CST:
11466 		    /* If C or D is a N where (N & M) == 0, it can be
11467 		       omitted (assumed 0).  */
11468 		    if ((TREE_CODE (arg0) == PLUS_EXPR
11469 			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11470 			&& (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11471 		      pmop[which] = NULL;
11472 		    break;
11473 		  default:
11474 		    break;
11475 		  }
11476 
11477 	      /* Only build anything new if we optimized one or both arguments
11478 		 above.  */
11479 	      if (pmop[0] != TREE_OPERAND (arg0, 0)
11480 		  || (TREE_CODE (arg0) != NEGATE_EXPR
11481 		      && pmop[1] != TREE_OPERAND (arg0, 1)))
11482 		{
11483 		  tree utype = TREE_TYPE (arg0);
11484 		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11485 		    {
11486 		      /* Perform the operations in a type that has defined
11487 			 overflow behavior.  */
11488 		      utype = unsigned_type_for (TREE_TYPE (arg0));
11489 		      if (pmop[0] != NULL)
11490 			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11491 		      if (pmop[1] != NULL)
11492 			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11493 		    }
11494 
11495 		  if (TREE_CODE (arg0) == NEGATE_EXPR)
11496 		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11497 		  else if (TREE_CODE (arg0) == PLUS_EXPR)
11498 		    {
11499 		      if (pmop[0] != NULL && pmop[1] != NULL)
11500 			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11501 					       pmop[0], pmop[1]);
11502 		      else if (pmop[0] != NULL)
11503 			tem = pmop[0];
11504 		      else if (pmop[1] != NULL)
11505 			tem = pmop[1];
11506 		      else
11507 			return build_int_cst (type, 0);
11508 		    }
11509 		  else if (pmop[0] == NULL)
11510 		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11511 		  else
11512 		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11513 					   pmop[0], pmop[1]);
11514 		  /* TEM is now the new binary +, - or unary - replacement.  */
11515 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11516 					 fold_convert_loc (loc, utype, arg1));
11517 		  return fold_convert_loc (loc, type, tem);
11518 		}
11519 	    }
11520 	}
11521 
11522       t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11523       if (t1 != NULL_TREE)
11524 	return t1;
11525       /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
11526       if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11527 	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11528 	{
11529 	  unsigned int prec
11530 	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11531 
11532 	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11533 	      && (~TREE_INT_CST_LOW (arg1)
11534 		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11535 	    return
11536 	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11537 	}
11538 
11539       /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11540 
11541 	 This results in more efficient code for machines without a NOR
11542 	 instruction.  Combine will canonicalize to the first form
11543 	 which will allow use of NOR instructions provided by the
11544 	 backend if they exist.  */
11545       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11546 	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
11547 	{
11548 	  return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11549 			      build2 (BIT_IOR_EXPR, type,
11550 				      fold_convert_loc (loc, type,
11551 							TREE_OPERAND (arg0, 0)),
11552 				      fold_convert_loc (loc, type,
11553 							TREE_OPERAND (arg1, 0))));
11554 	}
11555 
11556       /* If arg0 is derived from the address of an object or function, we may
11557 	 be able to fold this expression using the object or function's
11558 	 alignment.  */
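      /* E.g. if arg0 is known to be 8-byte aligned (modulus 8,
	 residue 0), arg0 & 7 folds to the constant 0.  */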
11559       if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11560 	{
11561 	  unsigned HOST_WIDE_INT modulus, residue;
11562 	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11563 
11564 	  modulus = get_pointer_modulus_and_residue (arg0, &residue,
11565 						     integer_onep (arg1));
11566 
11567 	  /* This works because modulus is a power of 2.  If this weren't the
11568 	     case, we'd have to replace it by its greatest power-of-2
11569 	     divisor: modulus & -modulus.  */
11570 	  if (low < modulus)
11571 	    return build_int_cst (type, residue & low);
11572 	}
11573 
11574       /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11575 	      (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11576 	 if the new mask might be further optimized.  */
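      /* E.g. for a 32-bit unsigned X, (X << 8) & 0xffffff00 becomes
	 (X << 8) & 0xffffffff, since the low 8 bits are zero anyway;
	 the all-ones mask is then dropped by the integer_all_onesp
	 check at the top of this case.  */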
11577       if ((TREE_CODE (arg0) == LSHIFT_EXPR
11578 	   || TREE_CODE (arg0) == RSHIFT_EXPR)
11579 	  && host_integerp (TREE_OPERAND (arg0, 1), 1)
11580 	  && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11581 	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11582 	     < TYPE_PRECISION (TREE_TYPE (arg0))
11583 	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11584 	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11585 	{
11586 	  unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11587 	  unsigned HOST_WIDE_INT mask
11588 	    = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11589 	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
11590 	  tree shift_type = TREE_TYPE (arg0);
11591 
11592 	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
11593 	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11594 	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
11595 		   && TYPE_PRECISION (TREE_TYPE (arg0))
11596 		      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11597 	    {
11598 	      unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11599 	      tree arg00 = TREE_OPERAND (arg0, 0);
11600 	      /* See if more bits can be proven as zero because of
11601 		 zero extension.  */
11602 	      if (TREE_CODE (arg00) == NOP_EXPR
11603 		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11604 		{
11605 		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11606 		  if (TYPE_PRECISION (inner_type)
11607 		      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11608 		      && TYPE_PRECISION (inner_type) < prec)
11609 		    {
11610 		      prec = TYPE_PRECISION (inner_type);
11611 		      /* See if we can shorten the right shift.  */
11612 		      if (shiftc < prec)
11613 			shift_type = inner_type;
11614 		    }
11615 		}
11616 	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
11617 	      zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11618 	      zerobits <<= prec - shiftc;
11619 	      /* For an arithmetic shift, if the sign bit could be set,
11620 		 zerobits may actually contain sign bits, so no transformation
11621 		 is possible unless MASK masks them all away.  In that
11622 		 case the shift needs to be converted into a logical shift.  */
11623 	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11624 		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11625 		{
11626 		  if ((mask & zerobits) == 0)
11627 		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
11628 		  else
11629 		    zerobits = 0;
11630 		}
11631 	    }
11632 
11633 	  /* ((X << 16) & 0xff00) is (X, 0).  */
11634 	  if ((mask & zerobits) == mask)
11635 	    return omit_one_operand_loc (loc, type,
11636 				     build_int_cst (type, 0), arg0);
11637 
11638 	  newmask = mask | zerobits;
11639 	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
11640 	    {
11641 	      unsigned int prec;
11642 
11643 	      /* Only do the transformation if NEWMASK is some integer
11644 		 mode's mask.  */
11645 	      for (prec = BITS_PER_UNIT;
11646 		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11647 		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11648 		  break;
11649 	      if (prec < HOST_BITS_PER_WIDE_INT
11650 		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
11651 		{
11652 		  tree newmaskt;
11653 
11654 		  if (shift_type != TREE_TYPE (arg0))
11655 		    {
11656 		      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11657 					 fold_convert_loc (loc, shift_type,
11658 							   TREE_OPERAND (arg0, 0)),
11659 					 TREE_OPERAND (arg0, 1));
11660 		      tem = fold_convert_loc (loc, type, tem);
11661 		    }
11662 		  else
11663 		    tem = op0;
11664 		  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11665 		  if (!tree_int_cst_equal (newmaskt, arg1))
11666 		    return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11667 		}
11668 	    }
11669 	}
11670 
11671       goto associate;
11672 
11673     case RDIV_EXPR:
11674       /* Don't touch a floating-point divide by zero unless the mode
11675 	 of the constant can represent infinity.  */
11676       if (TREE_CODE (arg1) == REAL_CST
11677 	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11678 	  && real_zerop (arg1))
11679 	return NULL_TREE;
11680 
11681       /* Optimize A / A to 1.0 if we don't care about
11682 	 NaNs or Infinities.  Skip the transformation
11683 	 for non-real operands.  */
11684       if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11685 	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11686 	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11687 	  && operand_equal_p (arg0, arg1, 0))
11688 	{
11689 	  tree r = build_real (TREE_TYPE (arg0), dconst1);
11690 
11691 	  return omit_two_operands_loc (loc, type, r, arg0, arg1);
11692 	}
11693 
11694       /* The complex version of the above A / A optimization.  */
11695       if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11696 	  && operand_equal_p (arg0, arg1, 0))
11697 	{
11698 	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11699 	  if (! HONOR_NANS (TYPE_MODE (elem_type))
11700 	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11701 	    {
11702 	      tree r = build_real (elem_type, dconst1);
11703 	      /* omit_two_operands will call fold_convert for us.  */
11704 	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
11705 	    }
11706 	}
11707 
11708       /* (-A) / (-B) -> A / B  */
11709       if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11710 	return fold_build2_loc (loc, RDIV_EXPR, type,
11711 			    TREE_OPERAND (arg0, 0),
11712 			    negate_expr (arg1));
11713       if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11714 	return fold_build2_loc (loc, RDIV_EXPR, type,
11715 			    negate_expr (arg0),
11716 			    TREE_OPERAND (arg1, 0));
11717 
11718       /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
11719       if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11720 	  && real_onep (arg1))
11721 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11722 
11723       /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
11724       if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11725 	  && real_minus_onep (arg1))
11726 	return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11727 						  negate_expr (arg0)));
11728 
11729       /* If ARG1 is a constant, we can convert this to a multiply by the
11730 	 reciprocal.  This does not have the same rounding properties,
11731 	 so only do this if -freciprocal-math.  We can actually
11732 	 always safely do it if ARG1 is a power of two, but it's hard to
11733 	 tell if it is or not in a portable manner.  */
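      /* E.g. X / 4.0 becomes X * 0.25 (the inverse is exact, so
	 optimizing alone suffices), while X / 3.0 becomes
	 X * (1.0/3.0) only under -freciprocal-math, since that
	 inverse rounds.  */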
11734       if (TREE_CODE (arg1) == REAL_CST)
11735 	{
11736 	  if (flag_reciprocal_math
11737 	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
11738 					  arg1)))
11739 	    return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11740 	  /* Find the reciprocal if optimizing and the result is exact.  */
11741 	  if (optimize)
11742 	    {
11743 	      REAL_VALUE_TYPE r;
11744 	      r = TREE_REAL_CST (arg1);
11745 	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11746 		{
11747 		  tem = build_real (type, r);
11748 		  return fold_build2_loc (loc, MULT_EXPR, type,
11749 				      fold_convert_loc (loc, type, arg0), tem);
11750 		}
11751 	    }
11752 	}
11753       /* Convert A/B/C to A/(B*C).  */
11754       if (flag_reciprocal_math
11755 	  && TREE_CODE (arg0) == RDIV_EXPR)
11756 	return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11757 			    fold_build2_loc (loc, MULT_EXPR, type,
11758 					 TREE_OPERAND (arg0, 1), arg1));
11759 
11760       /* Convert A/(B/C) to (A/B)*C.  */
11761       if (flag_reciprocal_math
11762 	  && TREE_CODE (arg1) == RDIV_EXPR)
11763 	return fold_build2_loc (loc, MULT_EXPR, type,
11764 			    fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11765 					 TREE_OPERAND (arg1, 0)),
11766 			    TREE_OPERAND (arg1, 1));
11767 
11768       /* Convert C1/(X*C2) into (C1/C2)/X.  */
11769       if (flag_reciprocal_math
11770 	  && TREE_CODE (arg1) == MULT_EXPR
11771 	  && TREE_CODE (arg0) == REAL_CST
11772 	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11773 	{
11774 	  tree tem = const_binop (RDIV_EXPR, arg0,
11775 				  TREE_OPERAND (arg1, 1));
11776 	  if (tem)
11777 	    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11778 				TREE_OPERAND (arg1, 0));
11779 	}
11780 
11781       if (flag_unsafe_math_optimizations)
11782 	{
11783 	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11784 	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11785 
11786 	  /* Optimize sin(x)/cos(x) as tan(x).  */
11787 	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11788 	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11789 	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11790 	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11791 				  CALL_EXPR_ARG (arg1, 0), 0))
11792 	    {
11793 	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11794 
11795 	      if (tanfn != NULL_TREE)
11796 		return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11797 	    }
11798 
11799 	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
11800 	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11801 	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11802 	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11803 	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11804 				  CALL_EXPR_ARG (arg1, 0), 0))
11805 	    {
11806 	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11807 
11808 	      if (tanfn != NULL_TREE)
11809 		{
11810 		  tree tmp = build_call_expr_loc (loc, tanfn, 1,
11811 					      CALL_EXPR_ARG (arg0, 0));
11812 		  return fold_build2_loc (loc, RDIV_EXPR, type,
11813 				      build_real (type, dconst1), tmp);
11814 		}
11815 	    }
11816 
11817  	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11818 	     NaNs or Infinities.  */
11819  	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11820  	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11821  	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11822 	    {
11823 	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
11824 	      tree arg01 = CALL_EXPR_ARG (arg1, 0);
11825 
11826 	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11827 		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11828 		  && operand_equal_p (arg00, arg01, 0))
11829 		{
11830 		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11831 
11832 		  if (cosfn != NULL_TREE)
11833 		    return build_call_expr_loc (loc, cosfn, 1, arg00);
11834 		}
11835 	    }
11836 
11837  	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11838 	     NaNs or Infinities.  */
11839  	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11840  	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11841  	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11842 	    {
11843 	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
11844 	      tree arg01 = CALL_EXPR_ARG (arg1, 0);
11845 
11846 	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11847 		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11848 		  && operand_equal_p (arg00, arg01, 0))
11849 		{
11850 		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11851 
11852 		  if (cosfn != NULL_TREE)
11853 		    {
11854 		      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11855 		      return fold_build2_loc (loc, RDIV_EXPR, type,
11856 					  build_real (type, dconst1),
11857 					  tmp);
11858 		    }
11859 		}
11860 	    }
11861 
11862 	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
11863 	  if (fcode0 == BUILT_IN_POW
11864 	      || fcode0 == BUILT_IN_POWF
11865 	      || fcode0 == BUILT_IN_POWL)
11866 	    {
11867 	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
11868 	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
11869 	      if (TREE_CODE (arg01) == REAL_CST
11870 		  && !TREE_OVERFLOW (arg01)
11871 		  && operand_equal_p (arg1, arg00, 0))
11872 		{
11873 		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11874 		  REAL_VALUE_TYPE c;
11875 		  tree arg;
11876 
11877 		  c = TREE_REAL_CST (arg01);
11878 		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11879 		  arg = build_real (type, c);
11880 		  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11881 		}
11882 	    }
11883 
11884 	  /* Optimize a/root(b/c) into a*root(c/b).  */
11885 	  if (BUILTIN_ROOT_P (fcode1))
11886 	    {
11887 	      tree rootarg = CALL_EXPR_ARG (arg1, 0);
11888 
11889 	      if (TREE_CODE (rootarg) == RDIV_EXPR)
11890 		{
11891 		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11892 		  tree b = TREE_OPERAND (rootarg, 0);
11893 		  tree c = TREE_OPERAND (rootarg, 1);
11894 
11895 		  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11896 
11897 		  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11898 		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11899 		}
11900 	    }
11901 
11902 	  /* Optimize x/expN(y) into x*expN(-y).  */
11903 	  if (BUILTIN_EXPONENT_P (fcode1))
11904 	    {
11905 	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11906 	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11907 	      arg1 = build_call_expr_loc (loc,
11908 				      expfn, 1,
11909 				      fold_convert_loc (loc, type, arg));
11910 	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11911 	    }
11912 
11913 	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
11914 	  if (fcode1 == BUILT_IN_POW
11915 	      || fcode1 == BUILT_IN_POWF
11916 	      || fcode1 == BUILT_IN_POWL)
11917 	    {
11918 	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11919 	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
11920 	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
11921 	      tree neg11 = fold_convert_loc (loc, type,
11922 					     negate_expr (arg11));
11923 	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11924 	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11925 	    }
11926 	}
11927       return NULL_TREE;
11928 
11929     case TRUNC_DIV_EXPR:
11930       /* Optimize (X & (-A)) / A where A is a power of 2,
11931 	 to X >> log2(A).  */
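      /* X & -A clears the log2(A) low bits, so the division is exact
	 and an arithmetic right shift yields the quotient even for
	 negative X: (X & -8) / 8 == (X & -8) >> 3.  */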
11932       if (TREE_CODE (arg0) == BIT_AND_EXPR
11933 	  && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11934 	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11935 	{
11936 	  tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11937 				      arg1, TREE_OPERAND (arg0, 1));
11938 	  if (sum && integer_zerop (sum))
11939 	    {
11940 	      unsigned long pow2;
11941 
11942 	      if (TREE_INT_CST_LOW (arg1))
11943 		pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
11944 	      else
11945 		pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
11946 		       + HOST_BITS_PER_WIDE_INT;
11947 	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
11948 				      TREE_OPERAND (arg0, 0),
11949 				      build_int_cst (integer_type_node, pow2));
11950 	    }
11951 	}
11952 
11953       /* Fall thru */
11954 
11955     case FLOOR_DIV_EXPR:
11956       /* Simplify A / (B << N) where A and B are positive and B is
11957 	 a power of 2, to A >> (N + log2(B)).  */
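      /* E.g. A / (4 << N) becomes A >> (N + 2) when A is known to be
	 nonnegative.  */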
11958       strict_overflow_p = false;
11959       if (TREE_CODE (arg1) == LSHIFT_EXPR
11960 	  && (TYPE_UNSIGNED (type)
11961 	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11962 	{
11963 	  tree sval = TREE_OPERAND (arg1, 0);
11964 	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11965 	    {
11966 	      tree sh_cnt = TREE_OPERAND (arg1, 1);
11967 	      unsigned long pow2;
11968 
11969 	      if (TREE_INT_CST_LOW (sval))
11970 		pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11971 	      else
11972 		pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
11973 		       + HOST_BITS_PER_WIDE_INT;
11974 
11975 	      if (strict_overflow_p)
11976 		fold_overflow_warning (("assuming signed overflow does not "
11977 					"occur when simplifying A / (B << N)"),
11978 				       WARN_STRICT_OVERFLOW_MISC);
11979 
11980 	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11981 					sh_cnt,
11982 					build_int_cst (TREE_TYPE (sh_cnt),
11983 						       pow2));
11984 	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
11985 				  fold_convert_loc (loc, type, arg0), sh_cnt);
11986 	    }
11987 	}
11988 
11989       /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11990 	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
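      /* Both operands are nonnegative here, so the quotient is too,
	 and rounding toward zero coincides with rounding toward
	 negative infinity.  */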
11991       if (INTEGRAL_TYPE_P (type)
11992 	  && TYPE_UNSIGNED (type)
11993 	  && code == FLOOR_DIV_EXPR)
11994 	return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
11995 
11996       /* Fall thru */
11997 
11998     case ROUND_DIV_EXPR:
11999     case CEIL_DIV_EXPR:
12000     case EXACT_DIV_EXPR:
12001       if (integer_onep (arg1))
12002 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12003       if (integer_zerop (arg1))
12004 	return NULL_TREE;
12005       /* X / -1 is -X.  */
12006       if (!TYPE_UNSIGNED (type)
12007 	  && TREE_CODE (arg1) == INTEGER_CST
12008 	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12009 	  && TREE_INT_CST_HIGH (arg1) == -1)
12010 	return fold_convert_loc (loc, type, negate_expr (arg0));
12011 
12012       /* Convert -A / -B to A / B when the type is signed and overflow is
12013 	 undefined.  */
12014       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12015 	  && TREE_CODE (arg0) == NEGATE_EXPR
12016 	  && negate_expr_p (arg1))
12017 	{
12018 	  if (INTEGRAL_TYPE_P (type))
12019 	    fold_overflow_warning (("assuming signed overflow does not occur "
12020 				    "when distributing negation across "
12021 				    "division"),
12022 				   WARN_STRICT_OVERFLOW_MISC);
12023 	  return fold_build2_loc (loc, code, type,
12024 			      fold_convert_loc (loc, type,
12025 						TREE_OPERAND (arg0, 0)),
12026 			      fold_convert_loc (loc, type,
12027 						negate_expr (arg1)));
12028 	}
12029       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12030 	  && TREE_CODE (arg1) == NEGATE_EXPR
12031 	  && negate_expr_p (arg0))
12032 	{
12033 	  if (INTEGRAL_TYPE_P (type))
12034 	    fold_overflow_warning (("assuming signed overflow does not occur "
12035 				    "when distributing negation across "
12036 				    "division"),
12037 				   WARN_STRICT_OVERFLOW_MISC);
12038 	  return fold_build2_loc (loc, code, type,
12039 			      fold_convert_loc (loc, type,
12040 						negate_expr (arg0)),
12041 			      fold_convert_loc (loc, type,
12042 						TREE_OPERAND (arg1, 0)));
12043 	}
12044 
12045       /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12046 	 operation, EXACT_DIV_EXPR.
12047 
12048 	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12049 	 At one time others generated faster code, but it's not clear whether they
12050 	 still do after the last round of changes to the DIV code in expmed.c.  */
12051       if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12052 	  && multiple_of_p (type, arg0, arg1))
12053 	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12054 
12055       strict_overflow_p = false;
12056       if (TREE_CODE (arg1) == INTEGER_CST
12057 	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12058 					 &strict_overflow_p)))
12059 	{
12060 	  if (strict_overflow_p)
12061 	    fold_overflow_warning (("assuming signed overflow does not occur "
12062 				    "when simplifying division"),
12063 				   WARN_STRICT_OVERFLOW_MISC);
12064 	  return fold_convert_loc (loc, type, tem);
12065 	}
12066 
12067       return NULL_TREE;
12068 
12069     case CEIL_MOD_EXPR:
12070     case FLOOR_MOD_EXPR:
12071     case ROUND_MOD_EXPR:
12072     case TRUNC_MOD_EXPR:
12073       /* X % 1 is always zero, but be sure to preserve any side
12074 	 effects in X.  */
12075       if (integer_onep (arg1))
12076 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12077 
12078       /* For X % 0, return X % 0 unchanged so that we get the
12079 	 proper warnings and errors.  */
12080       if (integer_zerop (arg1))
12081 	return NULL_TREE;
12082 
12083       /* 0 % X is always zero, but be sure to preserve any side
12084 	 effects in X.  Place this after checking for X == 0.  */
12085       if (integer_zerop (arg0))
12086 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12087 
12088       /* X % -1 is zero.  */
12089       if (!TYPE_UNSIGNED (type)
12090 	  && TREE_CODE (arg1) == INTEGER_CST
12091 	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12092 	  && TREE_INT_CST_HIGH (arg1) == -1)
12093 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12094 
12095       /* X % -C is the same as X % C.  */
12096       if (code == TRUNC_MOD_EXPR
12097 	  && !TYPE_UNSIGNED (type)
12098 	  && TREE_CODE (arg1) == INTEGER_CST
12099 	  && !TREE_OVERFLOW (arg1)
12100 	  && TREE_INT_CST_HIGH (arg1) < 0
12101 	  && !TYPE_OVERFLOW_TRAPS (type)
12102 	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
12103 	  && !sign_bit_p (arg1, arg1))
12104 	return fold_build2_loc (loc, code, type,
12105 			    fold_convert_loc (loc, type, arg0),
12106 			    fold_convert_loc (loc, type,
12107 					      negate_expr (arg1)));
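      /* Illustrative example (not from the original sources): for signed x
	 under truncating division, x % -7 folds to x % 7; the sign_bit_p
	 guard above skips C == INT_MIN, for which -C overflows.  */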
12108 
12109       /* X % -Y is the same as X % Y.  */
12110       if (code == TRUNC_MOD_EXPR
12111 	  && !TYPE_UNSIGNED (type)
12112 	  && TREE_CODE (arg1) == NEGATE_EXPR
12113 	  && !TYPE_OVERFLOW_TRAPS (type))
12114 	return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12115 			    fold_convert_loc (loc, type,
12116 					      TREE_OPERAND (arg1, 0)));
12117 
12118       strict_overflow_p = false;
12119       if (TREE_CODE (arg1) == INTEGER_CST
12120 	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12121 					 &strict_overflow_p)))
12122 	{
12123 	  if (strict_overflow_p)
12124 	    fold_overflow_warning (("assuming signed overflow does not occur "
12125 				    "when simplifying modulus"),
12126 				   WARN_STRICT_OVERFLOW_MISC);
12127 	  return fold_convert_loc (loc, type, tem);
12128 	}
12129 
12130       /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12131          i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
12132       if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12133 	  && (TYPE_UNSIGNED (type)
12134 	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12135 	{
12136 	  tree c = arg1;
12137 	  /* Also optimize A % (C << N) where C is a power of 2,
12138 	     to A & ((C << N) - 1).  */
12139 	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
12140 	    c = TREE_OPERAND (arg1, 0);
12141 
12142 	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12143 	    {
12144 	      tree mask
12145 		= fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12146 				   build_int_cst (TREE_TYPE (arg1), 1));
12147 	      if (strict_overflow_p)
12148 		fold_overflow_warning (("assuming signed overflow does not "
12149 					"occur when simplifying "
12150 					"X % (power of two)"),
12151 				       WARN_STRICT_OVERFLOW_MISC);
12152 	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
12153 				      fold_convert_loc (loc, type, arg0),
12154 				      fold_convert_loc (loc, type, mask));
12155 	    }
12156 	}
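      /* Illustrative example (not from the original sources): for unsigned
	 x, x % 8 folds to x & 7, and x % (2 << n) folds to
	 x & ((2 << n) - 1).  */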
12157 
12158       return NULL_TREE;
12159 
12160     case LROTATE_EXPR:
12161     case RROTATE_EXPR:
12162       if (integer_all_onesp (arg0))
12163 	return omit_one_operand_loc (loc, type, arg0, arg1);
12164       goto shift;
12165 
12166     case RSHIFT_EXPR:
12167       /* Optimize -1 >> x for arithmetic right shifts.  */
12168       if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12169 	  && tree_expr_nonnegative_p (arg1))
12170 	return omit_one_operand_loc (loc, type, arg0, arg1);
12171       /* ... fall through ...  */
12172 
12173     case LSHIFT_EXPR:
12174     shift:
12175       if (integer_zerop (arg1))
12176 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12177       if (integer_zerop (arg0))
12178 	return omit_one_operand_loc (loc, type, arg0, arg1);
12179 
12180       /* Since a negative shift count is not well-defined,
12181 	 don't try to compute it in the compiler.  */
12182       if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12183 	return NULL_TREE;
12184 
12185       /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
12186       if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12187 	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12188 	  && host_integerp (TREE_OPERAND (arg0, 1), false)
12189 	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12190 	{
12191 	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12192 			       + TREE_INT_CST_LOW (arg1));
12193 
12194 	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12195 	     being well defined.  */
12196 	  if (low >= TYPE_PRECISION (type))
12197 	    {
12198 	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12199 	        low = low % TYPE_PRECISION (type);
12200 	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12201 		return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12202 					 TREE_OPERAND (arg0, 0));
12203 	      else
12204 		low = TYPE_PRECISION (type) - 1;
12205 	    }
12206 
12207 	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12208 			      build_int_cst (type, low));
12209 	}
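      /* Illustrative example (not from the original sources): (x << 3) << 4
	 folds to x << 7.  If the combined count reaches the precision,
	 rotates fold the count modulo the precision, left and unsigned
	 shifts collapse to 0, and signed right shifts clamp the count to
	 precision - 1.  */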
12210 
12211       /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12212          into x & ((unsigned)-1 >> c) for unsigned types.  */
12213       if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12214            || (TYPE_UNSIGNED (type)
12215 	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12216 	  && host_integerp (arg1, false)
12217 	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12218 	  && host_integerp (TREE_OPERAND (arg0, 1), false)
12219 	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12220 	{
12221 	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12222 	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12223 	  tree lshift;
12224 	  tree arg00;
12225 
12226 	  if (low0 == low1)
12227 	    {
12228 	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12229 
12230 	      lshift = build_int_cst (type, -1);
12231 	      lshift = int_const_binop (code, lshift, arg1);
12232 
12233 	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12234 	    }
12235 	}
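      /* Illustrative example (not from the original sources): for 32-bit x,
	 (x >> 5) << 5 folds to x & (-1 << 5), i.e. x with its low five bits
	 cleared.  */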
12236 
12237       /* Rewrite an LROTATE_EXPR by a constant into an
12238 	 RROTATE_EXPR by a new constant.  */
12239       if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12240 	{
12241 	  tree tem = build_int_cst (TREE_TYPE (arg1),
12242 				    TYPE_PRECISION (type));
12243 	  tem = const_binop (MINUS_EXPR, tem, arg1);
12244 	  return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12245 	}
12246 
12247       /* If we have a rotate of a bit operation with the rotate count and
12248 	 the second operand of the bit operation both constant,
12249 	 permute the two operations.  */
12250       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12251 	  && (TREE_CODE (arg0) == BIT_AND_EXPR
12252 	      || TREE_CODE (arg0) == BIT_IOR_EXPR
12253 	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
12254 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12255 	return fold_build2_loc (loc, TREE_CODE (arg0), type,
12256 			    fold_build2_loc (loc, code, type,
12257 					 TREE_OPERAND (arg0, 0), arg1),
12258 			    fold_build2_loc (loc, code, type,
12259 					 TREE_OPERAND (arg0, 1), arg1));
12260 
12261       /* Two consecutive rotates adding up to the precision of the
12262 	 type can be ignored.  */
12263       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12264 	  && TREE_CODE (arg0) == RROTATE_EXPR
12265 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12266 	  && TREE_INT_CST_HIGH (arg1) == 0
12267 	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12268 	  && ((TREE_INT_CST_LOW (arg1)
12269 	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12270 	      == (unsigned int) TYPE_PRECISION (type)))
12271 	return TREE_OPERAND (arg0, 0);
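      /* Illustrative example (not from the original sources): for 32-bit x,
	 rotating x right by 13 and the result right by 19 yields x again,
	 since 13 + 19 == 32.  */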
12272 
12273       /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12274 	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12275 	 if the latter can be further optimized.  */
12276       if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12277 	  && TREE_CODE (arg0) == BIT_AND_EXPR
12278 	  && TREE_CODE (arg1) == INTEGER_CST
12279 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12280 	{
12281 	  tree mask = fold_build2_loc (loc, code, type,
12282 				   fold_convert_loc (loc, type,
12283 						     TREE_OPERAND (arg0, 1)),
12284 				   arg1);
12285 	  tree shift = fold_build2_loc (loc, code, type,
12286 				    fold_convert_loc (loc, type,
12287 						      TREE_OPERAND (arg0, 0)),
12288 				    arg1);
12289 	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12290 	  if (tem)
12291 	    return tem;
12292 	}
12293 
12294       return NULL_TREE;
12295 
12296     case MIN_EXPR:
12297       if (operand_equal_p (arg0, arg1, 0))
12298 	return omit_one_operand_loc (loc, type, arg0, arg1);
12299       if (INTEGRAL_TYPE_P (type)
12300 	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12301 	return omit_one_operand_loc (loc, type, arg1, arg0);
12302       tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12303       if (tem)
12304 	return tem;
12305       goto associate;
12306 
12307     case MAX_EXPR:
12308       if (operand_equal_p (arg0, arg1, 0))
12309 	return omit_one_operand_loc (loc, type, arg0, arg1);
12310       if (INTEGRAL_TYPE_P (type)
12311 	  && TYPE_MAX_VALUE (type)
12312 	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12313 	return omit_one_operand_loc (loc, type, arg1, arg0);
12314       tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12315       if (tem)
12316 	return tem;
12317       goto associate;
12318 
12319     case TRUTH_ANDIF_EXPR:
12320       /* Note that the operands of this must be ints
12321 	 and their values must be 0 or 1.
12322 	 ("true" is a fixed value, though which one may depend on the language.)  */
12323       /* If first arg is constant zero, return it.  */
12324       if (integer_zerop (arg0))
12325 	return fold_convert_loc (loc, type, arg0);
12326     case TRUTH_AND_EXPR:
12327       /* If either arg is constant true, drop it.  */
12328       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12329 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12330       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12331 	  /* Preserve sequence points.  */
12332 	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12333 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12334       /* If second arg is constant zero, result is zero, but first arg
12335 	 must be evaluated.  */
12336       if (integer_zerop (arg1))
12337 	return omit_one_operand_loc (loc, type, arg1, arg0);
12338       /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12339 	 case will be handled here.  */
12340       if (integer_zerop (arg0))
12341 	return omit_one_operand_loc (loc, type, arg0, arg1);
12342 
12343       /* !X && X is always false.  */
12344       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12345 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12346 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12347       /* X && !X is always false.  */
12348       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12349 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12350 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12351 
12352       /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
12353 	 means A >= Y && A != MAX, but in this case we know that
12354 	 A < X <= MAX.  */
12355 
12356       if (!TREE_SIDE_EFFECTS (arg0)
12357 	  && !TREE_SIDE_EFFECTS (arg1))
12358 	{
12359 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12360 	  if (tem && !operand_equal_p (tem, arg0, 0))
12361 	    return fold_build2_loc (loc, code, type, tem, arg1);
12362 
12363 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12364 	  if (tem && !operand_equal_p (tem, arg1, 0))
12365 	    return fold_build2_loc (loc, code, type, arg0, tem);
12366 	}
12367 
12368       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12369           != NULL_TREE)
12370         return tem;
12371 
12372       return NULL_TREE;
12373 
12374     case TRUTH_ORIF_EXPR:
12375       /* Note that the operands of this must be ints
12376 	 and their values must be 0 or 1.
12377 	 ("true" is a fixed value, though which one may depend on the language.)  */
12378       /* If first arg is constant true, return it.  */
12379       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12380 	return fold_convert_loc (loc, type, arg0);
12381     case TRUTH_OR_EXPR:
12382       /* If either arg is constant zero, drop it.  */
12383       if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12384 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12385       if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12386 	  /* Preserve sequence points.  */
12387 	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12388 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12389       /* If second arg is constant true, result is true, but we must
12390 	 evaluate first arg.  */
12391       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12392 	return omit_one_operand_loc (loc, type, arg1, arg0);
12393       /* Likewise for first arg, but note this only occurs here for
12394 	 TRUTH_OR_EXPR.  */
12395       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12396 	return omit_one_operand_loc (loc, type, arg0, arg1);
12397 
12398       /* !X || X is always true.  */
12399       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12400 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12401 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12402       /* X || !X is always true.  */
12403       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12404 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12405 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12406 
12407       /* (X && !Y) || (!X && Y) is X ^ Y */
12408       if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12409 	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12410         {
12411 	  tree a0, a1, l0, l1, n0, n1;
12412 
12413 	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12414 	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12415 
12416 	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12417 	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12418 
12419 	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12420 	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12421 
12422 	  if ((operand_equal_p (n0, a0, 0)
12423 	       && operand_equal_p (n1, a1, 0))
12424 	      || (operand_equal_p (n0, a1, 0)
12425 		  && operand_equal_p (n1, a0, 0)))
12426 	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12427 	}
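      /* Illustrative note (not from the original sources): the same test
	 also matches (x && y) || (!x && !y), which folds to x ^ !y.  */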
12428 
12429       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12430           != NULL_TREE)
12431         return tem;
12432 
12433       return NULL_TREE;
12434 
12435     case TRUTH_XOR_EXPR:
12436       /* If the second arg is constant zero, drop it.  */
12437       if (integer_zerop (arg1))
12438 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12439       /* If the second arg is constant true, this is a logical inversion.  */
12440       if (integer_onep (arg1))
12441 	{
12442 	  /* Only call invert_truthvalue if operand is a truth value.  */
12443 	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12444 	    tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12445 	  else
12446 	    tem = invert_truthvalue_loc (loc, arg0);
12447 	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12448 	}
12449       /* Identical arguments cancel to zero.  */
12450       if (operand_equal_p (arg0, arg1, 0))
12451 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12452 
12453       /* !X ^ X is always true.  */
12454       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12455 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12456 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12457 
12458       /* X ^ !X is always true.  */
12459       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12460 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12461 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12462 
12463       return NULL_TREE;
12464 
12465     case EQ_EXPR:
12466     case NE_EXPR:
12467       STRIP_NOPS (arg0);
12468       STRIP_NOPS (arg1);
12469 
12470       tem = fold_comparison (loc, code, type, op0, op1);
12471       if (tem != NULL_TREE)
12472 	return tem;
12473 
12474       /* bool_var != 0 becomes bool_var. */
12475       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12476           && code == NE_EXPR)
12477         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12478 
12479       /* bool_var == 1 becomes bool_var. */
12480       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12481           && code == EQ_EXPR)
12482         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12483 
12484       /* bool_var != 1 becomes !bool_var. */
12485       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12486           && code == NE_EXPR)
12487         return fold_convert_loc (loc, type,
12488 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12489 						  TREE_TYPE (arg0), arg0));
12490 
12491       /* bool_var == 0 becomes !bool_var. */
12492       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12493           && code == EQ_EXPR)
12494         return fold_convert_loc (loc, type,
12495 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12496 						  TREE_TYPE (arg0), arg0));
12497 
12498       /* !exp != 0 becomes !exp.  */
12499       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12500 	  && code == NE_EXPR)
12501         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12502 
12503       /* If this is an equality comparison of the address of two non-weak,
12504 	 unaliased symbols, neither of which is extern (since we do not
12505 	 have access to attributes for externs), then we know the result.  */
12506       if (TREE_CODE (arg0) == ADDR_EXPR
12507 	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12508 	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12509 	  && ! lookup_attribute ("alias",
12510 				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12511 	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12512 	  && TREE_CODE (arg1) == ADDR_EXPR
12513 	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12514 	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12515 	  && ! lookup_attribute ("alias",
12516 				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12517 	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12518 	{
12519 	  /* We know that we're looking at the address of two
12520 	     non-weak, unaliased, static _DECL nodes.
12521 
12522 	     It is both wasteful and incorrect to call operand_equal_p
12523 	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
12524 	     all we need to do is test pointer equality for the arguments
12525 	     to the two ADDR_EXPR nodes.  It is incorrect to use
12526 	     operand_equal_p as that function is NOT equivalent to a
12527 	     C equality test.  It can in fact return false for two
12528 	     objects which would test as equal using the C equality
12529 	     operator.  */
12530 	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12531 	  return constant_boolean_node (equal
12532 				        ? code == EQ_EXPR : code != EQ_EXPR,
12533 				        type);
12534 	}
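      /* Illustrative example (not from the original sources): given
	 "static int a, b;", &a == &b folds to 0 and &a != &b folds to 1,
	 since distinct non-weak, unaliased statics cannot share an
	 address.  */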
12535 
12536       /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12537 	 a MINUS_EXPR of a constant, we can convert it into a comparison with
12538 	 a revised constant as long as no overflow occurs.  */
12539       if (TREE_CODE (arg1) == INTEGER_CST
12540 	  && (TREE_CODE (arg0) == PLUS_EXPR
12541 	      || TREE_CODE (arg0) == MINUS_EXPR)
12542 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12543 	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12544 				      ? MINUS_EXPR : PLUS_EXPR,
12545 				      fold_convert_loc (loc, TREE_TYPE (arg0),
12546 							arg1),
12547 				      TREE_OPERAND (arg0, 1)))
12548 	  && !TREE_OVERFLOW (tem))
12549 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12550 
12551       /* Similarly for a NEGATE_EXPR.  */
12552       if (TREE_CODE (arg0) == NEGATE_EXPR
12553 	  && TREE_CODE (arg1) == INTEGER_CST
12554 	  && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12555 							arg1)))
12556 	  && TREE_CODE (tem) == INTEGER_CST
12557 	  && !TREE_OVERFLOW (tem))
12558 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12559 
12560       /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
12561       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12562 	  && TREE_CODE (arg1) == INTEGER_CST
12563 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12564 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12565 			    fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12566 					 fold_convert_loc (loc,
12567 							   TREE_TYPE (arg0),
12568 							   arg1),
12569 					 TREE_OPERAND (arg0, 1)));
12570 
12571       /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
12572       if ((TREE_CODE (arg0) == PLUS_EXPR
12573 	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12574 	   || TREE_CODE (arg0) == MINUS_EXPR)
12575 	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12576 									0)),
12577 			      arg1, 0)
12578 	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12579 	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
12580 	{
12581 	  tree val = TREE_OPERAND (arg0, 1);
12582 	  return omit_two_operands_loc (loc, type,
12583 				    fold_build2_loc (loc, code, type,
12584 						 val,
12585 						 build_int_cst (TREE_TYPE (val),
12586 								0)),
12587 				    TREE_OPERAND (arg0, 0), arg1);
12588 	}
12589 
12590       /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
12591       if (TREE_CODE (arg0) == MINUS_EXPR
12592 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12593 	  && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12594 									1)),
12595 			      arg1, 0)
12596 	  && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12597 	{
12598 	  return omit_two_operands_loc (loc, type,
12599 				    code == NE_EXPR
12600 				    ? boolean_true_node : boolean_false_node,
12601 				    TREE_OPERAND (arg0, 1), arg1);
12602 	}
12603 
12604       /* If we have X - Y == 0, we can convert that to X == Y and similarly
12605 	 for !=.  Don't do this for ordered comparisons due to overflow.  */
12606       if (TREE_CODE (arg0) == MINUS_EXPR
12607 	  && integer_zerop (arg1))
12608 	return fold_build2_loc (loc, code, type,
12609 			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12610 
12611       /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
12612       if (TREE_CODE (arg0) == ABS_EXPR
12613 	  && (integer_zerop (arg1) || real_zerop (arg1)))
12614 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12615 
12616       /* If this is an EQ or NE comparison with zero and ARG0 is
12617 	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
12618 	 two operations, but the latter can be done in one less insn
12619 	 on machines that have only two-operand insns or on which a
12620 	 constant cannot be the first operand.  */
12621       if (TREE_CODE (arg0) == BIT_AND_EXPR
12622 	  && integer_zerop (arg1))
12623 	{
12624 	  tree arg00 = TREE_OPERAND (arg0, 0);
12625 	  tree arg01 = TREE_OPERAND (arg0, 1);
12626 	  if (TREE_CODE (arg00) == LSHIFT_EXPR
12627 	      && integer_onep (TREE_OPERAND (arg00, 0)))
12628 	    {
12629 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12630 				      arg01, TREE_OPERAND (arg00, 1));
12631 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12632 				 build_int_cst (TREE_TYPE (arg0), 1));
12633 	      return fold_build2_loc (loc, code, type,
12634 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12635 				  arg1);
12636 	    }
12637 	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
12638 		   && integer_onep (TREE_OPERAND (arg01, 0)))
12639 	    {
12640 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12641 				      arg00, TREE_OPERAND (arg01, 1));
12642 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12643 				 build_int_cst (TREE_TYPE (arg0), 1));
12644 	      return fold_build2_loc (loc, code, type,
12645 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12646 				  arg1);
12647 	    }
12648 	}
12649 
12650       /* If this is an NE or EQ comparison of zero against the result of a
12651 	 signed MOD operation whose second operand is a power of 2, make
12652 	 the MOD operation unsigned since it is simpler and equivalent.  */
12653       if (integer_zerop (arg1)
12654 	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12655 	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12656 	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
12657 	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12658 	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12659 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
12660 	{
12661 	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12662 	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12663 				     fold_convert_loc (loc, newtype,
12664 						       TREE_OPERAND (arg0, 0)),
12665 				     fold_convert_loc (loc, newtype,
12666 						       TREE_OPERAND (arg0, 1)));
12667 
12668 	  return fold_build2_loc (loc, code, type, newmod,
12669 			      fold_convert_loc (loc, newtype, arg1));
12670 	}
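      /* Illustrative example (not from the original sources): for signed x,
	 x % 4 == 0 folds to (unsigned) x % 4U == 0; divisibility by a
	 power of two does not depend on the signedness of the modulus
	 operation.  */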
12671 
12672       /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12673 	 C1 is a valid shift constant, and C2 is a power of two, i.e.
12674 	 a single bit.  */
12675       if (TREE_CODE (arg0) == BIT_AND_EXPR
12676 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12677 	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12678 	     == INTEGER_CST
12679 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12680 	  && integer_zerop (arg1))
12681 	{
12682 	  tree itype = TREE_TYPE (arg0);
12683 	  unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12684 	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12685 
12686 	  /* Check for a valid shift count.  */
12687 	  if (TREE_INT_CST_HIGH (arg001) == 0
12688 	      && TREE_INT_CST_LOW (arg001) < prec)
12689 	    {
12690 	      tree arg01 = TREE_OPERAND (arg0, 1);
12691 	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12692 	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12693 	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12694 		 can be rewritten as (X & (C2 << C1)) != 0.  */
12695 	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12696 		{
12697 		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12698 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12699 		  return fold_build2_loc (loc, code, type, tem,
12700 					  fold_convert_loc (loc, itype, arg1));
12701 		}
12702 	      /* Otherwise, for signed (arithmetic) shifts,
12703 		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12704 		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
12705 	      else if (!TYPE_UNSIGNED (itype))
12706 		return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12707 				    arg000, build_int_cst (itype, 0));
12708 	      /* Otherwise, for unsigned (logical) shifts,
12709 		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12710 		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
12711 	      else
12712 		return omit_one_operand_loc (loc, type,
12713 					 code == EQ_EXPR ? integer_one_node
12714 							 : integer_zero_node,
12715 					 arg000);
12716 	    }
12717 	}
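      /* Illustrative example (not from the original sources): for 32-bit x,
	 ((x >> 3) & 4) != 0 folds to (x & 32) != 0, while for signed x
	 ((x >> 31) & 2) != 0 folds to x < 0.  */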
12718 
12719       /* If we have (A & C) == C where C is a power of 2, convert this into
12720 	 (A & C) != 0.  Similarly for NE_EXPR.  */
12721       if (TREE_CODE (arg0) == BIT_AND_EXPR
12722 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12723 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12724 	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12725 			    arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12726 						    integer_zero_node));
12727 
12728       /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12729 	 bit, then fold the expression into A < 0 or A >= 0.  */
12730       tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12731       if (tem)
12732 	return tem;
12733 
12734       /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12735 	 Similarly for NE_EXPR.  */
12736       if (TREE_CODE (arg0) == BIT_AND_EXPR
12737 	  && TREE_CODE (arg1) == INTEGER_CST
12738 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12739 	{
12740 	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12741 				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
12742 				   TREE_OPERAND (arg0, 1));
12743 	  tree dandnotc
12744 	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12745 			       fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12746 			       notc);
12747 	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12748 	  if (integer_nonzerop (dandnotc))
12749 	    return omit_one_operand_loc (loc, type, rslt, arg0);
12750 	}
12751 
12752       /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12753 	 Similarly for NE_EXPR.  */
12754       if (TREE_CODE (arg0) == BIT_IOR_EXPR
12755 	  && TREE_CODE (arg1) == INTEGER_CST
12756 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12757 	{
12758 	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12759 	  tree candnotd
12760 	    = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12761 			       TREE_OPERAND (arg0, 1),
12762 			       fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12763 	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12764 	  if (integer_nonzerop (candnotd))
12765 	    return omit_one_operand_loc (loc, type, rslt, arg0);
12766 	}
12767 
12768       /* If this is a comparison of a field, we may be able to simplify it.  */
12769       if ((TREE_CODE (arg0) == COMPONENT_REF
12770 	   || TREE_CODE (arg0) == BIT_FIELD_REF)
12771 	  /* Handle the constant case even without -O
12772 	     to make sure the warnings are given.  */
12773 	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12774 	{
12775 	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12776 	  if (t1)
12777 	    return t1;
12778 	}
12779 
12780       /* Optimize comparisons of strlen vs zero to a compare of the
12781 	 first character of the string vs zero.  To wit,
12782 		strlen(ptr) == 0   =>  *ptr == 0
12783 		strlen(ptr) != 0   =>  *ptr != 0
12784 	 Other cases should reduce to one of these two (or a constant)
12785 	 due to the return value of strlen being unsigned.  */
12786       if (TREE_CODE (arg0) == CALL_EXPR
12787 	  && integer_zerop (arg1))
12788 	{
12789 	  tree fndecl = get_callee_fndecl (arg0);
12790 
12791 	  if (fndecl
12792 	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12793 	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12794 	      && call_expr_nargs (arg0) == 1
12795 	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12796 	    {
12797 	      tree iref = build_fold_indirect_ref_loc (loc,
12798 						   CALL_EXPR_ARG (arg0, 0));
12799 	      return fold_build2_loc (loc, code, type, iref,
12800 				  build_int_cst (TREE_TYPE (iref), 0));
12801 	    }
12802 	}
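      /* Illustrative example (not from the original sources):
	 strlen (s) == 0 folds to *s == 0, testing emptiness without the
	 library call.  */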
12803 
12804       /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12805 	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
12806       if (TREE_CODE (arg0) == RSHIFT_EXPR
12807 	  && integer_zerop (arg1)
12808 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12809 	{
12810 	  tree arg00 = TREE_OPERAND (arg0, 0);
12811 	  tree arg01 = TREE_OPERAND (arg0, 1);
12812 	  tree itype = TREE_TYPE (arg00);
12813 	  if (TREE_INT_CST_HIGH (arg01) == 0
12814 	      && TREE_INT_CST_LOW (arg01)
12815 		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12816 	    {
12817 	      if (TYPE_UNSIGNED (itype))
12818 		{
12819 		  itype = signed_type_for (itype);
12820 		  arg00 = fold_convert_loc (loc, itype, arg00);
12821 		}
12822 	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12823 				  type, arg00, build_int_cst (itype, 0));
12824 	    }
12825 	}
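      /* Illustrative example (not from the original sources): for signed
	 32-bit x, (x >> 31) != 0 folds to x < 0; an unsigned x is first
	 converted to the corresponding signed type.  */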
12826 
12827       /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
12828       if (integer_zerop (arg1)
12829 	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
12830 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12831 			    TREE_OPERAND (arg0, 1));
12832 
12833       /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
12834       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12835 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12836 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12837 				build_zero_cst (TREE_TYPE (arg0)));
12838       /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
12839       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12840 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12841 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12842 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12843 				build_zero_cst (TREE_TYPE (arg0)));
12844 
12845       /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
12846       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12847 	  && TREE_CODE (arg1) == INTEGER_CST
12848 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12849 	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12850 			    fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12851 					 TREE_OPERAND (arg0, 1), arg1));
12852 
12853       /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12854 	 (X & C) == 0 when C is a single bit.  */
12855       if (TREE_CODE (arg0) == BIT_AND_EXPR
12856 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12857 	  && integer_zerop (arg1)
12858 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
12859 	{
12860 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12861 				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12862 				 TREE_OPERAND (arg0, 1));
12863 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12864 				  type, tem,
12865 				  fold_convert_loc (loc, TREE_TYPE (arg0),
12866 						    arg1));
12867 	}
12868 
12869       /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12870 	 constant C is a power of two, i.e. a single bit.  */
12871       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12872 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12873 	  && integer_zerop (arg1)
12874 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12875 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12876 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12877 	{
12878 	  tree arg00 = TREE_OPERAND (arg0, 0);
12879 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12880 			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
12881 	}
12882 
12883       /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12884 	 when C is a power of two, i.e. a single bit.  */
12885       if (TREE_CODE (arg0) == BIT_AND_EXPR
12886 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12887 	  && integer_zerop (arg1)
12888 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12889 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12890 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12891 	{
12892 	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12893 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12894 			     arg000, TREE_OPERAND (arg0, 1));
12895 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12896 			      tem, build_int_cst (TREE_TYPE (tem), 0));
12897 	}
12898 
12899       if (integer_zerop (arg1)
12900 	  && tree_expr_nonzero_p (arg0))
12901         {
12902 	  tree res = constant_boolean_node (code == NE_EXPR, type);
12903 	  return omit_one_operand_loc (loc, type, res, arg0);
12904 	}
12905 
12906       /* Fold -X op -Y as X op Y, where op is eq/ne.  */
12907       if (TREE_CODE (arg0) == NEGATE_EXPR
12908           && TREE_CODE (arg1) == NEGATE_EXPR)
12909 	return fold_build2_loc (loc, code, type,
12910 				TREE_OPERAND (arg0, 0),
12911 				fold_convert_loc (loc, TREE_TYPE (arg0),
12912 						  TREE_OPERAND (arg1, 0)));
12913 
12914       /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries.  */
12915       if (TREE_CODE (arg0) == BIT_AND_EXPR
12916 	  && TREE_CODE (arg1) == BIT_AND_EXPR)
12917 	{
12918 	  tree arg00 = TREE_OPERAND (arg0, 0);
12919 	  tree arg01 = TREE_OPERAND (arg0, 1);
12920 	  tree arg10 = TREE_OPERAND (arg1, 0);
12921 	  tree arg11 = TREE_OPERAND (arg1, 1);
12922 	  tree itype = TREE_TYPE (arg0);
12923 
12924 	  if (operand_equal_p (arg01, arg11, 0))
12925 	    return fold_build2_loc (loc, code, type,
12926 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
12927 					     fold_build2_loc (loc,
12928 							  BIT_XOR_EXPR, itype,
12929 							  arg00, arg10),
12930 					     arg01),
12931 				build_zero_cst (itype));
12932 
12933 	  if (operand_equal_p (arg01, arg10, 0))
12934 	    return fold_build2_loc (loc, code, type,
12935 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
12936 					     fold_build2_loc (loc,
12937 							  BIT_XOR_EXPR, itype,
12938 							  arg00, arg11),
12939 					     arg01),
12940 				build_zero_cst (itype));
12941 
12942 	  if (operand_equal_p (arg00, arg11, 0))
12943 	    return fold_build2_loc (loc, code, type,
12944 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
12945 					     fold_build2_loc (loc,
12946 							  BIT_XOR_EXPR, itype,
12947 							  arg01, arg10),
12948 					     arg00),
12949 				build_zero_cst (itype));
12950 
12951 	  if (operand_equal_p (arg00, arg10, 0))
12952 	    return fold_build2_loc (loc, code, type,
12953 				fold_build2_loc (loc, BIT_AND_EXPR, itype,
12954 					     fold_build2_loc (loc,
12955 							  BIT_XOR_EXPR, itype,
12956 							  arg01, arg11),
12957 					     arg00),
12958 				build_zero_cst (itype));
12959 	}
12960 
12961       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12962 	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
12963 	{
12964 	  tree arg00 = TREE_OPERAND (arg0, 0);
12965 	  tree arg01 = TREE_OPERAND (arg0, 1);
12966 	  tree arg10 = TREE_OPERAND (arg1, 0);
12967 	  tree arg11 = TREE_OPERAND (arg1, 1);
12968 	  tree itype = TREE_TYPE (arg0);
12969 
12970 	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12971 	     operand_equal_p guarantees no side-effects so we don't need
12972 	     to use omit_one_operand on Z.  */
12973 	  if (operand_equal_p (arg01, arg11, 0))
12974 	    return fold_build2_loc (loc, code, type, arg00,
12975 				    fold_convert_loc (loc, TREE_TYPE (arg00),
12976 						      arg10));
12977 	  if (operand_equal_p (arg01, arg10, 0))
12978 	    return fold_build2_loc (loc, code, type, arg00,
12979 				    fold_convert_loc (loc, TREE_TYPE (arg00),
12980 						      arg11));
12981 	  if (operand_equal_p (arg00, arg11, 0))
12982 	    return fold_build2_loc (loc, code, type, arg01,
12983 				    fold_convert_loc (loc, TREE_TYPE (arg01),
12984 						      arg10));
12985 	  if (operand_equal_p (arg00, arg10, 0))
12986 	    return fold_build2_loc (loc, code, type, arg01,
12987 				    fold_convert_loc (loc, TREE_TYPE (arg01),
12988 						      arg11));
12989 
12990 	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
12991 	  if (TREE_CODE (arg01) == INTEGER_CST
12992 	      && TREE_CODE (arg11) == INTEGER_CST)
12993 	    {
12994 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12995 				     fold_convert_loc (loc, itype, arg11));
12996 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12997 	      return fold_build2_loc (loc, code, type, tem,
12998 				      fold_convert_loc (loc, itype, arg10));
12999 	    }
13000 	}
13001 
13002       /* Attempt to simplify equality/inequality comparisons of complex
13003 	 values.  Only lower the comparison if the result is known or
13004 	 can be simplified to a single scalar comparison.  */
13005       if ((TREE_CODE (arg0) == COMPLEX_EXPR
13006 	   || TREE_CODE (arg0) == COMPLEX_CST)
13007 	  && (TREE_CODE (arg1) == COMPLEX_EXPR
13008 	      || TREE_CODE (arg1) == COMPLEX_CST))
13009 	{
13010 	  tree real0, imag0, real1, imag1;
13011 	  tree rcond, icond;
13012 
13013 	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
13014 	    {
13015 	      real0 = TREE_OPERAND (arg0, 0);
13016 	      imag0 = TREE_OPERAND (arg0, 1);
13017 	    }
13018 	  else
13019 	    {
13020 	      real0 = TREE_REALPART (arg0);
13021 	      imag0 = TREE_IMAGPART (arg0);
13022 	    }
13023 
13024 	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
13025 	    {
13026 	      real1 = TREE_OPERAND (arg1, 0);
13027 	      imag1 = TREE_OPERAND (arg1, 1);
13028 	    }
13029 	  else
13030 	    {
13031 	      real1 = TREE_REALPART (arg1);
13032 	      imag1 = TREE_IMAGPART (arg1);
13033 	    }
13034 
13035 	  rcond = fold_binary_loc (loc, code, type, real0, real1);
13036 	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13037 	    {
13038 	      if (integer_zerop (rcond))
13039 		{
13040 		  if (code == EQ_EXPR)
13041 		    return omit_two_operands_loc (loc, type, boolean_false_node,
13042 					      imag0, imag1);
13043 		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13044 		}
13045 	      else
13046 		{
13047 		  if (code == NE_EXPR)
13048 		    return omit_two_operands_loc (loc, type, boolean_true_node,
13049 					      imag0, imag1);
13050 		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13051 		}
13052 	    }
13053 
13054 	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
13055 	  if (icond && TREE_CODE (icond) == INTEGER_CST)
13056 	    {
13057 	      if (integer_zerop (icond))
13058 		{
13059 		  if (code == EQ_EXPR)
13060 		    return omit_two_operands_loc (loc, type, boolean_false_node,
13061 					      real0, real1);
13062 		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13063 		}
13064 	      else
13065 		{
13066 		  if (code == NE_EXPR)
13067 		    return omit_two_operands_loc (loc, type, boolean_true_node,
13068 					      real0, real1);
13069 		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13070 		}
13071 	    }
13072 	}
13073 
13074       return NULL_TREE;
13075 
13076     case LT_EXPR:
13077     case GT_EXPR:
13078     case LE_EXPR:
13079     case GE_EXPR:
13080       tem = fold_comparison (loc, code, type, op0, op1);
13081       if (tem != NULL_TREE)
13082 	return tem;
13083 
13084       /* Transform comparisons of the form X +- C CMP X.  */
13085       if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13086 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13087 	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13088 	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13089 	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13090 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13091 	{
13092 	  tree arg01 = TREE_OPERAND (arg0, 1);
13093 	  enum tree_code code0 = TREE_CODE (arg0);
13094 	  int is_positive;
13095 
13096 	  if (TREE_CODE (arg01) == REAL_CST)
13097 	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13098 	  else
13099 	    is_positive = tree_int_cst_sgn (arg01);
13100 
13101 	  /* (X - c) > X becomes false.  */
13102 	  if (code == GT_EXPR
13103 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
13104 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
13105 	    {
13106 	      if (TREE_CODE (arg01) == INTEGER_CST
13107 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13108 		fold_overflow_warning (("assuming signed overflow does not "
13109 					"occur when assuming that (X - c) > X "
13110 					"is always false"),
13111 				       WARN_STRICT_OVERFLOW_ALL);
13112 	      return constant_boolean_node (0, type);
13113 	    }
13114 
13115 	  /* Likewise (X + c) < X becomes false.  */
13116 	  if (code == LT_EXPR
13117 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
13118 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
13119 	    {
13120 	      if (TREE_CODE (arg01) == INTEGER_CST
13121 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13122 		fold_overflow_warning (("assuming signed overflow does not "
13123 					"occur when assuming that "
13124 					"(X + c) < X is always false"),
13125 				       WARN_STRICT_OVERFLOW_ALL);
13126 	      return constant_boolean_node (0, type);
13127 	    }
13128 
13129 	  /* Convert (X - c) <= X to true.  */
13130 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13131 	      && code == LE_EXPR
13132 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
13133 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
13134 	    {
13135 	      if (TREE_CODE (arg01) == INTEGER_CST
13136 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13137 		fold_overflow_warning (("assuming signed overflow does not "
13138 					"occur when assuming that "
13139 					"(X - c) <= X is always true"),
13140 				       WARN_STRICT_OVERFLOW_ALL);
13141 	      return constant_boolean_node (1, type);
13142 	    }
13143 
13144 	  /* Convert (X + c) >= X to true.  */
13145 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13146 	      && code == GE_EXPR
13147 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
13148 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
13149 	    {
13150 	      if (TREE_CODE (arg01) == INTEGER_CST
13151 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13152 		fold_overflow_warning (("assuming signed overflow does not "
13153 					"occur when assuming that "
13154 					"(X + c) >= X is always true"),
13155 				       WARN_STRICT_OVERFLOW_ALL);
13156 	      return constant_boolean_node (1, type);
13157 	    }
13158 
13159 	  if (TREE_CODE (arg01) == INTEGER_CST)
13160 	    {
13161 	      /* Convert X + c > X and X - c < X to true for integers.  */
13162 	      if (code == GT_EXPR
13163 	          && ((code0 == PLUS_EXPR && is_positive > 0)
13164 		      || (code0 == MINUS_EXPR && is_positive < 0)))
13165 		{
13166 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13167 		    fold_overflow_warning (("assuming signed overflow does "
13168 					    "not occur when assuming that "
13169 					    "(X + c) > X is always true"),
13170 					   WARN_STRICT_OVERFLOW_ALL);
13171 		  return constant_boolean_node (1, type);
13172 		}
13173 
13174 	      if (code == LT_EXPR
13175 	          && ((code0 == MINUS_EXPR && is_positive > 0)
13176 		      || (code0 == PLUS_EXPR && is_positive < 0)))
13177 		{
13178 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13179 		    fold_overflow_warning (("assuming signed overflow does "
13180 					    "not occur when assuming that "
13181 					    "(X - c) < X is always true"),
13182 					   WARN_STRICT_OVERFLOW_ALL);
13183 		  return constant_boolean_node (1, type);
13184 		}
13185 
13186 	      /* Convert X + c <= X and X - c >= X to false for integers.  */
13187 	      if (code == LE_EXPR
13188 	          && ((code0 == PLUS_EXPR && is_positive > 0)
13189 		      || (code0 == MINUS_EXPR && is_positive < 0)))
13190 		{
13191 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13192 		    fold_overflow_warning (("assuming signed overflow does "
13193 					    "not occur when assuming that "
13194 					    "(X + c) <= X is always false"),
13195 					   WARN_STRICT_OVERFLOW_ALL);
13196 		  return constant_boolean_node (0, type);
13197 		}
13198 
13199 	      if (code == GE_EXPR
13200 	          && ((code0 == MINUS_EXPR && is_positive > 0)
13201 		      || (code0 == PLUS_EXPR && is_positive < 0)))
13202 		{
13203 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13204 		    fold_overflow_warning (("assuming signed overflow does "
13205 					    "not occur when assuming that "
13206 					    "(X - c) >= X is always false"),
13207 					   WARN_STRICT_OVERFLOW_ALL);
13208 		  return constant_boolean_node (0, type);
13209 		}
13210 	    }
13211 	}
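      /* Illustrative sketch (not from the original sources): with signed
	 overflow assumed undefined, x + 1 > x folds to 1 and x + 1 <= x
	 folds to 0.  For floats, x - 1.0 > x still folds to 0, but
	 x - 1.0 <= x folds to 1 only when NaNs need not be honored.  */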
13212 
13213       /* Comparisons with the highest or lowest possible integer of
13214 	 the specified precision will have known values.  */
13215       {
13216 	tree arg1_type = TREE_TYPE (arg1);
13217 	unsigned int width = TYPE_PRECISION (arg1_type);
13218 
13219 	if (TREE_CODE (arg1) == INTEGER_CST
13220 	    && width <= 2 * HOST_BITS_PER_WIDE_INT
13221 	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13222 	  {
13223 	    HOST_WIDE_INT signed_max_hi;
13224 	    unsigned HOST_WIDE_INT signed_max_lo;
13225 	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13226 
13227 	    if (width <= HOST_BITS_PER_WIDE_INT)
13228 	      {
13229 		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13230 				- 1;
13231 		signed_max_hi = 0;
13232 		max_hi = 0;
13233 
13234 		if (TYPE_UNSIGNED (arg1_type))
13235 		  {
13236 		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13237 		    min_lo = 0;
13238 		    min_hi = 0;
13239 		  }
13240 		else
13241 		  {
13242 		    max_lo = signed_max_lo;
13243 		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13244 		    min_hi = -1;
13245 		  }
13246 	      }
13247 	    else
13248 	      {
13249 		width -= HOST_BITS_PER_WIDE_INT;
13250 		signed_max_lo = -1;
13251 		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13252 				- 1;
13253 		max_lo = -1;
13254 		min_lo = 0;
13255 
13256 		if (TYPE_UNSIGNED (arg1_type))
13257 		  {
13258 		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13259 		    min_hi = 0;
13260 		  }
13261 		else
13262 		  {
13263 		    max_hi = signed_max_hi;
13264 		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13265 		  }
13266 	      }
13267 
13268 	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13269 		&& TREE_INT_CST_LOW (arg1) == max_lo)
13270 	      switch (code)
13271 		{
13272 		case GT_EXPR:
13273 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13274 
13275 		case GE_EXPR:
13276 		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13277 
13278 		case LE_EXPR:
13279 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13280 
13281 		case LT_EXPR:
13282 		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13283 
13284 		/* The GE_EXPR and LT_EXPR cases above are not normally
13285 		   reached because of previous transformations.  */
13286 
13287 		default:
13288 		  break;
13289 		}
13290 	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13291 		     == max_hi
13292 		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13293 	      switch (code)
13294 		{
13295 		case GT_EXPR:
13296 		  arg1 = const_binop (PLUS_EXPR, arg1,
13297 				      build_int_cst (TREE_TYPE (arg1), 1));
13298 		  return fold_build2_loc (loc, EQ_EXPR, type,
13299 				      fold_convert_loc (loc,
13300 							TREE_TYPE (arg1), arg0),
13301 				      arg1);
13302 		case LE_EXPR:
13303 		  arg1 = const_binop (PLUS_EXPR, arg1,
13304 				      build_int_cst (TREE_TYPE (arg1), 1));
13305 		  return fold_build2_loc (loc, NE_EXPR, type,
13306 				      fold_convert_loc (loc, TREE_TYPE (arg1),
13307 							arg0),
13308 				      arg1);
13309 		default:
13310 		  break;
13311 		}
13312 	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13313 		     == min_hi
13314 		     && TREE_INT_CST_LOW (arg1) == min_lo)
13315 	      switch (code)
13316 		{
13317 		case LT_EXPR:
13318 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13319 
13320 		case LE_EXPR:
13321 		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13322 
13323 		case GE_EXPR:
13324 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13325 
13326 		case GT_EXPR:
13327 		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13328 
13329 		default:
13330 		  break;
13331 		}
13332 	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13333 		     == min_hi
13334 		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13335 	      switch (code)
13336 		{
13337 		case GE_EXPR:
13338 		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13339 		  return fold_build2_loc (loc, NE_EXPR, type,
13340 				      fold_convert_loc (loc,
13341 							TREE_TYPE (arg1), arg0),
13342 				      arg1);
13343 		case LT_EXPR:
13344 		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13345 		  return fold_build2_loc (loc, EQ_EXPR, type,
13346 				      fold_convert_loc (loc, TREE_TYPE (arg1),
13347 							arg0),
13348 				      arg1);
13349 		default:
13350 		  break;
13351 		}
13352 
13353 	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13354 		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
13355 		     && TYPE_UNSIGNED (arg1_type)
13356 		     /* We will flip the signedness of the comparison operator
13357 			associated with the mode of arg1, so the sign bit is
13358 			specified by this mode.  Check that arg1 is the signed
13359 			max associated with this sign bit.  */
13360 		     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13361 		     /* signed_type does not work on pointer types.  */
13362 		     && INTEGRAL_TYPE_P (arg1_type))
13363 	      {
13364 		/* The following case also applies to X < signed_max+1
13365 		   and X >= signed_max+1 because of previous transformations.  */
13366 		if (code == LE_EXPR || code == GT_EXPR)
13367 		  {
13368 		    tree st;
13369 		    st = signed_type_for (TREE_TYPE (arg1));
13370 		    return fold_build2_loc (loc,
13371 					code == LE_EXPR ? GE_EXPR : LT_EXPR,
13372 					type, fold_convert_loc (loc, st, arg0),
13373 					build_int_cst (st, 0));
13374 		  }
13375 	      }
13376 	  }
13377       }
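      /* Illustrative sketch (not from the original sources): for signed
	 32-bit x, x > INT_MAX folds to 0, x >= INT_MIN folds to 1, and
	 x > INT_MAX - 1 folds to x == INT_MAX; for unsigned x,
	 x <= 0x7fffffffU folds to (int) x >= 0.  */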
13378 
13379       /* If we are comparing an ABS_EXPR with a constant, we can
13380 	 convert all the cases into explicit comparisons, but they may
13381 	 well not be faster than doing the ABS and one comparison.
13382 	 But ABS (X) <= C is a range comparison, which becomes a subtraction
13383 	 and a comparison, and is probably faster.  */
13384       if (code == LE_EXPR
13385 	  && TREE_CODE (arg1) == INTEGER_CST
13386 	  && TREE_CODE (arg0) == ABS_EXPR
13387 	  && ! TREE_SIDE_EFFECTS (arg0)
13388 	  && (0 != (tem = negate_expr (arg1)))
13389 	  && TREE_CODE (tem) == INTEGER_CST
13390 	  && !TREE_OVERFLOW (tem))
13391 	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13392 			    build2 (GE_EXPR, type,
13393 				    TREE_OPERAND (arg0, 0), tem),
13394 			    build2 (LE_EXPR, type,
13395 				    TREE_OPERAND (arg0, 0), arg1));
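      /* Illustrative example (not from the original sources):
	 abs (x) <= 7 folds to x >= -7 && x <= 7, the range form that the
	 comment above expects to become a subtraction and one
	 comparison.  */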
13396 
13397       /* Convert ABS_EXPR<x> >= 0 to true.  */
13398       strict_overflow_p = false;
13399       if (code == GE_EXPR
13400 	  && (integer_zerop (arg1)
13401 	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13402 		  && real_zerop (arg1)))
13403 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13404 	{
13405 	  if (strict_overflow_p)
13406 	    fold_overflow_warning (("assuming signed overflow does not occur "
13407 				    "when simplifying comparison of "
13408 				    "absolute value and zero"),
13409 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
13410 	  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13411 	}
13412 
13413       /* Convert ABS_EXPR<x> < 0 to false.  */
13414       strict_overflow_p = false;
13415       if (code == LT_EXPR
13416 	  && (integer_zerop (arg1) || real_zerop (arg1))
13417 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13418 	{
13419 	  if (strict_overflow_p)
13420 	    fold_overflow_warning (("assuming signed overflow does not occur "
13421 				    "when simplifying comparison of "
13422 				    "absolute value and zero"),
13423 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
13424 	  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13425 	}
13426 
13427       /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13428 	 and similarly for >= into !=.  */
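      /* Concrete example (illustrative): for unsigned x and n == 3,
	 x < (1 << 3) becomes (x >> 3) == 0, and x >= (1 << 3)
	 becomes (x >> 3) != 0.  */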
13429       if ((code == LT_EXPR || code == GE_EXPR)
13430 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
13431 	  && TREE_CODE (arg1) == LSHIFT_EXPR
13432 	  && integer_onep (TREE_OPERAND (arg1, 0)))
13433 	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13434 			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13435 				   TREE_OPERAND (arg1, 1)),
13436 			   build_int_cst (TREE_TYPE (arg0), 0));
13437 
13438       if ((code == LT_EXPR || code == GE_EXPR)
13439 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
13440 	  && CONVERT_EXPR_P (arg1)
13441 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13442 	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13443 	{
13444 	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13445 			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13446 	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13447 			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13448 			     build_int_cst (TREE_TYPE (arg0), 0));
13449 	}
13450 
13451       return NULL_TREE;
13452 
13453     case UNORDERED_EXPR:
13454     case ORDERED_EXPR:
13455     case UNLT_EXPR:
13456     case UNLE_EXPR:
13457     case UNGT_EXPR:
13458     case UNGE_EXPR:
13459     case UNEQ_EXPR:
13460     case LTGT_EXPR:
13461       if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13462 	{
13463 	  t1 = fold_relational_const (code, type, arg0, arg1);
13464 	  if (t1 != NULL_TREE)
13465 	    return t1;
13466 	}
13467 
13468       /* If the first operand is NaN, the result is constant.  */
13469       if (TREE_CODE (arg0) == REAL_CST
13470 	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13471 	  && (code != LTGT_EXPR || ! flag_trapping_math))
13472 	{
13473 	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13474 	       ? integer_zero_node
13475 	       : integer_one_node;
13476 	  return omit_one_operand_loc (loc, type, t1, arg1);
13477 	}
13478 
13479       /* If the second operand is NaN, the result is constant.  */
13480       if (TREE_CODE (arg1) == REAL_CST
13481 	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13482 	  && (code != LTGT_EXPR || ! flag_trapping_math))
13483 	{
13484 	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13485 	       ? integer_zero_node
13486 	       : integer_one_node;
13487 	  return omit_one_operand_loc (loc, type, t1, arg0);
13488 	}
13489 
13490       /* Simplify unordered comparison of something with itself.  */
13491       if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13492 	  && operand_equal_p (arg0, arg1, 0))
13493 	return constant_boolean_node (1, type);
13494 
13495       if (code == LTGT_EXPR
13496 	  && !flag_trapping_math
13497 	  && operand_equal_p (arg0, arg1, 0))
13498 	return constant_boolean_node (0, type);
13499 
13500       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
13501       {
13502 	tree targ0 = strip_float_extensions (arg0);
13503 	tree targ1 = strip_float_extensions (arg1);
13504 	tree newtype = TREE_TYPE (targ0);
13505 
13506 	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13507 	  newtype = TREE_TYPE (targ1);
13508 
13509 	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13510 	  return fold_build2_loc (loc, code, type,
13511 			      fold_convert_loc (loc, newtype, targ0),
13512 			      fold_convert_loc (loc, newtype, targ1));
13513       }
13514 
13515       return NULL_TREE;
13516 
13517     case COMPOUND_EXPR:
13518       /* When pedantic, a compound expression can be neither an lvalue
13519 	 nor an integer constant expression.  */
13520       if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13521 	return NULL_TREE;
13522       /* Don't let (0, 0) be a null pointer constant.  */
13523       tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13524 				 : fold_convert_loc (loc, type, arg1);
13525       return pedantic_non_lvalue_loc (loc, tem);
13526 
13527     case COMPLEX_EXPR:
13528       if ((TREE_CODE (arg0) == REAL_CST
13529 	   && TREE_CODE (arg1) == REAL_CST)
13530 	  || (TREE_CODE (arg0) == INTEGER_CST
13531 	      && TREE_CODE (arg1) == INTEGER_CST))
13532 	return build_complex (type, arg0, arg1);
13533       if (TREE_CODE (arg0) == REALPART_EXPR
13534 	  && TREE_CODE (arg1) == IMAGPART_EXPR
13535 	  && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13536 	  && operand_equal_p (TREE_OPERAND (arg0, 0),
13537 			      TREE_OPERAND (arg1, 0), 0))
13538 	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13539 				     TREE_OPERAND (arg1, 0));
13540       return NULL_TREE;
13541 
13542     case ASSERT_EXPR:
13543       /* An ASSERT_EXPR should never be passed to fold_binary.  */
13544       gcc_unreachable ();
13545 
13546     case VEC_PACK_TRUNC_EXPR:
13547     case VEC_PACK_FIX_TRUNC_EXPR:
13548       {
13549 	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13550 	tree *elts, vals = NULL_TREE;
13551 
13552 	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13553 		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13554 	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13555 	  return NULL_TREE;
13556 
13557 	elts = XALLOCAVEC (tree, nelts);
13558 	if (!vec_cst_ctor_to_array (arg0, elts)
13559 	    || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13560 	  return NULL_TREE;
13561 
13562 	for (i = 0; i < nelts; i++)
13563 	  {
13564 	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13565 					  ? NOP_EXPR : FIX_TRUNC_EXPR,
13566 					  TREE_TYPE (type), elts[i]);
13567 	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13568 	      return NULL_TREE;
13569 	  }
13570 
13571 	for (i = 0; i < nelts; i++)
13572 	  vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals);
13573 	return build_vector (type, vals);
13574       }
13575 
13576     case VEC_WIDEN_MULT_LO_EXPR:
13577     case VEC_WIDEN_MULT_HI_EXPR:
13578       {
13579 	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13580 	tree *elts, vals = NULL_TREE;
13581 
13582 	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13583 		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13584 	if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13585 	  return NULL_TREE;
13586 
13587 	elts = XALLOCAVEC (tree, nelts * 4);
13588 	if (!vec_cst_ctor_to_array (arg0, elts)
13589 	    || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13590 	  return NULL_TREE;
13591 
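	/* VEC_WIDEN_MULT_LO/HI multiply the low or the high half of the
	   input vectors; which half that is depends on the target's
	   endianness, so skip over the unused half when necessary.  */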
13592 	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_WIDEN_MULT_LO_EXPR))
13593 	  elts += nelts;
13594 
13595 	for (i = 0; i < nelts; i++)
13596 	  {
13597 	    elts[i] = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[i]);
13598 	    elts[i + nelts * 2]
13599 	      = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
13600 				    elts[i + nelts * 2]);
13601 	    if (elts[i] == NULL_TREE || elts[i + nelts * 2] == NULL_TREE)
13602 	      return NULL_TREE;
13603 	    elts[i] = const_binop (MULT_EXPR, elts[i], elts[i + nelts * 2]);
13604 	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13605 	      return NULL_TREE;
13606 	  }
13607 
13608 	for (i = 0; i < nelts; i++)
13609 	  vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals);
13610 	return build_vector (type, vals);
13611       }
13612 
13613     default:
13614       return NULL_TREE;
13615     } /* switch (code) */
13616 }
13617 
13618 /* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
13619    a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
13620    of GOTO_EXPR.  */
13621 
13622 static tree
13623 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13624 {
13625   switch (TREE_CODE (*tp))
13626     {
13627     case LABEL_EXPR:
13628       return *tp;
13629 
13630     case GOTO_EXPR:
13631       *walk_subtrees = 0;
13632 
13633       /* ... fall through ...  */
13634 
13635     default:
13636       return NULL_TREE;
13637     }
13638 }
13639 
13640 /* Return whether the sub-tree ST contains a label which is accessible from
13641    outside the sub-tree.  */
13642 
13643 static bool
13644 contains_label_p (tree st)
13645 {
13646   return
13647    (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13648 }
13649 
13650 /* Fold a ternary expression of code CODE and type TYPE with operands
13651    OP0, OP1, and OP2.  Return the folded expression if folding is
13652    successful.  Otherwise, return NULL_TREE.  */
13653 
13654 tree
13655 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13656 		  tree op0, tree op1, tree op2)
13657 {
13658   tree tem;
13659   tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13660   enum tree_code_class kind = TREE_CODE_CLASS (code);
13661 
13662   gcc_assert (IS_EXPR_CODE_CLASS (kind)
13663 	      && TREE_CODE_LENGTH (code) == 3);
13664 
13665   /* Strip any conversions that don't change the mode.  This is safe
13666      for every expression, except for a comparison expression because
13667      its signedness is derived from its operands.  So, in the latter
13668      case, only strip conversions that don't change the signedness.
13669 
13670      Note that this is done as an internal manipulation within the
13671      constant folder, in order to find the simplest representation of
13672      the arguments so that their form can be studied.  In any case,
13673      the appropriate type conversions should be put back in the tree
13674      that will get out of the constant folder.  */
13675   if (op0)
13676     {
13677       arg0 = op0;
13678       STRIP_NOPS (arg0);
13679     }
13680 
13681   if (op1)
13682     {
13683       arg1 = op1;
13684       STRIP_NOPS (arg1);
13685     }
13686 
13687   if (op2)
13688     {
13689       arg2 = op2;
13690       STRIP_NOPS (arg2);
13691     }
13692 
13693   switch (code)
13694     {
13695     case COMPONENT_REF:
13696       if (TREE_CODE (arg0) == CONSTRUCTOR
13697 	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13698 	{
13699 	  unsigned HOST_WIDE_INT idx;
13700 	  tree field, value;
13701 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13702 	    if (field == arg1)
13703 	      return value;
13704 	}
13705       return NULL_TREE;
13706 
13707     case COND_EXPR:
13708       /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13709 	 so all simple results must be passed through pedantic_non_lvalue.  */
13710       if (TREE_CODE (arg0) == INTEGER_CST)
13711 	{
13712 	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
13713 	  tem = integer_zerop (arg0) ? op2 : op1;
13714 	  /* Only optimize constant conditions when the selected branch
13715 	     has the same type as the COND_EXPR.  This avoids optimizing
13716              away "c ? x : throw", where the throw has a void type.
13717              Also avoid throwing away an operand that contains a label.  */
13718           if ((!TREE_SIDE_EFFECTS (unused_op)
13719                || !contains_label_p (unused_op))
13720               && (! VOID_TYPE_P (TREE_TYPE (tem))
13721                   || VOID_TYPE_P (type)))
13722 	    return pedantic_non_lvalue_loc (loc, tem);
13723 	  return NULL_TREE;
13724 	}
13725       if (operand_equal_p (arg1, op2, 0))
13726 	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13727 
13728       /* If we have A op B ? A : C, we may be able to convert this to a
13729 	 simpler expression, depending on the operation and the values
13730 	 of B and C.  Signed zeros prevent all of these transformations,
13731 	 for reasons given above each one.
13732 
13733          Also try swapping the arguments and inverting the conditional.  */
13734       if (COMPARISON_CLASS_P (arg0)
13735 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13736 					     arg1, TREE_OPERAND (arg0, 1))
13737 	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13738 	{
13739 	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13740 	  if (tem)
13741 	    return tem;
13742 	}
13743 
13744       if (COMPARISON_CLASS_P (arg0)
13745 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13746 					     op2,
13747 					     TREE_OPERAND (arg0, 1))
13748 	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13749 	{
13750 	  location_t loc0 = expr_location_or (arg0, loc);
13751 	  tem = fold_truth_not_expr (loc0, arg0);
13752 	  if (tem && COMPARISON_CLASS_P (tem))
13753 	    {
13754 	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13755 	      if (tem)
13756 		return tem;
13757 	    }
13758 	}
13759 
13760       /* If the second operand is simpler than the third, swap them
13761 	 since that produces better jump optimization results.  */
13762       if (truth_value_p (TREE_CODE (arg0))
13763 	  && tree_swap_operands_p (op1, op2, false))
13764 	{
13765 	  location_t loc0 = expr_location_or (arg0, loc);
13766 	  /* See if this can be inverted.  If it can't, possibly because
13767 	     it was a floating-point inequality comparison, don't do
13768 	     anything.  */
13769 	  tem = fold_truth_not_expr (loc0, arg0);
13770 	  if (tem)
13771 	    return fold_build3_loc (loc, code, type, tem, op2, op1);
13772 	}
13773 
13774       /* Convert A ? 1 : 0 to simply A.  */
13775       if (integer_onep (op1)
13776 	  && integer_zerop (op2)
13777 	  /* If we try to convert OP0 to our type, the
13778 	     call to fold will try to move the conversion inside
13779 	     a COND, which will recurse.  In that case, the COND_EXPR
13780 	     is probably the best choice, so leave it alone.  */
13781 	  && type == TREE_TYPE (arg0))
13782 	return pedantic_non_lvalue_loc (loc, arg0);
13783 
13784       /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
13785 	 over COND_EXPR in cases such as floating point comparisons.  */
13786       if (integer_zerop (op1)
13787 	  && integer_onep (op2)
13788 	  && truth_value_p (TREE_CODE (arg0)))
13789 	return pedantic_non_lvalue_loc (loc,
13790 				    fold_convert_loc (loc, type,
13791 					      invert_truthvalue_loc (loc,
13792 								     arg0)));
13793 
13794       /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
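      /* Illustrative example for 32-bit A (types elided for brevity):
	 A < 0 ? 0x80000000 : 0 becomes A & 0x80000000.  */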
13795       if (TREE_CODE (arg0) == LT_EXPR
13796 	  && integer_zerop (TREE_OPERAND (arg0, 1))
13797 	  && integer_zerop (op2)
13798 	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13799 	{
13800 	  /* sign_bit_p only checks ARG1 bits within A's precision.
13801 	     If <sign bit of A> has wider type than A, bits outside
13802 	     of A's precision in <sign bit of A> need to be checked.
13803 	     If they are all 0, this optimization needs to be done
13804 	     in unsigned A's type; if they are all 1, in signed A's
13805 	     type; otherwise this can't be done.  */
13806 	  if (TYPE_PRECISION (TREE_TYPE (tem))
13807 	      < TYPE_PRECISION (TREE_TYPE (arg1))
13808 	      && TYPE_PRECISION (TREE_TYPE (tem))
13809 		 < TYPE_PRECISION (type))
13810 	    {
13811 	      unsigned HOST_WIDE_INT mask_lo;
13812 	      HOST_WIDE_INT mask_hi;
13813 	      int inner_width, outer_width;
13814 	      tree tem_type;
13815 
13816 	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13817 	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13818 	      if (outer_width > TYPE_PRECISION (type))
13819 		outer_width = TYPE_PRECISION (type);
13820 
13821 	      if (outer_width > HOST_BITS_PER_WIDE_INT)
13822 		{
13823 		  mask_hi = ((unsigned HOST_WIDE_INT) -1
13824 			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13825 		  mask_lo = -1;
13826 		}
13827 	      else
13828 		{
13829 		  mask_hi = 0;
13830 		  mask_lo = ((unsigned HOST_WIDE_INT) -1
13831 			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
13832 		}
13833 	      if (inner_width > HOST_BITS_PER_WIDE_INT)
13834 		{
13835 		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13836 			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
13837 		  mask_lo = 0;
13838 		}
13839 	      else
13840 		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13841 			     >> (HOST_BITS_PER_WIDE_INT - inner_width));
13842 
13843 	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13844 		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13845 		{
13846 		  tem_type = signed_type_for (TREE_TYPE (tem));
13847 		  tem = fold_convert_loc (loc, tem_type, tem);
13848 		}
13849 	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13850 		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13851 		{
13852 		  tem_type = unsigned_type_for (TREE_TYPE (tem));
13853 		  tem = fold_convert_loc (loc, tem_type, tem);
13854 		}
13855 	      else
13856 		tem = NULL;
13857 	    }
13858 
13859 	  if (tem)
13860 	    return
13861 	      fold_convert_loc (loc, type,
13862 				fold_build2_loc (loc, BIT_AND_EXPR,
13863 					     TREE_TYPE (tem), tem,
13864 					     fold_convert_loc (loc,
13865 							       TREE_TYPE (tem),
13866 							       arg1)));
13867 	}
13868 
13869       /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
13870 	 already handled above.  */
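      /* E.g. (illustrative): ((a >> 2) & 1) ? 4 : 0 becomes a & 4.  */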
13871       if (TREE_CODE (arg0) == BIT_AND_EXPR
13872 	  && integer_onep (TREE_OPERAND (arg0, 1))
13873 	  && integer_zerop (op2)
13874 	  && integer_pow2p (arg1))
13875 	{
13876 	  tree tem = TREE_OPERAND (arg0, 0);
13877 	  STRIP_NOPS (tem);
13878 	  if (TREE_CODE (tem) == RSHIFT_EXPR
13879               && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13880               && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13881 	         TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13882 	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
13883 				TREE_OPERAND (tem, 0), arg1);
13884 	}
13885 
13886       /* A & N ? N : 0 is simply A & N if N is a power of two.  This
13887 	 is probably obsolete because the first operand should be a
13888 	 truth value (that's why we have the two cases above), but let's
13889 	 leave it in until we can confirm this for all front-ends.  */
13890       if (integer_zerop (op2)
13891 	  && TREE_CODE (arg0) == NE_EXPR
13892 	  && integer_zerop (TREE_OPERAND (arg0, 1))
13893 	  && integer_pow2p (arg1)
13894 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13895 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13896 			      arg1, OEP_ONLY_CONST))
13897 	return pedantic_non_lvalue_loc (loc,
13898 				    fold_convert_loc (loc, type,
13899 						      TREE_OPERAND (arg0, 0)));
13900 
13901       /* Convert A ? B : 0 into A && B if A and B are truth values.  */
13902       if (integer_zerop (op2)
13903 	  && truth_value_p (TREE_CODE (arg0))
13904 	  && truth_value_p (TREE_CODE (arg1)))
13905 	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13906 			    fold_convert_loc (loc, type, arg0),
13907 			    arg1);
13908 
13909       /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
13910       if (integer_onep (op2)
13911 	  && truth_value_p (TREE_CODE (arg0))
13912 	  && truth_value_p (TREE_CODE (arg1)))
13913 	{
13914 	  location_t loc0 = expr_location_or (arg0, loc);
13915 	  /* Only perform transformation if ARG0 is easily inverted.  */
13916 	  tem = fold_truth_not_expr (loc0, arg0);
13917 	  if (tem)
13918 	    return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13919 				fold_convert_loc (loc, type, tem),
13920 				arg1);
13921 	}
13922 
13923       /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
13924       if (integer_zerop (arg1)
13925 	  && truth_value_p (TREE_CODE (arg0))
13926 	  && truth_value_p (TREE_CODE (op2)))
13927 	{
13928 	  location_t loc0 = expr_location_or (arg0, loc);
13929 	  /* Only perform transformation if ARG0 is easily inverted.  */
13930 	  tem = fold_truth_not_expr (loc0, arg0);
13931 	  if (tem)
13932 	    return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13933 				fold_convert_loc (loc, type, tem),
13934 				op2);
13935 	}
13936 
13937       /* Convert A ? 1 : B into A || B if A and B are truth values.  */
13938       if (integer_onep (arg1)
13939 	  && truth_value_p (TREE_CODE (arg0))
13940 	  && truth_value_p (TREE_CODE (op2)))
13941 	return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13942 			    fold_convert_loc (loc, type, arg0),
13943 			    op2);
13944 
13945       return NULL_TREE;
13946 
13947     case CALL_EXPR:
13948       /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
13949 	 of fold_ternary on them.  */
13950       gcc_unreachable ();
13951 
13952     case BIT_FIELD_REF:
13953       if ((TREE_CODE (arg0) == VECTOR_CST
13954 	   || TREE_CODE (arg0) == CONSTRUCTOR)
13955 	  && type == TREE_TYPE (TREE_TYPE (arg0)))
13956 	{
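	  /* ARG1 gives the width of the reference in bits and OP2 the
	     bit offset; if the width equals the vector element size and
	     the offset is element-aligned, element number OP2/ARG1 can
	     be read directly out of the constant.  */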
13957 	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13958 	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13959 
13960 	  if (width != 0
13961 	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13962 	      && (idx % width) == 0
13963 	      && (idx = idx / width)
13964 		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13965 	    {
13966 	      if (TREE_CODE (arg0) == VECTOR_CST)
13967 		{
13968 		  tree elements = TREE_VECTOR_CST_ELTS (arg0);
13969 		  while (idx-- > 0 && elements)
13970 		    elements = TREE_CHAIN (elements);
13971 		  if (elements)
13972 		    return TREE_VALUE (elements);
13973 		}
13974 	      else if (idx < CONSTRUCTOR_NELTS (arg0))
13975 		return CONSTRUCTOR_ELT (arg0, idx)->value;
13976 	      return build_zero_cst (type);
13977 	    }
13978 	}
13979 
13980       /* A bit-field-ref that referenced the full argument can be stripped.  */
13981       if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13982 	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13983 	  && integer_zerop (op2))
13984 	return fold_convert_loc (loc, type, arg0);
13985 
13986       return NULL_TREE;
13987 
13988     case FMA_EXPR:
13989       /* For integers we can decompose the FMA if possible.  */
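      /* Worked example (illustrative): FMA (2, 3, c) becomes
	 2 * 3 + c, i.e. 6 + c after the inner constant folding.  */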
13990       if (TREE_CODE (arg0) == INTEGER_CST
13991 	  && TREE_CODE (arg1) == INTEGER_CST)
13992 	return fold_build2_loc (loc, PLUS_EXPR, type,
13993 				const_binop (MULT_EXPR, arg0, arg1), arg2);
13994       if (integer_zerop (arg2))
13995 	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13996 
13997       return fold_fma (loc, type, arg0, arg1, arg2);
13998 
13999     case VEC_PERM_EXPR:
14000       if (TREE_CODE (arg2) == VECTOR_CST)
14001 	{
14002 	  unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14003 	  unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14004 	  tree t;
14005 	  bool need_mask_canon = false;
14006 
14007 	  gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)));
14008 	  for (i = 0, t = TREE_VECTOR_CST_ELTS (arg2);
14009 	       i < nelts && t; i++, t = TREE_CHAIN (t))
14010 	    {
14011 	      if (TREE_CODE (TREE_VALUE (t)) != INTEGER_CST)
14012 		return NULL_TREE;
14013 
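	      /* Selector elements index the concatenation of ARG0 and
		 ARG1, so they are only meaningful modulo 2*nelts; note
		 when the mask must be rewritten in canonical form.  */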
14014 	      sel[i] = TREE_INT_CST_LOW (TREE_VALUE (t)) & (2 * nelts - 1);
14015 	      if (TREE_INT_CST_HIGH (TREE_VALUE (t))
14016 		  || ((unsigned HOST_WIDE_INT)
14017 		      TREE_INT_CST_LOW (TREE_VALUE (t)) != sel[i]))
14018 		need_mask_canon = true;
14019 	    }
14020 	  if (t)
14021 	    return NULL_TREE;
14022 	  for (; i < nelts; i++)
14023 	    sel[i] = 0;
14024 
14025 	  if ((TREE_CODE (arg0) == VECTOR_CST
14026 	       || TREE_CODE (arg0) == CONSTRUCTOR)
14027 	      && (TREE_CODE (arg1) == VECTOR_CST
14028 		  || TREE_CODE (arg1) == CONSTRUCTOR))
14029 	    {
14030 	      t = fold_vec_perm (type, arg0, arg1, sel);
14031 	      if (t != NULL_TREE)
14032 		return t;
14033 	    }
14034 
14035 	  if (need_mask_canon && arg2 == op2)
14036 	    {
14037 	      tree list = NULL_TREE, eltype = TREE_TYPE (TREE_TYPE (arg2));
14038 	      for (i = 0; i < nelts; i++)
14039 		list = tree_cons (NULL_TREE,
14040 				  build_int_cst (eltype, sel[nelts - i - 1]),
14041 				  list);
14042 	      t = build_vector (TREE_TYPE (arg2), list);
14043 	      return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, t);
14044 	    }
14045 	}
14046       return NULL_TREE;
14047 
14048     default:
14049       return NULL_TREE;
14050     } /* switch (code) */
14051 }
14052 
14053 /* Perform constant folding and related simplification of EXPR.
14054    The related simplifications include x*1 => x, x*0 => 0, etc.,
14055    and application of the associative law.
14056    NOP_EXPR conversions may be removed freely (as long as we
14057    are careful not to change the type of the overall expression).
14058    We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14059    but we can constant-fold them if they have constant operands.  */
14060 
14061 #ifdef ENABLE_FOLD_CHECKING
14062 # define fold(x) fold_1 (x)
14063 static tree fold_1 (tree);
14064 static
14065 #endif
14066 tree
14067 fold (tree expr)
14068 {
14069   const tree t = expr;
14070   enum tree_code code = TREE_CODE (t);
14071   enum tree_code_class kind = TREE_CODE_CLASS (code);
14072   tree tem;
14073   location_t loc = EXPR_LOCATION (expr);
14074 
14075   /* Return right away if a constant.  */
14076   if (kind == tcc_constant)
14077     return t;
14078 
14079   /* CALL_EXPR-like objects with variable numbers of operands are
14080      treated specially.  */
14081   if (kind == tcc_vl_exp)
14082     {
14083       if (code == CALL_EXPR)
14084 	{
14085 	  tem = fold_call_expr (loc, expr, false);
14086 	  return tem ? tem : expr;
14087 	}
14088       return expr;
14089     }
14090 
14091   if (IS_EXPR_CODE_CLASS (kind))
14092     {
14093       tree type = TREE_TYPE (t);
14094       tree op0, op1, op2;
14095 
14096       switch (TREE_CODE_LENGTH (code))
14097 	{
14098 	case 1:
14099 	  op0 = TREE_OPERAND (t, 0);
14100 	  tem = fold_unary_loc (loc, code, type, op0);
14101 	  return tem ? tem : expr;
14102 	case 2:
14103 	  op0 = TREE_OPERAND (t, 0);
14104 	  op1 = TREE_OPERAND (t, 1);
14105 	  tem = fold_binary_loc (loc, code, type, op0, op1);
14106 	  return tem ? tem : expr;
14107 	case 3:
14108 	  op0 = TREE_OPERAND (t, 0);
14109 	  op1 = TREE_OPERAND (t, 1);
14110 	  op2 = TREE_OPERAND (t, 2);
14111 	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14112 	  return tem ? tem : expr;
14113 	default:
14114 	  break;
14115 	}
14116     }
14117 
14118   switch (code)
14119     {
14120     case ARRAY_REF:
14121       {
14122 	tree op0 = TREE_OPERAND (t, 0);
14123 	tree op1 = TREE_OPERAND (t, 1);
14124 
14125 	if (TREE_CODE (op1) == INTEGER_CST
14126 	    && TREE_CODE (op0) == CONSTRUCTOR
14127 	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14128 	  {
14129 	    VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14130 	    unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14131 	    unsigned HOST_WIDE_INT begin = 0;
14132 
14133 	    /* Find a matching index by means of a binary search.  */
14134 	    while (begin != end)
14135 	      {
14136 		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14137 		tree index = VEC_index (constructor_elt, elts, middle)->index;
14138 
14139 		if (TREE_CODE (index) == INTEGER_CST
14140 		    && tree_int_cst_lt (index, op1))
14141 		  begin = middle + 1;
14142 		else if (TREE_CODE (index) == INTEGER_CST
14143 			 && tree_int_cst_lt (op1, index))
14144 		  end = middle;
14145 		else if (TREE_CODE (index) == RANGE_EXPR
14146 			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14147 		  begin = middle + 1;
14148 		else if (TREE_CODE (index) == RANGE_EXPR
14149 			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14150 		  end = middle;
14151 		else
14152 		  return VEC_index (constructor_elt, elts, middle)->value;
14153 	      }
14154 	  }
14155 
14156 	return t;
14157       }
14158 
14159     case CONST_DECL:
14160       return fold (DECL_INITIAL (t));
14161 
14162     default:
14163       return t;
14164     } /* switch (code) */
14165 }
14166 
14167 #ifdef ENABLE_FOLD_CHECKING
14168 #undef fold
14169 
14170 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
14171 static void fold_check_failed (const_tree, const_tree);
14172 void print_fold_checksum (const_tree);
14173 
14174 /* When --enable-checking=fold is in effect, compute a digest of EXPR
14175    before and after the actual fold call to verify that fold did not
14176    accidentally change the original expression.  */
14177 
14178 tree
14179 fold (tree expr)
14180 {
14181   tree ret;
14182   struct md5_ctx ctx;
14183   unsigned char checksum_before[16], checksum_after[16];
14184   htab_t ht;
14185 
14186   ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14187   md5_init_ctx (&ctx);
14188   fold_checksum_tree (expr, &ctx, ht);
14189   md5_finish_ctx (&ctx, checksum_before);
14190   htab_empty (ht);
14191 
14192   ret = fold_1 (expr);
14193 
14194   md5_init_ctx (&ctx);
14195   fold_checksum_tree (expr, &ctx, ht);
14196   md5_finish_ctx (&ctx, checksum_after);
14197   htab_delete (ht);
14198 
14199   if (memcmp (checksum_before, checksum_after, 16))
14200     fold_check_failed (expr, ret);
14201 
14202   return ret;
14203 }
14204 
14205 void
14206 print_fold_checksum (const_tree expr)
14207 {
14208   struct md5_ctx ctx;
14209   unsigned char checksum[16], cnt;
14210   htab_t ht;
14211 
14212   ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14213   md5_init_ctx (&ctx);
14214   fold_checksum_tree (expr, &ctx, ht);
14215   md5_finish_ctx (&ctx, checksum);
14216   htab_delete (ht);
14217   for (cnt = 0; cnt < 16; ++cnt)
14218     fprintf (stderr, "%02x", checksum[cnt]);
14219   putc ('\n', stderr);
14220 }
14221 
14222 static void
14223 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14224 {
14225   internal_error ("fold check: original tree changed by fold");
14226 }
14227 
14228 static void
14229 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
14230 {
14231   void **slot;
14232   enum tree_code code;
14233   union tree_node buf;
14234   int i, len;
14235 
14236  recursive_label:
14237   if (expr == NULL)
14238     return;
14239   slot = (void **) htab_find_slot (ht, expr, INSERT);
14240   if (*slot != NULL)
14241     return;
14242   *slot = CONST_CAST_TREE (expr);
14243   code = TREE_CODE (expr);
14244   if (TREE_CODE_CLASS (code) == tcc_declaration
14245       && DECL_ASSEMBLER_NAME_SET_P (expr))
14246     {
14247       /* Allow DECL_ASSEMBLER_NAME to be modified.  */
14248       memcpy ((char *) &buf, expr, tree_size (expr));
14249       SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14250       expr = (tree) &buf;
14251     }
14252   else if (TREE_CODE_CLASS (code) == tcc_type
14253 	   && (TYPE_POINTER_TO (expr)
14254 	       || TYPE_REFERENCE_TO (expr)
14255 	       || TYPE_CACHED_VALUES_P (expr)
14256 	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14257 	       || TYPE_NEXT_VARIANT (expr)))
14258     {
14259       /* Allow these fields to be modified.  */
14260       tree tmp;
14261       memcpy ((char *) &buf, expr, tree_size (expr));
14262       expr = tmp = (tree) &buf;
14263       TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14264       TYPE_POINTER_TO (tmp) = NULL;
14265       TYPE_REFERENCE_TO (tmp) = NULL;
14266       TYPE_NEXT_VARIANT (tmp) = NULL;
14267       if (TYPE_CACHED_VALUES_P (tmp))
14268 	{
14269 	  TYPE_CACHED_VALUES_P (tmp) = 0;
14270 	  TYPE_CACHED_VALUES (tmp) = NULL;
14271 	}
14272     }
14273   md5_process_bytes (expr, tree_size (expr), ctx);
14274   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14275     fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14276   if (TREE_CODE_CLASS (code) != tcc_type
14277       && TREE_CODE_CLASS (code) != tcc_declaration
14278       && code != TREE_LIST
14279       && code != SSA_NAME
14280       && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14281     fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14282   switch (TREE_CODE_CLASS (code))
14283     {
14284     case tcc_constant:
14285       switch (code)
14286 	{
14287 	case STRING_CST:
14288 	  md5_process_bytes (TREE_STRING_POINTER (expr),
14289 			     TREE_STRING_LENGTH (expr), ctx);
14290 	  break;
14291 	case COMPLEX_CST:
14292 	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14293 	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14294 	  break;
14295 	case VECTOR_CST:
14296 	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
14297 	  break;
14298 	default:
14299 	  break;
14300 	}
14301       break;
14302     case tcc_exceptional:
14303       switch (code)
14304 	{
14305 	case TREE_LIST:
14306 	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14307 	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14308 	  expr = TREE_CHAIN (expr);
14309 	  goto recursive_label;
14310 	  break;
14311 	case TREE_VEC:
14312 	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14313 	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14314 	  break;
14315 	default:
14316 	  break;
14317 	}
14318       break;
14319     case tcc_expression:
14320     case tcc_reference:
14321     case tcc_comparison:
14322     case tcc_unary:
14323     case tcc_binary:
14324     case tcc_statement:
14325     case tcc_vl_exp:
14326       len = TREE_OPERAND_LENGTH (expr);
14327       for (i = 0; i < len; ++i)
14328 	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14329       break;
14330     case tcc_declaration:
14331       fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14332       fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14333       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14334 	{
14335 	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14336 	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14337 	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14338 	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14339 	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14340 	}
14341       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14342 	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14343 
14344       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14345 	{
14346 	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14347 	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14348 	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14349 	}
14350       break;
14351     case tcc_type:
14352       if (TREE_CODE (expr) == ENUMERAL_TYPE)
14353         fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14354       fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14355       fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14356       fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14357       fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14358       if (INTEGRAL_TYPE_P (expr)
14359           || SCALAR_FLOAT_TYPE_P (expr))
14360 	{
14361 	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14362 	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14363 	}
14364       fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14365       if (TREE_CODE (expr) == RECORD_TYPE
14366 	  || TREE_CODE (expr) == UNION_TYPE
14367 	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
14368 	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14369       fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14370       break;
14371     default:
14372       break;
14373     }
14374 }
14375 
14376 /* Helper function for outputting the checksum of a tree T.  When
14377    debugging with gdb, you can "define mynext" to be "next" followed
14378    by "call debug_fold_checksum (op0)", then just trace down until the
14379    outputs differ.  */
14380 
14381 DEBUG_FUNCTION void
14382 debug_fold_checksum (const_tree t)
14383 {
14384   int i;
14385   unsigned char checksum[16];
14386   struct md5_ctx ctx;
14387   htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14388 
14389   md5_init_ctx (&ctx);
14390   fold_checksum_tree (t, &ctx, ht);
14391   md5_finish_ctx (&ctx, checksum);
14392   htab_empty (ht);
14393 
14394   for (i = 0; i < 16; i++)
14395     fprintf (stderr, "%d ", checksum[i]);
14396 
14397   fprintf (stderr, "\n");
14398 }
14399 
14400 #endif
14401 
14402 /* Fold a unary tree expression with code CODE of type TYPE with an
14403    operand OP0.  LOC is the location of the resulting expression.
14404    Return a folded expression if successful.  Otherwise, return a tree
14405    expression with code CODE of type TYPE with an operand OP0.  */
14406 
14407 tree
14408 fold_build1_stat_loc (location_t loc,
14409 		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14410 {
14411   tree tem;
14412 #ifdef ENABLE_FOLD_CHECKING
14413   unsigned char checksum_before[16], checksum_after[16];
14414   struct md5_ctx ctx;
14415   htab_t ht;
14416 
14417   ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14418   md5_init_ctx (&ctx);
14419   fold_checksum_tree (op0, &ctx, ht);
14420   md5_finish_ctx (&ctx, checksum_before);
14421   htab_empty (ht);
14422 #endif
14423 
14424   tem = fold_unary_loc (loc, code, type, op0);
14425   if (!tem)
14426     tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14427 
14428 #ifdef ENABLE_FOLD_CHECKING
14429   md5_init_ctx (&ctx);
14430   fold_checksum_tree (op0, &ctx, ht);
14431   md5_finish_ctx (&ctx, checksum_after);
14432   htab_delete (ht);
14433 
14434   if (memcmp (checksum_before, checksum_after, 16))
14435     fold_check_failed (op0, tem);
14436 #endif
14437   return tem;
14438 }
14439 
14440 /* Fold a binary tree expression with code CODE of type TYPE with
14441    operands OP0 and OP1.  LOC is the location of the resulting
14442    expression.  Return a folded expression if successful.  Otherwise,
14443    return a tree expression with code CODE of type TYPE with operands
14444    OP0 and OP1.  */
14445 
14446 tree
14447 fold_build2_stat_loc (location_t loc,
14448 		      enum tree_code code, tree type, tree op0, tree op1
14449 		      MEM_STAT_DECL)
14450 {
14451   tree tem;
14452 #ifdef ENABLE_FOLD_CHECKING
14453   unsigned char checksum_before_op0[16],
14454                 checksum_before_op1[16],
14455 		checksum_after_op0[16],
14456 		checksum_after_op1[16];
14457   struct md5_ctx ctx;
14458   htab_t ht;
14459 
14460   ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14461   md5_init_ctx (&ctx);
14462   fold_checksum_tree (op0, &ctx, ht);
14463   md5_finish_ctx (&ctx, checksum_before_op0);
14464   htab_empty (ht);
14465 
14466   md5_init_ctx (&ctx);
14467   fold_checksum_tree (op1, &ctx, ht);
14468   md5_finish_ctx (&ctx, checksum_before_op1);
14469   htab_empty (ht);
14470 #endif
14471 
14472   tem = fold_binary_loc (loc, code, type, op0, op1);
14473   if (!tem)
14474     tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14475 
14476 #ifdef ENABLE_FOLD_CHECKING
14477   md5_init_ctx (&ctx);
14478   fold_checksum_tree (op0, &ctx, ht);
14479   md5_finish_ctx (&ctx, checksum_after_op0);
14480   htab_empty (ht);
14481 
14482   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14483     fold_check_failed (op0, tem);
14484 
14485   md5_init_ctx (&ctx);
14486   fold_checksum_tree (op1, &ctx, ht);
14487   md5_finish_ctx (&ctx, checksum_after_op1);
14488   htab_delete (ht);
14489 
14490   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14491     fold_check_failed (op1, tem);
14492 #endif
14493   return tem;
14494 }
14495 
14496 /* Fold a ternary tree expression with code CODE of type TYPE with
14497    operands OP0, OP1, and OP2.  Return a folded expression if
14498    successful.  Otherwise, return a tree expression with code CODE of
14499    type TYPE with operands OP0, OP1, and OP2.  */
14500 
14501 tree
14502 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14503 		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
14504 {
14505   tree tem;
14506 #ifdef ENABLE_FOLD_CHECKING
14507   unsigned char checksum_before_op0[16],
14508                 checksum_before_op1[16],
14509                 checksum_before_op2[16],
14510 		checksum_after_op0[16],
14511 		checksum_after_op1[16],
14512 		checksum_after_op2[16];
14513   struct md5_ctx ctx;
14514   htab_t ht;
14515 
14516   ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14517   md5_init_ctx (&ctx);
14518   fold_checksum_tree (op0, &ctx, ht);
14519   md5_finish_ctx (&ctx, checksum_before_op0);
14520   htab_empty (ht);
14521 
14522   md5_init_ctx (&ctx);
14523   fold_checksum_tree (op1, &ctx, ht);
14524   md5_finish_ctx (&ctx, checksum_before_op1);
14525   htab_empty (ht);
14526 
14527   md5_init_ctx (&ctx);
14528   fold_checksum_tree (op2, &ctx, ht);
14529   md5_finish_ctx (&ctx, checksum_before_op2);
14530   htab_empty (ht);
14531 #endif
14532 
14533   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14534   tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14535   if (!tem)
14536     tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14537 
14538 #ifdef ENABLE_FOLD_CHECKING
14539   md5_init_ctx (&ctx);
14540   fold_checksum_tree (op0, &ctx, ht);
14541   md5_finish_ctx (&ctx, checksum_after_op0);
14542   htab_empty (ht);
14543 
14544   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14545     fold_check_failed (op0, tem);
14546 
14547   md5_init_ctx (&ctx);
14548   fold_checksum_tree (op1, &ctx, ht);
14549   md5_finish_ctx (&ctx, checksum_after_op1);
14550   htab_empty (ht);
14551 
14552   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14553     fold_check_failed (op1, tem);
14554 
14555   md5_init_ctx (&ctx);
14556   fold_checksum_tree (op2, &ctx, ht);
14557   md5_finish_ctx (&ctx, checksum_after_op2);
14558   htab_delete (ht);
14559 
14560   if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14561     fold_check_failed (op2, tem);
14562 #endif
14563   return tem;
14564 }
14565 
14566 /* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
14567    arguments in ARGARRAY, and a null static chain.
14568    Return a folded expression if successful.  Otherwise, return a CALL_EXPR
14569    of type TYPE from the given operands as constructed by build_call_array.  */
14570 
14571 tree
14572 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14573 			   int nargs, tree *argarray)
14574 {
14575   tree tem;
14576 #ifdef ENABLE_FOLD_CHECKING
14577   unsigned char checksum_before_fn[16],
14578                 checksum_before_arglist[16],
14579 		checksum_after_fn[16],
14580 		checksum_after_arglist[16];
14581   struct md5_ctx ctx;
14582   htab_t ht;
14583   int i;
14584 
14585   ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14586   md5_init_ctx (&ctx);
14587   fold_checksum_tree (fn, &ctx, ht);
14588   md5_finish_ctx (&ctx, checksum_before_fn);
14589   htab_empty (ht);
14590 
14591   md5_init_ctx (&ctx);
14592   for (i = 0; i < nargs; i++)
14593     fold_checksum_tree (argarray[i], &ctx, ht);
14594   md5_finish_ctx (&ctx, checksum_before_arglist);
14595   htab_empty (ht);
14596 #endif
14597 
14598   tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14599 
14600 #ifdef ENABLE_FOLD_CHECKING
14601   md5_init_ctx (&ctx);
14602   fold_checksum_tree (fn, &ctx, ht);
14603   md5_finish_ctx (&ctx, checksum_after_fn);
14604   htab_empty (ht);
14605 
14606   if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14607     fold_check_failed (fn, tem);
14608 
14609   md5_init_ctx (&ctx);
14610   for (i = 0; i < nargs; i++)
14611     fold_checksum_tree (argarray[i], &ctx, ht);
14612   md5_finish_ctx (&ctx, checksum_after_arglist);
14613   htab_delete (ht);
14614 
14615   if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14616     fold_check_failed (NULL_TREE, tem);
14617 #endif
14618   return tem;
14619 }
14620 
14621 /* Perform constant folding and related simplification of an initializer
14622    expression.  These functions behave identically to "fold_buildN" but
14623    ignore potential run-time traps and exceptions that fold must preserve.  */
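/* Illustrative note: with -frounding-math, fold would normally refuse to
   evaluate an inexact expression such as 1.0 / 3.0 at compile time, but a
   static initializer must be a constant, so the wrappers below temporarily
   clear such flags.  */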
14624 
14625 #define START_FOLD_INIT \
14626   int saved_signaling_nans = flag_signaling_nans;\
14627   int saved_trapping_math = flag_trapping_math;\
14628   int saved_rounding_math = flag_rounding_math;\
14629   int saved_trapv = flag_trapv;\
14630   int saved_folding_initializer = folding_initializer;\
14631   flag_signaling_nans = 0;\
14632   flag_trapping_math = 0;\
14633   flag_rounding_math = 0;\
14634   flag_trapv = 0;\
14635   folding_initializer = 1;
14636 
14637 #define END_FOLD_INIT \
14638   flag_signaling_nans = saved_signaling_nans;\
14639   flag_trapping_math = saved_trapping_math;\
14640   flag_rounding_math = saved_rounding_math;\
14641   flag_trapv = saved_trapv;\
14642   folding_initializer = saved_folding_initializer;
14643 
14644 tree
14645 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14646 			     tree type, tree op)
14647 {
14648   tree result;
14649   START_FOLD_INIT;
14650 
14651   result = fold_build1_loc (loc, code, type, op);
14652 
14653   END_FOLD_INIT;
14654   return result;
14655 }
14656 
14657 tree
14658 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14659 			     tree type, tree op0, tree op1)
14660 {
14661   tree result;
14662   START_FOLD_INIT;
14663 
14664   result = fold_build2_loc (loc, code, type, op0, op1);
14665 
14666   END_FOLD_INIT;
14667   return result;
14668 }
14669 
14670 tree
14671 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14672 			     tree type, tree op0, tree op1, tree op2)
14673 {
14674   tree result;
14675   START_FOLD_INIT;
14676 
14677   result = fold_build3_loc (loc, code, type, op0, op1, op2);
14678 
14679   END_FOLD_INIT;
14680   return result;
14681 }
14682 
14683 tree
14684 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14685 				       int nargs, tree *argarray)
14686 {
14687   tree result;
14688   START_FOLD_INIT;
14689 
14690   result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14691 
14692   END_FOLD_INIT;
14693   return result;
14694 }
14695 
14696 #undef START_FOLD_INIT
14697 #undef END_FOLD_INIT
14698 
14699 /* Determine if the first argument is a multiple of the second argument.
14700    Return 0 if it is not, or if we cannot easily determine that it is.
14701 
14702    An example of the sort of thing we care about (at this point; this routine
14703    could surely be made more general, and expanded to do what the *_DIV_EXPR's
14704    fold cases do now) is discovering that
14705 
14706      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14707 
14708    is a multiple of
14709 
14710      SAVE_EXPR (J * 8)
14711 
14712    when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14713 
14714    This code also handles discovering that
14715 
14716      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14717 
14718    is a multiple of 8 so we don't have to worry about dealing with a
14719    possible remainder.
14720 
14721    Note that we *look* inside a SAVE_EXPR only to determine how it was
14722    calculated; it is not safe for fold to do much of anything else with the
14723    internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14724    at run time.  For example, the latter example above *cannot* be implemented
14725    as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14726    evaluation time of the original SAVE_EXPR is not necessarily the same at
14727    the time the new expression is evaluated.  The only optimization of this
14728    sort that would be valid is changing
14729 
14730      SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14731 
14732    divided by 8 to
14733 
14734      SAVE_EXPR (I) * SAVE_EXPR (J)
14735 
14736    (where the same SAVE_EXPR (J) is used in the original and the
14737    transformed version).  */
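/* Minimal usage sketch (illustrative): for TOP = J * 8 and BOTTOM = 8,
   the MULT_EXPR case below succeeds because the multiplication's second
   operand equals BOTTOM, so multiple_of_p returns 1.  */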
14738 
14739 int
14740 multiple_of_p (tree type, const_tree top, const_tree bottom)
14741 {
14742   if (operand_equal_p (top, bottom, 0))
14743     return 1;
14744 
14745   if (TREE_CODE (type) != INTEGER_TYPE)
14746     return 0;
14747 
14748   switch (TREE_CODE (top))
14749     {
14750     case BIT_AND_EXPR:
14751       /* Bitwise AND provides a power-of-two multiple.  If the mask is
14752 	 a multiple of BOTTOM, then TOP is a multiple of BOTTOM.  */
14753       if (!integer_pow2p (bottom))
14754 	return 0;
14755       /* FALLTHRU */
14756 
14757     case MULT_EXPR:
14758       return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14759 	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14760 
14761     case PLUS_EXPR:
14762     case MINUS_EXPR:
14763       return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14764 	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14765 
14766     case LSHIFT_EXPR:
14767       if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14768 	{
14769 	  tree op1, t1;
14770 
14771 	  op1 = TREE_OPERAND (top, 1);
14772 	  /* const_binop may not detect overflow correctly,
14773 	     so check for it explicitly here.  */
14774 	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14775 	      > TREE_INT_CST_LOW (op1)
14776 	      && TREE_INT_CST_HIGH (op1) == 0
14777 	      && 0 != (t1 = fold_convert (type,
14778 					  const_binop (LSHIFT_EXPR,
14779 						       size_one_node,
14780 						       op1)))
14781 	      && !TREE_OVERFLOW (t1))
14782 	    return multiple_of_p (type, t1, bottom);
14783 	}
14784       return 0;
14785 
14786     case NOP_EXPR:
14787       /* Can't handle conversions from non-integral or wider integral types.  */
14788       if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14789 	  || (TYPE_PRECISION (type)
14790 	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14791 	return 0;
14792 
14793       /* ... fall through ...  */
14794 
14795     case SAVE_EXPR:
14796       return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14797 
14798     case COND_EXPR:
14799       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14800 	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14801 
14802     case INTEGER_CST:
14803       if (TREE_CODE (bottom) != INTEGER_CST
14804 	  || integer_zerop (bottom)
14805 	  || (TYPE_UNSIGNED (type)
14806 	      && (tree_int_cst_sgn (top) < 0
14807 		  || tree_int_cst_sgn (bottom) < 0)))
14808 	return 0;
14809       return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14810 					     top, bottom));
14811 
14812     default:
14813       return 0;
14814     }
14815 }
14816 
14817 /* Return true if CODE or TYPE is known to be non-negative.  */
14818 
14819 static bool
14820 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14821 {
14822   if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14823       && truth_value_p (code))
14824     /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14825        have a signed:1 type (where the values are -1 and 0).  */
14826     return true;
14827   return false;
14828 }
14829 
14830 /* Return true if (CODE OP0) is known to be non-negative.  If the return
14831    value is based on the assumption that signed overflow is undefined,
14832    set *STRICT_OVERFLOW_P to true; otherwise, don't change
14833    *STRICT_OVERFLOW_P.  */
14834 
14835 bool
14836 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14837 				bool *strict_overflow_p)
14838 {
14839   if (TYPE_UNSIGNED (type))
14840     return true;
14841 
14842   switch (code)
14843     {
14844     case ABS_EXPR:
14845       /* We can't return 1 if flag_wrapv is set because
14846 	 ABS_EXPR<INT_MIN> = INT_MIN.  */
14847       if (!INTEGRAL_TYPE_P (type))
14848 	return true;
14849       if (TYPE_OVERFLOW_UNDEFINED (type))
14850 	{
14851 	  *strict_overflow_p = true;
14852 	  return true;
14853 	}
14854       break;
14855 
14856     case NON_LVALUE_EXPR:
14857     case FLOAT_EXPR:
14858     case FIX_TRUNC_EXPR:
14859       return tree_expr_nonnegative_warnv_p (op0,
14860 					    strict_overflow_p);
14861 
14862     case NOP_EXPR:
14863       {
14864 	tree inner_type = TREE_TYPE (op0);
14865 	tree outer_type = type;
14866 
14867 	if (TREE_CODE (outer_type) == REAL_TYPE)
14868 	  {
14869 	    if (TREE_CODE (inner_type) == REAL_TYPE)
14870 	      return tree_expr_nonnegative_warnv_p (op0,
14871 						    strict_overflow_p);
14872 	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
14873 	      {
14874 		if (TYPE_UNSIGNED (inner_type))
14875 		  return true;
14876 		return tree_expr_nonnegative_warnv_p (op0,
14877 						      strict_overflow_p);
14878 	      }
14879 	  }
14880 	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14881 	  {
14882 	    if (TREE_CODE (inner_type) == REAL_TYPE)
14883 	      return tree_expr_nonnegative_warnv_p (op0,
14884 						    strict_overflow_p);
14885 	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
14886 	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14887 		      && TYPE_UNSIGNED (inner_type);
14888 	  }
14889       }
14890       break;
14891 
14892     default:
14893       return tree_simple_nonnegative_warnv_p (code, type);
14894     }
14895 
14896   /* We don't know the sign of `t', so be conservative and return false.  */
14897   return false;
14898 }
14899 
14900 /* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
14901    value is based on the assumption that signed overflow is undefined,
14902    set *STRICT_OVERFLOW_P to true; otherwise, don't change
14903    *STRICT_OVERFLOW_P.  */
14904 
14905 bool
14906 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14907 				      tree op1, bool *strict_overflow_p)
14908 {
14909   if (TYPE_UNSIGNED (type))
14910     return true;
14911 
14912   switch (code)
14913     {
14914     case POINTER_PLUS_EXPR:
14915     case PLUS_EXPR:
14916       if (FLOAT_TYPE_P (type))
14917 	return (tree_expr_nonnegative_warnv_p (op0,
14918 					       strict_overflow_p)
14919 		&& tree_expr_nonnegative_warnv_p (op1,
14920 						  strict_overflow_p));
14921 
14922       /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14923 	 both unsigned and at least 2 bits shorter than the result.  */
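	  /* Illustrative example, assuming 32-bit int and 16-bit
	     unsigned short: (int) us0 + (int) us1 needs at most
	     17 bits, so the sum can never reach the sign bit.  */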
14924       if (TREE_CODE (type) == INTEGER_TYPE
14925 	  && TREE_CODE (op0) == NOP_EXPR
14926 	  && TREE_CODE (op1) == NOP_EXPR)
14927 	{
14928 	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14929 	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14930 	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14931 	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14932 	    {
14933 	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
14934 				       TYPE_PRECISION (inner2)) + 1;
14935 	      return prec < TYPE_PRECISION (type);
14936 	    }
14937 	}
14938       break;
14939 
14940     case MULT_EXPR:
14941       if (FLOAT_TYPE_P (type))
14942 	{
14943 	  /* x * x for floating point x is always non-negative.  */
14944 	  if (operand_equal_p (op0, op1, 0))
14945 	    return true;
14946 	  return (tree_expr_nonnegative_warnv_p (op0,
14947 						 strict_overflow_p)
14948 		  && tree_expr_nonnegative_warnv_p (op1,
14949 						    strict_overflow_p));
14950 	}
14951 
14952       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14953 	 both unsigned and their combined width is less than the result's.  */
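	  /* Illustrative example, assuming 32-bit int and 8-bit
	     unsigned char: (int) uc0 * (int) uc1 needs at most
	     16 bits, so the product can never reach the sign bit.  */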
14954       if (TREE_CODE (type) == INTEGER_TYPE
14955 	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14956 	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14957 	{
14958 	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14959 	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
14960 	    : TREE_TYPE (op0);
14961 	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14962 	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
14963 	    : TREE_TYPE (op1);
14964 
14965 	  bool unsigned0 = TYPE_UNSIGNED (inner0);
14966 	  bool unsigned1 = TYPE_UNSIGNED (inner1);
14967 
14968 	  if (TREE_CODE (op0) == INTEGER_CST)
14969 	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14970 
14971 	  if (TREE_CODE (op1) == INTEGER_CST)
14972 	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14973 
14974 	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14975 	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14976 	    {
14977 	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14978 		? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14979 		: TYPE_PRECISION (inner0);
14980 
14981 	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14982 		? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14983 		: TYPE_PRECISION (inner1);
14984 
14985 	      return precision0 + precision1 < TYPE_PRECISION (type);
14986 	    }
14987 	}
14988       return false;
14989 
14990     case BIT_AND_EXPR:
14991     case MAX_EXPR:
14992       return (tree_expr_nonnegative_warnv_p (op0,
14993 					     strict_overflow_p)
14994 	      || tree_expr_nonnegative_warnv_p (op1,
14995 						strict_overflow_p));
14996 
14997     case BIT_IOR_EXPR:
14998     case BIT_XOR_EXPR:
14999     case MIN_EXPR:
15000     case RDIV_EXPR:
15001     case TRUNC_DIV_EXPR:
15002     case CEIL_DIV_EXPR:
15003     case FLOOR_DIV_EXPR:
15004     case ROUND_DIV_EXPR:
15005       return (tree_expr_nonnegative_warnv_p (op0,
15006 					     strict_overflow_p)
15007 	      && tree_expr_nonnegative_warnv_p (op1,
15008 						strict_overflow_p));
15009 
15010     case TRUNC_MOD_EXPR:
15011     case CEIL_MOD_EXPR:
15012     case FLOOR_MOD_EXPR:
15013     case ROUND_MOD_EXPR:
15014       return tree_expr_nonnegative_warnv_p (op0,
15015 					    strict_overflow_p);
15016     default:
15017       return tree_simple_nonnegative_warnv_p (code, type);
15018     }
15019 
15020 	  /* We don't know the sign of `t', so be conservative and return false.  */
15021   return false;
15022 }
15023 
15024 /* Return true if T is known to be non-negative.  If the return
15025    value is based on the assumption that signed overflow is undefined,
15026    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15027    *STRICT_OVERFLOW_P.  */
15028 
15029 bool
15030 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15031 {
15032   if (TYPE_UNSIGNED (TREE_TYPE (t)))
15033     return true;
15034 
15035   switch (TREE_CODE (t))
15036     {
15037     case INTEGER_CST:
15038       return tree_int_cst_sgn (t) >= 0;
15039 
15040     case REAL_CST:
15041       return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15042 
15043     case FIXED_CST:
15044       return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15045 
15046     case COND_EXPR:
15047       return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15048 					     strict_overflow_p)
15049 	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15050 						strict_overflow_p));
15051     default:
15052       return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15053 						   TREE_TYPE (t));
15054     }
15055   /* We don't know the sign of `t', so be conservative and return false.  */
15056   return false;
15057 }
15058 
15059 /* Return true if T is known to be non-negative.  If the return
15060    value is based on the assumption that signed overflow is undefined,
15061    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15062    *STRICT_OVERFLOW_P.  */
15063 
15064 bool
15065 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15066 			       tree arg0, tree arg1, bool *strict_overflow_p)
15067 {
15068   if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15069     switch (DECL_FUNCTION_CODE (fndecl))
15070       {
15071 	CASE_FLT_FN (BUILT_IN_ACOS):
15072 	CASE_FLT_FN (BUILT_IN_ACOSH):
15073 	CASE_FLT_FN (BUILT_IN_CABS):
15074 	CASE_FLT_FN (BUILT_IN_COSH):
15075 	CASE_FLT_FN (BUILT_IN_ERFC):
15076 	CASE_FLT_FN (BUILT_IN_EXP):
15077 	CASE_FLT_FN (BUILT_IN_EXP10):
15078 	CASE_FLT_FN (BUILT_IN_EXP2):
15079 	CASE_FLT_FN (BUILT_IN_FABS):
15080 	CASE_FLT_FN (BUILT_IN_FDIM):
15081 	CASE_FLT_FN (BUILT_IN_HYPOT):
15082 	CASE_FLT_FN (BUILT_IN_POW10):
15083 	CASE_INT_FN (BUILT_IN_FFS):
15084 	CASE_INT_FN (BUILT_IN_PARITY):
15085 	CASE_INT_FN (BUILT_IN_POPCOUNT):
15086       case BUILT_IN_BSWAP32:
15087       case BUILT_IN_BSWAP64:
15088 	/* Always true.  */
15089 	return true;
15090 
15091 	CASE_FLT_FN (BUILT_IN_SQRT):
15092 	/* sqrt(-0.0) is -0.0.  */
15093 	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15094 	  return true;
15095 	return tree_expr_nonnegative_warnv_p (arg0,
15096 					      strict_overflow_p);
15097 
15098 	CASE_FLT_FN (BUILT_IN_ASINH):
15099 	CASE_FLT_FN (BUILT_IN_ATAN):
15100 	CASE_FLT_FN (BUILT_IN_ATANH):
15101 	CASE_FLT_FN (BUILT_IN_CBRT):
15102 	CASE_FLT_FN (BUILT_IN_CEIL):
15103 	CASE_FLT_FN (BUILT_IN_ERF):
15104 	CASE_FLT_FN (BUILT_IN_EXPM1):
15105 	CASE_FLT_FN (BUILT_IN_FLOOR):
15106 	CASE_FLT_FN (BUILT_IN_FMOD):
15107 	CASE_FLT_FN (BUILT_IN_FREXP):
15108 	CASE_FLT_FN (BUILT_IN_ICEIL):
15109 	CASE_FLT_FN (BUILT_IN_IFLOOR):
15110 	CASE_FLT_FN (BUILT_IN_IRINT):
15111 	CASE_FLT_FN (BUILT_IN_IROUND):
15112 	CASE_FLT_FN (BUILT_IN_LCEIL):
15113 	CASE_FLT_FN (BUILT_IN_LDEXP):
15114 	CASE_FLT_FN (BUILT_IN_LFLOOR):
15115 	CASE_FLT_FN (BUILT_IN_LLCEIL):
15116 	CASE_FLT_FN (BUILT_IN_LLFLOOR):
15117 	CASE_FLT_FN (BUILT_IN_LLRINT):
15118 	CASE_FLT_FN (BUILT_IN_LLROUND):
15119 	CASE_FLT_FN (BUILT_IN_LRINT):
15120 	CASE_FLT_FN (BUILT_IN_LROUND):
15121 	CASE_FLT_FN (BUILT_IN_MODF):
15122 	CASE_FLT_FN (BUILT_IN_NEARBYINT):
15123 	CASE_FLT_FN (BUILT_IN_RINT):
15124 	CASE_FLT_FN (BUILT_IN_ROUND):
15125 	CASE_FLT_FN (BUILT_IN_SCALB):
15126 	CASE_FLT_FN (BUILT_IN_SCALBLN):
15127 	CASE_FLT_FN (BUILT_IN_SCALBN):
15128 	CASE_FLT_FN (BUILT_IN_SIGNBIT):
15129 	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15130 	CASE_FLT_FN (BUILT_IN_SINH):
15131 	CASE_FLT_FN (BUILT_IN_TANH):
15132 	CASE_FLT_FN (BUILT_IN_TRUNC):
15133 	/* True if the 1st argument is nonnegative.  */
15134 	return tree_expr_nonnegative_warnv_p (arg0,
15135 					      strict_overflow_p);
15136 
15137 	CASE_FLT_FN (BUILT_IN_FMAX):
15138 	/* True if the 1st OR the 2nd argument is nonnegative.  */
15139 	return (tree_expr_nonnegative_warnv_p (arg0,
15140 					       strict_overflow_p)
15141 		|| (tree_expr_nonnegative_warnv_p (arg1,
15142 						   strict_overflow_p)));
15143 
15144 	CASE_FLT_FN (BUILT_IN_FMIN):
15145 	/* True if the 1st AND 2nd arguments are nonnegative.  */
15146 	return (tree_expr_nonnegative_warnv_p (arg0,
15147 					       strict_overflow_p)
15148 		&& (tree_expr_nonnegative_warnv_p (arg1,
15149 						   strict_overflow_p)));
15150 
15151 	CASE_FLT_FN (BUILT_IN_COPYSIGN):
15152 	/* True if the 2nd argument is nonnegative.  */
15153 	return tree_expr_nonnegative_warnv_p (arg1,
15154 					      strict_overflow_p);
15155 
15156 	CASE_FLT_FN (BUILT_IN_POWI):
15157 	/* True if the 1st argument is nonnegative or the second
15158 	   argument is an even integer.  */
15159 	if (TREE_CODE (arg1) == INTEGER_CST
15160 	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15161 	  return true;
15162 	return tree_expr_nonnegative_warnv_p (arg0,
15163 					      strict_overflow_p);
15164 
15165 	CASE_FLT_FN (BUILT_IN_POW):
15166 	/* True if the 1st argument is nonnegative or the second
15167 	   argument is an even integer valued real.  */
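	/* Editorial example: pow (x, 4.0) is known non-negative for any
	   x, because 4.0 is an even integer-valued real, whereas
	   pow (x, 3.0) is known non-negative only when x is.  */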
15168 	if (TREE_CODE (arg1) == REAL_CST)
15169 	  {
15170 	    REAL_VALUE_TYPE c;
15171 	    HOST_WIDE_INT n;
15172 
15173 	    c = TREE_REAL_CST (arg1);
15174 	    n = real_to_integer (&c);
15175 	    if ((n & 1) == 0)
15176 	      {
15177 		REAL_VALUE_TYPE cint;
15178 		real_from_integer (&cint, VOIDmode, n,
15179 				   n < 0 ? -1 : 0, 0);
15180 		if (real_identical (&c, &cint))
15181 		  return true;
15182 	      }
15183 	  }
15184 	return tree_expr_nonnegative_warnv_p (arg0,
15185 					      strict_overflow_p);
15186 
15187       default:
15188 	break;
15189       }
15190   return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15191 					  type);
15192 }
15193 
15194 /* Return true if T is known to be non-negative.  If the return
15195    value is based on the assumption that signed overflow is undefined,
15196    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15197    *STRICT_OVERFLOW_P.  */
15198 
15199 bool
15200 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15201 {
15202   enum tree_code code = TREE_CODE (t);
15203   if (TYPE_UNSIGNED (TREE_TYPE (t)))
15204     return true;
15205 
15206   switch (code)
15207     {
15208     case TARGET_EXPR:
15209       {
15210 	tree temp = TARGET_EXPR_SLOT (t);
15211 	t = TARGET_EXPR_INITIAL (t);
15212 
15213 	/* If the initializer is non-void, then it's a normal expression
15214 	   that will be assigned to the slot.  */
15215 	if (!VOID_TYPE_P (t))
15216 	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15217 
15218 	/* Otherwise, the initializer sets the slot in some way.  One common
15219 	   way is an assignment statement at the end of the initializer.  */
15220 	while (1)
15221 	  {
15222 	    if (TREE_CODE (t) == BIND_EXPR)
15223 	      t = expr_last (BIND_EXPR_BODY (t));
15224 	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15225 		     || TREE_CODE (t) == TRY_CATCH_EXPR)
15226 	      t = expr_last (TREE_OPERAND (t, 0));
15227 	    else if (TREE_CODE (t) == STATEMENT_LIST)
15228 	      t = expr_last (t);
15229 	    else
15230 	      break;
15231 	  }
15232 	if (TREE_CODE (t) == MODIFY_EXPR
15233 	    && TREE_OPERAND (t, 0) == temp)
15234 	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15235 						strict_overflow_p);
15236 
15237 	return false;
15238       }
15239 
15240     case CALL_EXPR:
15241       {
15242 	tree arg0 = call_expr_nargs (t) > 0 ?  CALL_EXPR_ARG (t, 0) : NULL_TREE;
15243 	tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;
15244 
15245 	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15246 					      get_callee_fndecl (t),
15247 					      arg0,
15248 					      arg1,
15249 					      strict_overflow_p);
15250       }
15251     case COMPOUND_EXPR:
15252     case MODIFY_EXPR:
15253       return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15254 					    strict_overflow_p);
15255     case BIND_EXPR:
15256       return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15257 					    strict_overflow_p);
15258     case SAVE_EXPR:
15259       return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15260 					    strict_overflow_p);
15261 
15262     default:
15263       return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15264 						   TREE_TYPE (t));
15265     }
15266 
15267   /* We don't know the sign of `t', so be conservative and return false.  */
15268   return false;
15269 }
15270 
15271 /* Return true if T is known to be non-negative.  If the return
15272    value is based on the assumption that signed overflow is undefined,
15273    set *STRICT_OVERFLOW_P to true; otherwise, don't change
15274    *STRICT_OVERFLOW_P.  */
15275 
15276 bool
15277 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15278 {
15279   enum tree_code code;
15280   if (t == error_mark_node)
15281     return false;
15282 
15283   code = TREE_CODE (t);
15284   switch (TREE_CODE_CLASS (code))
15285     {
15286     case tcc_binary:
15287     case tcc_comparison:
15288       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15289 					      TREE_TYPE (t),
15290 					      TREE_OPERAND (t, 0),
15291 					      TREE_OPERAND (t, 1),
15292 					      strict_overflow_p);
15293 
15294     case tcc_unary:
15295       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15296 					     TREE_TYPE (t),
15297 					     TREE_OPERAND (t, 0),
15298 					     strict_overflow_p);
15299 
15300     case tcc_constant:
15301     case tcc_declaration:
15302     case tcc_reference:
15303       return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15304 
15305     default:
15306       break;
15307     }
15308 
15309   switch (code)
15310     {
15311     case TRUTH_AND_EXPR:
15312     case TRUTH_OR_EXPR:
15313     case TRUTH_XOR_EXPR:
15314       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15315 					      TREE_TYPE (t),
15316 					      TREE_OPERAND (t, 0),
15317 					      TREE_OPERAND (t, 1),
15318 					      strict_overflow_p);
15319     case TRUTH_NOT_EXPR:
15320       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15321 					     TREE_TYPE (t),
15322 					     TREE_OPERAND (t, 0),
15323 					     strict_overflow_p);
15324 
15325     case COND_EXPR:
15326     case CONSTRUCTOR:
15327     case OBJ_TYPE_REF:
15328     case ASSERT_EXPR:
15329     case ADDR_EXPR:
15330     case WITH_SIZE_EXPR:
15331     case SSA_NAME:
15332       return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15333 
15334     default:
15335       return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15336     }
15337 }
15338 
15339 /* Return true if `t' is known to be non-negative.  Handle warnings
15340    about undefined signed overflow.  */
15341 
15342 bool
15343 tree_expr_nonnegative_p (tree t)
15344 {
15345   bool ret, strict_overflow_p;
15346 
15347   strict_overflow_p = false;
15348   ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15349   if (strict_overflow_p)
15350     fold_overflow_warning (("assuming signed overflow does not occur when "
15351 			    "determining that expression is always "
15352 			    "non-negative"),
15353 			   WARN_STRICT_OVERFLOW_MISC);
15354   return ret;
15355 }
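
/* Editorial note (an illustration, not part of the original source):
   deciding that "abs (i)" is non-negative for a signed int I relies on
   abs (INT_MIN) being undefined, so the answer comes back with
   STRICT_OVERFLOW_P set, and the fold_overflow_warning call above
   reports that assumption when -Wstrict-overflow is enabled.  */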
15356 
15357 
15358 /* Return true when (CODE OP0) is known to be nonzero.
15359    For floating point we further ensure that the value is not denormal.
15360    Similar logic is present in nonzero_address in rtlanal.c.
15361 
15362    If the return value is based on the assumption that signed overflow
15363    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15364    change *STRICT_OVERFLOW_P.  */
15365 
15366 bool
15367 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15368 				 bool *strict_overflow_p)
15369 {
15370   switch (code)
15371     {
15372     case ABS_EXPR:
15373       return tree_expr_nonzero_warnv_p (op0,
15374 					strict_overflow_p);
15375 
15376     case NOP_EXPR:
15377       {
15378 	tree inner_type = TREE_TYPE (op0);
15379 	tree outer_type = type;
15380 
15381 	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15382 		&& tree_expr_nonzero_warnv_p (op0,
15383 					      strict_overflow_p));
15384       }
15385       break;
15386 
15387     case NON_LVALUE_EXPR:
15388       return tree_expr_nonzero_warnv_p (op0,
15389 					strict_overflow_p);
15390 
15391     default:
15392       break;
15393   }
15394 
15395   return false;
15396 }
15397 
15398 /* Return true when (CODE OP0 OP1) is known to be nonzero.
15399    For floating point we further ensure that the value is not denormal.
15400    Similar logic is present in nonzero_address in rtlanal.c.
15401 
15402    If the return value is based on the assumption that signed overflow
15403    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15404    change *STRICT_OVERFLOW_P.  */
15405 
15406 bool
15407 tree_binary_nonzero_warnv_p (enum tree_code code,
15408 			     tree type,
15409 			     tree op0,
15410 			     tree op1, bool *strict_overflow_p)
15411 {
15412   bool sub_strict_overflow_p;
15413   switch (code)
15414     {
15415     case POINTER_PLUS_EXPR:
15416     case PLUS_EXPR:
15417       if (TYPE_OVERFLOW_UNDEFINED (type))
15418 	{
15419 	  /* In the presence of negative values it is hard
15420 	     to say anything definite.  */
15421 	  sub_strict_overflow_p = false;
15422 	  if (!tree_expr_nonnegative_warnv_p (op0,
15423 					      &sub_strict_overflow_p)
15424 	      || !tree_expr_nonnegative_warnv_p (op1,
15425 						 &sub_strict_overflow_p))
15426 	    return false;
15427 	  /* One of the operands must be positive and the other non-negative.  */
15428 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
15429 	     overflows, on a two's-complement machine the sum of a
15430 	     positive and a nonnegative number can never be zero.  */
15431 	  return (tree_expr_nonzero_warnv_p (op0,
15432 					     strict_overflow_p)
15433 		  || tree_expr_nonzero_warnv_p (op1,
15434 						strict_overflow_p));
15435 	}
15436       break;
15437 
15438     case MULT_EXPR:
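      /* Editorial note: the product of two nonzero values can wrap to
	 zero, e.g. 65536 * 65536 overflows a 32-bit int to 0, so this
	 deduction is valid only when signed overflow is undefined; that
	 is why *STRICT_OVERFLOW_P is set below.  */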
15439       if (TYPE_OVERFLOW_UNDEFINED (type))
15440 	{
15441 	  if (tree_expr_nonzero_warnv_p (op0,
15442 					 strict_overflow_p)
15443 	      && tree_expr_nonzero_warnv_p (op1,
15444 					    strict_overflow_p))
15445 	    {
15446 	      *strict_overflow_p = true;
15447 	      return true;
15448 	    }
15449 	}
15450       break;
15451 
15452     case MIN_EXPR:
15453       sub_strict_overflow_p = false;
15454       if (tree_expr_nonzero_warnv_p (op0,
15455 				     &sub_strict_overflow_p)
15456 	  && tree_expr_nonzero_warnv_p (op1,
15457 					&sub_strict_overflow_p))
15458 	{
15459 	  if (sub_strict_overflow_p)
15460 	    *strict_overflow_p = true;
15461 	}
15462       break;
15463 
15464     case MAX_EXPR:
15465       sub_strict_overflow_p = false;
15466       if (tree_expr_nonzero_warnv_p (op0,
15467 				     &sub_strict_overflow_p))
15468 	{
15469 	  if (sub_strict_overflow_p)
15470 	    *strict_overflow_p = true;
15471 
15472 	  /* When both operands are nonzero, then MAX must be too.  */
15473 	  if (tree_expr_nonzero_warnv_p (op1,
15474 					 strict_overflow_p))
15475 	    return true;
15476 
15477 	  /* MAX where operand 0 is positive is positive.  */
15478 	  return tree_expr_nonnegative_warnv_p (op0,
15479 					       strict_overflow_p);
15480 	}
15481       /* MAX where operand 1 is positive is positive.  */
15482       else if (tree_expr_nonzero_warnv_p (op1,
15483 					  &sub_strict_overflow_p)
15484 	       && tree_expr_nonnegative_warnv_p (op1,
15485 						 &sub_strict_overflow_p))
15486 	{
15487 	  if (sub_strict_overflow_p)
15488 	    *strict_overflow_p = true;
15489 	  return true;
15490 	}
15491       break;
15492 
15493     case BIT_IOR_EXPR:
15494       return (tree_expr_nonzero_warnv_p (op1,
15495 					 strict_overflow_p)
15496 	      || tree_expr_nonzero_warnv_p (op0,
15497 					    strict_overflow_p));
15498 
15499     default:
15500       break;
15501   }
15502 
15503   return false;
15504 }
15505 
15506 /* Return true when T is known to be nonzero.
15507    For floating point we further ensure that T is not denormal.
15508    Similar logic is present in nonzero_address in rtlanal.c.
15509 
15510    If the return value is based on the assumption that signed overflow
15511    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15512    change *STRICT_OVERFLOW_P.  */
15513 
15514 bool
15515 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15516 {
15517   bool sub_strict_overflow_p;
15518   switch (TREE_CODE (t))
15519     {
15520     case INTEGER_CST:
15521       return !integer_zerop (t);
15522 
15523     case ADDR_EXPR:
15524       {
15525 	tree base = TREE_OPERAND (t, 0);
15526 	if (!DECL_P (base))
15527 	  base = get_base_address (base);
15528 
15529 	if (!base)
15530 	  return false;
15531 
15532 	/* Weak declarations may link to NULL.  Other things may also be NULL,
15533 	   so only treat them as nonzero under -fdelete-null-pointer-checks;
15534 	   variables allocated on the stack, however, can never be NULL.  */
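	/* Editorial example: given "extern int w __attribute__ ((weak));",
	   the address &w may legitimately compare equal to 0 at run time,
	   whereas the address of a block-scope auto variable never can.  */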
15535 	if (DECL_P (base)
15536 	    && (flag_delete_null_pointer_checks
15537 		|| (DECL_CONTEXT (base)
15538 		    && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15539 		    && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15540 	  return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15541 
15542 	/* Constants are never weak.  */
15543 	if (CONSTANT_CLASS_P (base))
15544 	  return true;
15545 
15546 	return false;
15547       }
15548 
15549     case COND_EXPR:
15550       sub_strict_overflow_p = false;
15551       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15552 				     &sub_strict_overflow_p)
15553 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15554 					&sub_strict_overflow_p))
15555 	{
15556 	  if (sub_strict_overflow_p)
15557 	    *strict_overflow_p = true;
15558 	  return true;
15559 	}
15560       break;
15561 
15562     default:
15563       break;
15564     }
15565   return false;
15566 }
15567 
15568 /* Return true when T is known to be nonzero.
15569    For floating point we further ensure that T is not denormal.
15570    Similar logic is present in nonzero_address in rtlanal.c.
15571 
15572    If the return value is based on the assumption that signed overflow
15573    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15574    change *STRICT_OVERFLOW_P.  */
15575 
15576 bool
15577 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15578 {
15579   tree type = TREE_TYPE (t);
15580   enum tree_code code;
15581 
15582   /* Doing something useful for floating point would need more work.  */
15583   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15584     return false;
15585 
15586   code = TREE_CODE (t);
15587   switch (TREE_CODE_CLASS (code))
15588     {
15589     case tcc_unary:
15590       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15591 					      strict_overflow_p);
15592     case tcc_binary:
15593     case tcc_comparison:
15594       return tree_binary_nonzero_warnv_p (code, type,
15595 					       TREE_OPERAND (t, 0),
15596 					       TREE_OPERAND (t, 1),
15597 					       strict_overflow_p);
15598     case tcc_constant:
15599     case tcc_declaration:
15600     case tcc_reference:
15601       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15602 
15603     default:
15604       break;
15605     }
15606 
15607   switch (code)
15608     {
15609     case TRUTH_NOT_EXPR:
15610       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15611 					      strict_overflow_p);
15612 
15613     case TRUTH_AND_EXPR:
15614     case TRUTH_OR_EXPR:
15615     case TRUTH_XOR_EXPR:
15616       return tree_binary_nonzero_warnv_p (code, type,
15617 					       TREE_OPERAND (t, 0),
15618 					       TREE_OPERAND (t, 1),
15619 					       strict_overflow_p);
15620 
15621     case COND_EXPR:
15622     case CONSTRUCTOR:
15623     case OBJ_TYPE_REF:
15624     case ASSERT_EXPR:
15625     case ADDR_EXPR:
15626     case WITH_SIZE_EXPR:
15627     case SSA_NAME:
15628       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15629 
15630     case COMPOUND_EXPR:
15631     case MODIFY_EXPR:
15632     case BIND_EXPR:
15633       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15634 					strict_overflow_p);
15635 
15636     case SAVE_EXPR:
15637       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15638 					strict_overflow_p);
15639 
15640     case CALL_EXPR:
15641       return alloca_call_p (t);
15642 
15643     default:
15644       break;
15645     }
15646   return false;
15647 }
15648 
15649 /* Return true when T is known to be nonzero.
15650    Handle warnings about undefined signed overflow.  */
15651 
15652 bool
15653 tree_expr_nonzero_p (tree t)
15654 {
15655   bool ret, strict_overflow_p;
15656 
15657   strict_overflow_p = false;
15658   ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15659   if (strict_overflow_p)
15660     fold_overflow_warning (("assuming signed overflow does not occur when "
15661 			    "determining that expression is always "
15662 			    "non-zero"),
15663 			   WARN_STRICT_OVERFLOW_MISC);
15664   return ret;
15665 }
15666 
15667 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15668    attempt to fold the expression to a constant without modifying TYPE,
15669    OP0 or OP1.
15670 
15671    If the expression could be simplified to a constant, then return
15672    the constant.  If the expression would not be simplified to a
15673    constant, then return NULL_TREE.  */
15674 
15675 tree
15676 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15677 {
15678   tree tem = fold_binary (code, type, op0, op1);
15679   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15680 }
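
/* Editorial sketch of intended use (hypothetical snippet, not from the
   original source):

     tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
					 build_int_cst (integer_type_node, 2),
					 build_int_cst (integer_type_node, 3));

   leaves SUM as the INTEGER_CST 5, whereas a non-constant operand such
   as a VAR_DECL makes the call return NULL_TREE.  */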
15681 
15682 /* Given the components of a unary expression CODE, TYPE and OP0,
15683    attempt to fold the expression to a constant without modifying
15684    TYPE or OP0.
15685 
15686    If the expression could be simplified to a constant, then return
15687    the constant.  If the expression would not be simplified to a
15688    constant, then return NULL_TREE.  */
15689 
15690 tree
15691 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15692 {
15693   tree tem = fold_unary (code, type, op0);
15694   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15695 }
15696 
15697 /* If EXP represents referencing an element in a constant string
15698    (either via pointer arithmetic or array indexing), return the
15699    tree representing the value accessed, otherwise return NULL.  */
15700 
15701 tree
15702 fold_read_from_constant_string (tree exp)
15703 {
15704   if ((TREE_CODE (exp) == INDIRECT_REF
15705        || TREE_CODE (exp) == ARRAY_REF)
15706       && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15707     {
15708       tree exp1 = TREE_OPERAND (exp, 0);
15709       tree index;
15710       tree string;
15711       location_t loc = EXPR_LOCATION (exp);
15712 
15713       if (TREE_CODE (exp) == INDIRECT_REF)
15714 	string = string_constant (exp1, &index);
15715       else
15716 	{
15717 	  tree low_bound = array_ref_low_bound (exp);
15718 	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15719 
15720 	  /* Optimize the special-case of a zero lower bound.
15721 
15722 	     We convert the low_bound to sizetype to avoid some problems
15723 	     with constant folding.  (E.g. suppose the lower bound is 1,
15724 	     and its mode is QI.  Without the conversion, (ARRAY
15725 	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15726 	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
15727 	  if (! integer_zerop (low_bound))
15728 	    index = size_diffop_loc (loc, index,
15729 				 fold_convert_loc (loc, sizetype, low_bound));
15730 
15731 	  string = exp1;
15732 	}
15733 
15734       if (string
15735 	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15736 	  && TREE_CODE (string) == STRING_CST
15737 	  && TREE_CODE (index) == INTEGER_CST
15738 	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15739 	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15740 	      == MODE_INT)
15741 	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15742 	return build_int_cst_type (TREE_TYPE (exp),
15743 				   (TREE_STRING_POINTER (string)
15744 				    [TREE_INT_CST_LOW (index)]));
15745     }
15746   return NULL;
15747 }
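
/* Editorial example: for the C expression "abc"[1] the routine above
   returns the INTEGER_CST 'b' (98); an out-of-range index such as
   "abc"[7] fails the TREE_STRING_LENGTH check and yields NULL.  */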
15748 
15749 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15750    an integer constant, real, or fixed-point constant.
15751    an integer, real, or fixed-point constant.
15752    TYPE is the type of the result.  */
15753 
15754 static tree
15755 fold_negate_const (tree arg0, tree type)
15756 {
15757   tree t = NULL_TREE;
15758 
15759   switch (TREE_CODE (arg0))
15760     {
15761     case INTEGER_CST:
15762       {
15763 	double_int val = tree_to_double_int (arg0);
15764 	int overflow = neg_double (val.low, val.high, &val.low, &val.high);
15765 
15766 	t = force_fit_type_double (type, val, 1,
15767 				   (overflow | TREE_OVERFLOW (arg0))
15768 				   && !TYPE_UNSIGNED (type));
15769 	break;
15770       }
15771 
15772     case REAL_CST:
15773       t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15774       break;
15775 
15776     case FIXED_CST:
15777       {
15778         FIXED_VALUE_TYPE f;
15779         bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15780 					    &(TREE_FIXED_CST (arg0)), NULL,
15781 					    TYPE_SATURATING (type));
15782 	t = build_fixed (type, f);
15783 	/* Propagate overflow flags.  */
15784 	if (overflow_p | TREE_OVERFLOW (arg0))
15785 	  TREE_OVERFLOW (t) = 1;
15786 	break;
15787       }
15788 
15789     default:
15790       gcc_unreachable ();
15791     }
15792 
15793   return t;
15794 }
15795 
15796 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15797    an integer constant or real constant.
15798 
15799    TYPE is the type of the result.  */
15800 
15801 tree
15802 fold_abs_const (tree arg0, tree type)
15803 {
15804   tree t = NULL_TREE;
15805 
15806   switch (TREE_CODE (arg0))
15807     {
15808     case INTEGER_CST:
15809       {
15810 	double_int val = tree_to_double_int (arg0);
15811 
15812         /* If the value is unsigned or non-negative, then the absolute value
15813 	   is the same as the ordinary value.  */
15814 	if (TYPE_UNSIGNED (type)
15815 	    || !double_int_negative_p (val))
15816 	  t = arg0;
15817 
15818 	/* If the value is negative, then the absolute value is
15819 	   its negation.  */
15820 	else
15821 	  {
15822 	    int overflow;
15823 
15824 	    overflow = neg_double (val.low, val.high, &val.low, &val.high);
15825 	    t = force_fit_type_double (type, val, -1,
15826 				       overflow | TREE_OVERFLOW (arg0));
15827 	  }
15828       }
15829       break;
15830 
15831     case REAL_CST:
15832       if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15833 	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15834       else
15835 	t = arg0;
15836       break;
15837 
15838     default:
15839       gcc_unreachable ();
15840     }
15841 
15842   return t;
15843 }
15844 
15845 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15846    constant.  TYPE is the type of the result.  */
15847 
15848 static tree
15849 fold_not_const (const_tree arg0, tree type)
15850 {
15851   double_int val;
15852 
15853   gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15854 
15855   val = double_int_not (tree_to_double_int (arg0));
15856   return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
15857 }
15858 
15859 /* Given CODE, a relational operator, the target type, TYPE and two
15860    constant operands OP0 and OP1, return the result of the
15861    relational operation.  If the result is not a compile time
15862    constant, then return NULL_TREE.  */
15863 
15864 static tree
15865 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15866 {
15867   int result, invert;
15868 
15869   /* From here on, the only cases we handle are when the result is
15870      known to be a constant.  */
15871 
15872   if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15873     {
15874       const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15875       const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15876 
15877       /* Handle the cases where either operand is a NaN.  */
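      /* Editorial note: when either operand is a NaN, only NE and the
	 unordered comparisons are true; LT/LE/GT/GE would additionally
	 raise an invalid-operation exception, hence the
	 flag_trapping_math bail-out below.  */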
15878       if (real_isnan (c0) || real_isnan (c1))
15879 	{
15880 	  switch (code)
15881 	    {
15882 	    case EQ_EXPR:
15883 	    case ORDERED_EXPR:
15884 	      result = 0;
15885 	      break;
15886 
15887 	    case NE_EXPR:
15888 	    case UNORDERED_EXPR:
15889 	    case UNLT_EXPR:
15890 	    case UNLE_EXPR:
15891 	    case UNGT_EXPR:
15892 	    case UNGE_EXPR:
15893 	    case UNEQ_EXPR:
15894 	      result = 1;
15895 	      break;
15896 
15897 	    case LT_EXPR:
15898 	    case LE_EXPR:
15899 	    case GT_EXPR:
15900 	    case GE_EXPR:
15901 	    case LTGT_EXPR:
15902 	      if (flag_trapping_math)
15903 		return NULL_TREE;
15904 	      result = 0;
15905 	      break;
15906 
15907 	    default:
15908 	      gcc_unreachable ();
15909 	    }
15910 
15911 	  return constant_boolean_node (result, type);
15912 	}
15913 
15914       return constant_boolean_node (real_compare (code, c0, c1), type);
15915     }
15916 
15917   if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15918     {
15919       const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15920       const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15921       return constant_boolean_node (fixed_compare (code, c0, c1), type);
15922     }
15923 
15924   /* Handle equality/inequality of complex constants.  */
15925   if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15926     {
15927       tree rcond = fold_relational_const (code, type,
15928 					  TREE_REALPART (op0),
15929 					  TREE_REALPART (op1));
15930       tree icond = fold_relational_const (code, type,
15931 					  TREE_IMAGPART (op0),
15932 					  TREE_IMAGPART (op1));
15933       if (code == EQ_EXPR)
15934 	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15935       else if (code == NE_EXPR)
15936 	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15937       else
15938 	return NULL_TREE;
15939     }
15940 
15941   /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15942 
15943      To compute GT, swap the arguments and do LT.
15944      To compute GE, do LT and invert the result.
15945      To compute LE, swap the arguments, do LT and invert the result.
15946      To compute NE, do EQ and invert the result.
15947 
15948      Therefore, the code below must handle only EQ and LT.  */
15949 
15950   if (code == LE_EXPR || code == GT_EXPR)
15951     {
15952       tree tem = op0;
15953       op0 = op1;
15954       op1 = tem;
15955       code = swap_tree_comparison (code);
15956     }
15957 
15958   /* Note that it is safe to invert for real values here because we
15959      have already handled the one case where it matters.  */
15960 
15961   invert = 0;
15962   if (code == NE_EXPR || code == GE_EXPR)
15963     {
15964       invert = 1;
15965       code = invert_tree_comparison (code, false);
15966     }
15967 
15968   /* Compute a result for LT or EQ if args permit;
15969      otherwise return NULL_TREE.  */
15970   if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15971     {
15972       if (code == EQ_EXPR)
15973 	result = tree_int_cst_equal (op0, op1);
15974       else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15975 	result = INT_CST_LT_UNSIGNED (op0, op1);
15976       else
15977 	result = INT_CST_LT (op0, op1);
15978     }
15979   else
15980     return NULL_TREE;
15981 
15982   if (invert)
15983     result ^= 1;
15984   return constant_boolean_node (result, type);
15985 }
15986 
15987 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15988    indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
15989    itself.  */
15990 
15991 tree
15992 fold_build_cleanup_point_expr (tree type, tree expr)
15993 {
15994   /* If the expression does not have side effects then we don't have to wrap
15995      it with a cleanup point expression.  */
15996   if (!TREE_SIDE_EFFECTS (expr))
15997     return expr;
15998 
15999   /* If the expression is a return, check whether the expression inside
16000      the return, or the right-hand side of the modify expression inside
16001      it, has no side effects.  If either has none, we don't need to wrap
16002      the expression in a cleanup point expression.  Note we don't check
16003      the left-hand side of the modify: it should always be the return decl.  */
16004   if (TREE_CODE (expr) == RETURN_EXPR)
16005     {
16006       tree op = TREE_OPERAND (expr, 0);
16007       if (!op || !TREE_SIDE_EFFECTS (op))
16008         return expr;
16009       op = TREE_OPERAND (op, 1);
16010       if (!TREE_SIDE_EFFECTS (op))
16011         return expr;
16012     }
16013 
16014   return build1 (CLEANUP_POINT_EXPR, type, expr);
16015 }
16016 
16017 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16018    of an indirection through OP0, or NULL_TREE if no simplification is
16019    possible.  */
16020 
16021 tree
16022 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16023 {
16024   tree sub = op0;
16025   tree subtype;
16026 
16027   STRIP_NOPS (sub);
16028   subtype = TREE_TYPE (sub);
16029   if (!POINTER_TYPE_P (subtype))
16030     return NULL_TREE;
16031 
16032   if (TREE_CODE (sub) == ADDR_EXPR)
16033     {
16034       tree op = TREE_OPERAND (sub, 0);
16035       tree optype = TREE_TYPE (op);
16036       /* *&CONST_DECL -> to the value of the const decl.  */
16037       if (TREE_CODE (op) == CONST_DECL)
16038 	return DECL_INITIAL (op);
16039       /* *&p => p;  make sure to handle *&"str"[cst] here.  */
16040       if (type == optype)
16041 	{
16042 	  tree fop = fold_read_from_constant_string (op);
16043 	  if (fop)
16044 	    return fop;
16045 	  else
16046 	    return op;
16047 	}
16048       /* *(foo *)&fooarray => fooarray[0] */
16049       else if (TREE_CODE (optype) == ARRAY_TYPE
16050 	       && type == TREE_TYPE (optype)
16051 	       && (!in_gimple_form
16052 		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16053 	{
16054 	  tree type_domain = TYPE_DOMAIN (optype);
16055 	  tree min_val = size_zero_node;
16056 	  if (type_domain && TYPE_MIN_VALUE (type_domain))
16057 	    min_val = TYPE_MIN_VALUE (type_domain);
16058 	  if (in_gimple_form
16059 	      && TREE_CODE (min_val) != INTEGER_CST)
16060 	    return NULL_TREE;
16061 	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
16062 			     NULL_TREE, NULL_TREE);
16063 	}
16064       /* *(foo *)&complexfoo => __real__ complexfoo */
16065       else if (TREE_CODE (optype) == COMPLEX_TYPE
16066 	       && type == TREE_TYPE (optype))
16067 	return fold_build1_loc (loc, REALPART_EXPR, type, op);
16068       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16069       else if (TREE_CODE (optype) == VECTOR_TYPE
16070 	       && type == TREE_TYPE (optype))
16071 	{
16072 	  tree part_width = TYPE_SIZE (type);
16073 	  tree index = bitsize_int (0);
16074 	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16075 	}
16076     }
16077 
16078   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16079       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16080     {
16081       tree op00 = TREE_OPERAND (sub, 0);
16082       tree op01 = TREE_OPERAND (sub, 1);
16083 
16084       STRIP_NOPS (op00);
16085       if (TREE_CODE (op00) == ADDR_EXPR)
16086 	{
16087 	  tree op00type;
16088 	  op00 = TREE_OPERAND (op00, 0);
16089 	  op00type = TREE_TYPE (op00);
16090 
16091 	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16092 	  if (TREE_CODE (op00type) == VECTOR_TYPE
16093 	      && type == TREE_TYPE (op00type))
16094 	    {
16095 	      HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16096 	      tree part_width = TYPE_SIZE (type);
16097 	      unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16098 	      unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16099 	      tree index = bitsize_int (indexi);
16100 
16101 	      if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
16102 		return fold_build3_loc (loc,
16103 					BIT_FIELD_REF, type, op00,
16104 					part_width, index);
16105 
16106 	    }
16107 	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16108 	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
16109 		   && type == TREE_TYPE (op00type))
16110 	    {
16111 	      tree size = TYPE_SIZE_UNIT (type);
16112 	      if (tree_int_cst_equal (size, op01))
16113 		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16114 	    }
16115 	  /* ((foo *)&fooarray)[1] => fooarray[1] */
16116 	  else if (TREE_CODE (op00type) == ARRAY_TYPE
16117 		   && type == TREE_TYPE (op00type))
16118 	    {
16119 	      tree type_domain = TYPE_DOMAIN (op00type);
16120 	      tree min_val = size_zero_node;
16121 	      if (type_domain && TYPE_MIN_VALUE (type_domain))
16122 		min_val = TYPE_MIN_VALUE (type_domain);
16123 	      op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16124 				     TYPE_SIZE_UNIT (type));
16125 	      op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16126 	      return build4_loc (loc, ARRAY_REF, type, op00, op01,
16127 				 NULL_TREE, NULL_TREE);
16128 	    }
16129 	}
16130     }
16131 
16132   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16133   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16134       && type == TREE_TYPE (TREE_TYPE (subtype))
16135       && (!in_gimple_form
16136 	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16137     {
16138       tree type_domain;
16139       tree min_val = size_zero_node;
16140       sub = build_fold_indirect_ref_loc (loc, sub);
16141       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16142       if (type_domain && TYPE_MIN_VALUE (type_domain))
16143 	min_val = TYPE_MIN_VALUE (type_domain);
16144       if (in_gimple_form
16145 	  && TREE_CODE (min_val) != INTEGER_CST)
16146 	return NULL_TREE;
16147       return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16148 			 NULL_TREE);
16149     }
16150 
16151   return NULL_TREE;
16152 }
16153 
16154 /* Builds an expression for an indirection through T, simplifying some
16155    cases.  */
16156 
16157 tree
16158 build_fold_indirect_ref_loc (location_t loc, tree t)
16159 {
16160   tree type = TREE_TYPE (TREE_TYPE (t));
16161   tree sub = fold_indirect_ref_1 (loc, type, t);
16162 
16163   if (sub)
16164     return sub;
16165 
16166   return build1_loc (loc, INDIRECT_REF, type, t);
16167 }
16168 
16169 /* Given an INDIRECT_REF T, return either T or a simplified version.  */
16170 
16171 tree
16172 fold_indirect_ref_loc (location_t loc, tree t)
16173 {
16174   tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16175 
16176   if (sub)
16177     return sub;
16178   else
16179     return t;
16180 }
16181 
16182 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16183    whose result is ignored.  The type of the returned tree need not be
16184    the same as the original expression.  */
16185 
16186 tree
16187 fold_ignored_result (tree t)
16188 {
16189   if (!TREE_SIDE_EFFECTS (t))
16190     return integer_zero_node;
16191 
16192   for (;;)
16193     switch (TREE_CODE_CLASS (TREE_CODE (t)))
16194       {
16195       case tcc_unary:
16196 	t = TREE_OPERAND (t, 0);
16197 	break;
16198 
16199       case tcc_binary:
16200       case tcc_comparison:
16201 	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16202 	  t = TREE_OPERAND (t, 0);
16203 	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16204 	  t = TREE_OPERAND (t, 1);
16205 	else
16206 	  return t;
16207 	break;
16208 
16209       case tcc_expression:
16210 	switch (TREE_CODE (t))
16211 	  {
16212 	  case COMPOUND_EXPR:
16213 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16214 	      return t;
16215 	    t = TREE_OPERAND (t, 0);
16216 	    break;
16217 
16218 	  case COND_EXPR:
16219 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16220 		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16221 	      return t;
16222 	    t = TREE_OPERAND (t, 0);
16223 	    break;
16224 
16225 	  default:
16226 	    return t;
16227 	  }
16228 	break;
16229 
16230       default:
16231 	return t;
16232       }
16233 }
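
/* Editorial example: with its result ignored, "(f (), x + 1)" reduces
   to "f ()" above, because only the first operand of the COMPOUND_EXPR
   has side effects.  */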
16234 
16235 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16236    This can only be applied to objects of a sizetype.  */
16237 
16238 tree
16239 round_up_loc (location_t loc, tree value, int divisor)
16240 {
16241   tree div = NULL_TREE;
16242 
16243   gcc_assert (divisor > 0);
16244   if (divisor == 1)
16245     return value;
16246 
16247   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16248      have to do anything.  Only do this when VALUE is not a constant,
16249      because for a constant this check is more expensive than simply
16250      performing the rounding.  */
16251   if (TREE_CODE (value) != INTEGER_CST)
16252     {
16253       div = build_int_cst (TREE_TYPE (value), divisor);
16254 
16255       if (multiple_of_p (TREE_TYPE (value), value, div))
16256 	return value;
16257     }
16258 
16259   /* If divisor is a power of two, simplify this to bit manipulation.  */
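  /* Editorial example: rounding VALUE = 37 up to DIVISOR = 8 computes
     (37 + 7) & ~7 = 44 & ~7 = 40.  */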
16260   if (divisor == (divisor & -divisor))
16261     {
16262       if (TREE_CODE (value) == INTEGER_CST)
16263 	{
16264 	  double_int val = tree_to_double_int (value);
16265 	  bool overflow_p;
16266 
16267 	  if ((val.low & (divisor - 1)) == 0)
16268 	    return value;
16269 
16270 	  overflow_p = TREE_OVERFLOW (value);
16271 	  val.low &= ~(divisor - 1);
16272 	  val.low += divisor;
16273 	  if (val.low == 0)
16274 	    {
16275 	      val.high++;
16276 	      if (val.high == 0)
16277 		overflow_p = true;
16278 	    }
16279 
16280 	  return force_fit_type_double (TREE_TYPE (value), val,
16281 					-1, overflow_p);
16282 	}
16283       else
16284 	{
16285 	  tree t;
16286 
16287 	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
16288 	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
16289 	  t = build_int_cst (TREE_TYPE (value), -divisor);
16290 	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16291 	}
16292     }
16293   else
16294     {
16295       if (!div)
16296 	div = build_int_cst (TREE_TYPE (value), divisor);
16297       value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16298       value = size_binop_loc (loc, MULT_EXPR, value, div);
16299     }
16300 
16301   return value;
16302 }
16303 
16304 /* Likewise, but round down.  */
16305 
16306 tree
16307 round_down_loc (location_t loc, tree value, int divisor)
16308 {
16309   tree div = NULL_TREE;
16310 
16311   gcc_assert (divisor > 0);
16312   if (divisor == 1)
16313     return value;
16314 
16315   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16316      have to do anything.  Only do this when VALUE is not a constant,
16317      because for a constant this check is more expensive than simply
16318      performing the rounding.  */
16319   if (TREE_CODE (value) != INTEGER_CST)
16320     {
16321       div = build_int_cst (TREE_TYPE (value), divisor);
16322 
16323       if (multiple_of_p (TREE_TYPE (value), value, div))
16324 	return value;
16325     }
16326 
16327   /* If divisor is a power of two, simplify this to bit manipulation.  */
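  /* Editorial example: rounding VALUE = 37 down to DIVISOR = 8 computes
     37 & -8 = 37 & ~7 = 32.  */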
16328   if (divisor == (divisor & -divisor))
16329     {
16330       tree t;
16331 
16332       t = build_int_cst (TREE_TYPE (value), -divisor);
16333       value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16334     }
16335   else
16336     {
16337       if (!div)
16338 	div = build_int_cst (TREE_TYPE (value), divisor);
16339       value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16340       value = size_binop_loc (loc, MULT_EXPR, value, div);
16341     }
16342 
16343   return value;
16344 }
16345 
16346 /* Returns the pointer to the base of the object addressed by EXP and
16347    extracts the information about the offset of the access, storing it
16348    extracts the information about the offset of the access, storing it
16349    in PBITPOS and POFFSET.  */
16350 static tree
16351 split_address_to_core_and_offset (tree exp,
16352 				  HOST_WIDE_INT *pbitpos, tree *poffset)
16353 {
16354   tree core;
16355   enum machine_mode mode;
16356   int unsignedp, volatilep;
16357   HOST_WIDE_INT bitsize;
16358   location_t loc = EXPR_LOCATION (exp);
16359 
16360   if (TREE_CODE (exp) == ADDR_EXPR)
16361     {
16362       core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16363 				  poffset, &mode, &unsignedp, &volatilep,
16364 				  false);
16365       core = build_fold_addr_expr_loc (loc, core);
16366     }
16367   else
16368     {
16369       core = exp;
16370       *pbitpos = 0;
16371       *poffset = NULL_TREE;
16372     }
16373 
16374   return core;
16375 }
16376 
16377 /* Returns true if addresses of E1 and E2 differ by a constant, false
16378    otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
16379 
16380 bool
16381 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16382 {
16383   tree core1, core2;
16384   HOST_WIDE_INT bitpos1, bitpos2;
16385   tree toffset1, toffset2, tdiff, type;
16386 
16387   core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16388   core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16389 
16390   if (bitpos1 % BITS_PER_UNIT != 0
16391       || bitpos2 % BITS_PER_UNIT != 0
16392       || !operand_equal_p (core1, core2, 0))
16393     return false;
16394 
16395   if (toffset1 && toffset2)
16396     {
16397       type = TREE_TYPE (toffset1);
16398       if (type != TREE_TYPE (toffset2))
16399 	toffset2 = fold_convert (type, toffset2);
16400 
16401       tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16402       if (!cst_and_fits_in_hwi (tdiff))
16403 	return false;
16404 
16405       *diff = int_cst_value (tdiff);
16406     }
16407   else if (toffset1 || toffset2)
16408     {
16409       /* If only one of the offsets is non-constant, the difference cannot
16410 	 be a constant.  */
16411       return false;
16412     }
16413   else
16414     *diff = 0;
16415 
16416   *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16417   return true;
16418 }
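
/* Editorial example (assuming a 4-byte int): for "int a[16]", the
   addresses &a[10] and &a[4] share the core &a and differ by the
   constant 6 * 4 = 24 bytes, so ptr_difference_const stores 24 in
   *DIFF and returns true.  */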
16419 
16420 /* Simplify the floating point expression EXP when the sign of the
16421    result is not significant.  Return NULL_TREE if no simplification
16422    is possible.  */
16423 
16424 tree
16425 fold_strip_sign_ops (tree exp)
16426 {
16427   tree arg0, arg1;
16428   location_t loc = EXPR_LOCATION (exp);
16429 
16430   switch (TREE_CODE (exp))
16431     {
16432     case ABS_EXPR:
16433     case NEGATE_EXPR:
16434       arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16435       return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16436 
16437     case MULT_EXPR:
16438     case RDIV_EXPR:
16439       if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16440 	return NULL_TREE;
16441       arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16442       arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16443       if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16444 	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16445 			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
16446 			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
16447       break;
16448 
16449     case COMPOUND_EXPR:
16450       arg0 = TREE_OPERAND (exp, 0);
16451       arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16452       if (arg1)
16453 	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16454       break;
16455 
16456     case COND_EXPR:
16457       arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16458       arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16459       if (arg0 || arg1)
16460 	return fold_build3_loc (loc,
16461 			    COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16462 			    arg0 ? arg0 : TREE_OPERAND (exp, 1),
16463 			    arg1 ? arg1 : TREE_OPERAND (exp, 2));
16464       break;
16465 
16466     case CALL_EXPR:
16467       {
16468 	const enum built_in_function fcode = builtin_mathfn_code (exp);
16469 	switch (fcode)
16470 	{
16471 	CASE_FLT_FN (BUILT_IN_COPYSIGN):
16472 	  /* Strip the copysign function call and return the 1st argument.  */
16473 	  arg0 = CALL_EXPR_ARG (exp, 0);
16474 	  arg1 = CALL_EXPR_ARG (exp, 1);
16475 	  return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16476 
16477 	default:
16478 	  /* Strip sign ops from the argument of "odd" math functions.  */
16479 	  if (negate_mathfn_p (fcode))
16480             {
16481 	      arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16482 	      if (arg0)
16483 		return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16484 	    }
16485 	  break;
16486 	}
16487       }
16488       break;
16489 
16490     default:
16491       break;
16492     }
16493   return NULL_TREE;
16494 }
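
/* Editorial example: when only the magnitude of the result matters,
   e.g. for the argument of fabs, fold_strip_sign_ops simplifies
   "-x * sin (-y)" to "x * sin (y)", because sin is an odd function
   and the negations affect only the sign of the product.  */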
16495