xref: /openbsd/gnu/gcc/gcc/fold-const.c (revision ad47ef84)
1 /* Fold a constant sub-tree into a single node for C-compiler
2    Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3    2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4    Free Software Foundation, Inc.
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING.  If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA.  */
22 
23 /*@@ This file should be rewritten to use an arbitrary precision
24   @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
25   @@ Perhaps the routines could also be used for bc/dc, and made a lib.
26   @@ The routines that translate from the ap rep should
27   @@ warn if precision et. al. is lost.
28   @@ This would also make life easier when this technology is used
29   @@ for cross-compilers.  */
30 
31 /* The entry points in this file are fold, size_int_wide, size_binop
32    and force_fit_type.
33 
34    fold takes a tree as argument and returns a simplified tree.
35 
36    size_binop takes a tree code for an arithmetic operation
37    and two operands that are trees, and produces a tree for the
38    result, assuming the type comes from `sizetype'.
39 
40    size_int takes an integer value, and creates a tree constant
41    with type from `sizetype'.
42 
43    force_fit_type takes a constant, an overflowable flag and prior
44    overflow indicators.  It forces the value to fit the type and sets
45    TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
46 
47 #include "config.h"
48 #include "system.h"
49 #include "coretypes.h"
50 #include "tm.h"
51 #include "flags.h"
52 #include "tree.h"
53 #include "real.h"
54 #include "rtl.h"
55 #include "expr.h"
56 #include "tm_p.h"
57 #include "toplev.h"
58 #include "intl.h"
59 #include "ggc.h"
60 #include "hashtab.h"
61 #include "langhooks.h"
62 #include "md5.h"
63 
64 /* Non-zero if we are folding constants inside an initializer; zero
65    otherwise.  */
66 int folding_initializer = 0;
67 
68 /* The following constants represent a bit based encoding of GCC's
69    comparison operators.  This encoding simplifies transformations
70    on relational comparison operators, such as AND and OR.  */
71 enum comparison_code {
72   COMPCODE_FALSE = 0,
73   COMPCODE_LT = 1,
74   COMPCODE_EQ = 2,
75   COMPCODE_LE = 3,
76   COMPCODE_GT = 4,
77   COMPCODE_LTGT = 5,
78   COMPCODE_GE = 6,
79   COMPCODE_ORD = 7,
80   COMPCODE_UNORD = 8,
81   COMPCODE_UNLT = 9,
82   COMPCODE_UNEQ = 10,
83   COMPCODE_UNLE = 11,
84   COMPCODE_UNGT = 12,
85   COMPCODE_NE = 13,
86   COMPCODE_UNGE = 14,
87   COMPCODE_TRUE = 15
88 };
89 
90 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
91 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
92 static bool negate_mathfn_p (enum built_in_function);
93 static bool negate_expr_p (tree);
94 static tree negate_expr (tree);
95 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
96 static tree associate_trees (tree, tree, enum tree_code, tree);
97 static tree const_binop (enum tree_code, tree, tree, int);
98 static enum comparison_code comparison_to_compcode (enum tree_code);
99 static enum tree_code compcode_to_comparison (enum comparison_code);
100 static tree combine_comparisons (enum tree_code, enum tree_code,
101 				 enum tree_code, tree, tree, tree);
102 static int truth_value_p (enum tree_code);
103 static int operand_equal_for_comparison_p (tree, tree, tree);
104 static int twoval_comparison_p (tree, tree *, tree *, int *);
105 static tree eval_subst (tree, tree, tree, tree, tree);
106 static tree pedantic_omit_one_operand (tree, tree, tree);
107 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
108 static tree make_bit_field_ref (tree, tree, int, int, int);
109 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
110 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
111 				    enum machine_mode *, int *, int *,
112 				    tree *, tree *);
113 static int all_ones_mask_p (tree, int);
114 static tree sign_bit_p (tree, tree);
115 static int simple_operand_p (tree);
116 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
117 static tree range_predecessor (tree);
118 static tree range_successor (tree);
119 static tree make_range (tree, int *, tree *, tree *, bool *);
120 static tree build_range_check (tree, tree, int, tree, tree);
121 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
122 			 tree);
123 static tree fold_range_test (enum tree_code, tree, tree, tree);
124 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
125 static tree unextend (tree, int, int, tree);
126 static tree fold_truthop (enum tree_code, tree, tree, tree);
127 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
128 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
129 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
130 static int multiple_of_p (tree, tree, tree);
131 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
132 						 tree, tree,
133 						 tree, tree, int);
134 static bool fold_real_zero_addition_p (tree, tree, int);
135 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
136 				 tree, tree, tree);
137 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
138 static tree fold_div_compare (enum tree_code, tree, tree, tree);
139 static bool reorder_operands_p (tree, tree);
140 static tree fold_negate_const (tree, tree);
141 static tree fold_not_const (tree, tree);
142 static tree fold_relational_const (enum tree_code, tree, tree, tree);
143 static int native_encode_expr (tree, unsigned char *, int);
144 static tree native_interpret_expr (tree, unsigned char *, int);
145 
146 
147 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
148    overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
149    and SUM1.  Then this yields nonzero if overflow occurred during the
150    addition.
151 
152    Overflow occurs if A and B have the same sign, but A and SUM differ in
153    sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
154    sign.  */
155 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
156 
157 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
158    We do that by representing the two-word integer in 4 words, with only
159    HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
160    number.  The value of the word is LOWPART + HIGHPART * BASE.  */
161 
162 #define LOWPART(x) \
163   ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
164 #define HIGHPART(x) \
165   ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
166 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
167 
168 /* Unpack a two-word integer into 4 words.
169    LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
170    WORDS points to the array of HOST_WIDE_INTs.  */
171 
172 static void
encode(HOST_WIDE_INT * words,unsigned HOST_WIDE_INT low,HOST_WIDE_INT hi)173 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
174 {
175   words[0] = LOWPART (low);
176   words[1] = HIGHPART (low);
177   words[2] = LOWPART (hi);
178   words[3] = HIGHPART (hi);
179 }
180 
181 /* Pack an array of 4 words into a two-word integer.
182    WORDS points to the array of words.
183    The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */
184 
185 static void
decode(HOST_WIDE_INT * words,unsigned HOST_WIDE_INT * low,HOST_WIDE_INT * hi)186 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
187 	HOST_WIDE_INT *hi)
188 {
189   *low = words[0] + words[1] * BASE;
190   *hi = words[2] + words[3] * BASE;
191 }
192 
193 /* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
194    in overflow of the value, when >0 we are only interested in signed
195    overflow, for <0 we are interested in any overflow.  OVERFLOWED
196    indicates whether overflow has already occurred.  CONST_OVERFLOWED
197    indicates whether constant overflow has already occurred.  We force
198    T's value to be within range of T's type (by setting to 0 or 1 all
199    the bits outside the type's range).  We set TREE_OVERFLOWED if,
200   	OVERFLOWED is nonzero,
201 	or OVERFLOWABLE is >0 and signed overflow occurs
202 	or OVERFLOWABLE is <0 and any overflow occurs
203    We set TREE_CONSTANT_OVERFLOWED if,
204         CONST_OVERFLOWED is nonzero
205 	or we set TREE_OVERFLOWED.
206   We return either the original T, or a copy.  */
207 
208 tree
force_fit_type(tree t,int overflowable,bool overflowed,bool overflowed_const)209 force_fit_type (tree t, int overflowable,
210 		bool overflowed, bool overflowed_const)
211 {
212   unsigned HOST_WIDE_INT low;
213   HOST_WIDE_INT high;
214   unsigned int prec;
215   int sign_extended_type;
216 
217   gcc_assert (TREE_CODE (t) == INTEGER_CST);
218 
219   low = TREE_INT_CST_LOW (t);
220   high = TREE_INT_CST_HIGH (t);
221 
222   if (POINTER_TYPE_P (TREE_TYPE (t))
223       || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
224     prec = POINTER_SIZE;
225   else
226     prec = TYPE_PRECISION (TREE_TYPE (t));
227   /* Size types *are* sign extended.  */
228   sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
229 			|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
230 			    && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
231 
232   /* First clear all bits that are beyond the type's precision.  */
233 
234   if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
235     ;
236   else if (prec > HOST_BITS_PER_WIDE_INT)
237     high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
238   else
239     {
240       high = 0;
241       if (prec < HOST_BITS_PER_WIDE_INT)
242 	low &= ~((HOST_WIDE_INT) (-1) << prec);
243     }
244 
245   if (!sign_extended_type)
246     /* No sign extension */;
247   else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
248     /* Correct width already.  */;
249   else if (prec > HOST_BITS_PER_WIDE_INT)
250     {
251       /* Sign extend top half? */
252       if (high & ((unsigned HOST_WIDE_INT)1
253 		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
254 	high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
255     }
256   else if (prec == HOST_BITS_PER_WIDE_INT)
257     {
258       if ((HOST_WIDE_INT)low < 0)
259 	high = -1;
260     }
261   else
262     {
263       /* Sign extend bottom half? */
264       if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
265 	{
266 	  high = -1;
267 	  low |= (HOST_WIDE_INT)(-1) << prec;
268 	}
269     }
270 
271   /* If the value changed, return a new node.  */
272   if (overflowed || overflowed_const
273       || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
274     {
275       t = build_int_cst_wide (TREE_TYPE (t), low, high);
276 
277       if (overflowed
278 	  || overflowable < 0
279 	  || (overflowable > 0 && sign_extended_type))
280 	{
281 	  t = copy_node (t);
282 	  TREE_OVERFLOW (t) = 1;
283 	  TREE_CONSTANT_OVERFLOW (t) = 1;
284 	}
285       else if (overflowed_const)
286 	{
287 	  t = copy_node (t);
288 	  TREE_CONSTANT_OVERFLOW (t) = 1;
289 	}
290     }
291 
292   return t;
293 }
294 
295 /* Add two doubleword integers with doubleword result.
296    Return nonzero if the operation overflows according to UNSIGNED_P.
297    Each argument is given as two `HOST_WIDE_INT' pieces.
298    One argument is L1 and H1; the other, L2 and H2.
299    The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
300 
301 int
add_double_with_sign(unsigned HOST_WIDE_INT l1,HOST_WIDE_INT h1,unsigned HOST_WIDE_INT l2,HOST_WIDE_INT h2,unsigned HOST_WIDE_INT * lv,HOST_WIDE_INT * hv,bool unsigned_p)302 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
303 		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
304 		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
305 		      bool unsigned_p)
306 {
307   unsigned HOST_WIDE_INT l;
308   HOST_WIDE_INT h;
309 
310   l = l1 + l2;
311   h = h1 + h2 + (l < l1);
312 
313   *lv = l;
314   *hv = h;
315 
316   if (unsigned_p)
317     return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
318   else
319     return OVERFLOW_SUM_SIGN (h1, h2, h);
320 }
321 
322 /* Negate a doubleword integer with doubleword result.
323    Return nonzero if the operation overflows, assuming it's signed.
324    The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
325    The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
326 
327 int
neg_double(unsigned HOST_WIDE_INT l1,HOST_WIDE_INT h1,unsigned HOST_WIDE_INT * lv,HOST_WIDE_INT * hv)328 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
329 	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
330 {
331   if (l1 == 0)
332     {
333       *lv = 0;
334       *hv = - h1;
335       return (*hv & h1) < 0;
336     }
337   else
338     {
339       *lv = -l1;
340       *hv = ~h1;
341       return 0;
342     }
343 }
344 
345 /* Multiply two doubleword integers with doubleword result.
346    Return nonzero if the operation overflows according to UNSIGNED_P.
347    Each argument is given as two `HOST_WIDE_INT' pieces.
348    One argument is L1 and H1; the other, L2 and H2.
349    The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
350 
351 int
mul_double_with_sign(unsigned HOST_WIDE_INT l1,HOST_WIDE_INT h1,unsigned HOST_WIDE_INT l2,HOST_WIDE_INT h2,unsigned HOST_WIDE_INT * lv,HOST_WIDE_INT * hv,bool unsigned_p)352 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
353 		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
354 		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
355 		      bool unsigned_p)
356 {
357   HOST_WIDE_INT arg1[4];
358   HOST_WIDE_INT arg2[4];
359   HOST_WIDE_INT prod[4 * 2];
360   unsigned HOST_WIDE_INT carry;
361   int i, j, k;
362   unsigned HOST_WIDE_INT toplow, neglow;
363   HOST_WIDE_INT tophigh, neghigh;
364 
365   encode (arg1, l1, h1);
366   encode (arg2, l2, h2);
367 
368   memset (prod, 0, sizeof prod);
369 
370   for (i = 0; i < 4; i++)
371     {
372       carry = 0;
373       for (j = 0; j < 4; j++)
374 	{
375 	  k = i + j;
376 	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
377 	  carry += arg1[i] * arg2[j];
378 	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
379 	  carry += prod[k];
380 	  prod[k] = LOWPART (carry);
381 	  carry = HIGHPART (carry);
382 	}
383       prod[i + 4] = carry;
384     }
385 
386   decode (prod, lv, hv);
387   decode (prod + 4, &toplow, &tophigh);
388 
389   /* Unsigned overflow is immediate.  */
390   if (unsigned_p)
391     return (toplow | tophigh) != 0;
392 
393   /* Check for signed overflow by calculating the signed representation of the
394      top half of the result; it should agree with the low half's sign bit.  */
395   if (h1 < 0)
396     {
397       neg_double (l2, h2, &neglow, &neghigh);
398       add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
399     }
400   if (h2 < 0)
401     {
402       neg_double (l1, h1, &neglow, &neghigh);
403       add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
404     }
405   return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
406 }
407 
408 /* Shift the doubleword integer in L1, H1 left by COUNT places
409    keeping only PREC bits of result.
410    Shift right if COUNT is negative.
411    ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
412    Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
413 
414 void
lshift_double(unsigned HOST_WIDE_INT l1,HOST_WIDE_INT h1,HOST_WIDE_INT count,unsigned int prec,unsigned HOST_WIDE_INT * lv,HOST_WIDE_INT * hv,int arith)415 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
416 	       HOST_WIDE_INT count, unsigned int prec,
417 	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
418 {
419   unsigned HOST_WIDE_INT signmask;
420 
421   if (count < 0)
422     {
423       rshift_double (l1, h1, -count, prec, lv, hv, arith);
424       return;
425     }
426 
427   if (SHIFT_COUNT_TRUNCATED)
428     count %= prec;
429 
430   if (count >= 2 * HOST_BITS_PER_WIDE_INT)
431     {
432       /* Shifting by the host word size is undefined according to the
433 	 ANSI standard, so we must handle this as a special case.  */
434       *hv = 0;
435       *lv = 0;
436     }
437   else if (count >= HOST_BITS_PER_WIDE_INT)
438     {
439       *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
440       *lv = 0;
441     }
442   else
443     {
444       *hv = (((unsigned HOST_WIDE_INT) h1 << count)
445 	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
446       *lv = l1 << count;
447     }
448 
449   /* Sign extend all bits that are beyond the precision.  */
450 
451   signmask = -((prec > HOST_BITS_PER_WIDE_INT
452 		? ((unsigned HOST_WIDE_INT) *hv
453 		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
454 		: (*lv >> (prec - 1))) & 1);
455 
456   if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
457     ;
458   else if (prec >= HOST_BITS_PER_WIDE_INT)
459     {
460       *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
461       *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
462     }
463   else
464     {
465       *hv = signmask;
466       *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
467       *lv |= signmask << prec;
468     }
469 }
470 
471 /* Shift the doubleword integer in L1, H1 right by COUNT places
472    keeping only PREC bits of result.  COUNT must be positive.
473    ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
474    Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
475 
476 void
rshift_double(unsigned HOST_WIDE_INT l1,HOST_WIDE_INT h1,HOST_WIDE_INT count,unsigned int prec,unsigned HOST_WIDE_INT * lv,HOST_WIDE_INT * hv,int arith)477 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
478 	       HOST_WIDE_INT count, unsigned int prec,
479 	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
480 	       int arith)
481 {
482   unsigned HOST_WIDE_INT signmask;
483 
484   signmask = (arith
485 	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
486 	      : 0);
487 
488   if (SHIFT_COUNT_TRUNCATED)
489     count %= prec;
490 
491   if (count >= 2 * HOST_BITS_PER_WIDE_INT)
492     {
493       /* Shifting by the host word size is undefined according to the
494 	 ANSI standard, so we must handle this as a special case.  */
495       *hv = 0;
496       *lv = 0;
497     }
498   else if (count >= HOST_BITS_PER_WIDE_INT)
499     {
500       *hv = 0;
501       *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
502     }
503   else
504     {
505       *hv = (unsigned HOST_WIDE_INT) h1 >> count;
506       *lv = ((l1 >> count)
507 	     | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
508     }
509 
510   /* Zero / sign extend all bits that are beyond the precision.  */
511 
512   if (count >= (HOST_WIDE_INT)prec)
513     {
514       *hv = signmask;
515       *lv = signmask;
516     }
517   else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
518     ;
519   else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
520     {
521       *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
522       *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
523     }
524   else
525     {
526       *hv = signmask;
527       *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
528       *lv |= signmask << (prec - count);
529     }
530 }
531 
532 /* Rotate the doubleword integer in L1, H1 left by COUNT places
533    keeping only PREC bits of result.
534    Rotate right if COUNT is negative.
535    Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
536 
537 void
lrotate_double(unsigned HOST_WIDE_INT l1,HOST_WIDE_INT h1,HOST_WIDE_INT count,unsigned int prec,unsigned HOST_WIDE_INT * lv,HOST_WIDE_INT * hv)538 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
539 		HOST_WIDE_INT count, unsigned int prec,
540 		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
541 {
542   unsigned HOST_WIDE_INT s1l, s2l;
543   HOST_WIDE_INT s1h, s2h;
544 
545   count %= prec;
546   if (count < 0)
547     count += prec;
548 
549   lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
550   rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
551   *lv = s1l | s2l;
552   *hv = s1h | s2h;
553 }
554 
555 /* Rotate the doubleword integer in L1, H1 left by COUNT places
556    keeping only PREC bits of result.  COUNT must be positive.
557    Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
558 
559 void
rrotate_double(unsigned HOST_WIDE_INT l1,HOST_WIDE_INT h1,HOST_WIDE_INT count,unsigned int prec,unsigned HOST_WIDE_INT * lv,HOST_WIDE_INT * hv)560 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
561 		HOST_WIDE_INT count, unsigned int prec,
562 		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
563 {
564   unsigned HOST_WIDE_INT s1l, s2l;
565   HOST_WIDE_INT s1h, s2h;
566 
567   count %= prec;
568   if (count < 0)
569     count += prec;
570 
571   rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
572   lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
573   *lv = s1l | s2l;
574   *hv = s1h | s2h;
575 }
576 
577 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
578    for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
579    CODE is a tree code for a kind of division, one of
580    TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
581    or EXACT_DIV_EXPR
582    It controls how the quotient is rounded to an integer.
583    Return nonzero if the operation overflows.
584    UNS nonzero says do unsigned division.  */
585 
586 int
div_and_round_double(enum tree_code code,int uns,unsigned HOST_WIDE_INT lnum_orig,HOST_WIDE_INT hnum_orig,unsigned HOST_WIDE_INT lden_orig,HOST_WIDE_INT hden_orig,unsigned HOST_WIDE_INT * lquo,HOST_WIDE_INT * hquo,unsigned HOST_WIDE_INT * lrem,HOST_WIDE_INT * hrem)587 div_and_round_double (enum tree_code code, int uns,
588 		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
589 		      HOST_WIDE_INT hnum_orig,
590 		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
591 		      HOST_WIDE_INT hden_orig,
592 		      unsigned HOST_WIDE_INT *lquo,
593 		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
594 		      HOST_WIDE_INT *hrem)
595 {
596   int quo_neg = 0;
597   HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
598   HOST_WIDE_INT den[4], quo[4];
599   int i, j;
600   unsigned HOST_WIDE_INT work;
601   unsigned HOST_WIDE_INT carry = 0;
602   unsigned HOST_WIDE_INT lnum = lnum_orig;
603   HOST_WIDE_INT hnum = hnum_orig;
604   unsigned HOST_WIDE_INT lden = lden_orig;
605   HOST_WIDE_INT hden = hden_orig;
606   int overflow = 0;
607 
608   if (hden == 0 && lden == 0)
609     overflow = 1, lden = 1;
610 
611   /* Calculate quotient sign and convert operands to unsigned.  */
612   if (!uns)
613     {
614       if (hnum < 0)
615 	{
616 	  quo_neg = ~ quo_neg;
617 	  /* (minimum integer) / (-1) is the only overflow case.  */
618 	  if (neg_double (lnum, hnum, &lnum, &hnum)
619 	      && ((HOST_WIDE_INT) lden & hden) == -1)
620 	    overflow = 1;
621 	}
622       if (hden < 0)
623 	{
624 	  quo_neg = ~ quo_neg;
625 	  neg_double (lden, hden, &lden, &hden);
626 	}
627     }
628 
629   if (hnum == 0 && hden == 0)
630     {				/* single precision */
631       *hquo = *hrem = 0;
632       /* This unsigned division rounds toward zero.  */
633       *lquo = lnum / lden;
634       goto finish_up;
635     }
636 
637   if (hnum == 0)
638     {				/* trivial case: dividend < divisor */
639       /* hden != 0 already checked.  */
640       *hquo = *lquo = 0;
641       *hrem = hnum;
642       *lrem = lnum;
643       goto finish_up;
644     }
645 
646   memset (quo, 0, sizeof quo);
647 
648   memset (num, 0, sizeof num);	/* to zero 9th element */
649   memset (den, 0, sizeof den);
650 
651   encode (num, lnum, hnum);
652   encode (den, lden, hden);
653 
654   /* Special code for when the divisor < BASE.  */
655   if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
656     {
657       /* hnum != 0 already checked.  */
658       for (i = 4 - 1; i >= 0; i--)
659 	{
660 	  work = num[i] + carry * BASE;
661 	  quo[i] = work / lden;
662 	  carry = work % lden;
663 	}
664     }
665   else
666     {
667       /* Full double precision division,
668 	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
669       int num_hi_sig, den_hi_sig;
670       unsigned HOST_WIDE_INT quo_est, scale;
671 
672       /* Find the highest nonzero divisor digit.  */
673       for (i = 4 - 1;; i--)
674 	if (den[i] != 0)
675 	  {
676 	    den_hi_sig = i;
677 	    break;
678 	  }
679 
680       /* Insure that the first digit of the divisor is at least BASE/2.
681 	 This is required by the quotient digit estimation algorithm.  */
682 
683       scale = BASE / (den[den_hi_sig] + 1);
684       if (scale > 1)
685 	{		/* scale divisor and dividend */
686 	  carry = 0;
687 	  for (i = 0; i <= 4 - 1; i++)
688 	    {
689 	      work = (num[i] * scale) + carry;
690 	      num[i] = LOWPART (work);
691 	      carry = HIGHPART (work);
692 	    }
693 
694 	  num[4] = carry;
695 	  carry = 0;
696 	  for (i = 0; i <= 4 - 1; i++)
697 	    {
698 	      work = (den[i] * scale) + carry;
699 	      den[i] = LOWPART (work);
700 	      carry = HIGHPART (work);
701 	      if (den[i] != 0) den_hi_sig = i;
702 	    }
703 	}
704 
705       num_hi_sig = 4;
706 
707       /* Main loop */
708       for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
709 	{
710 	  /* Guess the next quotient digit, quo_est, by dividing the first
711 	     two remaining dividend digits by the high order quotient digit.
712 	     quo_est is never low and is at most 2 high.  */
713 	  unsigned HOST_WIDE_INT tmp;
714 
715 	  num_hi_sig = i + den_hi_sig + 1;
716 	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
717 	  if (num[num_hi_sig] != den[den_hi_sig])
718 	    quo_est = work / den[den_hi_sig];
719 	  else
720 	    quo_est = BASE - 1;
721 
722 	  /* Refine quo_est so it's usually correct, and at most one high.  */
723 	  tmp = work - quo_est * den[den_hi_sig];
724 	  if (tmp < BASE
725 	      && (den[den_hi_sig - 1] * quo_est
726 		  > (tmp * BASE + num[num_hi_sig - 2])))
727 	    quo_est--;
728 
729 	  /* Try QUO_EST as the quotient digit, by multiplying the
730 	     divisor by QUO_EST and subtracting from the remaining dividend.
731 	     Keep in mind that QUO_EST is the I - 1st digit.  */
732 
733 	  carry = 0;
734 	  for (j = 0; j <= den_hi_sig; j++)
735 	    {
736 	      work = quo_est * den[j] + carry;
737 	      carry = HIGHPART (work);
738 	      work = num[i + j] - LOWPART (work);
739 	      num[i + j] = LOWPART (work);
740 	      carry += HIGHPART (work) != 0;
741 	    }
742 
743 	  /* If quo_est was high by one, then num[i] went negative and
744 	     we need to correct things.  */
745 	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
746 	    {
747 	      quo_est--;
748 	      carry = 0;		/* add divisor back in */
749 	      for (j = 0; j <= den_hi_sig; j++)
750 		{
751 		  work = num[i + j] + den[j] + carry;
752 		  carry = HIGHPART (work);
753 		  num[i + j] = LOWPART (work);
754 		}
755 
756 	      num [num_hi_sig] += carry;
757 	    }
758 
759 	  /* Store the quotient digit.  */
760 	  quo[i] = quo_est;
761 	}
762     }
763 
764   decode (quo, lquo, hquo);
765 
766  finish_up:
767   /* If result is negative, make it so.  */
768   if (quo_neg)
769     neg_double (*lquo, *hquo, lquo, hquo);
770 
771   /* Compute trial remainder:  rem = num - (quo * den)  */
772   mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
773   neg_double (*lrem, *hrem, lrem, hrem);
774   add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
775 
776   switch (code)
777     {
778     case TRUNC_DIV_EXPR:
779     case TRUNC_MOD_EXPR:	/* round toward zero */
780     case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
781       return overflow;
782 
783     case FLOOR_DIV_EXPR:
784     case FLOOR_MOD_EXPR:	/* round toward negative infinity */
785       if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
786 	{
787 	  /* quo = quo - 1;  */
788 	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT)  -1,
789 		      lquo, hquo);
790 	}
791       else
792 	return overflow;
793       break;
794 
795     case CEIL_DIV_EXPR:
796     case CEIL_MOD_EXPR:		/* round toward positive infinity */
797       if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
798 	{
799 	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
800 		      lquo, hquo);
801 	}
802       else
803 	return overflow;
804       break;
805 
806     case ROUND_DIV_EXPR:
807     case ROUND_MOD_EXPR:	/* round to closest integer */
808       {
809 	unsigned HOST_WIDE_INT labs_rem = *lrem;
810 	HOST_WIDE_INT habs_rem = *hrem;
811 	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
812 	HOST_WIDE_INT habs_den = hden, htwice;
813 
814 	/* Get absolute values.  */
815 	if (*hrem < 0)
816 	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
817 	if (hden < 0)
818 	  neg_double (lden, hden, &labs_den, &habs_den);
819 
820 	/* If (2 * abs (lrem) >= abs (lden)) */
821 	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
822 		    labs_rem, habs_rem, &ltwice, &htwice);
823 
824 	if (((unsigned HOST_WIDE_INT) habs_den
825 	     < (unsigned HOST_WIDE_INT) htwice)
826 	    || (((unsigned HOST_WIDE_INT) habs_den
827 		 == (unsigned HOST_WIDE_INT) htwice)
828 		&& (labs_den < ltwice)))
829 	  {
830 	    if (*hquo < 0)
831 	      /* quo = quo - 1;  */
832 	      add_double (*lquo, *hquo,
833 			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
834 	    else
835 	      /* quo = quo + 1; */
836 	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
837 			  lquo, hquo);
838 	  }
839 	else
840 	  return overflow;
841       }
842       break;
843 
844     default:
845       gcc_unreachable ();
846     }
847 
848   /* Compute true remainder:  rem = num - (quo * den)  */
849   mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
850   neg_double (*lrem, *hrem, lrem, hrem);
851   add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
852   return overflow;
853 }
854 
855 /* If ARG2 divides ARG1 with zero remainder, carries out the division
856    of type CODE and returns the quotient.
857    Otherwise returns NULL_TREE.  */
858 
859 static tree
div_if_zero_remainder(enum tree_code code,tree arg1,tree arg2)860 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
861 {
862   unsigned HOST_WIDE_INT int1l, int2l;
863   HOST_WIDE_INT int1h, int2h;
864   unsigned HOST_WIDE_INT quol, reml;
865   HOST_WIDE_INT quoh, remh;
866   tree type = TREE_TYPE (arg1);
867   int uns = TYPE_UNSIGNED (type);
868 
869   int1l = TREE_INT_CST_LOW (arg1);
870   int1h = TREE_INT_CST_HIGH (arg1);
871   int2l = TREE_INT_CST_LOW (arg2);
872   int2h = TREE_INT_CST_HIGH (arg2);
873 
874   div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
875 		  	&quol, &quoh, &reml, &remh);
876   if (remh != 0 || reml != 0)
877     return NULL_TREE;
878 
879   return build_int_cst_wide (type, quol, quoh);
880 }
881 
882 /* This is non-zero if we should defer warnings about undefined
883    overflow.  This facility exists because these warnings are a
884    special case.  The code to estimate loop iterations does not want
885    to issue any warnings, since it works with expressions which do not
886    occur in user code.  Various bits of cleanup code call fold(), but
887    only use the result if it has certain characteristics (e.g., is a
888    constant); that code only wants to issue a warning if the result is
889    used.  */
890 
891 static int fold_deferring_overflow_warnings;
892 
893 /* If a warning about undefined overflow is deferred, this is the
894    warning.  Note that this may cause us to turn two warnings into
895    one, but that is fine since it is sufficient to only give one
896    warning per expression.  */
897 
898 static const char* fold_deferred_overflow_warning;
899 
900 /* If a warning about undefined overflow is deferred, this is the
901    level at which the warning should be emitted.  */
902 
903 static enum warn_strict_overflow_code fold_deferred_overflow_code;
904 
905 /* Start deferring overflow warnings.  We could use a stack here to
906    permit nested calls, but at present it is not necessary.  */
907 
908 void
fold_defer_overflow_warnings(void)909 fold_defer_overflow_warnings (void)
910 {
911   ++fold_deferring_overflow_warnings;
912 }
913 
914 /* Stop deferring overflow warnings.  If there is a pending warning,
915    and ISSUE is true, then issue the warning if appropriate.  STMT is
916    the statement with which the warning should be associated (used for
917    location information); STMT may be NULL.  CODE is the level of the
918    warning--a warn_strict_overflow_code value.  This function will use
919    the smaller of CODE and the deferred code when deciding whether to
920    issue the warning.  CODE may be zero to mean to always use the
921    deferred code.  */
922 
923 void
fold_undefer_overflow_warnings(bool issue,tree stmt,int code)924 fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
925 {
926   const char *warnmsg;
927   location_t locus;
928 
929   gcc_assert (fold_deferring_overflow_warnings > 0);
930   --fold_deferring_overflow_warnings;
931   if (fold_deferring_overflow_warnings > 0)
932     {
933       if (fold_deferred_overflow_warning != NULL
934 	  && code != 0
935 	  && code < (int) fold_deferred_overflow_code)
936 	fold_deferred_overflow_code = code;
937       return;
938     }
939 
940   warnmsg = fold_deferred_overflow_warning;
941   fold_deferred_overflow_warning = NULL;
942 
943   if (!issue || warnmsg == NULL)
944     return;
945 
946   /* Use the smallest code level when deciding to issue the
947      warning.  */
948   if (code == 0 || code > (int) fold_deferred_overflow_code)
949     code = fold_deferred_overflow_code;
950 
951   if (!issue_strict_overflow_warning (code))
952     return;
953 
954   if (stmt == NULL_TREE || !EXPR_HAS_LOCATION (stmt))
955     locus = input_location;
956   else
957     locus = EXPR_LOCATION (stmt);
958   warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
959 }
960 
961 /* Stop deferring overflow warnings, ignoring any deferred
962    warnings.  */
963 
964 void
fold_undefer_and_ignore_overflow_warnings(void)965 fold_undefer_and_ignore_overflow_warnings (void)
966 {
967   fold_undefer_overflow_warnings (false, NULL_TREE, 0);
968 }
969 
970 /* Whether we are deferring overflow warnings.  */
971 
972 bool
fold_deferring_overflow_warnings_p(void)973 fold_deferring_overflow_warnings_p (void)
974 {
975   return fold_deferring_overflow_warnings > 0;
976 }
977 
978 /* This is called when we fold something based on the fact that signed
979    overflow is undefined.  */
980 
981 static void
fold_overflow_warning(const char * gmsgid,enum warn_strict_overflow_code wc)982 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
983 {
984   gcc_assert (!flag_wrapv && !flag_trapv);
985   if (fold_deferring_overflow_warnings > 0)
986     {
987       if (fold_deferred_overflow_warning == NULL
988 	  || wc < fold_deferred_overflow_code)
989 	{
990 	  fold_deferred_overflow_warning = gmsgid;
991 	  fold_deferred_overflow_code = wc;
992 	}
993     }
994   else if (issue_strict_overflow_warning (wc))
995     warning (OPT_Wstrict_overflow, gmsgid);
996 }
997 
998 /* Return true if the built-in mathematical function specified by CODE
999    is odd, i.e. -f(x) == f(-x).  */
1000 
1001 static bool
negate_mathfn_p(enum built_in_function code)1002 negate_mathfn_p (enum built_in_function code)
1003 {
1004   switch (code)
1005     {
1006     CASE_FLT_FN (BUILT_IN_ASIN):
1007     CASE_FLT_FN (BUILT_IN_ASINH):
1008     CASE_FLT_FN (BUILT_IN_ATAN):
1009     CASE_FLT_FN (BUILT_IN_ATANH):
1010     CASE_FLT_FN (BUILT_IN_CBRT):
1011     CASE_FLT_FN (BUILT_IN_SIN):
1012     CASE_FLT_FN (BUILT_IN_SINH):
1013     CASE_FLT_FN (BUILT_IN_TAN):
1014     CASE_FLT_FN (BUILT_IN_TANH):
1015       return true;
1016 
1017     default:
1018       break;
1019     }
1020   return false;
1021 }
1022 
1023 /* Check whether we may negate an integer constant T without causing
1024    overflow.  */
1025 
1026 bool
may_negate_without_overflow_p(tree t)1027 may_negate_without_overflow_p (tree t)
1028 {
1029   unsigned HOST_WIDE_INT val;
1030   unsigned int prec;
1031   tree type;
1032 
1033   gcc_assert (TREE_CODE (t) == INTEGER_CST);
1034 
1035   type = TREE_TYPE (t);
1036   if (TYPE_UNSIGNED (type))
1037     return false;
1038 
1039   prec = TYPE_PRECISION (type);
1040   if (prec > HOST_BITS_PER_WIDE_INT)
1041     {
1042       if (TREE_INT_CST_LOW (t) != 0)
1043 	return true;
1044       prec -= HOST_BITS_PER_WIDE_INT;
1045       val = TREE_INT_CST_HIGH (t);
1046     }
1047   else
1048     val = TREE_INT_CST_LOW (t);
1049   if (prec < HOST_BITS_PER_WIDE_INT)
1050     val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1051   return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1052 }
1053 
1054 /* Determine whether an expression T can be cheaply negated using
1055    the function negate_expr without introducing undefined overflow.  */
1056 
1057 static bool
negate_expr_p(tree t)1058 negate_expr_p (tree t)
1059 {
1060   tree type;
1061 
1062   if (t == 0)
1063     return false;
1064 
1065   type = TREE_TYPE (t);
1066 
1067   STRIP_SIGN_NOPS (t);
1068   switch (TREE_CODE (t))
1069     {
1070     case INTEGER_CST:
1071       if (TYPE_OVERFLOW_WRAPS (type))
1072 	return true;
1073 
1074       /* Check that -CST will not overflow type.  */
1075       return may_negate_without_overflow_p (t);
1076     case BIT_NOT_EXPR:
1077       return (INTEGRAL_TYPE_P (type)
1078 	      && TYPE_OVERFLOW_WRAPS (type));
1079 
1080     case REAL_CST:
1081     case NEGATE_EXPR:
1082       return true;
1083 
1084     case COMPLEX_CST:
1085       return negate_expr_p (TREE_REALPART (t))
1086 	     && negate_expr_p (TREE_IMAGPART (t));
1087 
1088     case PLUS_EXPR:
1089       if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
1090 	return false;
1091       /* -(A + B) -> (-B) - A.  */
1092       if (negate_expr_p (TREE_OPERAND (t, 1))
1093 	  && reorder_operands_p (TREE_OPERAND (t, 0),
1094 				 TREE_OPERAND (t, 1)))
1095 	return true;
1096       /* -(A + B) -> (-A) - B.  */
1097       return negate_expr_p (TREE_OPERAND (t, 0));
1098 
1099     case MINUS_EXPR:
1100       /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
1101       return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1102 	     && reorder_operands_p (TREE_OPERAND (t, 0),
1103 				    TREE_OPERAND (t, 1));
1104 
1105     case MULT_EXPR:
1106       if (TYPE_UNSIGNED (TREE_TYPE (t)))
1107         break;
1108 
1109       /* Fall through.  */
1110 
1111     case RDIV_EXPR:
1112       if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1113 	return negate_expr_p (TREE_OPERAND (t, 1))
1114 	       || negate_expr_p (TREE_OPERAND (t, 0));
1115       break;
1116 
1117     case TRUNC_DIV_EXPR:
1118     case ROUND_DIV_EXPR:
1119     case FLOOR_DIV_EXPR:
1120     case CEIL_DIV_EXPR:
1121     case EXACT_DIV_EXPR:
1122       /* In general we can't negate A / B, because if A is INT_MIN and
1123 	 B is 1, we may turn this into INT_MIN / -1 which is undefined
1124 	 and actually traps on some architectures.  But if overflow is
1125 	 undefined, we can negate, because - (INT_MIN / 1) is an
1126 	 overflow.  */
1127       if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1128 	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1129         break;
1130       return negate_expr_p (TREE_OPERAND (t, 1))
1131              || negate_expr_p (TREE_OPERAND (t, 0));
1132 
1133     case NOP_EXPR:
1134       /* Negate -((double)float) as (double)(-float).  */
1135       if (TREE_CODE (type) == REAL_TYPE)
1136 	{
1137 	  tree tem = strip_float_extensions (t);
1138 	  if (tem != t)
1139 	    return negate_expr_p (tem);
1140 	}
1141       break;
1142 
1143     case CALL_EXPR:
1144       /* Negate -f(x) as f(-x).  */
1145       if (negate_mathfn_p (builtin_mathfn_code (t)))
1146 	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1147       break;
1148 
1149     case RSHIFT_EXPR:
1150       /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
1151       if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1152 	{
1153 	  tree op1 = TREE_OPERAND (t, 1);
1154 	  if (TREE_INT_CST_HIGH (op1) == 0
1155 	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1156 		 == TREE_INT_CST_LOW (op1))
1157 	    return true;
1158 	}
1159       break;
1160 
1161     default:
1162       break;
1163     }
1164   return false;
1165 }
1166 
1167 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1168    simplification is possible.
1169    If negate_expr_p would return true for T, NULL_TREE will never be
1170    returned.  */
1171 
1172 static tree
fold_negate_expr(tree t)1173 fold_negate_expr (tree t)
1174 {
1175   tree type = TREE_TYPE (t);
1176   tree tem;
1177 
1178   switch (TREE_CODE (t))
1179     {
1180     /* Convert - (~A) to A + 1.  */
1181     case BIT_NOT_EXPR:
1182       if (INTEGRAL_TYPE_P (type))
1183         return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1184                             build_int_cst (type, 1));
1185       break;
1186 
1187     case INTEGER_CST:
1188       tem = fold_negate_const (t, type);
1189       if (!TREE_OVERFLOW (tem)
1190 	  || !TYPE_OVERFLOW_TRAPS (type))
1191 	return tem;
1192       break;
1193 
1194     case REAL_CST:
1195       tem = fold_negate_const (t, type);
1196       /* Two's complement FP formats, such as c4x, may overflow.  */
1197       if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1198 	return tem;
1199       break;
1200 
1201     case COMPLEX_CST:
1202       {
1203 	tree rpart = negate_expr (TREE_REALPART (t));
1204 	tree ipart = negate_expr (TREE_IMAGPART (t));
1205 
1206 	if ((TREE_CODE (rpart) == REAL_CST
1207 	     && TREE_CODE (ipart) == REAL_CST)
1208 	    || (TREE_CODE (rpart) == INTEGER_CST
1209 		&& TREE_CODE (ipart) == INTEGER_CST))
1210 	  return build_complex (type, rpart, ipart);
1211       }
1212       break;
1213 
1214     case NEGATE_EXPR:
1215       return TREE_OPERAND (t, 0);
1216 
1217     case PLUS_EXPR:
1218       if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1219 	{
1220 	  /* -(A + B) -> (-B) - A.  */
1221 	  if (negate_expr_p (TREE_OPERAND (t, 1))
1222 	      && reorder_operands_p (TREE_OPERAND (t, 0),
1223 				     TREE_OPERAND (t, 1)))
1224 	    {
1225 	      tem = negate_expr (TREE_OPERAND (t, 1));
1226 	      return fold_build2 (MINUS_EXPR, type,
1227 				  tem, TREE_OPERAND (t, 0));
1228 	    }
1229 
1230 	  /* -(A + B) -> (-A) - B.  */
1231 	  if (negate_expr_p (TREE_OPERAND (t, 0)))
1232 	    {
1233 	      tem = negate_expr (TREE_OPERAND (t, 0));
1234 	      return fold_build2 (MINUS_EXPR, type,
1235 				  tem, TREE_OPERAND (t, 1));
1236 	    }
1237 	}
1238       break;
1239 
1240     case MINUS_EXPR:
1241       /* - (A - B) -> B - A  */
1242       if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1243 	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1244 	return fold_build2 (MINUS_EXPR, type,
1245 			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1246       break;
1247 
1248     case MULT_EXPR:
1249       if (TYPE_UNSIGNED (type))
1250         break;
1251 
1252       /* Fall through.  */
1253 
1254     case RDIV_EXPR:
1255       if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1256 	{
1257 	  tem = TREE_OPERAND (t, 1);
1258 	  if (negate_expr_p (tem))
1259 	    return fold_build2 (TREE_CODE (t), type,
1260 				TREE_OPERAND (t, 0), negate_expr (tem));
1261 	  tem = TREE_OPERAND (t, 0);
1262 	  if (negate_expr_p (tem))
1263 	    return fold_build2 (TREE_CODE (t), type,
1264 				negate_expr (tem), TREE_OPERAND (t, 1));
1265 	}
1266       break;
1267 
1268     case TRUNC_DIV_EXPR:
1269     case ROUND_DIV_EXPR:
1270     case FLOOR_DIV_EXPR:
1271     case CEIL_DIV_EXPR:
1272     case EXACT_DIV_EXPR:
1273       /* In general we can't negate A / B, because if A is INT_MIN and
1274 	 B is 1, we may turn this into INT_MIN / -1 which is undefined
1275 	 and actually traps on some architectures.  But if overflow is
1276 	 undefined, we can negate, because - (INT_MIN / 1) is an
1277 	 overflow.  */
1278       if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1279         {
1280 	  const char * const warnmsg = G_("assuming signed overflow does not "
1281 					  "occur when negating a division");
1282           tem = TREE_OPERAND (t, 1);
1283           if (negate_expr_p (tem))
1284 	    {
1285 	      if (INTEGRAL_TYPE_P (type)
1286 		  && (TREE_CODE (tem) != INTEGER_CST
1287 		      || integer_onep (tem)))
1288 		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1289 	      return fold_build2 (TREE_CODE (t), type,
1290 				  TREE_OPERAND (t, 0), negate_expr (tem));
1291 	    }
1292           tem = TREE_OPERAND (t, 0);
1293           if (negate_expr_p (tem))
1294 	    {
1295 	      if (INTEGRAL_TYPE_P (type)
1296 		  && (TREE_CODE (tem) != INTEGER_CST
1297 		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1298 		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1299 	      return fold_build2 (TREE_CODE (t), type,
1300 				  negate_expr (tem), TREE_OPERAND (t, 1));
1301 	    }
1302         }
1303       break;
1304 
1305     case NOP_EXPR:
1306       /* Convert -((double)float) into (double)(-float).  */
1307       if (TREE_CODE (type) == REAL_TYPE)
1308 	{
1309 	  tem = strip_float_extensions (t);
1310 	  if (tem != t && negate_expr_p (tem))
1311 	    return negate_expr (tem);
1312 	}
1313       break;
1314 
1315     case CALL_EXPR:
1316       /* Negate -f(x) as f(-x).  */
1317       if (negate_mathfn_p (builtin_mathfn_code (t))
1318 	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1319 	{
1320 	  tree fndecl, arg, arglist;
1321 
1322 	  fndecl = get_callee_fndecl (t);
1323 	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1324 	  arglist = build_tree_list (NULL_TREE, arg);
1325 	  return build_function_call_expr (fndecl, arglist);
1326 	}
1327       break;
1328 
1329     case RSHIFT_EXPR:
1330       /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
1331       if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1332 	{
1333 	  tree op1 = TREE_OPERAND (t, 1);
1334 	  if (TREE_INT_CST_HIGH (op1) == 0
1335 	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1336 		 == TREE_INT_CST_LOW (op1))
1337 	    {
1338 	      tree ntype = TYPE_UNSIGNED (type)
1339 			   ? lang_hooks.types.signed_type (type)
1340 			   : lang_hooks.types.unsigned_type (type);
1341 	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1342 	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1343 	      return fold_convert (type, temp);
1344 	    }
1345 	}
1346       break;
1347 
1348     default:
1349       break;
1350     }
1351 
1352   return NULL_TREE;
1353 }
1354 
1355 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1356    negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
1357    return NULL_TREE. */
1358 
1359 static tree
negate_expr(tree t)1360 negate_expr (tree t)
1361 {
1362   tree type, tem;
1363 
1364   if (t == NULL_TREE)
1365     return NULL_TREE;
1366 
1367   type = TREE_TYPE (t);
1368   STRIP_SIGN_NOPS (t);
1369 
1370   tem = fold_negate_expr (t);
1371   if (!tem)
1372     tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1373   return fold_convert (type, tem);
1374 }
1375 
1376 /* Split a tree IN into a constant, literal and variable parts that could be
1377    combined with CODE to make IN.  "constant" means an expression with
1378    TREE_CONSTANT but that isn't an actual constant.  CODE must be a
1379    commutative arithmetic operation.  Store the constant part into *CONP,
1380    the literal in *LITP and return the variable part.  If a part isn't
1381    present, set it to null.  If the tree does not decompose in this way,
1382    return the entire tree as the variable part and the other parts as null.
1383 
1384    If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
1385    case, we negate an operand that was subtracted.  Except if it is a
1386    literal for which we use *MINUS_LITP instead.
1387 
1388    If NEGATE_P is true, we are negating all of IN, again except a literal
1389    for which we use *MINUS_LITP instead.
1390 
1391    If IN is itself a literal or constant, return it as appropriate.
1392 
1393    Note that we do not guarantee that any of the three values will be the
1394    same type as IN, but they will have the same signedness and mode.  */
1395 
1396 static tree
split_tree(tree in,enum tree_code code,tree * conp,tree * litp,tree * minus_litp,int negate_p)1397 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1398 	    tree *minus_litp, int negate_p)
1399 {
1400   tree var = 0;
1401 
1402   *conp = 0;
1403   *litp = 0;
1404   *minus_litp = 0;
1405 
1406   /* Strip any conversions that don't change the machine mode or signedness.  */
1407   STRIP_SIGN_NOPS (in);
1408 
1409   if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1410     *litp = in;
1411   else if (TREE_CODE (in) == code
1412 	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
1413 	       /* We can associate addition and subtraction together (even
1414 		  though the C standard doesn't say so) for integers because
1415 		  the value is not affected.  For reals, the value might be
1416 		  affected, so we can't.  */
1417 	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1418 		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1419     {
1420       tree op0 = TREE_OPERAND (in, 0);
1421       tree op1 = TREE_OPERAND (in, 1);
1422       int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1423       int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1424 
1425       /* First see if either of the operands is a literal, then a constant.  */
1426       if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1427 	*litp = op0, op0 = 0;
1428       else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1429 	*litp = op1, neg_litp_p = neg1_p, op1 = 0;
1430 
1431       if (op0 != 0 && TREE_CONSTANT (op0))
1432 	*conp = op0, op0 = 0;
1433       else if (op1 != 0 && TREE_CONSTANT (op1))
1434 	*conp = op1, neg_conp_p = neg1_p, op1 = 0;
1435 
1436       /* If we haven't dealt with either operand, this is not a case we can
1437 	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
1438       if (op0 != 0 && op1 != 0)
1439 	var = in;
1440       else if (op0 != 0)
1441 	var = op0;
1442       else
1443 	var = op1, neg_var_p = neg1_p;
1444 
1445       /* Now do any needed negations.  */
1446       if (neg_litp_p)
1447 	*minus_litp = *litp, *litp = 0;
1448       if (neg_conp_p)
1449 	*conp = negate_expr (*conp);
1450       if (neg_var_p)
1451 	var = negate_expr (var);
1452     }
1453   else if (TREE_CONSTANT (in))
1454     *conp = in;
1455   else
1456     var = in;
1457 
1458   if (negate_p)
1459     {
1460       if (*litp)
1461 	*minus_litp = *litp, *litp = 0;
1462       else if (*minus_litp)
1463 	*litp = *minus_litp, *minus_litp = 0;
1464       *conp = negate_expr (*conp);
1465       var = negate_expr (var);
1466     }
1467 
1468   return var;
1469 }
1470 
1471 /* Re-associate trees split by the above function.  T1 and T2 are either
1472    expressions to associate or null.  Return the new expression, if any.  If
1473    we build an operation, do it in TYPE and with CODE.  */
1474 
1475 static tree
associate_trees(tree t1,tree t2,enum tree_code code,tree type)1476 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1477 {
1478   if (t1 == 0)
1479     return t2;
1480   else if (t2 == 0)
1481     return t1;
1482 
1483   /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1484      try to fold this since we will have infinite recursion.  But do
1485      deal with any NEGATE_EXPRs.  */
1486   if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1487       || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1488     {
1489       if (code == PLUS_EXPR)
1490 	{
1491 	  if (TREE_CODE (t1) == NEGATE_EXPR)
1492 	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1493 			   fold_convert (type, TREE_OPERAND (t1, 0)));
1494 	  else if (TREE_CODE (t2) == NEGATE_EXPR)
1495 	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1496 			   fold_convert (type, TREE_OPERAND (t2, 0)));
1497 	  else if (integer_zerop (t2))
1498 	    return fold_convert (type, t1);
1499 	}
1500       else if (code == MINUS_EXPR)
1501 	{
1502 	  if (integer_zerop (t2))
1503 	    return fold_convert (type, t1);
1504 	}
1505 
1506       return build2 (code, type, fold_convert (type, t1),
1507 		     fold_convert (type, t2));
1508     }
1509 
1510   return fold_build2 (code, type, fold_convert (type, t1),
1511 		      fold_convert (type, t2));
1512 }
1513 
1514 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1515    to produce a new constant.  Return NULL_TREE if we don't know how
1516    to evaluate CODE at compile-time.
1517 
1518    If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
1519 
1520 tree
int_const_binop(enum tree_code code,tree arg1,tree arg2,int notrunc)1521 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1522 {
1523   unsigned HOST_WIDE_INT int1l, int2l;
1524   HOST_WIDE_INT int1h, int2h;
1525   unsigned HOST_WIDE_INT low;
1526   HOST_WIDE_INT hi;
1527   unsigned HOST_WIDE_INT garbagel;
1528   HOST_WIDE_INT garbageh;
1529   tree t;
1530   tree type = TREE_TYPE (arg1);
1531   int uns = TYPE_UNSIGNED (type);
1532   int is_sizetype
1533     = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1534   int overflow = 0;
1535 
1536   int1l = TREE_INT_CST_LOW (arg1);
1537   int1h = TREE_INT_CST_HIGH (arg1);
1538   int2l = TREE_INT_CST_LOW (arg2);
1539   int2h = TREE_INT_CST_HIGH (arg2);
1540 
1541   switch (code)
1542     {
1543     case BIT_IOR_EXPR:
1544       low = int1l | int2l, hi = int1h | int2h;
1545       break;
1546 
1547     case BIT_XOR_EXPR:
1548       low = int1l ^ int2l, hi = int1h ^ int2h;
1549       break;
1550 
1551     case BIT_AND_EXPR:
1552       low = int1l & int2l, hi = int1h & int2h;
1553       break;
1554 
1555     case RSHIFT_EXPR:
1556       int2l = -int2l;
1557     case LSHIFT_EXPR:
1558       /* It's unclear from the C standard whether shifts can overflow.
1559 	 The following code ignores overflow; perhaps a C standard
1560 	 interpretation ruling is needed.  */
1561       lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1562 		     &low, &hi, !uns);
1563       break;
1564 
1565     case RROTATE_EXPR:
1566       int2l = - int2l;
1567     case LROTATE_EXPR:
1568       lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1569 		      &low, &hi);
1570       break;
1571 
1572     case PLUS_EXPR:
1573       overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1574       break;
1575 
1576     case MINUS_EXPR:
1577       neg_double (int2l, int2h, &low, &hi);
1578       add_double (int1l, int1h, low, hi, &low, &hi);
1579       overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1580       break;
1581 
1582     case MULT_EXPR:
1583       overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1584       break;
1585 
1586     case TRUNC_DIV_EXPR:
1587     case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1588     case EXACT_DIV_EXPR:
1589       /* This is a shortcut for a common special case.  */
1590       if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1591 	  && ! TREE_CONSTANT_OVERFLOW (arg1)
1592 	  && ! TREE_CONSTANT_OVERFLOW (arg2)
1593 	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1594 	{
1595 	  if (code == CEIL_DIV_EXPR)
1596 	    int1l += int2l - 1;
1597 
1598 	  low = int1l / int2l, hi = 0;
1599 	  break;
1600 	}
1601 
1602       /* ... fall through ...  */
1603 
1604     case ROUND_DIV_EXPR:
1605       if (int2h == 0 && int2l == 0)
1606 	return NULL_TREE;
1607       if (int2h == 0 && int2l == 1)
1608 	{
1609 	  low = int1l, hi = int1h;
1610 	  break;
1611 	}
1612       if (int1l == int2l && int1h == int2h
1613 	  && ! (int1l == 0 && int1h == 0))
1614 	{
1615 	  low = 1, hi = 0;
1616 	  break;
1617 	}
1618       overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1619 				       &low, &hi, &garbagel, &garbageh);
1620       break;
1621 
1622     case TRUNC_MOD_EXPR:
1623     case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1624       /* This is a shortcut for a common special case.  */
1625       if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1626 	  && ! TREE_CONSTANT_OVERFLOW (arg1)
1627 	  && ! TREE_CONSTANT_OVERFLOW (arg2)
1628 	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1629 	{
1630 	  if (code == CEIL_MOD_EXPR)
1631 	    int1l += int2l - 1;
1632 	  low = int1l % int2l, hi = 0;
1633 	  break;
1634 	}
1635 
1636       /* ... fall through ...  */
1637 
1638     case ROUND_MOD_EXPR:
1639       if (int2h == 0 && int2l == 0)
1640 	return NULL_TREE;
1641       overflow = div_and_round_double (code, uns,
1642 				       int1l, int1h, int2l, int2h,
1643 				       &garbagel, &garbageh, &low, &hi);
1644       break;
1645 
1646     case MIN_EXPR:
1647     case MAX_EXPR:
1648       if (uns)
1649 	low = (((unsigned HOST_WIDE_INT) int1h
1650 		< (unsigned HOST_WIDE_INT) int2h)
1651 	       || (((unsigned HOST_WIDE_INT) int1h
1652 		    == (unsigned HOST_WIDE_INT) int2h)
1653 		   && int1l < int2l));
1654       else
1655 	low = (int1h < int2h
1656 	       || (int1h == int2h && int1l < int2l));
1657 
1658       if (low == (code == MIN_EXPR))
1659 	low = int1l, hi = int1h;
1660       else
1661 	low = int2l, hi = int2h;
1662       break;
1663 
1664     default:
1665       return NULL_TREE;
1666     }
1667 
1668   t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1669 
1670   if (notrunc)
1671     {
1672       /* Propagate overflow flags ourselves.  */
1673       if (((!uns || is_sizetype) && overflow)
1674 	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1675 	{
1676 	  t = copy_node (t);
1677 	  TREE_OVERFLOW (t) = 1;
1678 	  TREE_CONSTANT_OVERFLOW (t) = 1;
1679 	}
1680       else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1681 	{
1682 	  t = copy_node (t);
1683 	  TREE_CONSTANT_OVERFLOW (t) = 1;
1684 	}
1685     }
1686   else
1687     t = force_fit_type (t, 1,
1688 			((!uns || is_sizetype) && overflow)
1689 			| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1690 			TREE_CONSTANT_OVERFLOW (arg1)
1691 			| TREE_CONSTANT_OVERFLOW (arg2));
1692 
1693   return t;
1694 }
1695 
1696 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1697    constant.  We assume ARG1 and ARG2 have the same data type, or at least
1698    are the same kind of constant and the same machine mode.  Return zero if
1699    combining the constants is not allowed in the current operating mode.
1700 
1701    If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
1702 
1703 static tree
const_binop(enum tree_code code,tree arg1,tree arg2,int notrunc)1704 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1705 {
1706   /* Sanity check for the recursive cases.  */
1707   if (!arg1 || !arg2)
1708     return NULL_TREE;
1709 
1710   STRIP_NOPS (arg1);
1711   STRIP_NOPS (arg2);
1712 
1713   if (TREE_CODE (arg1) == INTEGER_CST)
1714     return int_const_binop (code, arg1, arg2, notrunc);
1715 
1716   if (TREE_CODE (arg1) == REAL_CST)
1717     {
1718       enum machine_mode mode;
1719       REAL_VALUE_TYPE d1;
1720       REAL_VALUE_TYPE d2;
1721       REAL_VALUE_TYPE value;
1722       REAL_VALUE_TYPE result;
1723       bool inexact;
1724       tree t, type;
1725 
1726       /* The following codes are handled by real_arithmetic.  */
1727       switch (code)
1728 	{
1729 	case PLUS_EXPR:
1730 	case MINUS_EXPR:
1731 	case MULT_EXPR:
1732 	case RDIV_EXPR:
1733 	case MIN_EXPR:
1734 	case MAX_EXPR:
1735 	  break;
1736 
1737 	default:
1738 	  return NULL_TREE;
1739 	}
1740 
1741       d1 = TREE_REAL_CST (arg1);
1742       d2 = TREE_REAL_CST (arg2);
1743 
1744       type = TREE_TYPE (arg1);
1745       mode = TYPE_MODE (type);
1746 
1747       /* Don't perform operation if we honor signaling NaNs and
1748 	 either operand is a NaN.  */
1749       if (HONOR_SNANS (mode)
1750 	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1751 	return NULL_TREE;
1752 
1753       /* Don't perform operation if it would raise a division
1754 	 by zero exception.  */
1755       if (code == RDIV_EXPR
1756 	  && REAL_VALUES_EQUAL (d2, dconst0)
1757 	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1758 	return NULL_TREE;
1759 
1760       /* If either operand is a NaN, just return it.  Otherwise, set up
1761 	 for floating-point trap; we return an overflow.  */
1762       if (REAL_VALUE_ISNAN (d1))
1763 	return arg1;
1764       else if (REAL_VALUE_ISNAN (d2))
1765 	return arg2;
1766 
1767       inexact = real_arithmetic (&value, code, &d1, &d2);
1768       real_convert (&result, mode, &value);
1769 
1770       /* Don't constant fold this floating point operation if
1771 	 the result has overflowed and flag_trapping_math.  */
1772       if (flag_trapping_math
1773 	  && MODE_HAS_INFINITIES (mode)
1774 	  && REAL_VALUE_ISINF (result)
1775 	  && !REAL_VALUE_ISINF (d1)
1776 	  && !REAL_VALUE_ISINF (d2))
1777 	return NULL_TREE;
1778 
1779       /* Don't constant fold this floating point operation if the
1780 	 result may dependent upon the run-time rounding mode and
1781 	 flag_rounding_math is set, or if GCC's software emulation
1782 	 is unable to accurately represent the result.  */
1783       if ((flag_rounding_math
1784 	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1785 	       && !flag_unsafe_math_optimizations))
1786 	  && (inexact || !real_identical (&result, &value)))
1787 	return NULL_TREE;
1788 
1789       t = build_real (type, result);
1790 
1791       TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1792       TREE_CONSTANT_OVERFLOW (t)
1793 	= TREE_OVERFLOW (t)
1794 	  | TREE_CONSTANT_OVERFLOW (arg1)
1795 	  | TREE_CONSTANT_OVERFLOW (arg2);
1796       return t;
1797     }
1798 
1799   if (TREE_CODE (arg1) == COMPLEX_CST)
1800     {
1801       tree type = TREE_TYPE (arg1);
1802       tree r1 = TREE_REALPART (arg1);
1803       tree i1 = TREE_IMAGPART (arg1);
1804       tree r2 = TREE_REALPART (arg2);
1805       tree i2 = TREE_IMAGPART (arg2);
1806       tree real, imag;
1807 
1808       switch (code)
1809 	{
1810 	case PLUS_EXPR:
1811 	case MINUS_EXPR:
1812 	  real = const_binop (code, r1, r2, notrunc);
1813 	  imag = const_binop (code, i1, i2, notrunc);
1814 	  break;
1815 
1816 	case MULT_EXPR:
1817 	  real = const_binop (MINUS_EXPR,
1818 			      const_binop (MULT_EXPR, r1, r2, notrunc),
1819 			      const_binop (MULT_EXPR, i1, i2, notrunc),
1820 			      notrunc);
1821 	  imag = const_binop (PLUS_EXPR,
1822 			      const_binop (MULT_EXPR, r1, i2, notrunc),
1823 			      const_binop (MULT_EXPR, i1, r2, notrunc),
1824 			      notrunc);
1825 	  break;
1826 
1827 	case RDIV_EXPR:
1828 	  {
1829 	    tree magsquared
1830 	      = const_binop (PLUS_EXPR,
1831 			     const_binop (MULT_EXPR, r2, r2, notrunc),
1832 			     const_binop (MULT_EXPR, i2, i2, notrunc),
1833 			     notrunc);
1834 	    tree t1
1835 	      = const_binop (PLUS_EXPR,
1836 			     const_binop (MULT_EXPR, r1, r2, notrunc),
1837 			     const_binop (MULT_EXPR, i1, i2, notrunc),
1838 			     notrunc);
1839 	    tree t2
1840 	      = const_binop (MINUS_EXPR,
1841 			     const_binop (MULT_EXPR, i1, r2, notrunc),
1842 			     const_binop (MULT_EXPR, r1, i2, notrunc),
1843 			     notrunc);
1844 
1845 	    if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1846 	      code = TRUNC_DIV_EXPR;
1847 
1848 	    real = const_binop (code, t1, magsquared, notrunc);
1849 	    imag = const_binop (code, t2, magsquared, notrunc);
1850 	  }
1851 	  break;
1852 
1853 	default:
1854 	  return NULL_TREE;
1855 	}
1856 
1857       if (real && imag)
1858 	return build_complex (type, real, imag);
1859     }
1860 
1861   return NULL_TREE;
1862 }
1863 
1864 /* Create a size type INT_CST node with NUMBER sign extended.  KIND
1865    indicates which particular sizetype to create.  */
1866 
1867 tree
size_int_kind(HOST_WIDE_INT number,enum size_type_kind kind)1868 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1869 {
1870   return build_int_cst (sizetype_tab[(int) kind], number);
1871 }
1872 
1873 /* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
1874    is a tree code.  The type of the result is taken from the operands.
1875    Both must be the same type integer type and it must be a size type.
1876    If the operands are constant, so is the result.  */
1877 
1878 tree
size_binop(enum tree_code code,tree arg0,tree arg1)1879 size_binop (enum tree_code code, tree arg0, tree arg1)
1880 {
1881   tree type = TREE_TYPE (arg0);
1882 
1883   if (arg0 == error_mark_node || arg1 == error_mark_node)
1884     return error_mark_node;
1885 
1886   gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1887 	      && type == TREE_TYPE (arg1));
1888 
1889   /* Handle the special case of two integer constants faster.  */
1890   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1891     {
1892       /* And some specific cases even faster than that.  */
1893       if (code == PLUS_EXPR && integer_zerop (arg0))
1894 	return arg1;
1895       else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1896 	       && integer_zerop (arg1))
1897 	return arg0;
1898       else if (code == MULT_EXPR && integer_onep (arg0))
1899 	return arg1;
1900 
1901       /* Handle general case of two integer constants.  */
1902       return int_const_binop (code, arg0, arg1, 0);
1903     }
1904 
1905   return fold_build2 (code, type, arg0, arg1);
1906 }
1907 
1908 /* Given two values, either both of sizetype or both of bitsizetype,
1909    compute the difference between the two values.  Return the value
1910    in signed type corresponding to the type of the operands.  */
1911 
1912 tree
size_diffop(tree arg0,tree arg1)1913 size_diffop (tree arg0, tree arg1)
1914 {
1915   tree type = TREE_TYPE (arg0);
1916   tree ctype;
1917 
1918   gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1919 	      && type == TREE_TYPE (arg1));
1920 
1921   /* If the type is already signed, just do the simple thing.  */
1922   if (!TYPE_UNSIGNED (type))
1923     return size_binop (MINUS_EXPR, arg0, arg1);
1924 
1925   ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1926 
1927   /* If either operand is not a constant, do the conversions to the signed
1928      type and subtract.  The hardware will do the right thing with any
1929      overflow in the subtraction.  */
1930   if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1931     return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1932 		       fold_convert (ctype, arg1));
1933 
1934   /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1935      Otherwise, subtract the other way, convert to CTYPE (we know that can't
1936      overflow) and negate (which can't either).  Special-case a result
1937      of zero while we're here.  */
1938   if (tree_int_cst_equal (arg0, arg1))
1939     return build_int_cst (ctype, 0);
1940   else if (tree_int_cst_lt (arg1, arg0))
1941     return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1942   else
1943     return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1944 		       fold_convert (ctype, size_binop (MINUS_EXPR,
1945 							arg1, arg0)));
1946 }
1947 
1948 /* A subroutine of fold_convert_const handling conversions of an
1949    INTEGER_CST to another integer type.  */
1950 
1951 static tree
fold_convert_const_int_from_int(tree type,tree arg1)1952 fold_convert_const_int_from_int (tree type, tree arg1)
1953 {
1954   tree t;
1955 
1956   /* Given an integer constant, make new constant with new type,
1957      appropriately sign-extended or truncated.  */
1958   t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1959 			  TREE_INT_CST_HIGH (arg1));
1960 
1961   t = force_fit_type (t,
1962 		      /* Don't set the overflow when
1963 		      	 converting a pointer  */
1964 		      !POINTER_TYPE_P (TREE_TYPE (arg1)),
1965 		      (TREE_INT_CST_HIGH (arg1) < 0
1966 		       && (TYPE_UNSIGNED (type)
1967 			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1968 		      | TREE_OVERFLOW (arg1),
1969 		      TREE_CONSTANT_OVERFLOW (arg1));
1970 
1971   return t;
1972 }
1973 
1974 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1975    to an integer type.  */
1976 
1977 static tree
fold_convert_const_int_from_real(enum tree_code code,tree type,tree arg1)1978 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1979 {
1980   int overflow = 0;
1981   tree t;
1982 
1983   /* The following code implements the floating point to integer
1984      conversion rules required by the Java Language Specification,
1985      that IEEE NaNs are mapped to zero and values that overflow
1986      the target precision saturate, i.e. values greater than
1987      INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1988      are mapped to INT_MIN.  These semantics are allowed by the
1989      C and C++ standards that simply state that the behavior of
1990      FP-to-integer conversion is unspecified upon overflow.  */
1991 
1992   HOST_WIDE_INT high, low;
1993   REAL_VALUE_TYPE r;
1994   REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1995 
1996   switch (code)
1997     {
1998     case FIX_TRUNC_EXPR:
1999       real_trunc (&r, VOIDmode, &x);
2000       break;
2001 
2002     case FIX_CEIL_EXPR:
2003       real_ceil (&r, VOIDmode, &x);
2004       break;
2005 
2006     case FIX_FLOOR_EXPR:
2007       real_floor (&r, VOIDmode, &x);
2008       break;
2009 
2010     case FIX_ROUND_EXPR:
2011       real_round (&r, VOIDmode, &x);
2012       break;
2013 
2014     default:
2015       gcc_unreachable ();
2016     }
2017 
2018   /* If R is NaN, return zero and show we have an overflow.  */
2019   if (REAL_VALUE_ISNAN (r))
2020     {
2021       overflow = 1;
2022       high = 0;
2023       low = 0;
2024     }
2025 
2026   /* See if R is less than the lower bound or greater than the
2027      upper bound.  */
2028 
2029   if (! overflow)
2030     {
2031       tree lt = TYPE_MIN_VALUE (type);
2032       REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2033       if (REAL_VALUES_LESS (r, l))
2034 	{
2035 	  overflow = 1;
2036 	  high = TREE_INT_CST_HIGH (lt);
2037 	  low = TREE_INT_CST_LOW (lt);
2038 	}
2039     }
2040 
2041   if (! overflow)
2042     {
2043       tree ut = TYPE_MAX_VALUE (type);
2044       if (ut)
2045 	{
2046 	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2047 	  if (REAL_VALUES_LESS (u, r))
2048 	    {
2049 	      overflow = 1;
2050 	      high = TREE_INT_CST_HIGH (ut);
2051 	      low = TREE_INT_CST_LOW (ut);
2052 	    }
2053 	}
2054     }
2055 
2056   if (! overflow)
2057     REAL_VALUE_TO_INT (&low, &high, r);
2058 
2059   t = build_int_cst_wide (type, low, high);
2060 
2061   t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
2062 		      TREE_CONSTANT_OVERFLOW (arg1));
2063   return t;
2064 }
2065 
2066 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2067    to another floating point type.  */
2068 
2069 static tree
fold_convert_const_real_from_real(tree type,tree arg1)2070 fold_convert_const_real_from_real (tree type, tree arg1)
2071 {
2072   REAL_VALUE_TYPE value;
2073   tree t;
2074 
2075   real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2076   t = build_real (type, value);
2077 
2078   TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2079   TREE_CONSTANT_OVERFLOW (t)
2080     = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2081   return t;
2082 }
2083 
2084 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2085    type TYPE.  If no simplification can be done return NULL_TREE.  */
2086 
2087 static tree
fold_convert_const(enum tree_code code,tree type,tree arg1)2088 fold_convert_const (enum tree_code code, tree type, tree arg1)
2089 {
2090   if (TREE_TYPE (arg1) == type)
2091     return arg1;
2092 
2093   if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2094     {
2095       if (TREE_CODE (arg1) == INTEGER_CST)
2096 	return fold_convert_const_int_from_int (type, arg1);
2097       else if (TREE_CODE (arg1) == REAL_CST)
2098 	return fold_convert_const_int_from_real (code, type, arg1);
2099     }
2100   else if (TREE_CODE (type) == REAL_TYPE)
2101     {
2102       if (TREE_CODE (arg1) == INTEGER_CST)
2103 	return build_real_from_int_cst (type, arg1);
2104       if (TREE_CODE (arg1) == REAL_CST)
2105 	return fold_convert_const_real_from_real (type, arg1);
2106     }
2107   return NULL_TREE;
2108 }
2109 
2110 /* Construct a vector of zero elements of vector type TYPE.  */
2111 
2112 static tree
build_zero_vector(tree type)2113 build_zero_vector (tree type)
2114 {
2115   tree elem, list;
2116   int i, units;
2117 
2118   elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2119   units = TYPE_VECTOR_SUBPARTS (type);
2120 
2121   list = NULL_TREE;
2122   for (i = 0; i < units; i++)
2123     list = tree_cons (NULL_TREE, elem, list);
2124   return build_vector (type, list);
2125 }
2126 
2127 /* Convert expression ARG to type TYPE.  Used by the middle-end for
2128    simple conversions in preference to calling the front-end's convert.  */
2129 
2130 tree
fold_convert(tree type,tree arg)2131 fold_convert (tree type, tree arg)
2132 {
2133   tree orig = TREE_TYPE (arg);
2134   tree tem;
2135 
2136   if (type == orig)
2137     return arg;
2138 
2139   if (TREE_CODE (arg) == ERROR_MARK
2140       || TREE_CODE (type) == ERROR_MARK
2141       || TREE_CODE (orig) == ERROR_MARK)
2142     return error_mark_node;
2143 
2144   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2145       || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2146 					TYPE_MAIN_VARIANT (orig)))
2147     return fold_build1 (NOP_EXPR, type, arg);
2148 
2149   switch (TREE_CODE (type))
2150     {
2151     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2152     case POINTER_TYPE: case REFERENCE_TYPE:
2153     case OFFSET_TYPE:
2154       if (TREE_CODE (arg) == INTEGER_CST)
2155 	{
2156 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2157 	  if (tem != NULL_TREE)
2158 	    return tem;
2159 	}
2160       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2161 	  || TREE_CODE (orig) == OFFSET_TYPE)
2162         return fold_build1 (NOP_EXPR, type, arg);
2163       if (TREE_CODE (orig) == COMPLEX_TYPE)
2164 	{
2165 	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2166 	  return fold_convert (type, tem);
2167 	}
2168       gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2169 		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2170       return fold_build1 (NOP_EXPR, type, arg);
2171 
2172     case REAL_TYPE:
2173       if (TREE_CODE (arg) == INTEGER_CST)
2174 	{
2175 	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
2176 	  if (tem != NULL_TREE)
2177 	    return tem;
2178 	}
2179       else if (TREE_CODE (arg) == REAL_CST)
2180 	{
2181 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2182 	  if (tem != NULL_TREE)
2183 	    return tem;
2184 	}
2185 
2186       switch (TREE_CODE (orig))
2187 	{
2188 	case INTEGER_TYPE:
2189 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2190 	case POINTER_TYPE: case REFERENCE_TYPE:
2191 	  return fold_build1 (FLOAT_EXPR, type, arg);
2192 
2193 	case REAL_TYPE:
2194 	  return fold_build1 (NOP_EXPR, type, arg);
2195 
2196 	case COMPLEX_TYPE:
2197 	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2198 	  return fold_convert (type, tem);
2199 
2200 	default:
2201 	  gcc_unreachable ();
2202 	}
2203 
2204     case COMPLEX_TYPE:
2205       switch (TREE_CODE (orig))
2206 	{
2207 	case INTEGER_TYPE:
2208 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2209 	case POINTER_TYPE: case REFERENCE_TYPE:
2210 	case REAL_TYPE:
2211 	  return build2 (COMPLEX_EXPR, type,
2212 			 fold_convert (TREE_TYPE (type), arg),
2213 			 fold_convert (TREE_TYPE (type), integer_zero_node));
2214 	case COMPLEX_TYPE:
2215 	  {
2216 	    tree rpart, ipart;
2217 
2218 	    if (TREE_CODE (arg) == COMPLEX_EXPR)
2219 	      {
2220 		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2221 		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2222 		return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2223 	      }
2224 
2225 	    arg = save_expr (arg);
2226 	    rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2227 	    ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2228 	    rpart = fold_convert (TREE_TYPE (type), rpart);
2229 	    ipart = fold_convert (TREE_TYPE (type), ipart);
2230 	    return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2231 	  }
2232 
2233 	default:
2234 	  gcc_unreachable ();
2235 	}
2236 
2237     case VECTOR_TYPE:
2238       if (integer_zerop (arg))
2239 	return build_zero_vector (type);
2240       gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2241       gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2242 		  || TREE_CODE (orig) == VECTOR_TYPE);
2243       return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2244 
2245     case VOID_TYPE:
2246       return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2247 
2248     default:
2249       gcc_unreachable ();
2250     }
2251 }
2252 
2253 /* Return false if expr can be assumed not to be an lvalue, true
2254    otherwise.  */
2255 
2256 static bool
maybe_lvalue_p(tree x)2257 maybe_lvalue_p (tree x)
2258 {
2259   /* We only need to wrap lvalue tree codes.  */
2260   switch (TREE_CODE (x))
2261   {
2262   case VAR_DECL:
2263   case PARM_DECL:
2264   case RESULT_DECL:
2265   case LABEL_DECL:
2266   case FUNCTION_DECL:
2267   case SSA_NAME:
2268 
2269   case COMPONENT_REF:
2270   case INDIRECT_REF:
2271   case ALIGN_INDIRECT_REF:
2272   case MISALIGNED_INDIRECT_REF:
2273   case ARRAY_REF:
2274   case ARRAY_RANGE_REF:
2275   case BIT_FIELD_REF:
2276   case OBJ_TYPE_REF:
2277 
2278   case REALPART_EXPR:
2279   case IMAGPART_EXPR:
2280   case PREINCREMENT_EXPR:
2281   case PREDECREMENT_EXPR:
2282   case SAVE_EXPR:
2283   case TRY_CATCH_EXPR:
2284   case WITH_CLEANUP_EXPR:
2285   case COMPOUND_EXPR:
2286   case MODIFY_EXPR:
2287   case TARGET_EXPR:
2288   case COND_EXPR:
2289   case BIND_EXPR:
2290   case MIN_EXPR:
2291   case MAX_EXPR:
2292     break;
2293 
2294   default:
2295     /* Assume the worst for front-end tree codes.  */
2296     if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2297       break;
2298     return false;
2299   }
2300 
2301   return true;
2302 }
2303 
2304 /* Return an expr equal to X but certainly not valid as an lvalue.  */
2305 
2306 tree
non_lvalue(tree x)2307 non_lvalue (tree x)
2308 {
2309   /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2310      us.  */
2311   if (in_gimple_form)
2312     return x;
2313 
2314   if (! maybe_lvalue_p (x))
2315     return x;
2316   return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2317 }
2318 
2319 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2320    Zero means allow extended lvalues.  */
2321 
2322 int pedantic_lvalues;
2323 
2324 /* When pedantic, return an expr equal to X but certainly not valid as a
2325    pedantic lvalue.  Otherwise, return X.  */
2326 
2327 static tree
pedantic_non_lvalue(tree x)2328 pedantic_non_lvalue (tree x)
2329 {
2330   if (pedantic_lvalues)
2331     return non_lvalue (x);
2332   else
2333     return x;
2334 }
2335 
2336 /* Given a tree comparison code, return the code that is the logical inverse
2337    of the given code.  It is not safe to do this for floating-point
2338    comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2339    as well: if reversing the comparison is unsafe, return ERROR_MARK.  */
2340 
2341 enum tree_code
invert_tree_comparison(enum tree_code code,bool honor_nans)2342 invert_tree_comparison (enum tree_code code, bool honor_nans)
2343 {
2344   if (honor_nans && flag_trapping_math)
2345     return ERROR_MARK;
2346 
2347   switch (code)
2348     {
2349     case EQ_EXPR:
2350       return NE_EXPR;
2351     case NE_EXPR:
2352       return EQ_EXPR;
2353     case GT_EXPR:
2354       return honor_nans ? UNLE_EXPR : LE_EXPR;
2355     case GE_EXPR:
2356       return honor_nans ? UNLT_EXPR : LT_EXPR;
2357     case LT_EXPR:
2358       return honor_nans ? UNGE_EXPR : GE_EXPR;
2359     case LE_EXPR:
2360       return honor_nans ? UNGT_EXPR : GT_EXPR;
2361     case LTGT_EXPR:
2362       return UNEQ_EXPR;
2363     case UNEQ_EXPR:
2364       return LTGT_EXPR;
2365     case UNGT_EXPR:
2366       return LE_EXPR;
2367     case UNGE_EXPR:
2368       return LT_EXPR;
2369     case UNLT_EXPR:
2370       return GE_EXPR;
2371     case UNLE_EXPR:
2372       return GT_EXPR;
2373     case ORDERED_EXPR:
2374       return UNORDERED_EXPR;
2375     case UNORDERED_EXPR:
2376       return ORDERED_EXPR;
2377     default:
2378       gcc_unreachable ();
2379     }
2380 }
2381 
2382 /* Similar, but return the comparison that results if the operands are
2383    swapped.  This is safe for floating-point.  */
2384 
2385 enum tree_code
swap_tree_comparison(enum tree_code code)2386 swap_tree_comparison (enum tree_code code)
2387 {
2388   switch (code)
2389     {
2390     case EQ_EXPR:
2391     case NE_EXPR:
2392     case ORDERED_EXPR:
2393     case UNORDERED_EXPR:
2394     case LTGT_EXPR:
2395     case UNEQ_EXPR:
2396       return code;
2397     case GT_EXPR:
2398       return LT_EXPR;
2399     case GE_EXPR:
2400       return LE_EXPR;
2401     case LT_EXPR:
2402       return GT_EXPR;
2403     case LE_EXPR:
2404       return GE_EXPR;
2405     case UNGT_EXPR:
2406       return UNLT_EXPR;
2407     case UNGE_EXPR:
2408       return UNLE_EXPR;
2409     case UNLT_EXPR:
2410       return UNGT_EXPR;
2411     case UNLE_EXPR:
2412       return UNGE_EXPR;
2413     default:
2414       gcc_unreachable ();
2415     }
2416 }
2417 
2418 
2419 /* Convert a comparison tree code from an enum tree_code representation
2420    into a compcode bit-based encoding.  This function is the inverse of
2421    compcode_to_comparison.  */
2422 
2423 static enum comparison_code
comparison_to_compcode(enum tree_code code)2424 comparison_to_compcode (enum tree_code code)
2425 {
2426   switch (code)
2427     {
2428     case LT_EXPR:
2429       return COMPCODE_LT;
2430     case EQ_EXPR:
2431       return COMPCODE_EQ;
2432     case LE_EXPR:
2433       return COMPCODE_LE;
2434     case GT_EXPR:
2435       return COMPCODE_GT;
2436     case NE_EXPR:
2437       return COMPCODE_NE;
2438     case GE_EXPR:
2439       return COMPCODE_GE;
2440     case ORDERED_EXPR:
2441       return COMPCODE_ORD;
2442     case UNORDERED_EXPR:
2443       return COMPCODE_UNORD;
2444     case UNLT_EXPR:
2445       return COMPCODE_UNLT;
2446     case UNEQ_EXPR:
2447       return COMPCODE_UNEQ;
2448     case UNLE_EXPR:
2449       return COMPCODE_UNLE;
2450     case UNGT_EXPR:
2451       return COMPCODE_UNGT;
2452     case LTGT_EXPR:
2453       return COMPCODE_LTGT;
2454     case UNGE_EXPR:
2455       return COMPCODE_UNGE;
2456     default:
2457       gcc_unreachable ();
2458     }
2459 }
2460 
2461 /* Convert a compcode bit-based encoding of a comparison operator back
2462    to GCC's enum tree_code representation.  This function is the
2463    inverse of comparison_to_compcode.  */
2464 
2465 static enum tree_code
compcode_to_comparison(enum comparison_code code)2466 compcode_to_comparison (enum comparison_code code)
2467 {
2468   switch (code)
2469     {
2470     case COMPCODE_LT:
2471       return LT_EXPR;
2472     case COMPCODE_EQ:
2473       return EQ_EXPR;
2474     case COMPCODE_LE:
2475       return LE_EXPR;
2476     case COMPCODE_GT:
2477       return GT_EXPR;
2478     case COMPCODE_NE:
2479       return NE_EXPR;
2480     case COMPCODE_GE:
2481       return GE_EXPR;
2482     case COMPCODE_ORD:
2483       return ORDERED_EXPR;
2484     case COMPCODE_UNORD:
2485       return UNORDERED_EXPR;
2486     case COMPCODE_UNLT:
2487       return UNLT_EXPR;
2488     case COMPCODE_UNEQ:
2489       return UNEQ_EXPR;
2490     case COMPCODE_UNLE:
2491       return UNLE_EXPR;
2492     case COMPCODE_UNGT:
2493       return UNGT_EXPR;
2494     case COMPCODE_LTGT:
2495       return LTGT_EXPR;
2496     case COMPCODE_UNGE:
2497       return UNGE_EXPR;
2498     default:
2499       gcc_unreachable ();
2500     }
2501 }
2502 
2503 /* Return a tree for the comparison which is the combination of
2504    doing the AND or OR (depending on CODE) of the two operations LCODE
2505    and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2506    the possibility of trapping if the mode has NaNs, and return NULL_TREE
2507    if this makes the transformation invalid.  */
2508 
2509 tree
combine_comparisons(enum tree_code code,enum tree_code lcode,enum tree_code rcode,tree truth_type,tree ll_arg,tree lr_arg)2510 combine_comparisons (enum tree_code code, enum tree_code lcode,
2511 		     enum tree_code rcode, tree truth_type,
2512 		     tree ll_arg, tree lr_arg)
2513 {
2514   bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2515   enum comparison_code lcompcode = comparison_to_compcode (lcode);
2516   enum comparison_code rcompcode = comparison_to_compcode (rcode);
2517   enum comparison_code compcode;
2518 
2519   switch (code)
2520     {
2521     case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2522       compcode = lcompcode & rcompcode;
2523       break;
2524 
2525     case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2526       compcode = lcompcode | rcompcode;
2527       break;
2528 
2529     default:
2530       return NULL_TREE;
2531     }
2532 
2533   if (!honor_nans)
2534     {
2535       /* Eliminate unordered comparisons, as well as LTGT and ORD
2536 	 which are not used unless the mode has NaNs.  */
2537       compcode &= ~COMPCODE_UNORD;
2538       if (compcode == COMPCODE_LTGT)
2539 	compcode = COMPCODE_NE;
2540       else if (compcode == COMPCODE_ORD)
2541 	compcode = COMPCODE_TRUE;
2542     }
2543    else if (flag_trapping_math)
2544      {
2545 	/* Check that the original operation and the optimized ones will trap
2546 	   under the same condition.  */
2547 	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2548 		     && (lcompcode != COMPCODE_EQ)
2549 		     && (lcompcode != COMPCODE_ORD);
2550 	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2551 		     && (rcompcode != COMPCODE_EQ)
2552 		     && (rcompcode != COMPCODE_ORD);
2553 	bool trap = (compcode & COMPCODE_UNORD) == 0
2554 		    && (compcode != COMPCODE_EQ)
2555 		    && (compcode != COMPCODE_ORD);
2556 
2557         /* In a short-circuited boolean expression the LHS might be
2558 	   such that the RHS, if evaluated, will never trap.  For
2559 	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
2560 	   if neither x nor y is NaN.  (This is a mixed blessing: for
2561 	   example, the expression above will never trap, hence
2562 	   optimizing it to x < y would be invalid).  */
2563         if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2564             || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2565           rtrap = false;
2566 
2567         /* If the comparison was short-circuited, and only the RHS
2568 	   trapped, we may now generate a spurious trap.  */
2569 	if (rtrap && !ltrap
2570 	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2571 	  return NULL_TREE;
2572 
2573 	/* If we changed the conditions that cause a trap, we lose.  */
2574 	if ((ltrap || rtrap) != trap)
2575 	  return NULL_TREE;
2576       }
2577 
2578   if (compcode == COMPCODE_TRUE)
2579     return constant_boolean_node (true, truth_type);
2580   else if (compcode == COMPCODE_FALSE)
2581     return constant_boolean_node (false, truth_type);
2582   else
2583     return fold_build2 (compcode_to_comparison (compcode),
2584 			truth_type, ll_arg, lr_arg);
2585 }
2586 
2587 /* Return nonzero if CODE is a tree code that represents a truth value.  */
2588 
2589 static int
truth_value_p(enum tree_code code)2590 truth_value_p (enum tree_code code)
2591 {
2592   return (TREE_CODE_CLASS (code) == tcc_comparison
2593 	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2594 	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2595 	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2596 }
2597 
2598 /* Return nonzero if two operands (typically of the same tree node)
2599    are necessarily equal.  If either argument has side-effects this
2600    function returns zero.  FLAGS modifies behavior as follows:
2601 
2602    If OEP_ONLY_CONST is set, only return nonzero for constants.
2603    This function tests whether the operands are indistinguishable;
2604    it does not test whether they are equal using C's == operation.
2605    The distinction is important for IEEE floating point, because
2606    (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2607    (2) two NaNs may be indistinguishable, but NaN!=NaN.
2608 
2609    If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2610    even though it may hold multiple values during a function.
2611    This is because a GCC tree node guarantees that nothing else is
2612    executed between the evaluation of its "operands" (which may often
2613    be evaluated in arbitrary order).  Hence if the operands themselves
2614    don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2615    same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2616    unset means assuming isochronic (or instantaneous) tree equivalence.
2617    Unless comparing arbitrary expression trees, such as from different
2618    statements, this flag can usually be left unset.
2619 
2620    If OEP_PURE_SAME is set, then pure functions with identical arguments
2621    are considered the same.  It is used when the caller has other ways
2622    to ensure that global memory is unchanged in between.  */
2623 
2624 int
operand_equal_p(tree arg0,tree arg1,unsigned int flags)2625 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2626 {
2627   /* If either is ERROR_MARK, they aren't equal.  */
2628   if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2629     return 0;
2630 
2631   /* If both types don't have the same signedness, then we can't consider
2632      them equal.  We must check this before the STRIP_NOPS calls
2633      because they may change the signedness of the arguments.  */
2634   if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2635     return 0;
2636 
2637   /* If both types don't have the same precision, then it is not safe
2638      to strip NOPs.  */
2639   if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2640     return 0;
2641 
2642   STRIP_NOPS (arg0);
2643   STRIP_NOPS (arg1);
2644 
2645   /* In case both args are comparisons but with different comparison
2646      code, try to swap the comparison operands of one arg to produce
2647      a match and compare that variant.  */
2648   if (TREE_CODE (arg0) != TREE_CODE (arg1)
2649       && COMPARISON_CLASS_P (arg0)
2650       && COMPARISON_CLASS_P (arg1))
2651     {
2652       enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2653 
2654       if (TREE_CODE (arg0) == swap_code)
2655 	return operand_equal_p (TREE_OPERAND (arg0, 0),
2656 			        TREE_OPERAND (arg1, 1), flags)
2657 	       && operand_equal_p (TREE_OPERAND (arg0, 1),
2658 				   TREE_OPERAND (arg1, 0), flags);
2659     }
2660 
2661   if (TREE_CODE (arg0) != TREE_CODE (arg1)
2662       /* This is needed for conversions and for COMPONENT_REF.
2663 	 Might as well play it safe and always test this.  */
2664       || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2665       || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2666       || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2667     return 0;
2668 
2669   /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2670      We don't care about side effects in that case because the SAVE_EXPR
2671      takes care of that for us. In all other cases, two expressions are
2672      equal if they have no side effects.  If we have two identical
2673      expressions with side effects that should be treated the same due
2674      to the only side effects being identical SAVE_EXPR's, that will
2675      be detected in the recursive calls below.  */
2676   if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2677       && (TREE_CODE (arg0) == SAVE_EXPR
2678 	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2679     return 1;
2680 
2681   /* Next handle constant cases, those for which we can return 1 even
2682      if ONLY_CONST is set.  */
2683   if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2684     switch (TREE_CODE (arg0))
2685       {
2686       case INTEGER_CST:
2687 	return (! TREE_CONSTANT_OVERFLOW (arg0)
2688 		&& ! TREE_CONSTANT_OVERFLOW (arg1)
2689 		&& tree_int_cst_equal (arg0, arg1));
2690 
2691       case REAL_CST:
2692 	return (! TREE_CONSTANT_OVERFLOW (arg0)
2693 		&& ! TREE_CONSTANT_OVERFLOW (arg1)
2694 		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2695 					  TREE_REAL_CST (arg1)));
2696 
2697       case VECTOR_CST:
2698 	{
2699 	  tree v1, v2;
2700 
2701 	  if (TREE_CONSTANT_OVERFLOW (arg0)
2702 	      || TREE_CONSTANT_OVERFLOW (arg1))
2703 	    return 0;
2704 
2705 	  v1 = TREE_VECTOR_CST_ELTS (arg0);
2706 	  v2 = TREE_VECTOR_CST_ELTS (arg1);
2707 	  while (v1 && v2)
2708 	    {
2709 	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2710 				    flags))
2711 		return 0;
2712 	      v1 = TREE_CHAIN (v1);
2713 	      v2 = TREE_CHAIN (v2);
2714 	    }
2715 
2716 	  return v1 == v2;
2717 	}
2718 
2719       case COMPLEX_CST:
2720 	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2721 				 flags)
2722 		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2723 				    flags));
2724 
2725       case STRING_CST:
2726 	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2727 		&& ! memcmp (TREE_STRING_POINTER (arg0),
2728 			      TREE_STRING_POINTER (arg1),
2729 			      TREE_STRING_LENGTH (arg0)));
2730 
2731       case ADDR_EXPR:
2732 	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2733 				0);
2734       default:
2735 	break;
2736       }
2737 
2738   if (flags & OEP_ONLY_CONST)
2739     return 0;
2740 
2741 /* Define macros to test an operand from arg0 and arg1 for equality and a
2742    variant that allows null and views null as being different from any
2743    non-null value.  In the latter case, if either is null, the both
2744    must be; otherwise, do the normal comparison.  */
2745 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
2746 				    TREE_OPERAND (arg1, N), flags)
2747 
2748 #define OP_SAME_WITH_NULL(N)				\
2749   ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
2750    ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2751 
2752   switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2753     {
2754     case tcc_unary:
2755       /* Two conversions are equal only if signedness and modes match.  */
2756       switch (TREE_CODE (arg0))
2757         {
2758         case NOP_EXPR:
2759         case CONVERT_EXPR:
2760         case FIX_CEIL_EXPR:
2761         case FIX_TRUNC_EXPR:
2762         case FIX_FLOOR_EXPR:
2763         case FIX_ROUND_EXPR:
2764 	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2765 	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2766 	    return 0;
2767 	  break;
2768 	default:
2769 	  break;
2770 	}
2771 
2772       return OP_SAME (0);
2773 
2774 
2775     case tcc_comparison:
2776     case tcc_binary:
2777       if (OP_SAME (0) && OP_SAME (1))
2778 	return 1;
2779 
2780       /* For commutative ops, allow the other order.  */
2781       return (commutative_tree_code (TREE_CODE (arg0))
2782 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
2783 				  TREE_OPERAND (arg1, 1), flags)
2784 	      && operand_equal_p (TREE_OPERAND (arg0, 1),
2785 				  TREE_OPERAND (arg1, 0), flags));
2786 
2787     case tcc_reference:
2788       /* If either of the pointer (or reference) expressions we are
2789 	 dereferencing contain a side effect, these cannot be equal.  */
2790       if (TREE_SIDE_EFFECTS (arg0)
2791 	  || TREE_SIDE_EFFECTS (arg1))
2792 	return 0;
2793 
2794       switch (TREE_CODE (arg0))
2795 	{
2796 	case INDIRECT_REF:
2797 	case ALIGN_INDIRECT_REF:
2798 	case MISALIGNED_INDIRECT_REF:
2799 	case REALPART_EXPR:
2800 	case IMAGPART_EXPR:
2801 	  return OP_SAME (0);
2802 
2803 	case ARRAY_REF:
2804 	case ARRAY_RANGE_REF:
2805 	  /* Operands 2 and 3 may be null.  */
2806 	  return (OP_SAME (0)
2807 		  && OP_SAME (1)
2808 		  && OP_SAME_WITH_NULL (2)
2809 		  && OP_SAME_WITH_NULL (3));
2810 
2811 	case COMPONENT_REF:
2812 	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
2813 	     may be NULL when we're called to compare MEM_EXPRs.  */
2814 	  return OP_SAME_WITH_NULL (0)
2815 		 && OP_SAME (1)
2816 		 && OP_SAME_WITH_NULL (2);
2817 
2818 	case BIT_FIELD_REF:
2819 	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2820 
2821 	default:
2822 	  return 0;
2823 	}
2824 
2825     case tcc_expression:
2826       switch (TREE_CODE (arg0))
2827 	{
2828 	case ADDR_EXPR:
2829 	case TRUTH_NOT_EXPR:
2830 	  return OP_SAME (0);
2831 
2832 	case TRUTH_ANDIF_EXPR:
2833 	case TRUTH_ORIF_EXPR:
2834 	  return OP_SAME (0) && OP_SAME (1);
2835 
2836 	case TRUTH_AND_EXPR:
2837 	case TRUTH_OR_EXPR:
2838 	case TRUTH_XOR_EXPR:
2839 	  if (OP_SAME (0) && OP_SAME (1))
2840 	    return 1;
2841 
2842 	  /* Otherwise take into account this is a commutative operation.  */
2843 	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
2844 				   TREE_OPERAND (arg1, 1), flags)
2845 		  && operand_equal_p (TREE_OPERAND (arg0, 1),
2846 				      TREE_OPERAND (arg1, 0), flags));
2847 
2848 	case CALL_EXPR:
2849 	  /* If the CALL_EXPRs call different functions, then they
2850 	     clearly can not be equal.  */
2851 	  if (!OP_SAME (0))
2852 	    return 0;
2853 
2854 	  {
2855 	    unsigned int cef = call_expr_flags (arg0);
2856 	    if (flags & OEP_PURE_SAME)
2857 	      cef &= ECF_CONST | ECF_PURE;
2858 	    else
2859 	      cef &= ECF_CONST;
2860 	    if (!cef)
2861 	      return 0;
2862 	  }
2863 
2864 	  /* Now see if all the arguments are the same.  operand_equal_p
2865 	     does not handle TREE_LIST, so we walk the operands here
2866 	     feeding them to operand_equal_p.  */
2867 	  arg0 = TREE_OPERAND (arg0, 1);
2868 	  arg1 = TREE_OPERAND (arg1, 1);
2869 	  while (arg0 && arg1)
2870 	    {
2871 	      if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2872 				     flags))
2873 		return 0;
2874 
2875 	      arg0 = TREE_CHAIN (arg0);
2876 	      arg1 = TREE_CHAIN (arg1);
2877 	    }
2878 
2879 	  /* If we get here and both argument lists are exhausted
2880 	     then the CALL_EXPRs are equal.  */
2881 	  return ! (arg0 || arg1);
2882 
2883 	default:
2884 	  return 0;
2885 	}
2886 
2887     case tcc_declaration:
2888       /* Consider __builtin_sqrt equal to sqrt.  */
2889       return (TREE_CODE (arg0) == FUNCTION_DECL
2890 	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2891 	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2892 	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2893 
2894     default:
2895       return 0;
2896     }
2897 
2898 #undef OP_SAME
2899 #undef OP_SAME_WITH_NULL
2900 }
2901 
2902 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2903    shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2904 
2905    When in doubt, return 0.  */
2906 
2907 static int
operand_equal_for_comparison_p(tree arg0,tree arg1,tree other)2908 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2909 {
2910   int unsignedp1, unsignedpo;
2911   tree primarg0, primarg1, primother;
2912   unsigned int correct_width;
2913 
2914   if (operand_equal_p (arg0, arg1, 0))
2915     return 1;
2916 
2917   if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2918       || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2919     return 0;
2920 
2921   /* Discard any conversions that don't change the modes of ARG0 and ARG1
2922      and see if the inner values are the same.  This removes any
2923      signedness comparison, which doesn't matter here.  */
2924   primarg0 = arg0, primarg1 = arg1;
2925   STRIP_NOPS (primarg0);
2926   STRIP_NOPS (primarg1);
2927   if (operand_equal_p (primarg0, primarg1, 0))
2928     return 1;
2929 
2930   /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2931      actual comparison operand, ARG0.
2932 
2933      First throw away any conversions to wider types
2934      already present in the operands.  */
2935 
2936   primarg1 = get_narrower (arg1, &unsignedp1);
2937   primother = get_narrower (other, &unsignedpo);
2938 
2939   correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2940   if (unsignedp1 == unsignedpo
2941       && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2942       && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2943     {
2944       tree type = TREE_TYPE (arg0);
2945 
2946       /* Make sure shorter operand is extended the right way
2947 	 to match the longer operand.  */
2948       primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2949 			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2950 
2951       if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2952 	return 1;
2953     }
2954 
2955   return 0;
2956 }
2957 
2958 /* See if ARG is an expression that is either a comparison or is performing
2959    arithmetic on comparisons.  The comparisons must only be comparing
2960    two different values, which will be stored in *CVAL1 and *CVAL2; if
2961    they are nonzero it means that some operands have already been found.
2962    No variables may be used anywhere else in the expression except in the
2963    comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
2964    the expression and save_expr needs to be called with CVAL1 and CVAL2.
2965 
2966    If this is true, return 1.  Otherwise, return zero.  */
2967 
2968 static int
twoval_comparison_p(tree arg,tree * cval1,tree * cval2,int * save_p)2969 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2970 {
2971   enum tree_code code = TREE_CODE (arg);
2972   enum tree_code_class class = TREE_CODE_CLASS (code);
2973 
2974   /* We can handle some of the tcc_expression cases here.  */
2975   if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2976     class = tcc_unary;
2977   else if (class == tcc_expression
2978 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2979 	       || code == COMPOUND_EXPR))
2980     class = tcc_binary;
2981 
2982   else if (class == tcc_expression && code == SAVE_EXPR
2983 	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2984     {
2985       /* If we've already found a CVAL1 or CVAL2, this expression is
2986 	 two complex to handle.  */
2987       if (*cval1 || *cval2)
2988 	return 0;
2989 
2990       class = tcc_unary;
2991       *save_p = 1;
2992     }
2993 
2994   switch (class)
2995     {
2996     case tcc_unary:
2997       return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2998 
2999     case tcc_binary:
3000       return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3001 	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
3002 				      cval1, cval2, save_p));
3003 
3004     case tcc_constant:
3005       return 1;
3006 
3007     case tcc_expression:
3008       if (code == COND_EXPR)
3009 	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3010 				     cval1, cval2, save_p)
3011 		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
3012 					cval1, cval2, save_p)
3013 		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
3014 					cval1, cval2, save_p));
3015       return 0;
3016 
3017     case tcc_comparison:
3018       /* First see if we can handle the first operand, then the second.  For
3019 	 the second operand, we know *CVAL1 can't be zero.  It must be that
3020 	 one side of the comparison is each of the values; test for the
3021 	 case where this isn't true by failing if the two operands
3022 	 are the same.  */
3023 
3024       if (operand_equal_p (TREE_OPERAND (arg, 0),
3025 			   TREE_OPERAND (arg, 1), 0))
3026 	return 0;
3027 
3028       if (*cval1 == 0)
3029 	*cval1 = TREE_OPERAND (arg, 0);
3030       else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3031 	;
3032       else if (*cval2 == 0)
3033 	*cval2 = TREE_OPERAND (arg, 0);
3034       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3035 	;
3036       else
3037 	return 0;
3038 
3039       if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3040 	;
3041       else if (*cval2 == 0)
3042 	*cval2 = TREE_OPERAND (arg, 1);
3043       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3044 	;
3045       else
3046 	return 0;
3047 
3048       return 1;
3049 
3050     default:
3051       return 0;
3052     }
3053 }
3054 
3055 /* ARG is a tree that is known to contain just arithmetic operations and
3056    comparisons.  Evaluate the operations in the tree substituting NEW0 for
3057    any occurrence of OLD0 as an operand of a comparison and likewise for
3058    NEW1 and OLD1.  */
3059 
3060 static tree
eval_subst(tree arg,tree old0,tree new0,tree old1,tree new1)3061 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3062 {
3063   tree type = TREE_TYPE (arg);
3064   enum tree_code code = TREE_CODE (arg);
3065   enum tree_code_class class = TREE_CODE_CLASS (code);
3066 
3067   /* We can handle some of the tcc_expression cases here.  */
3068   if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3069     class = tcc_unary;
3070   else if (class == tcc_expression
3071 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3072     class = tcc_binary;
3073 
3074   switch (class)
3075     {
3076     case tcc_unary:
3077       return fold_build1 (code, type,
3078 			  eval_subst (TREE_OPERAND (arg, 0),
3079 				      old0, new0, old1, new1));
3080 
3081     case tcc_binary:
3082       return fold_build2 (code, type,
3083 			  eval_subst (TREE_OPERAND (arg, 0),
3084 				      old0, new0, old1, new1),
3085 			  eval_subst (TREE_OPERAND (arg, 1),
3086 				      old0, new0, old1, new1));
3087 
3088     case tcc_expression:
3089       switch (code)
3090 	{
3091 	case SAVE_EXPR:
3092 	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3093 
3094 	case COMPOUND_EXPR:
3095 	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3096 
3097 	case COND_EXPR:
3098 	  return fold_build3 (code, type,
3099 			      eval_subst (TREE_OPERAND (arg, 0),
3100 					  old0, new0, old1, new1),
3101 			      eval_subst (TREE_OPERAND (arg, 1),
3102 					  old0, new0, old1, new1),
3103 			      eval_subst (TREE_OPERAND (arg, 2),
3104 					  old0, new0, old1, new1));
3105 	default:
3106 	  break;
3107 	}
3108       /* Fall through - ???  */
3109 
3110     case tcc_comparison:
3111       {
3112 	tree arg0 = TREE_OPERAND (arg, 0);
3113 	tree arg1 = TREE_OPERAND (arg, 1);
3114 
3115 	/* We need to check both for exact equality and tree equality.  The
3116 	   former will be true if the operand has a side-effect.  In that
3117 	   case, we know the operand occurred exactly once.  */
3118 
3119 	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3120 	  arg0 = new0;
3121 	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3122 	  arg0 = new1;
3123 
3124 	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3125 	  arg1 = new0;
3126 	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3127 	  arg1 = new1;
3128 
3129 	return fold_build2 (code, type, arg0, arg1);
3130       }
3131 
3132     default:
3133       return arg;
3134     }
3135 }
3136 
3137 /* Return a tree for the case when the result of an expression is RESULT
3138    converted to TYPE and OMITTED was previously an operand of the expression
3139    but is now not needed (e.g., we folded OMITTED * 0).
3140 
3141    If OMITTED has side effects, we must evaluate it.  Otherwise, just do
3142    the conversion of RESULT to TYPE.  */
3143 
3144 tree
omit_one_operand(tree type,tree result,tree omitted)3145 omit_one_operand (tree type, tree result, tree omitted)
3146 {
3147   tree t = fold_convert (type, result);
3148 
3149   if (TREE_SIDE_EFFECTS (omitted))
3150     return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3151 
3152   return non_lvalue (t);
3153 }
3154 
3155 /* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */
3156 
3157 static tree
pedantic_omit_one_operand(tree type,tree result,tree omitted)3158 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3159 {
3160   tree t = fold_convert (type, result);
3161 
3162   if (TREE_SIDE_EFFECTS (omitted))
3163     return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3164 
3165   return pedantic_non_lvalue (t);
3166 }
3167 
3168 /* Return a tree for the case when the result of an expression is RESULT
3169    converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3170    of the expression but are now not needed.
3171 
3172    If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3173    If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3174    evaluated before OMITTED2.  Otherwise, if neither has side effects,
3175    just do the conversion of RESULT to TYPE.  */
3176 
3177 tree
omit_two_operands(tree type,tree result,tree omitted1,tree omitted2)3178 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3179 {
3180   tree t = fold_convert (type, result);
3181 
3182   if (TREE_SIDE_EFFECTS (omitted2))
3183     t = build2 (COMPOUND_EXPR, type, omitted2, t);
3184   if (TREE_SIDE_EFFECTS (omitted1))
3185     t = build2 (COMPOUND_EXPR, type, omitted1, t);
3186 
3187   return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3188 }
3189 
3190 
3191 /* Return a simplified tree node for the truth-negation of ARG.  This
3192    never alters ARG itself.  We assume that ARG is an operation that
3193    returns a truth value (0 or 1).
3194 
3195    FIXME: one would think we would fold the result, but it causes
3196    problems with the dominator optimizer.  */
3197 
3198 tree
fold_truth_not_expr(tree arg)3199 fold_truth_not_expr (tree arg)
3200 {
3201   tree type = TREE_TYPE (arg);
3202   enum tree_code code = TREE_CODE (arg);
3203 
3204   /* If this is a comparison, we can simply invert it, except for
3205      floating-point non-equality comparisons, in which case we just
3206      enclose a TRUTH_NOT_EXPR around what we have.  */
3207 
3208   if (TREE_CODE_CLASS (code) == tcc_comparison)
3209     {
3210       tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3211       if (FLOAT_TYPE_P (op_type)
3212 	  && flag_trapping_math
3213 	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
3214 	  && code != NE_EXPR && code != EQ_EXPR)
3215 	return NULL_TREE;
3216       else
3217 	{
3218 	  code = invert_tree_comparison (code,
3219 					 HONOR_NANS (TYPE_MODE (op_type)));
3220 	  if (code == ERROR_MARK)
3221 	    return NULL_TREE;
3222 	  else
3223 	    return build2 (code, type,
3224 			   TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3225 	}
3226     }
3227 
3228   switch (code)
3229     {
3230     case INTEGER_CST:
3231       return constant_boolean_node (integer_zerop (arg), type);
3232 
3233     case TRUTH_AND_EXPR:
3234       return build2 (TRUTH_OR_EXPR, type,
3235 		     invert_truthvalue (TREE_OPERAND (arg, 0)),
3236 		     invert_truthvalue (TREE_OPERAND (arg, 1)));
3237 
3238     case TRUTH_OR_EXPR:
3239       return build2 (TRUTH_AND_EXPR, type,
3240 		     invert_truthvalue (TREE_OPERAND (arg, 0)),
3241 		     invert_truthvalue (TREE_OPERAND (arg, 1)));
3242 
3243     case TRUTH_XOR_EXPR:
3244       /* Here we can invert either operand.  We invert the first operand
3245 	 unless the second operand is a TRUTH_NOT_EXPR in which case our
3246 	 result is the XOR of the first operand with the inside of the
3247 	 negation of the second operand.  */
3248 
3249       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3250 	return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3251 		       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3252       else
3253 	return build2 (TRUTH_XOR_EXPR, type,
3254 		       invert_truthvalue (TREE_OPERAND (arg, 0)),
3255 		       TREE_OPERAND (arg, 1));
3256 
3257     case TRUTH_ANDIF_EXPR:
3258       return build2 (TRUTH_ORIF_EXPR, type,
3259 		     invert_truthvalue (TREE_OPERAND (arg, 0)),
3260 		     invert_truthvalue (TREE_OPERAND (arg, 1)));
3261 
3262     case TRUTH_ORIF_EXPR:
3263       return build2 (TRUTH_ANDIF_EXPR, type,
3264 		     invert_truthvalue (TREE_OPERAND (arg, 0)),
3265 		     invert_truthvalue (TREE_OPERAND (arg, 1)));
3266 
3267     case TRUTH_NOT_EXPR:
3268       return TREE_OPERAND (arg, 0);
3269 
3270     case COND_EXPR:
3271       {
3272 	tree arg1 = TREE_OPERAND (arg, 1);
3273 	tree arg2 = TREE_OPERAND (arg, 2);
3274 	/* A COND_EXPR may have a throw as one operand, which
3275 	   then has void type.  Just leave void operands
3276 	   as they are.  */
3277 	return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3278 		       VOID_TYPE_P (TREE_TYPE (arg1))
3279 		       ? arg1 : invert_truthvalue (arg1),
3280 		       VOID_TYPE_P (TREE_TYPE (arg2))
3281 		       ? arg2 : invert_truthvalue (arg2));
3282       }
3283 
3284     case COMPOUND_EXPR:
3285       return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3286 		     invert_truthvalue (TREE_OPERAND (arg, 1)));
3287 
3288     case NON_LVALUE_EXPR:
3289       return invert_truthvalue (TREE_OPERAND (arg, 0));
3290 
3291     case NOP_EXPR:
3292       if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3293 	return build1 (TRUTH_NOT_EXPR, type, arg);
3294 
3295     case CONVERT_EXPR:
3296     case FLOAT_EXPR:
3297       return build1 (TREE_CODE (arg), type,
3298 		     invert_truthvalue (TREE_OPERAND (arg, 0)));
3299 
3300     case BIT_AND_EXPR:
3301       if (!integer_onep (TREE_OPERAND (arg, 1)))
3302 	break;
3303       return build2 (EQ_EXPR, type, arg,
3304 		     build_int_cst (type, 0));
3305 
3306     case SAVE_EXPR:
3307       return build1 (TRUTH_NOT_EXPR, type, arg);
3308 
3309     case CLEANUP_POINT_EXPR:
3310       return build1 (CLEANUP_POINT_EXPR, type,
3311 		     invert_truthvalue (TREE_OPERAND (arg, 0)));
3312 
3313     default:
3314       break;
3315     }
3316 
3317   return NULL_TREE;
3318 }
3319 
3320 /* Return a simplified tree node for the truth-negation of ARG.  This
3321    never alters ARG itself.  We assume that ARG is an operation that
3322    returns a truth value (0 or 1).
3323 
3324    FIXME: one would think we would fold the result, but it causes
3325    problems with the dominator optimizer.  */
3326 
3327 tree
invert_truthvalue(tree arg)3328 invert_truthvalue (tree arg)
3329 {
3330   tree tem;
3331 
3332   if (TREE_CODE (arg) == ERROR_MARK)
3333     return arg;
3334 
3335   tem = fold_truth_not_expr (arg);
3336   if (!tem)
3337     tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3338 
3339   return tem;
3340 }
3341 
3342 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3343    operands are another bit-wise operation with a common input.  If so,
3344    distribute the bit operations to save an operation and possibly two if
3345    constants are involved.  For example, convert
3346 	(A | B) & (A | C) into A | (B & C)
3347    Further simplification will occur if B and C are constants.
3348 
3349    If this optimization cannot be done, 0 will be returned.  */
3350 
3351 static tree
distribute_bit_expr(enum tree_code code,tree type,tree arg0,tree arg1)3352 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3353 {
3354   tree common;
3355   tree left, right;
3356 
3357   if (TREE_CODE (arg0) != TREE_CODE (arg1)
3358       || TREE_CODE (arg0) == code
3359       || (TREE_CODE (arg0) != BIT_AND_EXPR
3360 	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
3361     return 0;
3362 
3363   if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3364     {
3365       common = TREE_OPERAND (arg0, 0);
3366       left = TREE_OPERAND (arg0, 1);
3367       right = TREE_OPERAND (arg1, 1);
3368     }
3369   else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3370     {
3371       common = TREE_OPERAND (arg0, 0);
3372       left = TREE_OPERAND (arg0, 1);
3373       right = TREE_OPERAND (arg1, 0);
3374     }
3375   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3376     {
3377       common = TREE_OPERAND (arg0, 1);
3378       left = TREE_OPERAND (arg0, 0);
3379       right = TREE_OPERAND (arg1, 1);
3380     }
3381   else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3382     {
3383       common = TREE_OPERAND (arg0, 1);
3384       left = TREE_OPERAND (arg0, 0);
3385       right = TREE_OPERAND (arg1, 0);
3386     }
3387   else
3388     return 0;
3389 
3390   return fold_build2 (TREE_CODE (arg0), type, common,
3391 		      fold_build2 (code, type, left, right));
3392 }
3393 
3394 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3395    with code CODE.  This optimization is unsafe.  */
3396 static tree
distribute_real_division(enum tree_code code,tree type,tree arg0,tree arg1)3397 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3398 {
3399   bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3400   bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3401 
3402   /* (A / C) +- (B / C) -> (A +- B) / C.  */
3403   if (mul0 == mul1
3404       && operand_equal_p (TREE_OPERAND (arg0, 1),
3405 		       TREE_OPERAND (arg1, 1), 0))
3406     return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3407 			fold_build2 (code, type,
3408 				     TREE_OPERAND (arg0, 0),
3409 				     TREE_OPERAND (arg1, 0)),
3410 			TREE_OPERAND (arg0, 1));
3411 
3412   /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
3413   if (operand_equal_p (TREE_OPERAND (arg0, 0),
3414 		       TREE_OPERAND (arg1, 0), 0)
3415       && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3416       && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3417     {
3418       REAL_VALUE_TYPE r0, r1;
3419       r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3420       r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3421       if (!mul0)
3422 	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3423       if (!mul1)
3424         real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3425       real_arithmetic (&r0, code, &r0, &r1);
3426       return fold_build2 (MULT_EXPR, type,
3427 			  TREE_OPERAND (arg0, 0),
3428 			  build_real (type, r0));
3429     }
3430 
3431   return NULL_TREE;
3432 }
3433 
3434 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3435    starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */
3436 
3437 static tree
make_bit_field_ref(tree inner,tree type,int bitsize,int bitpos,int unsignedp)3438 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3439 		    int unsignedp)
3440 {
3441   tree result;
3442 
3443   if (bitpos == 0)
3444     {
3445       tree size = TYPE_SIZE (TREE_TYPE (inner));
3446       if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3447 	   || POINTER_TYPE_P (TREE_TYPE (inner)))
3448 	  && host_integerp (size, 0)
3449 	  && tree_low_cst (size, 0) == bitsize)
3450 	return fold_convert (type, inner);
3451     }
3452 
3453   result = build3 (BIT_FIELD_REF, type, inner,
3454 		   size_int (bitsize), bitsize_int (bitpos));
3455 
3456   BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3457 
3458   return result;
3459 }
3460 
3461 /* Optimize a bit-field compare.
3462 
3463    There are two cases:  First is a compare against a constant and the
3464    second is a comparison of two items where the fields are at the same
3465    bit position relative to the start of a chunk (byte, halfword, word)
3466    large enough to contain it.  In these cases we can avoid the shift
3467    implicit in bitfield extractions.
3468 
3469    For constants, we emit a compare of the shifted constant with the
3470    BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3471    compared.  For two fields at the same position, we do the ANDs with the
3472    similar mask and compare the result of the ANDs.
3473 
3474    CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3475    COMPARE_TYPE is the type of the comparison, and LHS and RHS
3476    are the left and right operands of the comparison, respectively.
3477 
3478    If the optimization described above can be done, we return the resulting
3479    tree.  Otherwise we return zero.  */
3480 
3481 static tree
optimize_bit_field_compare(enum tree_code code,tree compare_type,tree lhs,tree rhs)3482 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3483 			    tree lhs, tree rhs)
3484 {
3485   HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3486   tree type = TREE_TYPE (lhs);
3487   tree signed_type, unsigned_type;
3488   int const_p = TREE_CODE (rhs) == INTEGER_CST;
3489   enum machine_mode lmode, rmode, nmode;
3490   int lunsignedp, runsignedp;
3491   int lvolatilep = 0, rvolatilep = 0;
3492   tree linner, rinner = NULL_TREE;
3493   tree mask;
3494   tree offset;
3495 
3496   /* Get all the information about the extractions being done.  If the bit size
3497      if the same as the size of the underlying object, we aren't doing an
3498      extraction at all and so can do nothing.  We also don't want to
3499      do anything if the inner expression is a PLACEHOLDER_EXPR since we
3500      then will no longer be able to replace it.  */
3501   linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3502 				&lunsignedp, &lvolatilep, false);
3503   if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3504       || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3505     return 0;
3506 
3507  if (!const_p)
3508    {
3509      /* If this is not a constant, we can only do something if bit positions,
3510 	sizes, and signedness are the same.  */
3511      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3512 				   &runsignedp, &rvolatilep, false);
3513 
3514      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3515 	 || lunsignedp != runsignedp || offset != 0
3516 	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3517        return 0;
3518    }
3519 
3520   /* See if we can find a mode to refer to this field.  We should be able to,
3521      but fail if we can't.  */
3522   nmode = get_best_mode (lbitsize, lbitpos,
3523 			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3524 			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3525 				TYPE_ALIGN (TREE_TYPE (rinner))),
3526 			 word_mode, lvolatilep || rvolatilep);
3527   if (nmode == VOIDmode)
3528     return 0;
3529 
3530   /* Set signed and unsigned types of the precision of this mode for the
3531      shifts below.  */
3532   signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3533   unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3534 
3535   /* Compute the bit position and size for the new reference and our offset
3536      within it. If the new reference is the same size as the original, we
3537      won't optimize anything, so return zero.  */
3538   nbitsize = GET_MODE_BITSIZE (nmode);
3539   nbitpos = lbitpos & ~ (nbitsize - 1);
3540   lbitpos -= nbitpos;
3541   if (nbitsize == lbitsize)
3542     return 0;
3543 
3544   if (BYTES_BIG_ENDIAN)
3545     lbitpos = nbitsize - lbitsize - lbitpos;
3546 
3547   /* Make the mask to be used against the extracted field.  */
3548   mask = build_int_cst (unsigned_type, -1);
3549   mask = force_fit_type (mask, 0, false, false);
3550   mask = fold_convert (unsigned_type, mask);
3551   mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3552   mask = const_binop (RSHIFT_EXPR, mask,
3553 		      size_int (nbitsize - lbitsize - lbitpos), 0);
3554 
3555   if (! const_p)
3556     /* If not comparing with constant, just rework the comparison
3557        and return.  */
3558     return build2 (code, compare_type,
3559 		   build2 (BIT_AND_EXPR, unsigned_type,
3560 			   make_bit_field_ref (linner, unsigned_type,
3561 					       nbitsize, nbitpos, 1),
3562 			   mask),
3563 		   build2 (BIT_AND_EXPR, unsigned_type,
3564 			   make_bit_field_ref (rinner, unsigned_type,
3565 					       nbitsize, nbitpos, 1),
3566 			   mask));
3567 
3568   /* Otherwise, we are handling the constant case. See if the constant is too
3569      big for the field.  Warn and return a tree of for 0 (false) if so.  We do
3570      this not only for its own sake, but to avoid having to test for this
3571      error case below.  If we didn't, we might generate wrong code.
3572 
3573      For unsigned fields, the constant shifted right by the field length should
3574      be all zero.  For signed fields, the high-order bits should agree with
3575      the sign bit.  */
3576 
3577   if (lunsignedp)
3578     {
3579       if (! integer_zerop (const_binop (RSHIFT_EXPR,
3580 					fold_convert (unsigned_type, rhs),
3581 					size_int (lbitsize), 0)))
3582 	{
3583 	  warning (0, "comparison is always %d due to width of bit-field",
3584 		   code == NE_EXPR);
3585 	  return constant_boolean_node (code == NE_EXPR, compare_type);
3586 	}
3587     }
3588   else
3589     {
3590       tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3591 			      size_int (lbitsize - 1), 0);
3592       if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3593 	{
3594 	  warning (0, "comparison is always %d due to width of bit-field",
3595 		   code == NE_EXPR);
3596 	  return constant_boolean_node (code == NE_EXPR, compare_type);
3597 	}
3598     }
3599 
3600   /* Single-bit compares should always be against zero.  */
3601   if (lbitsize == 1 && ! integer_zerop (rhs))
3602     {
3603       code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3604       rhs = build_int_cst (type, 0);
3605     }
3606 
3607   /* Make a new bitfield reference, shift the constant over the
3608      appropriate number of bits and mask it with the computed mask
3609      (in case this was a signed field).  If we changed it, make a new one.  */
3610   lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3611   if (lvolatilep)
3612     {
3613       TREE_SIDE_EFFECTS (lhs) = 1;
3614       TREE_THIS_VOLATILE (lhs) = 1;
3615     }
3616 
3617   rhs = const_binop (BIT_AND_EXPR,
3618 		     const_binop (LSHIFT_EXPR,
3619 				  fold_convert (unsigned_type, rhs),
3620 				  size_int (lbitpos), 0),
3621 		     mask, 0);
3622 
3623   return build2 (code, compare_type,
3624 		 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3625 		 rhs);
3626 }
3627 
3628 /* Subroutine for fold_truthop: decode a field reference.
3629 
3630    If EXP is a comparison reference, we return the innermost reference.
3631 
3632    *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3633    set to the starting bit number.
3634 
3635    If the innermost field can be completely contained in a mode-sized
3636    unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
3637 
3638    *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3639    otherwise it is not changed.
3640 
3641    *PUNSIGNEDP is set to the signedness of the field.
3642 
3643    *PMASK is set to the mask used.  This is either contained in a
3644    BIT_AND_EXPR or derived from the width of the field.
3645 
3646    *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3647 
3648    Return 0 if this is not a component reference or is one that we can't
3649    do anything with.  */
3650 
3651 static tree
decode_field_reference(tree exp,HOST_WIDE_INT * pbitsize,HOST_WIDE_INT * pbitpos,enum machine_mode * pmode,int * punsignedp,int * pvolatilep,tree * pmask,tree * pand_mask)3652 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3653 			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3654 			int *punsignedp, int *pvolatilep,
3655 			tree *pmask, tree *pand_mask)
3656 {
3657   tree outer_type = 0;
3658   tree and_mask = 0;
3659   tree mask, inner, offset;
3660   tree unsigned_type;
3661   unsigned int precision;
3662 
3663   /* All the optimizations using this function assume integer fields.
3664      There are problems with FP fields since the type_for_size call
3665      below can fail for, e.g., XFmode.  */
3666   if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3667     return 0;
3668 
3669   /* We are interested in the bare arrangement of bits, so strip everything
3670      that doesn't affect the machine mode.  However, record the type of the
3671      outermost expression if it may matter below.  */
3672   if (TREE_CODE (exp) == NOP_EXPR
3673       || TREE_CODE (exp) == CONVERT_EXPR
3674       || TREE_CODE (exp) == NON_LVALUE_EXPR)
3675     outer_type = TREE_TYPE (exp);
3676   STRIP_NOPS (exp);
3677 
3678   if (TREE_CODE (exp) == BIT_AND_EXPR)
3679     {
3680       and_mask = TREE_OPERAND (exp, 1);
3681       exp = TREE_OPERAND (exp, 0);
3682       STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3683       if (TREE_CODE (and_mask) != INTEGER_CST)
3684 	return 0;
3685     }
3686 
3687   inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3688 			       punsignedp, pvolatilep, false);
3689   if ((inner == exp && and_mask == 0)
3690       || *pbitsize < 0 || offset != 0
3691       || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3692     return 0;
3693 
3694   /* If the number of bits in the reference is the same as the bitsize of
3695      the outer type, then the outer type gives the signedness. Otherwise
3696      (in case of a small bitfield) the signedness is unchanged.  */
3697   if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3698     *punsignedp = TYPE_UNSIGNED (outer_type);
3699 
3700   /* Compute the mask to access the bitfield.  */
3701   unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3702   precision = TYPE_PRECISION (unsigned_type);
3703 
3704   mask = build_int_cst (unsigned_type, -1);
3705   mask = force_fit_type (mask, 0, false, false);
3706 
3707   mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3708   mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3709 
3710   /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
3711   if (and_mask != 0)
3712     mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3713 			fold_convert (unsigned_type, and_mask), mask);
3714 
3715   *pmask = mask;
3716   *pand_mask = and_mask;
3717   return inner;
3718 }
3719 
3720 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3721    bit positions.  */
3722 
3723 static int
all_ones_mask_p(tree mask,int size)3724 all_ones_mask_p (tree mask, int size)
3725 {
3726   tree type = TREE_TYPE (mask);
3727   unsigned int precision = TYPE_PRECISION (type);
3728   tree tmask;
3729 
3730   tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3731   tmask = force_fit_type (tmask, 0, false, false);
3732 
3733   return
3734     tree_int_cst_equal (mask,
3735 			const_binop (RSHIFT_EXPR,
3736 				     const_binop (LSHIFT_EXPR, tmask,
3737 						  size_int (precision - size),
3738 						  0),
3739 				     size_int (precision - size), 0));
3740 }
3741 
3742 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3743    represents the sign bit of EXP's type.  If EXP represents a sign
3744    or zero extension, also test VAL against the unextended type.
3745    The return value is the (sub)expression whose sign bit is VAL,
3746    or NULL_TREE otherwise.  */
3747 
3748 static tree
sign_bit_p(tree exp,tree val)3749 sign_bit_p (tree exp, tree val)
3750 {
3751   unsigned HOST_WIDE_INT mask_lo, lo;
3752   HOST_WIDE_INT mask_hi, hi;
3753   int width;
3754   tree t;
3755 
3756   /* Tree EXP must have an integral type.  */
3757   t = TREE_TYPE (exp);
3758   if (! INTEGRAL_TYPE_P (t))
3759     return NULL_TREE;
3760 
3761   /* Tree VAL must be an integer constant.  */
3762   if (TREE_CODE (val) != INTEGER_CST
3763       || TREE_CONSTANT_OVERFLOW (val))
3764     return NULL_TREE;
3765 
3766   width = TYPE_PRECISION (t);
3767   if (width > HOST_BITS_PER_WIDE_INT)
3768     {
3769       hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3770       lo = 0;
3771 
3772       mask_hi = ((unsigned HOST_WIDE_INT) -1
3773 		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3774       mask_lo = -1;
3775     }
3776   else
3777     {
3778       hi = 0;
3779       lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3780 
3781       mask_hi = 0;
3782       mask_lo = ((unsigned HOST_WIDE_INT) -1
3783 		 >> (HOST_BITS_PER_WIDE_INT - width));
3784     }
3785 
3786   /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3787      treat VAL as if it were unsigned.  */
3788   if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3789       && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3790     return exp;
3791 
3792   /* Handle extension from a narrower type.  */
3793   if (TREE_CODE (exp) == NOP_EXPR
3794       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3795     return sign_bit_p (TREE_OPERAND (exp, 0), val);
3796 
3797   return NULL_TREE;
3798 }
3799 
3800 /* Subroutine for fold_truthop: determine if an operand is simple enough
3801    to be evaluated unconditionally.  */
3802 
3803 static int
simple_operand_p(tree exp)3804 simple_operand_p (tree exp)
3805 {
3806   /* Strip any conversions that don't change the machine mode.  */
3807   STRIP_NOPS (exp);
3808 
3809   return (CONSTANT_CLASS_P (exp)
3810 	  || TREE_CODE (exp) == SSA_NAME
3811 	  || (DECL_P (exp)
3812 	      && ! TREE_ADDRESSABLE (exp)
3813 	      && ! TREE_THIS_VOLATILE (exp)
3814 	      && ! DECL_NONLOCAL (exp)
3815 	      /* Don't regard global variables as simple.  They may be
3816 		 allocated in ways unknown to the compiler (shared memory,
3817 		 #pragma weak, etc).  */
3818 	      && ! TREE_PUBLIC (exp)
3819 	      && ! DECL_EXTERNAL (exp)
3820 	      /* Loading a static variable is unduly expensive, but global
3821 		 registers aren't expensive.  */
3822 	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3823 }
3824 
3825 /* The following functions are subroutines to fold_range_test and allow it to
3826    try to change a logical combination of comparisons into a range test.
3827 
3828    For example, both
3829 	X == 2 || X == 3 || X == 4 || X == 5
3830    and
3831 	X >= 2 && X <= 5
3832    are converted to
3833 	(unsigned) (X - 2) <= 3
3834 
3835    We describe each set of comparisons as being either inside or outside
3836    a range, using a variable named like IN_P, and then describe the
3837    range with a lower and upper bound.  If one of the bounds is omitted,
3838    it represents either the highest or lowest value of the type.
3839 
3840    In the comments below, we represent a range by two numbers in brackets
3841    preceded by a "+" to designate being inside that range, or a "-" to
3842    designate being outside that range, so the condition can be inverted by
3843    flipping the prefix.  An omitted bound is represented by a "-".  For
3844    example, "- [-, 10]" means being outside the range starting at the lowest
3845    possible value and ending at 10, in other words, being greater than 10.
3846    The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3847    always false.
3848 
3849    We set up things so that the missing bounds are handled in a consistent
3850    manner so neither a missing bound nor "true" and "false" need to be
3851    handled using a special case.  */
3852 
3853 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3854    of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3855    and UPPER1_P are nonzero if the respective argument is an upper bound
3856    and zero for a lower.  TYPE, if nonzero, is the type of the result; it
3857    must be specified for a comparison.  ARG1 will be converted to ARG0's
3858    type if both are specified.  */
3859 
3860 static tree
range_binop(enum tree_code code,tree type,tree arg0,int upper0_p,tree arg1,int upper1_p)3861 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3862 	     tree arg1, int upper1_p)
3863 {
3864   tree tem;
3865   int result;
3866   int sgn0, sgn1;
3867 
3868   /* If neither arg represents infinity, do the normal operation.
3869      Else, if not a comparison, return infinity.  Else handle the special
3870      comparison rules. Note that most of the cases below won't occur, but
3871      are handled for consistency.  */
3872 
3873   if (arg0 != 0 && arg1 != 0)
3874     {
3875       tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3876 			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3877       STRIP_NOPS (tem);
3878       return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3879     }
3880 
3881   if (TREE_CODE_CLASS (code) != tcc_comparison)
3882     return 0;
3883 
3884   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3885      for neither.  In real maths, we cannot assume open ended ranges are
3886      the same. But, this is computer arithmetic, where numbers are finite.
3887      We can therefore make the transformation of any unbounded range with
3888      the value Z, Z being greater than any representable number. This permits
3889      us to treat unbounded ranges as equal.  */
3890   sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3891   sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3892   switch (code)
3893     {
3894     case EQ_EXPR:
3895       result = sgn0 == sgn1;
3896       break;
3897     case NE_EXPR:
3898       result = sgn0 != sgn1;
3899       break;
3900     case LT_EXPR:
3901       result = sgn0 < sgn1;
3902       break;
3903     case LE_EXPR:
3904       result = sgn0 <= sgn1;
3905       break;
3906     case GT_EXPR:
3907       result = sgn0 > sgn1;
3908       break;
3909     case GE_EXPR:
3910       result = sgn0 >= sgn1;
3911       break;
3912     default:
3913       gcc_unreachable ();
3914     }
3915 
3916   return constant_boolean_node (result, type);
3917 }
3918 
3919 /* Given EXP, a logical expression, set the range it is testing into
3920    variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
3921    actually being tested.  *PLOW and *PHIGH will be made of the same
3922    type as the returned expression.  If EXP is not a comparison, we
3923    will most likely not be returning a useful value and range.  Set
3924    *STRICT_OVERFLOW_P to true if the return value is only valid
3925    because signed overflow is undefined; otherwise, do not change
3926    *STRICT_OVERFLOW_P.  */
3927 
3928 static tree
make_range(tree exp,int * pin_p,tree * plow,tree * phigh,bool * strict_overflow_p)3929 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3930 	    bool *strict_overflow_p)
3931 {
3932   enum tree_code code;
3933   tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3934   tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3935   int in_p, n_in_p;
3936   tree low, high, n_low, n_high;
3937 
3938   /* Start with simply saying "EXP != 0" and then look at the code of EXP
3939      and see if we can refine the range.  Some of the cases below may not
3940      happen, but it doesn't seem worth worrying about this.  We "continue"
3941      the outer loop when we've changed something; otherwise we "break"
3942      the switch, which will "break" the while.  */
3943 
3944   in_p = 0;
3945   low = high = build_int_cst (TREE_TYPE (exp), 0);
3946 
3947   while (1)
3948     {
3949       code = TREE_CODE (exp);
3950       exp_type = TREE_TYPE (exp);
3951 
3952       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3953 	{
3954 	  if (TREE_CODE_LENGTH (code) > 0)
3955 	    arg0 = TREE_OPERAND (exp, 0);
3956 	  if (TREE_CODE_CLASS (code) == tcc_comparison
3957 	      || TREE_CODE_CLASS (code) == tcc_unary
3958 	      || TREE_CODE_CLASS (code) == tcc_binary)
3959 	    arg0_type = TREE_TYPE (arg0);
3960 	  if (TREE_CODE_CLASS (code) == tcc_binary
3961 	      || TREE_CODE_CLASS (code) == tcc_comparison
3962 	      || (TREE_CODE_CLASS (code) == tcc_expression
3963 		  && TREE_CODE_LENGTH (code) > 1))
3964 	    arg1 = TREE_OPERAND (exp, 1);
3965 	}
3966 
3967       switch (code)
3968 	{
3969 	case TRUTH_NOT_EXPR:
3970 	  in_p = ! in_p, exp = arg0;
3971 	  continue;
3972 
3973 	case EQ_EXPR: case NE_EXPR:
3974 	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3975 	  /* We can only do something if the range is testing for zero
3976 	     and if the second operand is an integer constant.  Note that
3977 	     saying something is "in" the range we make is done by
3978 	     complementing IN_P since it will set in the initial case of
3979 	     being not equal to zero; "out" is leaving it alone.  */
3980 	  if (low == 0 || high == 0
3981 	      || ! integer_zerop (low) || ! integer_zerop (high)
3982 	      || TREE_CODE (arg1) != INTEGER_CST)
3983 	    break;
3984 
3985 	  switch (code)
3986 	    {
3987 	    case NE_EXPR:  /* - [c, c]  */
3988 	      low = high = arg1;
3989 	      break;
3990 	    case EQ_EXPR:  /* + [c, c]  */
3991 	      in_p = ! in_p, low = high = arg1;
3992 	      break;
3993 	    case GT_EXPR:  /* - [-, c] */
3994 	      low = 0, high = arg1;
3995 	      break;
3996 	    case GE_EXPR:  /* + [c, -] */
3997 	      in_p = ! in_p, low = arg1, high = 0;
3998 	      break;
3999 	    case LT_EXPR:  /* - [c, -] */
4000 	      low = arg1, high = 0;
4001 	      break;
4002 	    case LE_EXPR:  /* + [-, c] */
4003 	      in_p = ! in_p, low = 0, high = arg1;
4004 	      break;
4005 	    default:
4006 	      gcc_unreachable ();
4007 	    }
4008 
4009 	  /* If this is an unsigned comparison, we also know that EXP is
4010 	     greater than or equal to zero.  We base the range tests we make
4011 	     on that fact, so we record it here so we can parse existing
4012 	     range tests.  We test arg0_type since often the return type
4013 	     of, e.g. EQ_EXPR, is boolean.  */
4014 	  if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4015 	    {
4016 	      if (! merge_ranges (&n_in_p, &n_low, &n_high,
4017 				  in_p, low, high, 1,
4018 				  build_int_cst (arg0_type, 0),
4019 				  NULL_TREE))
4020 		break;
4021 
4022 	      in_p = n_in_p, low = n_low, high = n_high;
4023 
4024 	      /* If the high bound is missing, but we have a nonzero low
4025 		 bound, reverse the range so it goes from zero to the low bound
4026 		 minus 1.  */
4027 	      if (high == 0 && low && ! integer_zerop (low))
4028 		{
4029 		  in_p = ! in_p;
4030 		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4031 				      integer_one_node, 0);
4032 		  low = build_int_cst (arg0_type, 0);
4033 		}
4034 	    }
4035 
4036 	  exp = arg0;
4037 	  continue;
4038 
4039 	case NEGATE_EXPR:
4040 	  /* (-x) IN [a,b] -> x in [-b, -a]  */
4041 	  n_low = range_binop (MINUS_EXPR, exp_type,
4042 			       build_int_cst (exp_type, 0),
4043 			       0, high, 1);
4044 	  n_high = range_binop (MINUS_EXPR, exp_type,
4045 				build_int_cst (exp_type, 0),
4046 				0, low, 0);
4047 	  low = n_low, high = n_high;
4048 	  exp = arg0;
4049 	  continue;
4050 
4051 	case BIT_NOT_EXPR:
4052 	  /* ~ X -> -X - 1  */
4053 	  exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4054 			build_int_cst (exp_type, 1));
4055 	  continue;
4056 
4057 	case PLUS_EXPR:  case MINUS_EXPR:
4058 	  if (TREE_CODE (arg1) != INTEGER_CST)
4059 	    break;
4060 
4061 	  /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4062 	     move a constant to the other side.  */
4063 	  if (!TYPE_UNSIGNED (arg0_type)
4064 	      && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4065 	    break;
4066 
4067 	  /* If EXP is signed, any overflow in the computation is undefined,
4068 	     so we don't worry about it so long as our computations on
4069 	     the bounds don't overflow.  For unsigned, overflow is defined
4070 	     and this is exactly the right thing.  */
4071 	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4072 			       arg0_type, low, 0, arg1, 0);
4073 	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4074 				arg0_type, high, 1, arg1, 0);
4075 	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
4076 	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
4077 	    break;
4078 
4079 	  if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4080 	    *strict_overflow_p = true;
4081 
4082 	  /* Check for an unsigned range which has wrapped around the maximum
4083 	     value thus making n_high < n_low, and normalize it.  */
4084 	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4085 	    {
4086 	      low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4087 				 integer_one_node, 0);
4088 	      high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4089 				  integer_one_node, 0);
4090 
4091 	      /* If the range is of the form +/- [ x+1, x ], we won't
4092 		 be able to normalize it.  But then, it represents the
4093 		 whole range or the empty set, so make it
4094 		 +/- [ -, - ].  */
4095 	      if (tree_int_cst_equal (n_low, low)
4096 		  && tree_int_cst_equal (n_high, high))
4097 		low = high = 0;
4098 	      else
4099 		in_p = ! in_p;
4100 	    }
4101 	  else
4102 	    low = n_low, high = n_high;
4103 
4104 	  exp = arg0;
4105 	  continue;
4106 
4107 	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
4108 	  if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4109 	    break;
4110 
4111 	  if (! INTEGRAL_TYPE_P (arg0_type)
4112 	      || (low != 0 && ! int_fits_type_p (low, arg0_type))
4113 	      || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4114 	    break;
4115 
4116 	  n_low = low, n_high = high;
4117 
4118 	  if (n_low != 0)
4119 	    n_low = fold_convert (arg0_type, n_low);
4120 
4121 	  if (n_high != 0)
4122 	    n_high = fold_convert (arg0_type, n_high);
4123 
4124 
4125 	  /* If we're converting arg0 from an unsigned type, to exp,
4126 	     a signed type,  we will be doing the comparison as unsigned.
4127 	     The tests above have already verified that LOW and HIGH
4128 	     are both positive.
4129 
4130 	     So we have to ensure that we will handle large unsigned
4131 	     values the same way that the current signed bounds treat
4132 	     negative values.  */
4133 
4134 	  if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4135 	    {
4136 	      tree high_positive;
4137 	      tree equiv_type = lang_hooks.types.type_for_mode
4138 		(TYPE_MODE (arg0_type), 1);
4139 
4140 	      /* A range without an upper bound is, naturally, unbounded.
4141 		 Since convert would have cropped a very large value, use
4142 		 the max value for the destination type.  */
4143 	      high_positive
4144 		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4145 		: TYPE_MAX_VALUE (arg0_type);
4146 
4147 	      if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4148 		high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4149 					     fold_convert (arg0_type,
4150 							   high_positive),
4151 					     fold_convert (arg0_type,
4152 							   integer_one_node));
4153 
4154 	      /* If the low bound is specified, "and" the range with the
4155 		 range for which the original unsigned value will be
4156 		 positive.  */
4157 	      if (low != 0)
4158 		{
4159 		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
4160 				      1, n_low, n_high, 1,
4161 				      fold_convert (arg0_type,
4162 						    integer_zero_node),
4163 				      high_positive))
4164 		    break;
4165 
4166 		  in_p = (n_in_p == in_p);
4167 		}
4168 	      else
4169 		{
4170 		  /* Otherwise, "or" the range with the range of the input
4171 		     that will be interpreted as negative.  */
4172 		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
4173 				      0, n_low, n_high, 1,
4174 				      fold_convert (arg0_type,
4175 						    integer_zero_node),
4176 				      high_positive))
4177 		    break;
4178 
4179 		  in_p = (in_p != n_in_p);
4180 		}
4181 	    }
4182 
4183 	  exp = arg0;
4184 	  low = n_low, high = n_high;
4185 	  continue;
4186 
4187 	default:
4188 	  break;
4189 	}
4190 
4191       break;
4192     }
4193 
4194   /* If EXP is a constant, we can evaluate whether this is true or false.  */
4195   if (TREE_CODE (exp) == INTEGER_CST)
4196     {
4197       in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4198 						 exp, 0, low, 0))
4199 		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
4200 						    exp, 1, high, 1)));
4201       low = high = 0;
4202       exp = 0;
4203     }
4204 
4205   *pin_p = in_p, *plow = low, *phigh = high;
4206   return exp;
4207 }
4208 
4209 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4210    type, TYPE, return an expression to test if EXP is in (or out of, depending
4211    on IN_P) the range.  Return 0 if the test couldn't be created.  */
4212 
4213 static tree
build_range_check(tree type,tree exp,int in_p,tree low,tree high)4214 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4215 {
4216   tree etype = TREE_TYPE (exp);
4217   tree value;
4218 
4219 #ifdef HAVE_canonicalize_funcptr_for_compare
4220   /* Disable this optimization for function pointer expressions
4221      on targets that require function pointer canonicalization.  */
4222   if (HAVE_canonicalize_funcptr_for_compare
4223       && TREE_CODE (etype) == POINTER_TYPE
4224       && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4225     return NULL_TREE;
4226 #endif
4227 
4228   if (! in_p)
4229     {
4230       value = build_range_check (type, exp, 1, low, high);
4231       if (value != 0)
4232         return invert_truthvalue (value);
4233 
4234       return 0;
4235     }
4236 
4237   if (low == 0 && high == 0)
4238     return build_int_cst (type, 1);
4239 
4240   if (low == 0)
4241     return fold_build2 (LE_EXPR, type, exp,
4242 			fold_convert (etype, high));
4243 
4244   if (high == 0)
4245     return fold_build2 (GE_EXPR, type, exp,
4246 			fold_convert (etype, low));
4247 
4248   if (operand_equal_p (low, high, 0))
4249     return fold_build2 (EQ_EXPR, type, exp,
4250 			fold_convert (etype, low));
4251 
4252   if (integer_zerop (low))
4253     {
4254       if (! TYPE_UNSIGNED (etype))
4255 	{
4256 	  etype = lang_hooks.types.unsigned_type (etype);
4257 	  high = fold_convert (etype, high);
4258 	  exp = fold_convert (etype, exp);
4259 	}
4260       return build_range_check (type, exp, 1, 0, high);
4261     }
4262 
4263   /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
4264   if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4265     {
4266       unsigned HOST_WIDE_INT lo;
4267       HOST_WIDE_INT hi;
4268       int prec;
4269 
4270       prec = TYPE_PRECISION (etype);
4271       if (prec <= HOST_BITS_PER_WIDE_INT)
4272 	{
4273 	  hi = 0;
4274 	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4275 	}
4276       else
4277 	{
4278 	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4279 	  lo = (unsigned HOST_WIDE_INT) -1;
4280 	}
4281 
4282       if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4283 	{
4284 	  if (TYPE_UNSIGNED (etype))
4285 	    {
4286 	      etype = lang_hooks.types.signed_type (etype);
4287 	      exp = fold_convert (etype, exp);
4288 	    }
4289 	  return fold_build2 (GT_EXPR, type, exp,
4290 			      build_int_cst (etype, 0));
4291 	}
4292     }
4293 
4294   /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4295      This requires wrap-around arithmetics for the type of the expression.  */
4296   switch (TREE_CODE (etype))
4297     {
4298     case INTEGER_TYPE:
4299       /* There is no requirement that LOW be within the range of ETYPE
4300 	 if the latter is a subtype.  It must, however, be within the base
4301 	 type of ETYPE.  So be sure we do the subtraction in that type.  */
4302       if (TREE_TYPE (etype))
4303 	etype = TREE_TYPE (etype);
4304       break;
4305 
4306     case ENUMERAL_TYPE:
4307     case BOOLEAN_TYPE:
4308       etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4309 					      TYPE_UNSIGNED (etype));
4310       break;
4311 
4312     default:
4313       break;
4314     }
4315 
4316   /* If we don't have wrap-around arithmetics upfront, try to force it.  */
4317   if (TREE_CODE (etype) == INTEGER_TYPE
4318       && !TYPE_OVERFLOW_WRAPS (etype))
4319     {
4320       tree utype, minv, maxv;
4321 
4322       /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4323 	 for the type in question, as we rely on this here.  */
4324       utype = lang_hooks.types.unsigned_type (etype);
4325       maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4326       maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4327 			  integer_one_node, 1);
4328       minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4329 
4330       if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4331 				      minv, 1, maxv, 1)))
4332 	etype = utype;
4333       else
4334 	return 0;
4335     }
4336 
4337   high = fold_convert (etype, high);
4338   low = fold_convert (etype, low);
4339   exp = fold_convert (etype, exp);
4340 
4341   value = const_binop (MINUS_EXPR, high, low, 0);
4342 
4343   if (value != 0 && !TREE_OVERFLOW (value))
4344     return build_range_check (type,
4345 			      fold_build2 (MINUS_EXPR, etype, exp, low),
4346 			      1, build_int_cst (etype, 0), value);
4347 
4348   return 0;
4349 }
4350 
4351 /* Return the predecessor of VAL in its type, handling the infinite case.  */
4352 
4353 static tree
range_predecessor(tree val)4354 range_predecessor (tree val)
4355 {
4356   tree type = TREE_TYPE (val);
4357 
4358   if (INTEGRAL_TYPE_P (type)
4359       && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4360     return 0;
4361   else
4362     return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4363 }
4364 
4365 /* Return the successor of VAL in its type, handling the infinite case.  */
4366 
4367 static tree
range_successor(tree val)4368 range_successor (tree val)
4369 {
4370   tree type = TREE_TYPE (val);
4371 
4372   if (INTEGRAL_TYPE_P (type)
4373       && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4374     return 0;
4375   else
4376     return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4377 }
4378 
4379 /* Given two ranges, see if we can merge them into one.  Return 1 if we
4380    can, 0 if we can't.  Set the output range into the specified parameters.  */
4381 
4382 static int
merge_ranges(int * pin_p,tree * plow,tree * phigh,int in0_p,tree low0,tree high0,int in1_p,tree low1,tree high1)4383 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4384 	      tree high0, int in1_p, tree low1, tree high1)
4385 {
4386   int no_overlap;
4387   int subset;
4388   int temp;
4389   tree tem;
4390   int in_p;
4391   tree low, high;
4392   int lowequal = ((low0 == 0 && low1 == 0)
4393 		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4394 						low0, 0, low1, 0)));
4395   int highequal = ((high0 == 0 && high1 == 0)
4396 		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4397 						 high0, 1, high1, 1)));
4398 
4399   /* Make range 0 be the range that starts first, or ends last if they
4400      start at the same value.  Swap them if it isn't.  */
4401   if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4402 				 low0, 0, low1, 0))
4403       || (lowequal
4404 	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
4405 					high1, 1, high0, 1))))
4406     {
4407       temp = in0_p, in0_p = in1_p, in1_p = temp;
4408       tem = low0, low0 = low1, low1 = tem;
4409       tem = high0, high0 = high1, high1 = tem;
4410     }
4411 
4412   /* Now flag two cases, whether the ranges are disjoint or whether the
4413      second range is totally subsumed in the first.  Note that the tests
4414      below are simplified by the ones above.  */
4415   no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4416 					  high0, 1, low1, 0));
4417   subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4418 				      high1, 1, high0, 1));
4419 
4420   /* We now have four cases, depending on whether we are including or
4421      excluding the two ranges.  */
4422   if (in0_p && in1_p)
4423     {
4424       /* If they don't overlap, the result is false.  If the second range
4425 	 is a subset it is the result.  Otherwise, the range is from the start
4426 	 of the second to the end of the first.  */
4427       if (no_overlap)
4428 	in_p = 0, low = high = 0;
4429       else if (subset)
4430 	in_p = 1, low = low1, high = high1;
4431       else
4432 	in_p = 1, low = low1, high = high0;
4433     }
4434 
4435   else if (in0_p && ! in1_p)
4436     {
4437       /* If they don't overlap, the result is the first range.  If they are
4438 	 equal, the result is false.  If the second range is a subset of the
4439 	 first, and the ranges begin at the same place, we go from just after
4440 	 the end of the second range to the end of the first.  If the second
4441 	 range is not a subset of the first, or if it is a subset and both
4442 	 ranges end at the same place, the range starts at the start of the
4443 	 first range and ends just before the second range.
4444 	 Otherwise, we can't describe this as a single range.  */
4445       if (no_overlap)
4446 	in_p = 1, low = low0, high = high0;
4447       else if (lowequal && highequal)
4448 	in_p = 0, low = high = 0;
4449       else if (subset && lowequal)
4450 	{
4451 	  low = range_successor (high1);
4452 	  high = high0;
4453 	  in_p = 1;
4454 	  if (low == 0)
4455 	    {
4456 	      /* We are in the weird situation where high0 > high1 but
4457 		 high1 has no successor.  Punt.  */
4458 	      return 0;
4459 	    }
4460 	}
4461       else if (! subset || highequal)
4462 	{
4463 	  low = low0;
4464 	  high = range_predecessor (low1);
4465 	  in_p = 1;
4466 	  if (high == 0)
4467 	    {
4468 	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
4469 	      return 0;
4470 	    }
4471 	}
4472       else
4473 	return 0;
4474     }
4475 
4476   else if (! in0_p && in1_p)
4477     {
4478       /* If they don't overlap, the result is the second range.  If the second
4479 	 is a subset of the first, the result is false.  Otherwise,
4480 	 the range starts just after the first range and ends at the
4481 	 end of the second.  */
4482       if (no_overlap)
4483 	in_p = 1, low = low1, high = high1;
4484       else if (subset || highequal)
4485 	in_p = 0, low = high = 0;
4486       else
4487 	{
4488 	  low = range_successor (high0);
4489 	  high = high1;
4490 	  in_p = 1;
4491 	  if (low == 0)
4492 	    {
4493 	      /* high1 > high0 but high0 has no successor.  Punt.  */
4494 	      return 0;
4495 	    }
4496 	}
4497     }
4498 
4499   else
4500     {
4501       /* The case where we are excluding both ranges.  Here the complex case
4502 	 is if they don't overlap.  In that case, the only time we have a
4503 	 range is if they are adjacent.  If the second is a subset of the
4504 	 first, the result is the first.  Otherwise, the range to exclude
4505 	 starts at the beginning of the first range and ends at the end of the
4506 	 second.  */
4507       if (no_overlap)
4508 	{
4509 	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4510 					 range_successor (high0),
4511 					 1, low1, 0)))
4512 	    in_p = 0, low = low0, high = high1;
4513 	  else
4514 	    {
4515 	      /* Canonicalize - [min, x] into - [-, x].  */
4516 	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
4517 		switch (TREE_CODE (TREE_TYPE (low0)))
4518 		  {
4519 		  case ENUMERAL_TYPE:
4520 		    if (TYPE_PRECISION (TREE_TYPE (low0))
4521 			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4522 		      break;
4523 		    /* FALLTHROUGH */
4524 		  case INTEGER_TYPE:
4525 		    if (tree_int_cst_equal (low0,
4526 					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
4527 		      low0 = 0;
4528 		    break;
4529 		  case POINTER_TYPE:
4530 		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
4531 			&& integer_zerop (low0))
4532 		      low0 = 0;
4533 		    break;
4534 		  default:
4535 		    break;
4536 		  }
4537 
4538 	      /* Canonicalize - [x, max] into - [x, -].  */
4539 	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
4540 		switch (TREE_CODE (TREE_TYPE (high1)))
4541 		  {
4542 		  case ENUMERAL_TYPE:
4543 		    if (TYPE_PRECISION (TREE_TYPE (high1))
4544 			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4545 		      break;
4546 		    /* FALLTHROUGH */
4547 		  case INTEGER_TYPE:
4548 		    if (tree_int_cst_equal (high1,
4549 					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
4550 		      high1 = 0;
4551 		    break;
4552 		  case POINTER_TYPE:
4553 		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
4554 			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4555 						       high1, 1,
4556 						       integer_one_node, 1)))
4557 		      high1 = 0;
4558 		    break;
4559 		  default:
4560 		    break;
4561 		  }
4562 
4563 	      /* The ranges might be also adjacent between the maximum and
4564 	         minimum values of the given type.  For
4565 	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4566 	         return + [x + 1, y - 1].  */
4567 	      if (low0 == 0 && high1 == 0)
4568 	        {
4569 		  low = range_successor (high0);
4570 		  high = range_predecessor (low1);
4571 		  if (low == 0 || high == 0)
4572 		    return 0;
4573 
4574 		  in_p = 1;
4575 		}
4576 	      else
4577 		return 0;
4578 	    }
4579 	}
4580       else if (subset)
4581 	in_p = 0, low = low0, high = high0;
4582       else
4583 	in_p = 0, low = low0, high = high1;
4584     }
4585 
4586   *pin_p = in_p, *plow = low, *phigh = high;
4587   return 1;
4588 }
4589 
4590 
4591 /* Subroutine of fold, looking inside expressions of the form
4592    A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4593    of the COND_EXPR.  This function is being used also to optimize
4594    A op B ? C : A, by reversing the comparison first.
4595 
4596    Return a folded expression whose code is not a COND_EXPR
4597    anymore, or NULL_TREE if no folding opportunity is found.  */
4598 
4599 static tree
fold_cond_expr_with_comparison(tree type,tree arg0,tree arg1,tree arg2)4600 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4601 {
4602   enum tree_code comp_code = TREE_CODE (arg0);
4603   tree arg00 = TREE_OPERAND (arg0, 0);
4604   tree arg01 = TREE_OPERAND (arg0, 1);
4605   tree arg1_type = TREE_TYPE (arg1);
4606   tree tem;
4607 
4608   STRIP_NOPS (arg1);
4609   STRIP_NOPS (arg2);
4610 
4611   /* If we have A op 0 ? A : -A, consider applying the following
4612      transformations:
4613 
4614      A == 0? A : -A    same as -A
4615      A != 0? A : -A    same as A
4616      A >= 0? A : -A    same as abs (A)
4617      A > 0?  A : -A    same as abs (A)
4618      A <= 0? A : -A    same as -abs (A)
4619      A < 0?  A : -A    same as -abs (A)
4620 
4621      None of these transformations work for modes with signed
4622      zeros.  If A is +/-0, the first two transformations will
4623      change the sign of the result (from +0 to -0, or vice
4624      versa).  The last four will fix the sign of the result,
4625      even though the original expressions could be positive or
4626      negative, depending on the sign of A.
4627 
4628      Note that all these transformations are correct if A is
4629      NaN, since the two alternatives (A and -A) are also NaNs.  */
4630   if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4631        ? real_zerop (arg01)
4632        : integer_zerop (arg01))
4633       && ((TREE_CODE (arg2) == NEGATE_EXPR
4634 	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4635 	     /* In the case that A is of the form X-Y, '-A' (arg2) may
4636 	        have already been folded to Y-X, check for that. */
4637 	  || (TREE_CODE (arg1) == MINUS_EXPR
4638 	      && TREE_CODE (arg2) == MINUS_EXPR
4639 	      && operand_equal_p (TREE_OPERAND (arg1, 0),
4640 				  TREE_OPERAND (arg2, 1), 0)
4641 	      && operand_equal_p (TREE_OPERAND (arg1, 1),
4642 				  TREE_OPERAND (arg2, 0), 0))))
4643     switch (comp_code)
4644       {
4645       case EQ_EXPR:
4646       case UNEQ_EXPR:
4647 	tem = fold_convert (arg1_type, arg1);
4648 	return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4649       case NE_EXPR:
4650       case LTGT_EXPR:
4651 	return pedantic_non_lvalue (fold_convert (type, arg1));
4652       case UNGE_EXPR:
4653       case UNGT_EXPR:
4654 	if (flag_trapping_math)
4655 	  break;
4656 	/* Fall through.  */
4657       case GE_EXPR:
4658       case GT_EXPR:
4659 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4660 	  arg1 = fold_convert (lang_hooks.types.signed_type
4661 			       (TREE_TYPE (arg1)), arg1);
4662 	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4663 	return pedantic_non_lvalue (fold_convert (type, tem));
4664       case UNLE_EXPR:
4665       case UNLT_EXPR:
4666 	if (flag_trapping_math)
4667 	  break;
4668       case LE_EXPR:
4669       case LT_EXPR:
4670 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4671 	  arg1 = fold_convert (lang_hooks.types.signed_type
4672 			       (TREE_TYPE (arg1)), arg1);
4673 	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4674 	return negate_expr (fold_convert (type, tem));
4675       default:
4676 	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4677 	break;
4678       }
4679 
4680   /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
4681      A == 0 ? A : 0 is always 0 unless A is -0.  Note that
4682      both transformations are correct when A is NaN: A != 0
4683      is then true, and A == 0 is false.  */
4684 
4685   if (integer_zerop (arg01) && integer_zerop (arg2))
4686     {
4687       if (comp_code == NE_EXPR)
4688 	return pedantic_non_lvalue (fold_convert (type, arg1));
4689       else if (comp_code == EQ_EXPR)
4690 	return build_int_cst (type, 0);
4691     }
4692 
4693   /* Try some transformations of A op B ? A : B.
4694 
4695      A == B? A : B    same as B
4696      A != B? A : B    same as A
4697      A >= B? A : B    same as max (A, B)
4698      A > B?  A : B    same as max (B, A)
4699      A <= B? A : B    same as min (A, B)
4700      A < B?  A : B    same as min (B, A)
4701 
4702      As above, these transformations don't work in the presence
4703      of signed zeros.  For example, if A and B are zeros of
4704      opposite sign, the first two transformations will change
4705      the sign of the result.  In the last four, the original
4706      expressions give different results for (A=+0, B=-0) and
4707      (A=-0, B=+0), but the transformed expressions do not.
4708 
4709      The first two transformations are correct if either A or B
4710      is a NaN.  In the first transformation, the condition will
4711      be false, and B will indeed be chosen.  In the case of the
4712      second transformation, the condition A != B will be true,
4713      and A will be chosen.
4714 
4715      The conversions to max() and min() are not correct if B is
4716      a number and A is not.  The conditions in the original
4717      expressions will be false, so all four give B.  The min()
4718      and max() versions would give a NaN instead.  */
4719   if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4720       /* Avoid these transformations if the COND_EXPR may be used
4721 	 as an lvalue in the C++ front-end.  PR c++/19199.  */
4722       && (in_gimple_form
4723 	  || (strcmp (lang_hooks.name, "GNU C++") != 0
4724 	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4725 	  || ! maybe_lvalue_p (arg1)
4726 	  || ! maybe_lvalue_p (arg2)))
4727     {
4728       tree comp_op0 = arg00;
4729       tree comp_op1 = arg01;
4730       tree comp_type = TREE_TYPE (comp_op0);
4731 
4732       /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
4733       if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4734 	{
4735 	  comp_type = type;
4736 	  comp_op0 = arg1;
4737 	  comp_op1 = arg2;
4738 	}
4739 
4740       switch (comp_code)
4741 	{
4742 	case EQ_EXPR:
4743 	  return pedantic_non_lvalue (fold_convert (type, arg2));
4744 	case NE_EXPR:
4745 	  return pedantic_non_lvalue (fold_convert (type, arg1));
4746 	case LE_EXPR:
4747 	case LT_EXPR:
4748 	case UNLE_EXPR:
4749 	case UNLT_EXPR:
4750 	  /* In C++ a ?: expression can be an lvalue, so put the
4751 	     operand which will be used if they are equal first
4752 	     so that we can convert this back to the
4753 	     corresponding COND_EXPR.  */
4754 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4755 	    {
4756 	      comp_op0 = fold_convert (comp_type, comp_op0);
4757 	      comp_op1 = fold_convert (comp_type, comp_op1);
4758 	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4759 		    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4760 		    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4761 	      return pedantic_non_lvalue (fold_convert (type, tem));
4762 	    }
4763 	  break;
4764 	case GE_EXPR:
4765 	case GT_EXPR:
4766 	case UNGE_EXPR:
4767 	case UNGT_EXPR:
4768 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4769 	    {
4770 	      comp_op0 = fold_convert (comp_type, comp_op0);
4771 	      comp_op1 = fold_convert (comp_type, comp_op1);
4772 	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4773 		    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4774 		    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4775 	      return pedantic_non_lvalue (fold_convert (type, tem));
4776 	    }
4777 	  break;
4778 	case UNEQ_EXPR:
4779 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4780 	    return pedantic_non_lvalue (fold_convert (type, arg2));
4781 	  break;
4782 	case LTGT_EXPR:
4783 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4784 	    return pedantic_non_lvalue (fold_convert (type, arg1));
4785 	  break;
4786 	default:
4787 	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4788 	  break;
4789 	}
4790     }
4791 
4792   /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4793      we might still be able to simplify this.  For example,
4794      if C1 is one less or one more than C2, this might have started
4795      out as a MIN or MAX and been transformed by this function.
4796      Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */
4797 
4798   if (INTEGRAL_TYPE_P (type)
4799       && TREE_CODE (arg01) == INTEGER_CST
4800       && TREE_CODE (arg2) == INTEGER_CST)
4801     switch (comp_code)
4802       {
4803       case EQ_EXPR:
4804 	/* We can replace A with C1 in this case.  */
4805 	arg1 = fold_convert (type, arg01);
4806 	return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4807 
4808       case LT_EXPR:
4809 	/* If C1 is C2 + 1, this is min(A, C2).  */
4810 	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4811 			       OEP_ONLY_CONST)
4812 	    && operand_equal_p (arg01,
4813 				const_binop (PLUS_EXPR, arg2,
4814 					     integer_one_node, 0),
4815 				OEP_ONLY_CONST))
4816 	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4817 						   type, arg1, arg2));
4818 	break;
4819 
4820       case LE_EXPR:
4821 	/* If C1 is C2 - 1, this is min(A, C2).  */
4822 	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4823 			       OEP_ONLY_CONST)
4824 	    && operand_equal_p (arg01,
4825 				const_binop (MINUS_EXPR, arg2,
4826 					     integer_one_node, 0),
4827 				OEP_ONLY_CONST))
4828 	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4829 						   type, arg1, arg2));
4830 	break;
4831 
4832       case GT_EXPR:
4833 	/* If C1 is C2 - 1, this is max(A, C2).  */
4834 	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4835 			       OEP_ONLY_CONST)
4836 	    && operand_equal_p (arg01,
4837 				const_binop (MINUS_EXPR, arg2,
4838 					     integer_one_node, 0),
4839 				OEP_ONLY_CONST))
4840 	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4841 						   type, arg1, arg2));
4842 	break;
4843 
4844       case GE_EXPR:
4845 	/* If C1 is C2 + 1, this is max(A, C2).  */
4846 	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4847 			       OEP_ONLY_CONST)
4848 	    && operand_equal_p (arg01,
4849 				const_binop (PLUS_EXPR, arg2,
4850 					     integer_one_node, 0),
4851 				OEP_ONLY_CONST))
4852 	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4853 						   type, arg1, arg2));
4854 	break;
4855       case NE_EXPR:
4856 	break;
4857       default:
4858 	gcc_unreachable ();
4859       }
4860 
4861   return NULL_TREE;
4862 }
4863 
4864 
4865 
4866 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4867 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4868 #endif
4869 
4870 /* EXP is some logical combination of boolean tests.  See if we can
4871    merge it into some range test.  Return the new tree if so.  */
4872 
4873 static tree
fold_range_test(enum tree_code code,tree type,tree op0,tree op1)4874 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4875 {
4876   int or_op = (code == TRUTH_ORIF_EXPR
4877 	       || code == TRUTH_OR_EXPR);
4878   int in0_p, in1_p, in_p;
4879   tree low0, low1, low, high0, high1, high;
4880   bool strict_overflow_p = false;
4881   tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4882   tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4883   tree tem;
4884   const char * const warnmsg = G_("assuming signed overflow does not occur "
4885 				  "when simplifying range test");
4886 
4887   /* If this is an OR operation, invert both sides; we will invert
4888      again at the end.  */
4889   if (or_op)
4890     in0_p = ! in0_p, in1_p = ! in1_p;
4891 
4892   /* If both expressions are the same, if we can merge the ranges, and we
4893      can build the range test, return it or it inverted.  If one of the
4894      ranges is always true or always false, consider it to be the same
4895      expression as the other.  */
4896   if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4897       && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4898 		       in1_p, low1, high1)
4899       && 0 != (tem = (build_range_check (type,
4900 					 lhs != 0 ? lhs
4901 					 : rhs != 0 ? rhs : integer_zero_node,
4902 					 in_p, low, high))))
4903     {
4904       if (strict_overflow_p)
4905 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4906       return or_op ? invert_truthvalue (tem) : tem;
4907     }
4908 
4909   /* On machines where the branch cost is expensive, if this is a
4910      short-circuited branch and the underlying object on both sides
4911      is the same, make a non-short-circuit operation.  */
4912   else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4913 	   && lhs != 0 && rhs != 0
4914 	   && (code == TRUTH_ANDIF_EXPR
4915 	       || code == TRUTH_ORIF_EXPR)
4916 	   && operand_equal_p (lhs, rhs, 0))
4917     {
4918       /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
4919 	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4920 	 which cases we can't do this.  */
4921       if (simple_operand_p (lhs))
4922 	return build2 (code == TRUTH_ANDIF_EXPR
4923 		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4924 		       type, op0, op1);
4925 
4926       else if (lang_hooks.decls.global_bindings_p () == 0
4927 	       && ! CONTAINS_PLACEHOLDER_P (lhs))
4928 	{
4929 	  tree common = save_expr (lhs);
4930 
4931 	  if (0 != (lhs = build_range_check (type, common,
4932 					     or_op ? ! in0_p : in0_p,
4933 					     low0, high0))
4934 	      && (0 != (rhs = build_range_check (type, common,
4935 						 or_op ? ! in1_p : in1_p,
4936 						 low1, high1))))
4937 	    {
4938 	      if (strict_overflow_p)
4939 		fold_overflow_warning (warnmsg,
4940 				       WARN_STRICT_OVERFLOW_COMPARISON);
4941 	      return build2 (code == TRUTH_ANDIF_EXPR
4942 			     ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4943 			     type, lhs, rhs);
4944 	    }
4945 	}
4946     }
4947 
4948   return 0;
4949 }
4950 
4951 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4952    bit value.  Arrange things so the extra bits will be set to zero if and
4953    only if C is signed-extended to its full width.  If MASK is nonzero,
4954    it is an INTEGER_CST that should be AND'ed with the extra bits.  */
4955 
4956 static tree
unextend(tree c,int p,int unsignedp,tree mask)4957 unextend (tree c, int p, int unsignedp, tree mask)
4958 {
4959   tree type = TREE_TYPE (c);
4960   int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4961   tree temp;
4962 
4963   if (p == modesize || unsignedp)
4964     return c;
4965 
4966   /* We work by getting just the sign bit into the low-order bit, then
4967      into the high-order bit, then sign-extend.  We then XOR that value
4968      with C.  */
4969   temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4970   temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4971 
4972   /* We must use a signed type in order to get an arithmetic right shift.
4973      However, we must also avoid introducing accidental overflows, so that
4974      a subsequent call to integer_zerop will work.  Hence we must
4975      do the type conversion here.  At this point, the constant is either
4976      zero or one, and the conversion to a signed type can never overflow.
4977      We could get an overflow if this conversion is done anywhere else.  */
4978   if (TYPE_UNSIGNED (type))
4979     temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4980 
4981   temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4982   temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4983   if (mask != 0)
4984     temp = const_binop (BIT_AND_EXPR, temp,
4985 			fold_convert (TREE_TYPE (c), mask), 0);
4986   /* If necessary, convert the type back to match the type of C.  */
4987   if (TYPE_UNSIGNED (type))
4988     temp = fold_convert (type, temp);
4989 
4990   return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4991 }
4992 
4993 /* Find ways of folding logical expressions of LHS and RHS:
4994    Try to merge two comparisons to the same innermost item.
4995    Look for range tests like "ch >= '0' && ch <= '9'".
4996    Look for combinations of simple terms on machines with expensive branches
4997    and evaluate the RHS unconditionally.
4998 
4999    For example, if we have p->a == 2 && p->b == 4 and we can make an
5000    object large enough to span both A and B, we can do this with a comparison
5001    against the object ANDed with the a mask.
5002 
5003    If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5004    operations to do this with one comparison.
5005 
5006    We check for both normal comparisons and the BIT_AND_EXPRs made this by
5007    function and the one above.
5008 
5009    CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
5010    TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5011 
5012    TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5013    two operands.
5014 
5015    We return the simplified tree or 0 if no optimization is possible.  */
5016 
5017 static tree
fold_truthop(enum tree_code code,tree truth_type,tree lhs,tree rhs)5018 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5019 {
5020   /* If this is the "or" of two comparisons, we can do something if
5021      the comparisons are NE_EXPR.  If this is the "and", we can do something
5022      if the comparisons are EQ_EXPR.  I.e.,
5023 	(a->b == 2 && a->c == 4) can become (a->new == NEW).
5024 
5025      WANTED_CODE is this operation code.  For single bit fields, we can
5026      convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5027      comparison for one-bit fields.  */
5028 
5029   enum tree_code wanted_code;
5030   enum tree_code lcode, rcode;
5031   tree ll_arg, lr_arg, rl_arg, rr_arg;
5032   tree ll_inner, lr_inner, rl_inner, rr_inner;
5033   HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5034   HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5035   HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5036   HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5037   int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5038   enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5039   enum machine_mode lnmode, rnmode;
5040   tree ll_mask, lr_mask, rl_mask, rr_mask;
5041   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5042   tree l_const, r_const;
5043   tree lntype, rntype, result;
5044   int first_bit, end_bit;
5045   int volatilep;
5046   tree orig_lhs = lhs, orig_rhs = rhs;
5047   enum tree_code orig_code = code;
5048 
5049   /* Start by getting the comparison codes.  Fail if anything is volatile.
5050      If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5051      it were surrounded with a NE_EXPR.  */
5052 
5053   if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5054     return 0;
5055 
5056   lcode = TREE_CODE (lhs);
5057   rcode = TREE_CODE (rhs);
5058 
5059   if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5060     {
5061       lhs = build2 (NE_EXPR, truth_type, lhs,
5062 		    build_int_cst (TREE_TYPE (lhs), 0));
5063       lcode = NE_EXPR;
5064     }
5065 
5066   if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5067     {
5068       rhs = build2 (NE_EXPR, truth_type, rhs,
5069 		    build_int_cst (TREE_TYPE (rhs), 0));
5070       rcode = NE_EXPR;
5071     }
5072 
5073   if (TREE_CODE_CLASS (lcode) != tcc_comparison
5074       || TREE_CODE_CLASS (rcode) != tcc_comparison)
5075     return 0;
5076 
5077   ll_arg = TREE_OPERAND (lhs, 0);
5078   lr_arg = TREE_OPERAND (lhs, 1);
5079   rl_arg = TREE_OPERAND (rhs, 0);
5080   rr_arg = TREE_OPERAND (rhs, 1);
5081 
5082   /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
5083   if (simple_operand_p (ll_arg)
5084       && simple_operand_p (lr_arg))
5085     {
5086       tree result;
5087       if (operand_equal_p (ll_arg, rl_arg, 0)
5088           && operand_equal_p (lr_arg, rr_arg, 0))
5089 	{
5090           result = combine_comparisons (code, lcode, rcode,
5091 					truth_type, ll_arg, lr_arg);
5092 	  if (result)
5093 	    return result;
5094 	}
5095       else if (operand_equal_p (ll_arg, rr_arg, 0)
5096                && operand_equal_p (lr_arg, rl_arg, 0))
5097 	{
5098           result = combine_comparisons (code, lcode,
5099 					swap_tree_comparison (rcode),
5100 					truth_type, ll_arg, lr_arg);
5101 	  if (result)
5102 	    return result;
5103 	}
5104     }
5105 
5106   code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5107 	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5108 
5109   /* If the RHS can be evaluated unconditionally and its operands are
5110      simple, it wins to evaluate the RHS unconditionally on machines
5111      with expensive branches.  In this case, this isn't a comparison
5112      that can be merged.  Avoid doing this if the RHS is a floating-point
5113      comparison since those can trap.  */
5114 
5115   if (BRANCH_COST >= 2
5116       && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5117       && simple_operand_p (rl_arg)
5118       && simple_operand_p (rr_arg))
5119     {
5120       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
5121       if (code == TRUTH_OR_EXPR
5122 	  && lcode == NE_EXPR && integer_zerop (lr_arg)
5123 	  && rcode == NE_EXPR && integer_zerop (rr_arg)
5124 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5125 	return build2 (NE_EXPR, truth_type,
5126 		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5127 			       ll_arg, rl_arg),
5128 		       build_int_cst (TREE_TYPE (ll_arg), 0));
5129 
5130       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
5131       if (code == TRUTH_AND_EXPR
5132 	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
5133 	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
5134 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5135 	return build2 (EQ_EXPR, truth_type,
5136 		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5137 			       ll_arg, rl_arg),
5138 		       build_int_cst (TREE_TYPE (ll_arg), 0));
5139 
5140       if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5141 	{
5142 	  if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5143 	    return build2 (code, truth_type, lhs, rhs);
5144 	  return NULL_TREE;
5145 	}
5146     }
5147 
5148   /* See if the comparisons can be merged.  Then get all the parameters for
5149      each side.  */
5150 
5151   if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5152       || (rcode != EQ_EXPR && rcode != NE_EXPR))
5153     return 0;
5154 
5155   volatilep = 0;
5156   ll_inner = decode_field_reference (ll_arg,
5157 				     &ll_bitsize, &ll_bitpos, &ll_mode,
5158 				     &ll_unsignedp, &volatilep, &ll_mask,
5159 				     &ll_and_mask);
5160   lr_inner = decode_field_reference (lr_arg,
5161 				     &lr_bitsize, &lr_bitpos, &lr_mode,
5162 				     &lr_unsignedp, &volatilep, &lr_mask,
5163 				     &lr_and_mask);
5164   rl_inner = decode_field_reference (rl_arg,
5165 				     &rl_bitsize, &rl_bitpos, &rl_mode,
5166 				     &rl_unsignedp, &volatilep, &rl_mask,
5167 				     &rl_and_mask);
5168   rr_inner = decode_field_reference (rr_arg,
5169 				     &rr_bitsize, &rr_bitpos, &rr_mode,
5170 				     &rr_unsignedp, &volatilep, &rr_mask,
5171 				     &rr_and_mask);
5172 
5173   /* It must be true that the inner operation on the lhs of each
5174      comparison must be the same if we are to be able to do anything.
5175      Then see if we have constants.  If not, the same must be true for
5176      the rhs's.  */
5177   if (volatilep || ll_inner == 0 || rl_inner == 0
5178       || ! operand_equal_p (ll_inner, rl_inner, 0))
5179     return 0;
5180 
5181   if (TREE_CODE (lr_arg) == INTEGER_CST
5182       && TREE_CODE (rr_arg) == INTEGER_CST)
5183     l_const = lr_arg, r_const = rr_arg;
5184   else if (lr_inner == 0 || rr_inner == 0
5185 	   || ! operand_equal_p (lr_inner, rr_inner, 0))
5186     return 0;
5187   else
5188     l_const = r_const = 0;
5189 
5190   /* If either comparison code is not correct for our logical operation,
5191      fail.  However, we can convert a one-bit comparison against zero into
5192      the opposite comparison against that bit being set in the field.  */
5193 
5194   wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5195   if (lcode != wanted_code)
5196     {
5197       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5198 	{
5199 	  /* Make the left operand unsigned, since we are only interested
5200 	     in the value of one bit.  Otherwise we are doing the wrong
5201 	     thing below.  */
5202 	  ll_unsignedp = 1;
5203 	  l_const = ll_mask;
5204 	}
5205       else
5206 	return 0;
5207     }
5208 
5209   /* This is analogous to the code for l_const above.  */
5210   if (rcode != wanted_code)
5211     {
5212       if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5213 	{
5214 	  rl_unsignedp = 1;
5215 	  r_const = rl_mask;
5216 	}
5217       else
5218 	return 0;
5219     }
5220 
5221   /* After this point all optimizations will generate bit-field
5222      references, which we might not want.  */
5223   if (! lang_hooks.can_use_bit_fields_p ())
5224     return 0;
5225 
5226   /* See if we can find a mode that contains both fields being compared on
5227      the left.  If we can't, fail.  Otherwise, update all constants and masks
5228      to be relative to a field of that size.  */
5229   first_bit = MIN (ll_bitpos, rl_bitpos);
5230   end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5231   lnmode = get_best_mode (end_bit - first_bit, first_bit,
5232 			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5233 			  volatilep);
5234   if (lnmode == VOIDmode)
5235     return 0;
5236 
5237   lnbitsize = GET_MODE_BITSIZE (lnmode);
5238   lnbitpos = first_bit & ~ (lnbitsize - 1);
5239   lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5240   xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5241 
5242   if (BYTES_BIG_ENDIAN)
5243     {
5244       xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5245       xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5246     }
5247 
5248   ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5249 			 size_int (xll_bitpos), 0);
5250   rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5251 			 size_int (xrl_bitpos), 0);
5252 
5253   if (l_const)
5254     {
5255       l_const = fold_convert (lntype, l_const);
5256       l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5257       l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5258       if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5259 					fold_build1 (BIT_NOT_EXPR,
5260 						     lntype, ll_mask),
5261 					0)))
5262 	{
5263 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5264 
5265 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5266 	}
5267     }
5268   if (r_const)
5269     {
5270       r_const = fold_convert (lntype, r_const);
5271       r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5272       r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5273       if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5274 					fold_build1 (BIT_NOT_EXPR,
5275 						     lntype, rl_mask),
5276 					0)))
5277 	{
5278 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5279 
5280 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5281 	}
5282     }
5283 
5284   /* If the right sides are not constant, do the same for it.  Also,
5285      disallow this optimization if a size or signedness mismatch occurs
5286      between the left and right sides.  */
5287   if (l_const == 0)
5288     {
5289       if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5290 	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5291 	  /* Make sure the two fields on the right
5292 	     correspond to the left without being swapped.  */
5293 	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5294 	return 0;
5295 
5296       first_bit = MIN (lr_bitpos, rr_bitpos);
5297       end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5298       rnmode = get_best_mode (end_bit - first_bit, first_bit,
5299 			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5300 			      volatilep);
5301       if (rnmode == VOIDmode)
5302 	return 0;
5303 
5304       rnbitsize = GET_MODE_BITSIZE (rnmode);
5305       rnbitpos = first_bit & ~ (rnbitsize - 1);
5306       rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5307       xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5308 
5309       if (BYTES_BIG_ENDIAN)
5310 	{
5311 	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5312 	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5313 	}
5314 
5315       lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5316 			     size_int (xlr_bitpos), 0);
5317       rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5318 			     size_int (xrr_bitpos), 0);
5319 
5320       /* Make a mask that corresponds to both fields being compared.
5321 	 Do this for both items being compared.  If the operands are the
5322 	 same size and the bits being compared are in the same position
5323 	 then we can do this by masking both and comparing the masked
5324 	 results.  */
5325       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5326       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5327       if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5328 	{
5329 	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5330 				    ll_unsignedp || rl_unsignedp);
5331 	  if (! all_ones_mask_p (ll_mask, lnbitsize))
5332 	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5333 
5334 	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5335 				    lr_unsignedp || rr_unsignedp);
5336 	  if (! all_ones_mask_p (lr_mask, rnbitsize))
5337 	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5338 
5339 	  return build2 (wanted_code, truth_type, lhs, rhs);
5340 	}
5341 
5342       /* There is still another way we can do something:  If both pairs of
5343 	 fields being compared are adjacent, we may be able to make a wider
5344 	 field containing them both.
5345 
5346 	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
5347 	 the mask must be shifted to account for the shift done by
5348 	 make_bit_field_ref.  */
5349       if ((ll_bitsize + ll_bitpos == rl_bitpos
5350 	   && lr_bitsize + lr_bitpos == rr_bitpos)
5351 	  || (ll_bitpos == rl_bitpos + rl_bitsize
5352 	      && lr_bitpos == rr_bitpos + rr_bitsize))
5353 	{
5354 	  tree type;
5355 
5356 	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5357 				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5358 	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5359 				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5360 
5361 	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5362 				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5363 	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5364 				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5365 
5366 	  /* Convert to the smaller type before masking out unwanted bits.  */
5367 	  type = lntype;
5368 	  if (lntype != rntype)
5369 	    {
5370 	      if (lnbitsize > rnbitsize)
5371 		{
5372 		  lhs = fold_convert (rntype, lhs);
5373 		  ll_mask = fold_convert (rntype, ll_mask);
5374 		  type = rntype;
5375 		}
5376 	      else if (lnbitsize < rnbitsize)
5377 		{
5378 		  rhs = fold_convert (lntype, rhs);
5379 		  lr_mask = fold_convert (lntype, lr_mask);
5380 		  type = lntype;
5381 		}
5382 	    }
5383 
5384 	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5385 	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5386 
5387 	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5388 	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5389 
5390 	  return build2 (wanted_code, truth_type, lhs, rhs);
5391 	}
5392 
5393       return 0;
5394     }
5395 
5396   /* Handle the case of comparisons with constants.  If there is something in
5397      common between the masks, those bits of the constants must be the same.
5398      If not, the condition is always false.  Test for this to avoid generating
5399      incorrect code below.  */
5400   result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5401   if (! integer_zerop (result)
5402       && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5403 			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5404     {
5405       if (wanted_code == NE_EXPR)
5406 	{
5407 	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
5408 	  return constant_boolean_node (true, truth_type);
5409 	}
5410       else
5411 	{
5412 	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5413 	  return constant_boolean_node (false, truth_type);
5414 	}
5415     }
5416 
5417   /* Construct the expression we will return.  First get the component
5418      reference we will make.  Unless the mask is all ones the width of
5419      that field, perform the mask operation.  Then compare with the
5420      merged constant.  */
5421   result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5422 			       ll_unsignedp || rl_unsignedp);
5423 
5424   ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5425   if (! all_ones_mask_p (ll_mask, lnbitsize))
5426     result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5427 
5428   return build2 (wanted_code, truth_type, result,
5429 		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5430 }
5431 
5432 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5433    constant.  */
5434 
5435 static tree
optimize_minmax_comparison(enum tree_code code,tree type,tree op0,tree op1)5436 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5437 {
5438   tree arg0 = op0;
5439   enum tree_code op_code;
5440   tree comp_const = op1;
5441   tree minmax_const;
5442   int consts_equal, consts_lt;
5443   tree inner;
5444 
5445   STRIP_SIGN_NOPS (arg0);
5446 
5447   op_code = TREE_CODE (arg0);
5448   minmax_const = TREE_OPERAND (arg0, 1);
5449   consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5450   consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5451   inner = TREE_OPERAND (arg0, 0);
5452 
5453   /* If something does not permit us to optimize, return the original tree.  */
5454   if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5455       || TREE_CODE (comp_const) != INTEGER_CST
5456       || TREE_CONSTANT_OVERFLOW (comp_const)
5457       || TREE_CODE (minmax_const) != INTEGER_CST
5458       || TREE_CONSTANT_OVERFLOW (minmax_const))
5459     return NULL_TREE;
5460 
5461   /* Now handle all the various comparison codes.  We only handle EQ_EXPR
5462      and GT_EXPR, doing the rest with recursive calls using logical
5463      simplifications.  */
5464   switch (code)
5465     {
5466     case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
5467       {
5468 	tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5469 					  type, op0, op1);
5470 	if (tem)
5471 	  return invert_truthvalue (tem);
5472 	return NULL_TREE;
5473       }
5474 
5475     case GE_EXPR:
5476       return
5477 	fold_build2 (TRUTH_ORIF_EXPR, type,
5478 		     optimize_minmax_comparison
5479 		     (EQ_EXPR, type, arg0, comp_const),
5480 		     optimize_minmax_comparison
5481 		     (GT_EXPR, type, arg0, comp_const));
5482 
5483     case EQ_EXPR:
5484       if (op_code == MAX_EXPR && consts_equal)
5485 	/* MAX (X, 0) == 0  ->  X <= 0  */
5486 	return fold_build2 (LE_EXPR, type, inner, comp_const);
5487 
5488       else if (op_code == MAX_EXPR && consts_lt)
5489 	/* MAX (X, 0) == 5  ->  X == 5   */
5490 	return fold_build2 (EQ_EXPR, type, inner, comp_const);
5491 
5492       else if (op_code == MAX_EXPR)
5493 	/* MAX (X, 0) == -1  ->  false  */
5494 	return omit_one_operand (type, integer_zero_node, inner);
5495 
5496       else if (consts_equal)
5497 	/* MIN (X, 0) == 0  ->  X >= 0  */
5498 	return fold_build2 (GE_EXPR, type, inner, comp_const);
5499 
5500       else if (consts_lt)
5501 	/* MIN (X, 0) == 5  ->  false  */
5502 	return omit_one_operand (type, integer_zero_node, inner);
5503 
5504       else
5505 	/* MIN (X, 0) == -1  ->  X == -1  */
5506 	return fold_build2 (EQ_EXPR, type, inner, comp_const);
5507 
5508     case GT_EXPR:
5509       if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5510 	/* MAX (X, 0) > 0  ->  X > 0
5511 	   MAX (X, 0) > 5  ->  X > 5  */
5512 	return fold_build2 (GT_EXPR, type, inner, comp_const);
5513 
5514       else if (op_code == MAX_EXPR)
5515 	/* MAX (X, 0) > -1  ->  true  */
5516 	return omit_one_operand (type, integer_one_node, inner);
5517 
5518       else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5519 	/* MIN (X, 0) > 0  ->  false
5520 	   MIN (X, 0) > 5  ->  false  */
5521 	return omit_one_operand (type, integer_zero_node, inner);
5522 
5523       else
5524 	/* MIN (X, 0) > -1  ->  X > -1  */
5525 	return fold_build2 (GT_EXPR, type, inner, comp_const);
5526 
5527     default:
5528       return NULL_TREE;
5529     }
5530 }
5531 
5532 /* T is an integer expression that is being multiplied, divided, or taken a
5533    modulus (CODE says which and what kind of divide or modulus) by a
5534    constant C.  See if we can eliminate that operation by folding it with
5535    other operations already in T.  WIDE_TYPE, if non-null, is a type that
5536    should be used for the computation if wider than our type.
5537 
5538    For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5539    (X * 2) + (Y * 4).  We must, however, be assured that either the original
5540    expression would not overflow or that overflow is undefined for the type
5541    in the language in question.
5542 
5543    We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5544    the machine has a multiply-accumulate insn or that this is part of an
5545    addressing calculation.
5546 
5547    If we return a non-null expression, it is an equivalent form of the
5548    original computation, but need not be in the original type.
5549 
5550    We set *STRICT_OVERFLOW_P to true if the return values depends on
5551    signed overflow being undefined.  Otherwise we do not change
5552    *STRICT_OVERFLOW_P.  */
5553 
5554 static tree
extract_muldiv(tree t,tree c,enum tree_code code,tree wide_type,bool * strict_overflow_p)5555 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5556 		bool *strict_overflow_p)
5557 {
5558   /* To avoid exponential search depth, refuse to allow recursion past
5559      three levels.  Beyond that (1) it's highly unlikely that we'll find
5560      something interesting and (2) we've probably processed it before
5561      when we built the inner expression.  */
5562 
5563   static int depth;
5564   tree ret;
5565 
5566   if (depth > 3)
5567     return NULL;
5568 
5569   depth++;
5570   ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5571   depth--;
5572 
5573   return ret;
5574 }
5575 
5576 static tree
extract_muldiv_1(tree t,tree c,enum tree_code code,tree wide_type,bool * strict_overflow_p)5577 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5578 		  bool *strict_overflow_p)
5579 {
5580   tree type = TREE_TYPE (t);
5581   enum tree_code tcode = TREE_CODE (t);
5582   tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5583 				   > GET_MODE_SIZE (TYPE_MODE (type)))
5584 		? wide_type : type);
5585   tree t1, t2;
5586   int same_p = tcode == code;
5587   tree op0 = NULL_TREE, op1 = NULL_TREE;
5588   bool sub_strict_overflow_p;
5589 
5590   /* Don't deal with constants of zero here; they confuse the code below.  */
5591   if (integer_zerop (c))
5592     return NULL_TREE;
5593 
5594   if (TREE_CODE_CLASS (tcode) == tcc_unary)
5595     op0 = TREE_OPERAND (t, 0);
5596 
5597   if (TREE_CODE_CLASS (tcode) == tcc_binary)
5598     op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5599 
5600   /* Note that we need not handle conditional operations here since fold
5601      already handles those cases.  So just do arithmetic here.  */
5602   switch (tcode)
5603     {
5604     case INTEGER_CST:
5605       /* For a constant, we can always simplify if we are a multiply
5606 	 or (for divide and modulus) if it is a multiple of our constant.  */
5607       if (code == MULT_EXPR
5608 	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5609 	return const_binop (code, fold_convert (ctype, t),
5610 			    fold_convert (ctype, c), 0);
5611       break;
5612 
5613     case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
5614       /* If op0 is an expression ...  */
5615       if ((COMPARISON_CLASS_P (op0)
5616 	   || UNARY_CLASS_P (op0)
5617 	   || BINARY_CLASS_P (op0)
5618 	   || EXPRESSION_CLASS_P (op0))
5619 	  /* ... and is unsigned, and its type is smaller than ctype,
5620 	     then we cannot pass through as widening.  */
5621 	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5622 	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5623 		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5624 	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
5625 	           > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5626 	      /* ... or this is a truncation (t is narrower than op0),
5627 		 then we cannot pass through this narrowing.  */
5628 	      || (GET_MODE_SIZE (TYPE_MODE (type))
5629 		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5630 	      /* ... or signedness changes for division or modulus,
5631 		 then we cannot pass through this conversion.  */
5632 	      || (code != MULT_EXPR
5633 		  && (TYPE_UNSIGNED (ctype)
5634 		      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5635 	break;
5636 
5637       /* Pass the constant down and see if we can make a simplification.  If
5638 	 we can, replace this expression with the inner simplification for
5639 	 possible later conversion to our or some other type.  */
5640       if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5641 	  && TREE_CODE (t2) == INTEGER_CST
5642 	  && ! TREE_CONSTANT_OVERFLOW (t2)
5643 	  && (0 != (t1 = extract_muldiv (op0, t2, code,
5644 					 code == MULT_EXPR
5645 					 ? ctype : NULL_TREE,
5646 					 strict_overflow_p))))
5647 	return t1;
5648       break;
5649 
5650     case ABS_EXPR:
5651       /* If widening the type changes it from signed to unsigned, then we
5652          must avoid building ABS_EXPR itself as unsigned.  */
5653       if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5654         {
5655           tree cstype = (*lang_hooks.types.signed_type) (ctype);
5656           if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5657 	      != 0)
5658             {
5659               t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5660               return fold_convert (ctype, t1);
5661             }
5662           break;
5663         }
5664       /* FALLTHROUGH */
5665     case NEGATE_EXPR:
5666       if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5667 	  != 0)
5668 	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5669       break;
5670 
5671     case MIN_EXPR:  case MAX_EXPR:
5672       /* If widening the type changes the signedness, then we can't perform
5673 	 this optimization as that changes the result.  */
5674       if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5675 	break;
5676 
5677       /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
5678       sub_strict_overflow_p = false;
5679       if ((t1 = extract_muldiv (op0, c, code, wide_type,
5680 				&sub_strict_overflow_p)) != 0
5681 	  && (t2 = extract_muldiv (op1, c, code, wide_type,
5682 				   &sub_strict_overflow_p)) != 0)
5683 	{
5684 	  if (tree_int_cst_sgn (c) < 0)
5685 	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5686 	  if (sub_strict_overflow_p)
5687 	    *strict_overflow_p = true;
5688 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5689 			      fold_convert (ctype, t2));
5690 	}
5691       break;
5692 
5693     case LSHIFT_EXPR:  case RSHIFT_EXPR:
5694       /* If the second operand is constant, this is a multiplication
5695 	 or floor division, by a power of two, so we can treat it that
5696 	 way unless the multiplier or divisor overflows.  Signed
5697 	 left-shift overflow is implementation-defined rather than
5698 	 undefined in C90, so do not convert signed left shift into
5699 	 multiplication.  */
5700       if (TREE_CODE (op1) == INTEGER_CST
5701 	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5702 	  /* const_binop may not detect overflow correctly,
5703 	     so check for it explicitly here.  */
5704 	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5705 	  && TREE_INT_CST_HIGH (op1) == 0
5706 	  && 0 != (t1 = fold_convert (ctype,
5707 				      const_binop (LSHIFT_EXPR,
5708 						   size_one_node,
5709 						   op1, 0)))
5710 	  && ! TREE_OVERFLOW (t1))
5711 	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5712 				       ? MULT_EXPR : FLOOR_DIV_EXPR,
5713 				       ctype, fold_convert (ctype, op0), t1),
5714 			       c, code, wide_type, strict_overflow_p);
5715       break;
5716 
5717     case PLUS_EXPR:  case MINUS_EXPR:
5718       /* See if we can eliminate the operation on both sides.  If we can, we
5719 	 can return a new PLUS or MINUS.  If we can't, the only remaining
5720 	 cases where we can do anything are if the second operand is a
5721 	 constant.  */
5722       sub_strict_overflow_p = false;
5723       t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5724       t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5725       if (t1 != 0 && t2 != 0
5726 	  && (code == MULT_EXPR
5727 	      /* If not multiplication, we can only do this if both operands
5728 		 are divisible by c.  */
5729 	      || (multiple_of_p (ctype, op0, c)
5730 	          && multiple_of_p (ctype, op1, c))))
5731 	{
5732 	  if (sub_strict_overflow_p)
5733 	    *strict_overflow_p = true;
5734 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5735 			      fold_convert (ctype, t2));
5736 	}
5737 
5738       /* If this was a subtraction, negate OP1 and set it to be an addition.
5739 	 This simplifies the logic below.  */
5740       if (tcode == MINUS_EXPR)
5741 	tcode = PLUS_EXPR, op1 = negate_expr (op1);
5742 
5743       if (TREE_CODE (op1) != INTEGER_CST)
5744 	break;
5745 
5746       /* If either OP1 or C are negative, this optimization is not safe for
5747 	 some of the division and remainder types while for others we need
5748 	 to change the code.  */
5749       if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5750 	{
5751 	  if (code == CEIL_DIV_EXPR)
5752 	    code = FLOOR_DIV_EXPR;
5753 	  else if (code == FLOOR_DIV_EXPR)
5754 	    code = CEIL_DIV_EXPR;
5755 	  else if (code != MULT_EXPR
5756 		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5757 	    break;
5758 	}
5759 
5760       /* If it's a multiply or a division/modulus operation of a multiple
5761          of our constant, do the operation and verify it doesn't overflow.  */
5762       if (code == MULT_EXPR
5763 	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5764 	{
5765 	  op1 = const_binop (code, fold_convert (ctype, op1),
5766 			     fold_convert (ctype, c), 0);
5767 	  /* We allow the constant to overflow with wrapping semantics.  */
5768 	  if (op1 == 0
5769 	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5770 	    break;
5771 	}
5772       else
5773 	break;
5774 
5775       /* If we have an unsigned type is not a sizetype, we cannot widen
5776 	 the operation since it will change the result if the original
5777 	 computation overflowed.  */
5778       if (TYPE_UNSIGNED (ctype)
5779 	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5780 	  && ctype != type)
5781 	break;
5782 
5783       /* If we were able to eliminate our operation from the first side,
5784 	 apply our operation to the second side and reform the PLUS.  */
5785       if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5786 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5787 
5788       /* The last case is if we are a multiply.  In that case, we can
5789 	 apply the distributive law to commute the multiply and addition
5790 	 if the multiplication of the constants doesn't overflow.  */
5791       if (code == MULT_EXPR)
5792 	return fold_build2 (tcode, ctype,
5793 			    fold_build2 (code, ctype,
5794 					 fold_convert (ctype, op0),
5795 					 fold_convert (ctype, c)),
5796 			    op1);
5797 
5798       break;
5799 
5800     case MULT_EXPR:
5801       /* We have a special case here if we are doing something like
5802 	 (C * 8) % 4 since we know that's zero.  */
5803       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5804 	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5805 	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5806 	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5807 	return omit_one_operand (type, integer_zero_node, op0);
5808 
5809       /* ... fall through ...  */
5810 
5811     case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
5812     case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
5813       /* If we can extract our operation from the LHS, do so and return a
5814 	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
5815 	 do something only if the second operand is a constant.  */
5816       if (same_p
5817 	  && (t1 = extract_muldiv (op0, c, code, wide_type,
5818 				   strict_overflow_p)) != 0)
5819 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5820 			    fold_convert (ctype, op1));
5821       else if (tcode == MULT_EXPR && code == MULT_EXPR
5822 	       && (t1 = extract_muldiv (op1, c, code, wide_type,
5823 					strict_overflow_p)) != 0)
5824 	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5825 			    fold_convert (ctype, t1));
5826       else if (TREE_CODE (op1) != INTEGER_CST)
5827 	return 0;
5828 
5829       /* If these are the same operation types, we can associate them
5830 	 assuming no overflow.  */
5831       if (tcode == code
5832 	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5833 				     fold_convert (ctype, c), 0))
5834 	  && ! TREE_OVERFLOW (t1))
5835 	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5836 
5837       /* If these operations "cancel" each other, we have the main
5838 	 optimizations of this pass, which occur when either constant is a
5839 	 multiple of the other, in which case we replace this with either an
5840 	 operation or CODE or TCODE.
5841 
5842 	 If we have an unsigned type that is not a sizetype, we cannot do
5843 	 this since it will change the result if the original computation
5844 	 overflowed.  */
5845       if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5846 	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5847 	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5848 	      || (tcode == MULT_EXPR
5849 		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5850 		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5851 	{
5852 	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5853 	    {
5854 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
5855 		*strict_overflow_p = true;
5856 	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5857 				  fold_convert (ctype,
5858 						const_binop (TRUNC_DIV_EXPR,
5859 							     op1, c, 0)));
5860 	    }
5861 	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5862 	    {
5863 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
5864 		*strict_overflow_p = true;
5865 	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
5866 				  fold_convert (ctype,
5867 						const_binop (TRUNC_DIV_EXPR,
5868 							     c, op1, 0)));
5869 	    }
5870 	}
5871       break;
5872 
5873     default:
5874       break;
5875     }
5876 
5877   return 0;
5878 }
5879 
5880 /* Return a node which has the indicated constant VALUE (either 0 or
5881    1), and is of the indicated TYPE.  */
5882 
5883 tree
constant_boolean_node(int value,tree type)5884 constant_boolean_node (int value, tree type)
5885 {
5886   if (type == integer_type_node)
5887     return value ? integer_one_node : integer_zero_node;
5888   else if (type == boolean_type_node)
5889     return value ? boolean_true_node : boolean_false_node;
5890   else
5891     return build_int_cst (type, value);
5892 }
5893 
5894 
5895 /* Return true if expr looks like an ARRAY_REF and set base and
5896    offset to the appropriate trees.  If there is no offset,
5897    offset is set to NULL_TREE.  Base will be canonicalized to
5898    something you can get the element type from using
5899    TREE_TYPE (TREE_TYPE (base)).  Offset will be the offset
5900    in bytes to the base.  */
5901 
5902 static bool
extract_array_ref(tree expr,tree * base,tree * offset)5903 extract_array_ref (tree expr, tree *base, tree *offset)
5904 {
5905   /* One canonical form is a PLUS_EXPR with the first
5906      argument being an ADDR_EXPR with a possible NOP_EXPR
5907      attached.  */
5908   if (TREE_CODE (expr) == PLUS_EXPR)
5909     {
5910       tree op0 = TREE_OPERAND (expr, 0);
5911       tree inner_base, dummy1;
5912       /* Strip NOP_EXPRs here because the C frontends and/or
5913 	 folders present us (int *)&x.a + 4B possibly.  */
5914       STRIP_NOPS (op0);
5915       if (extract_array_ref (op0, &inner_base, &dummy1))
5916 	{
5917 	  *base = inner_base;
5918 	  if (dummy1 == NULL_TREE)
5919 	    *offset = TREE_OPERAND (expr, 1);
5920 	  else
5921 	    *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5922 				   dummy1, TREE_OPERAND (expr, 1));
5923 	  return true;
5924 	}
5925     }
5926   /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5927      which we transform into an ADDR_EXPR with appropriate
5928      offset.  For other arguments to the ADDR_EXPR we assume
5929      zero offset and as such do not care about the ADDR_EXPR
5930      type and strip possible nops from it.  */
5931   else if (TREE_CODE (expr) == ADDR_EXPR)
5932     {
5933       tree op0 = TREE_OPERAND (expr, 0);
5934       if (TREE_CODE (op0) == ARRAY_REF)
5935 	{
5936 	  tree idx = TREE_OPERAND (op0, 1);
5937 	  *base = TREE_OPERAND (op0, 0);
5938 	  *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5939 				 array_ref_element_size (op0));
5940 	}
5941       else
5942 	{
5943 	  /* Handle array-to-pointer decay as &a.  */
5944 	  if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5945 	    *base = TREE_OPERAND (expr, 0);
5946 	  else
5947 	    *base = expr;
5948 	  *offset = NULL_TREE;
5949 	}
5950       return true;
5951     }
5952   /* The next canonical form is a VAR_DECL with POINTER_TYPE.  */
5953   else if (SSA_VAR_P (expr)
5954 	   && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5955     {
5956       *base = expr;
5957       *offset = NULL_TREE;
5958       return true;
5959     }
5960 
5961   return false;
5962 }
5963 
5964 
5965 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5966    Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
5967    CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5968    expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
5969    COND is the first argument to CODE; otherwise (as in the example
5970    given here), it is the second argument.  TYPE is the type of the
5971    original expression.  Return NULL_TREE if no simplification is
5972    possible.  */
5973 
5974 static tree
fold_binary_op_with_conditional_arg(enum tree_code code,tree type,tree op0,tree op1,tree cond,tree arg,int cond_first_p)5975 fold_binary_op_with_conditional_arg (enum tree_code code,
5976 				     tree type, tree op0, tree op1,
5977 				     tree cond, tree arg, int cond_first_p)
5978 {
5979   tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5980   tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5981   tree test, true_value, false_value;
5982   tree lhs = NULL_TREE;
5983   tree rhs = NULL_TREE;
5984 
5985   /* This transformation is only worthwhile if we don't have to wrap
5986      arg in a SAVE_EXPR, and the operation can be simplified on at least
5987      one of the branches once its pushed inside the COND_EXPR.  */
5988   if (!TREE_CONSTANT (arg))
5989     return NULL_TREE;
5990 
5991   if (TREE_CODE (cond) == COND_EXPR)
5992     {
5993       test = TREE_OPERAND (cond, 0);
5994       true_value = TREE_OPERAND (cond, 1);
5995       false_value = TREE_OPERAND (cond, 2);
5996       /* If this operand throws an expression, then it does not make
5997 	 sense to try to perform a logical or arithmetic operation
5998 	 involving it.  */
5999       if (VOID_TYPE_P (TREE_TYPE (true_value)))
6000 	lhs = true_value;
6001       if (VOID_TYPE_P (TREE_TYPE (false_value)))
6002 	rhs = false_value;
6003     }
6004   else
6005     {
6006       tree testtype = TREE_TYPE (cond);
6007       test = cond;
6008       true_value = constant_boolean_node (true, testtype);
6009       false_value = constant_boolean_node (false, testtype);
6010     }
6011 
6012   arg = fold_convert (arg_type, arg);
6013   if (lhs == 0)
6014     {
6015       true_value = fold_convert (cond_type, true_value);
6016       if (cond_first_p)
6017 	lhs = fold_build2 (code, type, true_value, arg);
6018       else
6019 	lhs = fold_build2 (code, type, arg, true_value);
6020     }
6021   if (rhs == 0)
6022     {
6023       false_value = fold_convert (cond_type, false_value);
6024       if (cond_first_p)
6025 	rhs = fold_build2 (code, type, false_value, arg);
6026       else
6027 	rhs = fold_build2 (code, type, arg, false_value);
6028     }
6029 
6030   test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6031   return fold_convert (type, test);
6032 }
6033 
6034 
6035 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6036 
6037    If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6038    TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
6039    ADDEND is the same as X.
6040 
6041    X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6042    and finite.  The problematic cases are when X is zero, and its mode
6043    has signed zeros.  In the case of rounding towards -infinity,
6044    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
6045    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
6046 
6047 static bool
fold_real_zero_addition_p(tree type,tree addend,int negate)6048 fold_real_zero_addition_p (tree type, tree addend, int negate)
6049 {
6050   if (!real_zerop (addend))
6051     return false;
6052 
6053   /* Don't allow the fold with -fsignaling-nans.  */
6054   if (HONOR_SNANS (TYPE_MODE (type)))
6055     return false;
6056 
6057   /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
6058   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6059     return true;
6060 
6061   /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
6062   if (TREE_CODE (addend) == REAL_CST
6063       && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6064     negate = !negate;
6065 
6066   /* The mode has signed zeros, and we have to honor their sign.
6067      In this situation, there is only one case we can return true for.
6068      X - 0 is the same as X unless rounding towards -infinity is
6069      supported.  */
6070   return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6071 }
6072 
6073 /* Subroutine of fold() that checks comparisons of built-in math
6074    functions against real constants.
6075 
6076    FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6077    operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
6078    is the type of the result and ARG0 and ARG1 are the operands of the
6079    comparison.  ARG1 must be a TREE_REAL_CST.
6080 
6081    The function returns the constant folded tree if a simplification
6082    can be made, and NULL_TREE otherwise.  */
6083 
6084 static tree
fold_mathfn_compare(enum built_in_function fcode,enum tree_code code,tree type,tree arg0,tree arg1)6085 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6086 		     tree type, tree arg0, tree arg1)
6087 {
6088   REAL_VALUE_TYPE c;
6089 
6090   if (BUILTIN_SQRT_P (fcode))
6091     {
6092       tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
6093       enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6094 
6095       c = TREE_REAL_CST (arg1);
6096       if (REAL_VALUE_NEGATIVE (c))
6097 	{
6098 	  /* sqrt(x) < y is always false, if y is negative.  */
6099 	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6100 	    return omit_one_operand (type, integer_zero_node, arg);
6101 
6102 	  /* sqrt(x) > y is always true, if y is negative and we
6103 	     don't care about NaNs, i.e. negative values of x.  */
6104 	  if (code == NE_EXPR || !HONOR_NANS (mode))
6105 	    return omit_one_operand (type, integer_one_node, arg);
6106 
6107 	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
6108 	  return fold_build2 (GE_EXPR, type, arg,
6109 			      build_real (TREE_TYPE (arg), dconst0));
6110 	}
6111       else if (code == GT_EXPR || code == GE_EXPR)
6112 	{
6113 	  REAL_VALUE_TYPE c2;
6114 
6115 	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6116 	  real_convert (&c2, mode, &c2);
6117 
6118 	  if (REAL_VALUE_ISINF (c2))
6119 	    {
6120 	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
6121 	      if (HONOR_INFINITIES (mode))
6122 		return fold_build2 (EQ_EXPR, type, arg,
6123 				    build_real (TREE_TYPE (arg), c2));
6124 
6125 	      /* sqrt(x) > y is always false, when y is very large
6126 		 and we don't care about infinities.  */
6127 	      return omit_one_operand (type, integer_zero_node, arg);
6128 	    }
6129 
6130 	  /* sqrt(x) > c is the same as x > c*c.  */
6131 	  return fold_build2 (code, type, arg,
6132 			      build_real (TREE_TYPE (arg), c2));
6133 	}
6134       else if (code == LT_EXPR || code == LE_EXPR)
6135 	{
6136 	  REAL_VALUE_TYPE c2;
6137 
6138 	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6139 	  real_convert (&c2, mode, &c2);
6140 
6141 	  if (REAL_VALUE_ISINF (c2))
6142 	    {
6143 	      /* sqrt(x) < y is always true, when y is a very large
6144 		 value and we don't care about NaNs or Infinities.  */
6145 	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6146 		return omit_one_operand (type, integer_one_node, arg);
6147 
6148 	      /* sqrt(x) < y is x != +Inf when y is very large and we
6149 		 don't care about NaNs.  */
6150 	      if (! HONOR_NANS (mode))
6151 		return fold_build2 (NE_EXPR, type, arg,
6152 				    build_real (TREE_TYPE (arg), c2));
6153 
6154 	      /* sqrt(x) < y is x >= 0 when y is very large and we
6155 		 don't care about Infinities.  */
6156 	      if (! HONOR_INFINITIES (mode))
6157 		return fold_build2 (GE_EXPR, type, arg,
6158 				    build_real (TREE_TYPE (arg), dconst0));
6159 
6160 	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
6161 	      if (lang_hooks.decls.global_bindings_p () != 0
6162 		  || CONTAINS_PLACEHOLDER_P (arg))
6163 		return NULL_TREE;
6164 
6165 	      arg = save_expr (arg);
6166 	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
6167 				  fold_build2 (GE_EXPR, type, arg,
6168 					       build_real (TREE_TYPE (arg),
6169 							   dconst0)),
6170 				  fold_build2 (NE_EXPR, type, arg,
6171 					       build_real (TREE_TYPE (arg),
6172 							   c2)));
6173 	    }
6174 
6175 	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
6176 	  if (! HONOR_NANS (mode))
6177 	    return fold_build2 (code, type, arg,
6178 				build_real (TREE_TYPE (arg), c2));
6179 
6180 	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
6181 	  if (lang_hooks.decls.global_bindings_p () == 0
6182 	      && ! CONTAINS_PLACEHOLDER_P (arg))
6183 	    {
6184 	      arg = save_expr (arg);
6185 	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
6186 				  fold_build2 (GE_EXPR, type, arg,
6187 					       build_real (TREE_TYPE (arg),
6188 							   dconst0)),
6189 				  fold_build2 (code, type, arg,
6190 					       build_real (TREE_TYPE (arg),
6191 							   c2)));
6192 	    }
6193 	}
6194     }
6195 
6196   return NULL_TREE;
6197 }
6198 
6199 /* Subroutine of fold() that optimizes comparisons against Infinities,
6200    either +Inf or -Inf.
6201 
6202    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6203    GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
6204    are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.
6205 
6206    The function returns the constant folded tree if a simplification
6207    can be made, and NULL_TREE otherwise.  */
6208 
6209 static tree
fold_inf_compare(enum tree_code code,tree type,tree arg0,tree arg1)6210 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6211 {
6212   enum machine_mode mode;
6213   REAL_VALUE_TYPE max;
6214   tree temp;
6215   bool neg;
6216 
6217   mode = TYPE_MODE (TREE_TYPE (arg0));
6218 
6219   /* For negative infinity swap the sense of the comparison.  */
6220   neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6221   if (neg)
6222     code = swap_tree_comparison (code);
6223 
6224   switch (code)
6225     {
6226     case GT_EXPR:
6227       /* x > +Inf is always false, if with ignore sNANs.  */
6228       if (HONOR_SNANS (mode))
6229         return NULL_TREE;
6230       return omit_one_operand (type, integer_zero_node, arg0);
6231 
6232     case LE_EXPR:
6233       /* x <= +Inf is always true, if we don't case about NaNs.  */
6234       if (! HONOR_NANS (mode))
6235 	return omit_one_operand (type, integer_one_node, arg0);
6236 
6237       /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
6238       if (lang_hooks.decls.global_bindings_p () == 0
6239 	  && ! CONTAINS_PLACEHOLDER_P (arg0))
6240 	{
6241 	  arg0 = save_expr (arg0);
6242 	  return fold_build2 (EQ_EXPR, type, arg0, arg0);
6243 	}
6244       break;
6245 
6246     case EQ_EXPR:
6247     case GE_EXPR:
6248       /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
6249       real_maxval (&max, neg, mode);
6250       return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6251 			  arg0, build_real (TREE_TYPE (arg0), max));
6252 
6253     case LT_EXPR:
6254       /* x < +Inf is always equal to x <= DBL_MAX.  */
6255       real_maxval (&max, neg, mode);
6256       return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6257 			  arg0, build_real (TREE_TYPE (arg0), max));
6258 
6259     case NE_EXPR:
6260       /* x != +Inf is always equal to !(x > DBL_MAX).  */
6261       real_maxval (&max, neg, mode);
6262       if (! HONOR_NANS (mode))
6263 	return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6264 			    arg0, build_real (TREE_TYPE (arg0), max));
6265 
6266       temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6267 			  arg0, build_real (TREE_TYPE (arg0), max));
6268       return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6269 
6270     default:
6271       break;
6272     }
6273 
6274   return NULL_TREE;
6275 }
6276 
6277 /* Subroutine of fold() that optimizes comparisons of a division by
6278    a nonzero integer constant against an integer constant, i.e.
6279    X/C1 op C2.
6280 
6281    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6282    GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
6283    are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.
6284 
6285    The function returns the constant folded tree if a simplification
6286    can be made, and NULL_TREE otherwise.  */
6287 
6288 static tree
fold_div_compare(enum tree_code code,tree type,tree arg0,tree arg1)6289 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6290 {
6291   tree prod, tmp, hi, lo;
6292   tree arg00 = TREE_OPERAND (arg0, 0);
6293   tree arg01 = TREE_OPERAND (arg0, 1);
6294   unsigned HOST_WIDE_INT lpart;
6295   HOST_WIDE_INT hpart;
6296   bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6297   bool neg_overflow;
6298   int overflow;
6299 
6300   /* We have to do this the hard way to detect unsigned overflow.
6301      prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
6302   overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6303 				   TREE_INT_CST_HIGH (arg01),
6304 				   TREE_INT_CST_LOW (arg1),
6305 				   TREE_INT_CST_HIGH (arg1),
6306 				   &lpart, &hpart, unsigned_p);
6307   prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6308   prod = force_fit_type (prod, -1, overflow, false);
6309   neg_overflow = false;
6310 
6311   if (unsigned_p)
6312     {
6313       tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6314       lo = prod;
6315 
6316       /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
6317       overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6318 				       TREE_INT_CST_HIGH (prod),
6319 				       TREE_INT_CST_LOW (tmp),
6320 				       TREE_INT_CST_HIGH (tmp),
6321 				       &lpart, &hpart, unsigned_p);
6322       hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6323       hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6324 			   TREE_CONSTANT_OVERFLOW (prod));
6325     }
6326   else if (tree_int_cst_sgn (arg01) >= 0)
6327     {
6328       tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6329       switch (tree_int_cst_sgn (arg1))
6330 	{
6331 	case -1:
6332 	  neg_overflow = true;
6333 	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6334 	  hi = prod;
6335 	  break;
6336 
6337 	case  0:
6338 	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6339 	  hi = tmp;
6340 	  break;
6341 
6342 	case  1:
6343           hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6344 	  lo = prod;
6345 	  break;
6346 
6347 	default:
6348 	  gcc_unreachable ();
6349 	}
6350     }
6351   else
6352     {
6353       /* A negative divisor reverses the relational operators.  */
6354       code = swap_tree_comparison (code);
6355 
6356       tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6357       switch (tree_int_cst_sgn (arg1))
6358 	{
6359 	case -1:
6360 	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6361 	  lo = prod;
6362 	  break;
6363 
6364 	case  0:
6365 	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6366 	  lo = tmp;
6367 	  break;
6368 
6369 	case  1:
6370 	  neg_overflow = true;
6371 	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6372 	  hi = prod;
6373 	  break;
6374 
6375 	default:
6376 	  gcc_unreachable ();
6377 	}
6378     }
6379 
6380   switch (code)
6381     {
6382     case EQ_EXPR:
6383       if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6384 	return omit_one_operand (type, integer_zero_node, arg00);
6385       if (TREE_OVERFLOW (hi))
6386 	return fold_build2 (GE_EXPR, type, arg00, lo);
6387       if (TREE_OVERFLOW (lo))
6388 	return fold_build2 (LE_EXPR, type, arg00, hi);
6389       return build_range_check (type, arg00, 1, lo, hi);
6390 
6391     case NE_EXPR:
6392       if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6393 	return omit_one_operand (type, integer_one_node, arg00);
6394       if (TREE_OVERFLOW (hi))
6395 	return fold_build2 (LT_EXPR, type, arg00, lo);
6396       if (TREE_OVERFLOW (lo))
6397 	return fold_build2 (GT_EXPR, type, arg00, hi);
6398       return build_range_check (type, arg00, 0, lo, hi);
6399 
6400     case LT_EXPR:
6401       if (TREE_OVERFLOW (lo))
6402 	{
6403 	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
6404 	  return omit_one_operand (type, tmp, arg00);
6405 	}
6406       return fold_build2 (LT_EXPR, type, arg00, lo);
6407 
6408     case LE_EXPR:
6409       if (TREE_OVERFLOW (hi))
6410 	{
6411 	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
6412 	  return omit_one_operand (type, tmp, arg00);
6413 	}
6414       return fold_build2 (LE_EXPR, type, arg00, hi);
6415 
6416     case GT_EXPR:
6417       if (TREE_OVERFLOW (hi))
6418 	{
6419 	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
6420 	  return omit_one_operand (type, tmp, arg00);
6421 	}
6422       return fold_build2 (GT_EXPR, type, arg00, hi);
6423 
6424     case GE_EXPR:
6425       if (TREE_OVERFLOW (lo))
6426 	{
6427 	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
6428 	  return omit_one_operand (type, tmp, arg00);
6429 	}
6430       return fold_build2 (GE_EXPR, type, arg00, lo);
6431 
6432     default:
6433       break;
6434     }
6435 
6436   return NULL_TREE;
6437 }
6438 
6439 
6440 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6441    equality/inequality test, then return a simplified form of the test
6442    using a sign testing.  Otherwise return NULL.  TYPE is the desired
6443    result type.  */
6444 
6445 static tree
fold_single_bit_test_into_sign_test(enum tree_code code,tree arg0,tree arg1,tree result_type)6446 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6447 				     tree result_type)
6448 {
6449   /* If this is testing a single bit, we can optimize the test.  */
6450   if ((code == NE_EXPR || code == EQ_EXPR)
6451       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6452       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6453     {
6454       /* If we have (A & C) != 0 where C is the sign bit of A, convert
6455 	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
6456       tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6457 
6458       if (arg00 != NULL_TREE
6459 	  /* This is only a win if casting to a signed type is cheap,
6460 	     i.e. when arg00's type is not a partial mode.  */
6461 	  && TYPE_PRECISION (TREE_TYPE (arg00))
6462 	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6463 	{
6464 	  tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6465 	  return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6466 			      result_type, fold_convert (stype, arg00),
6467 			      build_int_cst (stype, 0));
6468 	}
6469     }
6470 
6471   return NULL_TREE;
6472 }
6473 
6474 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6475    equality/inequality test, then return a simplified form of
6476    the test using shifts and logical operations.  Otherwise return
6477    NULL.  TYPE is the desired result type.  */
6478 
6479 tree
fold_single_bit_test(enum tree_code code,tree arg0,tree arg1,tree result_type)6480 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6481 		      tree result_type)
6482 {
6483   /* If this is testing a single bit, we can optimize the test.  */
6484   if ((code == NE_EXPR || code == EQ_EXPR)
6485       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6486       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6487     {
6488       tree inner = TREE_OPERAND (arg0, 0);
6489       tree type = TREE_TYPE (arg0);
6490       int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6491       enum machine_mode operand_mode = TYPE_MODE (type);
6492       int ops_unsigned;
6493       tree signed_type, unsigned_type, intermediate_type;
6494       tree tem;
6495 
6496       /* First, see if we can fold the single bit test into a sign-bit
6497 	 test.  */
6498       tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6499 						 result_type);
6500       if (tem)
6501 	return tem;
6502 
6503       /* Otherwise we have (A & C) != 0 where C is a single bit,
6504 	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
6505 	 Similarly for (A & C) == 0.  */
6506 
6507       /* If INNER is a right shift of a constant and it plus BITNUM does
6508 	 not overflow, adjust BITNUM and INNER.  */
6509       if (TREE_CODE (inner) == RSHIFT_EXPR
6510 	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6511 	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6512 	  && bitnum < TYPE_PRECISION (type)
6513 	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6514 				   bitnum - TYPE_PRECISION (type)))
6515 	{
6516 	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6517 	  inner = TREE_OPERAND (inner, 0);
6518 	}
6519 
6520       /* If we are going to be able to omit the AND below, we must do our
6521 	 operations as unsigned.  If we must use the AND, we have a choice.
6522 	 Normally unsigned is faster, but for some machines signed is.  */
6523 #ifdef LOAD_EXTEND_OP
6524       ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6525 		      && !flag_syntax_only) ? 0 : 1;
6526 #else
6527       ops_unsigned = 1;
6528 #endif
6529 
6530       signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6531       unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6532       intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6533       inner = fold_convert (intermediate_type, inner);
6534 
6535       if (bitnum != 0)
6536 	inner = build2 (RSHIFT_EXPR, intermediate_type,
6537 			inner, size_int (bitnum));
6538 
6539       if (code == EQ_EXPR)
6540 	inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6541 			     inner, integer_one_node);
6542 
6543       /* Put the AND last so it can combine with more things.  */
6544       inner = build2 (BIT_AND_EXPR, intermediate_type,
6545 		      inner, integer_one_node);
6546 
6547       /* Make sure to return the proper type.  */
6548       inner = fold_convert (result_type, inner);
6549 
6550       return inner;
6551     }
6552   return NULL_TREE;
6553 }
6554 
6555 /* Check whether we are allowed to reorder operands arg0 and arg1,
6556    such that the evaluation of arg1 occurs before arg0.  */
6557 
6558 static bool
reorder_operands_p(tree arg0,tree arg1)6559 reorder_operands_p (tree arg0, tree arg1)
6560 {
6561   if (! flag_evaluation_order)
6562       return true;
6563   if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6564     return true;
6565   return ! TREE_SIDE_EFFECTS (arg0)
6566 	 && ! TREE_SIDE_EFFECTS (arg1);
6567 }
6568 
6569 /* Test whether it is preferable two swap two operands, ARG0 and
6570    ARG1, for example because ARG0 is an integer constant and ARG1
6571    isn't.  If REORDER is true, only recommend swapping if we can
6572    evaluate the operands in reverse order.  */
6573 
6574 bool
tree_swap_operands_p(tree arg0,tree arg1,bool reorder)6575 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6576 {
6577   STRIP_SIGN_NOPS (arg0);
6578   STRIP_SIGN_NOPS (arg1);
6579 
6580   if (TREE_CODE (arg1) == INTEGER_CST)
6581     return 0;
6582   if (TREE_CODE (arg0) == INTEGER_CST)
6583     return 1;
6584 
6585   if (TREE_CODE (arg1) == REAL_CST)
6586     return 0;
6587   if (TREE_CODE (arg0) == REAL_CST)
6588     return 1;
6589 
6590   if (TREE_CODE (arg1) == COMPLEX_CST)
6591     return 0;
6592   if (TREE_CODE (arg0) == COMPLEX_CST)
6593     return 1;
6594 
6595   if (TREE_CONSTANT (arg1))
6596     return 0;
6597   if (TREE_CONSTANT (arg0))
6598     return 1;
6599 
6600   if (optimize_size)
6601     return 0;
6602 
6603   if (reorder && flag_evaluation_order
6604       && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6605     return 0;
6606 
6607   if (DECL_P (arg1))
6608     return 0;
6609   if (DECL_P (arg0))
6610     return 1;
6611 
6612   /* It is preferable to swap two SSA_NAME to ensure a canonical form
6613      for commutative and comparison operators.  Ensuring a canonical
6614      form allows the optimizers to find additional redundancies without
6615      having to explicitly check for both orderings.  */
6616   if (TREE_CODE (arg0) == SSA_NAME
6617       && TREE_CODE (arg1) == SSA_NAME
6618       && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6619     return 1;
6620 
6621   return 0;
6622 }
6623 
6624 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6625    ARG0 is extended to a wider type.  */
6626 
6627 static tree
fold_widened_comparison(enum tree_code code,tree type,tree arg0,tree arg1)6628 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6629 {
6630   tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6631   tree arg1_unw;
6632   tree shorter_type, outer_type;
6633   tree min, max;
6634   bool above, below;
6635 
6636   if (arg0_unw == arg0)
6637     return NULL_TREE;
6638   shorter_type = TREE_TYPE (arg0_unw);
6639 
6640 #ifdef HAVE_canonicalize_funcptr_for_compare
6641   /* Disable this optimization if we're casting a function pointer
6642      type on targets that require function pointer canonicalization.  */
6643   if (HAVE_canonicalize_funcptr_for_compare
6644       && TREE_CODE (shorter_type) == POINTER_TYPE
6645       && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6646     return NULL_TREE;
6647 #endif
6648 
6649   if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6650     return NULL_TREE;
6651 
6652   arg1_unw = get_unwidened (arg1, shorter_type);
6653 
6654   /* If possible, express the comparison in the shorter mode.  */
6655   if ((code == EQ_EXPR || code == NE_EXPR
6656        || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6657       && (TREE_TYPE (arg1_unw) == shorter_type
6658 	  || (TREE_CODE (arg1_unw) == INTEGER_CST
6659 	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
6660 		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6661 	      && int_fits_type_p (arg1_unw, shorter_type))))
6662     return fold_build2 (code, type, arg0_unw,
6663 		       fold_convert (shorter_type, arg1_unw));
6664 
6665   if (TREE_CODE (arg1_unw) != INTEGER_CST
6666       || TREE_CODE (shorter_type) != INTEGER_TYPE
6667       || !int_fits_type_p (arg1_unw, shorter_type))
6668     return NULL_TREE;
6669 
6670   /* If we are comparing with the integer that does not fit into the range
6671      of the shorter type, the result is known.  */
6672   outer_type = TREE_TYPE (arg1_unw);
6673   min = lower_bound_in_type (outer_type, shorter_type);
6674   max = upper_bound_in_type (outer_type, shorter_type);
6675 
6676   above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6677 						   max, arg1_unw));
6678   below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6679 						   arg1_unw, min));
6680 
6681   switch (code)
6682     {
6683     case EQ_EXPR:
6684       if (above || below)
6685 	return omit_one_operand (type, integer_zero_node, arg0);
6686       break;
6687 
6688     case NE_EXPR:
6689       if (above || below)
6690 	return omit_one_operand (type, integer_one_node, arg0);
6691       break;
6692 
6693     case LT_EXPR:
6694     case LE_EXPR:
6695       if (above)
6696 	return omit_one_operand (type, integer_one_node, arg0);
6697       else if (below)
6698 	return omit_one_operand (type, integer_zero_node, arg0);
6699 
6700     case GT_EXPR:
6701     case GE_EXPR:
6702       if (above)
6703 	return omit_one_operand (type, integer_zero_node, arg0);
6704       else if (below)
6705 	return omit_one_operand (type, integer_one_node, arg0);
6706 
6707     default:
6708       break;
6709     }
6710 
6711   return NULL_TREE;
6712 }
6713 
6714 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6715    ARG0 just the signedness is changed.  */
6716 
6717 static tree
fold_sign_changed_comparison(enum tree_code code,tree type,tree arg0,tree arg1)6718 fold_sign_changed_comparison (enum tree_code code, tree type,
6719 			      tree arg0, tree arg1)
6720 {
6721   tree arg0_inner, tmp;
6722   tree inner_type, outer_type;
6723 
6724   if (TREE_CODE (arg0) != NOP_EXPR
6725       && TREE_CODE (arg0) != CONVERT_EXPR)
6726     return NULL_TREE;
6727 
6728   outer_type = TREE_TYPE (arg0);
6729   arg0_inner = TREE_OPERAND (arg0, 0);
6730   inner_type = TREE_TYPE (arg0_inner);
6731 
6732 #ifdef HAVE_canonicalize_funcptr_for_compare
6733   /* Disable this optimization if we're casting a function pointer
6734      type on targets that require function pointer canonicalization.  */
6735   if (HAVE_canonicalize_funcptr_for_compare
6736       && TREE_CODE (inner_type) == POINTER_TYPE
6737       && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6738     return NULL_TREE;
6739 #endif
6740 
6741   if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6742     return NULL_TREE;
6743 
6744   if (TREE_CODE (arg1) != INTEGER_CST
6745       && !((TREE_CODE (arg1) == NOP_EXPR
6746 	    || TREE_CODE (arg1) == CONVERT_EXPR)
6747 	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6748     return NULL_TREE;
6749 
6750   if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6751       && code != NE_EXPR
6752       && code != EQ_EXPR)
6753     return NULL_TREE;
6754 
6755   if (TREE_CODE (arg1) == INTEGER_CST)
6756     {
6757       tmp = build_int_cst_wide (inner_type,
6758 				TREE_INT_CST_LOW (arg1),
6759 				TREE_INT_CST_HIGH (arg1));
6760       arg1 = force_fit_type (tmp, 0,
6761 			     TREE_OVERFLOW (arg1),
6762 			     TREE_CONSTANT_OVERFLOW (arg1));
6763     }
6764   else
6765     arg1 = fold_convert (inner_type, arg1);
6766 
6767   return fold_build2 (code, type, arg0_inner, arg1);
6768 }
6769 
6770 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6771    step of the array.  Reconstructs s and delta in the case of s * delta
6772    being an integer constant (and thus already folded).
6773    ADDR is the address. MULT is the multiplicative expression.
6774    If the function succeeds, the new address expression is returned.  Otherwise
6775    NULL_TREE is returned.  */
6776 
6777 static tree
try_move_mult_to_index(enum tree_code code,tree addr,tree op1)6778 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6779 {
6780   tree s, delta, step;
6781   tree ref = TREE_OPERAND (addr, 0), pref;
6782   tree ret, pos;
6783   tree itype;
6784 
6785   /* Canonicalize op1 into a possibly non-constant delta
6786      and an INTEGER_CST s.  */
6787   if (TREE_CODE (op1) == MULT_EXPR)
6788     {
6789       tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6790 
6791       STRIP_NOPS (arg0);
6792       STRIP_NOPS (arg1);
6793 
6794       if (TREE_CODE (arg0) == INTEGER_CST)
6795         {
6796           s = arg0;
6797           delta = arg1;
6798         }
6799       else if (TREE_CODE (arg1) == INTEGER_CST)
6800         {
6801           s = arg1;
6802           delta = arg0;
6803         }
6804       else
6805         return NULL_TREE;
6806     }
6807   else if (TREE_CODE (op1) == INTEGER_CST)
6808     {
6809       delta = op1;
6810       s = NULL_TREE;
6811     }
6812   else
6813     {
6814       /* Simulate we are delta * 1.  */
6815       delta = op1;
6816       s = integer_one_node;
6817     }
6818 
6819   for (;; ref = TREE_OPERAND (ref, 0))
6820     {
6821       if (TREE_CODE (ref) == ARRAY_REF)
6822 	{
6823 	  itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6824 	  if (! itype)
6825 	    continue;
6826 
6827 	  step = array_ref_element_size (ref);
6828 	  if (TREE_CODE (step) != INTEGER_CST)
6829 	    continue;
6830 
6831 	  if (s)
6832 	    {
6833 	      if (! tree_int_cst_equal (step, s))
6834                 continue;
6835 	    }
6836 	  else
6837 	    {
6838 	      /* Try if delta is a multiple of step.  */
6839 	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6840 	      if (! tmp)
6841 		continue;
6842 	      delta = tmp;
6843 	    }
6844 
6845 	  break;
6846 	}
6847 
6848       if (!handled_component_p (ref))
6849 	return NULL_TREE;
6850     }
6851 
6852   /* We found the suitable array reference.  So copy everything up to it,
6853      and replace the index.  */
6854 
6855   pref = TREE_OPERAND (addr, 0);
6856   ret = copy_node (pref);
6857   pos = ret;
6858 
6859   while (pref != ref)
6860     {
6861       pref = TREE_OPERAND (pref, 0);
6862       TREE_OPERAND (pos, 0) = copy_node (pref);
6863       pos = TREE_OPERAND (pos, 0);
6864     }
6865 
6866   TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6867 				       fold_convert (itype,
6868 						     TREE_OPERAND (pos, 1)),
6869 				       fold_convert (itype, delta));
6870 
6871   return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6872 }
6873 
6874 
6875 /* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
6876    means A >= Y && A != MAX, but in this case we know that
6877    A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
6878 
6879 static tree
fold_to_nonsharp_ineq_using_bound(tree ineq,tree bound)6880 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6881 {
6882   tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6883 
6884   if (TREE_CODE (bound) == LT_EXPR)
6885     a = TREE_OPERAND (bound, 0);
6886   else if (TREE_CODE (bound) == GT_EXPR)
6887     a = TREE_OPERAND (bound, 1);
6888   else
6889     return NULL_TREE;
6890 
6891   typea = TREE_TYPE (a);
6892   if (!INTEGRAL_TYPE_P (typea)
6893       && !POINTER_TYPE_P (typea))
6894     return NULL_TREE;
6895 
6896   if (TREE_CODE (ineq) == LT_EXPR)
6897     {
6898       a1 = TREE_OPERAND (ineq, 1);
6899       y = TREE_OPERAND (ineq, 0);
6900     }
6901   else if (TREE_CODE (ineq) == GT_EXPR)
6902     {
6903       a1 = TREE_OPERAND (ineq, 0);
6904       y = TREE_OPERAND (ineq, 1);
6905     }
6906   else
6907     return NULL_TREE;
6908 
6909   if (TREE_TYPE (a1) != typea)
6910     return NULL_TREE;
6911 
6912   diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6913   if (!integer_onep (diff))
6914     return NULL_TREE;
6915 
6916   return fold_build2 (GE_EXPR, type, a, y);
6917 }
6918 
6919 /* Fold a sum or difference of at least one multiplication.
6920    Returns the folded tree or NULL if no simplification could be made.  */
6921 
6922 static tree
fold_plusminus_mult_expr(enum tree_code code,tree type,tree arg0,tree arg1)6923 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6924 {
6925   tree arg00, arg01, arg10, arg11;
6926   tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6927 
6928   /* (A * C) +- (B * C) -> (A+-B) * C.
6929      (A * C) +- A -> A * (C+-1).
6930      We are most concerned about the case where C is a constant,
6931      but other combinations show up during loop reduction.  Since
6932      it is not difficult, try all four possibilities.  */
6933 
6934   if (TREE_CODE (arg0) == MULT_EXPR)
6935     {
6936       arg00 = TREE_OPERAND (arg0, 0);
6937       arg01 = TREE_OPERAND (arg0, 1);
6938     }
6939   else
6940     {
6941       arg00 = arg0;
6942       arg01 = build_one_cst (type);
6943     }
6944   if (TREE_CODE (arg1) == MULT_EXPR)
6945     {
6946       arg10 = TREE_OPERAND (arg1, 0);
6947       arg11 = TREE_OPERAND (arg1, 1);
6948     }
6949   else
6950     {
6951       arg10 = arg1;
6952       arg11 = build_one_cst (type);
6953     }
6954   same = NULL_TREE;
6955 
6956   if (operand_equal_p (arg01, arg11, 0))
6957     same = arg01, alt0 = arg00, alt1 = arg10;
6958   else if (operand_equal_p (arg00, arg10, 0))
6959     same = arg00, alt0 = arg01, alt1 = arg11;
6960   else if (operand_equal_p (arg00, arg11, 0))
6961     same = arg00, alt0 = arg01, alt1 = arg10;
6962   else if (operand_equal_p (arg01, arg10, 0))
6963     same = arg01, alt0 = arg00, alt1 = arg11;
6964 
6965   /* No identical multiplicands; see if we can find a common
6966      power-of-two factor in non-power-of-two multiplies.  This
6967      can help in multi-dimensional array access.  */
6968   else if (host_integerp (arg01, 0)
6969 	   && host_integerp (arg11, 0))
6970     {
6971       HOST_WIDE_INT int01, int11, tmp;
6972       bool swap = false;
6973       tree maybe_same;
6974       int01 = TREE_INT_CST_LOW (arg01);
6975       int11 = TREE_INT_CST_LOW (arg11);
6976 
6977       /* Move min of absolute values to int11.  */
6978       if ((int01 >= 0 ? int01 : -int01)
6979 	  < (int11 >= 0 ? int11 : -int11))
6980         {
6981 	  tmp = int01, int01 = int11, int11 = tmp;
6982 	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
6983 	  maybe_same = arg01;
6984 	  swap = true;
6985 	}
6986       else
6987 	maybe_same = arg11;
6988 
6989       if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6990         {
6991 	  alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6992 			      build_int_cst (TREE_TYPE (arg00),
6993 					     int01 / int11));
6994 	  alt1 = arg10;
6995 	  same = maybe_same;
6996 	  if (swap)
6997 	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6998 	}
6999     }
7000 
7001   if (same)
7002     return fold_build2 (MULT_EXPR, type,
7003 			fold_build2 (code, type,
7004 				     fold_convert (type, alt0),
7005 				     fold_convert (type, alt1)),
7006 			fold_convert (type, same));
7007 
7008   return NULL_TREE;
7009 }
7010 
7011 /* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7012    specified by EXPR into the buffer PTR of length LEN bytes.
7013    Return the number of bytes placed in the buffer, or zero
7014    upon failure.  */
7015 
7016 static int
native_encode_int(tree expr,unsigned char * ptr,int len)7017 native_encode_int (tree expr, unsigned char *ptr, int len)
7018 {
7019   tree type = TREE_TYPE (expr);
7020   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7021   int byte, offset, word, words;
7022   unsigned char value;
7023 
7024   if (total_bytes > len)
7025     return 0;
7026   words = total_bytes / UNITS_PER_WORD;
7027 
7028   for (byte = 0; byte < total_bytes; byte++)
7029     {
7030       int bitpos = byte * BITS_PER_UNIT;
7031       if (bitpos < HOST_BITS_PER_WIDE_INT)
7032 	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7033       else
7034 	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7035 				 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7036 
7037       if (total_bytes > UNITS_PER_WORD)
7038 	{
7039 	  word = byte / UNITS_PER_WORD;
7040 	  if (WORDS_BIG_ENDIAN)
7041 	    word = (words - 1) - word;
7042 	  offset = word * UNITS_PER_WORD;
7043 	  if (BYTES_BIG_ENDIAN)
7044 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7045 	  else
7046 	    offset += byte % UNITS_PER_WORD;
7047 	}
7048       else
7049 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7050       ptr[offset] = value;
7051     }
7052   return total_bytes;
7053 }
7054 
7055 
7056 /* Subroutine of native_encode_expr.  Encode the REAL_CST
7057    specified by EXPR into the buffer PTR of length LEN bytes.
7058    Return the number of bytes placed in the buffer, or zero
7059    upon failure.  */
7060 
7061 static int
native_encode_real(tree expr,unsigned char * ptr,int len)7062 native_encode_real (tree expr, unsigned char *ptr, int len)
7063 {
7064   tree type = TREE_TYPE (expr);
7065   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7066   int byte, offset, word, words, bitpos;
7067   unsigned char value;
7068 
7069   /* There are always 32 bits in each long, no matter the size of
7070      the hosts long.  We handle floating point representations with
7071      up to 192 bits.  */
7072   long tmp[6];
7073 
7074   if (total_bytes > len)
7075     return 0;
7076   words = 32 / UNITS_PER_WORD;
7077 
7078   real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7079 
7080   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7081        bitpos += BITS_PER_UNIT)
7082     {
7083       byte = (bitpos / BITS_PER_UNIT) & 3;
7084       value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7085 
7086       if (UNITS_PER_WORD < 4)
7087 	{
7088 	  word = byte / UNITS_PER_WORD;
7089 	  if (WORDS_BIG_ENDIAN)
7090 	    word = (words - 1) - word;
7091 	  offset = word * UNITS_PER_WORD;
7092 	  if (BYTES_BIG_ENDIAN)
7093 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7094 	  else
7095 	    offset += byte % UNITS_PER_WORD;
7096 	}
7097       else
7098 	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7099       ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7100     }
7101   return total_bytes;
7102 }
7103 
7104 /* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7105    specified by EXPR into the buffer PTR of length LEN bytes.
7106    Return the number of bytes placed in the buffer, or zero
7107    upon failure.  */
7108 
7109 static int
native_encode_complex(tree expr,unsigned char * ptr,int len)7110 native_encode_complex (tree expr, unsigned char *ptr, int len)
7111 {
7112   int rsize, isize;
7113   tree part;
7114 
7115   part = TREE_REALPART (expr);
7116   rsize = native_encode_expr (part, ptr, len);
7117   if (rsize == 0)
7118     return 0;
7119   part = TREE_IMAGPART (expr);
7120   isize = native_encode_expr (part, ptr+rsize, len-rsize);
7121   if (isize != rsize)
7122     return 0;
7123   return rsize + isize;
7124 }
7125 
7126 
7127 /* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7128    specified by EXPR into the buffer PTR of length LEN bytes.
7129    Return the number of bytes placed in the buffer, or zero
7130    upon failure.  */
7131 
7132 static int
native_encode_vector(tree expr,unsigned char * ptr,int len)7133 native_encode_vector (tree expr, unsigned char *ptr, int len)
7134 {
7135   int i, size, offset, count;
7136   tree itype, elem, elements;
7137 
7138   offset = 0;
7139   elements = TREE_VECTOR_CST_ELTS (expr);
7140   count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7141   itype = TREE_TYPE (TREE_TYPE (expr));
7142   size = GET_MODE_SIZE (TYPE_MODE (itype));
7143   for (i = 0; i < count; i++)
7144     {
7145       if (elements)
7146 	{
7147 	  elem = TREE_VALUE (elements);
7148 	  elements = TREE_CHAIN (elements);
7149 	}
7150       else
7151 	elem = NULL_TREE;
7152 
7153       if (elem)
7154 	{
7155 	  if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7156 	    return 0;
7157 	}
7158       else
7159 	{
7160 	  if (offset + size > len)
7161 	    return 0;
7162 	  memset (ptr+offset, 0, size);
7163 	}
7164       offset += size;
7165     }
7166   return offset;
7167 }
7168 
7169 
7170 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7171    REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7172    buffer PTR of length LEN bytes.  Return the number of bytes
7173    placed in the buffer, or zero upon failure.  */
7174 
7175 static int
native_encode_expr(tree expr,unsigned char * ptr,int len)7176 native_encode_expr (tree expr, unsigned char *ptr, int len)
7177 {
7178   switch (TREE_CODE (expr))
7179     {
7180     case INTEGER_CST:
7181       return native_encode_int (expr, ptr, len);
7182 
7183     case REAL_CST:
7184       return native_encode_real (expr, ptr, len);
7185 
7186     case COMPLEX_CST:
7187       return native_encode_complex (expr, ptr, len);
7188 
7189     case VECTOR_CST:
7190       return native_encode_vector (expr, ptr, len);
7191 
7192     default:
7193       return 0;
7194     }
7195 }
7196 
7197 
7198 /* Subroutine of native_interpret_expr.  Interpret the contents of
7199    the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7200    If the buffer cannot be interpreted, return NULL_TREE.  */
7201 
7202 static tree
native_interpret_int(tree type,unsigned char * ptr,int len)7203 native_interpret_int (tree type, unsigned char *ptr, int len)
7204 {
7205   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7206   int byte, offset, word, words;
7207   unsigned char value;
7208   unsigned int HOST_WIDE_INT lo = 0;
7209   HOST_WIDE_INT hi = 0;
7210 
7211   if (total_bytes > len)
7212     return NULL_TREE;
7213   if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7214     return NULL_TREE;
7215   words = total_bytes / UNITS_PER_WORD;
7216 
7217   for (byte = 0; byte < total_bytes; byte++)
7218     {
7219       int bitpos = byte * BITS_PER_UNIT;
7220       if (total_bytes > UNITS_PER_WORD)
7221 	{
7222 	  word = byte / UNITS_PER_WORD;
7223 	  if (WORDS_BIG_ENDIAN)
7224 	    word = (words - 1) - word;
7225 	  offset = word * UNITS_PER_WORD;
7226 	  if (BYTES_BIG_ENDIAN)
7227 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7228 	  else
7229 	    offset += byte % UNITS_PER_WORD;
7230 	}
7231       else
7232 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7233       value = ptr[offset];
7234 
7235       if (bitpos < HOST_BITS_PER_WIDE_INT)
7236 	lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7237       else
7238 	hi |= (unsigned HOST_WIDE_INT) value
7239 	      << (bitpos - HOST_BITS_PER_WIDE_INT);
7240     }
7241 
7242   return force_fit_type (build_int_cst_wide (type, lo, hi),
7243 			 0, false, false);
7244 }
7245 
7246 
7247 /* Subroutine of native_interpret_expr.  Interpret the contents of
7248    the buffer PTR of length LEN as a REAL_CST of type TYPE.
7249    If the buffer cannot be interpreted, return NULL_TREE.  */
7250 
7251 static tree
native_interpret_real(tree type,unsigned char * ptr,int len)7252 native_interpret_real (tree type, unsigned char *ptr, int len)
7253 {
7254   enum machine_mode mode = TYPE_MODE (type);
7255   int total_bytes = GET_MODE_SIZE (mode);
7256   int byte, offset, word, words, bitpos;
7257   unsigned char value;
7258   /* There are always 32 bits in each long, no matter the size of
7259      the hosts long.  We handle floating point representations with
7260      up to 192 bits.  */
7261   REAL_VALUE_TYPE r;
7262   long tmp[6];
7263 
7264   total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7265   if (total_bytes > len || total_bytes > 24)
7266     return NULL_TREE;
7267   words = 32 / UNITS_PER_WORD;
7268 
7269   memset (tmp, 0, sizeof (tmp));
7270   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7271        bitpos += BITS_PER_UNIT)
7272     {
7273       byte = (bitpos / BITS_PER_UNIT) & 3;
7274       if (UNITS_PER_WORD < 4)
7275 	{
7276 	  word = byte / UNITS_PER_WORD;
7277 	  if (WORDS_BIG_ENDIAN)
7278 	    word = (words - 1) - word;
7279 	  offset = word * UNITS_PER_WORD;
7280 	  if (BYTES_BIG_ENDIAN)
7281 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7282 	  else
7283 	    offset += byte % UNITS_PER_WORD;
7284 	}
7285       else
7286 	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7287       value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7288 
7289       tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7290     }
7291 
7292   real_from_target (&r, tmp, mode);
7293   return build_real (type, r);
7294 }
7295 
7296 
7297 /* Subroutine of native_interpret_expr.  Interpret the contents of
7298    the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7299    If the buffer cannot be interpreted, return NULL_TREE.  */
7300 
7301 static tree
native_interpret_complex(tree type,unsigned char * ptr,int len)7302 native_interpret_complex (tree type, unsigned char *ptr, int len)
7303 {
7304   tree etype, rpart, ipart;
7305   int size;
7306 
7307   etype = TREE_TYPE (type);
7308   size = GET_MODE_SIZE (TYPE_MODE (etype));
7309   if (size * 2 > len)
7310     return NULL_TREE;
7311   rpart = native_interpret_expr (etype, ptr, size);
7312   if (!rpart)
7313     return NULL_TREE;
7314   ipart = native_interpret_expr (etype, ptr+size, size);
7315   if (!ipart)
7316     return NULL_TREE;
7317   return build_complex (type, rpart, ipart);
7318 }
7319 
7320 
7321 /* Subroutine of native_interpret_expr.  Interpret the contents of
7322    the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7323    If the buffer cannot be interpreted, return NULL_TREE.  */
7324 
7325 static tree
native_interpret_vector(tree type,unsigned char * ptr,int len)7326 native_interpret_vector (tree type, unsigned char *ptr, int len)
7327 {
7328   tree etype, elem, elements;
7329   int i, size, count;
7330 
7331   etype = TREE_TYPE (type);
7332   size = GET_MODE_SIZE (TYPE_MODE (etype));
7333   count = TYPE_VECTOR_SUBPARTS (type);
7334   if (size * count > len)
7335     return NULL_TREE;
7336 
7337   elements = NULL_TREE;
7338   for (i = count - 1; i >= 0; i--)
7339     {
7340       elem = native_interpret_expr (etype, ptr+(i*size), size);
7341       if (!elem)
7342 	return NULL_TREE;
7343       elements = tree_cons (NULL_TREE, elem, elements);
7344     }
7345   return build_vector (type, elements);
7346 }
7347 
7348 
7349 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
7350    the buffer PTR of length LEN as a constant of type TYPE.  For
7351    INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7352    we return a REAL_CST, etc...  If the buffer cannot be interpreted,
7353    return NULL_TREE.  */
7354 
7355 static tree
native_interpret_expr(tree type,unsigned char * ptr,int len)7356 native_interpret_expr (tree type, unsigned char *ptr, int len)
7357 {
7358   switch (TREE_CODE (type))
7359     {
7360     case INTEGER_TYPE:
7361     case ENUMERAL_TYPE:
7362     case BOOLEAN_TYPE:
7363       return native_interpret_int (type, ptr, len);
7364 
7365     case REAL_TYPE:
7366       return native_interpret_real (type, ptr, len);
7367 
7368     case COMPLEX_TYPE:
7369       return native_interpret_complex (type, ptr, len);
7370 
7371     case VECTOR_TYPE:
7372       return native_interpret_vector (type, ptr, len);
7373 
7374     default:
7375       return NULL_TREE;
7376     }
7377 }
7378 
7379 
7380 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7381    TYPE at compile-time.  If we're unable to perform the conversion
7382    return NULL_TREE.  */
7383 
7384 static tree
fold_view_convert_expr(tree type,tree expr)7385 fold_view_convert_expr (tree type, tree expr)
7386 {
7387   /* We support up to 512-bit values (for V8DFmode).  */
7388   unsigned char buffer[64];
7389   int len;
7390 
7391   /* Check that the host and target are sane.  */
7392   if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7393     return NULL_TREE;
7394 
7395   len = native_encode_expr (expr, buffer, sizeof (buffer));
7396   if (len == 0)
7397     return NULL_TREE;
7398 
7399   return native_interpret_expr (type, buffer, len);
7400 }
7401 
7402 
7403 /* Fold a unary expression of code CODE and type TYPE with operand
7404    OP0.  Return the folded expression if folding is successful.
7405    Otherwise, return NULL_TREE.  */
7406 
7407 tree
fold_unary(enum tree_code code,tree type,tree op0)7408 fold_unary (enum tree_code code, tree type, tree op0)
7409 {
7410   tree tem;
7411   tree arg0;
7412   enum tree_code_class kind = TREE_CODE_CLASS (code);
7413 
7414   gcc_assert (IS_EXPR_CODE_CLASS (kind)
7415 	      && TREE_CODE_LENGTH (code) == 1);
7416 
7417   arg0 = op0;
7418   if (arg0)
7419     {
7420       if (code == NOP_EXPR || code == CONVERT_EXPR
7421 	  || code == FLOAT_EXPR || code == ABS_EXPR)
7422 	{
7423 	  /* Don't use STRIP_NOPS, because signedness of argument type
7424 	     matters.  */
7425 	  STRIP_SIGN_NOPS (arg0);
7426 	}
7427       else
7428 	{
7429 	  /* Strip any conversions that don't change the mode.  This
7430 	     is safe for every expression, except for a comparison
7431 	     expression because its signedness is derived from its
7432 	     operands.
7433 
7434 	     Note that this is done as an internal manipulation within
7435 	     the constant folder, in order to find the simplest
7436 	     representation of the arguments so that their form can be
7437 	     studied.  In any cases, the appropriate type conversions
7438 	     should be put back in the tree that will get out of the
7439 	     constant folder.  */
7440 	  STRIP_NOPS (arg0);
7441 	}
7442     }
7443 
7444   if (TREE_CODE_CLASS (code) == tcc_unary)
7445     {
7446       if (TREE_CODE (arg0) == COMPOUND_EXPR)
7447 	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7448 		       fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7449       else if (TREE_CODE (arg0) == COND_EXPR)
7450 	{
7451 	  tree arg01 = TREE_OPERAND (arg0, 1);
7452 	  tree arg02 = TREE_OPERAND (arg0, 2);
7453 	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7454 	    arg01 = fold_build1 (code, type, arg01);
7455 	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7456 	    arg02 = fold_build1 (code, type, arg02);
7457 	  tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7458 			     arg01, arg02);
7459 
7460 	  /* If this was a conversion, and all we did was to move into
7461 	     inside the COND_EXPR, bring it back out.  But leave it if
7462 	     it is a conversion from integer to integer and the
7463 	     result precision is no wider than a word since such a
7464 	     conversion is cheap and may be optimized away by combine,
7465 	     while it couldn't if it were outside the COND_EXPR.  Then return
7466 	     so we don't get into an infinite recursion loop taking the
7467 	     conversion out and then back in.  */
7468 
7469 	  if ((code == NOP_EXPR || code == CONVERT_EXPR
7470 	       || code == NON_LVALUE_EXPR)
7471 	      && TREE_CODE (tem) == COND_EXPR
7472 	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7473 	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7474 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7475 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7476 	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7477 		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7478 	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7479 		     && (INTEGRAL_TYPE_P
7480 			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7481 		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7482 		  || flag_syntax_only))
7483 	    tem = build1 (code, type,
7484 			  build3 (COND_EXPR,
7485 				  TREE_TYPE (TREE_OPERAND
7486 					     (TREE_OPERAND (tem, 1), 0)),
7487 				  TREE_OPERAND (tem, 0),
7488 				  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7489 				  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7490 	  return tem;
7491 	}
7492       else if (COMPARISON_CLASS_P (arg0))
7493 	{
7494 	  if (TREE_CODE (type) == BOOLEAN_TYPE)
7495 	    {
7496 	      arg0 = copy_node (arg0);
7497 	      TREE_TYPE (arg0) = type;
7498 	      return arg0;
7499 	    }
7500 	  else if (TREE_CODE (type) != INTEGER_TYPE)
7501 	    return fold_build3 (COND_EXPR, type, arg0,
7502 				fold_build1 (code, type,
7503 					     integer_one_node),
7504 				fold_build1 (code, type,
7505 					     integer_zero_node));
7506 	}
7507    }
7508 
7509   switch (code)
7510     {
7511     case NOP_EXPR:
7512     case FLOAT_EXPR:
7513     case CONVERT_EXPR:
7514     case FIX_TRUNC_EXPR:
7515     case FIX_CEIL_EXPR:
7516     case FIX_FLOOR_EXPR:
7517     case FIX_ROUND_EXPR:
7518       if (TREE_TYPE (op0) == type)
7519 	return op0;
7520 
7521       /* If we have (type) (a CMP b) and type is an integral type, return
7522          new expression involving the new type.  */
7523       if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7524 	return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7525 			    TREE_OPERAND (op0, 1));
7526 
7527       /* Handle cases of two conversions in a row.  */
7528       if (TREE_CODE (op0) == NOP_EXPR
7529 	  || TREE_CODE (op0) == CONVERT_EXPR)
7530 	{
7531 	  tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7532 	  tree inter_type = TREE_TYPE (op0);
7533 	  int inside_int = INTEGRAL_TYPE_P (inside_type);
7534 	  int inside_ptr = POINTER_TYPE_P (inside_type);
7535 	  int inside_float = FLOAT_TYPE_P (inside_type);
7536 	  int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7537 	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
7538 	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7539 	  int inter_int = INTEGRAL_TYPE_P (inter_type);
7540 	  int inter_ptr = POINTER_TYPE_P (inter_type);
7541 	  int inter_float = FLOAT_TYPE_P (inter_type);
7542 	  int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7543 	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
7544 	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7545 	  int final_int = INTEGRAL_TYPE_P (type);
7546 	  int final_ptr = POINTER_TYPE_P (type);
7547 	  int final_float = FLOAT_TYPE_P (type);
7548 	  int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7549 	  unsigned int final_prec = TYPE_PRECISION (type);
7550 	  int final_unsignedp = TYPE_UNSIGNED (type);
7551 
7552 	  /* In addition to the cases of two conversions in a row
7553 	     handled below, if we are converting something to its own
7554 	     type via an object of identical or wider precision, neither
7555 	     conversion is needed.  */
7556 	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7557 	      && (((inter_int || inter_ptr) && final_int)
7558 		  || (inter_float && final_float))
7559 	      && inter_prec >= final_prec)
7560 	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7561 
7562 	  /* Likewise, if the intermediate and final types are either both
7563 	     float or both integer, we don't need the middle conversion if
7564 	     it is wider than the final type and doesn't change the signedness
7565 	     (for integers).  Avoid this if the final type is a pointer
7566 	     since then we sometimes need the inner conversion.  Likewise if
7567 	     the outer has a precision not equal to the size of its mode.  */
7568 	  if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7569 	       || (inter_float && inside_float)
7570 	       || (inter_vec && inside_vec))
7571 	      && inter_prec >= inside_prec
7572 	      && (inter_float || inter_vec
7573 		  || inter_unsignedp == inside_unsignedp)
7574 	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7575 		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
7576 	      && ! final_ptr
7577 	      && (! final_vec || inter_prec == inside_prec))
7578 	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7579 
7580 	  /* If we have a sign-extension of a zero-extended value, we can
7581 	     replace that by a single zero-extension.  */
7582 	  if (inside_int && inter_int && final_int
7583 	      && inside_prec < inter_prec && inter_prec < final_prec
7584 	      && inside_unsignedp && !inter_unsignedp)
7585 	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7586 
7587 	  /* Two conversions in a row are not needed unless:
7588 	     - some conversion is floating-point (overstrict for now), or
7589 	     - some conversion is a vector (overstrict for now), or
7590 	     - the intermediate type is narrower than both initial and
7591 	       final, or
7592 	     - the intermediate type and innermost type differ in signedness,
7593 	       and the outermost type is wider than the intermediate, or
7594 	     - the initial type is a pointer type and the precisions of the
7595 	       intermediate and final types differ, or
7596 	     - the final type is a pointer type and the precisions of the
7597 	       initial and intermediate types differ.
7598 	     - the final type is a pointer type and the initial type not
7599 	     - the initial type is a pointer to an array and the final type
7600 	       not.  */
7601 	  /* Java pointer type conversions generate checks in some
7602 	     cases, so we explicitly disallow this optimization.  */
7603 	  if (! inside_float && ! inter_float && ! final_float
7604 	      && ! inside_vec && ! inter_vec && ! final_vec
7605 	      && (inter_prec >= inside_prec || inter_prec >= final_prec)
7606 	      && ! (inside_int && inter_int
7607 		    && inter_unsignedp != inside_unsignedp
7608 		    && inter_prec < final_prec)
7609 	      && ((inter_unsignedp && inter_prec > inside_prec)
7610 		  == (final_unsignedp && final_prec > inter_prec))
7611 	      && ! (inside_ptr && inter_prec != final_prec)
7612 	      && ! (final_ptr && inside_prec != inter_prec)
7613 	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7614 		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
7615 	      && final_ptr == inside_ptr
7616 	      && ! (inside_ptr
7617 		    && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7618 		    && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE)
7619 	      && ! ((strcmp (lang_hooks.name, "GNU Java") == 0)
7620 		    && final_ptr))
7621 	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7622 	}
7623 
7624       /* Handle (T *)&A.B.C for A being of type T and B and C
7625 	 living at offset zero.  This occurs frequently in
7626 	 C++ upcasting and then accessing the base.  */
7627       if (TREE_CODE (op0) == ADDR_EXPR
7628 	  && POINTER_TYPE_P (type)
7629 	  && handled_component_p (TREE_OPERAND (op0, 0)))
7630         {
7631 	  HOST_WIDE_INT bitsize, bitpos;
7632 	  tree offset;
7633 	  enum machine_mode mode;
7634 	  int unsignedp, volatilep;
7635           tree base = TREE_OPERAND (op0, 0);
7636 	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7637 				      &mode, &unsignedp, &volatilep, false);
7638 	  /* If the reference was to a (constant) zero offset, we can use
7639 	     the address of the base if it has the same base type
7640 	     as the result type.  */
7641 	  if (! offset && bitpos == 0
7642 	      && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7643 		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7644 	    return fold_convert (type, build_fold_addr_expr (base));
7645         }
7646 
7647       if (TREE_CODE (op0) == MODIFY_EXPR
7648 	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7649 	  /* Detect assigning a bitfield.  */
7650 	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7651 	       && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7652 	{
7653 	  /* Don't leave an assignment inside a conversion
7654 	     unless assigning a bitfield.  */
7655 	  tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7656 	  /* First do the assignment, then return converted constant.  */
7657 	  tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7658 	  TREE_NO_WARNING (tem) = 1;
7659 	  TREE_USED (tem) = 1;
7660 	  return tem;
7661 	}
7662 
7663       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7664 	 constants (if x has signed type, the sign bit cannot be set
7665 	 in c).  This folds extension into the BIT_AND_EXPR.  */
7666       if (INTEGRAL_TYPE_P (type)
7667 	  && TREE_CODE (type) != BOOLEAN_TYPE
7668 	  && TREE_CODE (op0) == BIT_AND_EXPR
7669 	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7670 	{
7671 	  tree and = op0;
7672 	  tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7673 	  int change = 0;
7674 
7675 	  if (TYPE_UNSIGNED (TREE_TYPE (and))
7676 	      || (TYPE_PRECISION (type)
7677 		  <= TYPE_PRECISION (TREE_TYPE (and))))
7678 	    change = 1;
7679 	  else if (TYPE_PRECISION (TREE_TYPE (and1))
7680 		   <= HOST_BITS_PER_WIDE_INT
7681 		   && host_integerp (and1, 1))
7682 	    {
7683 	      unsigned HOST_WIDE_INT cst;
7684 
7685 	      cst = tree_low_cst (and1, 1);
7686 	      cst &= (HOST_WIDE_INT) -1
7687 		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7688 	      change = (cst == 0);
7689 #ifdef LOAD_EXTEND_OP
7690 	      if (change
7691 		  && !flag_syntax_only
7692 		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7693 		      == ZERO_EXTEND))
7694 		{
7695 		  tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7696 		  and0 = fold_convert (uns, and0);
7697 		  and1 = fold_convert (uns, and1);
7698 		}
7699 #endif
7700 	    }
7701 	  if (change)
7702 	    {
7703 	      tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7704 					TREE_INT_CST_HIGH (and1));
7705 	      tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7706 				    TREE_CONSTANT_OVERFLOW (and1));
7707 	      return fold_build2 (BIT_AND_EXPR, type,
7708 				  fold_convert (type, and0), tem);
7709 	    }
7710 	}
7711 
7712       /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7713 	 T2 being pointers to types of the same size.  */
7714       if (POINTER_TYPE_P (type)
7715 	  && BINARY_CLASS_P (arg0)
7716 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7717 	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7718 	{
7719 	  tree arg00 = TREE_OPERAND (arg0, 0);
7720 	  tree t0 = type;
7721 	  tree t1 = TREE_TYPE (arg00);
7722 	  tree tt0 = TREE_TYPE (t0);
7723 	  tree tt1 = TREE_TYPE (t1);
7724 	  tree s0 = TYPE_SIZE (tt0);
7725 	  tree s1 = TYPE_SIZE (tt1);
7726 
7727 	  if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7728 	    return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7729 			   TREE_OPERAND (arg0, 1));
7730 	}
7731 
7732       /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7733 	 of the same precision, and X is a integer type not narrower than
7734 	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
7735       if (INTEGRAL_TYPE_P (type)
7736 	  && TREE_CODE (op0) == BIT_NOT_EXPR
7737 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7738 	  && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7739 	      || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7740 	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7741 	{
7742 	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7743 	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7744 	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7745 	    return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7746 	}
7747 
7748       tem = fold_convert_const (code, type, op0);
7749       return tem ? tem : NULL_TREE;
7750 
7751     case VIEW_CONVERT_EXPR:
7752       if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7753 	return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7754       return fold_view_convert_expr (type, op0);
7755 
7756     case NEGATE_EXPR:
7757       tem = fold_negate_expr (arg0);
7758       if (tem)
7759 	return fold_convert (type, tem);
7760       return NULL_TREE;
7761 
7762     case ABS_EXPR:
7763       if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7764 	return fold_abs_const (arg0, type);
7765       else if (TREE_CODE (arg0) == NEGATE_EXPR)
7766 	return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7767       /* Convert fabs((double)float) into (double)fabsf(float).  */
7768       else if (TREE_CODE (arg0) == NOP_EXPR
7769 	       && TREE_CODE (type) == REAL_TYPE)
7770 	{
7771 	  tree targ0 = strip_float_extensions (arg0);
7772 	  if (targ0 != arg0)
7773 	    return fold_convert (type, fold_build1 (ABS_EXPR,
7774 						    TREE_TYPE (targ0),
7775 						    targ0));
7776 	}
7777       /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
7778       else if (TREE_CODE (arg0) == ABS_EXPR)
7779 	return arg0;
7780       else if (tree_expr_nonnegative_p (arg0))
7781 	return arg0;
7782 
7783       /* Strip sign ops from argument.  */
7784       if (TREE_CODE (type) == REAL_TYPE)
7785 	{
7786 	  tem = fold_strip_sign_ops (arg0);
7787 	  if (tem)
7788 	    return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7789 	}
7790       return NULL_TREE;
7791 
7792     case CONJ_EXPR:
7793       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7794 	return fold_convert (type, arg0);
7795       if (TREE_CODE (arg0) == COMPLEX_EXPR)
7796 	{
7797 	  tree itype = TREE_TYPE (type);
7798 	  tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7799 	  tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7800 	  return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7801 	}
7802       if (TREE_CODE (arg0) == COMPLEX_CST)
7803 	{
7804 	  tree itype = TREE_TYPE (type);
7805 	  tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7806 	  tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7807 	  return build_complex (type, rpart, negate_expr (ipart));
7808 	}
7809       if (TREE_CODE (arg0) == CONJ_EXPR)
7810 	return fold_convert (type, TREE_OPERAND (arg0, 0));
7811       return NULL_TREE;
7812 
7813     case BIT_NOT_EXPR:
7814       if (TREE_CODE (arg0) == INTEGER_CST)
7815         return fold_not_const (arg0, type);
7816       else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7817 	return TREE_OPERAND (arg0, 0);
7818       /* Convert ~ (-A) to A - 1.  */
7819       else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7820 	return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7821 			    build_int_cst (type, 1));
7822       /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
7823       else if (INTEGRAL_TYPE_P (type)
7824 	       && ((TREE_CODE (arg0) == MINUS_EXPR
7825 		    && integer_onep (TREE_OPERAND (arg0, 1)))
7826 		   || (TREE_CODE (arg0) == PLUS_EXPR
7827 		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7828 	return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7829       /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
7830       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7831 	       && (tem = fold_unary (BIT_NOT_EXPR, type,
7832 			       	     fold_convert (type,
7833 					     	   TREE_OPERAND (arg0, 0)))))
7834 	return fold_build2 (BIT_XOR_EXPR, type, tem,
7835 			    fold_convert (type, TREE_OPERAND (arg0, 1)));
7836       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7837 	       && (tem = fold_unary (BIT_NOT_EXPR, type,
7838 			       	     fold_convert (type,
7839 					     	   TREE_OPERAND (arg0, 1)))))
7840 	return fold_build2 (BIT_XOR_EXPR, type,
7841 			    fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7842 
7843       return NULL_TREE;
7844 
7845     case TRUTH_NOT_EXPR:
7846       /* The argument to invert_truthvalue must have Boolean type.  */
7847       if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7848           arg0 = fold_convert (boolean_type_node, arg0);
7849 
7850       /* Note that the operand of this must be an int
7851 	 and its values must be 0 or 1.
7852 	 ("true" is a fixed value perhaps depending on the language,
7853 	 but we don't handle values other than 1 correctly yet.)  */
7854       tem = fold_truth_not_expr (arg0);
7855       if (!tem)
7856 	return NULL_TREE;
7857       return fold_convert (type, tem);
7858 
7859     case REALPART_EXPR:
7860       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7861 	return fold_convert (type, arg0);
7862       if (TREE_CODE (arg0) == COMPLEX_EXPR)
7863 	return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7864 				 TREE_OPERAND (arg0, 1));
7865       if (TREE_CODE (arg0) == COMPLEX_CST)
7866 	return fold_convert (type, TREE_REALPART (arg0));
7867       if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7868 	{
7869 	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
7870 	  tem = fold_build2 (TREE_CODE (arg0), itype,
7871 			     fold_build1 (REALPART_EXPR, itype,
7872 					  TREE_OPERAND (arg0, 0)),
7873 			     fold_build1 (REALPART_EXPR, itype,
7874 					  TREE_OPERAND (arg0, 1)));
7875 	  return fold_convert (type, tem);
7876 	}
7877       if (TREE_CODE (arg0) == CONJ_EXPR)
7878 	{
7879 	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
7880 	  tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7881 	  return fold_convert (type, tem);
7882 	}
7883       return NULL_TREE;
7884 
7885     case IMAGPART_EXPR:
7886       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7887 	return fold_convert (type, integer_zero_node);
7888       if (TREE_CODE (arg0) == COMPLEX_EXPR)
7889 	return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7890 				 TREE_OPERAND (arg0, 0));
7891       if (TREE_CODE (arg0) == COMPLEX_CST)
7892 	return fold_convert (type, TREE_IMAGPART (arg0));
7893       if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7894 	{
7895 	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
7896 	  tem = fold_build2 (TREE_CODE (arg0), itype,
7897 			     fold_build1 (IMAGPART_EXPR, itype,
7898 					  TREE_OPERAND (arg0, 0)),
7899 			     fold_build1 (IMAGPART_EXPR, itype,
7900 					  TREE_OPERAND (arg0, 1)));
7901 	  return fold_convert (type, tem);
7902 	}
7903       if (TREE_CODE (arg0) == CONJ_EXPR)
7904 	{
7905 	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
7906 	  tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7907 	  return fold_convert (type, negate_expr (tem));
7908 	}
7909       return NULL_TREE;
7910 
7911     default:
7912       return NULL_TREE;
7913     } /* switch (code) */
7914 }
7915 
7916 /* Fold a binary expression of code CODE and type TYPE with operands
7917    OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7918    Return the folded expression if folding is successful.  Otherwise,
7919    return NULL_TREE.  */
7920 
7921 static tree
fold_minmax(enum tree_code code,tree type,tree op0,tree op1)7922 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7923 {
7924   enum tree_code compl_code;
7925 
7926   if (code == MIN_EXPR)
7927     compl_code = MAX_EXPR;
7928   else if (code == MAX_EXPR)
7929     compl_code = MIN_EXPR;
7930   else
7931     gcc_unreachable ();
7932 
7933   /* MIN (MAX (a, b), b) == b.  */
7934   if (TREE_CODE (op0) == compl_code
7935       && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7936     return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7937 
7938   /* MIN (MAX (b, a), b) == b.  */
7939   if (TREE_CODE (op0) == compl_code
7940       && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7941       && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7942     return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7943 
7944   /* MIN (a, MAX (a, b)) == a.  */
7945   if (TREE_CODE (op1) == compl_code
7946       && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7947       && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7948     return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7949 
7950   /* MIN (a, MAX (b, a)) == a.  */
7951   if (TREE_CODE (op1) == compl_code
7952       && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7953       && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7954     return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7955 
7956   return NULL_TREE;
7957 }
7958 
7959 /* Subroutine of fold_binary.  This routine performs all of the
7960    transformations that are common to the equality/inequality
7961    operators (EQ_EXPR and NE_EXPR) and the ordering operators
7962    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
7963    fold_binary should call fold_binary.  Fold a comparison with
7964    tree code CODE and type TYPE with operands OP0 and OP1.  Return
7965    the folded comparison or NULL_TREE.  */
7966 
7967 static tree
fold_comparison(enum tree_code code,tree type,tree op0,tree op1)7968 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7969 {
7970   tree arg0, arg1, tem;
7971 
7972   arg0 = op0;
7973   arg1 = op1;
7974 
7975   STRIP_SIGN_NOPS (arg0);
7976   STRIP_SIGN_NOPS (arg1);
7977 
7978   tem = fold_relational_const (code, type, arg0, arg1);
7979   if (tem != NULL_TREE)
7980     return tem;
7981 
7982   /* If one arg is a real or integer constant, put it last.  */
7983   if (tree_swap_operands_p (arg0, arg1, true))
7984     return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7985 
7986   /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
7987   if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7988       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7989 	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7990 	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
7991       && (TREE_CODE (arg1) == INTEGER_CST
7992 	  && !TREE_OVERFLOW (arg1)))
7993     {
7994       tree const1 = TREE_OPERAND (arg0, 1);
7995       tree const2 = arg1;
7996       tree variable = TREE_OPERAND (arg0, 0);
7997       tree lhs;
7998       int lhs_add;
7999       lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8000 
8001       lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8002 			 TREE_TYPE (arg1), const2, const1);
8003       if (TREE_CODE (lhs) == TREE_CODE (arg1)
8004 	  && (TREE_CODE (lhs) != INTEGER_CST
8005 	      || !TREE_OVERFLOW (lhs)))
8006 	{
8007 	  fold_overflow_warning (("assuming signed overflow does not occur "
8008 				  "when changing X +- C1 cmp C2 to "
8009 				  "X cmp C1 +- C2"),
8010 				 WARN_STRICT_OVERFLOW_COMPARISON);
8011 	  return fold_build2 (code, type, variable, lhs);
8012 	}
8013     }
8014 
8015   /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8016      same object, then we can fold this to a comparison of the two offsets in
8017      signed size type.  This is possible because pointer arithmetic is
8018      restricted to retain within an object and overflow on pointer differences
8019      is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8020 
8021      We check flag_wrapv directly because pointers types are unsigned,
8022      and therefore TYPE_OVERFLOW_WRAPS returns true for them.  That is
8023      normally what we want to avoid certain odd overflow cases, but
8024      not here.  */
8025   if (POINTER_TYPE_P (TREE_TYPE (arg0))
8026       && !flag_wrapv
8027       && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8028     {
8029       tree base0, offset0, base1, offset1;
8030 
8031       if (extract_array_ref (arg0, &base0, &offset0)
8032 	  && extract_array_ref (arg1, &base1, &offset1)
8033 	  && operand_equal_p (base0, base1, 0))
8034         {
8035 	  tree signed_size_type_node;
8036 	  signed_size_type_node = signed_type_for (size_type_node);
8037 
8038 	  /* By converting to signed size type we cover middle-end pointer
8039 	     arithmetic which operates on unsigned pointer types of size
8040 	     type size and ARRAY_REF offsets which are properly sign or
8041 	     zero extended from their type in case it is narrower than
8042 	     size type.  */
8043 	  if (offset0 == NULL_TREE)
8044 	    offset0 = build_int_cst (signed_size_type_node, 0);
8045 	  else
8046 	    offset0 = fold_convert (signed_size_type_node, offset0);
8047 	  if (offset1 == NULL_TREE)
8048 	    offset1 = build_int_cst (signed_size_type_node, 0);
8049 	  else
8050 	    offset1 = fold_convert (signed_size_type_node, offset1);
8051 
8052 	  return fold_build2 (code, type, offset0, offset1);
8053 	}
8054     }
8055 
8056   if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8057     {
8058       tree targ0 = strip_float_extensions (arg0);
8059       tree targ1 = strip_float_extensions (arg1);
8060       tree newtype = TREE_TYPE (targ0);
8061 
8062       if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8063 	newtype = TREE_TYPE (targ1);
8064 
8065       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
8066       if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8067 	return fold_build2 (code, type, fold_convert (newtype, targ0),
8068 			    fold_convert (newtype, targ1));
8069 
8070       /* (-a) CMP (-b) -> b CMP a  */
8071       if (TREE_CODE (arg0) == NEGATE_EXPR
8072 	  && TREE_CODE (arg1) == NEGATE_EXPR)
8073 	return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8074 			    TREE_OPERAND (arg0, 0));
8075 
8076       if (TREE_CODE (arg1) == REAL_CST)
8077 	{
8078 	  REAL_VALUE_TYPE cst;
8079 	  cst = TREE_REAL_CST (arg1);
8080 
8081 	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
8082 	  if (TREE_CODE (arg0) == NEGATE_EXPR)
8083 	    return fold_build2 (swap_tree_comparison (code), type,
8084 				TREE_OPERAND (arg0, 0),
8085 				build_real (TREE_TYPE (arg1),
8086 					    REAL_VALUE_NEGATE (cst)));
8087 
8088 	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
8089 	  /* a CMP (-0) -> a CMP 0  */
8090 	  if (REAL_VALUE_MINUS_ZERO (cst))
8091 	    return fold_build2 (code, type, arg0,
8092 				build_real (TREE_TYPE (arg1), dconst0));
8093 
8094 	  /* x != NaN is always true, other ops are always false.  */
8095 	  if (REAL_VALUE_ISNAN (cst)
8096 	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8097 	    {
8098 	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8099 	      return omit_one_operand (type, tem, arg0);
8100 	    }
8101 
8102 	  /* Fold comparisons against infinity.  */
8103 	  if (REAL_VALUE_ISINF (cst))
8104 	    {
8105 	      tem = fold_inf_compare (code, type, arg0, arg1);
8106 	      if (tem != NULL_TREE)
8107 		return tem;
8108 	    }
8109 	}
8110 
8111       /* If this is a comparison of a real constant with a PLUS_EXPR
8112 	 or a MINUS_EXPR of a real constant, we can convert it into a
8113 	 comparison with a revised real constant as long as no overflow
8114 	 occurs when unsafe_math_optimizations are enabled.  */
8115       if (flag_unsafe_math_optimizations
8116 	  && TREE_CODE (arg1) == REAL_CST
8117 	  && (TREE_CODE (arg0) == PLUS_EXPR
8118 	      || TREE_CODE (arg0) == MINUS_EXPR)
8119 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8120 	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8121 				      ? MINUS_EXPR : PLUS_EXPR,
8122 				      arg1, TREE_OPERAND (arg0, 1), 0))
8123 	  && ! TREE_CONSTANT_OVERFLOW (tem))
8124 	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8125 
8126       /* Likewise, we can simplify a comparison of a real constant with
8127          a MINUS_EXPR whose first operand is also a real constant, i.e.
8128          (c1 - x) < c2 becomes x > c1-c2.  */
8129       if (flag_unsafe_math_optimizations
8130 	  && TREE_CODE (arg1) == REAL_CST
8131 	  && TREE_CODE (arg0) == MINUS_EXPR
8132 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8133 	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8134 				      arg1, 0))
8135 	  && ! TREE_CONSTANT_OVERFLOW (tem))
8136 	return fold_build2 (swap_tree_comparison (code), type,
8137 			    TREE_OPERAND (arg0, 1), tem);
8138 
8139       /* Fold comparisons against built-in math functions.  */
8140       if (TREE_CODE (arg1) == REAL_CST
8141 	  && flag_unsafe_math_optimizations
8142 	  && ! flag_errno_math)
8143 	{
8144 	  enum built_in_function fcode = builtin_mathfn_code (arg0);
8145 
8146 	  if (fcode != END_BUILTINS)
8147 	    {
8148 	      tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8149 	      if (tem != NULL_TREE)
8150 		return tem;
8151 	    }
8152 	}
8153     }
8154 
8155   /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
8156   if (TREE_CONSTANT (arg1)
8157       && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8158 	  || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8159       /* This optimization is invalid for ordered comparisons
8160          if CONST+INCR overflows or if foo+incr might overflow.
8161 	 This optimization is invalid for floating point due to rounding.
8162 	 For pointer types we assume overflow doesn't happen.  */
8163       && (POINTER_TYPE_P (TREE_TYPE (arg0))
8164 	  || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8165 	      && (code == EQ_EXPR || code == NE_EXPR))))
8166     {
8167       tree varop, newconst;
8168 
8169       if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8170 	{
8171 	  newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8172 				  arg1, TREE_OPERAND (arg0, 1));
8173 	  varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8174 			  TREE_OPERAND (arg0, 0),
8175 			  TREE_OPERAND (arg0, 1));
8176 	}
8177       else
8178 	{
8179 	  newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8180 				  arg1, TREE_OPERAND (arg0, 1));
8181 	  varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8182 			  TREE_OPERAND (arg0, 0),
8183 			  TREE_OPERAND (arg0, 1));
8184 	}
8185 
8186 
8187       /* If VAROP is a reference to a bitfield, we must mask
8188 	 the constant by the width of the field.  */
8189       if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8190 	  && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8191 	  && host_integerp (DECL_SIZE (TREE_OPERAND
8192 					 (TREE_OPERAND (varop, 0), 1)), 1))
8193 	{
8194 	  tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8195 	  HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8196 	  tree folded_compare, shift;
8197 
8198 	  /* First check whether the comparison would come out
8199 	     always the same.  If we don't do that we would
8200 	     change the meaning with the masking.  */
8201 	  folded_compare = fold_build2 (code, type,
8202 					TREE_OPERAND (varop, 0), arg1);
8203 	  if (TREE_CODE (folded_compare) == INTEGER_CST)
8204 	    return omit_one_operand (type, folded_compare, varop);
8205 
8206 	  shift = build_int_cst (NULL_TREE,
8207 				 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8208 	  shift = fold_convert (TREE_TYPE (varop), shift);
8209 	  newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8210 				  newconst, shift);
8211 	  newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8212 				  newconst, shift);
8213 	}
8214 
8215       return fold_build2 (code, type, varop, newconst);
8216     }
8217 
8218   if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8219       && (TREE_CODE (arg0) == NOP_EXPR
8220 	  || TREE_CODE (arg0) == CONVERT_EXPR))
8221     {
8222       /* If we are widening one operand of an integer comparison,
8223 	 see if the other operand is similarly being widened.  Perhaps we
8224 	 can do the comparison in the narrower type.  */
8225       tem = fold_widened_comparison (code, type, arg0, arg1);
8226       if (tem)
8227 	return tem;
8228 
8229       /* Or if we are changing signedness.  */
8230       tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8231       if (tem)
8232 	return tem;
8233     }
8234 
8235   /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8236      constant, we can simplify it.  */
8237   if (TREE_CODE (arg1) == INTEGER_CST
8238       && (TREE_CODE (arg0) == MIN_EXPR
8239 	  || TREE_CODE (arg0) == MAX_EXPR)
8240       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8241     {
8242       tem = optimize_minmax_comparison (code, type, op0, op1);
8243       if (tem)
8244 	return tem;
8245     }
8246 
8247   /* Simplify comparison of something with itself.  (For IEEE
8248      floating-point, we can only do some of these simplifications.)  */
8249   if (operand_equal_p (arg0, arg1, 0))
8250     {
8251       switch (code)
8252 	{
8253 	case EQ_EXPR:
8254 	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8255 	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8256 	    return constant_boolean_node (1, type);
8257 	  break;
8258 
8259 	case GE_EXPR:
8260 	case LE_EXPR:
8261 	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8262 	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8263 	    return constant_boolean_node (1, type);
8264 	  return fold_build2 (EQ_EXPR, type, arg0, arg1);
8265 
8266 	case NE_EXPR:
8267 	  /* For NE, we can only do this simplification if integer
8268 	     or we don't honor IEEE floating point NaNs.  */
8269 	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8270 	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8271 	    break;
8272 	  /* ... fall through ...  */
8273 	case GT_EXPR:
8274 	case LT_EXPR:
8275 	  return constant_boolean_node (0, type);
8276 	default:
8277 	  gcc_unreachable ();
8278 	}
8279     }
8280 
8281   /* If we are comparing an expression that just has comparisons
8282      of two integer values, arithmetic expressions of those comparisons,
8283      and constants, we can simplify it.  There are only three cases
8284      to check: the two values can either be equal, the first can be
8285      greater, or the second can be greater.  Fold the expression for
8286      those three values.  Since each value must be 0 or 1, we have
8287      eight possibilities, each of which corresponds to the constant 0
8288      or 1 or one of the six possible comparisons.
8289 
8290      This handles common cases like (a > b) == 0 but also handles
8291      expressions like  ((x > y) - (y > x)) > 0, which supposedly
8292      occur in macroized code.  */
8293 
8294   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8295     {
8296       tree cval1 = 0, cval2 = 0;
8297       int save_p = 0;
8298 
8299       if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8300 	  /* Don't handle degenerate cases here; they should already
8301 	     have been handled anyway.  */
8302 	  && cval1 != 0 && cval2 != 0
8303 	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8304 	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8305 	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8306 	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8307 	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8308 	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8309 				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8310 	{
8311 	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8312 	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8313 
8314 	  /* We can't just pass T to eval_subst in case cval1 or cval2
8315 	     was the same as ARG1.  */
8316 
8317 	  tree high_result
8318 		= fold_build2 (code, type,
8319 			       eval_subst (arg0, cval1, maxval,
8320 					   cval2, minval),
8321 			       arg1);
8322 	  tree equal_result
8323 		= fold_build2 (code, type,
8324 			       eval_subst (arg0, cval1, maxval,
8325 					   cval2, maxval),
8326 			       arg1);
8327 	  tree low_result
8328 		= fold_build2 (code, type,
8329 			       eval_subst (arg0, cval1, minval,
8330 					   cval2, maxval),
8331 			       arg1);
8332 
8333 	  /* All three of these results should be 0 or 1.  Confirm they are.
8334 	     Then use those values to select the proper code to use.  */
8335 
8336 	  if (TREE_CODE (high_result) == INTEGER_CST
8337 	      && TREE_CODE (equal_result) == INTEGER_CST
8338 	      && TREE_CODE (low_result) == INTEGER_CST)
8339 	    {
8340 	      /* Make a 3-bit mask with the high-order bit being the
8341 		 value for `>', the next for '=', and the low for '<'.  */
8342 	      switch ((integer_onep (high_result) * 4)
8343 		      + (integer_onep (equal_result) * 2)
8344 		      + integer_onep (low_result))
8345 		{
8346 		case 0:
8347 		  /* Always false.  */
8348 		  return omit_one_operand (type, integer_zero_node, arg0);
8349 		case 1:
8350 		  code = LT_EXPR;
8351 		  break;
8352 		case 2:
8353 		  code = EQ_EXPR;
8354 		  break;
8355 		case 3:
8356 		  code = LE_EXPR;
8357 		  break;
8358 		case 4:
8359 		  code = GT_EXPR;
8360 		  break;
8361 		case 5:
8362 		  code = NE_EXPR;
8363 		  break;
8364 		case 6:
8365 		  code = GE_EXPR;
8366 		  break;
8367 		case 7:
8368 		  /* Always true.  */
8369 		  return omit_one_operand (type, integer_one_node, arg0);
8370 		}
8371 
8372 	      if (save_p)
8373 		return save_expr (build2 (code, type, cval1, cval2));
8374 	      return fold_build2 (code, type, cval1, cval2);
8375 	    }
8376 	}
8377     }
8378 
8379   /* Fold a comparison of the address of COMPONENT_REFs with the same
8380      type and component to a comparison of the address of the base
8381      object.  In short, &x->a OP &y->a to x OP y and
8382      &x->a OP &y.a to x OP &y  */
8383   if (TREE_CODE (arg0) == ADDR_EXPR
8384       && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8385       && TREE_CODE (arg1) == ADDR_EXPR
8386       && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8387     {
8388       tree cref0 = TREE_OPERAND (arg0, 0);
8389       tree cref1 = TREE_OPERAND (arg1, 0);
8390       if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8391 	{
8392 	  tree op0 = TREE_OPERAND (cref0, 0);
8393 	  tree op1 = TREE_OPERAND (cref1, 0);
8394 	  return fold_build2 (code, type,
8395 			      build_fold_addr_expr (op0),
8396 			      build_fold_addr_expr (op1));
8397 	}
8398     }
8399 
8400   /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8401      into a single range test.  */
8402   if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8403        || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8404       && TREE_CODE (arg1) == INTEGER_CST
8405       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8406       && !integer_zerop (TREE_OPERAND (arg0, 1))
8407       && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8408       && !TREE_OVERFLOW (arg1))
8409     {
8410       tem = fold_div_compare (code, type, arg0, arg1);
8411       if (tem != NULL_TREE)
8412 	return tem;
8413     }
8414 
8415   return NULL_TREE;
8416 }
8417 
8418 
8419 /* Subroutine of fold_binary.  Optimize complex multiplications of the
8420    form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
8421    argument EXPR represents the expression "z" of type TYPE.  */
8422 
8423 static tree
fold_mult_zconjz(tree type,tree expr)8424 fold_mult_zconjz (tree type, tree expr)
8425 {
8426   tree itype = TREE_TYPE (type);
8427   tree rpart, ipart, tem;
8428 
8429   if (TREE_CODE (expr) == COMPLEX_EXPR)
8430     {
8431       rpart = TREE_OPERAND (expr, 0);
8432       ipart = TREE_OPERAND (expr, 1);
8433     }
8434   else if (TREE_CODE (expr) == COMPLEX_CST)
8435     {
8436       rpart = TREE_REALPART (expr);
8437       ipart = TREE_IMAGPART (expr);
8438     }
8439   else
8440     {
8441       expr = save_expr (expr);
8442       rpart = fold_build1 (REALPART_EXPR, itype, expr);
8443       ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8444     }
8445 
8446   rpart = save_expr (rpart);
8447   ipart = save_expr (ipart);
8448   tem = fold_build2 (PLUS_EXPR, itype,
8449 		     fold_build2 (MULT_EXPR, itype, rpart, rpart),
8450 		     fold_build2 (MULT_EXPR, itype, ipart, ipart));
8451   return fold_build2 (COMPLEX_EXPR, type, tem,
8452 		      fold_convert (itype, integer_zero_node));
8453 }
8454 
8455 
8456 /* Fold a binary expression of code CODE and type TYPE with operands
8457    OP0 and OP1.  Return the folded expression if folding is
8458    successful.  Otherwise, return NULL_TREE.  */
8459 
8460 tree
fold_binary(enum tree_code code,tree type,tree op0,tree op1)8461 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8462 {
8463   enum tree_code_class kind = TREE_CODE_CLASS (code);
8464   tree arg0, arg1, tem;
8465   tree t1 = NULL_TREE;
8466   bool strict_overflow_p;
8467 
8468   gcc_assert (IS_EXPR_CODE_CLASS (kind)
8469 	      && TREE_CODE_LENGTH (code) == 2
8470 	      && op0 != NULL_TREE
8471 	      && op1 != NULL_TREE);
8472 
8473   arg0 = op0;
8474   arg1 = op1;
8475 
8476   /* Strip any conversions that don't change the mode.  This is
8477      safe for every expression, except for a comparison expression
8478      because its signedness is derived from its operands.  So, in
8479      the latter case, only strip conversions that don't change the
8480      signedness.
8481 
8482      Note that this is done as an internal manipulation within the
8483      constant folder, in order to find the simplest representation
8484      of the arguments so that their form can be studied.  In any
8485      cases, the appropriate type conversions should be put back in
8486      the tree that will get out of the constant folder.  */
8487 
8488   if (kind == tcc_comparison)
8489     {
8490       STRIP_SIGN_NOPS (arg0);
8491       STRIP_SIGN_NOPS (arg1);
8492     }
8493   else
8494     {
8495       STRIP_NOPS (arg0);
8496       STRIP_NOPS (arg1);
8497     }
8498 
8499   /* Note that TREE_CONSTANT isn't enough: static var addresses are
8500      constant but we can't do arithmetic on them.  */
8501   if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8502       || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8503       || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8504       || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8505     {
8506       if (kind == tcc_binary)
8507 	tem = const_binop (code, arg0, arg1, 0);
8508       else if (kind == tcc_comparison)
8509 	tem = fold_relational_const (code, type, arg0, arg1);
8510       else
8511 	tem = NULL_TREE;
8512 
8513       if (tem != NULL_TREE)
8514 	{
8515 	  if (TREE_TYPE (tem) != type)
8516 	    tem = fold_convert (type, tem);
8517 	  return tem;
8518 	}
8519     }
8520 
8521   /* If this is a commutative operation, and ARG0 is a constant, move it
8522      to ARG1 to reduce the number of tests below.  */
8523   if (commutative_tree_code (code)
8524       && tree_swap_operands_p (arg0, arg1, true))
8525     return fold_build2 (code, type, op1, op0);
8526 
8527   /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8528 
8529      First check for cases where an arithmetic operation is applied to a
8530      compound, conditional, or comparison operation.  Push the arithmetic
8531      operation inside the compound or conditional to see if any folding
8532      can then be done.  Convert comparison to conditional for this purpose.
8533      The also optimizes non-constant cases that used to be done in
8534      expand_expr.
8535 
8536      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8537      one of the operands is a comparison and the other is a comparison, a
8538      BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
8539      code below would make the expression more complex.  Change it to a
8540      TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
8541      TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
8542 
8543   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8544        || code == EQ_EXPR || code == NE_EXPR)
8545       && ((truth_value_p (TREE_CODE (arg0))
8546 	   && (truth_value_p (TREE_CODE (arg1))
8547 	       || (TREE_CODE (arg1) == BIT_AND_EXPR
8548 		   && integer_onep (TREE_OPERAND (arg1, 1)))))
8549 	  || (truth_value_p (TREE_CODE (arg1))
8550 	      && (truth_value_p (TREE_CODE (arg0))
8551 		  || (TREE_CODE (arg0) == BIT_AND_EXPR
8552 		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
8553     {
8554       tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8555 			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8556 			 : TRUTH_XOR_EXPR,
8557 			 boolean_type_node,
8558 			 fold_convert (boolean_type_node, arg0),
8559 			 fold_convert (boolean_type_node, arg1));
8560 
8561       if (code == EQ_EXPR)
8562 	tem = invert_truthvalue (tem);
8563 
8564       return fold_convert (type, tem);
8565     }
8566 
8567   if (TREE_CODE_CLASS (code) == tcc_binary
8568       || TREE_CODE_CLASS (code) == tcc_comparison)
8569     {
8570       if (TREE_CODE (arg0) == COMPOUND_EXPR)
8571 	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8572 		       fold_build2 (code, type,
8573 				    TREE_OPERAND (arg0, 1), op1));
8574       if (TREE_CODE (arg1) == COMPOUND_EXPR
8575 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8576 	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8577 		       fold_build2 (code, type,
8578 				    op0, TREE_OPERAND (arg1, 1)));
8579 
8580       if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8581 	{
8582 	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8583 						     arg0, arg1,
8584 						     /*cond_first_p=*/1);
8585 	  if (tem != NULL_TREE)
8586 	    return tem;
8587 	}
8588 
8589       if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8590 	{
8591 	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8592 						     arg1, arg0,
8593 					             /*cond_first_p=*/0);
8594 	  if (tem != NULL_TREE)
8595 	    return tem;
8596 	}
8597     }
8598 
8599   switch (code)
8600     {
8601     case PLUS_EXPR:
8602       /* A + (-B) -> A - B */
8603       if (TREE_CODE (arg1) == NEGATE_EXPR)
8604 	return fold_build2 (MINUS_EXPR, type,
8605 			    fold_convert (type, arg0),
8606 			    fold_convert (type, TREE_OPERAND (arg1, 0)));
8607       /* (-A) + B -> B - A */
8608       if (TREE_CODE (arg0) == NEGATE_EXPR
8609 	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8610 	return fold_build2 (MINUS_EXPR, type,
8611 			    fold_convert (type, arg1),
8612 			    fold_convert (type, TREE_OPERAND (arg0, 0)));
8613       /* Convert ~A + 1 to -A.  */
8614       if (INTEGRAL_TYPE_P (type)
8615 	  && TREE_CODE (arg0) == BIT_NOT_EXPR
8616 	  && integer_onep (arg1))
8617 	return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8618 
8619       /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8620 	 same or one.  */
8621       if ((TREE_CODE (arg0) == MULT_EXPR
8622 	   || TREE_CODE (arg1) == MULT_EXPR)
8623 	  && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8624         {
8625 	  tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8626 	  if (tem)
8627 	    return tem;
8628 	}
8629 
8630       if (! FLOAT_TYPE_P (type))
8631 	{
8632 	  if (integer_zerop (arg1))
8633 	    return non_lvalue (fold_convert (type, arg0));
8634 
8635 	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8636 	     with a constant, and the two constants have no bits in common,
8637 	     we should treat this as a BIT_IOR_EXPR since this may produce more
8638 	     simplifications.  */
8639 	  if (TREE_CODE (arg0) == BIT_AND_EXPR
8640 	      && TREE_CODE (arg1) == BIT_AND_EXPR
8641 	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8642 	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8643 	      && integer_zerop (const_binop (BIT_AND_EXPR,
8644 					     TREE_OPERAND (arg0, 1),
8645 					     TREE_OPERAND (arg1, 1), 0)))
8646 	    {
8647 	      code = BIT_IOR_EXPR;
8648 	      goto bit_ior;
8649 	    }
8650 
8651 	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8652 	     (plus (plus (mult) (mult)) (foo)) so that we can
8653 	     take advantage of the factoring cases below.  */
8654 	  if (((TREE_CODE (arg0) == PLUS_EXPR
8655 		|| TREE_CODE (arg0) == MINUS_EXPR)
8656 	       && TREE_CODE (arg1) == MULT_EXPR)
8657 	      || ((TREE_CODE (arg1) == PLUS_EXPR
8658 		   || TREE_CODE (arg1) == MINUS_EXPR)
8659 		  && TREE_CODE (arg0) == MULT_EXPR))
8660 	    {
8661 	      tree parg0, parg1, parg, marg;
8662 	      enum tree_code pcode;
8663 
8664 	      if (TREE_CODE (arg1) == MULT_EXPR)
8665 		parg = arg0, marg = arg1;
8666 	      else
8667 		parg = arg1, marg = arg0;
8668 	      pcode = TREE_CODE (parg);
8669 	      parg0 = TREE_OPERAND (parg, 0);
8670 	      parg1 = TREE_OPERAND (parg, 1);
8671 	      STRIP_NOPS (parg0);
8672 	      STRIP_NOPS (parg1);
8673 
8674 	      if (TREE_CODE (parg0) == MULT_EXPR
8675 		  && TREE_CODE (parg1) != MULT_EXPR)
8676 		return fold_build2 (pcode, type,
8677 				    fold_build2 (PLUS_EXPR, type,
8678 						 fold_convert (type, parg0),
8679 						 fold_convert (type, marg)),
8680 				    fold_convert (type, parg1));
8681 	      if (TREE_CODE (parg0) != MULT_EXPR
8682 		  && TREE_CODE (parg1) == MULT_EXPR)
8683 		return fold_build2 (PLUS_EXPR, type,
8684 				    fold_convert (type, parg0),
8685 				    fold_build2 (pcode, type,
8686 						 fold_convert (type, marg),
8687 						 fold_convert (type,
8688 							       parg1)));
8689 	    }
8690 
8691 	  /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
8692 	     of the array.  Loop optimizer sometimes produce this type of
8693 	     expressions.  */
8694 	  if (TREE_CODE (arg0) == ADDR_EXPR)
8695 	    {
8696 	      tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8697 	      if (tem)
8698 		return fold_convert (type, tem);
8699 	    }
8700 	  else if (TREE_CODE (arg1) == ADDR_EXPR)
8701 	    {
8702 	      tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8703 	      if (tem)
8704 		return fold_convert (type, tem);
8705 	    }
8706 	}
8707       else
8708 	{
8709 	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
8710 	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8711 	    return non_lvalue (fold_convert (type, arg0));
8712 
8713 	  /* Likewise if the operands are reversed.  */
8714 	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8715 	    return non_lvalue (fold_convert (type, arg1));
8716 
8717 	  /* Convert X + -C into X - C.  */
8718 	  if (TREE_CODE (arg1) == REAL_CST
8719 	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8720 	    {
8721 	      tem = fold_negate_const (arg1, type);
8722 	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8723 		return fold_build2 (MINUS_EXPR, type,
8724 				    fold_convert (type, arg0),
8725 				    fold_convert (type, tem));
8726 	    }
8727 
8728           if (flag_unsafe_math_optimizations
8729 	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8730 	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8731 	      && (tem = distribute_real_division (code, type, arg0, arg1)))
8732 	    return tem;
8733 
8734 	  /* Convert x+x into x*2.0.  */
8735 	  if (operand_equal_p (arg0, arg1, 0)
8736 	      && SCALAR_FLOAT_TYPE_P (type))
8737 	    return fold_build2 (MULT_EXPR, type, arg0,
8738 				build_real (type, dconst2));
8739 
8740           /* Convert a + (b*c + d*e) into (a + b*c) + d*e.  */
8741           if (flag_unsafe_math_optimizations
8742               && TREE_CODE (arg1) == PLUS_EXPR
8743               && TREE_CODE (arg0) != MULT_EXPR)
8744             {
8745               tree tree10 = TREE_OPERAND (arg1, 0);
8746               tree tree11 = TREE_OPERAND (arg1, 1);
8747               if (TREE_CODE (tree11) == MULT_EXPR
8748 		  && TREE_CODE (tree10) == MULT_EXPR)
8749                 {
8750                   tree tree0;
8751                   tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8752                   return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8753                 }
8754             }
8755           /* Convert (b*c + d*e) + a into b*c + (d*e +a).  */
8756           if (flag_unsafe_math_optimizations
8757               && TREE_CODE (arg0) == PLUS_EXPR
8758               && TREE_CODE (arg1) != MULT_EXPR)
8759             {
8760               tree tree00 = TREE_OPERAND (arg0, 0);
8761               tree tree01 = TREE_OPERAND (arg0, 1);
8762               if (TREE_CODE (tree01) == MULT_EXPR
8763 		  && TREE_CODE (tree00) == MULT_EXPR)
8764                 {
8765                   tree tree0;
8766                   tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8767                   return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8768                 }
8769             }
8770 	}
8771 
8772      bit_rotate:
8773       /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8774 	 is a rotate of A by C1 bits.  */
8775       /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8776 	 is a rotate of A by B bits.  */
8777       {
8778 	enum tree_code code0, code1;
8779 	code0 = TREE_CODE (arg0);
8780 	code1 = TREE_CODE (arg1);
8781 	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8782 	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8783 	    && operand_equal_p (TREE_OPERAND (arg0, 0),
8784 			        TREE_OPERAND (arg1, 0), 0)
8785 	    && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8786 	  {
8787 	    tree tree01, tree11;
8788 	    enum tree_code code01, code11;
8789 
8790 	    tree01 = TREE_OPERAND (arg0, 1);
8791 	    tree11 = TREE_OPERAND (arg1, 1);
8792 	    STRIP_NOPS (tree01);
8793 	    STRIP_NOPS (tree11);
8794 	    code01 = TREE_CODE (tree01);
8795 	    code11 = TREE_CODE (tree11);
8796 	    if (code01 == INTEGER_CST
8797 		&& code11 == INTEGER_CST
8798 		&& TREE_INT_CST_HIGH (tree01) == 0
8799 		&& TREE_INT_CST_HIGH (tree11) == 0
8800 		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8801 		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8802 	      return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8803 			     code0 == LSHIFT_EXPR ? tree01 : tree11);
8804 	    else if (code11 == MINUS_EXPR)
8805 	      {
8806 		tree tree110, tree111;
8807 		tree110 = TREE_OPERAND (tree11, 0);
8808 		tree111 = TREE_OPERAND (tree11, 1);
8809 		STRIP_NOPS (tree110);
8810 		STRIP_NOPS (tree111);
8811 		if (TREE_CODE (tree110) == INTEGER_CST
8812 		    && 0 == compare_tree_int (tree110,
8813 					      TYPE_PRECISION
8814 					      (TREE_TYPE (TREE_OPERAND
8815 							  (arg0, 0))))
8816 		    && operand_equal_p (tree01, tree111, 0))
8817 		  return build2 ((code0 == LSHIFT_EXPR
8818 				  ? LROTATE_EXPR
8819 				  : RROTATE_EXPR),
8820 				 type, TREE_OPERAND (arg0, 0), tree01);
8821 	      }
8822 	    else if (code01 == MINUS_EXPR)
8823 	      {
8824 		tree tree010, tree011;
8825 		tree010 = TREE_OPERAND (tree01, 0);
8826 		tree011 = TREE_OPERAND (tree01, 1);
8827 		STRIP_NOPS (tree010);
8828 		STRIP_NOPS (tree011);
8829 		if (TREE_CODE (tree010) == INTEGER_CST
8830 		    && 0 == compare_tree_int (tree010,
8831 					      TYPE_PRECISION
8832 					      (TREE_TYPE (TREE_OPERAND
8833 							  (arg0, 0))))
8834 		    && operand_equal_p (tree11, tree011, 0))
8835 		  return build2 ((code0 != LSHIFT_EXPR
8836 				  ? LROTATE_EXPR
8837 				  : RROTATE_EXPR),
8838 				 type, TREE_OPERAND (arg0, 0), tree11);
8839 	      }
8840 	  }
8841       }
8842 
8843     associate:
8844       /* In most languages, can't associate operations on floats through
8845 	 parentheses.  Rather than remember where the parentheses were, we
8846 	 don't associate floats at all, unless the user has specified
8847 	 -funsafe-math-optimizations.  */
8848 
8849       if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8850 	{
8851 	  tree var0, con0, lit0, minus_lit0;
8852 	  tree var1, con1, lit1, minus_lit1;
8853 	  bool ok = true;
8854 
8855 	  /* Split both trees into variables, constants, and literals.  Then
8856 	     associate each group together, the constants with literals,
8857 	     then the result with variables.  This increases the chances of
8858 	     literals being recombined later and of generating relocatable
8859 	     expressions for the sum of a constant and literal.  */
8860 	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8861 	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8862 			     code == MINUS_EXPR);
8863 
8864 	  /* With undefined overflow we can only associate constants
8865 	     with one variable.  */
8866 	  if ((POINTER_TYPE_P (type)
8867 	       || (INTEGRAL_TYPE_P (type)
8868 		   && !(TYPE_UNSIGNED (type) || flag_wrapv)))
8869 	      && var0 && var1)
8870 	    {
8871 	      tree tmp0 = var0;
8872 	      tree tmp1 = var1;
8873 
8874 	      if (TREE_CODE (tmp0) == NEGATE_EXPR)
8875 	        tmp0 = TREE_OPERAND (tmp0, 0);
8876 	      if (TREE_CODE (tmp1) == NEGATE_EXPR)
8877 	        tmp1 = TREE_OPERAND (tmp1, 0);
8878 	      /* The only case we can still associate with two variables
8879 		 is if they are the same, modulo negation.  */
8880 	      if (!operand_equal_p (tmp0, tmp1, 0))
8881 	        ok = false;
8882 	    }
8883 
8884 	  /* Only do something if we found more than two objects.  Otherwise,
8885 	     nothing has changed and we risk infinite recursion.  */
8886 	  if (ok
8887 	      && (2 < ((var0 != 0) + (var1 != 0)
8888 		       + (con0 != 0) + (con1 != 0)
8889 		       + (lit0 != 0) + (lit1 != 0)
8890 		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
8891 	    {
8892 	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
8893 	      if (code == MINUS_EXPR)
8894 		code = PLUS_EXPR;
8895 
8896 	      var0 = associate_trees (var0, var1, code, type);
8897 	      con0 = associate_trees (con0, con1, code, type);
8898 	      lit0 = associate_trees (lit0, lit1, code, type);
8899 	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8900 
8901 	      /* Preserve the MINUS_EXPR if the negative part of the literal is
8902 		 greater than the positive part.  Otherwise, the multiplicative
8903 		 folding code (i.e extract_muldiv) may be fooled in case
8904 		 unsigned constants are subtracted, like in the following
8905 		 example: ((X*2 + 4) - 8U)/2.  */
8906 	      if (minus_lit0 && lit0)
8907 		{
8908 		  if (TREE_CODE (lit0) == INTEGER_CST
8909 		      && TREE_CODE (minus_lit0) == INTEGER_CST
8910 		      && tree_int_cst_lt (lit0, minus_lit0))
8911 		    {
8912 		      minus_lit0 = associate_trees (minus_lit0, lit0,
8913 						    MINUS_EXPR, type);
8914 		      lit0 = 0;
8915 		    }
8916 		  else
8917 		    {
8918 		      lit0 = associate_trees (lit0, minus_lit0,
8919 					      MINUS_EXPR, type);
8920 		      minus_lit0 = 0;
8921 		    }
8922 		}
8923 	      if (minus_lit0)
8924 		{
8925 		  if (con0 == 0)
8926 		    return fold_convert (type,
8927 					 associate_trees (var0, minus_lit0,
8928 							  MINUS_EXPR, type));
8929 		  else
8930 		    {
8931 		      con0 = associate_trees (con0, minus_lit0,
8932 					      MINUS_EXPR, type);
8933 		      return fold_convert (type,
8934 					   associate_trees (var0, con0,
8935 							    PLUS_EXPR, type));
8936 		    }
8937 		}
8938 
8939 	      con0 = associate_trees (con0, lit0, code, type);
8940 	      return fold_convert (type, associate_trees (var0, con0,
8941 							  code, type));
8942 	    }
8943 	}
8944 
8945       return NULL_TREE;
8946 
8947     case MINUS_EXPR:
8948       /* A - (-B) -> A + B */
8949       if (TREE_CODE (arg1) == NEGATE_EXPR)
8950 	return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8951       /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
8952       if (TREE_CODE (arg0) == NEGATE_EXPR
8953 	  && (FLOAT_TYPE_P (type)
8954 	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
8955 	  && negate_expr_p (arg1)
8956 	  && reorder_operands_p (arg0, arg1))
8957 	return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8958 			    TREE_OPERAND (arg0, 0));
8959       /* Convert -A - 1 to ~A.  */
8960       if (INTEGRAL_TYPE_P (type)
8961 	  && TREE_CODE (arg0) == NEGATE_EXPR
8962 	  && integer_onep (arg1))
8963 	return fold_build1 (BIT_NOT_EXPR, type,
8964 			    fold_convert (type, TREE_OPERAND (arg0, 0)));
8965 
8966       /* Convert -1 - A to ~A.  */
8967       if (INTEGRAL_TYPE_P (type)
8968 	  && integer_all_onesp (arg0))
8969 	return fold_build1 (BIT_NOT_EXPR, type, arg1);
8970 
8971       if (! FLOAT_TYPE_P (type))
8972 	{
8973 	  if (integer_zerop (arg0))
8974 	    return negate_expr (fold_convert (type, arg1));
8975 	  if (integer_zerop (arg1))
8976 	    return non_lvalue (fold_convert (type, arg0));
8977 
8978 	  /* Fold A - (A & B) into ~B & A.  */
8979 	  if (!TREE_SIDE_EFFECTS (arg0)
8980 	      && TREE_CODE (arg1) == BIT_AND_EXPR)
8981 	    {
8982 	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8983 		return fold_build2 (BIT_AND_EXPR, type,
8984 				    fold_build1 (BIT_NOT_EXPR, type,
8985 						 TREE_OPERAND (arg1, 0)),
8986 				    arg0);
8987 	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8988 		return fold_build2 (BIT_AND_EXPR, type,
8989 				    fold_build1 (BIT_NOT_EXPR, type,
8990 						 TREE_OPERAND (arg1, 1)),
8991 				    arg0);
8992 	    }
8993 
8994 	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8995 	     any power of 2 minus 1.  */
8996 	  if (TREE_CODE (arg0) == BIT_AND_EXPR
8997 	      && TREE_CODE (arg1) == BIT_AND_EXPR
8998 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
8999 				  TREE_OPERAND (arg1, 0), 0))
9000 	    {
9001 	      tree mask0 = TREE_OPERAND (arg0, 1);
9002 	      tree mask1 = TREE_OPERAND (arg1, 1);
9003 	      tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9004 
9005 	      if (operand_equal_p (tem, mask1, 0))
9006 		{
9007 		  tem = fold_build2 (BIT_XOR_EXPR, type,
9008 				     TREE_OPERAND (arg0, 0), mask1);
9009 		  return fold_build2 (MINUS_EXPR, type, tem, mask1);
9010 		}
9011 	    }
9012 	}
9013 
9014       /* See if ARG1 is zero and X - ARG1 reduces to X.  */
9015       else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9016 	return non_lvalue (fold_convert (type, arg0));
9017 
9018       /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
9019 	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9020 	 (-ARG1 + ARG0) reduces to -ARG1.  */
9021       else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9022 	return negate_expr (fold_convert (type, arg1));
9023 
9024       /* Fold &x - &x.  This can happen from &x.foo - &x.
9025 	 This is unsafe for certain floats even in non-IEEE formats.
9026 	 In IEEE, it is unsafe because it does wrong for NaNs.
9027 	 Also note that operand_equal_p is always false if an operand
9028 	 is volatile.  */
9029 
9030       if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9031 	  && operand_equal_p (arg0, arg1, 0))
9032 	return fold_convert (type, integer_zero_node);
9033 
9034       /* A - B -> A + (-B) if B is easily negatable.  */
9035       if (negate_expr_p (arg1)
9036 	  && ((FLOAT_TYPE_P (type)
9037                /* Avoid this transformation if B is a positive REAL_CST.  */
9038 	       && (TREE_CODE (arg1) != REAL_CST
9039 		   ||  REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9040 	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
9041 	return fold_build2 (PLUS_EXPR, type,
9042 			    fold_convert (type, arg0),
9043 			    fold_convert (type, negate_expr (arg1)));
9044 
9045       /* Try folding difference of addresses.  */
9046       {
9047 	HOST_WIDE_INT diff;
9048 
9049 	if ((TREE_CODE (arg0) == ADDR_EXPR
9050 	     || TREE_CODE (arg1) == ADDR_EXPR)
9051 	    && ptr_difference_const (arg0, arg1, &diff))
9052 	  return build_int_cst_type (type, diff);
9053       }
9054 
9055       /* Fold &a[i] - &a[j] to i-j.  */
9056       if (TREE_CODE (arg0) == ADDR_EXPR
9057 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9058 	  && TREE_CODE (arg1) == ADDR_EXPR
9059 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9060         {
9061 	  tree aref0 = TREE_OPERAND (arg0, 0);
9062 	  tree aref1 = TREE_OPERAND (arg1, 0);
9063 	  if (operand_equal_p (TREE_OPERAND (aref0, 0),
9064 			       TREE_OPERAND (aref1, 0), 0))
9065 	    {
9066 	      tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9067 	      tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9068 	      tree esz = array_ref_element_size (aref0);
9069 	      tree diff = build2 (MINUS_EXPR, type, op0, op1);
9070 	      return fold_build2 (MULT_EXPR, type, diff,
9071 			          fold_convert (type, esz));
9072 
9073 	    }
9074 	}
9075 
9076       /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
9077 	 of the array.  Loop optimizer sometimes produce this type of
9078 	 expressions.  */
9079       if (TREE_CODE (arg0) == ADDR_EXPR)
9080 	{
9081 	  tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9082 	  if (tem)
9083 	    return fold_convert (type, tem);
9084 	}
9085 
9086       if (flag_unsafe_math_optimizations
9087 	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9088 	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9089 	  && (tem = distribute_real_division (code, type, arg0, arg1)))
9090 	return tem;
9091 
9092       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9093 	 same or one.  */
9094       if ((TREE_CODE (arg0) == MULT_EXPR
9095 	   || TREE_CODE (arg1) == MULT_EXPR)
9096 	  && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9097         {
9098 	  tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9099 	  if (tem)
9100 	    return tem;
9101 	}
9102 
9103       goto associate;
9104 
9105     case MULT_EXPR:
9106       /* (-A) * (-B) -> A * B  */
9107       if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9108 	return fold_build2 (MULT_EXPR, type,
9109 			    fold_convert (type, TREE_OPERAND (arg0, 0)),
9110 			    fold_convert (type, negate_expr (arg1)));
9111       if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9112 	return fold_build2 (MULT_EXPR, type,
9113 			    fold_convert (type, negate_expr (arg0)),
9114 			    fold_convert (type, TREE_OPERAND (arg1, 0)));
9115 
9116       if (! FLOAT_TYPE_P (type))
9117 	{
9118 	  if (integer_zerop (arg1))
9119 	    return omit_one_operand (type, arg1, arg0);
9120 	  if (integer_onep (arg1))
9121 	    return non_lvalue (fold_convert (type, arg0));
9122 	  /* Transform x * -1 into -x.  */
9123 	  if (integer_all_onesp (arg1))
9124 	    return fold_convert (type, negate_expr (arg0));
9125 
9126 	  /* (a * (1 << b)) is (a << b)  */
9127 	  if (TREE_CODE (arg1) == LSHIFT_EXPR
9128 	      && integer_onep (TREE_OPERAND (arg1, 0)))
9129 	    return fold_build2 (LSHIFT_EXPR, type, arg0,
9130 				TREE_OPERAND (arg1, 1));
9131 	  if (TREE_CODE (arg0) == LSHIFT_EXPR
9132 	      && integer_onep (TREE_OPERAND (arg0, 0)))
9133 	    return fold_build2 (LSHIFT_EXPR, type, arg1,
9134 				TREE_OPERAND (arg0, 1));
9135 
9136 	  strict_overflow_p = false;
9137 	  if (TREE_CODE (arg1) == INTEGER_CST
9138 	      && 0 != (tem = extract_muldiv (op0,
9139 					     fold_convert (type, arg1),
9140 					     code, NULL_TREE,
9141 					     &strict_overflow_p)))
9142 	    {
9143 	      if (strict_overflow_p)
9144 		fold_overflow_warning (("assuming signed overflow does not "
9145 					"occur when simplifying "
9146 					"multiplication"),
9147 				       WARN_STRICT_OVERFLOW_MISC);
9148 	      return fold_convert (type, tem);
9149 	    }
9150 
9151 	  /* Optimize z * conj(z) for integer complex numbers.  */
9152 	  if (TREE_CODE (arg0) == CONJ_EXPR
9153 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9154 	    return fold_mult_zconjz (type, arg1);
9155 	  if (TREE_CODE (arg1) == CONJ_EXPR
9156 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9157 	    return fold_mult_zconjz (type, arg0);
9158 	}
9159       else
9160 	{
9161 	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
9162 	     when x is NaN, since x * 0 is also NaN.  Nor are they the
9163 	     same in modes with signed zeros, since multiplying a
9164 	     negative value by 0 gives -0, not +0.  */
9165 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9166 	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9167 	      && real_zerop (arg1))
9168 	    return omit_one_operand (type, arg1, arg0);
9169 	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
9170 	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9171 	      && real_onep (arg1))
9172 	    return non_lvalue (fold_convert (type, arg0));
9173 
9174 	  /* Transform x * -1.0 into -x.  */
9175 	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9176 	      && real_minus_onep (arg1))
9177 	    return fold_convert (type, negate_expr (arg0));
9178 
9179 	  /* Convert (C1/X)*C2 into (C1*C2)/X.  */
9180 	  if (flag_unsafe_math_optimizations
9181 	      && TREE_CODE (arg0) == RDIV_EXPR
9182 	      && TREE_CODE (arg1) == REAL_CST
9183 	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9184 	    {
9185 	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9186 				      arg1, 0);
9187 	      if (tem)
9188 		return fold_build2 (RDIV_EXPR, type, tem,
9189 				    TREE_OPERAND (arg0, 1));
9190 	    }
9191 
9192           /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
9193 	  if (operand_equal_p (arg0, arg1, 0))
9194 	    {
9195 	      tree tem = fold_strip_sign_ops (arg0);
9196 	      if (tem != NULL_TREE)
9197 		{
9198 		  tem = fold_convert (type, tem);
9199 		  return fold_build2 (MULT_EXPR, type, tem, tem);
9200 		}
9201 	    }
9202 
9203 	  /* Optimize z * conj(z) for floating point complex numbers.
9204 	     Guarded by flag_unsafe_math_optimizations as non-finite
9205 	     imaginary components don't produce scalar results.  */
9206 	  if (flag_unsafe_math_optimizations
9207 	      && TREE_CODE (arg0) == CONJ_EXPR
9208 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9209 	    return fold_mult_zconjz (type, arg1);
9210 	  if (flag_unsafe_math_optimizations
9211 	      && TREE_CODE (arg1) == CONJ_EXPR
9212 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9213 	    return fold_mult_zconjz (type, arg0);
9214 
9215 	  if (flag_unsafe_math_optimizations)
9216 	    {
9217 	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9218 	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9219 
9220 	      /* Optimizations of root(...)*root(...).  */
9221 	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9222 		{
9223 		  tree rootfn, arg, arglist;
9224 		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9225 		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9226 
9227 		  /* Optimize sqrt(x)*sqrt(x) as x.  */
9228 		  if (BUILTIN_SQRT_P (fcode0)
9229 		      && operand_equal_p (arg00, arg10, 0)
9230 		      && ! HONOR_SNANS (TYPE_MODE (type)))
9231 		    return arg00;
9232 
9233 	          /* Optimize root(x)*root(y) as root(x*y).  */
9234 		  rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9235 		  arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9236 		  arglist = build_tree_list (NULL_TREE, arg);
9237 		  return build_function_call_expr (rootfn, arglist);
9238 		}
9239 
9240 	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
9241 	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9242 		{
9243 		  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9244 		  tree arg = fold_build2 (PLUS_EXPR, type,
9245 					  TREE_VALUE (TREE_OPERAND (arg0, 1)),
9246 					  TREE_VALUE (TREE_OPERAND (arg1, 1)));
9247 		  tree arglist = build_tree_list (NULL_TREE, arg);
9248 		  return build_function_call_expr (expfn, arglist);
9249 		}
9250 
9251 	      /* Optimizations of pow(...)*pow(...).  */
9252 	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9253 		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9254 		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9255 		{
9256 		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9257 		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9258 								     1)));
9259 		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9260 		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9261 								     1)));
9262 
9263 		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
9264 		  if (operand_equal_p (arg01, arg11, 0))
9265 		    {
9266 		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9267 		      tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9268 		      tree arglist = tree_cons (NULL_TREE, arg,
9269 						build_tree_list (NULL_TREE,
9270 								 arg01));
9271 		      return build_function_call_expr (powfn, arglist);
9272 		    }
9273 
9274 		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
9275 		  if (operand_equal_p (arg00, arg10, 0))
9276 		    {
9277 		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9278 		      tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9279 		      tree arglist = tree_cons (NULL_TREE, arg00,
9280 						build_tree_list (NULL_TREE,
9281 								 arg));
9282 		      return build_function_call_expr (powfn, arglist);
9283 		    }
9284 		}
9285 
9286 	      /* Optimize tan(x)*cos(x) as sin(x).  */
9287 	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9288 		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9289 		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9290 		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9291 		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9292 		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9293 		  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9294 				      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9295 		{
9296 		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9297 
9298 		  if (sinfn != NULL_TREE)
9299 		    return build_function_call_expr (sinfn,
9300 						     TREE_OPERAND (arg0, 1));
9301 		}
9302 
9303 	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
9304 	      if (fcode1 == BUILT_IN_POW
9305 		  || fcode1 == BUILT_IN_POWF
9306 		  || fcode1 == BUILT_IN_POWL)
9307 		{
9308 		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9309 		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9310 								     1)));
9311 		  if (TREE_CODE (arg11) == REAL_CST
9312 		      && ! TREE_CONSTANT_OVERFLOW (arg11)
9313 		      && operand_equal_p (arg0, arg10, 0))
9314 		    {
9315 		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9316 		      REAL_VALUE_TYPE c;
9317 		      tree arg, arglist;
9318 
9319 		      c = TREE_REAL_CST (arg11);
9320 		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9321 		      arg = build_real (type, c);
9322 		      arglist = build_tree_list (NULL_TREE, arg);
9323 		      arglist = tree_cons (NULL_TREE, arg0, arglist);
9324 		      return build_function_call_expr (powfn, arglist);
9325 		    }
9326 		}
9327 
9328 	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
9329 	      if (fcode0 == BUILT_IN_POW
9330 		  || fcode0 == BUILT_IN_POWF
9331 		  || fcode0 == BUILT_IN_POWL)
9332 		{
9333 		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9334 		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9335 								     1)));
9336 		  if (TREE_CODE (arg01) == REAL_CST
9337 		      && ! TREE_CONSTANT_OVERFLOW (arg01)
9338 		      && operand_equal_p (arg1, arg00, 0))
9339 		    {
9340 		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9341 		      REAL_VALUE_TYPE c;
9342 		      tree arg, arglist;
9343 
9344 		      c = TREE_REAL_CST (arg01);
9345 		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9346 		      arg = build_real (type, c);
9347 		      arglist = build_tree_list (NULL_TREE, arg);
9348 		      arglist = tree_cons (NULL_TREE, arg1, arglist);
9349 		      return build_function_call_expr (powfn, arglist);
9350 		    }
9351 		}
9352 
9353 	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
9354 	      if (! optimize_size
9355 		  && operand_equal_p (arg0, arg1, 0))
9356 		{
9357 		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9358 
9359 		  if (powfn)
9360 		    {
9361 		      tree arg = build_real (type, dconst2);
9362 		      tree arglist = build_tree_list (NULL_TREE, arg);
9363 		      arglist = tree_cons (NULL_TREE, arg0, arglist);
9364 		      return build_function_call_expr (powfn, arglist);
9365 		    }
9366 		}
9367 	    }
9368 	}
9369       goto associate;
9370 
9371     case BIT_IOR_EXPR:
9372     bit_ior:
9373       if (integer_all_onesp (arg1))
9374 	return omit_one_operand (type, arg1, arg0);
9375       if (integer_zerop (arg1))
9376 	return non_lvalue (fold_convert (type, arg0));
9377       if (operand_equal_p (arg0, arg1, 0))
9378 	return non_lvalue (fold_convert (type, arg0));
9379 
9380       /* ~X | X is -1.  */
9381       if (TREE_CODE (arg0) == BIT_NOT_EXPR
9382 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9383 	{
9384 	  t1 = build_int_cst (type, -1);
9385 	  t1 = force_fit_type (t1, 0, false, false);
9386 	  return omit_one_operand (type, t1, arg1);
9387 	}
9388 
9389       /* X | ~X is -1.  */
9390       if (TREE_CODE (arg1) == BIT_NOT_EXPR
9391 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9392 	{
9393 	  t1 = build_int_cst (type, -1);
9394 	  t1 = force_fit_type (t1, 0, false, false);
9395 	  return omit_one_operand (type, t1, arg0);
9396 	}
9397 
9398       /* Canonicalize (X & C1) | C2.  */
9399       if (TREE_CODE (arg0) == BIT_AND_EXPR
9400 	  && TREE_CODE (arg1) == INTEGER_CST
9401 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9402 	{
9403 	  unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9404 	  int width = TYPE_PRECISION (type);
9405 	  hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9406 	  lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9407 	  hi2 = TREE_INT_CST_HIGH (arg1);
9408 	  lo2 = TREE_INT_CST_LOW (arg1);
9409 
9410 	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
9411 	  if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9412 	    return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9413 
9414 	  if (width > HOST_BITS_PER_WIDE_INT)
9415 	    {
9416 	      mhi = (unsigned HOST_WIDE_INT) -1
9417 		    >> (2 * HOST_BITS_PER_WIDE_INT - width);
9418 	      mlo = -1;
9419 	    }
9420 	  else
9421 	    {
9422 	      mhi = 0;
9423 	      mlo = (unsigned HOST_WIDE_INT) -1
9424 		    >> (HOST_BITS_PER_WIDE_INT - width);
9425 	    }
9426 
9427 	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
9428 	  if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9429 	    return fold_build2 (BIT_IOR_EXPR, type,
9430 				TREE_OPERAND (arg0, 0), arg1);
9431 
9432 	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2.  */
9433 	  hi1 &= mhi;
9434 	  lo1 &= mlo;
9435 	  if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9436 	    return fold_build2 (BIT_IOR_EXPR, type,
9437 				fold_build2 (BIT_AND_EXPR, type,
9438 					     TREE_OPERAND (arg0, 0),
9439 					     build_int_cst_wide (type,
9440 								 lo1 & ~lo2,
9441 								 hi1 & ~hi2)),
9442 				arg1);
9443 	}
9444 
9445       /* (X & Y) | Y is (X, Y).  */
9446       if (TREE_CODE (arg0) == BIT_AND_EXPR
9447 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9448 	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9449       /* (X & Y) | X is (Y, X).  */
9450       if (TREE_CODE (arg0) == BIT_AND_EXPR
9451 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9452 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9453 	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9454       /* X | (X & Y) is (Y, X).  */
9455       if (TREE_CODE (arg1) == BIT_AND_EXPR
9456 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9457 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9458 	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9459       /* X | (Y & X) is (Y, X).  */
9460       if (TREE_CODE (arg1) == BIT_AND_EXPR
9461 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9462 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9463 	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9464 
9465       t1 = distribute_bit_expr (code, type, arg0, arg1);
9466       if (t1 != NULL_TREE)
9467 	return t1;
9468 
9469       /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9470 
9471 	 This results in more efficient code for machines without a NAND
9472 	 instruction.  Combine will canonicalize to the first form
9473 	 which will allow use of NAND instructions provided by the
9474 	 backend if they exist.  */
9475       if (TREE_CODE (arg0) == BIT_NOT_EXPR
9476 	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
9477 	{
9478 	  return fold_build1 (BIT_NOT_EXPR, type,
9479 			      build2 (BIT_AND_EXPR, type,
9480 				      TREE_OPERAND (arg0, 0),
9481 				      TREE_OPERAND (arg1, 0)));
9482 	}
9483 
9484       /* See if this can be simplified into a rotate first.  If that
9485 	 is unsuccessful continue in the association code.  */
9486       goto bit_rotate;
9487 
9488     case BIT_XOR_EXPR:
9489       if (integer_zerop (arg1))
9490 	return non_lvalue (fold_convert (type, arg0));
9491       if (integer_all_onesp (arg1))
9492 	return fold_build1 (BIT_NOT_EXPR, type, arg0);
9493       if (operand_equal_p (arg0, arg1, 0))
9494 	return omit_one_operand (type, integer_zero_node, arg0);
9495 
9496       /* ~X ^ X is -1.  */
9497       if (TREE_CODE (arg0) == BIT_NOT_EXPR
9498 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9499 	{
9500 	  t1 = build_int_cst (type, -1);
9501 	  t1 = force_fit_type (t1, 0, false, false);
9502 	  return omit_one_operand (type, t1, arg1);
9503 	}
9504 
9505       /* X ^ ~X is -1.  */
9506       if (TREE_CODE (arg1) == BIT_NOT_EXPR
9507 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9508 	{
9509 	  t1 = build_int_cst (type, -1);
9510 	  t1 = force_fit_type (t1, 0, false, false);
9511 	  return omit_one_operand (type, t1, arg0);
9512 	}
9513 
9514       /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9515          with a constant, and the two constants have no bits in common,
9516 	 we should treat this as a BIT_IOR_EXPR since this may produce more
9517 	 simplifications.  */
9518       if (TREE_CODE (arg0) == BIT_AND_EXPR
9519 	  && TREE_CODE (arg1) == BIT_AND_EXPR
9520 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9521 	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9522 	  && integer_zerop (const_binop (BIT_AND_EXPR,
9523 					 TREE_OPERAND (arg0, 1),
9524 					 TREE_OPERAND (arg1, 1), 0)))
9525 	{
9526 	  code = BIT_IOR_EXPR;
9527 	  goto bit_ior;
9528 	}
9529 
9530       /* (X | Y) ^ X -> Y & ~ X*/
9531       if (TREE_CODE (arg0) == BIT_IOR_EXPR
9532           && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9533         {
9534 	  tree t2 = TREE_OPERAND (arg0, 1);
9535 	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9536 			    arg1);
9537 	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9538 			    fold_convert (type, t1));
9539 	  return t1;
9540 	}
9541 
9542       /* (Y | X) ^ X -> Y & ~ X*/
9543       if (TREE_CODE (arg0) == BIT_IOR_EXPR
9544           && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9545         {
9546 	  tree t2 = TREE_OPERAND (arg0, 0);
9547 	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9548 			    arg1);
9549 	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9550 			    fold_convert (type, t1));
9551 	  return t1;
9552 	}
9553 
9554       /* X ^ (X | Y) -> Y & ~ X*/
9555       if (TREE_CODE (arg1) == BIT_IOR_EXPR
9556           && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9557         {
9558 	  tree t2 = TREE_OPERAND (arg1, 1);
9559 	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9560 			    arg0);
9561 	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9562 			    fold_convert (type, t1));
9563 	  return t1;
9564 	}
9565 
9566       /* X ^ (Y | X) -> Y & ~ X*/
9567       if (TREE_CODE (arg1) == BIT_IOR_EXPR
9568           && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9569         {
9570 	  tree t2 = TREE_OPERAND (arg1, 0);
9571 	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9572 			    arg0);
9573 	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9574 			    fold_convert (type, t1));
9575 	  return t1;
9576 	}
9577 
9578       /* Convert ~X ^ ~Y to X ^ Y.  */
9579       if (TREE_CODE (arg0) == BIT_NOT_EXPR
9580 	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
9581 	return fold_build2 (code, type,
9582 			    fold_convert (type, TREE_OPERAND (arg0, 0)),
9583 			    fold_convert (type, TREE_OPERAND (arg1, 0)));
9584 
9585       /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
9586       if (TREE_CODE (arg0) == BIT_AND_EXPR
9587 	  && integer_onep (TREE_OPERAND (arg0, 1))
9588 	  && integer_onep (arg1))
9589 	return fold_build2 (EQ_EXPR, type, arg0,
9590 			    build_int_cst (TREE_TYPE (arg0), 0));
9591 
9592       /* Fold (X & Y) ^ Y as ~X & Y.  */
9593       if (TREE_CODE (arg0) == BIT_AND_EXPR
9594 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9595 	{
9596 	  tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9597 	  return fold_build2 (BIT_AND_EXPR, type,
9598 			      fold_build1 (BIT_NOT_EXPR, type, tem),
9599 			      fold_convert (type, arg1));
9600 	}
9601       /* Fold (X & Y) ^ X as ~Y & X.  */
9602       if (TREE_CODE (arg0) == BIT_AND_EXPR
9603 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9604 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9605 	{
9606 	  tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9607 	  return fold_build2 (BIT_AND_EXPR, type,
9608 			      fold_build1 (BIT_NOT_EXPR, type, tem),
9609 			      fold_convert (type, arg1));
9610 	}
9611       /* Fold X ^ (X & Y) as X & ~Y.  */
9612       if (TREE_CODE (arg1) == BIT_AND_EXPR
9613 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9614 	{
9615 	  tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9616 	  return fold_build2 (BIT_AND_EXPR, type,
9617 			      fold_convert (type, arg0),
9618 			      fold_build1 (BIT_NOT_EXPR, type, tem));
9619 	}
9620       /* Fold X ^ (Y & X) as ~Y & X.  */
9621       if (TREE_CODE (arg1) == BIT_AND_EXPR
9622 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9623 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9624 	{
9625 	  tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9626 	  return fold_build2 (BIT_AND_EXPR, type,
9627 			      fold_build1 (BIT_NOT_EXPR, type, tem),
9628 			      fold_convert (type, arg0));
9629 	}
9630 
9631       /* See if this can be simplified into a rotate first.  If that
9632 	 is unsuccessful continue in the association code.  */
9633       goto bit_rotate;
9634 
9635     case BIT_AND_EXPR:
9636       if (integer_all_onesp (arg1))
9637 	return non_lvalue (fold_convert (type, arg0));
9638       if (integer_zerop (arg1))
9639 	return omit_one_operand (type, arg1, arg0);
9640       if (operand_equal_p (arg0, arg1, 0))
9641 	return non_lvalue (fold_convert (type, arg0));
9642 
9643       /* ~X & X is always zero.  */
9644       if (TREE_CODE (arg0) == BIT_NOT_EXPR
9645 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9646 	return omit_one_operand (type, integer_zero_node, arg1);
9647 
9648       /* X & ~X is always zero.  */
9649       if (TREE_CODE (arg1) == BIT_NOT_EXPR
9650 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9651 	return omit_one_operand (type, integer_zero_node, arg0);
9652 
9653       /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
9654       if (TREE_CODE (arg0) == BIT_IOR_EXPR
9655 	  && TREE_CODE (arg1) == INTEGER_CST
9656 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9657 	return fold_build2 (BIT_IOR_EXPR, type,
9658 			    fold_build2 (BIT_AND_EXPR, type,
9659 					 TREE_OPERAND (arg0, 0), arg1),
9660 			    fold_build2 (BIT_AND_EXPR, type,
9661 					 TREE_OPERAND (arg0, 1), arg1));
9662 
9663       /* (X | Y) & Y is (X, Y).  */
9664       if (TREE_CODE (arg0) == BIT_IOR_EXPR
9665 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9666 	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9667       /* (X | Y) & X is (Y, X).  */
9668       if (TREE_CODE (arg0) == BIT_IOR_EXPR
9669 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9670 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9671 	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9672       /* X & (X | Y) is (Y, X).  */
9673       if (TREE_CODE (arg1) == BIT_IOR_EXPR
9674 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9675 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9676 	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9677       /* X & (Y | X) is (Y, X).  */
9678       if (TREE_CODE (arg1) == BIT_IOR_EXPR
9679 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9680 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9681 	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9682 
9683       /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
9684       if (TREE_CODE (arg0) == BIT_XOR_EXPR
9685 	  && integer_onep (TREE_OPERAND (arg0, 1))
9686 	  && integer_onep (arg1))
9687 	{
9688 	  tem = TREE_OPERAND (arg0, 0);
9689 	  return fold_build2 (EQ_EXPR, type,
9690 			      fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9691 					   build_int_cst (TREE_TYPE (tem), 1)),
9692 			      build_int_cst (TREE_TYPE (tem), 0));
9693 	}
9694       /* Fold ~X & 1 as (X & 1) == 0.  */
9695       if (TREE_CODE (arg0) == BIT_NOT_EXPR
9696 	  && integer_onep (arg1))
9697 	{
9698 	  tem = TREE_OPERAND (arg0, 0);
9699 	  return fold_build2 (EQ_EXPR, type,
9700 			      fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9701 					   build_int_cst (TREE_TYPE (tem), 1)),
9702 			      build_int_cst (TREE_TYPE (tem), 0));
9703 	}
9704 
9705       /* Fold (X ^ Y) & Y as ~X & Y.  */
9706       if (TREE_CODE (arg0) == BIT_XOR_EXPR
9707 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9708 	{
9709 	  tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9710 	  return fold_build2 (BIT_AND_EXPR, type,
9711 			      fold_build1 (BIT_NOT_EXPR, type, tem),
9712 			      fold_convert (type, arg1));
9713 	}
9714       /* Fold (X ^ Y) & X as ~Y & X.  */
9715       if (TREE_CODE (arg0) == BIT_XOR_EXPR
9716 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9717 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9718 	{
9719 	  tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9720 	  return fold_build2 (BIT_AND_EXPR, type,
9721 			      fold_build1 (BIT_NOT_EXPR, type, tem),
9722 			      fold_convert (type, arg1));
9723 	}
9724       /* Fold X & (X ^ Y) as X & ~Y.  */
9725       if (TREE_CODE (arg1) == BIT_XOR_EXPR
9726 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9727 	{
9728 	  tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9729 	  return fold_build2 (BIT_AND_EXPR, type,
9730 			      fold_convert (type, arg0),
9731 			      fold_build1 (BIT_NOT_EXPR, type, tem));
9732 	}
9733       /* Fold X & (Y ^ X) as ~Y & X.  */
9734       if (TREE_CODE (arg1) == BIT_XOR_EXPR
9735 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9736 	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9737 	{
9738 	  tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9739 	  return fold_build2 (BIT_AND_EXPR, type,
9740 			      fold_build1 (BIT_NOT_EXPR, type, tem),
9741 			      fold_convert (type, arg0));
9742 	}
9743 
9744       t1 = distribute_bit_expr (code, type, arg0, arg1);
9745       if (t1 != NULL_TREE)
9746 	return t1;
9747       /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
9748       if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9749 	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9750 	{
9751 	  unsigned int prec
9752 	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9753 
9754 	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9755 	      && (~TREE_INT_CST_LOW (arg1)
9756 		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9757 	    return fold_convert (type, TREE_OPERAND (arg0, 0));
9758 	}
9759 
9760       /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9761 
9762 	 This results in more efficient code for machines without a NOR
9763 	 instruction.  Combine will canonicalize to the first form
9764 	 which will allow use of NOR instructions provided by the
9765 	 backend if they exist.  */
9766       if (TREE_CODE (arg0) == BIT_NOT_EXPR
9767 	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
9768 	{
9769 	  return fold_build1 (BIT_NOT_EXPR, type,
9770 			      build2 (BIT_IOR_EXPR, type,
9771 				      TREE_OPERAND (arg0, 0),
9772 				      TREE_OPERAND (arg1, 0)));
9773 	}
9774 
9775       goto associate;
9776 
9777     case RDIV_EXPR:
9778       /* Don't touch a floating-point divide by zero unless the mode
9779 	 of the constant can represent infinity.  */
9780       if (TREE_CODE (arg1) == REAL_CST
9781 	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9782 	  && real_zerop (arg1))
9783 	return NULL_TREE;
9784 
9785       /* Optimize A / A to 1.0 if we don't care about
9786 	 NaNs or Infinities.  Skip the transformation
9787 	 for non-real operands.  */
9788       if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9789 	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9790 	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9791 	  && operand_equal_p (arg0, arg1, 0))
9792 	{
9793 	  tree r = build_real (TREE_TYPE (arg0), dconst1);
9794 
9795 	  return omit_two_operands (type, r, arg0, arg1);
9796 	}
9797 
9798       /* The complex version of the above A / A optimization.  */
9799       if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9800 	  && operand_equal_p (arg0, arg1, 0))
9801 	{
9802 	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9803 	  if (! HONOR_NANS (TYPE_MODE (elem_type))
9804 	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9805 	    {
9806 	      tree r = build_real (elem_type, dconst1);
9807 	      /* omit_two_operands will call fold_convert for us.  */
9808 	      return omit_two_operands (type, r, arg0, arg1);
9809 	    }
9810 	}
9811 
9812       /* (-A) / (-B) -> A / B  */
9813       if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9814 	return fold_build2 (RDIV_EXPR, type,
9815 			    TREE_OPERAND (arg0, 0),
9816 			    negate_expr (arg1));
9817       if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9818 	return fold_build2 (RDIV_EXPR, type,
9819 			    negate_expr (arg0),
9820 			    TREE_OPERAND (arg1, 0));
9821 
9822       /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
9823       if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9824 	  && real_onep (arg1))
9825 	return non_lvalue (fold_convert (type, arg0));
9826 
9827       /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
9828       if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9829 	  && real_minus_onep (arg1))
9830 	return non_lvalue (fold_convert (type, negate_expr (arg0)));
9831 
9832       /* If ARG1 is a constant, we can convert this to a multiply by the
9833 	 reciprocal.  This does not have the same rounding properties,
9834 	 so only do this if -funsafe-math-optimizations.  We can actually
9835 	 always safely do it if ARG1 is a power of two, but it's hard to
9836 	 tell if it is or not in a portable manner.  */
9837       if (TREE_CODE (arg1) == REAL_CST)
9838 	{
9839 	  if (flag_unsafe_math_optimizations
9840 	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
9841 					  arg1, 0)))
9842 	    return fold_build2 (MULT_EXPR, type, arg0, tem);
9843 	  /* Find the reciprocal if optimizing and the result is exact.  */
9844 	  if (optimize)
9845 	    {
9846 	      REAL_VALUE_TYPE r;
9847 	      r = TREE_REAL_CST (arg1);
9848 	      if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
9849 		{
9850 		  tem = build_real (type, r);
9851 		  return fold_build2 (MULT_EXPR, type,
9852 				      fold_convert (type, arg0), tem);
9853 		}
9854 	    }
9855 	}
9856       /* Convert A/B/C to A/(B*C).  */
9857       if (flag_unsafe_math_optimizations
9858 	  && TREE_CODE (arg0) == RDIV_EXPR)
9859 	return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9860 			    fold_build2 (MULT_EXPR, type,
9861 					 TREE_OPERAND (arg0, 1), arg1));
9862 
9863       /* Convert A/(B/C) to (A/B)*C.  */
9864       if (flag_unsafe_math_optimizations
9865 	  && TREE_CODE (arg1) == RDIV_EXPR)
9866 	return fold_build2 (MULT_EXPR, type,
9867 			    fold_build2 (RDIV_EXPR, type, arg0,
9868 					 TREE_OPERAND (arg1, 0)),
9869 			    TREE_OPERAND (arg1, 1));
9870 
9871       /* Convert C1/(X*C2) into (C1/C2)/X.  */
9872       if (flag_unsafe_math_optimizations
9873 	  && TREE_CODE (arg1) == MULT_EXPR
9874 	  && TREE_CODE (arg0) == REAL_CST
9875 	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9876 	{
9877 	  tree tem = const_binop (RDIV_EXPR, arg0,
9878 				  TREE_OPERAND (arg1, 1), 0);
9879 	  if (tem)
9880 	    return fold_build2 (RDIV_EXPR, type, tem,
9881 				TREE_OPERAND (arg1, 0));
9882 	}
9883 
9884       if (flag_unsafe_math_optimizations)
9885 	{
9886 	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9887 	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9888 
9889 	  /* Optimize sin(x)/cos(x) as tan(x).  */
9890 	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9891 	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9892 	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9893 	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9894 				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9895 	    {
9896 	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9897 
9898 	      if (tanfn != NULL_TREE)
9899 		return build_function_call_expr (tanfn,
9900 						 TREE_OPERAND (arg0, 1));
9901 	    }
9902 
9903 	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
9904 	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9905 	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9906 	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9907 	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9908 				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9909 	    {
9910 	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9911 
9912 	      if (tanfn != NULL_TREE)
9913 		{
9914 		  tree tmp = TREE_OPERAND (arg0, 1);
9915 		  tmp = build_function_call_expr (tanfn, tmp);
9916 		  return fold_build2 (RDIV_EXPR, type,
9917 				      build_real (type, dconst1), tmp);
9918 		}
9919 	    }
9920 
9921  	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9922 	     NaNs or Infinities.  */
9923  	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9924  	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9925  	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9926 	    {
9927 	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9928 	      tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9929 
9930 	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9931 		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9932 		  && operand_equal_p (arg00, arg01, 0))
9933 		{
9934 		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9935 
9936 		  if (cosfn != NULL_TREE)
9937 		    return build_function_call_expr (cosfn,
9938 						     TREE_OPERAND (arg0, 1));
9939 		}
9940 	    }
9941 
9942  	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9943 	     NaNs or Infinities.  */
9944  	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9945  	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9946  	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9947 	    {
9948 	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9949 	      tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9950 
9951 	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9952 		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9953 		  && operand_equal_p (arg00, arg01, 0))
9954 		{
9955 		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9956 
9957 		  if (cosfn != NULL_TREE)
9958 		    {
9959 		      tree tmp = TREE_OPERAND (arg0, 1);
9960 		      tmp = build_function_call_expr (cosfn, tmp);
9961 		      return fold_build2 (RDIV_EXPR, type,
9962 					  build_real (type, dconst1),
9963 					  tmp);
9964 		    }
9965 		}
9966 	    }
9967 
9968 	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
9969 	  if (fcode0 == BUILT_IN_POW
9970 	      || fcode0 == BUILT_IN_POWF
9971 	      || fcode0 == BUILT_IN_POWL)
9972 	    {
9973 	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9974 	      tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9975 	      if (TREE_CODE (arg01) == REAL_CST
9976 		  && ! TREE_CONSTANT_OVERFLOW (arg01)
9977 		  && operand_equal_p (arg1, arg00, 0))
9978 		{
9979 		  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9980 		  REAL_VALUE_TYPE c;
9981 		  tree arg, arglist;
9982 
9983 		  c = TREE_REAL_CST (arg01);
9984 		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9985 		  arg = build_real (type, c);
9986 		  arglist = build_tree_list (NULL_TREE, arg);
9987 		  arglist = tree_cons (NULL_TREE, arg1, arglist);
9988 		  return build_function_call_expr (powfn, arglist);
9989 		}
9990 	    }
9991 
9992 	  /* Optimize x/expN(y) into x*expN(-y).  */
9993 	  if (BUILTIN_EXPONENT_P (fcode1))
9994 	    {
9995 	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9996 	      tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
9997 	      tree arglist = build_tree_list (NULL_TREE,
9998 					      fold_convert (type, arg));
9999 	      arg1 = build_function_call_expr (expfn, arglist);
10000 	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
10001 	    }
10002 
10003 	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
10004 	  if (fcode1 == BUILT_IN_POW
10005 	      || fcode1 == BUILT_IN_POWF
10006 	      || fcode1 == BUILT_IN_POWL)
10007 	    {
10008 	      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10009 	      tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10010 	      tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10011 	      tree neg11 = fold_convert (type, negate_expr (arg11));
10012 	      tree arglist = tree_cons(NULL_TREE, arg10,
10013 				       build_tree_list (NULL_TREE, neg11));
10014 	      arg1 = build_function_call_expr (powfn, arglist);
10015 	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
10016 	    }
10017 	}
10018       return NULL_TREE;
10019 
10020     case TRUNC_DIV_EXPR:
10021     case FLOOR_DIV_EXPR:
10022       /* Simplify A / (B << N) where A and B are positive and B is
10023 	 a power of 2, to A >> (N + log2(B)).  */
10024       strict_overflow_p = false;
10025       if (TREE_CODE (arg1) == LSHIFT_EXPR
10026 	  && (TYPE_UNSIGNED (type)
10027 	      || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10028 	{
10029 	  tree sval = TREE_OPERAND (arg1, 0);
10030 	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10031 	    {
10032 	      tree sh_cnt = TREE_OPERAND (arg1, 1);
10033 	      unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10034 
10035 	      if (strict_overflow_p)
10036 		fold_overflow_warning (("assuming signed overflow does not "
10037 					"occur when simplifying A / (B << N)"),
10038 				       WARN_STRICT_OVERFLOW_MISC);
10039 
10040 	      sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10041 				    sh_cnt, build_int_cst (NULL_TREE, pow2));
10042 	      return fold_build2 (RSHIFT_EXPR, type,
10043 				  fold_convert (type, arg0), sh_cnt);
10044 	    }
10045 	}
10046       /* Fall thru */
10047 
10048     case ROUND_DIV_EXPR:
10049     case CEIL_DIV_EXPR:
10050     case EXACT_DIV_EXPR:
10051       if (integer_onep (arg1))
10052 	return non_lvalue (fold_convert (type, arg0));
10053       if (integer_zerop (arg1))
10054 	return NULL_TREE;
10055       /* X / -1 is -X.  */
10056       if (!TYPE_UNSIGNED (type)
10057 	  && TREE_CODE (arg1) == INTEGER_CST
10058 	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10059 	  && TREE_INT_CST_HIGH (arg1) == -1)
10060 	return fold_convert (type, negate_expr (arg0));
10061 
10062       /* Convert -A / -B to A / B when the type is signed and overflow is
10063 	 undefined.  */
10064       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10065 	  && TREE_CODE (arg0) == NEGATE_EXPR
10066 	  && negate_expr_p (arg1))
10067 	{
10068 	  if (INTEGRAL_TYPE_P (type))
10069 	    fold_overflow_warning (("assuming signed overflow does not occur "
10070 				    "when distributing negation across "
10071 				    "division"),
10072 				   WARN_STRICT_OVERFLOW_MISC);
10073 	  return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10074 			      negate_expr (arg1));
10075 	}
10076       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10077 	  && TREE_CODE (arg1) == NEGATE_EXPR
10078 	  && negate_expr_p (arg0))
10079 	{
10080 	  if (INTEGRAL_TYPE_P (type))
10081 	    fold_overflow_warning (("assuming signed overflow does not occur "
10082 				    "when distributing negation across "
10083 				    "division"),
10084 				   WARN_STRICT_OVERFLOW_MISC);
10085 	  return fold_build2 (code, type, negate_expr (arg0),
10086 			      TREE_OPERAND (arg1, 0));
10087 	}
10088 
10089       /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10090 	 operation, EXACT_DIV_EXPR.
10091 
10092 	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10093 	 At one time others generated faster code, it's not clear if they do
10094 	 after the last round to changes to the DIV code in expmed.c.  */
10095       if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10096 	  && multiple_of_p (type, arg0, arg1))
10097 	return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10098 
10099       strict_overflow_p = false;
10100       if (TREE_CODE (arg1) == INTEGER_CST
10101 	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10102 					 &strict_overflow_p)))
10103 	{
10104 	  if (strict_overflow_p)
10105 	    fold_overflow_warning (("assuming signed overflow does not occur "
10106 				    "when simplifying division"),
10107 				   WARN_STRICT_OVERFLOW_MISC);
10108 	  return fold_convert (type, tem);
10109 	}
10110 
10111       return NULL_TREE;
10112 
10113     case CEIL_MOD_EXPR:
10114     case FLOOR_MOD_EXPR:
10115     case ROUND_MOD_EXPR:
10116     case TRUNC_MOD_EXPR:
10117       /* X % 1 is always zero, but be sure to preserve any side
10118 	 effects in X.  */
10119       if (integer_onep (arg1))
10120 	return omit_one_operand (type, integer_zero_node, arg0);
10121 
10122       /* X % 0, return X % 0 unchanged so that we can get the
10123 	 proper warnings and errors.  */
10124       if (integer_zerop (arg1))
10125 	return NULL_TREE;
10126 
10127       /* 0 % X is always zero, but be sure to preserve any side
10128 	 effects in X.  Place this after checking for X == 0.  */
10129       if (integer_zerop (arg0))
10130 	return omit_one_operand (type, integer_zero_node, arg1);
10131 
10132       /* X % -1 is zero.  */
10133       if (!TYPE_UNSIGNED (type)
10134 	  && TREE_CODE (arg1) == INTEGER_CST
10135 	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10136 	  && TREE_INT_CST_HIGH (arg1) == -1)
10137 	return omit_one_operand (type, integer_zero_node, arg0);
10138 
10139       /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10140          i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
10141       strict_overflow_p = false;
10142       if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10143 	  && (TYPE_UNSIGNED (type)
10144 	      || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10145 	{
10146 	  tree c = arg1;
10147 	  /* Also optimize A % (C << N)  where C is a power of 2,
10148 	     to A & ((C << N) - 1).  */
10149 	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
10150 	    c = TREE_OPERAND (arg1, 0);
10151 
10152 	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10153 	    {
10154 	      tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
10155 				       arg1, integer_one_node);
10156 	      if (strict_overflow_p)
10157 		fold_overflow_warning (("assuming signed overflow does not "
10158 					"occur when simplifying "
10159 					"X % (power of two)"),
10160 				       WARN_STRICT_OVERFLOW_MISC);
10161 	      return fold_build2 (BIT_AND_EXPR, type,
10162 				  fold_convert (type, arg0),
10163 				  fold_convert (type, mask));
10164 	    }
10165 	}
10166 
10167       /* X % -C is the same as X % C.  */
10168       if (code == TRUNC_MOD_EXPR
10169 	  && !TYPE_UNSIGNED (type)
10170 	  && TREE_CODE (arg1) == INTEGER_CST
10171 	  && !TREE_CONSTANT_OVERFLOW (arg1)
10172 	  && TREE_INT_CST_HIGH (arg1) < 0
10173 	  && !TYPE_OVERFLOW_TRAPS (type)
10174 	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
10175 	  && !sign_bit_p (arg1, arg1))
10176 	return fold_build2 (code, type, fold_convert (type, arg0),
10177 			    fold_convert (type, negate_expr (arg1)));
10178 
10179       /* X % -Y is the same as X % Y.  */
10180       if (code == TRUNC_MOD_EXPR
10181 	  && !TYPE_UNSIGNED (type)
10182 	  && TREE_CODE (arg1) == NEGATE_EXPR
10183 	  && !TYPE_OVERFLOW_TRAPS (type))
10184 	return fold_build2 (code, type, fold_convert (type, arg0),
10185 			    fold_convert (type, TREE_OPERAND (arg1, 0)));
10186 
10187       if (TREE_CODE (arg1) == INTEGER_CST
10188 	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10189 					 &strict_overflow_p)))
10190 	{
10191 	  if (strict_overflow_p)
10192 	    fold_overflow_warning (("assuming signed overflow does not occur "
10193 				    "when simplifying modulos"),
10194 				   WARN_STRICT_OVERFLOW_MISC);
10195 	  return fold_convert (type, tem);
10196 	}
10197 
10198       return NULL_TREE;
10199 
10200     case LROTATE_EXPR:
10201     case RROTATE_EXPR:
10202       if (integer_all_onesp (arg0))
10203 	return omit_one_operand (type, arg0, arg1);
10204       goto shift;
10205 
10206     case RSHIFT_EXPR:
10207       /* Optimize -1 >> x for arithmetic right shifts.  */
10208       if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10209 	return omit_one_operand (type, arg0, arg1);
10210       /* ... fall through ...  */
10211 
10212     case LSHIFT_EXPR:
10213     shift:
10214       if (integer_zerop (arg1))
10215 	return non_lvalue (fold_convert (type, arg0));
10216       if (integer_zerop (arg0))
10217 	return omit_one_operand (type, arg0, arg1);
10218 
10219       /* Since negative shift count is not well-defined,
10220 	 don't try to compute it in the compiler.  */
10221       if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10222 	return NULL_TREE;
10223 
10224       /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
10225       if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10226 	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10227 	  && host_integerp (TREE_OPERAND (arg0, 1), false)
10228 	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10229 	{
10230 	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10231 			       + TREE_INT_CST_LOW (arg1));
10232 
10233 	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10234 	     being well defined.  */
10235 	  if (low >= TYPE_PRECISION (type))
10236 	    {
10237 	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10238 	        low = low % TYPE_PRECISION (type);
10239 	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10240 	        return build_int_cst (type, 0);
10241 	      else
10242 		low = TYPE_PRECISION (type) - 1;
10243 	    }
10244 
10245 	  return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10246 			      build_int_cst (type, low));
10247 	}
10248 
10249       /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10250          into x & ((unsigned)-1 >> c) for unsigned types.  */
10251       if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10252            || (TYPE_UNSIGNED (type)
10253 	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10254 	  && host_integerp (arg1, false)
10255 	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10256 	  && host_integerp (TREE_OPERAND (arg0, 1), false)
10257 	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10258 	{
10259 	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10260 	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10261 	  tree lshift;
10262 	  tree arg00;
10263 
10264 	  if (low0 == low1)
10265 	    {
10266 	      arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10267 
10268 	      lshift = build_int_cst (type, -1);
10269 	      lshift = int_const_binop (code, lshift, arg1, 0);
10270 
10271 	      return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10272 	    }
10273 	}
10274 
10275       /* Rewrite an LROTATE_EXPR by a constant into an
10276 	 RROTATE_EXPR by a new constant.  */
10277       if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10278 	{
10279 	  tree tem = build_int_cst (NULL_TREE,
10280 				    GET_MODE_BITSIZE (TYPE_MODE (type)));
10281 	  tem = fold_convert (TREE_TYPE (arg1), tem);
10282 	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10283 	  return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10284 	}
10285 
10286       /* If we have a rotate of a bit operation with the rotate count and
10287 	 the second operand of the bit operation both constant,
10288 	 permute the two operations.  */
10289       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10290 	  && (TREE_CODE (arg0) == BIT_AND_EXPR
10291 	      || TREE_CODE (arg0) == BIT_IOR_EXPR
10292 	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
10293 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10294 	return fold_build2 (TREE_CODE (arg0), type,
10295 			    fold_build2 (code, type,
10296 					 TREE_OPERAND (arg0, 0), arg1),
10297 			    fold_build2 (code, type,
10298 					 TREE_OPERAND (arg0, 1), arg1));
10299 
10300       /* Two consecutive rotates adding up to the width of the mode can
10301 	 be ignored.  */
10302       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10303 	  && TREE_CODE (arg0) == RROTATE_EXPR
10304 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10305 	  && TREE_INT_CST_HIGH (arg1) == 0
10306 	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10307 	  && ((TREE_INT_CST_LOW (arg1)
10308 	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10309 	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10310 	return TREE_OPERAND (arg0, 0);
10311 
10312       return NULL_TREE;
10313 
10314     case MIN_EXPR:
10315       if (operand_equal_p (arg0, arg1, 0))
10316 	return omit_one_operand (type, arg0, arg1);
10317       if (INTEGRAL_TYPE_P (type)
10318 	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10319 	return omit_one_operand (type, arg1, arg0);
10320       tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10321       if (tem)
10322 	return tem;
10323       goto associate;
10324 
10325     case MAX_EXPR:
10326       if (operand_equal_p (arg0, arg1, 0))
10327 	return omit_one_operand (type, arg0, arg1);
10328       if (INTEGRAL_TYPE_P (type)
10329 	  && TYPE_MAX_VALUE (type)
10330 	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10331 	return omit_one_operand (type, arg1, arg0);
10332       tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10333       if (tem)
10334 	return tem;
10335       goto associate;
10336 
10337     case TRUTH_ANDIF_EXPR:
10338       /* Note that the operands of this must be ints
10339 	 and their values must be 0 or 1.
10340 	 ("true" is a fixed value perhaps depending on the language.)  */
10341       /* If first arg is constant zero, return it.  */
10342       if (integer_zerop (arg0))
10343 	return fold_convert (type, arg0);
10344     case TRUTH_AND_EXPR:
10345       /* If either arg is constant true, drop it.  */
10346       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10347 	return non_lvalue (fold_convert (type, arg1));
10348       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10349 	  /* Preserve sequence points.  */
10350 	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10351 	return non_lvalue (fold_convert (type, arg0));
10352       /* If second arg is constant zero, result is zero, but first arg
10353 	 must be evaluated.  */
10354       if (integer_zerop (arg1))
10355 	return omit_one_operand (type, arg1, arg0);
10356       /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10357 	 case will be handled here.  */
10358       if (integer_zerop (arg0))
10359 	return omit_one_operand (type, arg0, arg1);
10360 
10361       /* !X && X is always false.  */
10362       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10363 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10364 	return omit_one_operand (type, integer_zero_node, arg1);
10365       /* X && !X is always false.  */
10366       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10367 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10368 	return omit_one_operand (type, integer_zero_node, arg0);
10369 
10370       /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
10371 	 means A >= Y && A != MAX, but in this case we know that
10372 	 A < X <= MAX.  */
10373 
10374       if (!TREE_SIDE_EFFECTS (arg0)
10375 	  && !TREE_SIDE_EFFECTS (arg1))
10376 	{
10377 	  tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10378 	  if (tem && !operand_equal_p (tem, arg0, 0))
10379 	    return fold_build2 (code, type, tem, arg1);
10380 
10381 	  tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10382 	  if (tem && !operand_equal_p (tem, arg1, 0))
10383 	    return fold_build2 (code, type, arg0, tem);
10384 	}
10385 
10386     truth_andor:
10387       /* We only do these simplifications if we are optimizing.  */
10388       if (!optimize)
10389 	return NULL_TREE;
10390 
10391       /* Check for things like (A || B) && (A || C).  We can convert this
10392 	 to A || (B && C).  Note that either operator can be any of the four
10393 	 truth and/or operations and the transformation will still be
10394 	 valid.   Also note that we only care about order for the
10395 	 ANDIF and ORIF operators.  If B contains side effects, this
10396 	 might change the truth-value of A.  */
10397       if (TREE_CODE (arg0) == TREE_CODE (arg1)
10398 	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10399 	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10400 	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
10401 	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10402 	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10403 	{
10404 	  tree a00 = TREE_OPERAND (arg0, 0);
10405 	  tree a01 = TREE_OPERAND (arg0, 1);
10406 	  tree a10 = TREE_OPERAND (arg1, 0);
10407 	  tree a11 = TREE_OPERAND (arg1, 1);
10408 	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10409 			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10410 			     && (code == TRUTH_AND_EXPR
10411 				 || code == TRUTH_OR_EXPR));
10412 
10413 	  if (operand_equal_p (a00, a10, 0))
10414 	    return fold_build2 (TREE_CODE (arg0), type, a00,
10415 				fold_build2 (code, type, a01, a11));
10416 	  else if (commutative && operand_equal_p (a00, a11, 0))
10417 	    return fold_build2 (TREE_CODE (arg0), type, a00,
10418 				fold_build2 (code, type, a01, a10));
10419 	  else if (commutative && operand_equal_p (a01, a10, 0))
10420 	    return fold_build2 (TREE_CODE (arg0), type, a01,
10421 				fold_build2 (code, type, a00, a11));
10422 
10423 	  /* This case if tricky because we must either have commutative
10424 	     operators or else A10 must not have side-effects.  */
10425 
10426 	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10427 		   && operand_equal_p (a01, a11, 0))
10428 	    return fold_build2 (TREE_CODE (arg0), type,
10429 				fold_build2 (code, type, a00, a10),
10430 				a01);
10431 	}
10432 
10433       /* See if we can build a range comparison.  */
10434       if (0 != (tem = fold_range_test (code, type, op0, op1)))
10435 	return tem;
10436 
10437       /* Check for the possibility of merging component references.  If our
10438 	 lhs is another similar operation, try to merge its rhs with our
10439 	 rhs.  Then try to merge our lhs and rhs.  */
10440       if (TREE_CODE (arg0) == code
10441 	  && 0 != (tem = fold_truthop (code, type,
10442 				       TREE_OPERAND (arg0, 1), arg1)))
10443 	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10444 
10445       if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10446 	return tem;
10447 
10448       return NULL_TREE;
10449 
10450     case TRUTH_ORIF_EXPR:
10451       /* Note that the operands of this must be ints
10452 	 and their values must be 0 or true.
10453 	 ("true" is a fixed value perhaps depending on the language.)  */
10454       /* If first arg is constant true, return it.  */
10455       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10456 	return fold_convert (type, arg0);
10457     case TRUTH_OR_EXPR:
10458       /* If either arg is constant zero, drop it.  */
10459       if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10460 	return non_lvalue (fold_convert (type, arg1));
10461       if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10462 	  /* Preserve sequence points.  */
10463 	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10464 	return non_lvalue (fold_convert (type, arg0));
10465       /* If second arg is constant true, result is true, but we must
10466 	 evaluate first arg.  */
10467       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10468 	return omit_one_operand (type, arg1, arg0);
10469       /* Likewise for first arg, but note this only occurs here for
10470 	 TRUTH_OR_EXPR.  */
10471       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10472 	return omit_one_operand (type, arg0, arg1);
10473 
10474       /* !X || X is always true.  */
10475       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10476 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10477 	return omit_one_operand (type, integer_one_node, arg1);
10478       /* X || !X is always true.  */
10479       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10480 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10481 	return omit_one_operand (type, integer_one_node, arg0);
10482 
10483       goto truth_andor;
10484 
10485     case TRUTH_XOR_EXPR:
10486       /* If the second arg is constant zero, drop it.  */
10487       if (integer_zerop (arg1))
10488 	return non_lvalue (fold_convert (type, arg0));
10489       /* If the second arg is constant true, this is a logical inversion.  */
10490       if (integer_onep (arg1))
10491 	{
10492 	  /* Only call invert_truthvalue if operand is a truth value.  */
10493 	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10494 	    tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10495 	  else
10496 	    tem = invert_truthvalue (arg0);
10497 	  return non_lvalue (fold_convert (type, tem));
10498 	}
10499       /* Identical arguments cancel to zero.  */
10500       if (operand_equal_p (arg0, arg1, 0))
10501 	return omit_one_operand (type, integer_zero_node, arg0);
10502 
10503       /* !X ^ X is always true.  */
10504       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10505 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10506 	return omit_one_operand (type, integer_one_node, arg1);
10507 
10508       /* X ^ !X is always true.  */
10509       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10510 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10511 	return omit_one_operand (type, integer_one_node, arg0);
10512 
10513       return NULL_TREE;
10514 
10515     case EQ_EXPR:
10516     case NE_EXPR:
10517       tem = fold_comparison (code, type, op0, op1);
10518       if (tem != NULL_TREE)
10519 	return tem;
10520 
10521       /* bool_var != 0 becomes bool_var. */
10522       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10523           && code == NE_EXPR)
10524         return non_lvalue (fold_convert (type, arg0));
10525 
10526       /* bool_var == 1 becomes bool_var. */
10527       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10528           && code == EQ_EXPR)
10529         return non_lvalue (fold_convert (type, arg0));
10530 
10531       /* bool_var != 1 becomes !bool_var. */
10532       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10533           && code == NE_EXPR)
10534         return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10535 
10536       /* bool_var == 0 becomes !bool_var. */
10537       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10538           && code == EQ_EXPR)
10539         return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10540 
10541       /*  ~a != C becomes a != ~C where C is a constant.  Likewise for ==.  */
10542       if (TREE_CODE (arg0) == BIT_NOT_EXPR
10543 	  && TREE_CODE (arg1) == INTEGER_CST)
10544 	{
10545 	  tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
10546 	  return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10547 			      fold_build1 (BIT_NOT_EXPR, cmp_type,
10548 					   fold_convert (cmp_type, arg1)));
10549 	}
10550 
10551       /* If this is an equality comparison of the address of a non-weak
10552 	 object against zero, then we know the result.  */
10553       if (TREE_CODE (arg0) == ADDR_EXPR
10554 	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10555 	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10556 	  && integer_zerop (arg1))
10557 	return constant_boolean_node (code != EQ_EXPR, type);
10558 
10559       /* If this is an equality comparison of the address of two non-weak,
10560 	 unaliased symbols neither of which are extern (since we do not
10561 	 have access to attributes for externs), then we know the result.  */
10562       if (TREE_CODE (arg0) == ADDR_EXPR
10563 	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10564 	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10565 	  && ! lookup_attribute ("alias",
10566 				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10567 	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10568 	  && TREE_CODE (arg1) == ADDR_EXPR
10569 	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10570 	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10571 	  && ! lookup_attribute ("alias",
10572 				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10573 	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10574 	{
10575 	  /* We know that we're looking at the address of two
10576 	     non-weak, unaliased, static _DECL nodes.
10577 
10578 	     It is both wasteful and incorrect to call operand_equal_p
10579 	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
10580 	     all we need to do is test pointer equality for the arguments
10581 	     to the two ADDR_EXPR nodes.  It is incorrect to use
10582 	     operand_equal_p as that function is NOT equivalent to a
10583 	     C equality test.  It can in fact return false for two
10584 	     objects which would test as equal using the C equality
10585 	     operator.  */
10586 	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10587 	  return constant_boolean_node (equal
10588 				        ? code == EQ_EXPR : code != EQ_EXPR,
10589 				        type);
10590 	}
10591 
10592       /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10593 	 a MINUS_EXPR of a constant, we can convert it into a comparison with
10594 	 a revised constant as long as no overflow occurs.  */
10595       if (TREE_CODE (arg1) == INTEGER_CST
10596 	  && (TREE_CODE (arg0) == PLUS_EXPR
10597 	      || TREE_CODE (arg0) == MINUS_EXPR)
10598 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10599 	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10600 				      ? MINUS_EXPR : PLUS_EXPR,
10601 				      fold_convert (TREE_TYPE (arg0), arg1),
10602 				      TREE_OPERAND (arg0, 1), 0))
10603 	  && ! TREE_CONSTANT_OVERFLOW (tem))
10604 	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10605 
10606       /* Similarly for a NEGATE_EXPR.  */
10607       if (TREE_CODE (arg0) == NEGATE_EXPR
10608 	  && TREE_CODE (arg1) == INTEGER_CST
10609 	  && 0 != (tem = negate_expr (arg1))
10610 	  && TREE_CODE (tem) == INTEGER_CST
10611 	  && ! TREE_CONSTANT_OVERFLOW (tem))
10612 	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10613 
10614       /* If we have X - Y == 0, we can convert that to X == Y and similarly
10615 	 for !=.  Don't do this for ordered comparisons due to overflow.  */
10616       if (TREE_CODE (arg0) == MINUS_EXPR
10617 	  && integer_zerop (arg1))
10618 	return fold_build2 (code, type,
10619 			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10620 
10621       /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
10622       if (TREE_CODE (arg0) == ABS_EXPR
10623 	  && (integer_zerop (arg1) || real_zerop (arg1)))
10624 	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10625 
10626       /* If this is an EQ or NE comparison with zero and ARG0 is
10627 	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
10628 	 two operations, but the latter can be done in one less insn
10629 	 on machines that have only two-operand insns or on which a
10630 	 constant cannot be the first operand.  */
10631       if (TREE_CODE (arg0) == BIT_AND_EXPR
10632 	  && integer_zerop (arg1))
10633 	{
10634 	  tree arg00 = TREE_OPERAND (arg0, 0);
10635 	  tree arg01 = TREE_OPERAND (arg0, 1);
10636 	  if (TREE_CODE (arg00) == LSHIFT_EXPR
10637 	      && integer_onep (TREE_OPERAND (arg00, 0)))
10638 	    return
10639 	      fold_build2 (code, type,
10640 			   build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10641 				   build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10642 					   arg01, TREE_OPERAND (arg00, 1)),
10643 				   fold_convert (TREE_TYPE (arg0),
10644 						 integer_one_node)),
10645 			   arg1);
10646 	  else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10647 		   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10648 	    return
10649 	      fold_build2 (code, type,
10650 			   build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10651 				   build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10652 					   arg00, TREE_OPERAND (arg01, 1)),
10653 				   fold_convert (TREE_TYPE (arg0),
10654 						 integer_one_node)),
10655 			   arg1);
10656 	}
10657 
10658       /* If this is an NE or EQ comparison of zero against the result of a
10659 	 signed MOD operation whose second operand is a power of 2, make
10660 	 the MOD operation unsigned since it is simpler and equivalent.  */
10661       if (integer_zerop (arg1)
10662 	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10663 	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10664 	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
10665 	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10666 	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10667 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
10668 	{
10669 	  tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10670 	  tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10671 				     fold_convert (newtype,
10672 						   TREE_OPERAND (arg0, 0)),
10673 				     fold_convert (newtype,
10674 						   TREE_OPERAND (arg0, 1)));
10675 
10676 	  return fold_build2 (code, type, newmod,
10677 			      fold_convert (newtype, arg1));
10678 	}
10679 
10680       /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10681 	 C1 is a valid shift constant, and C2 is a power of two, i.e.
10682 	 a single bit.  */
10683       if (TREE_CODE (arg0) == BIT_AND_EXPR
10684 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10685 	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10686 	     == INTEGER_CST
10687 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10688 	  && integer_zerop (arg1))
10689 	{
10690 	  tree itype = TREE_TYPE (arg0);
10691 	  unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10692 	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10693 
10694 	  /* Check for a valid shift count.  */
10695 	  if (TREE_INT_CST_HIGH (arg001) == 0
10696 	      && TREE_INT_CST_LOW (arg001) < prec)
10697 	    {
10698 	      tree arg01 = TREE_OPERAND (arg0, 1);
10699 	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10700 	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10701 	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10702 		 can be rewritten as (X & (C2 << C1)) != 0.  */
10703 	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10704 		{
10705 		  tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10706 		  tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10707 		  return fold_build2 (code, type, tem, arg1);
10708 		}
10709 	      /* Otherwise, for signed (arithmetic) shifts,
10710 		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10711 		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
10712 	      else if (!TYPE_UNSIGNED (itype))
10713 		return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10714 				    arg000, build_int_cst (itype, 0));
10715 	      /* Otherwise, of unsigned (logical) shifts,
10716 		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10717 		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
10718 	      else
10719 		return omit_one_operand (type,
10720 					 code == EQ_EXPR ? integer_one_node
10721 							 : integer_zero_node,
10722 					 arg000);
10723 	    }
10724 	}
10725 
10726       /* If this is an NE comparison of zero with an AND of one, remove the
10727 	 comparison since the AND will give the correct value.  */
10728       if (code == NE_EXPR
10729 	  && integer_zerop (arg1)
10730 	  && TREE_CODE (arg0) == BIT_AND_EXPR
10731 	  && integer_onep (TREE_OPERAND (arg0, 1)))
10732 	return fold_convert (type, arg0);
10733 
10734       /* If we have (A & C) == C where C is a power of 2, convert this into
10735 	 (A & C) != 0.  Similarly for NE_EXPR.  */
10736       if (TREE_CODE (arg0) == BIT_AND_EXPR
10737 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10738 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10739 	return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10740 			    arg0, fold_convert (TREE_TYPE (arg0),
10741 						integer_zero_node));
10742 
10743       /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10744 	 bit, then fold the expression into A < 0 or A >= 0.  */
10745       tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10746       if (tem)
10747 	return tem;
10748 
10749       /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10750 	 Similarly for NE_EXPR.  */
10751       if (TREE_CODE (arg0) == BIT_AND_EXPR
10752 	  && TREE_CODE (arg1) == INTEGER_CST
10753 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10754 	{
10755 	  tree notc = fold_build1 (BIT_NOT_EXPR,
10756 				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
10757 				   TREE_OPERAND (arg0, 1));
10758 	  tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10759 				       arg1, notc);
10760 	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10761 	  if (integer_nonzerop (dandnotc))
10762 	    return omit_one_operand (type, rslt, arg0);
10763 	}
10764 
10765       /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10766 	 Similarly for NE_EXPR.  */
10767       if (TREE_CODE (arg0) == BIT_IOR_EXPR
10768 	  && TREE_CODE (arg1) == INTEGER_CST
10769 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10770 	{
10771 	  tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10772 	  tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10773 				       TREE_OPERAND (arg0, 1), notd);
10774 	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10775 	  if (integer_nonzerop (candnotd))
10776 	    return omit_one_operand (type, rslt, arg0);
10777 	}
10778 
10779       /* If this is a comparison of a field, we may be able to simplify it.  */
10780       if (((TREE_CODE (arg0) == COMPONENT_REF
10781 	    && lang_hooks.can_use_bit_fields_p ())
10782 	   || TREE_CODE (arg0) == BIT_FIELD_REF)
10783 	  /* Handle the constant case even without -O
10784 	     to make sure the warnings are given.  */
10785 	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10786 	{
10787 	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10788 	  if (t1)
10789 	    return t1;
10790 	}
10791 
10792       /* Optimize comparisons of strlen vs zero to a compare of the
10793 	 first character of the string vs zero.  To wit,
10794 		strlen(ptr) == 0   =>  *ptr == 0
10795 		strlen(ptr) != 0   =>  *ptr != 0
10796 	 Other cases should reduce to one of these two (or a constant)
10797 	 due to the return value of strlen being unsigned.  */
10798       if (TREE_CODE (arg0) == CALL_EXPR
10799 	  && integer_zerop (arg1))
10800 	{
10801 	  tree fndecl = get_callee_fndecl (arg0);
10802 	  tree arglist;
10803 
10804 	  if (fndecl
10805 	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10806 	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10807 	      && (arglist = TREE_OPERAND (arg0, 1))
10808 	      && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10809 	      && ! TREE_CHAIN (arglist))
10810 	    {
10811 	      tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10812 	      return fold_build2 (code, type, iref,
10813 				  build_int_cst (TREE_TYPE (iref), 0));
10814 	    }
10815 	}
10816 
10817       /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10818 	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
10819       if (TREE_CODE (arg0) == RSHIFT_EXPR
10820 	  && integer_zerop (arg1)
10821 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10822 	{
10823 	  tree arg00 = TREE_OPERAND (arg0, 0);
10824 	  tree arg01 = TREE_OPERAND (arg0, 1);
10825 	  tree itype = TREE_TYPE (arg00);
10826 	  if (TREE_INT_CST_HIGH (arg01) == 0
10827 	      && TREE_INT_CST_LOW (arg01)
10828 		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10829 	    {
10830 	      if (TYPE_UNSIGNED (itype))
10831 		{
10832 		  itype = lang_hooks.types.signed_type (itype);
10833 		  arg00 = fold_convert (itype, arg00);
10834 		}
10835 	      return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10836 				  type, arg00, build_int_cst (itype, 0));
10837 	    }
10838 	}
10839 
10840       /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
10841       if (integer_zerop (arg1)
10842 	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
10843 	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10844 			    TREE_OPERAND (arg0, 1));
10845 
10846       /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
10847       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10848 	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10849 	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10850 			    build_int_cst (TREE_TYPE (arg1), 0));
10851       /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
10852       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10853 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10854 	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10855 	return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10856 			    build_int_cst (TREE_TYPE (arg1), 0));
10857 
10858       /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
10859       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10860 	  && TREE_CODE (arg1) == INTEGER_CST
10861 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10862 	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10863 			    fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10864 					 TREE_OPERAND (arg0, 1), arg1));
10865 
10866       /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10867 	 (X & C) == 0 when C is a single bit.  */
10868       if (TREE_CODE (arg0) == BIT_AND_EXPR
10869 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10870 	  && integer_zerop (arg1)
10871 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
10872 	{
10873 	  tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10874 			     TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10875 			     TREE_OPERAND (arg0, 1));
10876 	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10877 			      type, tem, arg1);
10878 	}
10879 
10880       /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10881 	 constant C is a power of two, i.e. a single bit.  */
10882       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10883 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10884 	  && integer_zerop (arg1)
10885 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10886 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10887 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10888 	{
10889 	  tree arg00 = TREE_OPERAND (arg0, 0);
10890 	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10891 			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
10892 	}
10893 
10894       /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10895 	 when is C is a power of two, i.e. a single bit.  */
10896       if (TREE_CODE (arg0) == BIT_AND_EXPR
10897 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10898 	  && integer_zerop (arg1)
10899 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10900 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10901 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10902 	{
10903 	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10904 	  tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10905 			     arg000, TREE_OPERAND (arg0, 1));
10906 	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10907 			      tem, build_int_cst (TREE_TYPE (tem), 0));
10908 	}
10909 
10910       if (integer_zerop (arg1)
10911 	  && tree_expr_nonzero_p (arg0))
10912         {
10913 	  tree res = constant_boolean_node (code==NE_EXPR, type);
10914 	  return omit_one_operand (type, res, arg0);
10915 	}
10916       return NULL_TREE;
10917 
10918     case LT_EXPR:
10919     case GT_EXPR:
10920     case LE_EXPR:
10921     case GE_EXPR:
10922       tem = fold_comparison (code, type, op0, op1);
10923       if (tem != NULL_TREE)
10924 	return tem;
10925 
10926       /* Transform comparisons of the form X +- C CMP X.  */
10927       if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10928 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10929 	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10930 	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10931 	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10932 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10933 	{
10934 	  tree arg01 = TREE_OPERAND (arg0, 1);
10935 	  enum tree_code code0 = TREE_CODE (arg0);
10936 	  int is_positive;
10937 
10938 	  if (TREE_CODE (arg01) == REAL_CST)
10939 	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10940 	  else
10941 	    is_positive = tree_int_cst_sgn (arg01);
10942 
10943 	  /* (X - c) > X becomes false.  */
10944 	  if (code == GT_EXPR
10945 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
10946 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
10947 	    {
10948 	      if (TREE_CODE (arg01) == INTEGER_CST
10949 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10950 		fold_overflow_warning (("assuming signed overflow does not "
10951 					"occur when assuming that (X - c) > X "
10952 					"is always false"),
10953 				       WARN_STRICT_OVERFLOW_ALL);
10954 	      return constant_boolean_node (0, type);
10955 	    }
10956 
10957 	  /* Likewise (X + c) < X becomes false.  */
10958 	  if (code == LT_EXPR
10959 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
10960 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
10961 	    {
10962 	      if (TREE_CODE (arg01) == INTEGER_CST
10963 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10964 		fold_overflow_warning (("assuming signed overflow does not "
10965 					"occur when assuming that "
10966 					"(X + c) < X is always false"),
10967 				       WARN_STRICT_OVERFLOW_ALL);
10968 	      return constant_boolean_node (0, type);
10969 	    }
10970 
10971 	  /* Convert (X - c) <= X to true.  */
10972 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10973 	      && code == LE_EXPR
10974 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
10975 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
10976 	    {
10977 	      if (TREE_CODE (arg01) == INTEGER_CST
10978 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10979 		fold_overflow_warning (("assuming signed overflow does not "
10980 					"occur when assuming that "
10981 					"(X - c) <= X is always true"),
10982 				       WARN_STRICT_OVERFLOW_ALL);
10983 	      return constant_boolean_node (1, type);
10984 	    }
10985 
10986 	  /* Convert (X + c) >= X to true.  */
10987 	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10988 	      && code == GE_EXPR
10989 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
10990 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
10991 	    {
10992 	      if (TREE_CODE (arg01) == INTEGER_CST
10993 		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10994 		fold_overflow_warning (("assuming signed overflow does not "
10995 					"occur when assuming that "
10996 					"(X + c) >= X is always true"),
10997 				       WARN_STRICT_OVERFLOW_ALL);
10998 	      return constant_boolean_node (1, type);
10999 	    }
11000 
11001 	  if (TREE_CODE (arg01) == INTEGER_CST)
11002 	    {
11003 	      /* Convert X + c > X and X - c < X to true for integers.  */
11004 	      if (code == GT_EXPR
11005 	          && ((code0 == PLUS_EXPR && is_positive > 0)
11006 		      || (code0 == MINUS_EXPR && is_positive < 0)))
11007 		{
11008 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11009 		    fold_overflow_warning (("assuming signed overflow does "
11010 					    "not occur when assuming that "
11011 					    "(X + c) > X is always true"),
11012 					   WARN_STRICT_OVERFLOW_ALL);
11013 		  return constant_boolean_node (1, type);
11014 		}
11015 
11016 	      if (code == LT_EXPR
11017 	          && ((code0 == MINUS_EXPR && is_positive > 0)
11018 		      || (code0 == PLUS_EXPR && is_positive < 0)))
11019 		{
11020 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11021 		    fold_overflow_warning (("assuming signed overflow does "
11022 					    "not occur when assuming that "
11023 					    "(X - c) < X is always true"),
11024 					   WARN_STRICT_OVERFLOW_ALL);
11025 		  return constant_boolean_node (1, type);
11026 		}
11027 
11028 	      /* Convert X + c <= X and X - c >= X to false for integers.  */
11029 	      if (code == LE_EXPR
11030 	          && ((code0 == PLUS_EXPR && is_positive > 0)
11031 		      || (code0 == MINUS_EXPR && is_positive < 0)))
11032 		{
11033 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11034 		    fold_overflow_warning (("assuming signed overflow does "
11035 					    "not occur when assuming that "
11036 					    "(X + c) <= X is always false"),
11037 					   WARN_STRICT_OVERFLOW_ALL);
11038 		  return constant_boolean_node (0, type);
11039 		}
11040 
11041 	      if (code == GE_EXPR
11042 	          && ((code0 == MINUS_EXPR && is_positive > 0)
11043 		      || (code0 == PLUS_EXPR && is_positive < 0)))
11044 		{
11045 		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11046 		    fold_overflow_warning (("assuming signed overflow does "
11047 					    "not occur when assuming that "
11048 					    "(X - c) >= X is always true"),
11049 					   WARN_STRICT_OVERFLOW_ALL);
11050 		  return constant_boolean_node (0, type);
11051 		}
11052 	    }
11053 	}
11054 
11055       /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11056 	 This transformation affects the cases which are handled in later
11057 	 optimizations involving comparisons with non-negative constants.  */
11058       if (TREE_CODE (arg1) == INTEGER_CST
11059 	  && TREE_CODE (arg0) != INTEGER_CST
11060 	  && tree_int_cst_sgn (arg1) > 0)
11061 	{
11062 	  if (code == GE_EXPR)
11063 	    {
11064 	      arg1 = const_binop (MINUS_EXPR, arg1,
11065 			          build_int_cst (TREE_TYPE (arg1), 1), 0);
11066 	      return fold_build2 (GT_EXPR, type, arg0,
11067 				  fold_convert (TREE_TYPE (arg0), arg1));
11068 	    }
11069 	  if (code == LT_EXPR)
11070 	    {
11071 	      arg1 = const_binop (MINUS_EXPR, arg1,
11072 			          build_int_cst (TREE_TYPE (arg1), 1), 0);
11073 	      return fold_build2 (LE_EXPR, type, arg0,
11074 				  fold_convert (TREE_TYPE (arg0), arg1));
11075 	    }
11076 	}
11077 
11078       /* Comparisons with the highest or lowest possible integer of
11079 	 the specified size will have known values.  */
11080       {
11081 	int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
11082 
11083 	if (TREE_CODE (arg1) == INTEGER_CST
11084 	    && ! TREE_CONSTANT_OVERFLOW (arg1)
11085 	    && width <= 2 * HOST_BITS_PER_WIDE_INT
11086 	    && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11087 		|| POINTER_TYPE_P (TREE_TYPE (arg1))))
11088 	  {
11089 	    HOST_WIDE_INT signed_max_hi;
11090 	    unsigned HOST_WIDE_INT signed_max_lo;
11091 	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11092 
11093 	    if (width <= HOST_BITS_PER_WIDE_INT)
11094 	      {
11095 		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11096 				- 1;
11097 		signed_max_hi = 0;
11098 		max_hi = 0;
11099 
11100 		if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11101 		  {
11102 		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11103 		    min_lo = 0;
11104 		    min_hi = 0;
11105 		  }
11106 		else
11107 		  {
11108 		    max_lo = signed_max_lo;
11109 		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11110 		    min_hi = -1;
11111 		  }
11112 	      }
11113 	    else
11114 	      {
11115 		width -= HOST_BITS_PER_WIDE_INT;
11116 		signed_max_lo = -1;
11117 		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11118 				- 1;
11119 		max_lo = -1;
11120 		min_lo = 0;
11121 
11122 		if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11123 		  {
11124 		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11125 		    min_hi = 0;
11126 		  }
11127 		else
11128 		  {
11129 		    max_hi = signed_max_hi;
11130 		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11131 		  }
11132 	      }
11133 
11134 	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11135 		&& TREE_INT_CST_LOW (arg1) == max_lo)
11136 	      switch (code)
11137 		{
11138 		case GT_EXPR:
11139 		  return omit_one_operand (type, integer_zero_node, arg0);
11140 
11141 		case GE_EXPR:
11142 		  return fold_build2 (EQ_EXPR, type, op0, op1);
11143 
11144 		case LE_EXPR:
11145 		  return omit_one_operand (type, integer_one_node, arg0);
11146 
11147 		case LT_EXPR:
11148 		  return fold_build2 (NE_EXPR, type, op0, op1);
11149 
11150 		/* The GE_EXPR and LT_EXPR cases above are not normally
11151 		   reached because of previous transformations.  */
11152 
11153 		default:
11154 		  break;
11155 		}
11156 	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11157 		     == max_hi
11158 		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11159 	      switch (code)
11160 		{
11161 		case GT_EXPR:
11162 		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11163 		  return fold_build2 (EQ_EXPR, type,
11164 				      fold_convert (TREE_TYPE (arg1), arg0),
11165 				      arg1);
11166 		case LE_EXPR:
11167 		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11168 		  return fold_build2 (NE_EXPR, type,
11169 				      fold_convert (TREE_TYPE (arg1), arg0),
11170 				      arg1);
11171 		default:
11172 		  break;
11173 		}
11174 	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11175 		     == min_hi
11176 		     && TREE_INT_CST_LOW (arg1) == min_lo)
11177 	      switch (code)
11178 		{
11179 		case LT_EXPR:
11180 		  return omit_one_operand (type, integer_zero_node, arg0);
11181 
11182 		case LE_EXPR:
11183 		  return fold_build2 (EQ_EXPR, type, op0, op1);
11184 
11185 		case GE_EXPR:
11186 		  return omit_one_operand (type, integer_one_node, arg0);
11187 
11188 		case GT_EXPR:
11189 		  return fold_build2 (NE_EXPR, type, op0, op1);
11190 
11191 		default:
11192 		  break;
11193 		}
11194 	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11195 		     == min_hi
11196 		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11197 	      switch (code)
11198 		{
11199 		case GE_EXPR:
11200 		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11201 		  return fold_build2 (NE_EXPR, type,
11202 				      fold_convert (TREE_TYPE (arg1), arg0),
11203 				      arg1);
11204 		case LT_EXPR:
11205 		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11206 		  return fold_build2 (EQ_EXPR, type,
11207 				      fold_convert (TREE_TYPE (arg1), arg0),
11208 				      arg1);
11209 		default:
11210 		  break;
11211 		}
11212 
11213 	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11214 		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
11215 		     && TYPE_UNSIGNED (TREE_TYPE (arg1))
11216 		     /* signed_type does not work on pointer types.  */
11217 		     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
11218 	      {
11219 		/* The following case also applies to X < signed_max+1
11220 		   and X >= signed_max+1 because previous transformations.  */
11221 		if (code == LE_EXPR || code == GT_EXPR)
11222 		  {
11223 		    tree st;
11224 		    st = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11225 		    return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
11226 					type, fold_convert (st, arg0),
11227 					build_int_cst (st, 0));
11228 		  }
11229 	      }
11230 	  }
11231       }
11232 
11233       /* If we are comparing an ABS_EXPR with a constant, we can
11234 	 convert all the cases into explicit comparisons, but they may
11235 	 well not be faster than doing the ABS and one comparison.
11236 	 But ABS (X) <= C is a range comparison, which becomes a subtraction
11237 	 and a comparison, and is probably faster.  */
11238       if (code == LE_EXPR
11239 	  && TREE_CODE (arg1) == INTEGER_CST
11240 	  && TREE_CODE (arg0) == ABS_EXPR
11241 	  && ! TREE_SIDE_EFFECTS (arg0)
11242 	  && (0 != (tem = negate_expr (arg1)))
11243 	  && TREE_CODE (tem) == INTEGER_CST
11244 	  && ! TREE_CONSTANT_OVERFLOW (tem))
11245 	return fold_build2 (TRUTH_ANDIF_EXPR, type,
11246 			    build2 (GE_EXPR, type,
11247 				    TREE_OPERAND (arg0, 0), tem),
11248 			    build2 (LE_EXPR, type,
11249 				    TREE_OPERAND (arg0, 0), arg1));
11250 
11251       /* Convert ABS_EXPR<x> >= 0 to true.  */
11252       strict_overflow_p = false;
11253       if (code == GE_EXPR
11254 	  && (integer_zerop (arg1)
11255 	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11256 		  && real_zerop (arg1)))
11257 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11258 	{
11259 	  if (strict_overflow_p)
11260 	    fold_overflow_warning (("assuming signed overflow does not occur "
11261 				    "when simplifying comparison of "
11262 				    "absolute value and zero"),
11263 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11264 	  return omit_one_operand (type, integer_one_node, arg0);
11265 	}
11266 
11267       /* Convert ABS_EXPR<x> < 0 to false.  */
11268       strict_overflow_p = false;
11269       if (code == LT_EXPR
11270 	  && (integer_zerop (arg1) || real_zerop (arg1))
11271 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11272 	{
11273 	  if (strict_overflow_p)
11274 	    fold_overflow_warning (("assuming signed overflow does not occur "
11275 				    "when simplifying comparison of "
11276 				    "absolute value and zero"),
11277 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11278 	  return omit_one_operand (type, integer_zero_node, arg0);
11279 	}
11280 
11281       /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11282 	 and similarly for >= into !=.  */
11283       if ((code == LT_EXPR || code == GE_EXPR)
11284 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11285 	  && TREE_CODE (arg1) == LSHIFT_EXPR
11286 	  && integer_onep (TREE_OPERAND (arg1, 0)))
11287 	return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11288 		       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11289 			       TREE_OPERAND (arg1, 1)),
11290 		       build_int_cst (TREE_TYPE (arg0), 0));
11291 
11292       if ((code == LT_EXPR || code == GE_EXPR)
11293 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11294 	  && (TREE_CODE (arg1) == NOP_EXPR
11295 	      || TREE_CODE (arg1) == CONVERT_EXPR)
11296 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11297 	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11298 	return
11299 	  build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11300 		  fold_convert (TREE_TYPE (arg0),
11301 				build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11302 					TREE_OPERAND (TREE_OPERAND (arg1, 0),
11303 						      1))),
11304 		  build_int_cst (TREE_TYPE (arg0), 0));
11305 
11306       return NULL_TREE;
11307 
11308     case UNORDERED_EXPR:
11309     case ORDERED_EXPR:
11310     case UNLT_EXPR:
11311     case UNLE_EXPR:
11312     case UNGT_EXPR:
11313     case UNGE_EXPR:
11314     case UNEQ_EXPR:
11315     case LTGT_EXPR:
11316       if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11317 	{
11318 	  t1 = fold_relational_const (code, type, arg0, arg1);
11319 	  if (t1 != NULL_TREE)
11320 	    return t1;
11321 	}
11322 
11323       /* If the first operand is NaN, the result is constant.  */
11324       if (TREE_CODE (arg0) == REAL_CST
11325 	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11326 	  && (code != LTGT_EXPR || ! flag_trapping_math))
11327 	{
11328 	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11329 	       ? integer_zero_node
11330 	       : integer_one_node;
11331 	  return omit_one_operand (type, t1, arg1);
11332 	}
11333 
11334       /* If the second operand is NaN, the result is constant.  */
11335       if (TREE_CODE (arg1) == REAL_CST
11336 	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11337 	  && (code != LTGT_EXPR || ! flag_trapping_math))
11338 	{
11339 	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11340 	       ? integer_zero_node
11341 	       : integer_one_node;
11342 	  return omit_one_operand (type, t1, arg0);
11343 	}
11344 
11345       /* Simplify unordered comparison of something with itself.  */
11346       if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11347 	  && operand_equal_p (arg0, arg1, 0))
11348 	return constant_boolean_node (1, type);
11349 
11350       if (code == LTGT_EXPR
11351 	  && !flag_trapping_math
11352 	  && operand_equal_p (arg0, arg1, 0))
11353 	return constant_boolean_node (0, type);
11354 
11355       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
11356       {
11357 	tree targ0 = strip_float_extensions (arg0);
11358 	tree targ1 = strip_float_extensions (arg1);
11359 	tree newtype = TREE_TYPE (targ0);
11360 
11361 	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11362 	  newtype = TREE_TYPE (targ1);
11363 
11364 	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11365 	  return fold_build2 (code, type, fold_convert (newtype, targ0),
11366 			      fold_convert (newtype, targ1));
11367       }
11368 
11369       return NULL_TREE;
11370 
11371     case COMPOUND_EXPR:
11372       /* When pedantic, a compound expression can be neither an lvalue
11373 	 nor an integer constant expression.  */
11374       if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11375 	return NULL_TREE;
11376       /* Don't let (0, 0) be null pointer constant.  */
11377       tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11378 				 : fold_convert (type, arg1);
11379       return pedantic_non_lvalue (tem);
11380 
11381     case COMPLEX_EXPR:
11382       if ((TREE_CODE (arg0) == REAL_CST
11383 	   && TREE_CODE (arg1) == REAL_CST)
11384 	  || (TREE_CODE (arg0) == INTEGER_CST
11385 	      && TREE_CODE (arg1) == INTEGER_CST))
11386 	return build_complex (type, arg0, arg1);
11387       return NULL_TREE;
11388 
11389     case ASSERT_EXPR:
11390       /* An ASSERT_EXPR should never be passed to fold_binary.  */
11391       gcc_unreachable ();
11392 
11393     default:
11394       return NULL_TREE;
11395     } /* switch (code) */
11396 }
11397 
11398 /* Callback for walk_tree, looking for LABEL_EXPR.
11399    Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
11400    Do not check the sub-tree of GOTO_EXPR.  */
11401 
11402 static tree
contains_label_1(tree * tp,int * walk_subtrees,void * data ATTRIBUTE_UNUSED)11403 contains_label_1 (tree *tp,
11404                   int *walk_subtrees,
11405                   void *data ATTRIBUTE_UNUSED)
11406 {
11407   switch (TREE_CODE (*tp))
11408     {
11409     case LABEL_EXPR:
11410       return *tp;
11411     case GOTO_EXPR:
11412       *walk_subtrees = 0;
11413     /* no break */
11414     default:
11415       return NULL_TREE;
11416     }
11417 }
11418 
11419 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11420    accessible from outside the sub-tree. Returns NULL_TREE if no
11421    addressable label is found.  */
11422 
11423 static bool
contains_label_p(tree st)11424 contains_label_p (tree st)
11425 {
11426   return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11427 }
11428 
11429 /* Fold a ternary expression of code CODE and type TYPE with operands
11430    OP0, OP1, and OP2.  Return the folded expression if folding is
11431    successful.  Otherwise, return NULL_TREE.  */
11432 
11433 tree
fold_ternary(enum tree_code code,tree type,tree op0,tree op1,tree op2)11434 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11435 {
11436   tree tem;
11437   tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11438   enum tree_code_class kind = TREE_CODE_CLASS (code);
11439 
11440   gcc_assert (IS_EXPR_CODE_CLASS (kind)
11441 	      && TREE_CODE_LENGTH (code) == 3);
11442 
11443   /* Strip any conversions that don't change the mode.  This is safe
11444      for every expression, except for a comparison expression because
11445      its signedness is derived from its operands.  So, in the latter
11446      case, only strip conversions that don't change the signedness.
11447 
11448      Note that this is done as an internal manipulation within the
11449      constant folder, in order to find the simplest representation of
11450      the arguments so that their form can be studied.  In any cases,
11451      the appropriate type conversions should be put back in the tree
11452      that will get out of the constant folder.  */
11453   if (op0)
11454     {
11455       arg0 = op0;
11456       STRIP_NOPS (arg0);
11457     }
11458 
11459   if (op1)
11460     {
11461       arg1 = op1;
11462       STRIP_NOPS (arg1);
11463     }
11464 
11465   switch (code)
11466     {
11467     case COMPONENT_REF:
11468       if (TREE_CODE (arg0) == CONSTRUCTOR
11469 	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11470 	{
11471 	  unsigned HOST_WIDE_INT idx;
11472 	  tree field, value;
11473 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11474 	    if (field == arg1)
11475 	      return value;
11476 	}
11477       return NULL_TREE;
11478 
11479     case COND_EXPR:
11480       /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11481 	 so all simple results must be passed through pedantic_non_lvalue.  */
11482       if (TREE_CODE (arg0) == INTEGER_CST)
11483 	{
11484 	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
11485 	  tem = integer_zerop (arg0) ? op2 : op1;
11486 	  /* Only optimize constant conditions when the selected branch
11487 	     has the same type as the COND_EXPR.  This avoids optimizing
11488              away "c ? x : throw", where the throw has a void type.
11489              Avoid throwing away that operand which contains label.  */
11490           if ((!TREE_SIDE_EFFECTS (unused_op)
11491                || !contains_label_p (unused_op))
11492               && (! VOID_TYPE_P (TREE_TYPE (tem))
11493                   || VOID_TYPE_P (type)))
11494 	    return pedantic_non_lvalue (tem);
11495 	  return NULL_TREE;
11496 	}
11497       if (operand_equal_p (arg1, op2, 0))
11498 	return pedantic_omit_one_operand (type, arg1, arg0);
11499 
11500       /* If we have A op B ? A : C, we may be able to convert this to a
11501 	 simpler expression, depending on the operation and the values
11502 	 of B and C.  Signed zeros prevent all of these transformations,
11503 	 for reasons given above each one.
11504 
11505          Also try swapping the arguments and inverting the conditional.  */
11506       if (COMPARISON_CLASS_P (arg0)
11507 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11508 					     arg1, TREE_OPERAND (arg0, 1))
11509 	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11510 	{
11511 	  tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11512 	  if (tem)
11513 	    return tem;
11514 	}
11515 
11516       if (COMPARISON_CLASS_P (arg0)
11517 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11518 					     op2,
11519 					     TREE_OPERAND (arg0, 1))
11520 	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11521 	{
11522 	  tem = fold_truth_not_expr (arg0);
11523 	  if (tem && COMPARISON_CLASS_P (tem))
11524 	    {
11525 	      tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11526 	      if (tem)
11527 		return tem;
11528 	    }
11529 	}
11530 
11531       /* If the second operand is simpler than the third, swap them
11532 	 since that produces better jump optimization results.  */
11533       if (truth_value_p (TREE_CODE (arg0))
11534 	  && tree_swap_operands_p (op1, op2, false))
11535 	{
11536 	  /* See if this can be inverted.  If it can't, possibly because
11537 	     it was a floating-point inequality comparison, don't do
11538 	     anything.  */
11539 	  tem = fold_truth_not_expr (arg0);
11540 	  if (tem)
11541 	    return fold_build3 (code, type, tem, op2, op1);
11542 	}
11543 
11544       /* Convert A ? 1 : 0 to simply A.  */
11545       if (integer_onep (op1)
11546 	  && integer_zerop (op2)
11547 	  /* If we try to convert OP0 to our type, the
11548 	     call to fold will try to move the conversion inside
11549 	     a COND, which will recurse.  In that case, the COND_EXPR
11550 	     is probably the best choice, so leave it alone.  */
11551 	  && type == TREE_TYPE (arg0))
11552 	return pedantic_non_lvalue (arg0);
11553 
11554       /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
11555 	 over COND_EXPR in cases such as floating point comparisons.  */
11556       if (integer_zerop (op1)
11557 	  && integer_onep (op2)
11558 	  && truth_value_p (TREE_CODE (arg0)))
11559 	return pedantic_non_lvalue (fold_convert (type,
11560 						  invert_truthvalue (arg0)));
11561 
11562       /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
11563       if (TREE_CODE (arg0) == LT_EXPR
11564 	  && integer_zerop (TREE_OPERAND (arg0, 1))
11565 	  && integer_zerop (op2)
11566 	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11567 	{
11568 	  /* sign_bit_p only checks ARG1 bits within A's precision.
11569 	     If <sign bit of A> has wider type than A, bits outside
11570 	     of A's precision in <sign bit of A> need to be checked.
11571 	     If they are all 0, this optimization needs to be done
11572 	     in unsigned A's type, if they are all 1 in signed A's type,
11573 	     otherwise this can't be done.  */
11574 	  if (TYPE_PRECISION (TREE_TYPE (tem))
11575 	      < TYPE_PRECISION (TREE_TYPE (arg1))
11576 	      && TYPE_PRECISION (TREE_TYPE (tem))
11577 		 < TYPE_PRECISION (type))
11578 	    {
11579 	      unsigned HOST_WIDE_INT mask_lo;
11580 	      HOST_WIDE_INT mask_hi;
11581 	      int inner_width, outer_width;
11582 	      tree tem_type;
11583 
11584 	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11585 	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11586 	      if (outer_width > TYPE_PRECISION (type))
11587 		outer_width = TYPE_PRECISION (type);
11588 
11589 	      if (outer_width > HOST_BITS_PER_WIDE_INT)
11590 		{
11591 		  mask_hi = ((unsigned HOST_WIDE_INT) -1
11592 			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11593 		  mask_lo = -1;
11594 		}
11595 	      else
11596 		{
11597 		  mask_hi = 0;
11598 		  mask_lo = ((unsigned HOST_WIDE_INT) -1
11599 			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
11600 		}
11601 	      if (inner_width > HOST_BITS_PER_WIDE_INT)
11602 		{
11603 		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11604 			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
11605 		  mask_lo = 0;
11606 		}
11607 	      else
11608 		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11609 			     >> (HOST_BITS_PER_WIDE_INT - inner_width));
11610 
11611 	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11612 		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11613 		{
11614 		  tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11615 		  tem = fold_convert (tem_type, tem);
11616 		}
11617 	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11618 		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11619 		{
11620 		  tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11621 		  tem = fold_convert (tem_type, tem);
11622 		}
11623 	      else
11624 		tem = NULL;
11625 	    }
11626 
11627 	  if (tem)
11628 	    return fold_convert (type,
11629 				 fold_build2 (BIT_AND_EXPR,
11630 					      TREE_TYPE (tem), tem,
11631 					      fold_convert (TREE_TYPE (tem),
11632 							    arg1)));
11633 	}
11634 
11635       /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
11636 	 already handled above.  */
11637       if (TREE_CODE (arg0) == BIT_AND_EXPR
11638 	  && integer_onep (TREE_OPERAND (arg0, 1))
11639 	  && integer_zerop (op2)
11640 	  && integer_pow2p (arg1))
11641 	{
11642 	  tree tem = TREE_OPERAND (arg0, 0);
11643 	  STRIP_NOPS (tem);
11644 	  if (TREE_CODE (tem) == RSHIFT_EXPR
11645               && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11646               && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11647 	         TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11648 	    return fold_build2 (BIT_AND_EXPR, type,
11649 				TREE_OPERAND (tem, 0), arg1);
11650 	}
11651 
11652       /* A & N ? N : 0 is simply A & N if N is a power of two.  This
11653 	 is probably obsolete because the first operand should be a
11654 	 truth value (that's why we have the two cases above), but let's
11655 	 leave it in until we can confirm this for all front-ends.  */
11656       if (integer_zerop (op2)
11657 	  && TREE_CODE (arg0) == NE_EXPR
11658 	  && integer_zerop (TREE_OPERAND (arg0, 1))
11659 	  && integer_pow2p (arg1)
11660 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11661 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11662 			      arg1, OEP_ONLY_CONST))
11663 	return pedantic_non_lvalue (fold_convert (type,
11664 						  TREE_OPERAND (arg0, 0)));
11665 
11666       /* Convert A ? B : 0 into A && B if A and B are truth values.  */
11667       if (integer_zerop (op2)
11668 	  && truth_value_p (TREE_CODE (arg0))
11669 	  && truth_value_p (TREE_CODE (arg1)))
11670 	return fold_build2 (TRUTH_ANDIF_EXPR, type,
11671 			    fold_convert (type, arg0),
11672 			    arg1);
11673 
11674       /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
11675       if (integer_onep (op2)
11676 	  && truth_value_p (TREE_CODE (arg0))
11677 	  && truth_value_p (TREE_CODE (arg1)))
11678 	{
11679 	  /* Only perform transformation if ARG0 is easily inverted.  */
11680 	  tem = fold_truth_not_expr (arg0);
11681 	  if (tem)
11682 	    return fold_build2 (TRUTH_ORIF_EXPR, type,
11683 				fold_convert (type, tem),
11684 				arg1);
11685 	}
11686 
11687       /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
11688       if (integer_zerop (arg1)
11689 	  && truth_value_p (TREE_CODE (arg0))
11690 	  && truth_value_p (TREE_CODE (op2)))
11691 	{
11692 	  /* Only perform transformation if ARG0 is easily inverted.  */
11693 	  tem = fold_truth_not_expr (arg0);
11694 	  if (tem)
11695 	    return fold_build2 (TRUTH_ANDIF_EXPR, type,
11696 				fold_convert (type, tem),
11697 				op2);
11698 	}
11699 
11700       /* Convert A ? 1 : B into A || B if A and B are truth values.  */
11701       if (integer_onep (arg1)
11702 	  && truth_value_p (TREE_CODE (arg0))
11703 	  && truth_value_p (TREE_CODE (op2)))
11704 	return fold_build2 (TRUTH_ORIF_EXPR, type,
11705 			    fold_convert (type, arg0),
11706 			    op2);
11707 
11708       return NULL_TREE;
11709 
11710     case CALL_EXPR:
11711       /* Check for a built-in function.  */
11712       if (TREE_CODE (op0) == ADDR_EXPR
11713 	  && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11714 	  && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11715 	return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11716       return NULL_TREE;
11717 
11718     case BIT_FIELD_REF:
11719       if (TREE_CODE (arg0) == VECTOR_CST
11720 	  && type == TREE_TYPE (TREE_TYPE (arg0))
11721 	  && host_integerp (arg1, 1)
11722 	  && host_integerp (op2, 1))
11723 	{
11724 	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11725 	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11726 
11727 	  if (width != 0
11728 	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11729 	      && (idx % width) == 0
11730 	      && (idx = idx / width)
11731 		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11732 	    {
11733 	      tree elements = TREE_VECTOR_CST_ELTS (arg0);
11734 	      while (idx-- > 0 && elements)
11735 		elements = TREE_CHAIN (elements);
11736 	      if (elements)
11737 		return TREE_VALUE (elements);
11738 	      else
11739 		return fold_convert (type, integer_zero_node);
11740 	    }
11741 	}
11742       return NULL_TREE;
11743 
11744     default:
11745       return NULL_TREE;
11746     } /* switch (code) */
11747 }
11748 
11749 /* Perform constant folding and related simplification of EXPR.
11750    The related simplifications include x*1 => x, x*0 => 0, etc.,
11751    and application of the associative law.
11752    NOP_EXPR conversions may be removed freely (as long as we
11753    are careful not to change the type of the overall expression).
11754    We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11755    but we can constant-fold them if they have constant operands.  */
11756 
11757 #ifdef ENABLE_FOLD_CHECKING
11758 # define fold(x) fold_1 (x)
11759 static tree fold_1 (tree);
11760 static
11761 #endif
11762 tree
fold(tree expr)11763 fold (tree expr)
11764 {
11765   const tree t = expr;
11766   enum tree_code code = TREE_CODE (t);
11767   enum tree_code_class kind = TREE_CODE_CLASS (code);
11768   tree tem;
11769 
11770   /* Return right away if a constant.  */
11771   if (kind == tcc_constant)
11772     return t;
11773 
11774   if (IS_EXPR_CODE_CLASS (kind))
11775     {
11776       tree type = TREE_TYPE (t);
11777       tree op0, op1, op2;
11778 
11779       switch (TREE_CODE_LENGTH (code))
11780 	{
11781 	case 1:
11782 	  op0 = TREE_OPERAND (t, 0);
11783 	  tem = fold_unary (code, type, op0);
11784 	  return tem ? tem : expr;
11785 	case 2:
11786 	  op0 = TREE_OPERAND (t, 0);
11787 	  op1 = TREE_OPERAND (t, 1);
11788 	  tem = fold_binary (code, type, op0, op1);
11789 	  return tem ? tem : expr;
11790 	case 3:
11791 	  op0 = TREE_OPERAND (t, 0);
11792 	  op1 = TREE_OPERAND (t, 1);
11793 	  op2 = TREE_OPERAND (t, 2);
11794 	  tem = fold_ternary (code, type, op0, op1, op2);
11795 	  return tem ? tem : expr;
11796 	default:
11797 	  break;
11798 	}
11799     }
11800 
11801   switch (code)
11802     {
11803     case CONST_DECL:
11804       return fold (DECL_INITIAL (t));
11805 
11806     default:
11807       return t;
11808     } /* switch (code) */
11809 }
11810 
11811 #ifdef ENABLE_FOLD_CHECKING
11812 #undef fold
11813 
11814 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11815 static void fold_check_failed (tree, tree);
11816 void print_fold_checksum (tree);
11817 
11818 /* When --enable-checking=fold, compute a digest of expr before
11819    and after actual fold call to see if fold did not accidentally
11820    change original expr.  */
11821 
11822 tree
fold(tree expr)11823 fold (tree expr)
11824 {
11825   tree ret;
11826   struct md5_ctx ctx;
11827   unsigned char checksum_before[16], checksum_after[16];
11828   htab_t ht;
11829 
11830   ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11831   md5_init_ctx (&ctx);
11832   fold_checksum_tree (expr, &ctx, ht);
11833   md5_finish_ctx (&ctx, checksum_before);
11834   htab_empty (ht);
11835 
11836   ret = fold_1 (expr);
11837 
11838   md5_init_ctx (&ctx);
11839   fold_checksum_tree (expr, &ctx, ht);
11840   md5_finish_ctx (&ctx, checksum_after);
11841   htab_delete (ht);
11842 
11843   if (memcmp (checksum_before, checksum_after, 16))
11844     fold_check_failed (expr, ret);
11845 
11846   return ret;
11847 }
11848 
11849 void
print_fold_checksum(tree expr)11850 print_fold_checksum (tree expr)
11851 {
11852   struct md5_ctx ctx;
11853   unsigned char checksum[16], cnt;
11854   htab_t ht;
11855 
11856   ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11857   md5_init_ctx (&ctx);
11858   fold_checksum_tree (expr, &ctx, ht);
11859   md5_finish_ctx (&ctx, checksum);
11860   htab_delete (ht);
11861   for (cnt = 0; cnt < 16; ++cnt)
11862     fprintf (stderr, "%02x", checksum[cnt]);
11863   putc ('\n', stderr);
11864 }
11865 
11866 static void
fold_check_failed(tree expr ATTRIBUTE_UNUSED,tree ret ATTRIBUTE_UNUSED)11867 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11868 {
11869   internal_error ("fold check: original tree changed by fold");
11870 }
11871 
11872 static void
fold_checksum_tree(tree expr,struct md5_ctx * ctx,htab_t ht)11873 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11874 {
11875   void **slot;
11876   enum tree_code code;
11877   struct tree_function_decl buf;
11878   int i, len;
11879 
11880 recursive_label:
11881 
11882   gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11883 	       <= sizeof (struct tree_function_decl))
11884 	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11885   if (expr == NULL)
11886     return;
11887   slot = htab_find_slot (ht, expr, INSERT);
11888   if (*slot != NULL)
11889     return;
11890   *slot = expr;
11891   code = TREE_CODE (expr);
11892   if (TREE_CODE_CLASS (code) == tcc_declaration
11893       && DECL_ASSEMBLER_NAME_SET_P (expr))
11894     {
11895       /* Allow DECL_ASSEMBLER_NAME to be modified.  */
11896       memcpy ((char *) &buf, expr, tree_size (expr));
11897       expr = (tree) &buf;
11898       SET_DECL_ASSEMBLER_NAME (expr, NULL);
11899     }
11900   else if (TREE_CODE_CLASS (code) == tcc_type
11901 	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11902 	       || TYPE_CACHED_VALUES_P (expr)
11903 	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11904     {
11905       /* Allow these fields to be modified.  */
11906       memcpy ((char *) &buf, expr, tree_size (expr));
11907       expr = (tree) &buf;
11908       TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11909       TYPE_POINTER_TO (expr) = NULL;
11910       TYPE_REFERENCE_TO (expr) = NULL;
11911       if (TYPE_CACHED_VALUES_P (expr))
11912 	{
11913 	  TYPE_CACHED_VALUES_P (expr) = 0;
11914 	  TYPE_CACHED_VALUES (expr) = NULL;
11915 	}
11916     }
11917   md5_process_bytes (expr, tree_size (expr), ctx);
11918   fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11919   if (TREE_CODE_CLASS (code) != tcc_type
11920       && TREE_CODE_CLASS (code) != tcc_declaration
11921       && code != TREE_LIST)
11922     fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11923   switch (TREE_CODE_CLASS (code))
11924     {
11925     case tcc_constant:
11926       switch (code)
11927 	{
11928 	case STRING_CST:
11929 	  md5_process_bytes (TREE_STRING_POINTER (expr),
11930 			     TREE_STRING_LENGTH (expr), ctx);
11931 	  break;
11932 	case COMPLEX_CST:
11933 	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11934 	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11935 	  break;
11936 	case VECTOR_CST:
11937 	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11938 	  break;
11939 	default:
11940 	  break;
11941 	}
11942       break;
11943     case tcc_exceptional:
11944       switch (code)
11945 	{
11946 	case TREE_LIST:
11947 	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11948 	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11949 	  expr = TREE_CHAIN (expr);
11950 	  goto recursive_label;
11951 	  break;
11952 	case TREE_VEC:
11953 	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11954 	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11955 	  break;
11956 	default:
11957 	  break;
11958 	}
11959       break;
11960     case tcc_expression:
11961     case tcc_reference:
11962     case tcc_comparison:
11963     case tcc_unary:
11964     case tcc_binary:
11965     case tcc_statement:
11966       len = TREE_CODE_LENGTH (code);
11967       for (i = 0; i < len; ++i)
11968 	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11969       break;
11970     case tcc_declaration:
11971       fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11972       fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11973       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11974 	{
11975 	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11976 	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11977 	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11978 	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11979 	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11980 	}
11981       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11982 	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11983 
11984       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11985 	{
11986 	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11987 	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11988 	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
11989 	}
11990       break;
11991     case tcc_type:
11992       if (TREE_CODE (expr) == ENUMERAL_TYPE)
11993         fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
11994       fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
11995       fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
11996       fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
11997       fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
11998       if (INTEGRAL_TYPE_P (expr)
11999           || SCALAR_FLOAT_TYPE_P (expr))
12000 	{
12001 	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12002 	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12003 	}
12004       fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12005       if (TREE_CODE (expr) == RECORD_TYPE
12006 	  || TREE_CODE (expr) == UNION_TYPE
12007 	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
12008 	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12009       fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12010       break;
12011     default:
12012       break;
12013     }
12014 }
12015 
12016 #endif
12017 
12018 /* Fold a unary tree expression with code CODE of type TYPE with an
12019    operand OP0.  Return a folded expression if successful.  Otherwise,
12020    return a tree expression with code CODE of type TYPE with an
12021    operand OP0.  */
12022 
12023 tree
fold_build1_stat(enum tree_code code,tree type,tree op0 MEM_STAT_DECL)12024 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12025 {
12026   tree tem;
12027 #ifdef ENABLE_FOLD_CHECKING
12028   unsigned char checksum_before[16], checksum_after[16];
12029   struct md5_ctx ctx;
12030   htab_t ht;
12031 
12032   ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12033   md5_init_ctx (&ctx);
12034   fold_checksum_tree (op0, &ctx, ht);
12035   md5_finish_ctx (&ctx, checksum_before);
12036   htab_empty (ht);
12037 #endif
12038 
12039   tem = fold_unary (code, type, op0);
12040   if (!tem)
12041     tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12042 
12043 #ifdef ENABLE_FOLD_CHECKING
12044   md5_init_ctx (&ctx);
12045   fold_checksum_tree (op0, &ctx, ht);
12046   md5_finish_ctx (&ctx, checksum_after);
12047   htab_delete (ht);
12048 
12049   if (memcmp (checksum_before, checksum_after, 16))
12050     fold_check_failed (op0, tem);
12051 #endif
12052   return tem;
12053 }
12054 
12055 /* Fold a binary tree expression with code CODE of type TYPE with
12056    operands OP0 and OP1.  Return a folded expression if successful.
12057    Otherwise, return a tree expression with code CODE of type TYPE
12058    with operands OP0 and OP1.  */
12059 
12060 tree
fold_build2_stat(enum tree_code code,tree type,tree op0,tree op1 MEM_STAT_DECL)12061 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12062 		  MEM_STAT_DECL)
12063 {
12064   tree tem;
12065 #ifdef ENABLE_FOLD_CHECKING
12066   unsigned char checksum_before_op0[16],
12067                 checksum_before_op1[16],
12068 		checksum_after_op0[16],
12069 		checksum_after_op1[16];
12070   struct md5_ctx ctx;
12071   htab_t ht;
12072 
12073   ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12074   md5_init_ctx (&ctx);
12075   fold_checksum_tree (op0, &ctx, ht);
12076   md5_finish_ctx (&ctx, checksum_before_op0);
12077   htab_empty (ht);
12078 
12079   md5_init_ctx (&ctx);
12080   fold_checksum_tree (op1, &ctx, ht);
12081   md5_finish_ctx (&ctx, checksum_before_op1);
12082   htab_empty (ht);
12083 #endif
12084 
12085   tem = fold_binary (code, type, op0, op1);
12086   if (!tem)
12087     tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12088 
12089 #ifdef ENABLE_FOLD_CHECKING
12090   md5_init_ctx (&ctx);
12091   fold_checksum_tree (op0, &ctx, ht);
12092   md5_finish_ctx (&ctx, checksum_after_op0);
12093   htab_empty (ht);
12094 
12095   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12096     fold_check_failed (op0, tem);
12097 
12098   md5_init_ctx (&ctx);
12099   fold_checksum_tree (op1, &ctx, ht);
12100   md5_finish_ctx (&ctx, checksum_after_op1);
12101   htab_delete (ht);
12102 
12103   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12104     fold_check_failed (op1, tem);
12105 #endif
12106   return tem;
12107 }
12108 
12109 /* Fold a ternary tree expression with code CODE of type TYPE with
12110    operands OP0, OP1, and OP2.  Return a folded expression if
12111    successful.  Otherwise, return a tree expression with code CODE of
12112    type TYPE with operands OP0, OP1, and OP2.  */
12113 
12114 tree
fold_build3_stat(enum tree_code code,tree type,tree op0,tree op1,tree op2 MEM_STAT_DECL)12115 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12116 	     MEM_STAT_DECL)
12117 {
12118   tree tem;
12119 #ifdef ENABLE_FOLD_CHECKING
12120   unsigned char checksum_before_op0[16],
12121                 checksum_before_op1[16],
12122                 checksum_before_op2[16],
12123 		checksum_after_op0[16],
12124 		checksum_after_op1[16],
12125 		checksum_after_op2[16];
12126   struct md5_ctx ctx;
12127   htab_t ht;
12128 
12129   ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12130   md5_init_ctx (&ctx);
12131   fold_checksum_tree (op0, &ctx, ht);
12132   md5_finish_ctx (&ctx, checksum_before_op0);
12133   htab_empty (ht);
12134 
12135   md5_init_ctx (&ctx);
12136   fold_checksum_tree (op1, &ctx, ht);
12137   md5_finish_ctx (&ctx, checksum_before_op1);
12138   htab_empty (ht);
12139 
12140   md5_init_ctx (&ctx);
12141   fold_checksum_tree (op2, &ctx, ht);
12142   md5_finish_ctx (&ctx, checksum_before_op2);
12143   htab_empty (ht);
12144 #endif
12145 
12146   tem = fold_ternary (code, type, op0, op1, op2);
12147   if (!tem)
12148     tem =  build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12149 
12150 #ifdef ENABLE_FOLD_CHECKING
12151   md5_init_ctx (&ctx);
12152   fold_checksum_tree (op0, &ctx, ht);
12153   md5_finish_ctx (&ctx, checksum_after_op0);
12154   htab_empty (ht);
12155 
12156   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12157     fold_check_failed (op0, tem);
12158 
12159   md5_init_ctx (&ctx);
12160   fold_checksum_tree (op1, &ctx, ht);
12161   md5_finish_ctx (&ctx, checksum_after_op1);
12162   htab_empty (ht);
12163 
12164   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12165     fold_check_failed (op1, tem);
12166 
12167   md5_init_ctx (&ctx);
12168   fold_checksum_tree (op2, &ctx, ht);
12169   md5_finish_ctx (&ctx, checksum_after_op2);
12170   htab_delete (ht);
12171 
12172   if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12173     fold_check_failed (op2, tem);
12174 #endif
12175   return tem;
12176 }
12177 
12178 /* Perform constant folding and related simplification of initializer
12179    expression EXPR.  These behave identically to "fold_buildN" but ignore
12180    potential run-time traps and exceptions that fold must preserve.  */
12181 
12182 #define START_FOLD_INIT \
12183   int saved_signaling_nans = flag_signaling_nans;\
12184   int saved_trapping_math = flag_trapping_math;\
12185   int saved_rounding_math = flag_rounding_math;\
12186   int saved_trapv = flag_trapv;\
12187   int saved_folding_initializer = folding_initializer;\
12188   flag_signaling_nans = 0;\
12189   flag_trapping_math = 0;\
12190   flag_rounding_math = 0;\
12191   flag_trapv = 0;\
12192   folding_initializer = 1;
12193 
12194 #define END_FOLD_INIT \
12195   flag_signaling_nans = saved_signaling_nans;\
12196   flag_trapping_math = saved_trapping_math;\
12197   flag_rounding_math = saved_rounding_math;\
12198   flag_trapv = saved_trapv;\
12199   folding_initializer = saved_folding_initializer;
12200 
12201 tree
fold_build1_initializer(enum tree_code code,tree type,tree op)12202 fold_build1_initializer (enum tree_code code, tree type, tree op)
12203 {
12204   tree result;
12205   START_FOLD_INIT;
12206 
12207   result = fold_build1 (code, type, op);
12208 
12209   END_FOLD_INIT;
12210   return result;
12211 }
12212 
12213 tree
fold_build2_initializer(enum tree_code code,tree type,tree op0,tree op1)12214 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12215 {
12216   tree result;
12217   START_FOLD_INIT;
12218 
12219   result = fold_build2 (code, type, op0, op1);
12220 
12221   END_FOLD_INIT;
12222   return result;
12223 }
12224 
12225 tree
fold_build3_initializer(enum tree_code code,tree type,tree op0,tree op1,tree op2)12226 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12227 			 tree op2)
12228 {
12229   tree result;
12230   START_FOLD_INIT;
12231 
12232   result = fold_build3 (code, type, op0, op1, op2);
12233 
12234   END_FOLD_INIT;
12235   return result;
12236 }
12237 
12238 #undef START_FOLD_INIT
12239 #undef END_FOLD_INIT
12240 
12241 /* Determine if first argument is a multiple of second argument.  Return 0 if
12242    it is not, or we cannot easily determined it to be.
12243 
12244    An example of the sort of thing we care about (at this point; this routine
12245    could surely be made more general, and expanded to do what the *_DIV_EXPR's
12246    fold cases do now) is discovering that
12247 
12248      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12249 
12250    is a multiple of
12251 
12252      SAVE_EXPR (J * 8)
12253 
12254    when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12255 
12256    This code also handles discovering that
12257 
12258      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12259 
12260    is a multiple of 8 so we don't have to worry about dealing with a
12261    possible remainder.
12262 
12263    Note that we *look* inside a SAVE_EXPR only to determine how it was
12264    calculated; it is not safe for fold to do much of anything else with the
12265    internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12266    at run time.  For example, the latter example above *cannot* be implemented
12267    as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12268    evaluation time of the original SAVE_EXPR is not necessarily the same at
12269    the time the new expression is evaluated.  The only optimization of this
12270    sort that would be valid is changing
12271 
12272      SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12273 
12274    divided by 8 to
12275 
12276      SAVE_EXPR (I) * SAVE_EXPR (J)
12277 
12278    (where the same SAVE_EXPR (J) is used in the original and the
12279    transformed version).  */
12280 
12281 static int
multiple_of_p(tree type,tree top,tree bottom)12282 multiple_of_p (tree type, tree top, tree bottom)
12283 {
12284   if (operand_equal_p (top, bottom, 0))
12285     return 1;
12286 
12287   if (TREE_CODE (type) != INTEGER_TYPE)
12288     return 0;
12289 
12290   switch (TREE_CODE (top))
12291     {
12292     case BIT_AND_EXPR:
12293       /* Bitwise and provides a power of two multiple.  If the mask is
12294 	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
12295       if (!integer_pow2p (bottom))
12296 	return 0;
12297       /* FALLTHRU */
12298 
12299     case MULT_EXPR:
12300       return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12301 	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12302 
12303     case PLUS_EXPR:
12304     case MINUS_EXPR:
12305       return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12306 	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12307 
12308     case LSHIFT_EXPR:
12309       if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12310 	{
12311 	  tree op1, t1;
12312 
12313 	  op1 = TREE_OPERAND (top, 1);
12314 	  /* const_binop may not detect overflow correctly,
12315 	     so check for it explicitly here.  */
12316 	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12317 	      > TREE_INT_CST_LOW (op1)
12318 	      && TREE_INT_CST_HIGH (op1) == 0
12319 	      && 0 != (t1 = fold_convert (type,
12320 					  const_binop (LSHIFT_EXPR,
12321 						       size_one_node,
12322 						       op1, 0)))
12323 	      && ! TREE_OVERFLOW (t1))
12324 	    return multiple_of_p (type, t1, bottom);
12325 	}
12326       return 0;
12327 
12328     case NOP_EXPR:
12329       /* Can't handle conversions from non-integral or wider integral type.  */
12330       if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12331 	  || (TYPE_PRECISION (type)
12332 	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12333 	return 0;
12334 
12335       /* .. fall through ...  */
12336 
12337     case SAVE_EXPR:
12338       return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12339 
12340     case INTEGER_CST:
12341       if (TREE_CODE (bottom) != INTEGER_CST
12342 	  || (TYPE_UNSIGNED (type)
12343 	      && (tree_int_cst_sgn (top) < 0
12344 		  || tree_int_cst_sgn (bottom) < 0)))
12345 	return 0;
12346       return integer_zerop (const_binop (TRUNC_MOD_EXPR,
12347 					 top, bottom, 0));
12348 
12349     default:
12350       return 0;
12351     }
12352 }
12353 
12354 /* Return true if `t' is known to be non-negative.  If the return
12355    value is based on the assumption that signed overflow is undefined,
12356    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12357    *STRICT_OVERFLOW_P.  */
12358 
12359 int
tree_expr_nonnegative_warnv_p(tree t,bool * strict_overflow_p)12360 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
12361 {
12362   if (t == error_mark_node)
12363     return 0;
12364 
12365   if (TYPE_UNSIGNED (TREE_TYPE (t)))
12366     return 1;
12367 
12368   switch (TREE_CODE (t))
12369     {
12370     case SSA_NAME:
12371       /* Query VRP to see if it has recorded any information about
12372 	 the range of this object.  */
12373       return ssa_name_nonnegative_p (t);
12374 
12375     case ABS_EXPR:
12376       /* We can't return 1 if flag_wrapv is set because
12377 	 ABS_EXPR<INT_MIN> = INT_MIN.  */
12378       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
12379 	return 1;
12380       if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
12381 	{
12382 	  *strict_overflow_p = true;
12383 	  return 1;
12384 	}
12385       break;
12386 
12387     case INTEGER_CST:
12388       return tree_int_cst_sgn (t) >= 0;
12389 
12390     case REAL_CST:
12391       return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12392 
12393     case PLUS_EXPR:
12394       if (FLOAT_TYPE_P (TREE_TYPE (t)))
12395 	return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12396 					       strict_overflow_p)
12397 		&& tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12398 						  strict_overflow_p));
12399 
12400       /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12401 	 both unsigned and at least 2 bits shorter than the result.  */
12402       if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12403 	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12404 	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12405 	{
12406 	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12407 	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12408 	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12409 	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12410 	    {
12411 	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
12412 				       TYPE_PRECISION (inner2)) + 1;
12413 	      return prec < TYPE_PRECISION (TREE_TYPE (t));
12414 	    }
12415 	}
12416       break;
12417 
12418     case MULT_EXPR:
12419       if (FLOAT_TYPE_P (TREE_TYPE (t)))
12420 	{
12421 	  /* x * x for floating point x is always non-negative.  */
12422 	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12423 	    return 1;
12424 	  return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12425 						 strict_overflow_p)
12426 		  && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12427 						    strict_overflow_p));
12428 	}
12429 
12430       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12431 	 both unsigned and their total bits is shorter than the result.  */
12432       if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12433 	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12434 	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12435 	{
12436 	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12437 	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12438 	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12439 	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12440 	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12441 		   < TYPE_PRECISION (TREE_TYPE (t));
12442 	}
12443       return 0;
12444 
12445     case BIT_AND_EXPR:
12446     case MAX_EXPR:
12447       return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12448 					     strict_overflow_p)
12449 	      || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12450 						strict_overflow_p));
12451 
12452     case BIT_IOR_EXPR:
12453     case BIT_XOR_EXPR:
12454     case MIN_EXPR:
12455     case RDIV_EXPR:
12456     case TRUNC_DIV_EXPR:
12457     case CEIL_DIV_EXPR:
12458     case FLOOR_DIV_EXPR:
12459     case ROUND_DIV_EXPR:
12460       return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12461 					     strict_overflow_p)
12462 	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12463 						strict_overflow_p));
12464 
12465     case TRUNC_MOD_EXPR:
12466     case CEIL_MOD_EXPR:
12467     case FLOOR_MOD_EXPR:
12468     case ROUND_MOD_EXPR:
12469     case SAVE_EXPR:
12470     case NON_LVALUE_EXPR:
12471     case FLOAT_EXPR:
12472     case FIX_TRUNC_EXPR:
12473       return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12474 					    strict_overflow_p);
12475 
12476     case COMPOUND_EXPR:
12477     case MODIFY_EXPR:
12478       return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12479 					    strict_overflow_p);
12480 
12481     case BIND_EXPR:
12482       return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
12483 					    strict_overflow_p);
12484 
12485     case COND_EXPR:
12486       return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12487 					     strict_overflow_p)
12488 	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
12489 						strict_overflow_p));
12490 
12491     case NOP_EXPR:
12492       {
12493 	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12494 	tree outer_type = TREE_TYPE (t);
12495 
12496 	if (TREE_CODE (outer_type) == REAL_TYPE)
12497 	  {
12498 	    if (TREE_CODE (inner_type) == REAL_TYPE)
12499 	      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12500 						    strict_overflow_p);
12501 	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
12502 	      {
12503 		if (TYPE_UNSIGNED (inner_type))
12504 		  return 1;
12505 		return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12506 						      strict_overflow_p);
12507 	      }
12508 	  }
12509 	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12510 	  {
12511 	    if (TREE_CODE (inner_type) == REAL_TYPE)
12512 	      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
12513 						    strict_overflow_p);
12514 	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
12515 	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12516 		      && TYPE_UNSIGNED (inner_type);
12517 	  }
12518       }
12519       break;
12520 
12521     case TARGET_EXPR:
12522       {
12523 	tree temp = TARGET_EXPR_SLOT (t);
12524 	t = TARGET_EXPR_INITIAL (t);
12525 
12526 	/* If the initializer is non-void, then it's a normal expression
12527 	   that will be assigned to the slot.  */
12528 	if (!VOID_TYPE_P (t))
12529 	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
12530 
12531 	/* Otherwise, the initializer sets the slot in some way.  One common
12532 	   way is an assignment statement at the end of the initializer.  */
12533 	while (1)
12534 	  {
12535 	    if (TREE_CODE (t) == BIND_EXPR)
12536 	      t = expr_last (BIND_EXPR_BODY (t));
12537 	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12538 		     || TREE_CODE (t) == TRY_CATCH_EXPR)
12539 	      t = expr_last (TREE_OPERAND (t, 0));
12540 	    else if (TREE_CODE (t) == STATEMENT_LIST)
12541 	      t = expr_last (t);
12542 	    else
12543 	      break;
12544 	  }
12545 	if (TREE_CODE (t) == MODIFY_EXPR
12546 	    && TREE_OPERAND (t, 0) == temp)
12547 	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12548 						strict_overflow_p);
12549 
12550 	return 0;
12551       }
12552 
12553     case CALL_EXPR:
12554       {
12555 	tree fndecl = get_callee_fndecl (t);
12556 	tree arglist = TREE_OPERAND (t, 1);
12557 	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12558 	  switch (DECL_FUNCTION_CODE (fndecl))
12559 	    {
12560 	    CASE_FLT_FN (BUILT_IN_ACOS):
12561 	    CASE_FLT_FN (BUILT_IN_ACOSH):
12562 	    CASE_FLT_FN (BUILT_IN_CABS):
12563 	    CASE_FLT_FN (BUILT_IN_COSH):
12564 	    CASE_FLT_FN (BUILT_IN_ERFC):
12565 	    CASE_FLT_FN (BUILT_IN_EXP):
12566 	    CASE_FLT_FN (BUILT_IN_EXP10):
12567 	    CASE_FLT_FN (BUILT_IN_EXP2):
12568 	    CASE_FLT_FN (BUILT_IN_FABS):
12569 	    CASE_FLT_FN (BUILT_IN_FDIM):
12570 	    CASE_FLT_FN (BUILT_IN_HYPOT):
12571 	    CASE_FLT_FN (BUILT_IN_POW10):
12572 	    CASE_INT_FN (BUILT_IN_FFS):
12573 	    CASE_INT_FN (BUILT_IN_PARITY):
12574 	    CASE_INT_FN (BUILT_IN_POPCOUNT):
12575 	      /* Always true.  */
12576 	      return 1;
12577 
12578 	    CASE_FLT_FN (BUILT_IN_SQRT):
12579 	      /* sqrt(-0.0) is -0.0.  */
12580 	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12581 		return 1;
12582 	      return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12583 						    strict_overflow_p);
12584 
12585 	    CASE_FLT_FN (BUILT_IN_ASINH):
12586 	    CASE_FLT_FN (BUILT_IN_ATAN):
12587 	    CASE_FLT_FN (BUILT_IN_ATANH):
12588 	    CASE_FLT_FN (BUILT_IN_CBRT):
12589 	    CASE_FLT_FN (BUILT_IN_CEIL):
12590 	    CASE_FLT_FN (BUILT_IN_ERF):
12591 	    CASE_FLT_FN (BUILT_IN_EXPM1):
12592 	    CASE_FLT_FN (BUILT_IN_FLOOR):
12593 	    CASE_FLT_FN (BUILT_IN_FMOD):
12594 	    CASE_FLT_FN (BUILT_IN_FREXP):
12595 	    CASE_FLT_FN (BUILT_IN_LCEIL):
12596 	    CASE_FLT_FN (BUILT_IN_LDEXP):
12597 	    CASE_FLT_FN (BUILT_IN_LFLOOR):
12598 	    CASE_FLT_FN (BUILT_IN_LLCEIL):
12599 	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
12600 	    CASE_FLT_FN (BUILT_IN_LLRINT):
12601 	    CASE_FLT_FN (BUILT_IN_LLROUND):
12602 	    CASE_FLT_FN (BUILT_IN_LRINT):
12603 	    CASE_FLT_FN (BUILT_IN_LROUND):
12604 	    CASE_FLT_FN (BUILT_IN_MODF):
12605 	    CASE_FLT_FN (BUILT_IN_NEARBYINT):
12606 	    CASE_FLT_FN (BUILT_IN_POW):
12607 	    CASE_FLT_FN (BUILT_IN_RINT):
12608 	    CASE_FLT_FN (BUILT_IN_ROUND):
12609 	    CASE_FLT_FN (BUILT_IN_SIGNBIT):
12610 	    CASE_FLT_FN (BUILT_IN_SINH):
12611 	    CASE_FLT_FN (BUILT_IN_TANH):
12612 	    CASE_FLT_FN (BUILT_IN_TRUNC):
12613 	      /* True if the 1st argument is nonnegative.  */
12614 	      return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12615 						    strict_overflow_p);
12616 
12617 	    CASE_FLT_FN (BUILT_IN_FMAX):
12618 	      /* True if the 1st OR 2nd arguments are nonnegative.  */
12619 	      return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12620 						     strict_overflow_p)
12621 		      || (tree_expr_nonnegative_warnv_p
12622 			  (TREE_VALUE (TREE_CHAIN (arglist)),
12623 			   strict_overflow_p)));
12624 
12625 	    CASE_FLT_FN (BUILT_IN_FMIN):
12626 	      /* True if the 1st AND 2nd arguments are nonnegative.  */
12627 	      return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12628 						     strict_overflow_p)
12629 		      && (tree_expr_nonnegative_warnv_p
12630 			  (TREE_VALUE (TREE_CHAIN (arglist)),
12631 			   strict_overflow_p)));
12632 
12633 	    CASE_FLT_FN (BUILT_IN_COPYSIGN):
12634 	      /* True if the 2nd argument is nonnegative.  */
12635 	      return (tree_expr_nonnegative_warnv_p
12636 		      (TREE_VALUE (TREE_CHAIN (arglist)),
12637 		       strict_overflow_p));
12638 
12639 	    default:
12640 	      break;
12641 	    }
12642       }
12643 
12644       /* ... fall through ...  */
12645 
12646     default:
12647       {
12648 	tree type = TREE_TYPE (t);
12649 	if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12650 	    && truth_value_p (TREE_CODE (t)))
12651 	  /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12652              have a signed:1 type (where the value is -1 and 0).  */
12653 	  return true;
12654       }
12655     }
12656 
12657   /* We don't know sign of `t', so be conservative and return false.  */
12658   return 0;
12659 }
12660 
12661 /* Return true if `t' is known to be non-negative.  Handle warnings
12662    about undefined signed overflow.  */
12663 
12664 int
tree_expr_nonnegative_p(tree t)12665 tree_expr_nonnegative_p (tree t)
12666 {
12667   int ret;
12668   bool strict_overflow_p;
12669 
12670   strict_overflow_p = false;
12671   ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
12672   if (strict_overflow_p)
12673     fold_overflow_warning (("assuming signed overflow does not occur when "
12674 			    "determining that expression is always "
12675 			    "non-negative"),
12676 			   WARN_STRICT_OVERFLOW_MISC);
12677   return ret;
12678 }
12679 
12680 /* Return true when T is an address and is known to be nonzero.
12681    For floating point we further ensure that T is not denormal.
12682    Similar logic is present in nonzero_address in rtlanal.h.
12683 
12684    If the return value is based on the assumption that signed overflow
12685    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
12686    change *STRICT_OVERFLOW_P.  */
12687 
12688 bool
tree_expr_nonzero_warnv_p(tree t,bool * strict_overflow_p)12689 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
12690 {
12691   tree type = TREE_TYPE (t);
12692   bool sub_strict_overflow_p;
12693 
12694   /* Doing something useful for floating point would need more work.  */
12695   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12696     return false;
12697 
12698   switch (TREE_CODE (t))
12699     {
12700     case SSA_NAME:
12701       /* Query VRP to see if it has recorded any information about
12702 	 the range of this object.  */
12703       return ssa_name_nonzero_p (t);
12704 
12705     case ABS_EXPR:
12706       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12707 					strict_overflow_p);
12708 
12709     case INTEGER_CST:
12710       /* We used to test for !integer_zerop here.  This does not work correctly
12711 	 if TREE_CONSTANT_OVERFLOW (t).  */
12712       return (TREE_INT_CST_LOW (t) != 0
12713 	      || TREE_INT_CST_HIGH (t) != 0);
12714 
12715     case PLUS_EXPR:
12716       if (TYPE_OVERFLOW_UNDEFINED (type))
12717 	{
12718 	  /* With the presence of negative values it is hard
12719 	     to say something.  */
12720 	  sub_strict_overflow_p = false;
12721 	  if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12722 					      &sub_strict_overflow_p)
12723 	      || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12724 						 &sub_strict_overflow_p))
12725 	    return false;
12726 	  /* One of operands must be positive and the other non-negative.  */
12727 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
12728 	     overflows, on a twos-complement machine the sum of two
12729 	     nonnegative numbers can never be zero.  */
12730 	  return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12731 					     strict_overflow_p)
12732 	          || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12733 						strict_overflow_p));
12734 	}
12735       break;
12736 
12737     case MULT_EXPR:
12738       if (TYPE_OVERFLOW_UNDEFINED (type))
12739 	{
12740 	  if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12741 					 strict_overflow_p)
12742 	      && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12743 					    strict_overflow_p))
12744 	    {
12745 	      *strict_overflow_p = true;
12746 	      return true;
12747 	    }
12748 	}
12749       break;
12750 
12751     case NOP_EXPR:
12752       {
12753 	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12754 	tree outer_type = TREE_TYPE (t);
12755 
12756 	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12757 		&& tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12758 					      strict_overflow_p));
12759       }
12760       break;
12761 
12762    case ADDR_EXPR:
12763       {
12764 	tree base = get_base_address (TREE_OPERAND (t, 0));
12765 
12766 	if (!base)
12767 	  return false;
12768 
12769 	/* Weak declarations may link to NULL.  */
12770 	if (VAR_OR_FUNCTION_DECL_P (base))
12771 	  return !DECL_WEAK (base);
12772 
12773 	/* Constants are never weak.  */
12774 	if (CONSTANT_CLASS_P (base))
12775 	  return true;
12776 
12777 	return false;
12778       }
12779 
12780     case COND_EXPR:
12781       sub_strict_overflow_p = false;
12782       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12783 				     &sub_strict_overflow_p)
12784 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
12785 					&sub_strict_overflow_p))
12786 	{
12787 	  if (sub_strict_overflow_p)
12788 	    *strict_overflow_p = true;
12789 	  return true;
12790 	}
12791       break;
12792 
12793     case MIN_EXPR:
12794       sub_strict_overflow_p = false;
12795       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12796 				     &sub_strict_overflow_p)
12797 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12798 					&sub_strict_overflow_p))
12799 	{
12800 	  if (sub_strict_overflow_p)
12801 	    *strict_overflow_p = true;
12802 	}
12803       break;
12804 
12805     case MAX_EXPR:
12806       sub_strict_overflow_p = false;
12807       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12808 				     &sub_strict_overflow_p))
12809 	{
12810 	  if (sub_strict_overflow_p)
12811 	    *strict_overflow_p = true;
12812 
12813 	  /* When both operands are nonzero, then MAX must be too.  */
12814 	  if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12815 					 strict_overflow_p))
12816 	    return true;
12817 
12818 	  /* MAX where operand 0 is positive is positive.  */
12819 	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12820 					       strict_overflow_p);
12821 	}
12822       /* MAX where operand 1 is positive is positive.  */
12823       else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12824 					  &sub_strict_overflow_p)
12825 	       && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12826 						 &sub_strict_overflow_p))
12827 	{
12828 	  if (sub_strict_overflow_p)
12829 	    *strict_overflow_p = true;
12830 	  return true;
12831 	}
12832       break;
12833 
12834     case COMPOUND_EXPR:
12835     case MODIFY_EXPR:
12836     case BIND_EXPR:
12837       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12838 					strict_overflow_p);
12839 
12840     case SAVE_EXPR:
12841     case NON_LVALUE_EXPR:
12842       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12843 					strict_overflow_p);
12844 
12845     case BIT_IOR_EXPR:
12846       return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12847 					strict_overflow_p)
12848 	      || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12849 					    strict_overflow_p));
12850 
12851     case CALL_EXPR:
12852       return alloca_call_p (t);
12853 
12854     default:
12855       break;
12856     }
12857   return false;
12858 }
12859 
12860 /* Return true when T is an address and is known to be nonzero.
12861    Handle warnings about undefined signed overflow.  */
12862 
12863 bool
tree_expr_nonzero_p(tree t)12864 tree_expr_nonzero_p (tree t)
12865 {
12866   bool ret, strict_overflow_p;
12867 
12868   strict_overflow_p = false;
12869   ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
12870   if (strict_overflow_p)
12871     fold_overflow_warning (("assuming signed overflow does not occur when "
12872 			    "determining that expression is always "
12873 			    "non-zero"),
12874 			   WARN_STRICT_OVERFLOW_MISC);
12875   return ret;
12876 }
12877 
12878 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12879    attempt to fold the expression to a constant without modifying TYPE,
12880    OP0 or OP1.
12881 
12882    If the expression could be simplified to a constant, then return
12883    the constant.  If the expression would not be simplified to a
12884    constant, then return NULL_TREE.  */
12885 
12886 tree
fold_binary_to_constant(enum tree_code code,tree type,tree op0,tree op1)12887 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12888 {
12889   tree tem = fold_binary (code, type, op0, op1);
12890   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12891 }
12892 
12893 /* Given the components of a unary expression CODE, TYPE and OP0,
12894    attempt to fold the expression to a constant without modifying
12895    TYPE or OP0.
12896 
12897    If the expression could be simplified to a constant, then return
12898    the constant.  If the expression would not be simplified to a
12899    constant, then return NULL_TREE.  */
12900 
12901 tree
fold_unary_to_constant(enum tree_code code,tree type,tree op0)12902 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12903 {
12904   tree tem = fold_unary (code, type, op0);
12905   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12906 }
12907 
12908 /* If EXP represents referencing an element in a constant string
12909    (either via pointer arithmetic or array indexing), return the
12910    tree representing the value accessed, otherwise return NULL.  */
12911 
12912 tree
fold_read_from_constant_string(tree exp)12913 fold_read_from_constant_string (tree exp)
12914 {
12915   if ((TREE_CODE (exp) == INDIRECT_REF
12916        || TREE_CODE (exp) == ARRAY_REF)
12917       && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
12918     {
12919       tree exp1 = TREE_OPERAND (exp, 0);
12920       tree index;
12921       tree string;
12922 
12923       if (TREE_CODE (exp) == INDIRECT_REF)
12924 	string = string_constant (exp1, &index);
12925       else
12926 	{
12927 	  tree low_bound = array_ref_low_bound (exp);
12928 	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12929 
12930 	  /* Optimize the special-case of a zero lower bound.
12931 
12932 	     We convert the low_bound to sizetype to avoid some problems
12933 	     with constant folding.  (E.g. suppose the lower bound is 1,
12934 	     and its mode is QI.  Without the conversion,l (ARRAY
12935 	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12936 	     +INDEX), which becomes (ARRAY+255+INDEX).  Opps!)  */
12937 	  if (! integer_zerop (low_bound))
12938 	    index = size_diffop (index, fold_convert (sizetype, low_bound));
12939 
12940 	  string = exp1;
12941 	}
12942 
12943       if (string
12944 	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12945 	  && TREE_CODE (string) == STRING_CST
12946 	  && TREE_CODE (index) == INTEGER_CST
12947 	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12948 	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12949 	      == MODE_INT)
12950 	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12951 	return fold_convert (TREE_TYPE (exp),
12952 			     build_int_cst (NULL_TREE,
12953 					    (TREE_STRING_POINTER (string)
12954 					     [TREE_INT_CST_LOW (index)])));
12955     }
12956   return NULL;
12957 }
12958 
12959 /* Return the tree for neg (ARG0) when ARG0 is known to be either
12960    an integer constant or real constant.
12961 
12962    TYPE is the type of the result.  */
12963 
12964 static tree
fold_negate_const(tree arg0,tree type)12965 fold_negate_const (tree arg0, tree type)
12966 {
12967   tree t = NULL_TREE;
12968 
12969   switch (TREE_CODE (arg0))
12970     {
12971     case INTEGER_CST:
12972       {
12973 	unsigned HOST_WIDE_INT low;
12974 	HOST_WIDE_INT high;
12975 	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12976 				   TREE_INT_CST_HIGH (arg0),
12977 				   &low, &high);
12978 	t = build_int_cst_wide (type, low, high);
12979 	t = force_fit_type (t, 1,
12980 			    (overflow | TREE_OVERFLOW (arg0))
12981 			    && !TYPE_UNSIGNED (type),
12982 			    TREE_CONSTANT_OVERFLOW (arg0));
12983 	break;
12984       }
12985 
12986     case REAL_CST:
12987       t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12988       break;
12989 
12990     default:
12991       gcc_unreachable ();
12992     }
12993 
12994   return t;
12995 }
12996 
12997 /* Return the tree for abs (ARG0) when ARG0 is known to be either
12998    an integer constant or real constant.
12999 
13000    TYPE is the type of the result.  */
13001 
13002 tree
fold_abs_const(tree arg0,tree type)13003 fold_abs_const (tree arg0, tree type)
13004 {
13005   tree t = NULL_TREE;
13006 
13007   switch (TREE_CODE (arg0))
13008     {
13009     case INTEGER_CST:
13010       /* If the value is unsigned, then the absolute value is
13011 	 the same as the ordinary value.  */
13012       if (TYPE_UNSIGNED (type))
13013 	t = arg0;
13014       /* Similarly, if the value is non-negative.  */
13015       else if (INT_CST_LT (integer_minus_one_node, arg0))
13016 	t = arg0;
13017       /* If the value is negative, then the absolute value is
13018 	 its negation.  */
13019       else
13020 	{
13021 	  unsigned HOST_WIDE_INT low;
13022 	  HOST_WIDE_INT high;
13023 	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13024 				     TREE_INT_CST_HIGH (arg0),
13025 				     &low, &high);
13026 	  t = build_int_cst_wide (type, low, high);
13027 	  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
13028 			      TREE_CONSTANT_OVERFLOW (arg0));
13029 	}
13030       break;
13031 
13032     case REAL_CST:
13033       if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13034 	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13035       else
13036 	t =  arg0;
13037       break;
13038 
13039     default:
13040       gcc_unreachable ();
13041     }
13042 
13043   return t;
13044 }
13045 
13046 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13047    constant.  TYPE is the type of the result.  */
13048 
13049 static tree
fold_not_const(tree arg0,tree type)13050 fold_not_const (tree arg0, tree type)
13051 {
13052   tree t = NULL_TREE;
13053 
13054   gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13055 
13056   t = build_int_cst_wide (type,
13057 			  ~ TREE_INT_CST_LOW (arg0),
13058 			  ~ TREE_INT_CST_HIGH (arg0));
13059   t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
13060 		      TREE_CONSTANT_OVERFLOW (arg0));
13061 
13062   return t;
13063 }
13064 
13065 /* Given CODE, a relational operator, the target type, TYPE and two
13066    constant operands OP0 and OP1, return the result of the
13067    relational operation.  If the result is not a compile time
13068    constant, then return NULL_TREE.  */
13069 
13070 static tree
fold_relational_const(enum tree_code code,tree type,tree op0,tree op1)13071 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13072 {
13073   int result, invert;
13074 
13075   /* From here on, the only cases we handle are when the result is
13076      known to be a constant.  */
13077 
13078   if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13079     {
13080       const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13081       const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13082 
13083       /* Handle the cases where either operand is a NaN.  */
13084       if (real_isnan (c0) || real_isnan (c1))
13085 	{
13086 	  switch (code)
13087 	    {
13088 	    case EQ_EXPR:
13089 	    case ORDERED_EXPR:
13090 	      result = 0;
13091 	      break;
13092 
13093 	    case NE_EXPR:
13094 	    case UNORDERED_EXPR:
13095 	    case UNLT_EXPR:
13096 	    case UNLE_EXPR:
13097 	    case UNGT_EXPR:
13098 	    case UNGE_EXPR:
13099 	    case UNEQ_EXPR:
13100               result = 1;
13101 	      break;
13102 
13103 	    case LT_EXPR:
13104 	    case LE_EXPR:
13105 	    case GT_EXPR:
13106 	    case GE_EXPR:
13107 	    case LTGT_EXPR:
13108 	      if (flag_trapping_math)
13109 		return NULL_TREE;
13110 	      result = 0;
13111 	      break;
13112 
13113 	    default:
13114 	      gcc_unreachable ();
13115 	    }
13116 
13117 	  return constant_boolean_node (result, type);
13118 	}
13119 
13120       return constant_boolean_node (real_compare (code, c0, c1), type);
13121     }
13122 
13123   /* Handle equality/inequality of complex constants.  */
13124   if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13125     {
13126       tree rcond = fold_relational_const (code, type,
13127 					  TREE_REALPART (op0),
13128 					  TREE_REALPART (op1));
13129       tree icond = fold_relational_const (code, type,
13130 					  TREE_IMAGPART (op0),
13131 					  TREE_IMAGPART (op1));
13132       if (code == EQ_EXPR)
13133 	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13134       else if (code == NE_EXPR)
13135 	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13136       else
13137 	return NULL_TREE;
13138     }
13139 
13140   /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13141 
13142      To compute GT, swap the arguments and do LT.
13143      To compute GE, do LT and invert the result.
13144      To compute LE, swap the arguments, do LT and invert the result.
13145      To compute NE, do EQ and invert the result.
13146 
13147      Therefore, the code below must handle only EQ and LT.  */
13148 
13149   if (code == LE_EXPR || code == GT_EXPR)
13150     {
13151       tree tem = op0;
13152       op0 = op1;
13153       op1 = tem;
13154       code = swap_tree_comparison (code);
13155     }
13156 
13157   /* Note that it is safe to invert for real values here because we
13158      have already handled the one case that it matters.  */
13159 
13160   invert = 0;
13161   if (code == NE_EXPR || code == GE_EXPR)
13162     {
13163       invert = 1;
13164       code = invert_tree_comparison (code, false);
13165     }
13166 
13167   /* Compute a result for LT or EQ if args permit;
13168      Otherwise return T.  */
13169   if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13170     {
13171       if (code == EQ_EXPR)
13172 	result = tree_int_cst_equal (op0, op1);
13173       else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13174 	result = INT_CST_LT_UNSIGNED (op0, op1);
13175       else
13176 	result = INT_CST_LT (op0, op1);
13177     }
13178   else
13179     return NULL_TREE;
13180 
13181   if (invert)
13182     result ^= 1;
13183   return constant_boolean_node (result, type);
13184 }
13185 
13186 /* Build an expression for the a clean point containing EXPR with type TYPE.
13187    Don't build a cleanup point expression for EXPR which don't have side
13188    effects.  */
13189 
13190 tree
fold_build_cleanup_point_expr(tree type,tree expr)13191 fold_build_cleanup_point_expr (tree type, tree expr)
13192 {
13193   /* If the expression does not have side effects then we don't have to wrap
13194      it with a cleanup point expression.  */
13195   if (!TREE_SIDE_EFFECTS (expr))
13196     return expr;
13197 
13198   /* If the expression is a return, check to see if the expression inside the
13199      return has no side effects or the right hand side of the modify expression
13200      inside the return. If either don't have side effects set we don't need to
13201      wrap the expression in a cleanup point expression.  Note we don't check the
13202      left hand side of the modify because it should always be a return decl.  */
13203   if (TREE_CODE (expr) == RETURN_EXPR)
13204     {
13205       tree op = TREE_OPERAND (expr, 0);
13206       if (!op || !TREE_SIDE_EFFECTS (op))
13207         return expr;
13208       op = TREE_OPERAND (op, 1);
13209       if (!TREE_SIDE_EFFECTS (op))
13210         return expr;
13211     }
13212 
13213   return build1 (CLEANUP_POINT_EXPR, type, expr);
13214 }
13215 
13216 /* Build an expression for the address of T.  Folds away INDIRECT_REF to
13217    avoid confusing the gimplify process.  */
13218 
13219 tree
build_fold_addr_expr_with_type(tree t,tree ptrtype)13220 build_fold_addr_expr_with_type (tree t, tree ptrtype)
13221 {
13222   /* The size of the object is not relevant when talking about its address.  */
13223   if (TREE_CODE (t) == WITH_SIZE_EXPR)
13224     t = TREE_OPERAND (t, 0);
13225 
13226   /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13227   if (TREE_CODE (t) == INDIRECT_REF
13228       || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13229     {
13230       t = TREE_OPERAND (t, 0);
13231       if (TREE_TYPE (t) != ptrtype)
13232 	t = build1 (NOP_EXPR, ptrtype, t);
13233     }
13234   else
13235     {
13236       tree base = t;
13237 
13238       while (handled_component_p (base))
13239 	base = TREE_OPERAND (base, 0);
13240       if (DECL_P (base))
13241 	TREE_ADDRESSABLE (base) = 1;
13242 
13243       t = build1 (ADDR_EXPR, ptrtype, t);
13244     }
13245 
13246   return t;
13247 }
13248 
13249 tree
build_fold_addr_expr(tree t)13250 build_fold_addr_expr (tree t)
13251 {
13252   return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13253 }
13254 
13255 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13256    of an indirection through OP0, or NULL_TREE if no simplification is
13257    possible.  */
13258 
13259 tree
fold_indirect_ref_1(tree type,tree op0)13260 fold_indirect_ref_1 (tree type, tree op0)
13261 {
13262   tree sub = op0;
13263   tree subtype;
13264 
13265   STRIP_NOPS (sub);
13266   subtype = TREE_TYPE (sub);
13267   if (!POINTER_TYPE_P (subtype))
13268     return NULL_TREE;
13269 
13270   if (TREE_CODE (sub) == ADDR_EXPR)
13271     {
13272       tree op = TREE_OPERAND (sub, 0);
13273       tree optype = TREE_TYPE (op);
13274       /* *&CONST_DECL -> to the value of the const decl.  */
13275       if (TREE_CODE (op) == CONST_DECL)
13276 	return DECL_INITIAL (op);
13277       /* *&p => p;  make sure to handle *&"str"[cst] here.  */
13278       if (type == optype)
13279 	{
13280 	  tree fop = fold_read_from_constant_string (op);
13281 	  if (fop)
13282 	    return fop;
13283 	  else
13284 	    return op;
13285 	}
13286       /* *(foo *)&fooarray => fooarray[0] */
13287       else if (TREE_CODE (optype) == ARRAY_TYPE
13288 	       && type == TREE_TYPE (optype))
13289 	{
13290 	  tree type_domain = TYPE_DOMAIN (optype);
13291 	  tree min_val = size_zero_node;
13292 	  if (type_domain && TYPE_MIN_VALUE (type_domain))
13293 	    min_val = TYPE_MIN_VALUE (type_domain);
13294 	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13295 	}
13296       /* *(foo *)&complexfoo => __real__ complexfoo */
13297       else if (TREE_CODE (optype) == COMPLEX_TYPE
13298 	       && type == TREE_TYPE (optype))
13299 	return fold_build1 (REALPART_EXPR, type, op);
13300     }
13301 
13302   /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13303   if (TREE_CODE (sub) == PLUS_EXPR
13304       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13305     {
13306       tree op00 = TREE_OPERAND (sub, 0);
13307       tree op01 = TREE_OPERAND (sub, 1);
13308       tree op00type;
13309 
13310       STRIP_NOPS (op00);
13311       op00type = TREE_TYPE (op00);
13312       if (TREE_CODE (op00) == ADDR_EXPR
13313  	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13314 	  && type == TREE_TYPE (TREE_TYPE (op00type)))
13315 	{
13316 	  tree size = TYPE_SIZE_UNIT (type);
13317 	  if (tree_int_cst_equal (size, op01))
13318 	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13319 	}
13320     }
13321 
13322   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13323   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13324       && type == TREE_TYPE (TREE_TYPE (subtype)))
13325     {
13326       tree type_domain;
13327       tree min_val = size_zero_node;
13328       sub = build_fold_indirect_ref (sub);
13329       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13330       if (type_domain && TYPE_MIN_VALUE (type_domain))
13331 	min_val = TYPE_MIN_VALUE (type_domain);
13332       return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13333     }
13334 
13335   return NULL_TREE;
13336 }
13337 
13338 /* Builds an expression for an indirection through T, simplifying some
13339    cases.  */
13340 
13341 tree
build_fold_indirect_ref(tree t)13342 build_fold_indirect_ref (tree t)
13343 {
13344   tree type = TREE_TYPE (TREE_TYPE (t));
13345   tree sub = fold_indirect_ref_1 (type, t);
13346 
13347   if (sub)
13348     return sub;
13349   else
13350     return build1 (INDIRECT_REF, type, t);
13351 }
13352 
13353 /* Given an INDIRECT_REF T, return either T or a simplified version.  */
13354 
13355 tree
fold_indirect_ref(tree t)13356 fold_indirect_ref (tree t)
13357 {
13358   tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13359 
13360   if (sub)
13361     return sub;
13362   else
13363     return t;
13364 }
13365 
13366 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13367    whose result is ignored.  The type of the returned tree need not be
13368    the same as the original expression.  */
13369 
13370 tree
fold_ignored_result(tree t)13371 fold_ignored_result (tree t)
13372 {
13373   if (!TREE_SIDE_EFFECTS (t))
13374     return integer_zero_node;
13375 
13376   for (;;)
13377     switch (TREE_CODE_CLASS (TREE_CODE (t)))
13378       {
13379       case tcc_unary:
13380 	t = TREE_OPERAND (t, 0);
13381 	break;
13382 
13383       case tcc_binary:
13384       case tcc_comparison:
13385 	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13386 	  t = TREE_OPERAND (t, 0);
13387 	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13388 	  t = TREE_OPERAND (t, 1);
13389 	else
13390 	  return t;
13391 	break;
13392 
13393       case tcc_expression:
13394 	switch (TREE_CODE (t))
13395 	  {
13396 	  case COMPOUND_EXPR:
13397 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13398 	      return t;
13399 	    t = TREE_OPERAND (t, 0);
13400 	    break;
13401 
13402 	  case COND_EXPR:
13403 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13404 		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13405 	      return t;
13406 	    t = TREE_OPERAND (t, 0);
13407 	    break;
13408 
13409 	  default:
13410 	    return t;
13411 	  }
13412 	break;
13413 
13414       default:
13415 	return t;
13416       }
13417 }
13418 
13419 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13420    This can only be applied to objects of a sizetype.  */
13421 
13422 tree
round_up(tree value,int divisor)13423 round_up (tree value, int divisor)
13424 {
13425   tree div = NULL_TREE;
13426 
13427   gcc_assert (divisor > 0);
13428   if (divisor == 1)
13429     return value;
13430 
13431   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
13432      have to do anything.  Only do this when we are not given a const,
13433      because in that case, this check is more expensive than just
13434      doing it.  */
13435   if (TREE_CODE (value) != INTEGER_CST)
13436     {
13437       div = build_int_cst (TREE_TYPE (value), divisor);
13438 
13439       if (multiple_of_p (TREE_TYPE (value), value, div))
13440 	return value;
13441     }
13442 
13443   /* If divisor is a power of two, simplify this to bit manipulation.  */
13444   if (divisor == (divisor & -divisor))
13445     {
13446       tree t;
13447 
13448       t = build_int_cst (TREE_TYPE (value), divisor - 1);
13449       value = size_binop (PLUS_EXPR, value, t);
13450       t = build_int_cst (TREE_TYPE (value), -divisor);
13451       value = size_binop (BIT_AND_EXPR, value, t);
13452     }
13453   else
13454     {
13455       if (!div)
13456 	div = build_int_cst (TREE_TYPE (value), divisor);
13457       value = size_binop (CEIL_DIV_EXPR, value, div);
13458       value = size_binop (MULT_EXPR, value, div);
13459     }
13460 
13461   return value;
13462 }
13463 
13464 /* Likewise, but round down.  */
13465 
13466 tree
round_down(tree value,int divisor)13467 round_down (tree value, int divisor)
13468 {
13469   tree div = NULL_TREE;
13470 
13471   gcc_assert (divisor > 0);
13472   if (divisor == 1)
13473     return value;
13474 
13475   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
13476      have to do anything.  Only do this when we are not given a const,
13477      because in that case, this check is more expensive than just
13478      doing it.  */
13479   if (TREE_CODE (value) != INTEGER_CST)
13480     {
13481       div = build_int_cst (TREE_TYPE (value), divisor);
13482 
13483       if (multiple_of_p (TREE_TYPE (value), value, div))
13484 	return value;
13485     }
13486 
13487   /* If divisor is a power of two, simplify this to bit manipulation.  */
13488   if (divisor == (divisor & -divisor))
13489     {
13490       tree t;
13491 
13492       t = build_int_cst (TREE_TYPE (value), -divisor);
13493       value = size_binop (BIT_AND_EXPR, value, t);
13494     }
13495   else
13496     {
13497       if (!div)
13498 	div = build_int_cst (TREE_TYPE (value), divisor);
13499       value = size_binop (FLOOR_DIV_EXPR, value, div);
13500       value = size_binop (MULT_EXPR, value, div);
13501     }
13502 
13503   return value;
13504 }
13505 
13506 /* Returns the pointer to the base of the object addressed by EXP and
13507    extracts the information about the offset of the access, storing it
13508    to PBITPOS and POFFSET.  */
13509 
13510 static tree
split_address_to_core_and_offset(tree exp,HOST_WIDE_INT * pbitpos,tree * poffset)13511 split_address_to_core_and_offset (tree exp,
13512 				  HOST_WIDE_INT *pbitpos, tree *poffset)
13513 {
13514   tree core;
13515   enum machine_mode mode;
13516   int unsignedp, volatilep;
13517   HOST_WIDE_INT bitsize;
13518 
13519   if (TREE_CODE (exp) == ADDR_EXPR)
13520     {
13521       core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13522 				  poffset, &mode, &unsignedp, &volatilep,
13523 				  false);
13524       core = build_fold_addr_expr (core);
13525     }
13526   else
13527     {
13528       core = exp;
13529       *pbitpos = 0;
13530       *poffset = NULL_TREE;
13531     }
13532 
13533   return core;
13534 }
13535 
13536 /* Returns true if addresses of E1 and E2 differ by a constant, false
13537    otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
13538 
13539 bool
ptr_difference_const(tree e1,tree e2,HOST_WIDE_INT * diff)13540 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13541 {
13542   tree core1, core2;
13543   HOST_WIDE_INT bitpos1, bitpos2;
13544   tree toffset1, toffset2, tdiff, type;
13545 
13546   core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13547   core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13548 
13549   if (bitpos1 % BITS_PER_UNIT != 0
13550       || bitpos2 % BITS_PER_UNIT != 0
13551       || !operand_equal_p (core1, core2, 0))
13552     return false;
13553 
13554   if (toffset1 && toffset2)
13555     {
13556       type = TREE_TYPE (toffset1);
13557       if (type != TREE_TYPE (toffset2))
13558 	toffset2 = fold_convert (type, toffset2);
13559 
13560       tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13561       if (!cst_and_fits_in_hwi (tdiff))
13562 	return false;
13563 
13564       *diff = int_cst_value (tdiff);
13565     }
13566   else if (toffset1 || toffset2)
13567     {
13568       /* If only one of the offsets is non-constant, the difference cannot
13569 	 be a constant.  */
13570       return false;
13571     }
13572   else
13573     *diff = 0;
13574 
13575   *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
13576   return true;
13577 }
13578 
13579 /* Simplify the floating point expression EXP when the sign of the
13580    result is not significant.  Return NULL_TREE if no simplification
13581    is possible.  */
13582 
13583 tree
fold_strip_sign_ops(tree exp)13584 fold_strip_sign_ops (tree exp)
13585 {
13586   tree arg0, arg1;
13587 
13588   switch (TREE_CODE (exp))
13589     {
13590     case ABS_EXPR:
13591     case NEGATE_EXPR:
13592       arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13593       return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13594 
13595     case MULT_EXPR:
13596     case RDIV_EXPR:
13597       if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13598 	return NULL_TREE;
13599       arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13600       arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13601       if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13602 	return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13603 			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
13604 			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
13605       break;
13606 
13607     default:
13608       break;
13609     }
13610   return NULL_TREE;
13611 }
13612 
13613