1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
41
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
45
46 #include "config.h"
47 #include "system.h"
48 #include "coretypes.h"
49 #include "tm.h"
50 #include "flags.h"
51 #include "tree.h"
52 #include "real.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "toplev.h"
57 #include "ggc.h"
58 #include "hashtab.h"
59 #include "langhooks.h"
60 #include "md5.h"
61
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.

   The encoding uses one bit per elementary outcome:
     bit 0 (value 1) - "less than" holds
     bit 1 (value 2) - "equal" holds
     bit 2 (value 4) - "greater than" holds
     bit 3 (value 8) - "unordered" holds (NaN operands)
   Compound predicates are simply the bitwise OR of their outcomes,
   e.g. LE = LT|EQ = 3 and NE = LT|GT|UNORD = 13, so conjunction and
   disjunction of comparisons become AND and OR of these codes.  */
enum comparison_code {
  COMPCODE_FALSE = 0,	/* never true */
  COMPCODE_LT = 1,	/* less than */
  COMPCODE_EQ = 2,	/* equal */
  COMPCODE_LE = 3,	/* LT | EQ */
  COMPCODE_GT = 4,	/* greater than */
  COMPCODE_LTGT = 5,	/* LT | GT: ordered and unequal */
  COMPCODE_GE = 6,	/* GT | EQ */
  COMPCODE_ORD = 7,	/* LT | EQ | GT: operands are ordered */
  COMPCODE_UNORD = 8,	/* at least one operand is a NaN */
  COMPCODE_UNLT = 9,	/* UNORD | LT */
  COMPCODE_UNEQ = 10,	/* UNORD | EQ */
  COMPCODE_UNLE = 11,	/* UNORD | LT | EQ */
  COMPCODE_UNGT = 12,	/* UNORD | GT */
  COMPCODE_NE = 13,	/* UNORD | LT | GT: not equal */
  COMPCODE_UNGE = 14,	/* UNORD | GT | EQ */
  COMPCODE_TRUE = 15	/* always true */
};
83
/* Forward declarations of the file-local helper routines defined below.  */
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
				 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
			 tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
135
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
/* ~((a)^(b)) has the sign bit set iff A and B agree in sign; ANDing with
   (a)^(sum) then leaves the sign bit set only when SUM disagrees.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

/* Extract the low half-word of X as a nonnegative value.  */
#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
/* Extract the high half-word of X (logical shift, so nonnegative).  */
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
/* The radix of the half-word representation: 2 ** (HOST_BITS_PER_WIDE_INT/2).  */
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
156
157 /* Unpack a two-word integer into 4 words.
158 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
159 WORDS points to the array of HOST_WIDE_INTs. */
160
161 static void
encode(HOST_WIDE_INT * words,unsigned HOST_WIDE_INT low,HOST_WIDE_INT hi)162 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
163 {
164 words[0] = LOWPART (low);
165 words[1] = HIGHPART (low);
166 words[2] = LOWPART (hi);
167 words[3] = HIGHPART (hi);
168 }
169
170 /* Pack an array of 4 words into a two-word integer.
171 WORDS points to the array of words.
172 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
173
174 static void
decode(HOST_WIDE_INT * words,unsigned HOST_WIDE_INT * low,HOST_WIDE_INT * hi)175 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
176 HOST_WIDE_INT *hi)
177 {
178 *low = words[0] + words[1] * BASE;
179 *hi = words[2] + words[3] * BASE;
180 }
181
182 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
183 in overflow of the value, when >0 we are only interested in signed
184 overflow, for <0 we are interested in any overflow. OVERFLOWED
185 indicates whether overflow has already occurred. CONST_OVERFLOWED
186 indicates whether constant overflow has already occurred. We force
187 T's value to be within range of T's type (by setting to 0 or 1 all
188 the bits outside the type's range). We set TREE_OVERFLOWED if,
189 OVERFLOWED is nonzero,
190 or OVERFLOWABLE is >0 and signed overflow occurs
191 or OVERFLOWABLE is <0 and any overflow occurs
192 We set TREE_CONSTANT_OVERFLOWED if,
193 CONST_OVERFLOWED is nonzero
194 or we set TREE_OVERFLOWED.
195 We return either the original T, or a copy. */
196
tree
force_fit_type (tree t, int overflowable,
		bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;		/* Precision of T's type in bits.  */
  int sign_extended_type;	/* Nonzero if T's type is treated as signed.  */

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  /* Pointers and offsets use the pointer width rather than their
     nominal type precision.  */
  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
			|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;				/* Value already fits; nothing to mask.  */
  else if (prec > HOST_BITS_PER_WIDE_INT)
    /* Only the excess bits of the high word need masking.  */
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half?  */
      if (high & ((unsigned HOST_WIDE_INT)1
		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      /* The low word is exactly full; propagate its sign to the high word.  */
      if ((HOST_WIDE_INT)low < 0)
	high = -1;
    }
  else
    {
      /* Sign extend bottom half?  */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  high = -1;
	  low |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      /* Decide which overflow flags to set, per the rules in the
	 function comment above.  Copy the (possibly shared) node
	 before mutating its flags.  */
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (overflowed_const)
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }

  return t;
}
283
284 /* Add two doubleword integers with doubleword result.
285 Each argument is given as two `HOST_WIDE_INT' pieces.
286 One argument is L1 and H1; the other, L2 and H2.
287 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
288
289 int
add_double(unsigned HOST_WIDE_INT l1,HOST_WIDE_INT h1,unsigned HOST_WIDE_INT l2,HOST_WIDE_INT h2,unsigned HOST_WIDE_INT * lv,HOST_WIDE_INT * hv)290 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
291 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
292 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
293 {
294 unsigned HOST_WIDE_INT l;
295 HOST_WIDE_INT h;
296
297 l = l1 + l2;
298 h = h1 + h2 + (l < l1);
299
300 *lv = l;
301 *hv = h;
302 return OVERFLOW_SUM_SIGN (h1, h2, h);
303 }
304
305 /* Negate a doubleword integer with doubleword result.
306 Return nonzero if the operation overflows, assuming it's signed.
307 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
308 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
309
310 int
neg_double(unsigned HOST_WIDE_INT l1,HOST_WIDE_INT h1,unsigned HOST_WIDE_INT * lv,HOST_WIDE_INT * hv)311 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
312 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
313 {
314 if (l1 == 0)
315 {
316 *lv = 0;
317 *hv = - h1;
318 return (*hv & h1) < 0;
319 }
320 else
321 {
322 *lv = -l1;
323 *hv = ~h1;
324 return 0;
325 }
326 }
327
328 /* Multiply two doubleword integers with doubleword result.
329 Return nonzero if the operation overflows, assuming it's signed.
330 Each argument is given as two `HOST_WIDE_INT' pieces.
331 One argument is L1 and H1; the other, L2 and H2.
332 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
333
int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];	/* First operand in half-word digits.  */
  HOST_WIDE_INT arg2[4];	/* Second operand in half-word digits.  */
  HOST_WIDE_INT prod[4 * 2];	/* Full 8-digit product.  */
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  /* Grade-school multiplication on half-word digits: digit i of arg1
     times digit j of arg2 contributes to digit i+j of the product.  */
  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;	/* Carry out of this row.  */
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  /* The digits were multiplied as unsigned; correct the top half for
     each negative signed operand by subtracting the other operand.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  /* No overflow iff the top half is the sign extension of the low half:
     all-ones for a negative result, all-zeros otherwise.  */
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
385
386 /* Shift the doubleword integer in L1, H1 left by COUNT places
387 keeping only PREC bits of result.
388 Shift right if COUNT is negative.
389 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
390 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
391
void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  /* A negative count means shift right instead.  */
  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      /* The low word shifts entirely into the high word.  */
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      /* The two-step right shift (by width-count-1, then by 1) avoids the
	 undefined shift by a full word width when COUNT == 0.  */
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  /* SIGNMASK is all-ones if the result's sign bit (bit PREC-1) is set,
     else zero.  */
  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;				/* Full double-word precision: no excess bits.  */
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      /* Replace the excess bits of the high word with the sign.  */
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      /* Result fits in the low word; high word is pure sign.  */
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
448
449 /* Shift the doubleword integer in L1, H1 right by COUNT places
450 keeping only PREC bits of result. COUNT must be positive.
451 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
452 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
453
void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  /* For an arithmetic shift, SIGNMASK is all-ones when H1 is negative;
     for a logical shift it is zero.  */
  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      /* The high word shifts entirely into the low word.  */
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      /* The two-step left shift (by width-count-1, then by 1) avoids the
	 undefined shift by a full word width when COUNT == 0.  */
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      /* Every value bit was shifted out; result is pure sign.  */
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;				/* Remaining bits fill both words: nothing to do.  */
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      /* Replace excess bits of the high word with the sign.  */
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      /* Result fits in the low word; high word is pure sign.  */
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
509
510 /* Rotate the doubleword integer in L1, H1 left by COUNT places
511 keeping only PREC bits of result.
512 Rotate right if COUNT is negative.
513 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
514
515 void
lrotate_double(unsigned HOST_WIDE_INT l1,HOST_WIDE_INT h1,HOST_WIDE_INT count,unsigned int prec,unsigned HOST_WIDE_INT * lv,HOST_WIDE_INT * hv)516 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
517 HOST_WIDE_INT count, unsigned int prec,
518 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
519 {
520 unsigned HOST_WIDE_INT s1l, s2l;
521 HOST_WIDE_INT s1h, s2h;
522
523 count %= prec;
524 if (count < 0)
525 count += prec;
526
527 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
528 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
529 *lv = s1l | s2l;
530 *hv = s1h | s2h;
531 }
532
533 /* Rotate the doubleword integer in L1, H1 left by COUNT places
534 keeping only PREC bits of result. COUNT must be positive.
535 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
536
537 void
rrotate_double(unsigned HOST_WIDE_INT l1,HOST_WIDE_INT h1,HOST_WIDE_INT count,unsigned int prec,unsigned HOST_WIDE_INT * lv,HOST_WIDE_INT * hv)538 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
539 HOST_WIDE_INT count, unsigned int prec,
540 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
541 {
542 unsigned HOST_WIDE_INT s1l, s2l;
543 HOST_WIDE_INT s1h, s2h;
544
545 count %= prec;
546 if (count < 0)
547 count += prec;
548
549 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
550 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
551 *lv = s1l | s2l;
552 *hv = s1h | s2h;
553 }
554
555 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
556 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
557 CODE is a tree code for a kind of division, one of
558 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
559 or EXACT_DIV_EXPR
560 It controls how the quotient is rounded to an integer.
561 Return nonzero if the operation overflows.
562 UNS nonzero says do unsigned division. */
563
int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;		/* Nonzero if the quotient is negative.  */
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];	/* Half-word digit arrays.  */
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  /* Division by zero: flag overflow and substitute 1 to avoid a trap.  */
  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      /* Single-digit divisor: classic short division, most significant
	 digit first, propagating the remainder as CARRY.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{			/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;	/* The dividend may grow by one digit.  */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  /* Adjust the truncating quotient for the requested rounding mode,
     then recompute the remainder to match.  */
  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT)  -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  /* quo = quo + 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    /* Round the magnitude up: step the quotient away from zero.  */
	    if (*hquo < 0)
	      /* quo = quo - 1; */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1; */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
832
833 /* If ARG2 divides ARG1 with zero remainder, carries out the division
834 of type CODE and returns the quotient.
835 Otherwise returns NULL_TREE. */
836
837 static tree
div_if_zero_remainder(enum tree_code code,tree arg1,tree arg2)838 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
839 {
840 unsigned HOST_WIDE_INT int1l, int2l;
841 HOST_WIDE_INT int1h, int2h;
842 unsigned HOST_WIDE_INT quol, reml;
843 HOST_WIDE_INT quoh, remh;
844 tree type = TREE_TYPE (arg1);
845 int uns = TYPE_UNSIGNED (type);
846
847 int1l = TREE_INT_CST_LOW (arg1);
848 int1h = TREE_INT_CST_HIGH (arg1);
849 int2l = TREE_INT_CST_LOW (arg2);
850 int2h = TREE_INT_CST_HIGH (arg2);
851
852 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
853 &quol, &quoh, &reml, &remh);
854 if (remh != 0 || reml != 0)
855 return NULL_TREE;
856
857 return build_int_cst_wide (type, quol, quoh);
858 }
859
860 /* Return true if the built-in mathematical function specified by CODE
861 is odd, i.e. -f(x) == f(-x). */
862
863 static bool
negate_mathfn_p(enum built_in_function code)864 negate_mathfn_p (enum built_in_function code)
865 {
866 switch (code)
867 {
868 case BUILT_IN_ASIN:
869 case BUILT_IN_ASINF:
870 case BUILT_IN_ASINL:
871 case BUILT_IN_ATAN:
872 case BUILT_IN_ATANF:
873 case BUILT_IN_ATANL:
874 case BUILT_IN_SIN:
875 case BUILT_IN_SINF:
876 case BUILT_IN_SINL:
877 case BUILT_IN_TAN:
878 case BUILT_IN_TANF:
879 case BUILT_IN_TANL:
880 return true;
881
882 default:
883 break;
884 }
885 return false;
886 }
887
888 /* Check whether we may negate an integer constant T without causing
889 overflow. */
890
bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  /* The only signed value whose negation overflows is the most negative
     one (1 << (prec-1)); check whether T's significant bits equal it.  */
  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* The minimum value has a zero low word; any nonzero low bit
	 means T cannot be it.  */
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  /* Mask off bits beyond the type's precision before comparing.  */
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
918
919 /* Determine whether an expression T can be cheaply negated using
920 the function negate_expr. */
921
static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  /* Look through sign-preserving conversions.  */
  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      /* Unsigned negation wraps; with -ftrapv off, signed negation is
	 always emitted without a trap check.  */
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      /* A complex constant negates cheaply iff both parts do.  */
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      /* For floats this rewrite is only valid under unsafe math
	 (it can change signed-zero/rounding behavior).  */
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* Negating one factor/operand is safe unless the rounding mode
	 depends on the sign of the operands.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x) for odd built-in math functions.  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  /* Only when the shift count is exactly the sign-bit position.  */
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
1012
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  /* Remember the original type: STRIP_SIGN_NOPS may leave T with a
     different (same-mode) type, and the result must be converted back.  */
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      /* Keep the folded constant only when it did not overflow, or when
	 overflow cannot trap (unsigned type or -ftrapv disabled).  */
      if (! TREE_OVERFLOW (tem)
	  || TYPE_UNSIGNED (type)
	  || ! flag_trapv)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	/* Only fold when both parts folded all the way to constants.  */
	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      /* - -A is simply A.  */
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
				 tem, TREE_OPERAND (t, 0));
	      return fold_convert (type, tem);
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
				 tem, TREE_OPERAND (t, 1));
	      return fold_convert (type, tem);
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_convert (type,
			     fold_build2 (MINUS_EXPR, TREE_TYPE (t),
					  TREE_OPERAND (t, 1),
					  TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      /* Don't push a negation into an unsigned product.  */
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* Push the negation into whichever operand accepts it cheaply,
	 preferring the second; unsafe when rounding is sign-dependent.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      negate_expr (tem)));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
					      negate_expr (tem),
					      TREE_OPERAND (t, 1)));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x) when negate_mathfn_p says that is valid
	 and the argument itself is cheaply negatable.  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	{
	  tree fndecl, arg, arglist;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      /* Flip the signedness so the full-width shift fills with
		 the other kind of bits, which is the negation.  */
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  /* No cheaper form was found: emit an explicit NEGATE_EXPR.  */
  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
1170
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      /* NEG1_P: the second operand is implicitly negated because IN is
	 a MINUS_EXPR.  The NEG_*_P flags record which extracted part
	 inherited that negation.  */
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  A negated literal moves to
	 *MINUS_LITP rather than being wrapped in a NEGATE_EXPR.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      /* Negating the whole decomposition: swap the literal between
	 *LITP and *MINUS_LITP and negate the other two parts.  */
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
1265
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  /* (-T1) + T2 -> T2 - T1.  */
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  /* T1 + (-T2) -> T1 - T2.  */
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  /* T1 + 0 -> T1.  */
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  /* T1 - 0 -> T1.  */
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      /* Build without folding -- see the recursion note above.  */
      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
		      fold_convert (type, t2));
}
1308
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  The arithmetic is carried out in
   double-HOST_WIDE_INT precision (low/high word pairs).

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      /* A right shift is a left shift by the negated count.  */
      int2l = -int2l;
      /* Fall through.  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      /* Likewise, rotate right by rotating left by the negated count.  */
      int2l = - int2l;
      /* Fall through.  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      /* Subtract by adding the negation; the overflow check is on the
	 signs of the operands and the sum.  */
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case: both operands
	 nonnegative and fitting in a single HOST_WIDE_INT.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      /* X / 1 is X.  */
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      /* X / X is 1 (for X != 0).  */
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case (see the division
	 shortcut above).  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      /* The remainder comes back in the last two out-parameters; the
	 quotient is discarded.  */
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      /* LOW temporarily holds the truth value of "arg1 < arg2", using
	 the comparison appropriate to the signedness.  */
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }
  else
    /* Truncate to the type's precision and let force_fit_type set
       the overflow bits.  */
    t = force_fit_type (t, 1,
			((!uns || is_sizetype) && overflow)
			| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
			TREE_CONSTANT_OVERFLOW (arg1)
			| TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
1485
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      /* Compute in the internal format, then round to MODE.  */
      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may dependent upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
	       && !flag_unsafe_math_optimizations))
	  && (inexact || !REAL_VALUES_IDENTICAL (result, value)))
	return NULL_TREE;

      t = build_real (type, result);

      /* Propagate the overflow flags from the operands.  */
      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  /* Add and subtract componentwise.  */
	  real = const_binop (code, r1, r2, notrunc);
	  imag = const_binop (code, i1, i2, notrunc);
	  break;

	case MULT_EXPR:
	  /* (r1 + i1*i) * (r2 + i2*i)
	     = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2, notrunc),
			      const_binop (MULT_EXPR, i1, i2, notrunc),
			      notrunc);
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2, notrunc),
			      const_binop (MULT_EXPR, i1, r2, notrunc),
			      notrunc);
	  break;

	case RDIV_EXPR:
	  {
	    /* Divide by multiplying with the conjugate:
	       (r1 + i1*i) / (r2 + i2*i)
	       = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2).  */
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2, notrunc),
			     const_binop (MULT_EXPR, i1, i2, notrunc),
			     notrunc);
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2, notrunc),
			     const_binop (MULT_EXPR, r1, i2, notrunc),
			     notrunc);

	    if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
	      code = TRUNC_DIV_EXPR;

	    real = const_binop (code, t1, magsquared, notrunc);
	    imag = const_binop (code, t2, magsquared, notrunc);
	  }
	  break;

	default:
	  gcc_unreachable ();
	}

      /* Any of the recursive folds above may have failed and returned
	 NULL_TREE.  */
      if (real && imag)
	return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
1638
1639 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1640 indicates which particular sizetype to create. */
1641
1642 tree
size_int_kind(HOST_WIDE_INT number,enum size_type_kind kind)1643 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1644 {
1645 return build_int_cst (sizetype_tab[(int) kind], number);
1646 }
1647
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same type integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that: X + 0, X - 0
	 and 1 * X need no computation at all.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
	return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
	       && integer_zerop (arg1))
	return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
	return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}
1682
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  /* Pick the signed counterpart of the operands' (unsigned) size type.  */
  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
1722
1723 /* A subroutine of fold_convert_const handling conversions of an
1724 INTEGER_CST to another integer type. */
1725
1726 static tree
fold_convert_const_int_from_int(tree type,tree arg1)1727 fold_convert_const_int_from_int (tree type, tree arg1)
1728 {
1729 tree t;
1730
1731 /* Given an integer constant, make new constant with new type,
1732 appropriately sign-extended or truncated. */
1733 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1734 TREE_INT_CST_HIGH (arg1));
1735
1736 t = force_fit_type (t,
1737 /* Don't set the overflow when
1738 converting a pointer */
1739 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1740 (TREE_INT_CST_HIGH (arg1) < 0
1741 && (TYPE_UNSIGNED (type)
1742 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1743 | TREE_OVERFLOW (arg1),
1744 TREE_CONSTANT_OVERFLOW (arg1));
1745
1746 return t;
1747 }
1748
1749 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1750 to an integer type. */
1751
1752 static tree
fold_convert_const_int_from_real(enum tree_code code,tree type,tree arg1)1753 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1754 {
1755 int overflow = 0;
1756 tree t;
1757
1758 /* The following code implements the floating point to integer
1759 conversion rules required by the Java Language Specification,
1760 that IEEE NaNs are mapped to zero and values that overflow
1761 the target precision saturate, i.e. values greater than
1762 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1763 are mapped to INT_MIN. These semantics are allowed by the
1764 C and C++ standards that simply state that the behavior of
1765 FP-to-integer conversion is unspecified upon overflow. */
1766
1767 HOST_WIDE_INT high, low;
1768 #if 0
1769 REAL_VALUE_TYPE r;
1770 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1771
1772 switch (code)
1773 {
1774 case FIX_TRUNC_EXPR:
1775 real_trunc (&r, VOIDmode, &x);
1776 break;
1777
1778 case FIX_CEIL_EXPR:
1779 real_ceil (&r, VOIDmode, &x);
1780 break;
1781
1782 case FIX_FLOOR_EXPR:
1783 real_floor (&r, VOIDmode, &x);
1784 break;
1785
1786 case FIX_ROUND_EXPR:
1787 real_round (&r, VOIDmode, &x);
1788 break;
1789
1790 default:
1791 gcc_unreachable ();
1792 }
1793 #else
1794 REAL_VALUE_TYPE r = TREE_REAL_CST (arg1);
1795 #endif /* 0 */
1796
1797 /* If R is NaN, return zero and show we have an overflow. */
1798 if (REAL_VALUE_ISNANUINF (r))
1799 {
1800 overflow = 1;
1801 high = 0;
1802 low = 0;
1803 }
1804
1805 /* See if R is less than the lower bound or greater than the
1806 upper bound. */
1807
1808 if (! overflow)
1809 {
1810 tree lt = TYPE_MIN_VALUE (type);
1811 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1812 if (REAL_VALUES_LESS (r, l))
1813 {
1814 overflow = 1;
1815 high = TREE_INT_CST_HIGH (lt);
1816 low = TREE_INT_CST_LOW (lt);
1817 }
1818 }
1819
1820 if (! overflow)
1821 {
1822 tree ut = TYPE_MAX_VALUE (type);
1823 if (ut)
1824 {
1825 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1826 if (REAL_VALUES_LESS (u, r))
1827 {
1828 overflow = 1;
1829 high = TREE_INT_CST_HIGH (ut);
1830 low = TREE_INT_CST_LOW (ut);
1831 }
1832 }
1833 }
1834
1835 if (! overflow)
1836 REAL_VALUE_TO_INT (&low, &high, r);
1837
1838 t = build_int_cst_wide (type, low, high);
1839
1840 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1841 TREE_CONSTANT_OVERFLOW (arg1));
1842 return t;
1843 }
1844
1845 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1846 to another floating point type. */
1847
1848 static tree
fold_convert_const_real_from_real(tree type,tree arg1)1849 fold_convert_const_real_from_real (tree type, tree arg1)
1850 {
1851 REAL_VALUE_TYPE value;
1852 tree t;
1853
1854 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1855 t = build_real (type, value);
1856
1857 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1858 TREE_CONSTANT_OVERFLOW (t)
1859 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1860 return t;
1861 }
1862
1863 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1864 type TYPE. If no simplification can be done return NULL_TREE. */
1865
1866 static tree
fold_convert_const(enum tree_code code,tree type,tree arg1)1867 fold_convert_const (enum tree_code code, tree type, tree arg1)
1868 {
1869 if (TREE_TYPE (arg1) == type)
1870 return arg1;
1871
1872 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1873 {
1874 if (TREE_CODE (arg1) == INTEGER_CST)
1875 return fold_convert_const_int_from_int (type, arg1);
1876 else if (TREE_CODE (arg1) == REAL_CST)
1877 return fold_convert_const_int_from_real (code, type, arg1);
1878 }
1879 else if (TREE_CODE (type) == REAL_TYPE)
1880 {
1881 if (TREE_CODE (arg1) == INTEGER_CST)
1882 return build_real_from_int_cst (type, arg1);
1883 if (TREE_CODE (arg1) == REAL_CST)
1884 return fold_convert_const_real_from_real (type, arg1);
1885 }
1886 return NULL_TREE;
1887 }
1888
1889 /* Construct a vector of zero elements of vector type TYPE. */
1890
1891 static tree
build_zero_vector(tree type)1892 build_zero_vector (tree type)
1893 {
1894 tree elem, list;
1895 int i, units;
1896
1897 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1898 units = TYPE_VECTOR_SUBPARTS (type);
1899
1900 list = NULL_TREE;
1901 for (i = 0; i < units; i++)
1902 list = tree_cons (NULL_TREE, elem, list);
1903 return build_vector (type, list);
1904 }
1905
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  /* Types that share a main variant, or that the front end deems
     compatible, need only a NOP_EXPR.  */
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
					TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  /* Fold the conversion at compile time when possible.  */
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  /* complex -> integral: convert the real part only.  */
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);
	}
      /* The only remaining legal source is a same-size vector.  */
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE: case CHAR_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1 (FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  /* With -ffloat-store, use an explicit CONVERT_EXPR rather
	     than a plain NOP.  */
	  return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
			      type, arg);

	case COMPLEX_TYPE:
	  /* complex -> real: take the real part.  */
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE: case CHAR_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	  /* scalar -> complex: the imaginary part is zero.  */
	  return build2 (COMPLEX_EXPR, type,
			 fold_convert (TREE_TYPE (type), arg),
			 fold_convert (TREE_TYPE (type), integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    /* If ARG is already a COMPLEX_EXPR, convert its parts
	       directly without forcing ARG into a temporary.  */
	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
		return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	      }

	    /* Otherwise evaluate ARG once and convert each part.  */
	    arg = save_expr (arg);
	    rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert (TREE_TYPE (type), rpart);
	    ipart = fold_convert (TREE_TYPE (type), ipart);
	    return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      /* Otherwise reinterpret the bits of a same-size scalar or vector.  */
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));

    default:
      gcc_unreachable ();
    }
}
2032
2033 /* Return false if expr can be assumed not to be an lvalue, true
2034 otherwise. */
2035
2036 static bool
maybe_lvalue_p(tree x)2037 maybe_lvalue_p (tree x)
2038 {
2039 /* We only need to wrap lvalue tree codes. */
2040 switch (TREE_CODE (x))
2041 {
2042 case VAR_DECL:
2043 case PARM_DECL:
2044 case RESULT_DECL:
2045 case LABEL_DECL:
2046 case FUNCTION_DECL:
2047 case SSA_NAME:
2048
2049 case COMPONENT_REF:
2050 case INDIRECT_REF:
2051 case ALIGN_INDIRECT_REF:
2052 case MISALIGNED_INDIRECT_REF:
2053 case ARRAY_REF:
2054 case ARRAY_RANGE_REF:
2055 case BIT_FIELD_REF:
2056 case OBJ_TYPE_REF:
2057
2058 case REALPART_EXPR:
2059 case IMAGPART_EXPR:
2060 case PREINCREMENT_EXPR:
2061 case PREDECREMENT_EXPR:
2062 case SAVE_EXPR:
2063 case TRY_CATCH_EXPR:
2064 case WITH_CLEANUP_EXPR:
2065 case COMPOUND_EXPR:
2066 case MODIFY_EXPR:
2067 case TARGET_EXPR:
2068 case COND_EXPR:
2069 case BIND_EXPR:
2070 case MIN_EXPR:
2071 case MAX_EXPR:
2072 break;
2073
2074 default:
2075 /* Assume the worst for front-end tree codes. */
2076 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2077 break;
2078 return false;
2079 }
2080
2081 return true;
2082 }
2083
2084 /* Return an expr equal to X but certainly not valid as an lvalue. */
2085
2086 tree
non_lvalue(tree x)2087 non_lvalue (tree x)
2088 {
2089 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2090 us. */
2091 if (in_gimple_form)
2092 return x;
2093
2094 if (! maybe_lvalue_p (x))
2095 return x;
2096 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2097 }
2098
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  Consulted by pedantic_non_lvalue
   below.  */

int pedantic_lvalues;
2103
2104 /* When pedantic, return an expr equal to X but certainly not valid as a
2105 pedantic lvalue. Otherwise, return X. */
2106
2107 tree
pedantic_non_lvalue(tree x)2108 pedantic_non_lvalue (tree x)
2109 {
2110 if (pedantic_lvalues)
2111 return non_lvalue (x);
2112 else
2113 return x;
2114 }
2115
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is generally not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive HONOR_NANS to
   tell us whether NaNs are possible: if inverting the comparison is unsafe
   (NaNs possible and -ftrapping-math in effect), return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  /* With trapping math, inverting e.g. LT to UNGE would change which
     operand values raise an exception, so refuse to invert at all.  */
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
2161
2162 /* Similar, but return the comparison that results if the operands are
2163 swapped. This is safe for floating-point. */
2164
2165 enum tree_code
swap_tree_comparison(enum tree_code code)2166 swap_tree_comparison (enum tree_code code)
2167 {
2168 switch (code)
2169 {
2170 case EQ_EXPR:
2171 case NE_EXPR:
2172 case ORDERED_EXPR:
2173 case UNORDERED_EXPR:
2174 case LTGT_EXPR:
2175 case UNEQ_EXPR:
2176 return code;
2177 case GT_EXPR:
2178 return LT_EXPR;
2179 case GE_EXPR:
2180 return LE_EXPR;
2181 case LT_EXPR:
2182 return GT_EXPR;
2183 case LE_EXPR:
2184 return GE_EXPR;
2185 case UNGT_EXPR:
2186 return UNLT_EXPR;
2187 case UNGE_EXPR:
2188 return UNLE_EXPR;
2189 case UNLT_EXPR:
2190 return UNGT_EXPR;
2191 case UNLE_EXPR:
2192 return UNGE_EXPR;
2193 default:
2194 gcc_unreachable ();
2195 }
2196 }
2197
2198
2199 /* Convert a comparison tree code from an enum tree_code representation
2200 into a compcode bit-based encoding. This function is the inverse of
2201 compcode_to_comparison. */
2202
2203 static enum comparison_code
comparison_to_compcode(enum tree_code code)2204 comparison_to_compcode (enum tree_code code)
2205 {
2206 switch (code)
2207 {
2208 case LT_EXPR:
2209 return COMPCODE_LT;
2210 case EQ_EXPR:
2211 return COMPCODE_EQ;
2212 case LE_EXPR:
2213 return COMPCODE_LE;
2214 case GT_EXPR:
2215 return COMPCODE_GT;
2216 case NE_EXPR:
2217 return COMPCODE_NE;
2218 case GE_EXPR:
2219 return COMPCODE_GE;
2220 case ORDERED_EXPR:
2221 return COMPCODE_ORD;
2222 case UNORDERED_EXPR:
2223 return COMPCODE_UNORD;
2224 case UNLT_EXPR:
2225 return COMPCODE_UNLT;
2226 case UNEQ_EXPR:
2227 return COMPCODE_UNEQ;
2228 case UNLE_EXPR:
2229 return COMPCODE_UNLE;
2230 case UNGT_EXPR:
2231 return COMPCODE_UNGT;
2232 case LTGT_EXPR:
2233 return COMPCODE_LTGT;
2234 case UNGE_EXPR:
2235 return COMPCODE_UNGE;
2236 default:
2237 gcc_unreachable ();
2238 }
2239 }
2240
2241 /* Convert a compcode bit-based encoding of a comparison operator back
2242 to GCC's enum tree_code representation. This function is the
2243 inverse of comparison_to_compcode. */
2244
2245 static enum tree_code
compcode_to_comparison(enum comparison_code code)2246 compcode_to_comparison (enum comparison_code code)
2247 {
2248 switch (code)
2249 {
2250 case COMPCODE_LT:
2251 return LT_EXPR;
2252 case COMPCODE_EQ:
2253 return EQ_EXPR;
2254 case COMPCODE_LE:
2255 return LE_EXPR;
2256 case COMPCODE_GT:
2257 return GT_EXPR;
2258 case COMPCODE_NE:
2259 return NE_EXPR;
2260 case COMPCODE_GE:
2261 return GE_EXPR;
2262 case COMPCODE_ORD:
2263 return ORDERED_EXPR;
2264 case COMPCODE_UNORD:
2265 return UNORDERED_EXPR;
2266 case COMPCODE_UNLT:
2267 return UNLT_EXPR;
2268 case COMPCODE_UNEQ:
2269 return UNEQ_EXPR;
2270 case COMPCODE_UNLE:
2271 return UNLE_EXPR;
2272 case COMPCODE_UNGT:
2273 return UNGT_EXPR;
2274 case COMPCODE_LTGT:
2275 return LTGT_EXPR;
2276 case COMPCODE_UNGE:
2277 return UNGE_EXPR;
2278 default:
2279 gcc_unreachable ();
2280 }
2281 }
2282
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  /* In the compcode encoding, logical AND/OR of two comparisons on the
     same operands is just bitwise AND/OR of their bit patterns.  */
  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  A comparison traps on unordered operands
	 unless it is EQ, NE (quiet), or explicitly tests for (un)order.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  /* COMPCODE_TRUE / COMPCODE_FALSE mean the combined comparison is a
     constant; otherwise rebuild a single comparison from the bits.  */
  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
			truth_type, ll_arg, lr_arg);
}
2366
2367 /* Return nonzero if CODE is a tree code that represents a truth value. */
2368
2369 static int
truth_value_p(enum tree_code code)2370 truth_value_p (enum tree_code code)
2371 {
2372 return (TREE_CODE_CLASS (code) == tcc_comparison
2373 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2374 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2375 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2376 }
2377
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */

int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us. In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	/* Overflowed constants are never considered equal.  */
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
	/* Use bit-identical comparison, not ==, so that -0.0 != 0.0
	   and identical NaNs compare equal.  */
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
					  TREE_REAL_CST (arg1)));

      case VECTOR_CST:
	{
	  tree v1, v2;

	  if (TREE_CONSTANT_OVERFLOW (arg0)
	      || TREE_CONSTANT_OVERFLOW (arg1))
	    return 0;

	  /* Walk both element lists in lockstep; equal only if every
	     element matches and both lists end together.  */
	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
				    flags))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return v1 == v2;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, the both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)				\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
        case NOP_EXPR:
        case CONVERT_EXPR:
        case FIX_CEIL_EXPR:
        case FIX_TRUNC_EXPR:
        case FIX_FLOOR_EXPR:
        case FIX_ROUND_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);


    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case MISALIGNED_INDIRECT_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.  */
	  return (OP_SAME (0)
		  && OP_SAME (1)
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  return OP_SAME_WITH_NULL (0)
		 && OP_SAME (1)
		 && OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (!OP_SAME (0))
	    return 0;

	  {
	    /* Only const (or, with OEP_PURE_SAME, pure) calls can be
	       assumed to yield the same value for the same arguments.  */
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  operand_equal_p
	     does not handle TREE_LIST, so we walk the operands here
	     feeding them to operand_equal_p.  */
	  arg0 = TREE_OPERAND (arg0, 1);
	  arg1 = TREE_OPERAND (arg1, 1);
	  while (arg0 && arg1)
	    {
	      if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
				     flags))
		return 0;

	      arg0 = TREE_CHAIN (arg0);
	      arg1 = TREE_CHAIN (arg1);
	    }

	  /* If we get here and both argument lists are exhausted
	     then the CALL_EXPRs are equal.  */
	  return ! (arg0 || arg1);

	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
2665
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  /* Exact equality needs no further work.  */
  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  /* The shortening transformation only applies to integral types.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
2721
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
   the expression and save_expr needs to be called with CVAL1 and CVAL2.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
2818
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      /* Rebuild the node with the operand substituted recursively.  */
      return fold_build1 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  /* Only the value operand matters for substitution.  */
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold_build3 (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ??? */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2 (code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
2900
2901 /* Return a tree for the case when the result of an expression is RESULT
2902 converted to TYPE and OMITTED was previously an operand of the expression
2903 but is now not needed (e.g., we folded OMITTED * 0).
2904
2905 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2906 the conversion of RESULT to TYPE. */
2907
2908 tree
omit_one_operand(tree type,tree result,tree omitted)2909 omit_one_operand (tree type, tree result, tree omitted)
2910 {
2911 tree t = fold_convert (type, result);
2912
2913 if (TREE_SIDE_EFFECTS (omitted))
2914 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2915
2916 return non_lvalue (t);
2917 }
2918
2919 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2920
2921 static tree
pedantic_omit_one_operand(tree type,tree result,tree omitted)2922 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2923 {
2924 tree t = fold_convert (type, result);
2925
2926 if (TREE_SIDE_EFFECTS (omitted))
2927 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2928
2929 return pedantic_non_lvalue (t);
2930 }
2931
2932 /* Return a tree for the case when the result of an expression is RESULT
2933 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2934 of the expression but are now not needed.
2935
2936 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2937 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2938 evaluated before OMITTED2. Otherwise, if neither has side effects,
2939 just do the conversion of RESULT to TYPE. */
2940
2941 tree
omit_two_operands(tree type,tree result,tree omitted1,tree omitted2)2942 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2943 {
2944 tree t = fold_convert (type, result);
2945
2946 if (TREE_SIDE_EFFECTS (omitted2))
2947 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2948 if (TREE_SIDE_EFFECTS (omitted1))
2949 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2950
2951 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2952 }
2953
2954
2955 /* Return a simplified tree node for the truth-negation of ARG. This
2956 never alters ARG itself. We assume that ARG is an operation that
2957 returns a truth value (0 or 1).
2958
2959 FIXME: one would think we would fold the result, but it causes
2960 problems with the dominator optimizer. */
2961 tree
invert_truthvalue(tree arg)2962 invert_truthvalue (tree arg)
2963 {
2964 tree type = TREE_TYPE (arg);
2965 enum tree_code code = TREE_CODE (arg);
2966
2967 if (code == ERROR_MARK)
2968 return arg;
2969
2970 /* If this is a comparison, we can simply invert it, except for
2971 floating-point non-equality comparisons, in which case we just
2972 enclose a TRUTH_NOT_EXPR around what we have. */
2973
2974 if (TREE_CODE_CLASS (code) == tcc_comparison)
2975 {
2976 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2977 if (FLOAT_TYPE_P (op_type)
2978 && flag_trapping_math
2979 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2980 && code != NE_EXPR && code != EQ_EXPR)
2981 return build1 (TRUTH_NOT_EXPR, type, arg);
2982 else
2983 {
2984 code = invert_tree_comparison (code,
2985 HONOR_NANS (TYPE_MODE (op_type)));
2986 if (code == ERROR_MARK)
2987 return build1 (TRUTH_NOT_EXPR, type, arg);
2988 else
2989 return build2 (code, type,
2990 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2991 }
2992 }
2993
2994 switch (code)
2995 {
2996 case INTEGER_CST:
2997 return constant_boolean_node (integer_zerop (arg), type);
2998
2999 case TRUTH_AND_EXPR:
3000 return build2 (TRUTH_OR_EXPR, type,
3001 invert_truthvalue (TREE_OPERAND (arg, 0)),
3002 invert_truthvalue (TREE_OPERAND (arg, 1)));
3003
3004 case TRUTH_OR_EXPR:
3005 return build2 (TRUTH_AND_EXPR, type,
3006 invert_truthvalue (TREE_OPERAND (arg, 0)),
3007 invert_truthvalue (TREE_OPERAND (arg, 1)));
3008
3009 case TRUTH_XOR_EXPR:
3010 /* Here we can invert either operand. We invert the first operand
3011 unless the second operand is a TRUTH_NOT_EXPR in which case our
3012 result is the XOR of the first operand with the inside of the
3013 negation of the second operand. */
3014
3015 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3016 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3017 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3018 else
3019 return build2 (TRUTH_XOR_EXPR, type,
3020 invert_truthvalue (TREE_OPERAND (arg, 0)),
3021 TREE_OPERAND (arg, 1));
3022
3023 case TRUTH_ANDIF_EXPR:
3024 return build2 (TRUTH_ORIF_EXPR, type,
3025 invert_truthvalue (TREE_OPERAND (arg, 0)),
3026 invert_truthvalue (TREE_OPERAND (arg, 1)));
3027
3028 case TRUTH_ORIF_EXPR:
3029 return build2 (TRUTH_ANDIF_EXPR, type,
3030 invert_truthvalue (TREE_OPERAND (arg, 0)),
3031 invert_truthvalue (TREE_OPERAND (arg, 1)));
3032
3033 case TRUTH_NOT_EXPR:
3034 return TREE_OPERAND (arg, 0);
3035
3036 case COND_EXPR:
3037 {
3038 tree arg1 = TREE_OPERAND (arg, 1);
3039 tree arg2 = TREE_OPERAND (arg, 2);
3040 /* A COND_EXPR may have a throw as one operand, which
3041 then has void type. Just leave void operands
3042 as they are. */
3043 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3044 VOID_TYPE_P (TREE_TYPE (arg1))
3045 ? arg1 : invert_truthvalue (arg1),
3046 VOID_TYPE_P (TREE_TYPE (arg2))
3047 ? arg2 : invert_truthvalue (arg2));
3048 }
3049
3050 case COMPOUND_EXPR:
3051 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3052 invert_truthvalue (TREE_OPERAND (arg, 1)));
3053
3054 case NON_LVALUE_EXPR:
3055 return invert_truthvalue (TREE_OPERAND (arg, 0));
3056
3057 case NOP_EXPR:
3058 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3059 break;
3060
3061 case CONVERT_EXPR:
3062 case FLOAT_EXPR:
3063 return build1 (TREE_CODE (arg), type,
3064 invert_truthvalue (TREE_OPERAND (arg, 0)));
3065
3066 case BIT_AND_EXPR:
3067 if (!integer_onep (TREE_OPERAND (arg, 1)))
3068 break;
3069 return build2 (EQ_EXPR, type, arg,
3070 fold_convert (type, integer_zero_node));
3071
3072 case SAVE_EXPR:
3073 return build1 (TRUTH_NOT_EXPR, type, arg);
3074
3075 case CLEANUP_POINT_EXPR:
3076 return build1 (CLEANUP_POINT_EXPR, type,
3077 invert_truthvalue (TREE_OPERAND (arg, 0)));
3078
3079 default:
3080 break;
3081 }
3082 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3083 return build1 (TRUTH_NOT_EXPR, type, arg);
3084 }
3085
3086 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3087 operands are another bit-wise operation with a common input. If so,
3088 distribute the bit operations to save an operation and possibly two if
3089 constants are involved. For example, convert
3090 (A | B) & (A | C) into A | (B & C)
3091 Further simplification will occur if B and C are constants.
3092
3093 If this optimization cannot be done, 0 will be returned. */
3094
3095 static tree
distribute_bit_expr(enum tree_code code,tree type,tree arg0,tree arg1)3096 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3097 {
3098 tree common;
3099 tree left, right;
3100
3101 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3102 || TREE_CODE (arg0) == code
3103 || (TREE_CODE (arg0) != BIT_AND_EXPR
3104 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3105 return 0;
3106
3107 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3108 {
3109 common = TREE_OPERAND (arg0, 0);
3110 left = TREE_OPERAND (arg0, 1);
3111 right = TREE_OPERAND (arg1, 1);
3112 }
3113 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3114 {
3115 common = TREE_OPERAND (arg0, 0);
3116 left = TREE_OPERAND (arg0, 1);
3117 right = TREE_OPERAND (arg1, 0);
3118 }
3119 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3120 {
3121 common = TREE_OPERAND (arg0, 1);
3122 left = TREE_OPERAND (arg0, 0);
3123 right = TREE_OPERAND (arg1, 1);
3124 }
3125 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3126 {
3127 common = TREE_OPERAND (arg0, 1);
3128 left = TREE_OPERAND (arg0, 0);
3129 right = TREE_OPERAND (arg1, 0);
3130 }
3131 else
3132 return 0;
3133
3134 return fold_build2 (TREE_CODE (arg0), type, common,
3135 fold_build2 (code, type, left, right));
3136 }
3137
3138 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3139 with code CODE. This optimization is unsafe. */
3140 static tree
distribute_real_division(enum tree_code code,tree type,tree arg0,tree arg1)3141 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3142 {
3143 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3144 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3145
3146 /* (A / C) +- (B / C) -> (A +- B) / C. */
3147 if (mul0 == mul1
3148 && operand_equal_p (TREE_OPERAND (arg0, 1),
3149 TREE_OPERAND (arg1, 1), 0))
3150 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3151 fold_build2 (code, type,
3152 TREE_OPERAND (arg0, 0),
3153 TREE_OPERAND (arg1, 0)),
3154 TREE_OPERAND (arg0, 1));
3155
3156 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3157 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3158 TREE_OPERAND (arg1, 0), 0)
3159 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3160 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3161 {
3162 REAL_VALUE_TYPE r0, r1;
3163 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3164 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3165 if (!mul0)
3166 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3167 if (!mul1)
3168 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3169 real_arithmetic (&r0, code, &r0, &r1);
3170 return fold_build2 (MULT_EXPR, type,
3171 TREE_OPERAND (arg0, 0),
3172 build_real (type, r0));
3173 }
3174
3175 return NULL_TREE;
3176 }
3177
3178 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3179 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3180
3181 static tree
make_bit_field_ref(tree inner,tree type,int bitsize,int bitpos,int unsignedp)3182 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3183 int unsignedp)
3184 {
3185 tree result;
3186
3187 if (bitpos == 0)
3188 {
3189 tree size = TYPE_SIZE (TREE_TYPE (inner));
3190 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3191 || POINTER_TYPE_P (TREE_TYPE (inner)))
3192 && host_integerp (size, 0)
3193 && tree_low_cst (size, 0) == bitsize)
3194 return fold_convert (type, inner);
3195 }
3196
3197 result = build3 (BIT_FIELD_REF, type, inner,
3198 size_int (bitsize), bitsize_int (bitpos));
3199
3200 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3201
3202 return result;
3203 }
3204
3205 /* Optimize a bit-field compare.
3206
3207 There are two cases: First is a compare against a constant and the
3208 second is a comparison of two items where the fields are at the same
3209 bit position relative to the start of a chunk (byte, halfword, word)
3210 large enough to contain it. In these cases we can avoid the shift
3211 implicit in bitfield extractions.
3212
3213 For constants, we emit a compare of the shifted constant with the
3214 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3215 compared. For two fields at the same position, we do the ANDs with the
3216 similar mask and compare the result of the ANDs.
3217
3218 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3219 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3220 are the left and right operands of the comparison, respectively.
3221
3222 If the optimization described above can be done, we return the resulting
3223 tree. Otherwise we return zero. */
3224
static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
			    tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

 if (!const_p)
   {
     /* If this is not a constant, we can only do something if bit positions,
	sizes, and signedness are the same.  */
     rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				   &runsignedp, &rvolatilep, false);

     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	 || lunsignedp != runsignedp || offset != 0
	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
       return 0;
   }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  /* On big-endian targets bit 0 of the extracted chunk is the most
     significant bit, so flip the in-chunk position accordingly.  */
  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field: an all-ones
     value of the chunk's unsigned type, shifted so that exactly the
     LBITSIZE bits at LBITPOS are set.  */
  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);
  mask = fold_convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build2 (code, compare_type,
		   build2 (BIT_AND_EXPR, unsigned_type,
			   make_bit_field_ref (linner, unsigned_type,
					       nbitsize, nbitpos, 1),
			   mask),
		   build2 (BIT_AND_EXPR, unsigned_type,
			   make_bit_field_ref (rinner, unsigned_type,
					       nbitsize, nbitpos, 1),
			   mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  Invert the
     comparison code if we are comparing against the only nonzero value.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = fold_convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      /* Preserve the volatility of the original access on the new
	 reference so the load is not optimized away.  */
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert (unsigned_type, rhs),
				  size_int (lbitpos), 0),
		     mask, 0);

  return build2 (code, compare_type,
		 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
		 rhs);
}
3371
3372 /* Subroutine for fold_truthop: decode a field reference.
3373
3374 If EXP is a comparison reference, we return the innermost reference.
3375
3376 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3377 set to the starting bit number.
3378
3379 If the innermost field can be completely contained in a mode-sized
3380 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3381
   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.
3384
3385 *PUNSIGNEDP is set to the signedness of the field.
3386
3387 *PMASK is set to the mask used. This is either contained in a
3388 BIT_AND_EXPR or derived from the width of the field.
3389
3390 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3391
3392 Return 0 if this is not a component reference or is one that we can't
3393 do anything with. */
3394
static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  /* Peel off an outer BIT_AND_EXPR with a constant mask; the mask is
     remembered and merged into the field mask below.  */
  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  /* Give up when there is no actual field access (unless an AND mask
     defines one), when the reference is variable-offset, or when the
     inner object is a PLACEHOLDER_EXPR we must not replace.  */
  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield: all ones shifted left and
     back right so only the low *PBITSIZE bits remain set.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst (unsigned_type, -1);
  mask = force_fit_type (mask, 0, false, false);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
			fold_convert (unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
3463
3464 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3465 bit positions. */
3466
3467 static int
all_ones_mask_p(tree mask,int size)3468 all_ones_mask_p (tree mask, int size)
3469 {
3470 tree type = TREE_TYPE (mask);
3471 unsigned int precision = TYPE_PRECISION (type);
3472 tree tmask;
3473
3474 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3475 tmask = force_fit_type (tmask, 0, false, false);
3476
3477 return
3478 tree_int_cst_equal (mask,
3479 const_binop (RSHIFT_EXPR,
3480 const_binop (LSHIFT_EXPR, tmask,
3481 size_int (precision - size),
3482 0),
3483 size_int (precision - size), 0));
3484 }
3485
3486 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3487 represents the sign bit of EXP's type. If EXP represents a sign
3488 or zero extension, also test VAL against the unextended type.
3489 The return value is the (sub)expression whose sign bit is VAL,
3490 or NULL_TREE otherwise. */
3491
3492 static tree
sign_bit_p(tree exp,tree val)3493 sign_bit_p (tree exp, tree val)
3494 {
3495 unsigned HOST_WIDE_INT mask_lo, lo;
3496 HOST_WIDE_INT mask_hi, hi;
3497 int width;
3498 tree t;
3499
3500 /* Tree EXP must have an integral type. */
3501 t = TREE_TYPE (exp);
3502 if (! INTEGRAL_TYPE_P (t))
3503 return NULL_TREE;
3504
3505 /* Tree VAL must be an integer constant. */
3506 if (TREE_CODE (val) != INTEGER_CST
3507 || TREE_CONSTANT_OVERFLOW (val))
3508 return NULL_TREE;
3509
3510 width = TYPE_PRECISION (t);
3511 if (width > HOST_BITS_PER_WIDE_INT)
3512 {
3513 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3514 lo = 0;
3515
3516 mask_hi = ((unsigned HOST_WIDE_INT) -1
3517 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3518 mask_lo = -1;
3519 }
3520 else
3521 {
3522 hi = 0;
3523 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3524
3525 mask_hi = 0;
3526 mask_lo = ((unsigned HOST_WIDE_INT) -1
3527 >> (HOST_BITS_PER_WIDE_INT - width));
3528 }
3529
3530 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3531 treat VAL as if it were unsigned. */
3532 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3533 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3534 return exp;
3535
3536 /* Handle extension from a narrower type. */
3537 if (TREE_CODE (exp) == NOP_EXPR
3538 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3539 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3540
3541 return NULL_TREE;
3542 }
3543
3544 /* Subroutine for fold_truthop: determine if an operand is simple enough
3545 to be evaluated unconditionally. */
3546
3547 static int
simple_operand_p(tree exp)3548 simple_operand_p (tree exp)
3549 {
3550 /* Strip any conversions that don't change the machine mode. */
3551 STRIP_NOPS (exp);
3552
3553 return (CONSTANT_CLASS_P (exp)
3554 || TREE_CODE (exp) == SSA_NAME
3555 || (DECL_P (exp)
3556 && ! TREE_ADDRESSABLE (exp)
3557 && ! TREE_THIS_VOLATILE (exp)
3558 && ! DECL_NONLOCAL (exp)
3559 /* Don't regard global variables as simple. They may be
3560 allocated in ways unknown to the compiler (shared memory,
3561 #pragma weak, etc). */
3562 && ! TREE_PUBLIC (exp)
3563 && ! DECL_EXTERNAL (exp)
3564 /* Loading a static variable is unduly expensive, but global
3565 registers aren't expensive. */
3566 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3567 }
3568
3569 /* The following functions are subroutines to fold_range_test and allow it to
3570 try to change a logical combination of comparisons into a range test.
3571
3572 For example, both
3573 X == 2 || X == 3 || X == 4 || X == 5
3574 and
3575 X >= 2 && X <= 5
3576 are converted to
3577 (unsigned) (X - 2) <= 3
3578
3579 We describe each set of comparisons as being either inside or outside
3580 a range, using a variable named like IN_P, and then describe the
3581 range with a lower and upper bound. If one of the bounds is omitted,
3582 it represents either the highest or lowest value of the type.
3583
3584 In the comments below, we represent a range by two numbers in brackets
3585 preceded by a "+" to designate being inside that range, or a "-" to
3586 designate being outside that range, so the condition can be inverted by
3587 flipping the prefix. An omitted bound is represented by a "-". For
3588 example, "- [-, 10]" means being outside the range starting at the lowest
3589 possible value and ending at 10, in other words, being greater than 10.
3590 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3591 always false.
3592
3593 We set up things so that the missing bounds are handled in a consistent
3594 manner so neither a missing bound nor "true" and "false" need to be
3595 handled using a special case. */
3596
3597 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3598 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3599 and UPPER1_P are nonzero if the respective argument is an upper bound
3600 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3601 must be specified for a comparison. ARG1 will be converted to ARG0's
3602 type if both are specified. */
3603
3604 static tree
range_binop(enum tree_code code,tree type,tree arg0,int upper0_p,tree arg1,int upper1_p)3605 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3606 tree arg1, int upper1_p)
3607 {
3608 tree tem;
3609 int result;
3610 int sgn0, sgn1;
3611
3612 /* If neither arg represents infinity, do the normal operation.
3613 Else, if not a comparison, return infinity. Else handle the special
3614 comparison rules. Note that most of the cases below won't occur, but
3615 are handled for consistency. */
3616
3617 if (arg0 != 0 && arg1 != 0)
3618 {
3619 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3620 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3621 STRIP_NOPS (tem);
3622 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3623 }
3624
3625 if (TREE_CODE_CLASS (code) != tcc_comparison)
3626 return 0;
3627
3628 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3629 for neither. In real maths, we cannot assume open ended ranges are
3630 the same. But, this is computer arithmetic, where numbers are finite.
3631 We can therefore make the transformation of any unbounded range with
3632 the value Z, Z being greater than any representable number. This permits
3633 us to treat unbounded ranges as equal. */
3634 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3635 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3636 switch (code)
3637 {
3638 case EQ_EXPR:
3639 result = sgn0 == sgn1;
3640 break;
3641 case NE_EXPR:
3642 result = sgn0 != sgn1;
3643 break;
3644 case LT_EXPR:
3645 result = sgn0 < sgn1;
3646 break;
3647 case LE_EXPR:
3648 result = sgn0 <= sgn1;
3649 break;
3650 case GT_EXPR:
3651 result = sgn0 > sgn1;
3652 break;
3653 case GE_EXPR:
3654 result = sgn0 >= sgn1;
3655 break;
3656 default:
3657 gcc_unreachable ();
3658 }
3659
3660 return constant_boolean_node (result, type);
3661 }
3662
3663 /* Given EXP, a logical expression, set the range it is testing into
3664 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3665 actually being tested. *PLOW and *PHIGH will be made of the same type
3666 as the returned expression. If EXP is not a comparison, we will most
3667 likely not be returning a useful value and range. */
3668
static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      /* Pick up the operands of EXP when its code class has them, so the
	 cases below can refer to ARG0/ARG1 uniformly.  */
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_CODE_LENGTH (code) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_unary
	      || TREE_CODE_CLASS (code) == tcc_binary)
	    arg0_type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_CODE_LENGTH (code) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  /* Negation just flips the in/out sense of the range.  */
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  We test arg0_type since often the return type
	     of, e.g. EQ_EXPR, is boolean.  */
	  if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high,
				  in_p, low, high, 1,
				  fold_convert (arg0_type, integer_zero_node),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we have a nonzero low
		 bound, reverse the range so it goes from zero to the low bound
		 minus 1.  */
	      if (high == 0 && low && ! integer_zerop (low))
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = fold_convert (arg0_type, integer_zero_node);
		}
	    }

	  exp = arg0;
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, exp_type,
			       fold_convert (exp_type, integer_zero_node),
			       0, high, 1);
	  n_high = range_binop (MINUS_EXPR, exp_type,
				fold_convert (exp_type, integer_zero_node),
				0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1; rewrite EXP and let the PLUS/MINUS and NEGATE
	     cases handle the range adjustment on later iterations.  */
	  exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
			fold_convert (exp_type, integer_one_node));
	  continue;

	case PLUS_EXPR:  case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       arg0_type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				arg0_type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  /* A widening conversion can be looked through; a narrowing one
	     cannot, nor can a conversion whose bounds don't fit.  */
	  if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	    break;

	  if (! INTEGRAL_TYPE_P (arg0_type)
	      || (low != 0 && ! int_fits_type_p (low, arg0_type))
	      || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = fold_convert (arg0_type, n_low);

	  if (n_high != 0)
	    n_high = fold_convert (arg0_type, n_high);


	  /* If we're converting arg0 from an unsigned type, to exp,
	     a signed type,  we will be doing the comparison as unsigned.
	     The tests above have already verified that LOW and HIGH
	     are both positive.

	     So we have to ensure that we will handle large unsigned
	     values the same way that the current signed bounds treat
	     negative values.  */

	  if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	    {
	      tree high_positive;
	      tree equiv_type = lang_hooks.types.type_for_mode
		(TYPE_MODE (arg0_type), 1);

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		: TYPE_MAX_VALUE (arg0_type);

	      if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
		high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
					     fold_convert (arg0_type,
							   high_positive),
					     fold_convert (arg0_type,
							   integer_one_node));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
3939
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.
   A NULL LOW or HIGH bound means the range is unbounded on that side.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  /* An "out of range" test is the inversion of the corresponding
     "in range" test.  */
  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue (value);

      return 0;
    }

  /* A range unbounded on both sides is always true.  */
  if (low == 0 && high == 0)
    return fold_convert (type, integer_one_node);

  /* No lower bound: a single upper-bound comparison suffices.  */
  if (low == 0)
    return fold_build2 (LE_EXPR, type, exp,
                        fold_convert (etype, high));

  /* No upper bound: a single lower-bound comparison suffices.  */
  if (high == 0)
    return fold_build2 (GE_EXPR, type, exp,
                        fold_convert (etype, low));

  /* A degenerate range (LOW == HIGH) is an equality test.  */
  if (operand_equal_p (low, high, 0))
    return fold_build2 (EQ_EXPR, type, exp,
                        fold_convert (etype, low));

  /* For a range starting at zero, one unsigned comparison against the
     upper bound is enough; force EXP unsigned and recurse into the
     "no lower bound" case above.  */
  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
        {
          etype = lang_hooks.types.unsigned_type (etype);
          high = fold_convert (etype, high);
          exp = fold_convert (etype, exp);
        }
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      /* Build the signed maximum of ETYPE's precision, 2**(prec-1) - 1,
         as a double-word constant in HI:LO.  */
      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
        {
          hi = 0;
          lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
        }
      else
        {
          hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
          lo = (unsigned HOST_WIDE_INT) -1;
        }

      /* If HIGH is exactly that signed maximum, the test collapses to
         a sign check on the signed view of EXP.  */
      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
        {
          if (TYPE_UNSIGNED (etype))
            {
              etype = lang_hooks.types.signed_type (etype);
              exp = fold_convert (etype, exp);
            }
          return fold_build2 (GT_EXPR, type, exp,
                              fold_convert (etype, integer_zero_node));
        }
    }

  /* General case: rewrite LOW <= EXP <= HIGH as
     (EXP - LOW) <= (HIGH - LOW).  First compute HIGH - LOW.  */
  value = const_binop (MINUS_EXPR, high, low, 0);
  if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
      && ! TYPE_UNSIGNED (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
         for the type in question, as we rely on this here.  */
      switch (TREE_CODE (etype))
        {
        case INTEGER_TYPE:
        case ENUMERAL_TYPE:
        case CHAR_TYPE:
          /* There is no requirement that LOW be within the range of ETYPE
             if the latter is a subtype.  It must, however, be within the base
             type of ETYPE.  So be sure we do the subtraction in that type.  */
          if (TREE_TYPE (etype))
            etype = TREE_TYPE (etype);
          utype = lang_hooks.types.unsigned_type (etype);
          maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
          maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
                              integer_one_node, 1);
          minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
          if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
                                          minv, 1, maxv, 1)))
            {
              /* The wrap-around property holds, so redo the subtraction
                 in the unsigned type, where the overflow is harmless.  */
              etype = utype;
              high = fold_convert (etype, high);
              low = fold_convert (etype, low);
              exp = fold_convert (etype, exp);
              value = const_binop (MINUS_EXPR, high, low, 0);
            }
          break;
        default:
          break;
        }
    }

  if (value != 0 && ! TREE_OVERFLOW (value))
    {
      /* There is no requirement that LOW be within the range of ETYPE
         if the latter is a subtype.  It must, however, be within the base
         type of ETYPE.  So be sure we do the subtraction in that type.  */
      if (INTEGRAL_TYPE_P (etype) && TREE_TYPE (etype))
        {
          etype = TREE_TYPE (etype);
          exp = fold_convert (etype, exp);
          low = fold_convert (etype, low);
          value = fold_convert (etype, value);
        }

      /* Recurse on the shifted range [0, HIGH - LOW]; the zero-low case
         above reduces it to a single unsigned comparison.  */
      return build_range_check (type,
                                fold_build2 (MINUS_EXPR, etype, exp, low),
                                1, build_int_cst (etype, 0), value);
    }

  /* We could not express the range test.  */
  return 0;
}
4083
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.
   Each range is described by an IN_P flag (nonzero for "inside the range"),
   and LOW/HIGH bounds where a NULL bound means unbounded on that side.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
              tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  /* Nonzero iff the two lower (resp. upper) bounds are equal; two
     absent bounds count as equal.  */
  int lowequal = ((low0 == 0 && low1 == 0)
                  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
                   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                                 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
                                 low0, 0, low1, 0))
      || (lowequal
          && integer_onep (range_binop (GT_EXPR, integer_type_node,
                                        high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
                                          high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
                                      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
         is a subset it is the result.  Otherwise, the range is from the start
         of the second to the end of the first.  */
      if (no_overlap)
        in_p = 0, low = high = 0;
      else if (subset)
        in_p = 1, low = low1, high = high1;
      else
        in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
         equal, the result is false.  If the second range is a subset of the
         first, and the ranges begin at the same place, we go from just after
         the end of the first range to the end of the second.  If the second
         range is not a subset of the first, or if it is a subset and both
         ranges end at the same place, the range starts at the start of the
         first range and ends just before the second range.
         Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
        in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
        in_p = 0, low = high = 0;
      else if (subset && lowequal)
        {
          in_p = 1, high = high0;
          low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
                             integer_one_node, 0);
        }
      else if (! subset || highequal)
        {
          in_p = 1, low = low0;
          high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                              integer_one_node, 0);
        }
      else
        return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
         is a subset of the first, the result is false.  Otherwise,
         the range starts just after the first range and ends at the
         end of the second.  */
      if (no_overlap)
        in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
        in_p = 0, low = high = 0;
      else
        {
          in_p = 1, high = high1;
          low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                             integer_one_node, 0);
        }
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
         is if they don't overlap.  In that case, the only time we have a
         range is if they are adjacent.  If the second is a subset of the
         first, the result is the first.  Otherwise, the range to exclude
         starts at the beginning of the first range and ends at the end of the
         second.  */
      if (no_overlap)
        {
          /* Adjacent ranges (HIGH0 + 1 == LOW1) exclude one contiguous
             span.  */
          if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
                                         range_binop (PLUS_EXPR, NULL_TREE,
                                                      high0, 1,
                                                      integer_one_node, 1),
                                         1, low1, 0)))
            in_p = 0, low = low0, high = high1;
          else
            {
              /* Canonicalize - [min, x] into - [-, x].  */
              if (low0 && TREE_CODE (low0) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (low0)))
                  {
                  case ENUMERAL_TYPE:
                    /* Only trust TYPE_MIN_VALUE when the enum fills its
                       mode; otherwise smaller values are representable.  */
                    if (TYPE_PRECISION (TREE_TYPE (low0))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                  case CHAR_TYPE:
                    if (tree_int_cst_equal (low0,
                                            TYPE_MIN_VALUE (TREE_TYPE (low0))))
                      low0 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (low0))
                        && integer_zerop (low0))
                      low0 = 0;
                    break;
                  default:
                    break;
                  }

              /* Canonicalize - [x, max] into - [x, -].  */
              if (high1 && TREE_CODE (high1) == INTEGER_CST)
                switch (TREE_CODE (TREE_TYPE (high1)))
                  {
                  case ENUMERAL_TYPE:
                    if (TYPE_PRECISION (TREE_TYPE (high1))
                        != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
                      break;
                    /* FALLTHROUGH */
                  case INTEGER_TYPE:
                  case CHAR_TYPE:
                    if (tree_int_cst_equal (high1,
                                            TYPE_MAX_VALUE (TREE_TYPE (high1))))
                      high1 = 0;
                    break;
                  case POINTER_TYPE:
                    if (TYPE_UNSIGNED (TREE_TYPE (high1))
                        && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
                                                       high1, 1,
                                                       integer_one_node, 1)))
                      high1 = 0;
                    break;
                  default:
                    break;
                  }

              /* The ranges might be also adjacent between the maximum and
                 minimum values of the given type.  For
                 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
                 return + [x + 1, y - 1].  */
              if (low0 == 0 && high1 == 0)
                {
                  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
                                     integer_one_node, 1);
                  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
                                      integer_one_node, 0);
                  if (low == 0 || high == 0)
                    return 0;

                  in_p = 1;
                }
              else
                return 0;
            }
        }
      else if (subset)
        in_p = 0, low = low0, high = high0;
      else
        in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
4284
4285
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   TYPE is the type of the whole COND_EXPR.  Return a folded expression
   whose code is not a COND_EXPR anymore, or NULL_TREE if no folding
   opportunity is found.  */

static tree
fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  /* Strip conversions so the structural matches below see through casts.
     Note ARG1_TYPE was captured above, before stripping.  */
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
       ? real_zerop (arg01)
       : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
           && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
             /* In the case that A is of the form X-Y, '-A' (arg2) may
                have already been folded to Y-X, check for that. */
          || (TREE_CODE (arg1) == MINUS_EXPR
              && TREE_CODE (arg2) == MINUS_EXPR
              && operand_equal_p (TREE_OPERAND (arg1, 0),
                                  TREE_OPERAND (arg2, 1), 0)
              && operand_equal_p (TREE_OPERAND (arg1, 1),
                                  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
        tem = fold_convert (arg1_type, arg1);
        return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
        return pedantic_non_lvalue (fold_convert (type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
        /* abs is only meaningful on signed types; view A as signed.  */
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert (lang_hooks.types.signed_type
                               (TREE_TYPE (arg1)), arg1);
        tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
        return pedantic_non_lvalue (fold_convert (type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
        if (flag_trapping_math)
          break;
        /* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
        if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
          arg1 = fold_convert (lang_hooks.types.signed_type
                               (TREE_TYPE (arg1)), arg1);
        tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
        return negate_expr (fold_convert (type, tem));
      default:
        gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
        break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
        return pedantic_non_lvalue (fold_convert (type, arg1));
      else if (comp_code == EQ_EXPR)
        return fold_convert (type, integer_zero_node);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
         as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
          || strcmp (lang_hooks.name, "GNU C++") != 0
          || ! maybe_lvalue_p (arg1)
          || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
        {
          comp_type = type;
          comp_op0 = arg1;
          comp_op1 = arg2;
        }

      switch (comp_code)
        {
        case EQ_EXPR:
          return pedantic_non_lvalue (fold_convert (type, arg2));
        case NE_EXPR:
          return pedantic_non_lvalue (fold_convert (type, arg1));
        case LE_EXPR:
        case LT_EXPR:
        case UNLE_EXPR:
        case UNLT_EXPR:
          /* In C++ a ?: expression can be an lvalue, so put the
             operand which will be used if they are equal first
             so that we can convert this back to the
             corresponding COND_EXPR.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert (comp_type, comp_op0);
              comp_op1 = fold_convert (comp_type, comp_op1);
              tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
                    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
              return pedantic_non_lvalue (fold_convert (type, tem));
            }
          break;
        case GE_EXPR:
        case GT_EXPR:
        case UNGE_EXPR:
        case UNGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            {
              comp_op0 = fold_convert (comp_type, comp_op0);
              comp_op1 = fold_convert (comp_type, comp_op1);
              tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
                    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
                    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
              return pedantic_non_lvalue (fold_convert (type, tem));
            }
          break;
        case UNEQ_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue (fold_convert (type, arg2));
          break;
        case LTGT_EXPR:
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
            return pedantic_non_lvalue (fold_convert (type, arg1));
          break;
        default:
          gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
          break;
        }
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
        /* We can replace A with C1 in this case.  */
        arg1 = fold_convert (type, arg01);
        return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
        /* If C1 is C2 + 1, this is min(A, C2), but guard against
           C2 being the type maximum, where C2 + 1 would wrap.  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
                                                   type, arg1, arg2));
        break;

      case LE_EXPR:
        /* If C1 is C2 - 1, this is min(A, C2), with the same
           type-minimum guard for the wrap-around of C2 - 1.  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
                                                   type, arg1, arg2));
        break;

      case GT_EXPR:
        /* If C1 is C2 - 1, this is max(A, C2).  */
        if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (MINUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
                                                   type, arg1, arg2));
        break;

      case GE_EXPR:
        /* If C1 is C2 + 1, this is max(A, C2).  */
        if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
                               OEP_ONLY_CONST)
            && operand_equal_p (arg01,
                                const_binop (PLUS_EXPR, arg2,
                                             integer_one_node, 0),
                                OEP_ONLY_CONST))
          return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
                                                   type, arg1, arg2));
        break;
      case NE_EXPR:
        break;
      default:
        gcc_unreachable ();
      }

  return NULL_TREE;
}
4557
4558
4559
4560 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4561 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4562 #endif
4563
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.
   CODE is the logical operation joining OP0 and OP1, and TYPE is the
   type of the result.  */

static tree
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
               || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  /* Decompose each operand into "EXPR is (not) in [LOW, HIGH]" form;
     make_range returns 0 when an operand has no such form.  */
  tree lhs = make_range (op0, &in0_p, &low0, &high0);
  tree rhs = make_range (op1, &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
                       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (type,
                                         lhs != 0 ? lhs
                                         : rhs != 0 ? rhs : integer_zero_node,
                                         in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
           && lhs != 0 && rhs != 0
           && (code == TRUTH_ANDIF_EXPR
               || code == TRUTH_ORIF_EXPR)
           && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
         unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
         which cases we can't do this.  */
      if (simple_operand_p (lhs))
        return build2 (code == TRUTH_ANDIF_EXPR
                       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                       type, op0, op1);

      else if (lang_hooks.decls.global_bindings_p () == 0
               && ! CONTAINS_PLACEHOLDER_P (lhs))
        {
          /* Evaluate the common operand just once, then test it against
             each range; note the OR inversion from above is undone here
             by inverting each individual range check instead.  */
          tree common = save_expr (lhs);

          if (0 != (lhs = build_range_check (type, common,
                                             or_op ? ! in0_p : in0_p,
                                             low0, high0))
              && (0 != (rhs = build_range_check (type, common,
                                                 or_op ? ! in1_p : in1_p,
                                                 low1, high1))))
            return build2 (code == TRUTH_ANDIF_EXPR
                           ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
                           type, lhs, rhs);
        }
    }

  /* No merge was possible.  */
  return 0;
}
4632
4633 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4634 bit value. Arrange things so the extra bits will be set to zero if and
4635 only if C is signed-extended to its full width. If MASK is nonzero,
4636 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4637
4638 static tree
unextend(tree c,int p,int unsignedp,tree mask)4639 unextend (tree c, int p, int unsignedp, tree mask)
4640 {
4641 tree type = TREE_TYPE (c);
4642 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4643 tree temp;
4644
4645 if (p == modesize || unsignedp)
4646 return c;
4647
4648 /* We work by getting just the sign bit into the low-order bit, then
4649 into the high-order bit, then sign-extend. We then XOR that value
4650 with C. */
4651 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4652 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4653
4654 /* We must use a signed type in order to get an arithmetic right shift.
4655 However, we must also avoid introducing accidental overflows, so that
4656 a subsequent call to integer_zerop will work. Hence we must
4657 do the type conversion here. At this point, the constant is either
4658 zero or one, and the conversion to a signed type can never overflow.
4659 We could get an overflow if this conversion is done anywhere else. */
4660 if (TYPE_UNSIGNED (type))
4661 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4662
4663 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4664 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4665 if (mask != 0)
4666 temp = const_binop (BIT_AND_EXPR, temp,
4667 fold_convert (TREE_TYPE (c), mask), 0);
4668 /* If necessary, convert the type back to match the type of C. */
4669 if (TYPE_UNSIGNED (type))
4670 temp = fold_convert (type, temp);
4671
4672 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4673 }
4674
4675 /* Find ways of folding logical expressions of LHS and RHS:
4676 Try to merge two comparisons to the same innermost item.
4677 Look for range tests like "ch >= '0' && ch <= '9'".
4678 Look for combinations of simple terms on machines with expensive branches
4679 and evaluate the RHS unconditionally.
4680
4681 For example, if we have p->a == 2 && p->b == 4 and we can make an
4682 object large enough to span both A and B, we can do this with a comparison
4683 against the object ANDed with the a mask.
4684
4685 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4686 operations to do this with one comparison.
4687
4688 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4689 function and the one above.
4690
4691 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4692 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4693
4694 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4695 two operands.
4696
4697 We return the simplified tree or 0 if no optimization is possible. */
4698
4699 static tree
fold_truthop(enum tree_code code,tree truth_type,tree lhs,tree rhs)4700 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4701 {
4702 /* If this is the "or" of two comparisons, we can do something if
4703 the comparisons are NE_EXPR. If this is the "and", we can do something
4704 if the comparisons are EQ_EXPR. I.e.,
4705 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4706
4707 WANTED_CODE is this operation code. For single bit fields, we can
4708 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4709 comparison for one-bit fields. */
4710
4711 enum tree_code wanted_code;
4712 enum tree_code lcode, rcode;
4713 tree ll_arg, lr_arg, rl_arg, rr_arg;
4714 tree ll_inner, lr_inner, rl_inner, rr_inner;
4715 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4716 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4717 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4718 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4719 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4720 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4721 enum machine_mode lnmode, rnmode;
4722 tree ll_mask, lr_mask, rl_mask, rr_mask;
4723 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4724 tree l_const, r_const;
4725 tree lntype, rntype, result;
4726 int first_bit, end_bit;
4727 int volatilep;
4728
4729 /* Start by getting the comparison codes. Fail if anything is volatile.
4730 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4731 it were surrounded with a NE_EXPR. */
4732
4733 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4734 return 0;
4735
4736 lcode = TREE_CODE (lhs);
4737 rcode = TREE_CODE (rhs);
4738
4739 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4740 {
4741 lhs = build2 (NE_EXPR, truth_type, lhs,
4742 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4743 lcode = NE_EXPR;
4744 }
4745
4746 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4747 {
4748 rhs = build2 (NE_EXPR, truth_type, rhs,
4749 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4750 rcode = NE_EXPR;
4751 }
4752
4753 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4754 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4755 return 0;
4756
4757 ll_arg = TREE_OPERAND (lhs, 0);
4758 lr_arg = TREE_OPERAND (lhs, 1);
4759 rl_arg = TREE_OPERAND (rhs, 0);
4760 rr_arg = TREE_OPERAND (rhs, 1);
4761
4762 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4763 if (simple_operand_p (ll_arg)
4764 && simple_operand_p (lr_arg))
4765 {
4766 tree result;
4767 if (operand_equal_p (ll_arg, rl_arg, 0)
4768 && operand_equal_p (lr_arg, rr_arg, 0))
4769 {
4770 result = combine_comparisons (code, lcode, rcode,
4771 truth_type, ll_arg, lr_arg);
4772 if (result)
4773 return result;
4774 }
4775 else if (operand_equal_p (ll_arg, rr_arg, 0)
4776 && operand_equal_p (lr_arg, rl_arg, 0))
4777 {
4778 result = combine_comparisons (code, lcode,
4779 swap_tree_comparison (rcode),
4780 truth_type, ll_arg, lr_arg);
4781 if (result)
4782 return result;
4783 }
4784 }
4785
4786 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4787 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4788
4789 /* If the RHS can be evaluated unconditionally and its operands are
4790 simple, it wins to evaluate the RHS unconditionally on machines
4791 with expensive branches. In this case, this isn't a comparison
4792 that can be merged. Avoid doing this if the RHS is a floating-point
4793 comparison since those can trap. */
4794
4795 if (BRANCH_COST >= 2
4796 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4797 && simple_operand_p (rl_arg)
4798 && simple_operand_p (rr_arg))
4799 {
4800 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4801 if (code == TRUTH_OR_EXPR
4802 && lcode == NE_EXPR && integer_zerop (lr_arg)
4803 && rcode == NE_EXPR && integer_zerop (rr_arg)
4804 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4805 return build2 (NE_EXPR, truth_type,
4806 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4807 ll_arg, rl_arg),
4808 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4809
4810 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4811 if (code == TRUTH_AND_EXPR
4812 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4813 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4814 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4815 return build2 (EQ_EXPR, truth_type,
4816 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4817 ll_arg, rl_arg),
4818 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4819
4820 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4821 return build2 (code, truth_type, lhs, rhs);
4822 }
4823
4824 /* See if the comparisons can be merged. Then get all the parameters for
4825 each side. */
4826
4827 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4828 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4829 return 0;
4830
4831 volatilep = 0;
4832 ll_inner = decode_field_reference (ll_arg,
4833 &ll_bitsize, &ll_bitpos, &ll_mode,
4834 &ll_unsignedp, &volatilep, &ll_mask,
4835 &ll_and_mask);
4836 lr_inner = decode_field_reference (lr_arg,
4837 &lr_bitsize, &lr_bitpos, &lr_mode,
4838 &lr_unsignedp, &volatilep, &lr_mask,
4839 &lr_and_mask);
4840 rl_inner = decode_field_reference (rl_arg,
4841 &rl_bitsize, &rl_bitpos, &rl_mode,
4842 &rl_unsignedp, &volatilep, &rl_mask,
4843 &rl_and_mask);
4844 rr_inner = decode_field_reference (rr_arg,
4845 &rr_bitsize, &rr_bitpos, &rr_mode,
4846 &rr_unsignedp, &volatilep, &rr_mask,
4847 &rr_and_mask);
4848
4849 /* It must be true that the inner operation on the lhs of each
4850 comparison must be the same if we are to be able to do anything.
4851 Then see if we have constants. If not, the same must be true for
4852 the rhs's. */
4853 if (volatilep || ll_inner == 0 || rl_inner == 0
4854 || ! operand_equal_p (ll_inner, rl_inner, 0))
4855 return 0;
4856
4857 if (TREE_CODE (lr_arg) == INTEGER_CST
4858 && TREE_CODE (rr_arg) == INTEGER_CST)
4859 l_const = lr_arg, r_const = rr_arg;
4860 else if (lr_inner == 0 || rr_inner == 0
4861 || ! operand_equal_p (lr_inner, rr_inner, 0))
4862 return 0;
4863 else
4864 l_const = r_const = 0;
4865
4866 /* If either comparison code is not correct for our logical operation,
4867 fail. However, we can convert a one-bit comparison against zero into
4868 the opposite comparison against that bit being set in the field. */
4869
4870 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4871 if (lcode != wanted_code)
4872 {
4873 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4874 {
4875 /* Make the left operand unsigned, since we are only interested
4876 in the value of one bit. Otherwise we are doing the wrong
4877 thing below. */
4878 ll_unsignedp = 1;
4879 l_const = ll_mask;
4880 }
4881 else
4882 return 0;
4883 }
4884
4885 /* This is analogous to the code for l_const above. */
4886 if (rcode != wanted_code)
4887 {
4888 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4889 {
4890 rl_unsignedp = 1;
4891 r_const = rl_mask;
4892 }
4893 else
4894 return 0;
4895 }
4896
4897 /* After this point all optimizations will generate bit-field
4898 references, which we might not want. */
4899 if (! lang_hooks.can_use_bit_fields_p ())
4900 return 0;
4901
4902 /* See if we can find a mode that contains both fields being compared on
4903 the left. If we can't, fail. Otherwise, update all constants and masks
4904 to be relative to a field of that size. */
4905 first_bit = MIN (ll_bitpos, rl_bitpos);
4906 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4907 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4908 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4909 volatilep);
4910 if (lnmode == VOIDmode)
4911 return 0;
4912
4913 lnbitsize = GET_MODE_BITSIZE (lnmode);
4914 lnbitpos = first_bit & ~ (lnbitsize - 1);
4915 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4916 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4917
4918 if (BYTES_BIG_ENDIAN)
4919 {
4920 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4921 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4922 }
4923
4924 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4925 size_int (xll_bitpos), 0);
4926 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4927 size_int (xrl_bitpos), 0);
4928
4929 if (l_const)
4930 {
4931 l_const = fold_convert (lntype, l_const);
4932 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4933 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4934 if (integer_nonzerop (const_binop (BIT_AND_EXPR, l_const,
4935 fold_build1 (BIT_NOT_EXPR,
4936 lntype, ll_mask),
4937 0)))
4938 {
4939 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4940
4941 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4942 }
4943 }
4944 if (r_const)
4945 {
4946 r_const = fold_convert (lntype, r_const);
4947 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4948 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4949 if (integer_nonzerop (const_binop (BIT_AND_EXPR, r_const,
4950 fold_build1 (BIT_NOT_EXPR,
4951 lntype, rl_mask),
4952 0)))
4953 {
4954 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4955
4956 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4957 }
4958 }
4959
4960 /* If the right sides are not constant, do the same for it. Also,
4961 disallow this optimization if a size or signedness mismatch occurs
4962 between the left and right sides. */
4963 if (l_const == 0)
4964 {
4965 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4966 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4967 /* Make sure the two fields on the right
4968 correspond to the left without being swapped. */
4969 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4970 return 0;
4971
4972 first_bit = MIN (lr_bitpos, rr_bitpos);
4973 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4974 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4975 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4976 volatilep);
4977 if (rnmode == VOIDmode)
4978 return 0;
4979
4980 rnbitsize = GET_MODE_BITSIZE (rnmode);
4981 rnbitpos = first_bit & ~ (rnbitsize - 1);
4982 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4983 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4984
4985 if (BYTES_BIG_ENDIAN)
4986 {
4987 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4988 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4989 }
4990
4991 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4992 size_int (xlr_bitpos), 0);
4993 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4994 size_int (xrr_bitpos), 0);
4995
4996 /* Make a mask that corresponds to both fields being compared.
4997 Do this for both items being compared. If the operands are the
4998 same size and the bits being compared are in the same position
4999 then we can do this by masking both and comparing the masked
5000 results. */
5001 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5002 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5003 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5004 {
5005 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5006 ll_unsignedp || rl_unsignedp);
5007 if (! all_ones_mask_p (ll_mask, lnbitsize))
5008 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5009
5010 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5011 lr_unsignedp || rr_unsignedp);
5012 if (! all_ones_mask_p (lr_mask, rnbitsize))
5013 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5014
5015 return build2 (wanted_code, truth_type, lhs, rhs);
5016 }
5017
5018 /* There is still another way we can do something: If both pairs of
5019 fields being compared are adjacent, we may be able to make a wider
5020 field containing them both.
5021
5022 Note that we still must mask the lhs/rhs expressions. Furthermore,
5023 the mask must be shifted to account for the shift done by
5024 make_bit_field_ref. */
5025 if ((ll_bitsize + ll_bitpos == rl_bitpos
5026 && lr_bitsize + lr_bitpos == rr_bitpos)
5027 || (ll_bitpos == rl_bitpos + rl_bitsize
5028 && lr_bitpos == rr_bitpos + rr_bitsize))
5029 {
5030 tree type;
5031
5032 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5033 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5034 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5035 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5036
5037 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5038 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5039 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5040 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5041
5042 /* Convert to the smaller type before masking out unwanted bits. */
5043 type = lntype;
5044 if (lntype != rntype)
5045 {
5046 if (lnbitsize > rnbitsize)
5047 {
5048 lhs = fold_convert (rntype, lhs);
5049 ll_mask = fold_convert (rntype, ll_mask);
5050 type = rntype;
5051 }
5052 else if (lnbitsize < rnbitsize)
5053 {
5054 rhs = fold_convert (lntype, rhs);
5055 lr_mask = fold_convert (lntype, lr_mask);
5056 type = lntype;
5057 }
5058 }
5059
5060 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5061 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5062
5063 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5064 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5065
5066 return build2 (wanted_code, truth_type, lhs, rhs);
5067 }
5068
5069 return 0;
5070 }
5071
5072 /* Handle the case of comparisons with constants. If there is something in
5073 common between the masks, those bits of the constants must be the same.
5074 If not, the condition is always false. Test for this to avoid generating
5075 incorrect code below. */
5076 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5077 if (! integer_zerop (result)
5078 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5079 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5080 {
5081 if (wanted_code == NE_EXPR)
5082 {
5083 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5084 return constant_boolean_node (true, truth_type);
5085 }
5086 else
5087 {
5088 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5089 return constant_boolean_node (false, truth_type);
5090 }
5091 }
5092
5093 /* Construct the expression we will return. First get the component
5094 reference we will make. Unless the mask is all ones the width of
5095 that field, perform the mask operation. Then compare with the
5096 merged constant. */
5097 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5098 ll_unsignedp || rl_unsignedp);
5099
5100 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5101 if (! all_ones_mask_p (ll_mask, lnbitsize))
5102 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5103
5104 return build2 (wanted_code, truth_type, result,
5105 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5106 }
5107
5108 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5109 constant. */
5110
5111 static tree
optimize_minmax_comparison(enum tree_code code,tree type,tree op0,tree op1)5112 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5113 {
5114 tree arg0 = op0;
5115 enum tree_code op_code;
5116 tree comp_const = op1;
5117 tree minmax_const;
5118 int consts_equal, consts_lt;
5119 tree inner;
5120
5121 STRIP_SIGN_NOPS (arg0);
5122
5123 op_code = TREE_CODE (arg0);
5124 minmax_const = TREE_OPERAND (arg0, 1);
5125 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5126 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5127 inner = TREE_OPERAND (arg0, 0);
5128
5129 /* If something does not permit us to optimize, return the original tree. */
5130 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5131 || TREE_CODE (comp_const) != INTEGER_CST
5132 || TREE_CONSTANT_OVERFLOW (comp_const)
5133 || TREE_CODE (minmax_const) != INTEGER_CST
5134 || TREE_CONSTANT_OVERFLOW (minmax_const))
5135 return NULL_TREE;
5136
5137 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5138 and GT_EXPR, doing the rest with recursive calls using logical
5139 simplifications. */
5140 switch (code)
5141 {
5142 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5143 {
5144 /* FIXME: We should be able to invert code without building a
5145 scratch tree node, but doing so would require us to
5146 duplicate a part of invert_truthvalue here. */
5147 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5148 tem = optimize_minmax_comparison (TREE_CODE (tem),
5149 TREE_TYPE (tem),
5150 TREE_OPERAND (tem, 0),
5151 TREE_OPERAND (tem, 1));
5152 return invert_truthvalue (tem);
5153 }
5154
5155 case GE_EXPR:
5156 return
5157 fold_build2 (TRUTH_ORIF_EXPR, type,
5158 optimize_minmax_comparison
5159 (EQ_EXPR, type, arg0, comp_const),
5160 optimize_minmax_comparison
5161 (GT_EXPR, type, arg0, comp_const));
5162
5163 case EQ_EXPR:
5164 if (op_code == MAX_EXPR && consts_equal)
5165 /* MAX (X, 0) == 0 -> X <= 0 */
5166 return fold_build2 (LE_EXPR, type, inner, comp_const);
5167
5168 else if (op_code == MAX_EXPR && consts_lt)
5169 /* MAX (X, 0) == 5 -> X == 5 */
5170 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5171
5172 else if (op_code == MAX_EXPR)
5173 /* MAX (X, 0) == -1 -> false */
5174 return omit_one_operand (type, integer_zero_node, inner);
5175
5176 else if (consts_equal)
5177 /* MIN (X, 0) == 0 -> X >= 0 */
5178 return fold_build2 (GE_EXPR, type, inner, comp_const);
5179
5180 else if (consts_lt)
5181 /* MIN (X, 0) == 5 -> false */
5182 return omit_one_operand (type, integer_zero_node, inner);
5183
5184 else
5185 /* MIN (X, 0) == -1 -> X == -1 */
5186 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5187
5188 case GT_EXPR:
5189 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5190 /* MAX (X, 0) > 0 -> X > 0
5191 MAX (X, 0) > 5 -> X > 5 */
5192 return fold_build2 (GT_EXPR, type, inner, comp_const);
5193
5194 else if (op_code == MAX_EXPR)
5195 /* MAX (X, 0) > -1 -> true */
5196 return omit_one_operand (type, integer_one_node, inner);
5197
5198 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5199 /* MIN (X, 0) > 0 -> false
5200 MIN (X, 0) > 5 -> false */
5201 return omit_one_operand (type, integer_zero_node, inner);
5202
5203 else
5204 /* MIN (X, 0) > -1 -> X > -1 */
5205 return fold_build2 (GT_EXPR, type, inner, comp_const);
5206
5207 default:
5208 return NULL_TREE;
5209 }
5210 }
5211
5212 /* T is an integer expression that is being multiplied, divided, or taken a
5213 modulus (CODE says which and what kind of divide or modulus) by a
5214 constant C. See if we can eliminate that operation by folding it with
5215 other operations already in T. WIDE_TYPE, if non-null, is a type that
5216 should be used for the computation if wider than our type.
5217
5218 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5219 (X * 2) + (Y * 4). We must, however, be assured that either the original
5220 expression would not overflow or that overflow is undefined for the type
5221 in the language in question.
5222
5223 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5224 the machine has a multiply-accumulate insn or that this is part of an
5225 addressing calculation.
5226
5227 If we return a non-null expression, it is an equivalent form of the
5228 original computation, but need not be in the original type. */
5229
5230 static tree
extract_muldiv(tree t,tree c,enum tree_code code,tree wide_type)5231 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5232 {
5233 /* To avoid exponential search depth, refuse to allow recursion past
5234 three levels. Beyond that (1) it's highly unlikely that we'll find
5235 something interesting and (2) we've probably processed it before
5236 when we built the inner expression. */
5237
5238 static int depth;
5239 tree ret;
5240
5241 if (depth > 3)
5242 return NULL;
5243
5244 depth++;
5245 ret = extract_muldiv_1 (t, c, code, wide_type);
5246 depth--;
5247
5248 return ret;
5249 }
5250
/* Worker for extract_muldiv; see the comment above extract_muldiv for the
   overall contract.  T is the expression being multiplied/divided, C the
   constant, CODE the operation being distributed, and WIDE_TYPE an
   optional wider type to compute in.  */
static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  /* Compute in WIDE_TYPE only when it is strictly wider than T's type.  */
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  /* Nonzero when T's own operation is the one being distributed.  */
  int same_p = tcode == code;
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (GET_MODE_SIZE (TYPE_MODE (type))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && ! TREE_CONSTANT_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*lang_hooks.types.signed_type) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* Distribute through the unary operation: op (x) OP c -> op (x OP c).  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  /* A negative constant flips the ordering, so MIN becomes MAX
	     and vice versa.  */
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype, fold_convert (ctype, op0), t1),
			       c, code, wide_type);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, t2));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && ! flag_wrapv))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
				     fold_convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation of CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((! TYPE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ! flag_wrapv
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   op1, c, 0)));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    return fold_build2 (code, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   c, op1, 0)));
	}
      break;

    default:
      break;
    }

  return 0;
}
5531
5532 /* Return a node which has the indicated constant VALUE (either 0 or
5533 1), and is of the indicated TYPE. */
5534
5535 tree
constant_boolean_node(int value,tree type)5536 constant_boolean_node (int value, tree type)
5537 {
5538 if (type == integer_type_node)
5539 return value ? integer_one_node : integer_zero_node;
5540 else if (type == boolean_type_node)
5541 return value ? boolean_true_node : boolean_false_node;
5542 else
5543 return build_int_cst (type, value);
5544 }
5545
5546
5547 /* Return true if expr looks like an ARRAY_REF and set base and
5548 offset to the appropriate trees. If there is no offset,
5549 offset is set to NULL_TREE. Base will be canonicalized to
5550 something you can get the element type from using
5551 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5552 in bytes to the base. */
5553
5554 static bool
extract_array_ref(tree expr,tree * base,tree * offset)5555 extract_array_ref (tree expr, tree *base, tree *offset)
5556 {
5557 /* One canonical form is a PLUS_EXPR with the first
5558 argument being an ADDR_EXPR with a possible NOP_EXPR
5559 attached. */
5560 if (TREE_CODE (expr) == PLUS_EXPR)
5561 {
5562 tree op0 = TREE_OPERAND (expr, 0);
5563 tree inner_base, dummy1;
5564 /* Strip NOP_EXPRs here because the C frontends and/or
5565 folders present us (int *)&x.a + 4B possibly. */
5566 STRIP_NOPS (op0);
5567 if (extract_array_ref (op0, &inner_base, &dummy1))
5568 {
5569 *base = inner_base;
5570 if (dummy1 == NULL_TREE)
5571 *offset = TREE_OPERAND (expr, 1);
5572 else
5573 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5574 dummy1, TREE_OPERAND (expr, 1));
5575 return true;
5576 }
5577 }
5578 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5579 which we transform into an ADDR_EXPR with appropriate
5580 offset. For other arguments to the ADDR_EXPR we assume
5581 zero offset and as such do not care about the ADDR_EXPR
5582 type and strip possible nops from it. */
5583 else if (TREE_CODE (expr) == ADDR_EXPR)
5584 {
5585 tree op0 = TREE_OPERAND (expr, 0);
5586 if (TREE_CODE (op0) == ARRAY_REF)
5587 {
5588 tree idx = TREE_OPERAND (op0, 1);
5589 *base = TREE_OPERAND (op0, 0);
5590 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5591 array_ref_element_size (op0));
5592 }
5593 else
5594 {
5595 /* Handle array-to-pointer decay as &a. */
5596 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5597 *base = TREE_OPERAND (expr, 0);
5598 else
5599 *base = expr;
5600 *offset = NULL_TREE;
5601 }
5602 return true;
5603 }
5604 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5605 else if (SSA_VAR_P (expr)
5606 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5607 {
5608 *base = expr;
5609 *offset = NULL_TREE;
5610 return true;
5611 }
5612
5613 return false;
5614 }
5615
5616
5617 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5618 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5619 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5620 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5621 COND is the first argument to CODE; otherwise (as in the example
5622 given here), it is the second argument. TYPE is the type of the
5623 original expression. Return NULL_TREE if no simplification is
5624 possible. */
5625
static tree
fold_binary_op_with_conditional_arg (enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  /* Types of the conditional operand and of the other operand, which
     depend on which side of CODE the COND_EXPR sat.  */
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  /* LHS/RHS become the new true and false arms.  A non-NULL value set
     below means the arm is VOID-typed and is passed through unchanged
     rather than combined with ARG.  */
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     arg in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once its pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand throws an expression, then it does not make
	 sense to try to perform a logical or arithmetic operation
	 involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      /* COND is a bare comparison such as (x < y); treat it as the
	 equivalent COND_EXPR with boolean true/false arms of its own
	 truth type.  */
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert (arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert (cond_type, true_value);
      /* Preserve the original operand order of CODE.  */
      if (cond_first_p)
	lhs = fold_build2 (code, type, true_value, arg);
      else
	lhs = fold_build2 (code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert (cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2 (code, type, false_value, arg);
      else
	rhs = fold_build2 (code, type, arg, false_value);
    }

  test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
  return fold_convert (type, test);
}
5685
5686
5687 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5688
5689 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5690 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5691 ADDEND is the same as X.
5692
5693 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5694 and finite. The problematic cases are when X is zero, and its mode
5695 has signed zeros. In the case of rounding towards -infinity,
5696 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5697 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5698
static bool
fold_real_zero_addition_p (tree type, tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* (TIGCC 20050210) This is invalid independently of the rounding mode and the
     type of the zero for 3-sign-zeros.  */
  /* NOTE: this unconditional return deliberately disables the fold whenever
     signed zeros are honored.  Everything after it is intentionally
     unreachable and is kept only for comparison with the upstream FSF
     sources.  */
  return false;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
5728
5729 /* Subroutine of fold() that checks comparisons of built-in math
5730 functions against real constants.
5731
5732 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5733 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5734 is the type of the result and ARG0 and ARG1 are the operands of the
5735 comparison. ARG1 must be a TREE_REAL_CST.
5736
5737 The function returns the constant folded tree if a simplification
5738 can be made, and NULL_TREE otherwise. */
5739
static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  if (BUILTIN_SQRT_P (fcode))
    {
      /* ARG0 is a call to sqrt; ARG is the actual argument X of the call
	 and MODE the machine mode of the sqrt result.  */
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand (type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand (type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2 (GE_EXPR, type, arg,
			      build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  /* Square the (nonnegative) bound: sqrt(x) > c becomes x > c*c.
	     real_convert rounds c*c back to MODE's precision.  */
	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2 (EQ_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand (type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2 (code, type, arg,
			      build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  /* As above: sqrt(x) < c becomes x < c*c, modulo NaN/Inf care.  */
	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand (type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2 (NE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2 (GE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.
		 ARG is about to be duplicated via save_expr, which is
		 only safe inside a function and without placeholders.  */
	      if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (NE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2 (code, type, arg,
				build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (code, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }
	}
    }

  return NULL_TREE;
}
5854
5855 /* Subroutine of fold() that optimizes comparisons against Infinities,
5856 either +Inf or -Inf.
5857
5858 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5859 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5860 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5861
5862 The function returns the constant folded tree if a simplification
5863 can be made, and NULL_TREE otherwise. */
5864
static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  /* MAX and TEMP are only used inside the #if 0 region below; they may
     trigger unused-variable warnings in this TIGCC configuration.  */
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, unless we must honor sNaNs
	 (in which case the comparison may raise an exception).  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).
	 save_expr requires being inside a function and no placeholders.  */
      if (lang_hooks.decls.global_bindings_p () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold_build2 (EQ_EXPR, type, arg0, arg0);
	}
      break;

#if 0 /* (TIGCC 20050205) */
    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
			    arg0, build_real (TREE_TYPE (arg0), max));

      /* The transformation below creates non-gimple code and thus is
	 not appropriate if we are in gimple form.  */
      if (in_gimple_form)
	return NULL_TREE;

      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1 (TRUTH_NOT_EXPR, type, temp);
#endif /* 0 */

    default:
      break;
    }

  return NULL_TREE;
}
5939
5940 /* Subroutine of fold() that optimizes comparisons of a division by
5941 a nonzero integer constant against an integer constant, i.e.
5942 X/C1 op C2.
5943
5944 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5945 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST
   (this routine manipulates it with mul_double/int_const_binop).
5947
5948 The function returns the constant folded tree if a simplification
5949 can be made, and NULL_TREE otherwise. */
5950
static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  /* ARG0 is X/C1; ARG00 = X, ARG01 = C1, ARG1 = C2.  We compute the
     inclusive range [LO, HI] of X values for which X/C1 == C2 and then
     turn the comparison into a range check against that interval.  */
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double (TREE_INT_CST_LOW (arg01),
			 TREE_INT_CST_HIGH (arg01),
			 TREE_INT_CST_LOW (arg1),
			 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
  prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
  /* -1: overflowing is always undefined here, so record it.  */
  prod = force_fit_type (prod, -1, overflow, false);

  if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
    {
      /* Unsigned division truncates toward zero: the range is
	 [C1*C2, C1*C2 + (C1-1)].  */
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double (TREE_INT_CST_LOW (prod),
			     TREE_INT_CST_HIGH (prod),
			     TREE_INT_CST_LOW (tmp),
			     TREE_INT_CST_HIGH (tmp),
			     &lpart, &hpart);
      hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
      /* Propagate any overflow from the multiplication into HI.  */
      hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
			   TREE_CONSTANT_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      /* Signed division with a positive divisor; the side of PROD the
	 slack (C1-1) lies on depends on the sign of C2.  */
      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	case  0:
	  /* C2 == 0: X/C1 == 0 for X in [-(C1-1), C1-1].  */
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case  1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      /* TMP = C1 + 1 (<= 0 since C1 < 0); mirror image of the case above.  */
      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	case  0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case  1:
	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* TREE_OVERFLOW on LO/HI means that bound fell outside the type's
     range, so the corresponding comparison is decided statically.  */
  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2 (GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	return omit_one_operand (type, integer_zero_node, arg00);
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	return omit_one_operand (type, integer_one_node, arg00);
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
6083
6084
6085 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6086 equality/inequality test, then return a simplified form of the test
6087 using a sign testing. Otherwise return NULL. TYPE is the desired
6088 result type. */
6089
6090 static tree
fold_single_bit_test_into_sign_test(enum tree_code code,tree arg0,tree arg1,tree result_type)6091 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6092 tree result_type)
6093 {
6094 /* If this is testing a single bit, we can optimize the test. */
6095 if ((code == NE_EXPR || code == EQ_EXPR)
6096 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6097 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6098 {
6099 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6100 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6101 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6102
6103 if (arg00 != NULL_TREE
6104 /* This is only a win if casting to a signed type is cheap,
6105 i.e. when arg00's type is not a partial mode. */
6106 && TYPE_PRECISION (TREE_TYPE (arg00))
6107 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6108 {
6109 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6110 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6111 result_type, fold_convert (stype, arg00),
6112 fold_convert (stype, integer_zero_node));
6113 }
6114 }
6115
6116 return NULL_TREE;
6117 }
6118
6119 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6120 equality/inequality test, then return a simplified form of
6121 the test using shifts and logical operations. Otherwise return
6122 NULL. TYPE is the desired result type. */
6123
tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
		      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      /* BITNUM is the position of the single set bit in the mask.  */
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.
	 NOTE(review): `bitnum - TYPE_PRECISION (type)' is negative here;
	 compare_tree_int takes an unsigned bound, so this relies on the
	 conversion behavior of compare_tree_int -- confirm against its
	 definition before touching this condition.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      /* Shift the tested bit down to bit 0.  */
      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      /* For == 0 the result is the inverted bit, so XOR with 1.  */
      if (code == EQ_EXPR)
	inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
			     inner, integer_one_node);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type,
		      inner, integer_one_node);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
6199
6200 /* Check whether we are allowed to reorder operands arg0 and arg1,
6201 such that the evaluation of arg1 occurs before arg0. */
6202
6203 static bool
reorder_operands_p(tree arg0,tree arg1)6204 reorder_operands_p (tree arg0, tree arg1)
6205 {
6206 if (! flag_evaluation_order)
6207 return true;
6208 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6209 return true;
6210 return ! TREE_SIDE_EFFECTS (arg0)
6211 && ! TREE_SIDE_EFFECTS (arg1);
6212 }
6213
6214 /* Test whether it is preferable two swap two operands, ARG0 and
6215 ARG1, for example because ARG0 is an integer constant and ARG1
6216 isn't. If REORDER is true, only recommend swapping if we can
6217 evaluate the operands in reverse order. */
6218
6219 bool
tree_swap_operands_p(tree arg0,tree arg1,bool reorder)6220 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6221 {
6222 STRIP_SIGN_NOPS (arg0);
6223 STRIP_SIGN_NOPS (arg1);
6224
6225 if (TREE_CODE (arg1) == INTEGER_CST)
6226 return 0;
6227 if (TREE_CODE (arg0) == INTEGER_CST)
6228 return 1;
6229
6230 if (TREE_CODE (arg1) == REAL_CST)
6231 return 0;
6232 if (TREE_CODE (arg0) == REAL_CST)
6233 return 1;
6234
6235 if (TREE_CODE (arg1) == COMPLEX_CST)
6236 return 0;
6237 if (TREE_CODE (arg0) == COMPLEX_CST)
6238 return 1;
6239
6240 if (TREE_CONSTANT (arg1))
6241 return 0;
6242 if (TREE_CONSTANT (arg0))
6243 return 1;
6244
6245 if (optimize_size)
6246 return 0;
6247
6248 if (reorder && flag_evaluation_order
6249 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6250 return 0;
6251
6252 if (DECL_P (arg1))
6253 return 0;
6254 if (DECL_P (arg0))
6255 return 1;
6256
6257 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6258 for commutative and comparison operators. Ensuring a canonical
6259 form allows the optimizers to find additional redundancies without
6260 having to explicitly check for both orderings. */
6261 if (TREE_CODE (arg0) == SSA_NAME
6262 && TREE_CODE (arg1) == SSA_NAME
6263 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6264 return 1;
6265
6266 return 0;
6267 }
6268
6269 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6270 ARG0 is extended to a wider type. */
6271
static tree
fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  /* Nothing to do unless ARG0 really is a widening of a narrower value.  */
  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, shorter_type);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2 (code, type, arg0_unw,
			fold_convert (shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with the integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  /* ABOVE: ARG1 exceeds the shorter type's maximum;
     BELOW: ARG1 is beneath its minimum.  */
  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand (type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand (type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand (type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand (type, integer_zero_node, arg0);
      /* NOTE(review): no break -- falls through into GT_EXPR/GE_EXPR.
	 Harmless, since ABOVE and BELOW are both false at this point and
	 the next case then also falls to the default, but this looks like
	 a missing break (later GCC versions added one).  */

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand (type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand (type, integer_one_node, arg0);
      /* NOTE(review): likewise falls through to default; harmless.  */

    default:
      break;
    }

  return NULL_TREE;
}
6358
6359 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6360 ARG0 just the signedness is changed. */
6361
static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner, tmp;
  tree inner_type, outer_type;

  /* ARG0 must be a conversion whose operand we can compare directly.  */
  if (TREE_CODE (arg0) != NOP_EXPR
      && TREE_CODE (arg0) != CONVERT_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  /* Only a pure signedness change (same precision) is handled here.  */
  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  /* ARG1 must be a constant we can reinterpret, or a conversion from
     the same inner type.  */
  if (TREE_CODE (arg1) != INTEGER_CST
      && !((TREE_CODE (arg1) == NOP_EXPR
	    || TREE_CODE (arg1) == CONVERT_EXPR)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  /* When signedness actually differs, only equality comparisons are
     insensitive to the change; ordered comparisons are not.  */
  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      /* Rebuild the constant in the inner type, preserving the original
	 overflow flags.  */
      tmp = build_int_cst_wide (inner_type,
				TREE_INT_CST_LOW (arg1),
				TREE_INT_CST_HIGH (arg1));
      arg1 = force_fit_type (tmp, 0,
			     TREE_OVERFLOW (arg1),
			     TREE_CONSTANT_OVERFLOW (arg1));
    }
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
6414
6415 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6416 step of the array. Reconstructs s and delta in the case of s * delta
6417 being an integer constant (and thus already folded).
6418 ADDR is the address. MULT is the multiplicative expression.
6419 If the function succeeds, the new address expression is returned. Otherwise
6420 NULL_TREE is returned. */
6421
static tree
try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      /* A constant offset; S stays unknown so that any element size
	 dividing DELTA evenly can be matched below.  */
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Walk down the component references looking for an ARRAY_REF whose
     element size matches S (or evenly divides DELTA when S is unset).
     `continue' advances to the next inner reference via the for-step.  */
  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  /* ITYPE is the index type of this array; kept for building the
	     adjusted index after the loop.  */
	  itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! itype)
	    continue;

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  break;
	}

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  /* Duplicate the chain of component refs down to the matched ARRAY_REF
     so the original tree is left untouched.  */
  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  /* Fold the delta into the array index: idx CODE delta.  */
  TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
				       fold_convert (itype,
						     TREE_OPERAND (pos, 1)),
				       fold_convert (itype, delta));

  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
6518
6519
6520 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6521 means A >= Y && A != MAX, but in this case we know that
6522 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6523
6524 static tree
fold_to_nonsharp_ineq_using_bound(tree ineq,tree bound)6525 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6526 {
6527 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6528
6529 if (TREE_CODE (bound) == LT_EXPR)
6530 a = TREE_OPERAND (bound, 0);
6531 else if (TREE_CODE (bound) == GT_EXPR)
6532 a = TREE_OPERAND (bound, 1);
6533 else
6534 return NULL_TREE;
6535
6536 typea = TREE_TYPE (a);
6537 if (!INTEGRAL_TYPE_P (typea)
6538 && !POINTER_TYPE_P (typea))
6539 return NULL_TREE;
6540
6541 if (TREE_CODE (ineq) == LT_EXPR)
6542 {
6543 a1 = TREE_OPERAND (ineq, 1);
6544 y = TREE_OPERAND (ineq, 0);
6545 }
6546 else if (TREE_CODE (ineq) == GT_EXPR)
6547 {
6548 a1 = TREE_OPERAND (ineq, 0);
6549 y = TREE_OPERAND (ineq, 1);
6550 }
6551 else
6552 return NULL_TREE;
6553
6554 if (TREE_TYPE (a1) != typea)
6555 return NULL_TREE;
6556
6557 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6558 if (!integer_onep (diff))
6559 return NULL_TREE;
6560
6561 return fold_build2 (GE_EXPR, type, a, y);
6562 }
6563
6564 /* Fold a unary expression of code CODE and type TYPE with operand
6565 OP0. Return the folded expression if folding is successful.
6566 Otherwise, return NULL_TREE. */
6567
6568 tree
fold_unary(enum tree_code code,tree type,tree op0)6569 fold_unary (enum tree_code code, tree type, tree op0)
6570 {
6571 tree tem;
6572 tree arg0;
6573 enum tree_code_class kind = TREE_CODE_CLASS (code);
6574
6575 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6576 && TREE_CODE_LENGTH (code) == 1);
6577
6578 arg0 = op0;
6579 if (arg0)
6580 {
6581 if (code == NOP_EXPR || code == CONVERT_EXPR
6582 || code == FLOAT_EXPR || code == ABS_EXPR)
6583 {
6584 /* Don't use STRIP_NOPS, because signedness of argument type
6585 matters. */
6586 STRIP_SIGN_NOPS (arg0);
6587 }
6588 else
6589 {
6590 /* Strip any conversions that don't change the mode. This
6591 is safe for every expression, except for a comparison
6592 expression because its signedness is derived from its
6593 operands.
6594
6595 Note that this is done as an internal manipulation within
6596 the constant folder, in order to find the simplest
6597 representation of the arguments so that their form can be
6598 studied. In any cases, the appropriate type conversions
6599 should be put back in the tree that will get out of the
6600 constant folder. */
6601 STRIP_NOPS (arg0);
6602 }
6603 }
6604
6605 if (TREE_CODE_CLASS (code) == tcc_unary)
6606 {
6607 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6608 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6609 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6610 else if (TREE_CODE (arg0) == COND_EXPR)
6611 {
6612 tree arg01 = TREE_OPERAND (arg0, 1);
6613 tree arg02 = TREE_OPERAND (arg0, 2);
6614 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6615 arg01 = fold_build1 (code, type, arg01);
6616 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6617 arg02 = fold_build1 (code, type, arg02);
6618 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6619 arg01, arg02);
6620
6621 /* If this was a conversion, and all we did was to move into
6622 inside the COND_EXPR, bring it back out. But leave it if
6623 it is a conversion from integer to integer and the
6624 result precision is no wider than a word since such a
6625 conversion is cheap and may be optimized away by combine,
6626 while it couldn't if it were outside the COND_EXPR. Then return
6627 so we don't get into an infinite recursion loop taking the
6628 conversion out and then back in. */
6629
6630 if ((code == NOP_EXPR || code == CONVERT_EXPR
6631 || code == NON_LVALUE_EXPR)
6632 && TREE_CODE (tem) == COND_EXPR
6633 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6634 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6635 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6636 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6637 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6638 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6639 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6640 && (INTEGRAL_TYPE_P
6641 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6642 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6643 || flag_syntax_only))
6644 tem = build1 (code, type,
6645 build3 (COND_EXPR,
6646 TREE_TYPE (TREE_OPERAND
6647 (TREE_OPERAND (tem, 1), 0)),
6648 TREE_OPERAND (tem, 0),
6649 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6650 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6651 return tem;
6652 }
6653 else if (COMPARISON_CLASS_P (arg0))
6654 {
6655 if (TREE_CODE (type) == BOOLEAN_TYPE)
6656 {
6657 arg0 = copy_node (arg0);
6658 TREE_TYPE (arg0) = type;
6659 return arg0;
6660 }
6661 else if (TREE_CODE (type) != INTEGER_TYPE)
6662 return fold_build3 (COND_EXPR, type, arg0,
6663 fold_build1 (code, type,
6664 integer_one_node),
6665 fold_build1 (code, type,
6666 integer_zero_node));
6667 }
6668 }
6669
6670 switch (code)
6671 {
6672 case NOP_EXPR:
6673 case FLOAT_EXPR:
6674 case CONVERT_EXPR:
6675 case FIX_TRUNC_EXPR:
6676 case FIX_CEIL_EXPR:
6677 case FIX_FLOOR_EXPR:
6678 case FIX_ROUND_EXPR:
6679 if (TREE_TYPE (op0) == type)
6680 return op0;
6681
6682 /* Handle cases of two conversions in a row. */
6683 if (TREE_CODE (op0) == NOP_EXPR
6684 || TREE_CODE (op0) == CONVERT_EXPR)
6685 {
6686 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6687 tree inter_type = TREE_TYPE (op0);
6688 int inside_int = INTEGRAL_TYPE_P (inside_type);
6689 int inside_ptr = POINTER_TYPE_P (inside_type);
6690 int inside_float = FLOAT_TYPE_P (inside_type);
6691 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6692 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6693 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6694 int inter_int = INTEGRAL_TYPE_P (inter_type);
6695 int inter_ptr = POINTER_TYPE_P (inter_type);
6696 int inter_float = FLOAT_TYPE_P (inter_type);
6697 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6698 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6699 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6700 int final_int = INTEGRAL_TYPE_P (type);
6701 int final_ptr = POINTER_TYPE_P (type);
6702 int final_float = FLOAT_TYPE_P (type);
6703 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6704 unsigned int final_prec = TYPE_PRECISION (type);
6705 int final_unsignedp = TYPE_UNSIGNED (type);
6706
6707 /* In addition to the cases of two conversions in a row
6708 handled below, if we are converting something to its own
6709 type via an object of identical or wider precision, neither
6710 conversion is needed. */
6711 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6712 && ((inter_int && final_int) || (inter_float && final_float))
6713 && inter_prec >= final_prec)
6714 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6715
6716 /* Likewise, if the intermediate and final types are either both
6717 float or both integer, we don't need the middle conversion if
6718 it is wider than the final type and doesn't change the signedness
6719 (for integers). Avoid this if the final type is a pointer
6720 since then we sometimes need the inner conversion. Likewise if
6721 the outer has a precision not equal to the size of its mode. */
6722 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6723 || (inter_float && inside_float)
6724 || (inter_vec && inside_vec))
6725 && inter_prec >= inside_prec
6726 && (inter_float || inter_vec
6727 || inter_unsignedp == inside_unsignedp)
6728 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6729 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6730 && ! final_ptr
6731 && (! final_vec || inter_prec == inside_prec))
6732 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6733
6734 /* If we have a sign-extension of a zero-extended value, we can
6735 replace that by a single zero-extension. */
6736 if (inside_int && inter_int && final_int
6737 && inside_prec < inter_prec && inter_prec < final_prec
6738 && inside_unsignedp && !inter_unsignedp)
6739 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6740
6741 /* Two conversions in a row are not needed unless:
6742 - some conversion is floating-point (overstrict for now), or
6743 - some conversion is a vector (overstrict for now), or
6744 - the intermediate type is narrower than both initial and
6745 final, or
6746 - the intermediate type and innermost type differ in signedness,
6747 and the outermost type is wider than the intermediate, or
6748 - the initial type is a pointer type and the precisions of the
6749 intermediate and final types differ, or
6750 - the final type is a pointer type and the precisions of the
6751 initial and intermediate types differ. */
6752 if (! inside_float && ! inter_float && ! final_float
6753 && ! inside_vec && ! inter_vec && ! final_vec
6754 && (inter_prec > inside_prec || inter_prec > final_prec)
6755 && ! (inside_int && inter_int
6756 && inter_unsignedp != inside_unsignedp
6757 && inter_prec < final_prec)
6758 && ((inter_unsignedp && inter_prec > inside_prec)
6759 == (final_unsignedp && final_prec > inter_prec))
6760 && ! (inside_ptr && inter_prec != final_prec)
6761 && ! (final_ptr && inside_prec != inter_prec)
6762 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6763 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6764 && ! final_ptr)
6765 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6766 }
6767
6768 /* Handle (T *)&A.B.C for A being of type T and B and C
6769 living at offset zero. This occurs frequently in
6770 C++ upcasting and then accessing the base. */
6771 if (TREE_CODE (op0) == ADDR_EXPR
6772 && POINTER_TYPE_P (type)
6773 && handled_component_p (TREE_OPERAND (op0, 0)))
6774 {
6775 HOST_WIDE_INT bitsize, bitpos;
6776 tree offset;
6777 enum machine_mode mode;
6778 int unsignedp, volatilep;
6779 tree base = TREE_OPERAND (op0, 0);
6780 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6781 &mode, &unsignedp, &volatilep, false);
6782 /* If the reference was to a (constant) zero offset, we can use
6783 the address of the base if it has the same base type
6784 as the result type. */
6785 if (! offset && bitpos == 0
6786 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
6787 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
6788 return fold_convert (type, build_fold_addr_expr (base));
6789 }
6790
6791 if (TREE_CODE (op0) == MODIFY_EXPR
6792 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6793 /* Detect assigning a bitfield. */
6794 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6795 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6796 {
6797 /* Don't leave an assignment inside a conversion
6798 unless assigning a bitfield. */
6799 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6800 /* First do the assignment, then return converted constant. */
6801 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6802 TREE_NO_WARNING (tem) = 1;
6803 TREE_USED (tem) = 1;
6804 return tem;
6805 }
6806
6807 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6808 constants (if x has signed type, the sign bit cannot be set
6809 in c). This folds extension into the BIT_AND_EXPR. */
6810 if (INTEGRAL_TYPE_P (type)
6811 && TREE_CODE (type) != BOOLEAN_TYPE
6812 && TREE_CODE (op0) == BIT_AND_EXPR
6813 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6814 {
6815 tree and = op0;
6816 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6817 int change = 0;
6818
6819 if (TYPE_UNSIGNED (TREE_TYPE (and))
6820 || (TYPE_PRECISION (type)
6821 <= TYPE_PRECISION (TREE_TYPE (and))))
6822 change = 1;
6823 else if (TYPE_PRECISION (TREE_TYPE (and1))
6824 <= HOST_BITS_PER_WIDE_INT
6825 && host_integerp (and1, 1))
6826 {
6827 unsigned HOST_WIDE_INT cst;
6828
6829 cst = tree_low_cst (and1, 1);
6830 cst &= (HOST_WIDE_INT) -1
6831 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6832 change = (cst == 0);
6833 #ifdef LOAD_EXTEND_OP
6834 if (change
6835 && !flag_syntax_only
6836 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6837 == ZERO_EXTEND))
6838 {
6839 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6840 and0 = fold_convert (uns, and0);
6841 and1 = fold_convert (uns, and1);
6842 }
6843 #endif
6844 }
6845 if (change)
6846 {
6847 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6848 TREE_INT_CST_HIGH (and1));
6849 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6850 TREE_CONSTANT_OVERFLOW (and1));
6851 return fold_build2 (BIT_AND_EXPR, type,
6852 fold_convert (type, and0), tem);
6853 }
6854 }
6855
6856 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6857 T2 being pointers to types of the same size. */
6858 if (POINTER_TYPE_P (type)
6859 && BINARY_CLASS_P (arg0)
6860 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6861 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6862 {
6863 tree arg00 = TREE_OPERAND (arg0, 0);
6864 tree t0 = type;
6865 tree t1 = TREE_TYPE (arg00);
6866 tree tt0 = TREE_TYPE (t0);
6867 tree tt1 = TREE_TYPE (t1);
6868 tree s0 = TYPE_SIZE (tt0);
6869 tree s1 = TYPE_SIZE (tt1);
6870
6871 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6872 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6873 TREE_OPERAND (arg0, 1));
6874 }
6875
6876 tem = fold_convert_const (code, type, arg0);
6877 return tem ? tem : NULL_TREE;
6878
6879 case VIEW_CONVERT_EXPR:
6880 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6881 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6882 return NULL_TREE;
6883
6884 case NEGATE_EXPR:
6885 if (negate_expr_p (arg0))
6886 return fold_convert (type, negate_expr (arg0));
6887 /* Convert - (~A) to A + 1. */
6888 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
6889 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
6890 build_int_cst (type, 1));
6891 return NULL_TREE;
6892
6893 case ABS_EXPR:
6894 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6895 return fold_abs_const (arg0, type);
6896 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6897 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
6898 /* Convert fabs((double)float) into (double)fabsf(float). */
6899 else if (TREE_CODE (arg0) == NOP_EXPR
6900 && TREE_CODE (type) == REAL_TYPE)
6901 {
6902 tree targ0 = strip_float_extensions (arg0);
6903 if (targ0 != arg0)
6904 return fold_convert (type, fold_build1 (ABS_EXPR,
6905 TREE_TYPE (targ0),
6906 targ0));
6907 }
6908 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
6909 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
6910 return arg0;
6911
6912 /* Strip sign ops from argument. */
6913 if (TREE_CODE (type) == REAL_TYPE)
6914 {
6915 tem = fold_strip_sign_ops (arg0);
6916 if (tem)
6917 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
6918 }
6919 return NULL_TREE;
6920
6921 case CONJ_EXPR:
6922 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6923 return fold_convert (type, arg0);
6924 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6925 return build2 (COMPLEX_EXPR, type,
6926 TREE_OPERAND (arg0, 0),
6927 negate_expr (TREE_OPERAND (arg0, 1)));
6928 else if (TREE_CODE (arg0) == COMPLEX_CST)
6929 return build_complex (type, TREE_REALPART (arg0),
6930 negate_expr (TREE_IMAGPART (arg0)));
6931 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6932 return fold_build2 (TREE_CODE (arg0), type,
6933 fold_build1 (CONJ_EXPR, type,
6934 TREE_OPERAND (arg0, 0)),
6935 fold_build1 (CONJ_EXPR, type,
6936 TREE_OPERAND (arg0, 1)));
6937 else if (TREE_CODE (arg0) == CONJ_EXPR)
6938 return TREE_OPERAND (arg0, 0);
6939 return NULL_TREE;
6940
6941 case BIT_NOT_EXPR:
6942 if (TREE_CODE (arg0) == INTEGER_CST)
6943 return fold_not_const (arg0, type);
6944 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6945 return TREE_OPERAND (arg0, 0);
6946 /* Convert ~ (-A) to A - 1. */
6947 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
6948 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
6949 build_int_cst (type, 1));
6950 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
6951 else if (INTEGRAL_TYPE_P (type)
6952 && ((TREE_CODE (arg0) == MINUS_EXPR
6953 && integer_onep (TREE_OPERAND (arg0, 1)))
6954 || (TREE_CODE (arg0) == PLUS_EXPR
6955 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
6956 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
6957 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
6958 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6959 && (tem = fold_unary (BIT_NOT_EXPR, type,
6960 fold_convert (type,
6961 TREE_OPERAND (arg0, 0)))))
6962 return fold_build2 (BIT_XOR_EXPR, type, tem,
6963 fold_convert (type, TREE_OPERAND (arg0, 1)));
6964 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6965 && (tem = fold_unary (BIT_NOT_EXPR, type,
6966 fold_convert (type,
6967 TREE_OPERAND (arg0, 1)))))
6968 return fold_build2 (BIT_XOR_EXPR, type,
6969 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
6970
6971 return NULL_TREE;
6972
6973 case TRUTH_NOT_EXPR:
6974 /* The argument to invert_truthvalue must have Boolean type. */
6975 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
6976 arg0 = fold_convert (boolean_type_node, arg0);
6977
6978 /* Note that the operand of this must be an int
6979 and its values must be 0 or 1.
6980 ("true" is a fixed value perhaps depending on the language,
6981 but we don't handle values other than 1 correctly yet.) */
6982 tem = invert_truthvalue (arg0);
6983 /* Avoid infinite recursion. */
6984 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6985 return NULL_TREE;
6986 return fold_convert (type, tem);
6987
6988 case REALPART_EXPR:
6989 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6990 return NULL_TREE;
6991 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6992 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6993 TREE_OPERAND (arg0, 1));
6994 else if (TREE_CODE (arg0) == COMPLEX_CST)
6995 return TREE_REALPART (arg0);
6996 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6997 return fold_build2 (TREE_CODE (arg0), type,
6998 fold_build1 (REALPART_EXPR, type,
6999 TREE_OPERAND (arg0, 0)),
7000 fold_build1 (REALPART_EXPR, type,
7001 TREE_OPERAND (arg0, 1)));
7002 return NULL_TREE;
7003
7004 case IMAGPART_EXPR:
7005 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7006 return fold_convert (type, integer_zero_node);
7007 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7008 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7009 TREE_OPERAND (arg0, 0));
7010 else if (TREE_CODE (arg0) == COMPLEX_CST)
7011 return TREE_IMAGPART (arg0);
7012 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7013 return fold_build2 (TREE_CODE (arg0), type,
7014 fold_build1 (IMAGPART_EXPR, type,
7015 TREE_OPERAND (arg0, 0)),
7016 fold_build1 (IMAGPART_EXPR, type,
7017 TREE_OPERAND (arg0, 1)));
7018 return NULL_TREE;
7019
7020 default:
7021 return NULL_TREE;
7022 } /* switch (code) */
7023 }
7024
7025 /* Return 1 if EXPR is the real constant UNSIGNED_ZERO. */
7026
7027 static int
real_uzerop(tree expr)7028 real_uzerop (tree expr)
7029 {
7030 STRIP_NOPS (expr);
7031
7032 return ((TREE_CODE (expr) == REAL_CST
7033 && ! TREE_CONSTANT_OVERFLOW (expr)
7034 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (expr), UNSIGNED_ZERO))
7035 || (TREE_CODE (expr) == COMPLEX_CST
7036 && real_uzerop (TREE_REALPART (expr))
7037 && real_uzerop (TREE_IMAGPART (expr))));
7038 }
7039
7040 /* Fold a binary expression of code CODE and type TYPE with operands
7041 OP0 and OP1. Return the folded expression if folding is
7042 successful. Otherwise, return NULL_TREE. */
7043
7044 tree
fold_binary(enum tree_code code,tree type,tree op0,tree op1)7045 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7046 {
7047 tree t1 = NULL_TREE;
7048 tree tem;
7049 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7050 enum tree_code_class kind = TREE_CODE_CLASS (code);
7051
7052 /* WINS will be nonzero when the switch is done
7053 if all operands are constant. */
7054 int wins = 1;
7055
7056 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7057 && TREE_CODE_LENGTH (code) == 2);
7058
7059 arg0 = op0;
7060 arg1 = op1;
7061
7062 if (arg0)
7063 {
7064 tree subop;
7065
7066 /* Strip any conversions that don't change the mode. This is
7067 safe for every expression, except for a comparison expression
7068 because its signedness is derived from its operands. So, in
7069 the latter case, only strip conversions that don't change the
7070 signedness.
7071
7072 Note that this is done as an internal manipulation within the
7073 constant folder, in order to find the simplest representation
7074 of the arguments so that their form can be studied. In any
7075 cases, the appropriate type conversions should be put back in
7076 the tree that will get out of the constant folder. */
7077 if (kind == tcc_comparison)
7078 STRIP_SIGN_NOPS (arg0);
7079 else
7080 STRIP_NOPS (arg0);
7081
7082 if (TREE_CODE (arg0) == COMPLEX_CST)
7083 subop = TREE_REALPART (arg0);
7084 else
7085 subop = arg0;
7086
7087 if (TREE_CODE (subop) != INTEGER_CST
7088 && TREE_CODE (subop) != REAL_CST)
7089 /* Note that TREE_CONSTANT isn't enough:
7090 static var addresses are constant but we can't
7091 do arithmetic on them. */
7092 wins = 0;
7093 }
7094
7095 if (arg1)
7096 {
7097 tree subop;
7098
7099 /* Strip any conversions that don't change the mode. This is
7100 safe for every expression, except for a comparison expression
7101 because its signedness is derived from its operands. So, in
7102 the latter case, only strip conversions that don't change the
7103 signedness.
7104
7105 Note that this is done as an internal manipulation within the
7106 constant folder, in order to find the simplest representation
7107 of the arguments so that their form can be studied. In any
7108 cases, the appropriate type conversions should be put back in
7109 the tree that will get out of the constant folder. */
7110 if (kind == tcc_comparison)
7111 STRIP_SIGN_NOPS (arg1);
7112 else
7113 STRIP_NOPS (arg1);
7114
7115 if (TREE_CODE (arg1) == COMPLEX_CST)
7116 subop = TREE_REALPART (arg1);
7117 else
7118 subop = arg1;
7119
7120 if (TREE_CODE (subop) != INTEGER_CST
7121 && TREE_CODE (subop) != REAL_CST)
7122 /* Note that TREE_CONSTANT isn't enough:
7123 static var addresses are constant but we can't
7124 do arithmetic on them. */
7125 wins = 0;
7126 }
7127
7128 /* If this is a commutative operation, and ARG0 is a constant, move it
7129 to ARG1 to reduce the number of tests below. */
7130 if (commutative_tree_code (code)
7131 && tree_swap_operands_p (arg0, arg1, true))
7132 return fold_build2 (code, type, op1, op0);
7133
7134 /* Now WINS is set as described above,
7135 ARG0 is the first operand of EXPR,
7136 and ARG1 is the second operand (if it has more than one operand).
7137
7138 First check for cases where an arithmetic operation is applied to a
7139 compound, conditional, or comparison operation. Push the arithmetic
7140 operation inside the compound or conditional to see if any folding
7141 can then be done. Convert comparison to conditional for this purpose.
7142 The also optimizes non-constant cases that used to be done in
7143 expand_expr.
7144
7145 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7146 one of the operands is a comparison and the other is a comparison, a
7147 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
7148 code below would make the expression more complex. Change it to a
7149 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7150 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7151
7152 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7153 || code == EQ_EXPR || code == NE_EXPR)
7154 && ((truth_value_p (TREE_CODE (arg0))
7155 && (truth_value_p (TREE_CODE (arg1))
7156 || (TREE_CODE (arg1) == BIT_AND_EXPR
7157 && integer_onep (TREE_OPERAND (arg1, 1)))))
7158 || (truth_value_p (TREE_CODE (arg1))
7159 && (truth_value_p (TREE_CODE (arg0))
7160 || (TREE_CODE (arg0) == BIT_AND_EXPR
7161 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7162 {
7163 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7164 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7165 : TRUTH_XOR_EXPR,
7166 boolean_type_node,
7167 fold_convert (boolean_type_node, arg0),
7168 fold_convert (boolean_type_node, arg1));
7169
7170 if (code == EQ_EXPR)
7171 tem = invert_truthvalue (tem);
7172
7173 return fold_convert (type, tem);
7174 }
7175
7176 if (TREE_CODE_CLASS (code) == tcc_binary
7177 || TREE_CODE_CLASS (code) == tcc_comparison)
7178 {
7179 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7180 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7181 fold_build2 (code, type,
7182 TREE_OPERAND (arg0, 1), op1));
7183 if (TREE_CODE (arg1) == COMPOUND_EXPR
7184 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7185 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7186 fold_build2 (code, type,
7187 op0, TREE_OPERAND (arg1, 1)));
7188
7189 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7190 {
7191 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7192 arg0, arg1,
7193 /*cond_first_p=*/1);
7194 if (tem != NULL_TREE)
7195 return tem;
7196 }
7197
7198 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7199 {
7200 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7201 arg1, arg0,
7202 /*cond_first_p=*/0);
7203 if (tem != NULL_TREE)
7204 return tem;
7205 }
7206 }
7207
7208 switch (code)
7209 {
7210 case PLUS_EXPR:
7211 /* A + (-B) -> A - B */
7212 if (TREE_CODE (arg1) == NEGATE_EXPR)
7213 return fold_build2 (MINUS_EXPR, type,
7214 fold_convert (type, arg0),
7215 fold_convert (type, TREE_OPERAND (arg1, 0)));
7216 /* (-A) + B -> B - A */
7217 if (TREE_CODE (arg0) == NEGATE_EXPR
7218 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7219 return fold_build2 (MINUS_EXPR, type,
7220 fold_convert (type, arg1),
7221 fold_convert (type, TREE_OPERAND (arg0, 0)));
7222 /* Convert ~A + 1 to -A. */
7223 if (INTEGRAL_TYPE_P (type)
7224 && TREE_CODE (arg0) == BIT_NOT_EXPR
7225 && integer_onep (arg1))
7226 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7227
7228 if (! FLOAT_TYPE_P (type))
7229 {
7230 if (integer_zerop (arg1))
7231 return non_lvalue (fold_convert (type, arg0));
7232
7233 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7234 with a constant, and the two constants have no bits in common,
7235 we should treat this as a BIT_IOR_EXPR since this may produce more
7236 simplifications. */
7237 if (TREE_CODE (arg0) == BIT_AND_EXPR
7238 && TREE_CODE (arg1) == BIT_AND_EXPR
7239 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7240 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7241 && integer_zerop (const_binop (BIT_AND_EXPR,
7242 TREE_OPERAND (arg0, 1),
7243 TREE_OPERAND (arg1, 1), 0)))
7244 {
7245 code = BIT_IOR_EXPR;
7246 goto bit_ior;
7247 }
7248
7249 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7250 (plus (plus (mult) (mult)) (foo)) so that we can
7251 take advantage of the factoring cases below. */
7252 if (((TREE_CODE (arg0) == PLUS_EXPR
7253 || TREE_CODE (arg0) == MINUS_EXPR)
7254 && TREE_CODE (arg1) == MULT_EXPR)
7255 || ((TREE_CODE (arg1) == PLUS_EXPR
7256 || TREE_CODE (arg1) == MINUS_EXPR)
7257 && TREE_CODE (arg0) == MULT_EXPR))
7258 {
7259 tree parg0, parg1, parg, marg;
7260 enum tree_code pcode;
7261
7262 if (TREE_CODE (arg1) == MULT_EXPR)
7263 parg = arg0, marg = arg1;
7264 else
7265 parg = arg1, marg = arg0;
7266 pcode = TREE_CODE (parg);
7267 parg0 = TREE_OPERAND (parg, 0);
7268 parg1 = TREE_OPERAND (parg, 1);
7269 STRIP_NOPS (parg0);
7270 STRIP_NOPS (parg1);
7271
7272 if (TREE_CODE (parg0) == MULT_EXPR
7273 && TREE_CODE (parg1) != MULT_EXPR)
7274 return fold_build2 (pcode, type,
7275 fold_build2 (PLUS_EXPR, type,
7276 fold_convert (type, parg0),
7277 fold_convert (type, marg)),
7278 fold_convert (type, parg1));
7279 if (TREE_CODE (parg0) != MULT_EXPR
7280 && TREE_CODE (parg1) == MULT_EXPR)
7281 return fold_build2 (PLUS_EXPR, type,
7282 fold_convert (type, parg0),
7283 fold_build2 (pcode, type,
7284 fold_convert (type, marg),
7285 fold_convert (type,
7286 parg1)));
7287 }
7288
7289 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7290 {
7291 tree arg00, arg01, arg10, arg11;
7292 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7293
7294 /* (A * C) + (B * C) -> (A+B) * C.
7295 We are most concerned about the case where C is a constant,
7296 but other combinations show up during loop reduction. Since
7297 it is not difficult, try all four possibilities. */
7298
7299 arg00 = TREE_OPERAND (arg0, 0);
7300 arg01 = TREE_OPERAND (arg0, 1);
7301 arg10 = TREE_OPERAND (arg1, 0);
7302 arg11 = TREE_OPERAND (arg1, 1);
7303 same = NULL_TREE;
7304
7305 if (operand_equal_p (arg01, arg11, 0))
7306 same = arg01, alt0 = arg00, alt1 = arg10;
7307 else if (operand_equal_p (arg00, arg10, 0))
7308 same = arg00, alt0 = arg01, alt1 = arg11;
7309 else if (operand_equal_p (arg00, arg11, 0))
7310 same = arg00, alt0 = arg01, alt1 = arg10;
7311 else if (operand_equal_p (arg01, arg10, 0))
7312 same = arg01, alt0 = arg00, alt1 = arg11;
7313
7314 /* No identical multiplicands; see if we can find a common
7315 power-of-two factor in non-power-of-two multiplies. This
7316 can help in multi-dimensional array access. */
7317 else if (TREE_CODE (arg01) == INTEGER_CST
7318 && TREE_CODE (arg11) == INTEGER_CST
7319 && TREE_INT_CST_HIGH (arg01) == 0
7320 && TREE_INT_CST_HIGH (arg11) == 0)
7321 {
7322 HOST_WIDE_INT int01, int11, tmp;
7323 int01 = TREE_INT_CST_LOW (arg01);
7324 int11 = TREE_INT_CST_LOW (arg11);
7325
7326 /* Move min of absolute values to int11. */
7327 if ((int01 >= 0 ? int01 : -int01)
7328 < (int11 >= 0 ? int11 : -int11))
7329 {
7330 tmp = int01, int01 = int11, int11 = tmp;
7331 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7332 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7333 }
7334
7335 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7336 {
7337 alt0 = fold_build2 (MULT_EXPR, type, arg00,
7338 build_int_cst (NULL_TREE,
7339 int01 / int11));
7340 alt1 = arg10;
7341 same = arg11;
7342 }
7343 }
7344
7345 if (same)
7346 return fold_build2 (MULT_EXPR, type,
7347 fold_build2 (PLUS_EXPR, type,
7348 fold_convert (type, alt0),
7349 fold_convert (type, alt1)),
7350 fold_convert (type, same));
7351 }
7352
7353 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
7354 of the array. Loop optimizer sometimes produce this type of
7355 expressions. */
7356 if (TREE_CODE (arg0) == ADDR_EXPR)
7357 {
7358 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7359 if (tem)
7360 return fold_convert (type, tem);
7361 }
7362 else if (TREE_CODE (arg1) == ADDR_EXPR)
7363 {
7364 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7365 if (tem)
7366 return fold_convert (type, tem);
7367 }
7368 }
7369 else
7370 {
7371 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7372 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7373 return non_lvalue (fold_convert (type, arg0));
7374
7375 /* Likewise if the operands are reversed. */
7376 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7377 return non_lvalue (fold_convert (type, arg1));
7378
7379 /* Convert X + -C into X - C. */
7380 if (TREE_CODE (arg1) == REAL_CST
7381 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7382 {
7383 tem = fold_negate_const (arg1, type);
7384 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7385 return fold_build2 (MINUS_EXPR, type,
7386 fold_convert (type, arg0),
7387 fold_convert (type, tem));
7388 }
7389
7390 if (flag_unsafe_math_optimizations
7391 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7392 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7393 && (tem = distribute_real_division (code, type, arg0, arg1)))
7394 return tem;
7395
7396 /* Convert x+x into x*2.0. */
7397 if (operand_equal_p (arg0, arg1, 0)
7398 && SCALAR_FLOAT_TYPE_P (type))
7399 return fold_build2 (MULT_EXPR, type, arg0,
7400 build_real (type, dconst2));
7401
7402 /* Convert x*c+x into x*(c+1). */
7403 if (flag_unsafe_math_optimizations
7404 && TREE_CODE (arg0) == MULT_EXPR
7405 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7406 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7407 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7408 {
7409 REAL_VALUE_TYPE c;
7410
7411 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7412 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7413 return fold_build2 (MULT_EXPR, type, arg1,
7414 build_real (type, c));
7415 }
7416
7417 /* Convert x+x*c into x*(c+1). */
7418 if (flag_unsafe_math_optimizations
7419 && TREE_CODE (arg1) == MULT_EXPR
7420 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7421 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7422 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7423 {
7424 REAL_VALUE_TYPE c;
7425
7426 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7427 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7428 return fold_build2 (MULT_EXPR, type, arg0,
7429 build_real (type, c));
7430 }
7431
7432 /* Convert x*c1+x*c2 into x*(c1+c2). */
7433 if (flag_unsafe_math_optimizations
7434 && TREE_CODE (arg0) == MULT_EXPR
7435 && TREE_CODE (arg1) == MULT_EXPR
7436 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7437 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7438 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7439 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7440 && operand_equal_p (TREE_OPERAND (arg0, 0),
7441 TREE_OPERAND (arg1, 0), 0))
7442 {
7443 REAL_VALUE_TYPE c1, c2;
7444
7445 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7446 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7447 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7448 return fold_build2 (MULT_EXPR, type,
7449 TREE_OPERAND (arg0, 0),
7450 build_real (type, c1));
7451 }
7452 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7453 if (flag_unsafe_math_optimizations
7454 && TREE_CODE (arg1) == PLUS_EXPR
7455 && TREE_CODE (arg0) != MULT_EXPR)
7456 {
7457 tree tree10 = TREE_OPERAND (arg1, 0);
7458 tree tree11 = TREE_OPERAND (arg1, 1);
7459 if (TREE_CODE (tree11) == MULT_EXPR
7460 && TREE_CODE (tree10) == MULT_EXPR)
7461 {
7462 tree tree0;
7463 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7464 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7465 }
7466 }
7467 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7468 if (flag_unsafe_math_optimizations
7469 && TREE_CODE (arg0) == PLUS_EXPR
7470 && TREE_CODE (arg1) != MULT_EXPR)
7471 {
7472 tree tree00 = TREE_OPERAND (arg0, 0);
7473 tree tree01 = TREE_OPERAND (arg0, 1);
7474 if (TREE_CODE (tree01) == MULT_EXPR
7475 && TREE_CODE (tree00) == MULT_EXPR)
7476 {
7477 tree tree0;
7478 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7479 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7480 }
7481 }
7482 }
7483
7484 bit_rotate:
7485 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7486 is a rotate of A by C1 bits. */
7487 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7488 is a rotate of A by B bits. */
7489 {
7490 enum tree_code code0, code1;
7491 code0 = TREE_CODE (arg0);
7492 code1 = TREE_CODE (arg1);
7493 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7494 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7495 && operand_equal_p (TREE_OPERAND (arg0, 0),
7496 TREE_OPERAND (arg1, 0), 0)
7497 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7498 {
7499 tree tree01, tree11;
7500 enum tree_code code01, code11;
7501
7502 tree01 = TREE_OPERAND (arg0, 1);
7503 tree11 = TREE_OPERAND (arg1, 1);
7504 STRIP_NOPS (tree01);
7505 STRIP_NOPS (tree11);
7506 code01 = TREE_CODE (tree01);
7507 code11 = TREE_CODE (tree11);
7508 if (code01 == INTEGER_CST
7509 && code11 == INTEGER_CST
7510 && TREE_INT_CST_HIGH (tree01) == 0
7511 && TREE_INT_CST_HIGH (tree11) == 0
7512 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7513 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7514 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7515 code0 == LSHIFT_EXPR ? tree01 : tree11);
7516 else if (code11 == MINUS_EXPR)
7517 {
7518 tree tree110, tree111;
7519 tree110 = TREE_OPERAND (tree11, 0);
7520 tree111 = TREE_OPERAND (tree11, 1);
7521 STRIP_NOPS (tree110);
7522 STRIP_NOPS (tree111);
7523 if (TREE_CODE (tree110) == INTEGER_CST
7524 && 0 == compare_tree_int (tree110,
7525 TYPE_PRECISION
7526 (TREE_TYPE (TREE_OPERAND
7527 (arg0, 0))))
7528 && operand_equal_p (tree01, tree111, 0))
7529 return build2 ((code0 == LSHIFT_EXPR
7530 ? LROTATE_EXPR
7531 : RROTATE_EXPR),
7532 type, TREE_OPERAND (arg0, 0), tree01);
7533 }
7534 else if (code01 == MINUS_EXPR)
7535 {
7536 tree tree010, tree011;
7537 tree010 = TREE_OPERAND (tree01, 0);
7538 tree011 = TREE_OPERAND (tree01, 1);
7539 STRIP_NOPS (tree010);
7540 STRIP_NOPS (tree011);
7541 if (TREE_CODE (tree010) == INTEGER_CST
7542 && 0 == compare_tree_int (tree010,
7543 TYPE_PRECISION
7544 (TREE_TYPE (TREE_OPERAND
7545 (arg0, 0))))
7546 && operand_equal_p (tree11, tree011, 0))
7547 return build2 ((code0 != LSHIFT_EXPR
7548 ? LROTATE_EXPR
7549 : RROTATE_EXPR),
7550 type, TREE_OPERAND (arg0, 0), tree11);
7551 }
7552 }
7553 }
7554
7555 associate:
7556 /* In most languages, can't associate operations on floats through
7557 parentheses. Rather than remember where the parentheses were, we
7558 don't associate floats at all, unless the user has specified
7559 -funsafe-math-optimizations. */
7560
7561 if (! wins
7562 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7563 {
7564 tree var0, con0, lit0, minus_lit0;
7565 tree var1, con1, lit1, minus_lit1;
7566
7567 /* Split both trees into variables, constants, and literals. Then
7568 associate each group together, the constants with literals,
7569 then the result with variables. This increases the chances of
7570 literals being recombined later and of generating relocatable
7571 expressions for the sum of a constant and literal. */
7572 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7573 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7574 code == MINUS_EXPR);
7575
7576 /* Only do something if we found more than two objects. Otherwise,
7577 nothing has changed and we risk infinite recursion. */
7578 if (2 < ((var0 != 0) + (var1 != 0)
7579 + (con0 != 0) + (con1 != 0)
7580 + (lit0 != 0) + (lit1 != 0)
7581 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7582 {
7583 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7584 if (code == MINUS_EXPR)
7585 code = PLUS_EXPR;
7586
7587 var0 = associate_trees (var0, var1, code, type);
7588 con0 = associate_trees (con0, con1, code, type);
7589 lit0 = associate_trees (lit0, lit1, code, type);
7590 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7591
7592 /* Preserve the MINUS_EXPR if the negative part of the literal is
7593 greater than the positive part. Otherwise, the multiplicative
7594 	     folding code (i.e. extract_muldiv) may be fooled in case
7595 unsigned constants are subtracted, like in the following
7596 example: ((X*2 + 4) - 8U)/2. */
7597 if (minus_lit0 && lit0)
7598 {
7599 if (TREE_CODE (lit0) == INTEGER_CST
7600 && TREE_CODE (minus_lit0) == INTEGER_CST
7601 && tree_int_cst_lt (lit0, minus_lit0))
7602 {
7603 minus_lit0 = associate_trees (minus_lit0, lit0,
7604 MINUS_EXPR, type);
7605 lit0 = 0;
7606 }
7607 else
7608 {
7609 lit0 = associate_trees (lit0, minus_lit0,
7610 MINUS_EXPR, type);
7611 minus_lit0 = 0;
7612 }
7613 }
7614 if (minus_lit0)
7615 {
7616 if (con0 == 0)
7617 return fold_convert (type,
7618 associate_trees (var0, minus_lit0,
7619 MINUS_EXPR, type));
7620 else
7621 {
7622 con0 = associate_trees (con0, minus_lit0,
7623 MINUS_EXPR, type);
7624 return fold_convert (type,
7625 associate_trees (var0, con0,
7626 PLUS_EXPR, type));
7627 }
7628 }
7629
7630 con0 = associate_trees (con0, lit0, code, type);
7631 return fold_convert (type, associate_trees (var0, con0,
7632 code, type));
7633 }
7634 }
7635
7636 binary:
7637 if (wins)
7638 t1 = const_binop (code, arg0, arg1, 0);
7639 if (t1 != NULL_TREE)
7640 {
7641 /* The return value should always have
7642 the same type as the original expression. */
7643 if (TREE_TYPE (t1) != type)
7644 t1 = fold_convert (type, t1);
7645
7646 return t1;
7647 }
7648 return NULL_TREE;
7649
7650 case MINUS_EXPR:
7651 /* A - (-B) -> A + B */
7652 if (TREE_CODE (arg1) == NEGATE_EXPR)
7653 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7654 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7655 if (TREE_CODE (arg0) == NEGATE_EXPR
7656 && (FLOAT_TYPE_P (type)
7657 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7658 && negate_expr_p (arg1)
7659 && reorder_operands_p (arg0, arg1))
7660 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7661 TREE_OPERAND (arg0, 0));
7662 /* Convert -A - 1 to ~A. */
7663 if (INTEGRAL_TYPE_P (type)
7664 && TREE_CODE (arg0) == NEGATE_EXPR
7665 && integer_onep (arg1))
7666 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7667
7668 /* Convert -1 - A to ~A. */
7669 if (INTEGRAL_TYPE_P (type)
7670 && integer_all_onesp (arg0))
7671 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7672
7673 if (! FLOAT_TYPE_P (type))
7674 {
7675 if (! wins && integer_zerop (arg0))
7676 return negate_expr (fold_convert (type, arg1));
7677 if (integer_zerop (arg1))
7678 return non_lvalue (fold_convert (type, arg0));
7679
7680 /* Fold A - (A & B) into ~B & A. */
7681 if (!TREE_SIDE_EFFECTS (arg0)
7682 && TREE_CODE (arg1) == BIT_AND_EXPR)
7683 {
7684 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7685 return fold_build2 (BIT_AND_EXPR, type,
7686 fold_build1 (BIT_NOT_EXPR, type,
7687 TREE_OPERAND (arg1, 0)),
7688 arg0);
7689 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7690 return fold_build2 (BIT_AND_EXPR, type,
7691 fold_build1 (BIT_NOT_EXPR, type,
7692 TREE_OPERAND (arg1, 1)),
7693 arg0);
7694 }
7695
7696 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7697 any power of 2 minus 1. */
7698 if (TREE_CODE (arg0) == BIT_AND_EXPR
7699 && TREE_CODE (arg1) == BIT_AND_EXPR
7700 && operand_equal_p (TREE_OPERAND (arg0, 0),
7701 TREE_OPERAND (arg1, 0), 0))
7702 {
7703 tree mask0 = TREE_OPERAND (arg0, 1);
7704 tree mask1 = TREE_OPERAND (arg1, 1);
7705 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7706
7707 if (operand_equal_p (tem, mask1, 0))
7708 {
7709 tem = fold_build2 (BIT_XOR_EXPR, type,
7710 TREE_OPERAND (arg0, 0), mask1);
7711 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7712 }
7713 }
7714 }
7715
7716 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7717 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7718 return non_lvalue (fold_convert (type, arg0));
7719
7720 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7721 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7722 (-ARG1 + ARG0) reduces to -ARG1. */
7723 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7724 return negate_expr (fold_convert (type, arg1));
7725
7726 /* Fold &x - &x. This can happen from &x.foo - &x.
7727 This is unsafe for certain floats even in non-IEEE formats.
7728 In IEEE, it is unsafe because it does wrong for NaNs.
7729 Also note that operand_equal_p is always false if an operand
7730 is volatile. */
7731
7732 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7733 && operand_equal_p (arg0, arg1, 0))
7734 return fold_convert (type, integer_zero_node);
7735
7736 /* A - B -> A + (-B) if B is easily negatable. */
7737 if (!wins && negate_expr_p (arg1)
7738 && ((FLOAT_TYPE_P (type)
7739 /* Avoid this transformation if B is a positive REAL_CST. */
7740 && (TREE_CODE (arg1) != REAL_CST
7741 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7742 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7743 return fold_build2 (PLUS_EXPR, type,
7744 fold_convert (type, arg0),
7745 fold_convert (type, negate_expr (arg1)));
7746
7747 /* Try folding difference of addresses. */
7748 {
7749 HOST_WIDE_INT diff;
7750
7751 if ((TREE_CODE (arg0) == ADDR_EXPR
7752 || TREE_CODE (arg1) == ADDR_EXPR)
7753 && ptr_difference_const (arg0, arg1, &diff))
7754 return build_int_cst_type (type, diff);
7755 }
7756
7757 /* Fold &a[i] - &a[j] to i-j. */
7758 if (TREE_CODE (arg0) == ADDR_EXPR
7759 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7760 && TREE_CODE (arg1) == ADDR_EXPR
7761 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7762 {
7763 tree aref0 = TREE_OPERAND (arg0, 0);
7764 tree aref1 = TREE_OPERAND (arg1, 0);
7765 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7766 TREE_OPERAND (aref1, 0), 0))
7767 {
7768 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7769 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7770 tree esz = array_ref_element_size (aref0);
7771 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7772 return fold_build2 (MULT_EXPR, type, diff,
7773 fold_convert (type, esz));
7774
7775 }
7776 }
7777
7778 	  /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7779 	     of the array.  The loop optimizer sometimes produces this type
7780 	     of expression.  */
7781 if (TREE_CODE (arg0) == ADDR_EXPR)
7782 {
7783 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7784 if (tem)
7785 return fold_convert (type, tem);
7786 }
7787
7788 if (flag_unsafe_math_optimizations
7789 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7790 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7791 && (tem = distribute_real_division (code, type, arg0, arg1)))
7792 return tem;
7793
7794 if (TREE_CODE (arg0) == MULT_EXPR
7795 && TREE_CODE (arg1) == MULT_EXPR
7796 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7797 {
7798 /* (A * C) - (B * C) -> (A-B) * C. */
7799 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7800 TREE_OPERAND (arg1, 1), 0))
7801 return fold_build2 (MULT_EXPR, type,
7802 fold_build2 (MINUS_EXPR, type,
7803 TREE_OPERAND (arg0, 0),
7804 TREE_OPERAND (arg1, 0)),
7805 TREE_OPERAND (arg0, 1));
7806 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7807 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7808 TREE_OPERAND (arg1, 0), 0))
7809 return fold_build2 (MULT_EXPR, type,
7810 TREE_OPERAND (arg0, 0),
7811 fold_build2 (MINUS_EXPR, type,
7812 TREE_OPERAND (arg0, 1),
7813 TREE_OPERAND (arg1, 1)));
7814 }
7815
7816 goto associate;
7817
7818 case MULT_EXPR:
7819 /* (-A) * (-B) -> A * B */
7820 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7821 return fold_build2 (MULT_EXPR, type,
7822 fold_convert (type, TREE_OPERAND (arg0, 0)),
7823 fold_convert (type, negate_expr (arg1)));
7824 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7825 return fold_build2 (MULT_EXPR, type,
7826 fold_convert (type, negate_expr (arg0)),
7827 fold_convert (type, TREE_OPERAND (arg1, 0)));
7828
7829 if (! FLOAT_TYPE_P (type))
7830 {
7831 if (integer_zerop (arg1))
7832 return omit_one_operand (type, arg1, arg0);
7833 if (integer_onep (arg1))
7834 return non_lvalue (fold_convert (type, arg0));
7835 /* Transform x * -1 into -x. */
7836 if (integer_all_onesp (arg1))
7837 return fold_convert (type, negate_expr (arg0));
7838
7839 /* (a * (1 << b)) is (a << b) */
7840 if (TREE_CODE (arg1) == LSHIFT_EXPR
7841 && integer_onep (TREE_OPERAND (arg1, 0)))
7842 return fold_build2 (LSHIFT_EXPR, type, arg0,
7843 TREE_OPERAND (arg1, 1));
7844 if (TREE_CODE (arg0) == LSHIFT_EXPR
7845 && integer_onep (TREE_OPERAND (arg0, 0)))
7846 return fold_build2 (LSHIFT_EXPR, type, arg1,
7847 TREE_OPERAND (arg0, 1));
7848
7849 if (TREE_CODE (arg1) == INTEGER_CST
7850 && 0 != (tem = extract_muldiv (op0,
7851 fold_convert (type, arg1),
7852 code, NULL_TREE)))
7853 return fold_convert (type, tem);
7854
7855 }
7856 else
7857 {
7858 /* Maybe fold x * 0 to 0. The expressions aren't the same
7859 when x is NaN, since x * 0 is also NaN. Nor are they the
7860 same in modes with signed zeros, since multiplying a
7861 negative value by 0 gives -0, not +0. */
7862 /* (TIGCC 20050210) We can do this for UNSIGNED_ZERO even when honoring
7863 signed zeros. */
7864 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7865 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7866 || real_uzerop (arg1))
7867 && real_zerop (arg1))
7868 return omit_one_operand (type, arg1, arg0);
7869 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7870 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7871 && real_onep (arg1))
7872 return non_lvalue (fold_convert (type, arg0));
7873
7874 /* Transform x * -1.0 into -x. */
7875 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7876 && real_minus_onep (arg1))
7877 return fold_convert (type, negate_expr (arg0));
7878
7879 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7880 if (flag_unsafe_math_optimizations
7881 && TREE_CODE (arg0) == RDIV_EXPR
7882 && TREE_CODE (arg1) == REAL_CST
7883 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7884 {
7885 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7886 arg1, 0);
7887 if (tem)
7888 return fold_build2 (RDIV_EXPR, type, tem,
7889 TREE_OPERAND (arg0, 1));
7890 }
7891
7892 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7893 if (operand_equal_p (arg0, arg1, 0))
7894 {
7895 tree tem = fold_strip_sign_ops (arg0);
7896 if (tem != NULL_TREE)
7897 {
7898 tem = fold_convert (type, tem);
7899 return fold_build2 (MULT_EXPR, type, tem, tem);
7900 }
7901 }
7902
7903 if (flag_unsafe_math_optimizations)
7904 {
7905 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7906 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7907
7908 /* Optimizations of root(...)*root(...). */
7909 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7910 {
7911 tree rootfn, arg, arglist;
7912 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7913 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7914
7915 /* Optimize sqrt(x)*sqrt(x) as x. */
7916 if (BUILTIN_SQRT_P (fcode0)
7917 && operand_equal_p (arg00, arg10, 0)
7918 && ! HONOR_SNANS (TYPE_MODE (type)))
7919 return arg00;
7920
7921 /* Optimize root(x)*root(y) as root(x*y). */
7922 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7923 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7924 arglist = build_tree_list (NULL_TREE, arg);
7925 return build_function_call_expr (rootfn, arglist);
7926 }
7927
7928 /* Optimize expN(x)*expN(y) as expN(x+y). */
7929 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7930 {
7931 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7932 tree arg = fold_build2 (PLUS_EXPR, type,
7933 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7934 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7935 tree arglist = build_tree_list (NULL_TREE, arg);
7936 return build_function_call_expr (expfn, arglist);
7937 }
7938
7939 /* Optimizations of pow(...)*pow(...). */
7940 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7941 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7942 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7943 {
7944 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7945 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7946 1)));
7947 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7948 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7949 1)));
7950
7951 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7952 if (operand_equal_p (arg01, arg11, 0))
7953 {
7954 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7955 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7956 tree arglist = tree_cons (NULL_TREE, arg,
7957 build_tree_list (NULL_TREE,
7958 arg01));
7959 return build_function_call_expr (powfn, arglist);
7960 }
7961
7962 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7963 if (operand_equal_p (arg00, arg10, 0))
7964 {
7965 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7966 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7967 tree arglist = tree_cons (NULL_TREE, arg00,
7968 build_tree_list (NULL_TREE,
7969 arg));
7970 return build_function_call_expr (powfn, arglist);
7971 }
7972 }
7973
7974 /* Optimize tan(x)*cos(x) as sin(x). */
7975 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7976 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7977 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7978 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7979 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7980 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7981 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7982 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7983 {
7984 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7985
7986 if (sinfn != NULL_TREE)
7987 return build_function_call_expr (sinfn,
7988 TREE_OPERAND (arg0, 1));
7989 }
7990
7991 /* Optimize x*pow(x,c) as pow(x,c+1). */
7992 if (fcode1 == BUILT_IN_POW
7993 || fcode1 == BUILT_IN_POWF
7994 || fcode1 == BUILT_IN_POWL)
7995 {
7996 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7997 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7998 1)));
7999 if (TREE_CODE (arg11) == REAL_CST
8000 && ! TREE_CONSTANT_OVERFLOW (arg11)
8001 && operand_equal_p (arg0, arg10, 0))
8002 {
8003 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8004 REAL_VALUE_TYPE c;
8005 tree arg, arglist;
8006
8007 c = TREE_REAL_CST (arg11);
8008 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8009 arg = build_real (type, c);
8010 arglist = build_tree_list (NULL_TREE, arg);
8011 arglist = tree_cons (NULL_TREE, arg0, arglist);
8012 return build_function_call_expr (powfn, arglist);
8013 }
8014 }
8015
8016 /* Optimize pow(x,c)*x as pow(x,c+1). */
8017 if (fcode0 == BUILT_IN_POW
8018 || fcode0 == BUILT_IN_POWF
8019 || fcode0 == BUILT_IN_POWL)
8020 {
8021 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8022 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8023 1)));
8024 if (TREE_CODE (arg01) == REAL_CST
8025 && ! TREE_CONSTANT_OVERFLOW (arg01)
8026 && operand_equal_p (arg1, arg00, 0))
8027 {
8028 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8029 REAL_VALUE_TYPE c;
8030 tree arg, arglist;
8031
8032 c = TREE_REAL_CST (arg01);
8033 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8034 arg = build_real (type, c);
8035 arglist = build_tree_list (NULL_TREE, arg);
8036 arglist = tree_cons (NULL_TREE, arg1, arglist);
8037 return build_function_call_expr (powfn, arglist);
8038 }
8039 }
8040
8041 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8042 if (! optimize_size
8043 && operand_equal_p (arg0, arg1, 0))
8044 {
8045 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8046
8047 if (powfn)
8048 {
8049 tree arg = build_real (type, dconst2);
8050 tree arglist = build_tree_list (NULL_TREE, arg);
8051 arglist = tree_cons (NULL_TREE, arg0, arglist);
8052 return build_function_call_expr (powfn, arglist);
8053 }
8054 }
8055 }
8056 }
8057 goto associate;
8058
8059 case BIT_IOR_EXPR:
8060 bit_ior:
8061 if (integer_all_onesp (arg1))
8062 return omit_one_operand (type, arg1, arg0);
8063 if (integer_zerop (arg1))
8064 return non_lvalue (fold_convert (type, arg0));
8065 if (operand_equal_p (arg0, arg1, 0))
8066 return non_lvalue (fold_convert (type, arg0));
8067
8068 /* ~X | X is -1. */
8069 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8070 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8071 {
8072 t1 = build_int_cst (type, -1);
8073 t1 = force_fit_type (t1, 0, false, false);
8074 return omit_one_operand (type, t1, arg1);
8075 }
8076
8077 /* X | ~X is -1. */
8078 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8079 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8080 {
8081 t1 = build_int_cst (type, -1);
8082 t1 = force_fit_type (t1, 0, false, false);
8083 return omit_one_operand (type, t1, arg0);
8084 }
8085
8086 t1 = distribute_bit_expr (code, type, arg0, arg1);
8087 if (t1 != NULL_TREE)
8088 return t1;
8089
8090 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8091
8092 This results in more efficient code for machines without a NAND
8093 instruction. Combine will canonicalize to the first form
8094 which will allow use of NAND instructions provided by the
8095 backend if they exist. */
8096 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8097 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8098 {
8099 return fold_build1 (BIT_NOT_EXPR, type,
8100 build2 (BIT_AND_EXPR, type,
8101 TREE_OPERAND (arg0, 0),
8102 TREE_OPERAND (arg1, 0)));
8103 }
8104
8105 /* See if this can be simplified into a rotate first. If that
8106 is unsuccessful continue in the association code. */
8107 goto bit_rotate;
8108
8109 case BIT_XOR_EXPR:
8110 if (integer_zerop (arg1))
8111 return non_lvalue (fold_convert (type, arg0));
8112 if (integer_all_onesp (arg1))
8113 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8114 if (operand_equal_p (arg0, arg1, 0))
8115 return omit_one_operand (type, integer_zero_node, arg0);
8116
8117 /* ~X ^ X is -1. */
8118 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8119 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8120 {
8121 t1 = build_int_cst (type, -1);
8122 t1 = force_fit_type (t1, 0, false, false);
8123 return omit_one_operand (type, t1, arg1);
8124 }
8125
8126 /* X ^ ~X is -1. */
8127 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8128 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8129 {
8130 t1 = build_int_cst (type, -1);
8131 t1 = force_fit_type (t1, 0, false, false);
8132 return omit_one_operand (type, t1, arg0);
8133 }
8134
8135 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8136 with a constant, and the two constants have no bits in common,
8137 we should treat this as a BIT_IOR_EXPR since this may produce more
8138 simplifications. */
8139 if (TREE_CODE (arg0) == BIT_AND_EXPR
8140 && TREE_CODE (arg1) == BIT_AND_EXPR
8141 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8142 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8143 && integer_zerop (const_binop (BIT_AND_EXPR,
8144 TREE_OPERAND (arg0, 1),
8145 TREE_OPERAND (arg1, 1), 0)))
8146 {
8147 code = BIT_IOR_EXPR;
8148 goto bit_ior;
8149 }
8150
8151 	  /* (X | Y) ^ X -> Y & ~X.  */
8152 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8153 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8154 {
8155 tree t2 = TREE_OPERAND (arg0, 1);
8156 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8157 arg1);
8158 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8159 fold_convert (type, t1));
8160 return t1;
8161 }
8162
8163 	  /* (Y | X) ^ X -> Y & ~X.  */
8164 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8165 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8166 {
8167 tree t2 = TREE_OPERAND (arg0, 0);
8168 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8169 arg1);
8170 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8171 fold_convert (type, t1));
8172 return t1;
8173 }
8174
8175 	  /* X ^ (X | Y) -> Y & ~X.  */
8176 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8177 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8178 {
8179 tree t2 = TREE_OPERAND (arg1, 1);
8180 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8181 arg0);
8182 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8183 fold_convert (type, t1));
8184 return t1;
8185 }
8186
8187 	  /* X ^ (Y | X) -> Y & ~X.  */
8188 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8189 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8190 {
8191 tree t2 = TREE_OPERAND (arg1, 0);
8192 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8193 arg0);
8194 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8195 fold_convert (type, t1));
8196 return t1;
8197 }
8198
8199 /* Convert ~X ^ ~Y to X ^ Y. */
8200 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8201 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8202 return fold_build2 (code, type,
8203 fold_convert (type, TREE_OPERAND (arg0, 0)),
8204 fold_convert (type, TREE_OPERAND (arg1, 0)));
8205
8206 /* See if this can be simplified into a rotate first. If that
8207 is unsuccessful continue in the association code. */
8208 goto bit_rotate;
8209
8210 case BIT_AND_EXPR:
8211 if (integer_all_onesp (arg1))
8212 return non_lvalue (fold_convert (type, arg0));
8213 if (integer_zerop (arg1))
8214 return omit_one_operand (type, arg1, arg0);
8215 if (operand_equal_p (arg0, arg1, 0))
8216 return non_lvalue (fold_convert (type, arg0));
8217
8218 /* ~X & X is always zero. */
8219 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8220 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8221 return omit_one_operand (type, integer_zero_node, arg1);
8222
8223 /* X & ~X is always zero. */
8224 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8225 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8226 return omit_one_operand (type, integer_zero_node, arg0);
8227
8228 t1 = distribute_bit_expr (code, type, arg0, arg1);
8229 if (t1 != NULL_TREE)
8230 return t1;
8231 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8232 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8233 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8234 {
8235 unsigned int prec
8236 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8237
8238 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8239 && (~TREE_INT_CST_LOW (arg1)
8240 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8241 return fold_convert (type, TREE_OPERAND (arg0, 0));
8242 }
8243
8244 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8245
8246 This results in more efficient code for machines without a NOR
8247 instruction. Combine will canonicalize to the first form
8248 which will allow use of NOR instructions provided by the
8249 backend if they exist. */
8250 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8251 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8252 {
8253 return fold_build1 (BIT_NOT_EXPR, type,
8254 build2 (BIT_IOR_EXPR, type,
8255 TREE_OPERAND (arg0, 0),
8256 TREE_OPERAND (arg1, 0)));
8257 }
8258
8259 goto associate;
8260
8261 case RDIV_EXPR:
8262 /* Don't touch a floating-point divide by zero unless the mode
8263 of the constant can represent infinity. */
8264 if (TREE_CODE (arg1) == REAL_CST
8265 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8266 && real_zerop (arg1))
8267 return NULL_TREE;
8268
8269 /* (-A) / (-B) -> A / B */
8270 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8271 return fold_build2 (RDIV_EXPR, type,
8272 TREE_OPERAND (arg0, 0),
8273 negate_expr (arg1));
8274 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8275 return fold_build2 (RDIV_EXPR, type,
8276 negate_expr (arg0),
8277 TREE_OPERAND (arg1, 0));
8278
8279 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8280 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8281 && real_onep (arg1))
8282 return non_lvalue (fold_convert (type, arg0));
8283
8284 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8285 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8286 && real_minus_onep (arg1))
8287 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8288
8289 /* If ARG1 is a constant, we can convert this to a multiply by the
8290 reciprocal. This does not have the same rounding properties,
8291 so only do this if -funsafe-math-optimizations. We can actually
8292 always safely do it if ARG1 is a power of two, but it's hard to
8293 tell if it is or not in a portable manner. */
8294 if (TREE_CODE (arg1) == REAL_CST)
8295 {
8296 if (flag_unsafe_math_optimizations
8297 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8298 arg1, 0)))
8299 return fold_build2 (MULT_EXPR, type, arg0, tem);
8300 /* Find the reciprocal if optimizing and the result is exact. */
8301 if (optimize)
8302 {
8303 REAL_VALUE_TYPE r;
8304 r = TREE_REAL_CST (arg1);
8305 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8306 {
8307 tem = build_real (type, r);
8308 return fold_build2 (MULT_EXPR, type,
8309 fold_convert (type, arg0), tem);
8310 }
8311 }
8312 }
8313 /* Convert A/B/C to A/(B*C). */
8314 if (flag_unsafe_math_optimizations
8315 && TREE_CODE (arg0) == RDIV_EXPR)
8316 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8317 fold_build2 (MULT_EXPR, type,
8318 TREE_OPERAND (arg0, 1), arg1));
8319
8320 /* Convert A/(B/C) to (A/B)*C. */
8321 if (flag_unsafe_math_optimizations
8322 && TREE_CODE (arg1) == RDIV_EXPR)
8323 return fold_build2 (MULT_EXPR, type,
8324 fold_build2 (RDIV_EXPR, type, arg0,
8325 TREE_OPERAND (arg1, 0)),
8326 TREE_OPERAND (arg1, 1));
8327
8328 /* Convert C1/(X*C2) into (C1/C2)/X. */
8329 if (flag_unsafe_math_optimizations
8330 && TREE_CODE (arg1) == MULT_EXPR
8331 && TREE_CODE (arg0) == REAL_CST
8332 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8333 {
8334 tree tem = const_binop (RDIV_EXPR, arg0,
8335 TREE_OPERAND (arg1, 1), 0);
8336 if (tem)
8337 return fold_build2 (RDIV_EXPR, type, tem,
8338 TREE_OPERAND (arg1, 0));
8339 }
8340
8341 if (flag_unsafe_math_optimizations)
8342 {
8343 enum built_in_function fcode = builtin_mathfn_code (arg1);
8344 /* Optimize x/expN(y) into x*expN(-y). */
8345 if (BUILTIN_EXPONENT_P (fcode))
8346 {
8347 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8348 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8349 tree arglist = build_tree_list (NULL_TREE,
8350 fold_convert (type, arg));
8351 arg1 = build_function_call_expr (expfn, arglist);
8352 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8353 }
8354
8355 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8356 if (fcode == BUILT_IN_POW
8357 || fcode == BUILT_IN_POWF
8358 || fcode == BUILT_IN_POWL)
8359 {
8360 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8361 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8362 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8363 tree neg11 = fold_convert (type, negate_expr (arg11));
8364 tree arglist = tree_cons(NULL_TREE, arg10,
8365 build_tree_list (NULL_TREE, neg11));
8366 arg1 = build_function_call_expr (powfn, arglist);
8367 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8368 }
8369 }
8370
8371 if (flag_unsafe_math_optimizations)
8372 {
8373 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8374 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8375
8376 /* Optimize sin(x)/cos(x) as tan(x). */
8377 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8378 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8379 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8380 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8381 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8382 {
8383 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8384
8385 if (tanfn != NULL_TREE)
8386 return build_function_call_expr (tanfn,
8387 TREE_OPERAND (arg0, 1));
8388 }
8389
8390 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8391 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8392 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8393 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8394 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8395 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8396 {
8397 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8398
8399 if (tanfn != NULL_TREE)
8400 {
8401 tree tmp = TREE_OPERAND (arg0, 1);
8402 tmp = build_function_call_expr (tanfn, tmp);
8403 return fold_build2 (RDIV_EXPR, type,
8404 build_real (type, dconst1), tmp);
8405 }
8406 }
8407
8408 /* Optimize pow(x,c)/x as pow(x,c-1). */
8409 if (fcode0 == BUILT_IN_POW
8410 || fcode0 == BUILT_IN_POWF
8411 || fcode0 == BUILT_IN_POWL)
8412 {
8413 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8414 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8415 if (TREE_CODE (arg01) == REAL_CST
8416 && ! TREE_CONSTANT_OVERFLOW (arg01)
8417 && operand_equal_p (arg1, arg00, 0))
8418 {
8419 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8420 REAL_VALUE_TYPE c;
8421 tree arg, arglist;
8422
8423 c = TREE_REAL_CST (arg01);
8424 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8425 arg = build_real (type, c);
8426 arglist = build_tree_list (NULL_TREE, arg);
8427 arglist = tree_cons (NULL_TREE, arg1, arglist);
8428 return build_function_call_expr (powfn, arglist);
8429 }
8430 }
8431 }
8432 goto binary;
8433
8434 case TRUNC_DIV_EXPR:
8435 case ROUND_DIV_EXPR:
8436 case FLOOR_DIV_EXPR:
8437 case CEIL_DIV_EXPR:
8438 case EXACT_DIV_EXPR:
8439 if (integer_onep (arg1))
8440 return non_lvalue (fold_convert (type, arg0));
8441 if (integer_zerop (arg1))
8442 return NULL_TREE;
8443 /* X / -1 is -X. */
8444 if (!TYPE_UNSIGNED (type)
8445 && TREE_CODE (arg1) == INTEGER_CST
8446 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8447 && TREE_INT_CST_HIGH (arg1) == -1)
8448 return fold_convert (type, negate_expr (arg0));
8449
8450 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8451 operation, EXACT_DIV_EXPR.
8452
8453 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8454 At one time others generated faster code, it's not clear if they do
8455 after the last round to changes to the DIV code in expmed.c. */
8456 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8457 && multiple_of_p (type, arg0, arg1))
8458 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8459
8460 if (TREE_CODE (arg1) == INTEGER_CST
8461 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8462 return fold_convert (type, tem);
8463
8464 goto binary;
8465
8466 case CEIL_MOD_EXPR:
8467 case FLOOR_MOD_EXPR:
8468 case ROUND_MOD_EXPR:
8469 case TRUNC_MOD_EXPR:
8470 /* X % 1 is always zero, but be sure to preserve any side
8471 effects in X. */
8472 if (integer_onep (arg1))
8473 return omit_one_operand (type, integer_zero_node, arg0);
8474
8475 /* X % 0, return X % 0 unchanged so that we can get the
8476 proper warnings and errors. */
8477 if (integer_zerop (arg1))
8478 return NULL_TREE;
8479
8480 /* 0 % X is always zero, but be sure to preserve any side
8481 effects in X. Place this after checking for X == 0. */
8482 if (integer_zerop (arg0))
8483 return omit_one_operand (type, integer_zero_node, arg1);
8484
8485 /* X % -1 is zero. */
8486 if (!TYPE_UNSIGNED (type)
8487 && TREE_CODE (arg1) == INTEGER_CST
8488 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8489 && TREE_INT_CST_HIGH (arg1) == -1)
8490 return omit_one_operand (type, integer_zero_node, arg0);
8491
8492 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8493 i.e. "X % C" into "X & C2", if X and C are positive. */
8494 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8495 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8496 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8497 {
8498 unsigned HOST_WIDE_INT high, low;
8499 tree mask;
8500 int l;
8501
8502 l = tree_log2 (arg1);
8503 if (l >= HOST_BITS_PER_WIDE_INT)
8504 {
8505 high = ((unsigned HOST_WIDE_INT) 1
8506 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8507 low = -1;
8508 }
8509 else
8510 {
8511 high = 0;
8512 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8513 }
8514
8515 mask = build_int_cst_wide (type, low, high);
8516 return fold_build2 (BIT_AND_EXPR, type,
8517 fold_convert (type, arg0), mask);
8518 }
8519
8520 /* X % -C is the same as X % C. */
8521 if (code == TRUNC_MOD_EXPR
8522 && !TYPE_UNSIGNED (type)
8523 && TREE_CODE (arg1) == INTEGER_CST
8524 && !TREE_CONSTANT_OVERFLOW (arg1)
8525 && TREE_INT_CST_HIGH (arg1) < 0
8526 && !flag_trapv
8527 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8528 && !sign_bit_p (arg1, arg1))
8529 return fold_build2 (code, type, fold_convert (type, arg0),
8530 fold_convert (type, negate_expr (arg1)));
8531
8532 /* X % -Y is the same as X % Y. */
8533 if (code == TRUNC_MOD_EXPR
8534 && !TYPE_UNSIGNED (type)
8535 && TREE_CODE (arg1) == NEGATE_EXPR
8536 && !flag_trapv)
8537 return fold_build2 (code, type, fold_convert (type, arg0),
8538 fold_convert (type, TREE_OPERAND (arg1, 0)));
8539
8540 if (TREE_CODE (arg1) == INTEGER_CST
8541 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8542 return fold_convert (type, tem);
8543
8544 goto binary;
8545
8546 case LROTATE_EXPR:
8547 case RROTATE_EXPR:
8548 if (integer_all_onesp (arg0))
8549 return omit_one_operand (type, arg0, arg1);
8550 goto shift;
8551
8552 case RSHIFT_EXPR:
8553 /* Optimize -1 >> x for arithmetic right shifts. */
8554 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8555 return omit_one_operand (type, arg0, arg1);
8556 /* ... fall through ... */
8557
8558 case LSHIFT_EXPR:
8559 shift:
8560 if (integer_zerop (arg1))
8561 return non_lvalue (fold_convert (type, arg0));
8562 if (integer_zerop (arg0))
8563 return omit_one_operand (type, arg0, arg1);
8564
8565 /* Since negative shift count is not well-defined,
8566 don't try to compute it in the compiler. */
8567 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8568 return NULL_TREE;
8569
8570 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
8571 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
8572 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8573 && host_integerp (TREE_OPERAND (arg0, 1), false)
8574 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8575 {
8576 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8577 + TREE_INT_CST_LOW (arg1));
8578
8579 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8580 being well defined. */
8581 if (low >= TYPE_PRECISION (type))
8582 {
8583 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8584 low = low % TYPE_PRECISION (type);
8585 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8586 return build_int_cst (type, 0);
8587 else
8588 low = TYPE_PRECISION (type) - 1;
8589 }
8590
8591 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8592 build_int_cst (type, low));
8593 }
8594
8595 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8596 into x & ((unsigned)-1 >> c) for unsigned types. */
8597 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8598 || (TYPE_UNSIGNED (type)
8599 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8600 && host_integerp (arg1, false)
8601 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8602 && host_integerp (TREE_OPERAND (arg0, 1), false)
8603 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8604 {
8605 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8606 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8607 tree lshift;
8608 tree arg00;
8609
8610 if (low0 == low1)
8611 {
8612 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8613
8614 lshift = build_int_cst (type, -1);
8615 lshift = int_const_binop (code, lshift, arg1, 0);
8616
8617 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
8618 }
8619 }
8620
8621 /* Rewrite an LROTATE_EXPR by a constant into an
8622 RROTATE_EXPR by a new constant. */
8623 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8624 {
8625 tree tem = build_int_cst (NULL_TREE,
8626 GET_MODE_BITSIZE (TYPE_MODE (type)));
8627 tem = fold_convert (TREE_TYPE (arg1), tem);
8628 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8629 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8630 }
8631
8632 /* If we have a rotate of a bit operation with the rotate count and
8633 the second operand of the bit operation both constant,
8634 permute the two operations. */
8635 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8636 && (TREE_CODE (arg0) == BIT_AND_EXPR
8637 || TREE_CODE (arg0) == BIT_IOR_EXPR
8638 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8639 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8640 return fold_build2 (TREE_CODE (arg0), type,
8641 fold_build2 (code, type,
8642 TREE_OPERAND (arg0, 0), arg1),
8643 fold_build2 (code, type,
8644 TREE_OPERAND (arg0, 1), arg1));
8645
8646 /* Two consecutive rotates adding up to the width of the mode can
8647 be ignored. */
8648 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8649 && TREE_CODE (arg0) == RROTATE_EXPR
8650 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8651 && TREE_INT_CST_HIGH (arg1) == 0
8652 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8653 && ((TREE_INT_CST_LOW (arg1)
8654 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8655 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8656 return TREE_OPERAND (arg0, 0);
8657
8658 goto binary;
8659
8660 case MIN_EXPR:
8661 if (operand_equal_p (arg0, arg1, 0))
8662 return omit_one_operand (type, arg0, arg1);
8663 if (INTEGRAL_TYPE_P (type)
8664 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8665 return omit_one_operand (type, arg1, arg0);
8666 goto associate;
8667
8668 case MAX_EXPR:
8669 if (operand_equal_p (arg0, arg1, 0))
8670 return omit_one_operand (type, arg0, arg1);
8671 if (INTEGRAL_TYPE_P (type)
8672 && TYPE_MAX_VALUE (type)
8673 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8674 return omit_one_operand (type, arg1, arg0);
8675 goto associate;
8676
8677 case TRUTH_ANDIF_EXPR:
8678 /* Note that the operands of this must be ints
8679 and their values must be 0 or 1.
8680 ("true" is a fixed value perhaps depending on the language.) */
8681 /* If first arg is constant zero, return it. */
8682 if (integer_zerop (arg0))
8683 return fold_convert (type, arg0);
8684 case TRUTH_AND_EXPR:
8685 /* If either arg is constant true, drop it. */
8686 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8687 return non_lvalue (fold_convert (type, arg1));
8688 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8689 /* Preserve sequence points. */
8690 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8691 return non_lvalue (fold_convert (type, arg0));
8692 /* If second arg is constant zero, result is zero, but first arg
8693 must be evaluated. */
8694 if (integer_zerop (arg1))
8695 return omit_one_operand (type, arg1, arg0);
8696 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8697 case will be handled here. */
8698 if (integer_zerop (arg0))
8699 return omit_one_operand (type, arg0, arg1);
8700
8701 /* !X && X is always false. */
8702 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8703 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8704 return omit_one_operand (type, integer_zero_node, arg1);
8705 /* X && !X is always false. */
8706 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8707 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8708 return omit_one_operand (type, integer_zero_node, arg0);
8709
8710 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8711 means A >= Y && A != MAX, but in this case we know that
8712 A < X <= MAX. */
8713
8714 if (!TREE_SIDE_EFFECTS (arg0)
8715 && !TREE_SIDE_EFFECTS (arg1))
8716 {
8717 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8718 if (tem && !operand_equal_p (tem, arg0, 0))
8719 return fold_build2 (code, type, tem, arg1);
8720
8721 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8722 if (tem && !operand_equal_p (tem, arg1, 0))
8723 return fold_build2 (code, type, arg0, tem);
8724 }
8725
8726 truth_andor:
8727 /* We only do these simplifications if we are optimizing. */
8728 if (!optimize)
8729 return NULL_TREE;
8730
8731 /* Check for things like (A || B) && (A || C). We can convert this
8732 to A || (B && C). Note that either operator can be any of the four
8733 truth and/or operations and the transformation will still be
8734 valid. Also note that we only care about order for the
8735 ANDIF and ORIF operators. If B contains side effects, this
8736 might change the truth-value of A. */
8737 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8738 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8739 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8740 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8741 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8742 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8743 {
8744 tree a00 = TREE_OPERAND (arg0, 0);
8745 tree a01 = TREE_OPERAND (arg0, 1);
8746 tree a10 = TREE_OPERAND (arg1, 0);
8747 tree a11 = TREE_OPERAND (arg1, 1);
8748 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8749 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8750 && (code == TRUTH_AND_EXPR
8751 || code == TRUTH_OR_EXPR));
8752
8753 if (operand_equal_p (a00, a10, 0))
8754 return fold_build2 (TREE_CODE (arg0), type, a00,
8755 fold_build2 (code, type, a01, a11));
8756 else if (commutative && operand_equal_p (a00, a11, 0))
8757 return fold_build2 (TREE_CODE (arg0), type, a00,
8758 fold_build2 (code, type, a01, a10));
8759 else if (commutative && operand_equal_p (a01, a10, 0))
8760 return fold_build2 (TREE_CODE (arg0), type, a01,
8761 fold_build2 (code, type, a00, a11));
8762
8763 /* This case is tricky because we must either have commutative
8764 operators or else A10 must not have side-effects. */
8765
8766 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8767 && operand_equal_p (a01, a11, 0))
8768 return fold_build2 (TREE_CODE (arg0), type,
8769 fold_build2 (code, type, a00, a10),
8770 a01);
8771 }
8772
8773 /* See if we can build a range comparison. */
8774 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8775 return tem;
8776
8777 /* Check for the possibility of merging component references. If our
8778 lhs is another similar operation, try to merge its rhs with our
8779 rhs. Then try to merge our lhs and rhs. */
8780 if (TREE_CODE (arg0) == code
8781 && 0 != (tem = fold_truthop (code, type,
8782 TREE_OPERAND (arg0, 1), arg1)))
8783 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8784
8785 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8786 return tem;
8787
8788 return NULL_TREE;
8789
8790 case TRUTH_ORIF_EXPR:
8791 /* Note that the operands of this must be ints
8792 and their values must be 0 or true.
8793 ("true" is a fixed value perhaps depending on the language.) */
8794 /* If first arg is constant true, return it. */
8795 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8796 return fold_convert (type, arg0);
8797 case TRUTH_OR_EXPR:
8798 /* If either arg is constant zero, drop it. */
8799 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8800 return non_lvalue (fold_convert (type, arg1));
8801 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8802 /* Preserve sequence points. */
8803 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8804 return non_lvalue (fold_convert (type, arg0));
8805 /* If second arg is constant true, result is true, but we must
8806 evaluate first arg. */
8807 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8808 return omit_one_operand (type, arg1, arg0);
8809 /* Likewise for first arg, but note this only occurs here for
8810 TRUTH_OR_EXPR. */
8811 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8812 return omit_one_operand (type, arg0, arg1);
8813
8814 /* !X || X is always true. */
8815 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8816 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8817 return omit_one_operand (type, integer_one_node, arg1);
8818 /* X || !X is always true. */
8819 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8820 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8821 return omit_one_operand (type, integer_one_node, arg0);
8822
8823 goto truth_andor;
8824
8825 case TRUTH_XOR_EXPR:
8826 /* If the second arg is constant zero, drop it. */
8827 if (integer_zerop (arg1))
8828 return non_lvalue (fold_convert (type, arg0));
8829 /* If the second arg is constant true, this is a logical inversion. */
8830 if (integer_onep (arg1))
8831 {
8832 /* Only call invert_truthvalue if operand is a truth value. */
8833 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8834 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8835 else
8836 tem = invert_truthvalue (arg0);
8837 return non_lvalue (fold_convert (type, tem));
8838 }
8839 /* Identical arguments cancel to zero. */
8840 if (operand_equal_p (arg0, arg1, 0))
8841 return omit_one_operand (type, integer_zero_node, arg0);
8842
8843 /* !X ^ X is always true. */
8844 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8845 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8846 return omit_one_operand (type, integer_one_node, arg1);
8847
8848 /* X ^ !X is always true. */
8849 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8850 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8851 return omit_one_operand (type, integer_one_node, arg0);
8852
8853 return NULL_TREE;
8854
8855 case EQ_EXPR:
8856 case NE_EXPR:
8857 case LT_EXPR:
8858 case GT_EXPR:
8859 case LE_EXPR:
8860 case GE_EXPR:
8861 /* If one arg is a real or integer constant, put it last. */
8862 if (tree_swap_operands_p (arg0, arg1, true))
8863 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8864
8865 /* bool_var != 0 becomes bool_var. */
8866 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8867 && code == NE_EXPR)
8868 return non_lvalue (fold_convert (type, arg0));
8869
8870 /* bool_var == 1 becomes bool_var. */
8871 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8872 && code == EQ_EXPR)
8873 return non_lvalue (fold_convert (type, arg0));
8874
8875 /* If this is an equality comparison of the address of a non-weak
8876 object against zero, then we know the result. */
8877 if ((code == EQ_EXPR || code == NE_EXPR)
8878 && TREE_CODE (arg0) == ADDR_EXPR
8879 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8880 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8881 && integer_zerop (arg1))
8882 return constant_boolean_node (code != EQ_EXPR, type);
8883
8884 /* If this is an equality comparison of the address of two non-weak,
8885 unaliased symbols neither of which are extern (since we do not
8886 have access to attributes for externs), then we know the result. */
8887 if ((code == EQ_EXPR || code == NE_EXPR)
8888 && TREE_CODE (arg0) == ADDR_EXPR
8889 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8890 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8891 && ! lookup_attribute ("alias",
8892 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8893 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8894 && TREE_CODE (arg1) == ADDR_EXPR
8895 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
8896 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8897 && ! lookup_attribute ("alias",
8898 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8899 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8900 {
8901 /* We know that we're looking at the address of two
8902 non-weak, unaliased, static _DECL nodes.
8903
8904 It is both wasteful and incorrect to call operand_equal_p
8905 to compare the two ADDR_EXPR nodes. It is wasteful in that
8906 all we need to do is test pointer equality for the arguments
8907 to the two ADDR_EXPR nodes. It is incorrect to use
8908 operand_equal_p as that function is NOT equivalent to a
8909 C equality test. It can in fact return false for two
8910 objects which would test as equal using the C equality
8911 operator. */
8912 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
8913 return constant_boolean_node (equal
8914 ? code == EQ_EXPR : code != EQ_EXPR,
8915 type);
8916 }
8917
8918 /* If this is a comparison of two exprs that look like an
8919 ARRAY_REF of the same object, then we can fold this to a
8920 comparison of the two offsets. This is only safe for
8921 EQ_EXPR and NE_EXPR because of overflow issues. */
8922 if (code == EQ_EXPR || code == NE_EXPR)
8923 {
8924 tree base0, offset0, base1, offset1;
8925
8926 if (extract_array_ref (arg0, &base0, &offset0)
8927 && extract_array_ref (arg1, &base1, &offset1)
8928 && operand_equal_p (base0, base1, 0))
8929 {
8930 /* Handle no offsets on both sides specially. */
8931 if (offset0 == NULL_TREE
8932 && offset1 == NULL_TREE)
8933 return fold_build2 (code, type, integer_zero_node,
8934 integer_zero_node);
8935
8936 if (!offset0 || !offset1
8937 || TREE_TYPE (offset0) == TREE_TYPE (offset1))
8938 {
8939 if (offset0 == NULL_TREE)
8940 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8941 if (offset1 == NULL_TREE)
8942 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8943 return fold_build2 (code, type, offset0, offset1);
8944 }
8945 }
8946 }
8947
8948 /* Transform comparisons of the form X +- C CMP X. */
8949 if ((code != EQ_EXPR && code != NE_EXPR)
8950 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8951 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8952 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8953 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
8954 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8955 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8956 && !(flag_wrapv || flag_trapv))))
8957 {
8958 tree arg01 = TREE_OPERAND (arg0, 1);
8959 enum tree_code code0 = TREE_CODE (arg0);
8960 int is_positive;
8961
8962 if (TREE_CODE (arg01) == REAL_CST)
8963 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
8964 else
8965 is_positive = tree_int_cst_sgn (arg01);
8966
8967 /* (X - c) > X becomes false. */
8968 if (code == GT_EXPR
8969 && ((code0 == MINUS_EXPR && is_positive >= 0)
8970 || (code0 == PLUS_EXPR && is_positive <= 0)))
8971 return constant_boolean_node (0, type);
8972
8973 /* Likewise (X + c) < X becomes false. */
8974 if (code == LT_EXPR
8975 && ((code0 == PLUS_EXPR && is_positive >= 0)
8976 || (code0 == MINUS_EXPR && is_positive <= 0)))
8977 return constant_boolean_node (0, type);
8978
8979 /* Convert (X - c) <= X to true. */
8980 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8981 && code == LE_EXPR
8982 && ((code0 == MINUS_EXPR && is_positive >= 0)
8983 || (code0 == PLUS_EXPR && is_positive <= 0)))
8984 return constant_boolean_node (1, type);
8985
8986 /* Convert (X + c) >= X to true. */
8987 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8988 && code == GE_EXPR
8989 && ((code0 == PLUS_EXPR && is_positive >= 0)
8990 || (code0 == MINUS_EXPR && is_positive <= 0)))
8991 return constant_boolean_node (1, type);
8992
8993 if (TREE_CODE (arg01) == INTEGER_CST)
8994 {
8995 /* Convert X + c > X and X - c < X to true for integers. */
8996 if (code == GT_EXPR
8997 && ((code0 == PLUS_EXPR && is_positive > 0)
8998 || (code0 == MINUS_EXPR && is_positive < 0)))
8999 return constant_boolean_node (1, type);
9000
9001 if (code == LT_EXPR
9002 && ((code0 == MINUS_EXPR && is_positive > 0)
9003 || (code0 == PLUS_EXPR && is_positive < 0)))
9004 return constant_boolean_node (1, type);
9005
9006 /* Convert X + c <= X and X - c >= X to false for integers. */
9007 if (code == LE_EXPR
9008 && ((code0 == PLUS_EXPR && is_positive > 0)
9009 || (code0 == MINUS_EXPR && is_positive < 0)))
9010 return constant_boolean_node (0, type);
9011
9012 if (code == GE_EXPR
9013 && ((code0 == MINUS_EXPR && is_positive > 0)
9014 || (code0 == PLUS_EXPR && is_positive < 0)))
9015 return constant_boolean_node (0, type);
9016 }
9017 }
9018
9019 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
9020 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9021 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9022 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9023 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9024 && !(flag_wrapv || flag_trapv))
9025 && (TREE_CODE (arg1) == INTEGER_CST
9026 && !TREE_OVERFLOW (arg1)))
9027 {
9028 tree const1 = TREE_OPERAND (arg0, 1);
9029 tree const2 = arg1;
9030 tree variable = TREE_OPERAND (arg0, 0);
9031 tree lhs;
9032 int lhs_add;
9033 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9034
9035 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
9036 TREE_TYPE (arg1), const2, const1);
9037 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9038 && (TREE_CODE (lhs) != INTEGER_CST
9039 || !TREE_OVERFLOW (lhs)))
9040 return fold_build2 (code, type, variable, lhs);
9041 }
9042
9043 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9044 {
9045 tree targ0 = strip_float_extensions (arg0);
9046 tree targ1 = strip_float_extensions (arg1);
9047 tree newtype = TREE_TYPE (targ0);
9048
9049 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9050 newtype = TREE_TYPE (targ1);
9051
9052 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9053 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9054 return fold_build2 (code, type, fold_convert (newtype, targ0),
9055 fold_convert (newtype, targ1));
9056
9057 /* (-a) CMP (-b) -> b CMP a */
9058 if (TREE_CODE (arg0) == NEGATE_EXPR
9059 && TREE_CODE (arg1) == NEGATE_EXPR)
9060 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9061 TREE_OPERAND (arg0, 0));
9062
9063 if (TREE_CODE (arg1) == REAL_CST)
9064 {
9065 REAL_VALUE_TYPE cst;
9066 cst = TREE_REAL_CST (arg1);
9067
9068 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9069 if (TREE_CODE (arg0) == NEGATE_EXPR)
9070 return
9071 fold_build2 (swap_tree_comparison (code), type,
9072 TREE_OPERAND (arg0, 0),
9073 build_real (TREE_TYPE (arg1),
9074 REAL_VALUE_NEGATE (cst)));
9075
9076 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9077 /* a CMP (-0) -> a CMP 0 */
9078 if (REAL_VALUE_MINUS_ZERO (cst))
9079 return fold_build2 (code, type, arg0,
9080 build_real (TREE_TYPE (arg1), dconst0));
9081
9082 /* x != NaN is always true, other ops are always false. */
9083 if (REAL_VALUE_ISNAN (cst)
9084 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9085 {
9086 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9087 return omit_one_operand (type, tem, arg0);
9088 }
9089
9090 /* Fold comparisons against infinity. */
9091 if (REAL_VALUE_ISINF (cst))
9092 {
9093 tem = fold_inf_compare (code, type, arg0, arg1);
9094 if (tem != NULL_TREE)
9095 return tem;
9096 }
9097 }
9098
9099 /* If this is a comparison of a real constant with a PLUS_EXPR
9100 or a MINUS_EXPR of a real constant, we can convert it into a
9101 comparison with a revised real constant as long as no overflow
9102 occurs when unsafe_math_optimizations are enabled. */
9103 if (flag_unsafe_math_optimizations
9104 && TREE_CODE (arg1) == REAL_CST
9105 && (TREE_CODE (arg0) == PLUS_EXPR
9106 || TREE_CODE (arg0) == MINUS_EXPR)
9107 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9108 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9109 ? MINUS_EXPR : PLUS_EXPR,
9110 arg1, TREE_OPERAND (arg0, 1), 0))
9111 && ! TREE_CONSTANT_OVERFLOW (tem))
9112 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9113
9114 /* Likewise, we can simplify a comparison of a real constant with
9115 a MINUS_EXPR whose first operand is also a real constant, i.e.
9116 (c1 - x) < c2 becomes x > c1-c2. */
9117 if (flag_unsafe_math_optimizations
9118 && TREE_CODE (arg1) == REAL_CST
9119 && TREE_CODE (arg0) == MINUS_EXPR
9120 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9121 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9122 arg1, 0))
9123 && ! TREE_CONSTANT_OVERFLOW (tem))
9124 return fold_build2 (swap_tree_comparison (code), type,
9125 TREE_OPERAND (arg0, 1), tem);
9126
9127 /* Fold comparisons against built-in math functions. */
9128 if (TREE_CODE (arg1) == REAL_CST
9129 && flag_unsafe_math_optimizations
9130 && ! flag_errno_math)
9131 {
9132 enum built_in_function fcode = builtin_mathfn_code (arg0);
9133
9134 if (fcode != END_BUILTINS)
9135 {
9136 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9137 if (tem != NULL_TREE)
9138 return tem;
9139 }
9140 }
9141 }
9142
9143 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9144 if (TREE_CONSTANT (arg1)
9145 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9146 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9147 /* This optimization is invalid for ordered comparisons
9148 if CONST+INCR overflows or if foo+incr might overflow.
9149 This optimization is invalid for floating point due to rounding.
9150 For pointer types we assume overflow doesn't happen. */
9151 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9152 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9153 && (code == EQ_EXPR || code == NE_EXPR))))
9154 {
9155 tree varop, newconst;
9156
9157 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9158 {
9159 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9160 arg1, TREE_OPERAND (arg0, 1));
9161 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9162 TREE_OPERAND (arg0, 0),
9163 TREE_OPERAND (arg0, 1));
9164 }
9165 else
9166 {
9167 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9168 arg1, TREE_OPERAND (arg0, 1));
9169 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9170 TREE_OPERAND (arg0, 0),
9171 TREE_OPERAND (arg0, 1));
9172 }
9173
9174
9175 /* If VAROP is a reference to a bitfield, we must mask
9176 the constant by the width of the field. */
9177 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9178 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9179 && host_integerp (DECL_SIZE (TREE_OPERAND
9180 (TREE_OPERAND (varop, 0), 1)), 1))
9181 {
9182 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9183 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9184 tree folded_compare, shift;
9185
9186 /* First check whether the comparison would come out
9187 always the same. If we don't do that we would
9188 change the meaning with the masking. */
9189 folded_compare = fold_build2 (code, type,
9190 TREE_OPERAND (varop, 0), arg1);
9191 if (integer_zerop (folded_compare)
9192 || integer_onep (folded_compare))
9193 return omit_one_operand (type, folded_compare, varop);
9194
9195 shift = build_int_cst (NULL_TREE,
9196 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9197 shift = fold_convert (TREE_TYPE (varop), shift);
9198 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9199 newconst, shift);
9200 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9201 newconst, shift);
9202 }
9203
9204 return fold_build2 (code, type, varop, newconst);
9205 }
9206
9207 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9208 This transformation affects the cases which are handled in later
9209 optimizations involving comparisons with non-negative constants. */
9210 if (TREE_CODE (arg1) == INTEGER_CST
9211 && TREE_CODE (arg0) != INTEGER_CST
9212 && tree_int_cst_sgn (arg1) > 0)
9213 {
9214 switch (code)
9215 {
9216 case GE_EXPR:
9217 arg1 = const_binop (MINUS_EXPR, arg1,
9218 build_int_cst (TREE_TYPE (arg1), 1), 0);
9219 return fold_build2 (GT_EXPR, type, arg0,
9220 fold_convert (TREE_TYPE (arg0), arg1));
9221
9222 case LT_EXPR:
9223 arg1 = const_binop (MINUS_EXPR, arg1,
9224 build_int_cst (TREE_TYPE (arg1), 1), 0);
9225 return fold_build2 (LE_EXPR, type, arg0,
9226 fold_convert (TREE_TYPE (arg0), arg1));
9227
9228 default:
9229 break;
9230 }
9231 }
9232
9233 /* Comparisons with the highest or lowest possible integer of
9234 the specified size will have known values. */
9235 {
9236 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9237
9238 if (TREE_CODE (arg1) == INTEGER_CST
9239 && ! TREE_CONSTANT_OVERFLOW (arg1)
9240 && width <= 2 * HOST_BITS_PER_WIDE_INT
9241 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9242 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9243 {
9244 HOST_WIDE_INT signed_max_hi;
9245 unsigned HOST_WIDE_INT signed_max_lo;
9246 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9247
9248 if (width <= HOST_BITS_PER_WIDE_INT)
9249 {
9250 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9251 - 1;
9252 signed_max_hi = 0;
9253 max_hi = 0;
9254
9255 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9256 {
9257 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9258 min_lo = 0;
9259 min_hi = 0;
9260 }
9261 else
9262 {
9263 max_lo = signed_max_lo;
9264 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9265 min_hi = -1;
9266 }
9267 }
9268 else
9269 {
9270 width -= HOST_BITS_PER_WIDE_INT;
9271 signed_max_lo = -1;
9272 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9273 - 1;
9274 max_lo = -1;
9275 min_lo = 0;
9276
9277 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9278 {
9279 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9280 min_hi = 0;
9281 }
9282 else
9283 {
9284 max_hi = signed_max_hi;
9285 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9286 }
9287 }
9288
9289 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9290 && TREE_INT_CST_LOW (arg1) == max_lo)
9291 switch (code)
9292 {
9293 case GT_EXPR:
9294 return omit_one_operand (type, integer_zero_node, arg0);
9295
9296 case GE_EXPR:
9297 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9298
9299 case LE_EXPR:
9300 return omit_one_operand (type, integer_one_node, arg0);
9301
9302 case LT_EXPR:
9303 return fold_build2 (NE_EXPR, type, arg0, arg1);
9304
9305 /* The GE_EXPR and LT_EXPR cases above are not normally
9306 reached because of previous transformations. */
9307
9308 default:
9309 break;
9310 }
9311 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9312 == max_hi
9313 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9314 switch (code)
9315 {
9316 case GT_EXPR:
9317 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9318 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9319 case LE_EXPR:
9320 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9321 return fold_build2 (NE_EXPR, type, arg0, arg1);
9322 default:
9323 break;
9324 }
9325 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9326 == min_hi
9327 && TREE_INT_CST_LOW (arg1) == min_lo)
9328 switch (code)
9329 {
9330 case LT_EXPR:
9331 return omit_one_operand (type, integer_zero_node, arg0);
9332
9333 case LE_EXPR:
9334 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9335
9336 case GE_EXPR:
9337 return omit_one_operand (type, integer_one_node, arg0);
9338
9339 case GT_EXPR:
9340 return fold_build2 (NE_EXPR, type, op0, op1);
9341
9342 default:
9343 break;
9344 }
9345 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9346 == min_hi
9347 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9348 switch (code)
9349 {
9350 case GE_EXPR:
9351 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9352 return fold_build2 (NE_EXPR, type, arg0, arg1);
9353 case LT_EXPR:
9354 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9355 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9356 default:
9357 break;
9358 }
9359
9360 else if (!in_gimple_form
9361 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9362 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9363 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9364 /* signed_type does not work on pointer types. */
9365 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9366 {
9367 /* The following case also applies to X < signed_max+1
9368 and X >= signed_max+1 because of previous transformations. */
9369 if (code == LE_EXPR || code == GT_EXPR)
9370 {
9371 tree st0, st1;
9372 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9373 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9374 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9375 type, fold_convert (st0, arg0),
9376 build_int_cst (st1, 0));
9377 }
9378 }
9379 }
9380 }
9381
9382 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9383 a MINUS_EXPR of a constant, we can convert it into a comparison with
9384 a revised constant as long as no overflow occurs. */
9385 if ((code == EQ_EXPR || code == NE_EXPR)
9386 && TREE_CODE (arg1) == INTEGER_CST
9387 && (TREE_CODE (arg0) == PLUS_EXPR
9388 || TREE_CODE (arg0) == MINUS_EXPR)
9389 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9390 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9391 ? MINUS_EXPR : PLUS_EXPR,
9392 arg1, TREE_OPERAND (arg0, 1), 0))
9393 && ! TREE_CONSTANT_OVERFLOW (tem))
9394 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9395
9396 /* Similarly for a NEGATE_EXPR. */
9397 else if ((code == EQ_EXPR || code == NE_EXPR)
9398 && TREE_CODE (arg0) == NEGATE_EXPR
9399 && TREE_CODE (arg1) == INTEGER_CST
9400 && 0 != (tem = negate_expr (arg1))
9401 && TREE_CODE (tem) == INTEGER_CST
9402 && ! TREE_CONSTANT_OVERFLOW (tem))
9403 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9404
9405 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9406 for !=. Don't do this for ordered comparisons due to overflow. */
9407 else if ((code == NE_EXPR || code == EQ_EXPR)
9408 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9409 return fold_build2 (code, type,
9410 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9411
9412 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9413 && (TREE_CODE (arg0) == NOP_EXPR
9414 || TREE_CODE (arg0) == CONVERT_EXPR))
9415 {
9416 /* If we are widening one operand of an integer comparison,
9417 see if the other operand is similarly being widened. Perhaps we
9418 can do the comparison in the narrower type. */
9419 tem = fold_widened_comparison (code, type, arg0, arg1);
9420 if (tem)
9421 return tem;
9422
9423 /* Or if we are changing signedness. */
9424 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9425 if (tem)
9426 return tem;
9427 }
9428
9429 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9430 constant, we can simplify it. */
9431 else if (TREE_CODE (arg1) == INTEGER_CST
9432 && (TREE_CODE (arg0) == MIN_EXPR
9433 || TREE_CODE (arg0) == MAX_EXPR)
9434 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9435 {
9436 tem = optimize_minmax_comparison (code, type, op0, op1);
9437 if (tem)
9438 return tem;
9439
9440 return NULL_TREE;
9441 }
9442
9443 /* If we are comparing an ABS_EXPR with a constant, we can
9444 convert all the cases into explicit comparisons, but they may
9445 well not be faster than doing the ABS and one comparison.
9446 But ABS (X) <= C is a range comparison, which becomes a subtraction
9447 and a comparison, and is probably faster. */
9448 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9449 && TREE_CODE (arg0) == ABS_EXPR
9450 && ! TREE_SIDE_EFFECTS (arg0)
9451 && (0 != (tem = negate_expr (arg1)))
9452 && TREE_CODE (tem) == INTEGER_CST
9453 && ! TREE_CONSTANT_OVERFLOW (tem))
9454 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9455 build2 (GE_EXPR, type,
9456 TREE_OPERAND (arg0, 0), tem),
9457 build2 (LE_EXPR, type,
9458 TREE_OPERAND (arg0, 0), arg1));
9459
9460 /* Convert ABS_EXPR<x> >= 0 to true. */
9461 else if (code == GE_EXPR
9462 && tree_expr_nonnegative_p (arg0)
9463 && (integer_zerop (arg1)
9464 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9465 && real_zerop (arg1))))
9466 return omit_one_operand (type, integer_one_node, arg0);
9467
9468 /* Convert ABS_EXPR<x> < 0 to false. */
9469 else if (code == LT_EXPR
9470 && tree_expr_nonnegative_p (arg0)
9471 && (integer_zerop (arg1) || real_zerop (arg1)))
9472 return omit_one_operand (type, integer_zero_node, arg0);
9473
9474 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9475 else if ((code == EQ_EXPR || code == NE_EXPR)
9476 && TREE_CODE (arg0) == ABS_EXPR
9477 && (integer_zerop (arg1) || real_zerop (arg1)))
9478 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9479
9480 /* If this is an EQ or NE comparison with zero and ARG0 is
9481 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9482 two operations, but the latter can be done in one less insn
9483 on machines that have only two-operand insns or on which a
9484 constant cannot be the first operand. */
9485 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9486 && TREE_CODE (arg0) == BIT_AND_EXPR)
9487 {
9488 tree arg00 = TREE_OPERAND (arg0, 0);
9489 tree arg01 = TREE_OPERAND (arg0, 1);
9490 if (TREE_CODE (arg00) == LSHIFT_EXPR
9491 && integer_onep (TREE_OPERAND (arg00, 0)))
9492 return
9493 fold_build2 (code, type,
9494 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9495 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9496 arg01, TREE_OPERAND (arg00, 1)),
9497 fold_convert (TREE_TYPE (arg0),
9498 integer_one_node)),
9499 arg1);
9500 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9501 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9502 return
9503 fold_build2 (code, type,
9504 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9505 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9506 arg00, TREE_OPERAND (arg01, 1)),
9507 fold_convert (TREE_TYPE (arg0),
9508 integer_one_node)),
9509 arg1);
9510 }
9511
9512 /* If this is an NE or EQ comparison of zero against the result of a
9513 signed MOD operation whose second operand is a power of 2, make
9514 the MOD operation unsigned since it is simpler and equivalent. */
9515 if ((code == NE_EXPR || code == EQ_EXPR)
9516 && integer_zerop (arg1)
9517 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9518 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9519 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9520 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9521 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9522 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9523 {
9524 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9525 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9526 fold_convert (newtype,
9527 TREE_OPERAND (arg0, 0)),
9528 fold_convert (newtype,
9529 TREE_OPERAND (arg0, 1)));
9530
9531 return fold_build2 (code, type, newmod,
9532 fold_convert (newtype, arg1));
9533 }
9534
9535 /* If this is an NE comparison of zero with an AND of one, remove the
9536 comparison since the AND will give the correct value. */
9537 if (code == NE_EXPR && integer_zerop (arg1)
9538 && TREE_CODE (arg0) == BIT_AND_EXPR
9539 && integer_onep (TREE_OPERAND (arg0, 1)))
9540 return fold_convert (type, arg0);
9541
9542 /* If we have (A & C) == C where C is a power of 2, convert this into
9543 (A & C) != 0. Similarly for NE_EXPR. */
9544 if ((code == EQ_EXPR || code == NE_EXPR)
9545 && TREE_CODE (arg0) == BIT_AND_EXPR
9546 && integer_pow2p (TREE_OPERAND (arg0, 1))
9547 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9548 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9549 arg0, fold_convert (TREE_TYPE (arg0),
9550 integer_zero_node));
9551
9552 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9553 bit, then fold the expression into A < 0 or A >= 0. */
9554 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9555 if (tem)
9556 return tem;
9557
9558 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9559 Similarly for NE_EXPR. */
9560 if ((code == EQ_EXPR || code == NE_EXPR)
9561 && TREE_CODE (arg0) == BIT_AND_EXPR
9562 && TREE_CODE (arg1) == INTEGER_CST
9563 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9564 {
9565 tree notc = fold_build1 (BIT_NOT_EXPR,
9566 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9567 TREE_OPERAND (arg0, 1));
9568 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9569 arg1, notc);
9570 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9571 if (integer_nonzerop (dandnotc))
9572 return omit_one_operand (type, rslt, arg0);
9573 }
9574
9575 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9576 Similarly for NE_EXPR. */
9577 if ((code == EQ_EXPR || code == NE_EXPR)
9578 && TREE_CODE (arg0) == BIT_IOR_EXPR
9579 && TREE_CODE (arg1) == INTEGER_CST
9580 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9581 {
9582 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9583 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9584 TREE_OPERAND (arg0, 1), notd);
9585 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9586 if (integer_nonzerop (candnotd))
9587 return omit_one_operand (type, rslt, arg0);
9588 }
9589
9590 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9591 and similarly for >= into !=. */
9592 if ((code == LT_EXPR || code == GE_EXPR)
9593 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9594 && TREE_CODE (arg1) == LSHIFT_EXPR
9595 && integer_onep (TREE_OPERAND (arg1, 0)))
9596 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9597 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9598 TREE_OPERAND (arg1, 1)),
9599 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9600
9601 else if ((code == LT_EXPR || code == GE_EXPR)
9602 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9603 && (TREE_CODE (arg1) == NOP_EXPR
9604 || TREE_CODE (arg1) == CONVERT_EXPR)
9605 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9606 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9607 return
9608 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9609 fold_convert (TREE_TYPE (arg0),
9610 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9611 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9612 1))),
9613 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9614
9615 /* Simplify comparison of something with itself. (For IEEE
9616 floating-point, we can only do some of these simplifications.) */
9617 if (operand_equal_p (arg0, arg1, 0))
9618 {
9619 switch (code)
9620 {
9621 case EQ_EXPR:
9622 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9623 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9624 return constant_boolean_node (1, type);
9625 break;
9626
9627 case GE_EXPR:
9628 case LE_EXPR:
9629 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9630 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9631 return constant_boolean_node (1, type);
9632 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9633
9634 case NE_EXPR:
9635 /* For NE, we can only do this simplification if integer
9636 or we don't honor IEEE floating point NaNs. */
9637 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9638 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9639 break;
9640 /* ... fall through ... */
9641 case GT_EXPR:
9642 case LT_EXPR:
9643 return constant_boolean_node (0, type);
9644 default:
9645 gcc_unreachable ();
9646 }
9647 }
9648
9649 /* If we are comparing an expression that just has comparisons
9650 of two integer values, arithmetic expressions of those comparisons,
9651 and constants, we can simplify it. There are only three cases
9652 to check: the two values can either be equal, the first can be
9653 greater, or the second can be greater. Fold the expression for
9654 those three values. Since each value must be 0 or 1, we have
9655 eight possibilities, each of which corresponds to the constant 0
9656 or 1 or one of the six possible comparisons.
9657
9658 This handles common cases like (a > b) == 0 but also handles
9659 expressions like ((x > y) - (y > x)) > 0, which supposedly
9660 occur in macroized code. */
9661
9662 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9663 {
9664 tree cval1 = 0, cval2 = 0;
9665 int save_p = 0;
9666
9667 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9668 /* Don't handle degenerate cases here; they should already
9669 have been handled anyway. */
9670 && cval1 != 0 && cval2 != 0
9671 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9672 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9673 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9674 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9675 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9676 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9677 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9678 {
9679 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9680 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9681
9682 /* We can't just pass T to eval_subst in case cval1 or cval2
9683 was the same as ARG1. */
9684
9685 tree high_result
9686 = fold_build2 (code, type,
9687 eval_subst (arg0, cval1, maxval,
9688 cval2, minval),
9689 arg1);
9690 tree equal_result
9691 = fold_build2 (code, type,
9692 eval_subst (arg0, cval1, maxval,
9693 cval2, maxval),
9694 arg1);
9695 tree low_result
9696 = fold_build2 (code, type,
9697 eval_subst (arg0, cval1, minval,
9698 cval2, maxval),
9699 arg1);
9700
9701 /* All three of these results should be 0 or 1. Confirm they
9702 are. Then use those values to select the proper code
9703 to use. */
9704
9705 if ((integer_zerop (high_result)
9706 || integer_onep (high_result))
9707 && (integer_zerop (equal_result)
9708 || integer_onep (equal_result))
9709 && (integer_zerop (low_result)
9710 || integer_onep (low_result)))
9711 {
9712 /* Make a 3-bit mask with the high-order bit being the
9713 value for `>', the next for '=', and the low for '<'. */
9714 switch ((integer_onep (high_result) * 4)
9715 + (integer_onep (equal_result) * 2)
9716 + integer_onep (low_result))
9717 {
9718 case 0:
9719 /* Always false. */
9720 return omit_one_operand (type, integer_zero_node, arg0);
9721 case 1:
9722 code = LT_EXPR;
9723 break;
9724 case 2:
9725 code = EQ_EXPR;
9726 break;
9727 case 3:
9728 code = LE_EXPR;
9729 break;
9730 case 4:
9731 code = GT_EXPR;
9732 break;
9733 case 5:
9734 code = NE_EXPR;
9735 break;
9736 case 6:
9737 code = GE_EXPR;
9738 break;
9739 case 7:
9740 /* Always true. */
9741 return omit_one_operand (type, integer_one_node, arg0);
9742 }
9743
9744 if (save_p)
9745 return save_expr (build2 (code, type, cval1, cval2));
9746 else
9747 return fold_build2 (code, type, cval1, cval2);
9748 }
9749 }
9750 }
9751
9752 /* If this is a comparison of a field, we may be able to simplify it. */
9753 if (((TREE_CODE (arg0) == COMPONENT_REF
9754 && lang_hooks.can_use_bit_fields_p ())
9755 || TREE_CODE (arg0) == BIT_FIELD_REF)
9756 && (code == EQ_EXPR || code == NE_EXPR)
9757 /* Handle the constant case even without -O
9758 to make sure the warnings are given. */
9759 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9760 {
9761 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9762 if (t1)
9763 return t1;
9764 }
9765
9766 /* Fold a comparison of the address of COMPONENT_REFs with the same
9767 type and component to a comparison of the address of the base
9768 object. In short, &x->a OP &y->a to x OP y and
9769 &x->a OP &y.a to x OP &y */
9770 if (TREE_CODE (arg0) == ADDR_EXPR
9771 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9772 && TREE_CODE (arg1) == ADDR_EXPR
9773 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9774 {
9775 tree cref0 = TREE_OPERAND (arg0, 0);
9776 tree cref1 = TREE_OPERAND (arg1, 0);
9777 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9778 {
9779 tree op0 = TREE_OPERAND (cref0, 0);
9780 tree op1 = TREE_OPERAND (cref1, 0);
9781 return fold_build2 (code, type,
9782 build_fold_addr_expr (op0),
9783 build_fold_addr_expr (op1));
9784 }
9785 }
9786
9787 /* Optimize comparisons of strlen vs zero to a compare of the
9788 first character of the string vs zero. To wit,
9789 strlen(ptr) == 0 => *ptr == 0
9790 strlen(ptr) != 0 => *ptr != 0
9791 Other cases should reduce to one of these two (or a constant)
9792 due to the return value of strlen being unsigned. */
9793 if ((code == EQ_EXPR || code == NE_EXPR)
9794 && integer_zerop (arg1)
9795 && TREE_CODE (arg0) == CALL_EXPR)
9796 {
9797 tree fndecl = get_callee_fndecl (arg0);
9798 tree arglist;
9799
9800 if (fndecl
9801 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9802 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9803 && (arglist = TREE_OPERAND (arg0, 1))
9804 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9805 && ! TREE_CHAIN (arglist))
9806 {
9807 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9808 return fold_build2 (code, type, iref,
9809 build_int_cst (TREE_TYPE (iref), 0));
9810 }
9811 }
9812
9813 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9814 into a single range test. */
9815 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9816 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9817 && TREE_CODE (arg1) == INTEGER_CST
9818 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9819 && !integer_zerop (TREE_OPERAND (arg0, 1))
9820 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9821 && !TREE_OVERFLOW (arg1))
9822 {
9823 t1 = fold_div_compare (code, type, arg0, arg1);
9824 if (t1 != NULL_TREE)
9825 return t1;
9826 }
9827
9828 if ((code == EQ_EXPR || code == NE_EXPR)
9829 && integer_zerop (arg1)
9830 && tree_expr_nonzero_p (arg0))
9831 {
9832 tree res = constant_boolean_node (code==NE_EXPR, type);
9833 return omit_one_operand (type, res, arg0);
9834 }
9835
9836 t1 = fold_relational_const (code, type, arg0, arg1);
9837 return t1 == NULL_TREE ? NULL_TREE : t1;
9838
9839 case UNORDERED_EXPR:
9840 case ORDERED_EXPR:
9841 case UNLT_EXPR:
9842 case UNLE_EXPR:
9843 case UNGT_EXPR:
9844 case UNGE_EXPR:
9845 case UNEQ_EXPR:
9846 case LTGT_EXPR:
9847 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9848 {
9849 t1 = fold_relational_const (code, type, arg0, arg1);
9850 if (t1 != NULL_TREE)
9851 return t1;
9852 }
9853
9854 /* If the first operand is NaN, the result is constant. */
9855 if (TREE_CODE (arg0) == REAL_CST
9856 && REAL_VALUE_ISNANUINF (TREE_REAL_CST (arg0))
9857 && (code != LTGT_EXPR || ! flag_trapping_math))
9858 {
9859 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9860 ? integer_zero_node
9861 : integer_one_node;
9862 return omit_one_operand (type, t1, arg1);
9863 }
9864
9865 /* If the second operand is NaN, the result is constant. */
9866 if (TREE_CODE (arg1) == REAL_CST
9867 && REAL_VALUE_ISNANUINF (TREE_REAL_CST (arg1))
9868 && (code != LTGT_EXPR || ! flag_trapping_math))
9869 {
9870 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9871 ? integer_zero_node
9872 : integer_one_node;
9873 return omit_one_operand (type, t1, arg0);
9874 }
9875
9876 /* Simplify unordered comparison of something with itself. */
9877 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9878 && operand_equal_p (arg0, arg1, 0))
9879 return constant_boolean_node (1, type);
9880
9881 if (code == LTGT_EXPR
9882 && !flag_trapping_math
9883 && operand_equal_p (arg0, arg1, 0))
9884 return constant_boolean_node (0, type);
9885
9886 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9887 {
9888 tree targ0 = strip_float_extensions (arg0);
9889 tree targ1 = strip_float_extensions (arg1);
9890 tree newtype = TREE_TYPE (targ0);
9891
9892 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9893 newtype = TREE_TYPE (targ1);
9894
9895 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9896 return fold_build2 (code, type, fold_convert (newtype, targ0),
9897 fold_convert (newtype, targ1));
9898 }
9899
9900 return NULL_TREE;
9901
9902 case COMPOUND_EXPR:
9903 /* When pedantic, a compound expression can be neither an lvalue
9904 nor an integer constant expression. */
9905 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
9906 return NULL_TREE;
9907 /* Don't let (0, 0) be null pointer constant. */
9908 if (integer_zerop (arg1))
9909 return build1 (NOP_EXPR, type, arg1);
9910 return convert (type, arg1);
9911
9912 case COMPLEX_EXPR:
9913 if (wins)
9914 return build_complex (type, arg0, arg1);
9915 return NULL_TREE;
9916
9917 case ASSERT_EXPR:
9918 /* An ASSERT_EXPR should never be passed to fold_binary. */
9919 gcc_unreachable ();
9920
9921 default:
9922 return NULL_TREE;
9923 } /* switch (code) */
9924 }
9925
/* Callback for walk_tree, looking for LABEL_EXPR.  Returns *TP if it
   is a LABEL_EXPR; otherwise returns NULL_TREE.  Does not descend into
   the sub-tree of a GOTO_EXPR.  */
9929
9930 static tree
contains_label_1(tree * tp,int * walk_subtrees,void * data ATTRIBUTE_UNUSED)9931 contains_label_1 (tree *tp,
9932 int *walk_subtrees,
9933 void *data ATTRIBUTE_UNUSED)
9934 {
9935 switch (TREE_CODE (*tp))
9936 {
9937 case LABEL_EXPR:
9938 return *tp;
9939 case GOTO_EXPR:
9940 *walk_subtrees = 0;
9941 /* no break */
9942 default:
9943 return NULL_TREE;
9944 }
9945 }
9946
/* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
   accessible from outside the sub-tree.  Returns true if such a label
   is found, false otherwise.  */
9950
9951 static bool
contains_label_p(tree st)9952 contains_label_p (tree st)
9953 {
9954 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
9955 }
9956
9957 /* Fold a ternary expression of code CODE and type TYPE with operands
9958 OP0, OP1, and OP2. Return the folded expression if folding is
9959 successful. Otherwise, return NULL_TREE. */
9960
tree
fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree tem;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  /* Only expression codes taking exactly three operands belong here.  */
  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 3);

  /* Strip any conversions that don't change the mode.  This is safe
     for every expression, except for a comparison expression because
     its signedness is derived from its operands.  So, in the latter
     case, only strip conversions that don't change the signedness.

     Note that this is done as an internal manipulation within the
     constant folder, in order to find the simplest representation of
     the arguments so that their form can be studied.  In any cases,
     the appropriate type conversions should be put back in the tree
     that will get out of the constant folder.  */
  if (op0)
    {
      arg0 = op0;
      STRIP_NOPS (arg0);
    }

  if (op1)
    {
      arg1 = op1;
      STRIP_NOPS (arg1);
    }

  switch (code)
    {
    case COMPONENT_REF:
      /* Folding a field access of a CONSTRUCTOR: scan its elements for
	 the matching FIELD_DECL and return the associated value.  */
      if (TREE_CODE (arg0) == CONSTRUCTOR
	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
	{
	  unsigned HOST_WIDE_INT idx;
	  tree field, value;
	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
	    if (field == arg1)
	      return value;
	}
      return NULL_TREE;

    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
	  tem = integer_zerop (arg0) ? op2 : op1;
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.
	     Avoid throwing away that operand which contains label.  */
	  if ((!TREE_SIDE_EFFECTS (unused_op)
	       || !contains_label_p (unused_op))
	      && (! VOID_TYPE_P (TREE_TYPE (tem))
		  || VOID_TYPE_P (type)))
	    return pedantic_non_lvalue (tem);
	  return NULL_TREE;
	}
      /* A ? B : B is just B (modulo side effects of A).  */
      if (operand_equal_p (arg1, op2, 0))
	return pedantic_omit_one_operand (type, arg1, arg0);

      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.

         Also try swapping the arguments and inverting the conditional.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
	  if (tem)
	    return tem;
	}

      /* Same idea with the arms swapped; requires inverting the
	 comparison first.  */
      if (COMPARISON_CLASS_P (arg0)
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     op2,
					     TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
	{
	  tem = invert_truthvalue (arg0);
	  if (COMPARISON_CLASS_P (tem))
	    {
	      tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
	      if (tem)
		return tem;
	    }
	}

      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (truth_value_p (TREE_CODE (arg0))
	  && tree_swap_operands_p (op1, op2, false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = invert_truthvalue (arg0);

	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold_build3 (code, type, tem, op2, op1);
	}

      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (op1)
	  && integer_zerop (op2)
	  /* If we try to convert OP0 to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (op1)
	  && integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue (fold_convert (type,
						  invert_truthvalue (arg0)));

      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
      if (TREE_CODE (arg0) == LT_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
	return fold_convert (type,
			     fold_build2 (BIT_AND_EXPR,
					  TREE_TYPE (tem), tem,
					  fold_convert (TREE_TYPE (tem), arg1)));

      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
	 already handled above.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
	  && integer_onep (TREE_OPERAND (arg0, 1))
	  && integer_zerop (op2)
	  && integer_pow2p (arg1))
	{
	  tree tem = TREE_OPERAND (arg0, 0);
	  STRIP_NOPS (tem);
	  /* The shift count must equal log2 of the power-of-two ARG1
	     for the transformation to hold.  */
	  if (TREE_CODE (tem) == RSHIFT_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
              && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
	         TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
	    return fold_build2 (BIT_AND_EXPR, type,
				TREE_OPERAND (tem, 0), arg1);
	}

      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
	 is probably obsolete because the first operand should be a
	 truth value (that's why we have the two cases above), but let's
	 leave it in until we can confirm this for all front-ends.  */
      if (integer_zerop (op2)
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, OEP_ONLY_CONST))
	return pedantic_non_lvalue (fold_convert (type,
						  TREE_OPERAND (arg0, 0)));

      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return fold_build2 (TRUTH_ANDIF_EXPR, type,
			    fold_convert (type, arg0),
			    arg1);

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (op2)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = invert_truthvalue (arg0);
	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold_build2 (TRUTH_ORIF_EXPR, type,
				fold_convert (type, tem),
				arg1);
	}

      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
      if (integer_zerop (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = invert_truthvalue (arg0);
	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold_build2 (TRUTH_ANDIF_EXPR, type,
				fold_convert (type, tem),
				op2);
	}

      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
      if (integer_onep (arg1)
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (op2)))
	return fold_build2 (TRUTH_ORIF_EXPR, type,
			    fold_convert (type, arg0),
			    op2);

      return NULL_TREE;

    case CALL_EXPR:
      /* Check for a built-in function.  Delegates to the builtin
	 folder when OP0 is the address of a DECL_BUILT_IN function.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
	  && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
	return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
      return NULL_TREE;

    case BIT_FIELD_REF:
      /* Extracting element OP2/ARG1 of a constant vector: walk the
	 element list to the requested index.  */
      if (TREE_CODE (arg0) == VECTOR_CST
	  && type == TREE_TYPE (TREE_TYPE (arg0))
	  && host_integerp (arg1, 1)
	  && host_integerp (op2, 1))
	{
	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);

	  /* The access must cover exactly one whole, aligned element.  */
	  if (width != 0
	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
	      && (idx % width) == 0
	      && (idx = idx / width)
		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
	    {
	      tree elements = TREE_VECTOR_CST_ELTS (arg0);
	      while (idx-- > 0 && elements)
		elements = TREE_CHAIN (elements);
	      if (elements)
		return TREE_VALUE (elements);
	      else
		/* Trailing elements beyond the list are implicitly zero.  */
		return fold_convert (type, integer_zero_node);
	    }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
10214
10215 /* Perform constant folding and related simplification of EXPR.
10216 The related simplifications include x*1 => x, x*0 => 0, etc.,
10217 and application of the associative law.
10218 NOP_EXPR conversions may be removed freely (as long as we
10219 are careful not to change the type of the overall expression).
10220 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10221 but we can constant-fold them if they have constant operands. */
10222
10223 #ifdef ENABLE_FOLD_CHECKING
10224 # define fold(x) fold_1 (x)
10225 static tree fold_1 (tree);
10226 static
10227 #endif
10228 tree
fold(tree expr)10229 fold (tree expr)
10230 {
10231 const tree t = expr;
10232 enum tree_code code = TREE_CODE (t);
10233 enum tree_code_class kind = TREE_CODE_CLASS (code);
10234 tree tem;
10235
10236 /* Return right away if a constant. */
10237 if (kind == tcc_constant)
10238 return t;
10239
10240 if (IS_EXPR_CODE_CLASS (kind))
10241 {
10242 tree type = TREE_TYPE (t);
10243 tree op0, op1, op2;
10244
10245 switch (TREE_CODE_LENGTH (code))
10246 {
10247 case 1:
10248 op0 = TREE_OPERAND (t, 0);
10249 tem = fold_unary (code, type, op0);
10250 return tem ? tem : expr;
10251 case 2:
10252 op0 = TREE_OPERAND (t, 0);
10253 op1 = TREE_OPERAND (t, 1);
10254 tem = fold_binary (code, type, op0, op1);
10255 return tem ? tem : expr;
10256 case 3:
10257 op0 = TREE_OPERAND (t, 0);
10258 op1 = TREE_OPERAND (t, 1);
10259 op2 = TREE_OPERAND (t, 2);
10260 tem = fold_ternary (code, type, op0, op1, op2);
10261 return tem ? tem : expr;
10262 default:
10263 break;
10264 }
10265 }
10266
10267 switch (code)
10268 {
10269 case CONST_DECL:
10270 return fold (DECL_INITIAL (t));
10271
10272 default:
10273 return t;
10274 } /* switch (code) */
10275 }
10276
10277 #ifdef ENABLE_FOLD_CHECKING
10278 #undef fold
10279
10280 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10281 static void fold_check_failed (tree, tree);
10282 void print_fold_checksum (tree);
10283
10284 /* When --enable-checking=fold, compute a digest of expr before
10285 and after actual fold call to see if fold did not accidentally
10286 change original expr. */
10287
10288 tree
fold(tree expr)10289 fold (tree expr)
10290 {
10291 tree ret;
10292 struct md5_ctx ctx;
10293 unsigned char checksum_before[16], checksum_after[16];
10294 htab_t ht;
10295
10296 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10297 md5_init_ctx (&ctx);
10298 fold_checksum_tree (expr, &ctx, ht);
10299 md5_finish_ctx (&ctx, checksum_before);
10300 htab_empty (ht);
10301
10302 ret = fold_1 (expr);
10303
10304 md5_init_ctx (&ctx);
10305 fold_checksum_tree (expr, &ctx, ht);
10306 md5_finish_ctx (&ctx, checksum_after);
10307 htab_delete (ht);
10308
10309 if (memcmp (checksum_before, checksum_after, 16))
10310 fold_check_failed (expr, ret);
10311
10312 return ret;
10313 }
10314
10315 void
print_fold_checksum(tree expr)10316 print_fold_checksum (tree expr)
10317 {
10318 struct md5_ctx ctx;
10319 unsigned char checksum[16], cnt;
10320 htab_t ht;
10321
10322 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10323 md5_init_ctx (&ctx);
10324 fold_checksum_tree (expr, &ctx, ht);
10325 md5_finish_ctx (&ctx, checksum);
10326 htab_delete (ht);
10327 for (cnt = 0; cnt < 16; ++cnt)
10328 fprintf (stderr, "%02x", checksum[cnt]);
10329 putc ('\n', stderr);
10330 }
10331
/* Report that the digest of a tree changed across a fold call, i.e.
   fold modified its input in place.  This is a hard internal error.  */

static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
10337
/* Fold the bytes of EXPR and of every tree reachable from it into the
   md5 context CTX.  HT records pointers already visited so shared
   subtrees (and chains) are hashed only once.  Fields that fold is
   allowed to change (DECL_ASSEMBLER_NAME, various type caches) are
   masked out of a local copy before hashing, so legitimate updates do
   not trip the checksum comparison.  */

static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  struct tree_function_decl buf;
  int i, len;

recursive_label:

  /* BUF must be large enough to hold a copy of any node we scribble
     on below.  */
  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_function_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  /* Visit each node at most once; this also terminates on cyclic
     structures such as type variants.  */
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = (tree) &buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
    {
      /* Allow these fields to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
      if (TYPE_CACHED_VALUES_P (expr))
	{
	  TYPE_CACHED_VALUES_P (expr) = 0;
	  TYPE_CACHED_VALUES (expr) = NULL;
	}
    }
  /* Hash this node's own bytes, then recurse into everything it
     references.  */
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  /* Walk the chain iteratively rather than recursing, to keep
	     stack depth bounded on long lists.  */
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
      len = TREE_CODE_LENGTH (code);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
10478
10479 #endif
10480
10481 /* Fold a unary tree expression with code CODE of type TYPE with an
10482 operand OP0. Return a folded expression if successful. Otherwise,
10483 return a tree expression with code CODE of type TYPE with an
10484 operand OP0. */
10485
10486 tree
fold_build1_stat(enum tree_code code,tree type,tree op0 MEM_STAT_DECL)10487 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
10488 {
10489 tree tem;
10490 #ifdef ENABLE_FOLD_CHECKING
10491 unsigned char checksum_before[16], checksum_after[16];
10492 struct md5_ctx ctx;
10493 htab_t ht;
10494
10495 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10496 md5_init_ctx (&ctx);
10497 fold_checksum_tree (op0, &ctx, ht);
10498 md5_finish_ctx (&ctx, checksum_before);
10499 htab_empty (ht);
10500 #endif
10501
10502 tem = fold_unary (code, type, op0);
10503 if (!tem)
10504 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
10505
10506 #ifdef ENABLE_FOLD_CHECKING
10507 md5_init_ctx (&ctx);
10508 fold_checksum_tree (op0, &ctx, ht);
10509 md5_finish_ctx (&ctx, checksum_after);
10510 htab_delete (ht);
10511
10512 if (memcmp (checksum_before, checksum_after, 16))
10513 fold_check_failed (op0, tem);
10514 #endif
10515 return tem;
10516 }
10517
10518 /* Fold a binary tree expression with code CODE of type TYPE with
10519 operands OP0 and OP1. Return a folded expression if successful.
10520 Otherwise, return a tree expression with code CODE of type TYPE
10521 with operands OP0 and OP1. */
10522
tree
fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  /* Digests of each operand taken before and after folding; fold must
     not modify its argument trees in place.  */
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_after_op0[16],
                checksum_after_op1[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);
#endif

  /* Fold if possible; otherwise build the requested node unchanged.  */
  tem = fold_binary (code, type, op0, op1);
  if (!tem)
    tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_delete (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}
10571
10572 /* Fold a ternary tree expression with code CODE of type TYPE with
10573 operands OP0, OP1, and OP2. Return a folded expression if
10574 successful. Otherwise, return a tree expression with code CODE of
10575 type TYPE with operands OP0, OP1, and OP2. */
10576
tree
fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  /* Digests of each operand taken before and after folding; fold must
     not modify its argument trees in place.  */
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  /* Fold if possible; otherwise build the requested node unchanged.  */
  tem = fold_ternary (code, type, op0, op1, op2);
  if (!tem)
    tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
10640
10641 /* Perform constant folding and related simplification of initializer
10642 expression EXPR. These behave identically to "fold_buildN" but ignore
10643 potential run-time traps and exceptions that fold must preserve. */
10644
10645 #define START_FOLD_INIT \
10646 int saved_signaling_nans = flag_signaling_nans;\
10647 int saved_trapping_math = flag_trapping_math;\
10648 int saved_rounding_math = flag_rounding_math;\
10649 int saved_trapv = flag_trapv;\
10650 flag_signaling_nans = 0;\
10651 flag_trapping_math = 0;\
10652 flag_rounding_math = 0;\
10653 flag_trapv = 0
10654
10655 #define END_FOLD_INIT \
10656 flag_signaling_nans = saved_signaling_nans;\
10657 flag_trapping_math = saved_trapping_math;\
10658 flag_rounding_math = saved_rounding_math;\
10659 flag_trapv = saved_trapv
10660
/* Like fold_build1, but with signaling NaNs, trapping math, rounding
   math and -ftrapv temporarily disabled (see START_FOLD_INIT), giving
   the fold compile-time initializer semantics.  */

tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1 (code, type, op);

  END_FOLD_INIT;
  return result;
}
10672
/* Like fold_build2, but with signaling NaNs, trapping math, rounding
   math and -ftrapv temporarily disabled (see START_FOLD_INIT), giving
   the fold compile-time initializer semantics.  */

tree
fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2 (code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}
10684
/* Like fold_build3, but with signaling NaNs, trapping math, rounding
   math and -ftrapv temporarily disabled (see START_FOLD_INIT), giving
   the fold compile-time initializer semantics.  */

tree
fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
			 tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3 (code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}
10697
10698 #undef START_FOLD_INIT
10699 #undef END_FOLD_INIT
10700
10701 /* Determine if first argument is a multiple of second argument. Return 0 if
   it is not, or we cannot easily determine it to be.
10703
10704 An example of the sort of thing we care about (at this point; this routine
10705 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10706 fold cases do now) is discovering that
10707
10708 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10709
10710 is a multiple of
10711
10712 SAVE_EXPR (J * 8)
10713
10714 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10715
10716 This code also handles discovering that
10717
10718 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10719
10720 is a multiple of 8 so we don't have to worry about dealing with a
10721 possible remainder.
10722
10723 Note that we *look* inside a SAVE_EXPR only to determine how it was
10724 calculated; it is not safe for fold to do much of anything else with the
10725 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10726 at run time. For example, the latter example above *cannot* be implemented
10727 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10728 evaluation time of the original SAVE_EXPR is not necessarily the same at
10729 the time the new expression is evaluated. The only optimization of this
10730 sort that would be valid is changing
10731
10732 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10733
10734 divided by 8 to
10735
10736 SAVE_EXPR (I) * SAVE_EXPR (J)
10737
10738 (where the same SAVE_EXPR (J) is used in the original and the
10739 transformed version). */
10740
static int
multiple_of_p (tree type, tree top, tree bottom)
{
  /* Identical trees are trivially multiples of one another.  */
  if (operand_equal_p (top, bottom, 0))
    return 1;

  /* Only integer types are analyzed below.  */
  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      /* A product is a multiple if either factor is.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* A sum or difference is a multiple only if both terms are.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1, 0)))
	      && ! TREE_OVERFLOW (t1))
	    /* X << C is a multiple of BOTTOM iff (1 << C) is.  */
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      /* Look at what the SAVE_EXPR (or widening NOP_EXPR) wraps.  */
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      /* Refuse mixed-sign operands in an unsigned TYPE: the TRUNC_MOD
	 below would then be computed with the wrong signedness.  */
      if (TREE_CODE (bottom) != INTEGER_CST
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
					 top, bottom, 0));

    default:
      return 0;
    }
}
10813
10814 /* Return true if `t' is known to be non-negative. */
10815
int
tree_expr_nonnegative_p (tree t)
{
  if (t == error_mark_node)
    return 0;

  /* Values of unsigned type are nonnegative by definition.  */
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return 1;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
	return 1;
      break;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (TREE_TYPE (t));
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
	    return 1;
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
		 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
		   < TYPE_PRECISION (TREE_TYPE (t));
	}
      return 0;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      /* Nonnegative if either operand is.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      /* Nonnegative only when both operands are.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
      /* Sign follows the first operand.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      /* The value of these is the second operand.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIND_EXPR:
      /* The value of a BIND_EXPR is its last body expression.  */
      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));

    case COND_EXPR:
      /* Nonnegative when both arms are.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return 1;
		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      /* Zero-extension from a strictly narrower unsigned type
		 always yields a nonnegative value.  */
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_p (t);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

	return 0;
      }

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	tree arglist = TREE_OPERAND (t, 1);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	      /* Expand one case each for the float, double and long double
		 variants (F/L suffixes) of a math builtin...  */
#define CASE_BUILTIN_F(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
	      /* ... and for the int, long and long long variants of an
		 integer builtin.  */
#define CASE_BUILTIN_I(BUILT_IN_FN) \
  case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:

	    CASE_BUILTIN_F (BUILT_IN_ACOS)
	    CASE_BUILTIN_F (BUILT_IN_ACOSH)
	    CASE_BUILTIN_F (BUILT_IN_CABS)
	    CASE_BUILTIN_F (BUILT_IN_COSH)
	    CASE_BUILTIN_F (BUILT_IN_ERFC)
	    CASE_BUILTIN_F (BUILT_IN_EXP)
	    CASE_BUILTIN_F (BUILT_IN_EXP10)
	    CASE_BUILTIN_F (BUILT_IN_EXP2)
	    CASE_BUILTIN_F (BUILT_IN_FABS)
	    CASE_BUILTIN_F (BUILT_IN_FDIM)
	    CASE_BUILTIN_F (BUILT_IN_HYPOT)
	    CASE_BUILTIN_F (BUILT_IN_POW10)
	    CASE_BUILTIN_I (BUILT_IN_FFS)
	    CASE_BUILTIN_I (BUILT_IN_PARITY)
	    CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
	      /* Always true.  */
	      return 1;

	    CASE_BUILTIN_F (BUILT_IN_SQRT)
	      /* sqrt(-0.0) is -0.0.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
		return 1;
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_BUILTIN_F (BUILT_IN_ASINH)
	    CASE_BUILTIN_F (BUILT_IN_ATAN)
	    CASE_BUILTIN_F (BUILT_IN_ATANH)
	    CASE_BUILTIN_F (BUILT_IN_CBRT)
	    CASE_BUILTIN_F (BUILT_IN_CEIL)
	    CASE_BUILTIN_F (BUILT_IN_ERF)
	    CASE_BUILTIN_F (BUILT_IN_EXPM1)
	    CASE_BUILTIN_F (BUILT_IN_FLOOR)
	    CASE_BUILTIN_F (BUILT_IN_FMOD)
	    CASE_BUILTIN_F (BUILT_IN_FREXP)
	    CASE_BUILTIN_F (BUILT_IN_LCEIL)
	    CASE_BUILTIN_F (BUILT_IN_LDEXP)
	    CASE_BUILTIN_F (BUILT_IN_LFLOOR)
	    CASE_BUILTIN_F (BUILT_IN_LLCEIL)
	    CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
	    CASE_BUILTIN_F (BUILT_IN_LLRINT)
	    CASE_BUILTIN_F (BUILT_IN_LLROUND)
	    CASE_BUILTIN_F (BUILT_IN_LRINT)
	    CASE_BUILTIN_F (BUILT_IN_LROUND)
	    CASE_BUILTIN_F (BUILT_IN_MODF)
	    CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
	    CASE_BUILTIN_F (BUILT_IN_POW)
	    CASE_BUILTIN_F (BUILT_IN_RINT)
	    CASE_BUILTIN_F (BUILT_IN_ROUND)
	    CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
	    CASE_BUILTIN_F (BUILT_IN_SINH)
	    CASE_BUILTIN_F (BUILT_IN_TANH)
	    CASE_BUILTIN_F (BUILT_IN_TRUNC)
	      /* True if the 1st argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_BUILTIN_F (BUILT_IN_FMAX)
	      /* True if the 1st OR 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		     || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_BUILTIN_F (BUILT_IN_FMIN)
	      /* True if the 1st AND 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		     && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
	      /* True if the 2nd argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    default:
	      break;
#undef CASE_BUILTIN_F
#undef CASE_BUILTIN_I
	    }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
	return 1;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return 0;
}
11081
11082 /* Return true when T is an address and is known to be nonzero.
11083 For floating point we further ensure that T is not denormal.
11084 Similar logic is present in nonzero_address in rtlanal.h. */
11085
11086 bool
tree_expr_nonzero_p(tree t)11087 tree_expr_nonzero_p (tree t)
11088 {
11089 tree type = TREE_TYPE (t);
11090
11091 /* Doing something useful for floating point would need more work. */
11092 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11093 return false;
11094
11095 switch (TREE_CODE (t))
11096 {
11097 case ABS_EXPR:
11098 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11099
11100 case INTEGER_CST:
11101 /* We used to test for !integer_zerop here. This does not work correctly
11102 if TREE_CONSTANT_OVERFLOW (t). */
11103 return (TREE_INT_CST_LOW (t) != 0
11104 || TREE_INT_CST_HIGH (t) != 0);
11105
11106 case PLUS_EXPR:
11107 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11108 {
11109 /* With the presence of negative values it is hard
11110 to say something. */
11111 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11112 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11113 return false;
11114 /* One of operands must be positive and the other non-negative. */
11115 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11116 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11117 }
11118 break;
11119
11120 case MULT_EXPR:
11121 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11122 {
11123 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11124 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11125 }
11126 break;
11127
11128 case NOP_EXPR:
11129 {
11130 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11131 tree outer_type = TREE_TYPE (t);
11132
11133 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
11134 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
11135 }
11136 break;
11137
11138 case ADDR_EXPR:
11139 {
11140 tree base = get_base_address (TREE_OPERAND (t, 0));
11141
11142 if (!base)
11143 return false;
11144
11145 /* Weak declarations may link to NULL. */
11146 if (VAR_OR_FUNCTION_DECL_P (base))
11147 return !DECL_WEAK (base);
11148
11149 /* Constants are never weak. */
11150 if (CONSTANT_CLASS_P (base))
11151 return true;
11152
11153 return false;
11154 }
11155
11156 case COND_EXPR:
11157 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11158 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
11159
11160 case MIN_EXPR:
11161 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11162 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11163
11164 case MAX_EXPR:
11165 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11166 {
11167 /* When both operands are nonzero, then MAX must be too. */
11168 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11169 return true;
11170
11171 /* MAX where operand 0 is positive is positive. */
11172 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11173 }
11174 /* MAX where operand 1 is positive is positive. */
11175 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11176 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11177 return true;
11178 break;
11179
11180 case COMPOUND_EXPR:
11181 case MODIFY_EXPR:
11182 case BIND_EXPR:
11183 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11184
11185 case SAVE_EXPR:
11186 case NON_LVALUE_EXPR:
11187 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11188
11189 case BIT_IOR_EXPR:
11190 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11191 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11192
11193 case CALL_EXPR:
11194 return alloca_call_p (t);
11195
11196 default:
11197 break;
11198 }
11199 return false;
11200 }
11201
11202 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11203 attempt to fold the expression to a constant without modifying TYPE,
11204 OP0 or OP1.
11205
11206 If the expression could be simplified to a constant, then return
11207 the constant. If the expression would not be simplified to a
11208 constant, then return NULL_TREE. */
11209
11210 tree
fold_binary_to_constant(enum tree_code code,tree type,tree op0,tree op1)11211 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11212 {
11213 tree tem = fold_binary (code, type, op0, op1);
11214 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11215 }
11216
11217 /* Given the components of a unary expression CODE, TYPE and OP0,
11218 attempt to fold the expression to a constant without modifying
11219 TYPE or OP0.
11220
11221 If the expression could be simplified to a constant, then return
11222 the constant. If the expression would not be simplified to a
11223 constant, then return NULL_TREE. */
11224
11225 tree
fold_unary_to_constant(enum tree_code code,tree type,tree op0)11226 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11227 {
11228 tree tem = fold_unary (code, type, op0);
11229 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11230 }
11231
11232 /* If EXP represents referencing an element in a constant string
11233 (either via pointer arithmetic or array indexing), return the
11234 tree representing the value accessed, otherwise return NULL. */
11235
tree
fold_read_from_constant_string (tree exp)
{
  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
	/* Pointer dereference: let string_constant extract both the
	   string and the constant offset.  */
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop (index, fold_convert (sizetype, low_bound));

	  string = exp1;
	}

      /* Read the element only when the index is a known in-bounds
	 constant and the element type is a single-byte integer mode,
	 so the char value is a faithful representation.  */
      if (string
	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return fold_convert (TREE_TYPE (exp),
			     build_int_cst (NULL_TREE,
					    (TREE_STRING_POINTER (string)
					     [TREE_INT_CST_LOW (index)])));
    }
  return NULL;
}
11280
11281 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11282 an integer constant or real constant.
11283
11284 TYPE is the type of the result. */
11285
11286 static tree
fold_negate_const(tree arg0,tree type)11287 fold_negate_const (tree arg0, tree type)
11288 {
11289 tree t = NULL_TREE;
11290
11291 switch (TREE_CODE (arg0))
11292 {
11293 case INTEGER_CST:
11294 {
11295 unsigned HOST_WIDE_INT low;
11296 HOST_WIDE_INT high;
11297 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11298 TREE_INT_CST_HIGH (arg0),
11299 &low, &high);
11300 t = build_int_cst_wide (type, low, high);
11301 t = force_fit_type (t, 1,
11302 (overflow | TREE_OVERFLOW (arg0))
11303 && !TYPE_UNSIGNED (type),
11304 TREE_CONSTANT_OVERFLOW (arg0));
11305 break;
11306 }
11307
11308 case REAL_CST:
11309 {
11310 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
11311 x = REAL_VALUE_NEGATE (x);
11312 t = build_real (type, x);
11313 }
11314 break;
11315
11316 default:
11317 gcc_unreachable ();
11318 }
11319
11320 return t;
11321 }
11322
11323 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11324 an integer constant or real constant.
11325
11326 TYPE is the type of the result. */
11327
11328 tree
fold_abs_const(tree arg0,tree type)11329 fold_abs_const (tree arg0, tree type)
11330 {
11331 tree t = NULL_TREE;
11332
11333 switch (TREE_CODE (arg0))
11334 {
11335 case INTEGER_CST:
11336 /* If the value is unsigned, then the absolute value is
11337 the same as the ordinary value. */
11338 if (TYPE_UNSIGNED (type))
11339 t = arg0;
11340 /* Similarly, if the value is non-negative. */
11341 else if (INT_CST_LT (integer_minus_one_node, arg0))
11342 t = arg0;
11343 /* If the value is negative, then the absolute value is
11344 its negation. */
11345 else
11346 {
11347 unsigned HOST_WIDE_INT low;
11348 HOST_WIDE_INT high;
11349 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11350 TREE_INT_CST_HIGH (arg0),
11351 &low, &high);
11352 t = build_int_cst_wide (type, low, high);
11353 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11354 TREE_CONSTANT_OVERFLOW (arg0));
11355 }
11356 break;
11357
11358 case REAL_CST:
11359 {
11360 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
11361 if (REAL_VALUE_NEGATIVE (x))
11362 t = build_real (type,
11363 REAL_VALUE_NEGATE (x));
11364 else
11365 t = arg0;
11366 }
11367 break;
11368
11369 default:
11370 gcc_unreachable ();
11371 }
11372
11373 return t;
11374 }
11375
11376 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11377 constant. TYPE is the type of the result. */
11378
11379 static tree
fold_not_const(tree arg0,tree type)11380 fold_not_const (tree arg0, tree type)
11381 {
11382 tree t = NULL_TREE;
11383
11384 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11385
11386 t = build_int_cst_wide (type,
11387 ~ TREE_INT_CST_LOW (arg0),
11388 ~ TREE_INT_CST_HIGH (arg0));
11389 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11390 TREE_CONSTANT_OVERFLOW (arg0));
11391
11392 return t;
11393 }
11394
11395 /* Given CODE, a relational operator, the target type, TYPE and two
11396 constant operands OP0 and OP1, return the result of the
11397 relational operation. If the result is not a compile time
11398 constant, then return NULL_TREE. */
11399
11400 static tree
fold_relational_const(enum tree_code code,tree type,tree op0,tree op1)11401 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11402 {
11403 int result, invert;
11404
11405 /* From here on, the only cases we handle are when the result is
11406 known to be a constant. */
11407
11408 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11409 {
11410 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11411 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11412
11413 /* Handle the cases where either operand is a NaN. */
11414 if (REAL_VALUE_ISNANUINF (*c0) || REAL_VALUE_ISNANUINF (*c1))
11415 {
11416 switch (code)
11417 {
11418 case EQ_EXPR:
11419 case ORDERED_EXPR:
11420 result = 0;
11421 break;
11422
11423 case NE_EXPR:
11424 case UNORDERED_EXPR:
11425 case UNLT_EXPR:
11426 case UNLE_EXPR:
11427 case UNGT_EXPR:
11428 case UNGE_EXPR:
11429 case UNEQ_EXPR:
11430 result = 1;
11431 break;
11432
11433 case LT_EXPR:
11434 case LE_EXPR:
11435 case GT_EXPR:
11436 case GE_EXPR:
11437 case LTGT_EXPR:
11438 if (flag_trapping_math)
11439 return NULL_TREE;
11440 result = 0;
11441 break;
11442
11443 default:
11444 gcc_unreachable ();
11445 }
11446
11447 return constant_boolean_node (result, type);
11448 }
11449
11450 return constant_boolean_node (real_compare (code, c0, c1), type);
11451 }
11452
11453 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11454
11455 To compute GT, swap the arguments and do LT.
11456 To compute GE, do LT and invert the result.
11457 To compute LE, swap the arguments, do LT and invert the result.
11458 To compute NE, do EQ and invert the result.
11459
11460 Therefore, the code below must handle only EQ and LT. */
11461
11462 if (code == LE_EXPR || code == GT_EXPR)
11463 {
11464 tree tem = op0;
11465 op0 = op1;
11466 op1 = tem;
11467 code = swap_tree_comparison (code);
11468 }
11469
11470 /* Note that it is safe to invert for real values here because we
11471 have already handled the one case that it matters. */
11472
11473 invert = 0;
11474 if (code == NE_EXPR || code == GE_EXPR)
11475 {
11476 invert = 1;
11477 code = invert_tree_comparison (code, false);
11478 }
11479
11480 /* Compute a result for LT or EQ if args permit;
11481 Otherwise return T. */
11482 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11483 {
11484 if (code == EQ_EXPR)
11485 result = tree_int_cst_equal (op0, op1);
11486 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11487 result = INT_CST_LT_UNSIGNED (op0, op1);
11488 else
11489 result = INT_CST_LT (op0, op1);
11490 }
11491 else
11492 return NULL_TREE;
11493
11494 if (invert)
11495 result ^= 1;
11496 return constant_boolean_node (result, type);
11497 }
11498
11499 /* Build an expression for the a clean point containing EXPR with type TYPE.
11500 Don't build a cleanup point expression for EXPR which don't have side
11501 effects. */
11502
11503 tree
fold_build_cleanup_point_expr(tree type,tree expr)11504 fold_build_cleanup_point_expr (tree type, tree expr)
11505 {
11506 /* If the expression does not have side effects then we don't have to wrap
11507 it with a cleanup point expression. */
11508 if (!TREE_SIDE_EFFECTS (expr))
11509 return expr;
11510
11511 /* If the expression is a return, check to see if the expression inside the
11512 return has no side effects or the right hand side of the modify expression
11513 inside the return. If either don't have side effects set we don't need to
11514 wrap the expression in a cleanup point expression. Note we don't check the
11515 left hand side of the modify because it should always be a return decl. */
11516 if (TREE_CODE (expr) == RETURN_EXPR)
11517 {
11518 tree op = TREE_OPERAND (expr, 0);
11519 if (!op || !TREE_SIDE_EFFECTS (op))
11520 return expr;
11521 op = TREE_OPERAND (op, 1);
11522 if (!TREE_SIDE_EFFECTS (op))
11523 return expr;
11524 }
11525
11526 return build1 (CLEANUP_POINT_EXPR, type, expr);
11527 }
11528
11529 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11530 avoid confusing the gimplify process. */
11531
11532 tree
build_fold_addr_expr_with_type(tree t,tree ptrtype)11533 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11534 {
11535 /* The size of the object is not relevant when talking about its address. */
11536 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11537 t = TREE_OPERAND (t, 0);
11538
11539 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11540 if (TREE_CODE (t) == INDIRECT_REF
11541 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11542 {
11543 t = TREE_OPERAND (t, 0);
11544 if (TREE_TYPE (t) != ptrtype)
11545 t = build1 (NOP_EXPR, ptrtype, t);
11546 }
11547 else
11548 {
11549 tree base = t;
11550
11551 while (handled_component_p (base))
11552 base = TREE_OPERAND (base, 0);
11553 if (DECL_P (base))
11554 TREE_ADDRESSABLE (base) = 1;
11555
11556 t = build1 (ADDR_EXPR, ptrtype, t);
11557 }
11558
11559 return t;
11560 }
11561
11562 tree
build_fold_addr_expr(tree t)11563 build_fold_addr_expr (tree t)
11564 {
11565 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11566 }
11567
11568 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11569 of an indirection through OP0, or NULL_TREE if no simplification is
11570 possible. */
11571
11572 tree
fold_indirect_ref_1(tree type,tree op0)11573 fold_indirect_ref_1 (tree type, tree op0)
11574 {
11575 tree sub = op0;
11576 tree subtype;
11577
11578 STRIP_NOPS (sub);
11579 subtype = TREE_TYPE (sub);
11580 if (!POINTER_TYPE_P (subtype))
11581 return NULL_TREE;
11582
11583 if (TREE_CODE (sub) == ADDR_EXPR)
11584 {
11585 tree op = TREE_OPERAND (sub, 0);
11586 tree optype = TREE_TYPE (op);
11587 /* *&p => p */
11588 if (type == optype)
11589 return op;
11590 /* *(foo *)&fooarray => fooarray[0] */
11591 else if (TREE_CODE (optype) == ARRAY_TYPE
11592 && type == TREE_TYPE (optype))
11593 {
11594 tree type_domain = TYPE_DOMAIN (optype);
11595 tree min_val = size_zero_node;
11596 if (type_domain && TYPE_MIN_VALUE (type_domain))
11597 min_val = TYPE_MIN_VALUE (type_domain);
11598 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11599 }
11600 }
11601
11602 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11603 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11604 && type == TREE_TYPE (TREE_TYPE (subtype)))
11605 {
11606 tree type_domain;
11607 tree min_val = size_zero_node;
11608 sub = build_fold_indirect_ref (sub);
11609 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11610 if (type_domain && TYPE_MIN_VALUE (type_domain))
11611 min_val = TYPE_MIN_VALUE (type_domain);
11612 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11613 }
11614
11615 return NULL_TREE;
11616 }
11617
11618 /* Builds an expression for an indirection through T, simplifying some
11619 cases. */
11620
11621 tree
build_fold_indirect_ref(tree t)11622 build_fold_indirect_ref (tree t)
11623 {
11624 tree type = TREE_TYPE (TREE_TYPE (t));
11625 tree sub = fold_indirect_ref_1 (type, t);
11626
11627 if (sub)
11628 return sub;
11629 else
11630 return build1 (INDIRECT_REF, type, t);
11631 }
11632
11633 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11634
11635 tree
fold_indirect_ref(tree t)11636 fold_indirect_ref (tree t)
11637 {
11638 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11639
11640 if (sub)
11641 return sub;
11642 else
11643 return t;
11644 }
11645
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  /* With no side effects anywhere, the whole value can be discarded.  */
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  /* Otherwise repeatedly peel off the outermost node whenever every
     operand being dropped is side-effect free.  */
  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	/* The single operand carries all the side effects.  */
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	/* Keep whichever operand still has side effects; if both do,
	   the node must be kept as a whole.  */
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    /* Only the first operand may be dropped, and only when the
	       second has no side effects of its own.  */
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    /* The condition alone suffices only when neither arm has
	       side effects.  */
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}
11698
11699 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11700 This can only be applied to objects of a sizetype. */
11701
11702 tree
round_up(tree value,int divisor)11703 round_up (tree value, int divisor)
11704 {
11705 tree div = NULL_TREE;
11706
11707 gcc_assert (divisor > 0);
11708 if (divisor == 1)
11709 return value;
11710
11711 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11712 have to do anything. Only do this when we are not given a const,
11713 because in that case, this check is more expensive than just
11714 doing it. */
11715 if (TREE_CODE (value) != INTEGER_CST)
11716 {
11717 div = build_int_cst (TREE_TYPE (value), divisor);
11718
11719 if (multiple_of_p (TREE_TYPE (value), value, div))
11720 return value;
11721 }
11722
11723 /* If divisor is a power of two, simplify this to bit manipulation. */
11724 if (divisor == (divisor & -divisor))
11725 {
11726 tree t;
11727
11728 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11729 value = size_binop (PLUS_EXPR, value, t);
11730 t = build_int_cst (TREE_TYPE (value), -divisor);
11731 value = size_binop (BIT_AND_EXPR, value, t);
11732 }
11733 else
11734 {
11735 if (!div)
11736 div = build_int_cst (TREE_TYPE (value), divisor);
11737 value = size_binop (CEIL_DIV_EXPR, value, div);
11738 value = size_binop (MULT_EXPR, value, div);
11739 }
11740
11741 return value;
11742 }
11743
11744 /* Likewise, but round down. */
11745
11746 tree
round_down(tree value,int divisor)11747 round_down (tree value, int divisor)
11748 {
11749 tree div = NULL_TREE;
11750
11751 gcc_assert (divisor > 0);
11752 if (divisor == 1)
11753 return value;
11754
11755 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11756 have to do anything. Only do this when we are not given a const,
11757 because in that case, this check is more expensive than just
11758 doing it. */
11759 if (TREE_CODE (value) != INTEGER_CST)
11760 {
11761 div = build_int_cst (TREE_TYPE (value), divisor);
11762
11763 if (multiple_of_p (TREE_TYPE (value), value, div))
11764 return value;
11765 }
11766
11767 /* If divisor is a power of two, simplify this to bit manipulation. */
11768 if (divisor == (divisor & -divisor))
11769 {
11770 tree t;
11771
11772 t = build_int_cst (TREE_TYPE (value), -divisor);
11773 value = size_binop (BIT_AND_EXPR, value, t);
11774 }
11775 else
11776 {
11777 if (!div)
11778 div = build_int_cst (TREE_TYPE (value), divisor);
11779 value = size_binop (FLOOR_DIV_EXPR, value, div);
11780 value = size_binop (MULT_EXPR, value, div);
11781 }
11782
11783 return value;
11784 }
11785
11786 /* Returns the pointer to the base of the object addressed by EXP and
11787 extracts the information about the offset of the access, storing it
11788 to PBITPOS and POFFSET. */
11789
11790 static tree
split_address_to_core_and_offset(tree exp,HOST_WIDE_INT * pbitpos,tree * poffset)11791 split_address_to_core_and_offset (tree exp,
11792 HOST_WIDE_INT *pbitpos, tree *poffset)
11793 {
11794 tree core;
11795 enum machine_mode mode;
11796 int unsignedp, volatilep;
11797 HOST_WIDE_INT bitsize;
11798
11799 if (TREE_CODE (exp) == ADDR_EXPR)
11800 {
11801 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11802 poffset, &mode, &unsignedp, &volatilep,
11803 false);
11804 core = build_fold_addr_expr (core);
11805 }
11806 else
11807 {
11808 core = exp;
11809 *pbitpos = 0;
11810 *poffset = NULL_TREE;
11811 }
11812
11813 return core;
11814 }
11815
11816 /* Returns true if addresses of E1 and E2 differ by a constant, false
11817 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11818
11819 bool
ptr_difference_const(tree e1,tree e2,HOST_WIDE_INT * diff)11820 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11821 {
11822 tree core1, core2;
11823 HOST_WIDE_INT bitpos1, bitpos2;
11824 tree toffset1, toffset2, tdiff, type;
11825
11826 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11827 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11828
11829 if (bitpos1 % BITS_PER_UNIT != 0
11830 || bitpos2 % BITS_PER_UNIT != 0
11831 || !operand_equal_p (core1, core2, 0))
11832 return false;
11833
11834 if (toffset1 && toffset2)
11835 {
11836 type = TREE_TYPE (toffset1);
11837 if (type != TREE_TYPE (toffset2))
11838 toffset2 = fold_convert (type, toffset2);
11839
11840 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11841 if (!cst_and_fits_in_hwi (tdiff))
11842 return false;
11843
11844 *diff = int_cst_value (tdiff);
11845 }
11846 else if (toffset1 || toffset2)
11847 {
11848 /* If only one of the offsets is non-constant, the difference cannot
11849 be a constant. */
11850 return false;
11851 }
11852 else
11853 *diff = 0;
11854
11855 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11856 return true;
11857 }
11858
11859 /* Simplify the floating point expression EXP when the sign of the
11860 result is not significant. Return NULL_TREE if no simplification
11861 is possible. */
11862
11863 tree
fold_strip_sign_ops(tree exp)11864 fold_strip_sign_ops (tree exp)
11865 {
11866 tree arg0, arg1;
11867
11868 switch (TREE_CODE (exp))
11869 {
11870 case ABS_EXPR:
11871 case NEGATE_EXPR:
11872 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11873 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11874
11875 case MULT_EXPR:
11876 case RDIV_EXPR:
11877 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11878 return NULL_TREE;
11879 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11880 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11881 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11882 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11883 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11884 arg1 ? arg1 : TREE_OPERAND (exp, 1));
11885 break;
11886
11887 default:
11888 break;
11889 }
11890 return NULL_TREE;
11891 }
11892
11893