/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not, see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
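
/* For illustration (an editor's sketch of the encoding, not used by the
   code below): bit 0 stands for "less than", bit 1 for "equal", bit 2
   for "greater than" and bit 3 for "unordered", so each code is the OR
   of the outcomes for which the comparison holds:

     COMPCODE_LE  == (COMPCODE_LT | COMPCODE_EQ)                == 3
     COMPCODE_ORD == (COMPCODE_LT | COMPCODE_EQ | COMPCODE_GT)  == 7
     COMPCODE_NE  == (COMPCODE_TRUE & ~COMPCODE_EQ)             == 13

   Folding (a < b) || (a == b) thus reduces to ORing the two codes and
   mapping the result back with compcode_to_comparison.  */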

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The signedness of the division follows operand two; that does the
     correct thing for POINTER_PLUS_EXPR, where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
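
/* For illustration, a minimal sketch of a call (the constants here are
   made up for the example):

     tree four = build_int_cst (sizetype, 4);
     tree twelve = build_int_cst (sizetype, 12);
     tree q = div_if_zero_remainder (EXACT_DIV_EXPR, twelve, four);

   Q is then the INTEGER_CST 3; with a divisor of 5 instead of 4 the
   remainder is nonzero and the call returns NULL_TREE.  */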

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
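
/* For illustration, the usual caller pattern looks like this (a sketch;
   EXPR, STMT and USED_P stand for whatever the caller works with):

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     ... decide whether FOLDED is actually used ...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   Passing zero as the last argument means "use the deferred warning's
   own level", per the comment on fold_undefer_overflow_warnings.  */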

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
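
/* For example: in a 16-bit signed type, 0x8000 is the most negative
   value; negating it has no representable result, and the test above
   returns false exactly for that one bit pattern.  Every other value,
   e.g. -32767, negates without overflow and yields true.  */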

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
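
/* A few concrete cases (illustrative): with -fwrapv, any INTEGER_CST
   answers true; a REAL_CST answers true only if it is negative, which
   is how folding canonicalizes toward positive real constants; and
   -((int) x >> 31) answers true because it can become
   (unsigned) x >> 31 without any negation at all.  */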

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
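
/* For illustration, two of the rewrites above in source terms (a
   sketch; A and B stand for arbitrary operands):

     -(~A)    becomes  A + 1   for integral types, and
     -(A - B) becomes  B - A   when sign-dependent rounding and signed
                               zeros need not be honored.  */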

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except for a literal,
   for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
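
/* A worked example (illustrative): with CODE == PLUS_EXPR, splitting
   IN = x + 3 stores the literal 3 in *LITP and returns x as the
   variable part; splitting IN = x - 3 instead stores 3 in *MINUS_LITP,
   since the literal was subtracted.  In both cases *CONP stays null
   because there is no non-literal constant operand.  */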

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code,
                 tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1,
                         const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        return NULL_TREE;
      else
        {
          bool dummy_overflow;
          /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
             is performed in twice the precision of arguments.  */
          tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
          res = tmp.rshift (TYPE_PRECISION (type),
                            2 * TYPE_PRECISION (type), !uns);
        }
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
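
/* For illustration (the operand names are made up): on two INTEGER_CSTs
   FIVE and SEVEN of the same type, int_const_binop (PLUS_EXPR, five,
   seven) yields the INTEGER_CST 12, while any division by a zero
   constant returns NULL_TREE, per the op2.is_zero () checks above.
   Callers must therefore be prepared for a NULL_TREE result whenever
   CODE cannot be evaluated at compile time.  */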

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to
                 minimize overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
        {
          if (!host_integerp (arg2, 1))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
          unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
          unsigned HOST_WIDE_INT innerc
            = tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;
          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions, the compiler always emits VEC_RSHIFT_EXPR,
             which for !BYTES_BIG_ENDIAN picks the first vector element,
             but for BYTES_BIG_ENDIAN the last element of the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;
          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        return NULL_TREE;

      return build_vector (type, elts);
    }
  return NULL_TREE;
}
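
/* For illustration: const_binop (PLUS_EXPR, ...) on the REAL_CSTs 1.5
   and 2.25 folds to 3.75, since the result is exact; but with
   -frounding-math an inexact result such as 1.0 / 3.0 is left unfolded
   (NULL_TREE), because its value would depend on the run-time rounding
   mode, per the checks above.  */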

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
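
/* For example: size_diffop_loc on the unsigned sizetype constants 4 and
   7 folds to the ssizetype constant -3.  Because the result type is the
   corresponding signed type, the subtraction is done the
   non-overflowing way round (7 - 4) and then negated, as above.  */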

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type,
                                  const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
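
/* For illustration: converting the REAL_CST 3.7 to a 32-bit signed type
   truncates to 3 with no overflow flag; a NaN converts to 0 and 1.0e30
   saturates to the type's maximum value, and in those two cases
   TREE_OVERFLOW is set on the result, matching the Java-style semantics
   described above.  */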
1612
1613 /* A subroutine of fold_convert_const handling conversions of a
1614 FIXED_CST to an integer type. */
1615
1616 static tree
fold_convert_const_int_from_fixed(tree type,const_tree arg1)1617 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1618 {
1619 tree t;
1620 double_int temp, temp_trunc;
1621 unsigned int mode;
1622
1623 /* Right shift FIXED_CST to temp by fbit. */
1624 temp = TREE_FIXED_CST (arg1).data;
1625 mode = TREE_FIXED_CST (arg1).mode;
1626 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1627 {
1628 temp = temp.rshift (GET_MODE_FBIT (mode),
1629 HOST_BITS_PER_DOUBLE_INT,
1630 SIGNED_FIXED_POINT_MODE_P (mode));
1631
1632 /* Left shift temp to temp_trunc by fbit. */
1633 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1634 HOST_BITS_PER_DOUBLE_INT,
1635 SIGNED_FIXED_POINT_MODE_P (mode));
1636 }
1637 else
1638 {
1639 temp = double_int_zero;
1640 temp_trunc = double_int_zero;
1641 }
1642
1643 /* If FIXED_CST is negative, we need to round the value toward 0.
1644 By checking if the fractional bits are not zero to add 1 to temp. */
1645 if (SIGNED_FIXED_POINT_MODE_P (mode)
1646 && temp_trunc.is_negative ()
1647 && TREE_FIXED_CST (arg1).data != temp_trunc)
1648 temp += double_int_one;
1649
1650 /* Given a fixed-point constant, make new constant with new type,
1651 appropriately sign-extended or truncated. */
1652 t = force_fit_type_double (type, temp, -1,
1653 (temp.is_negative ()
1654 && (TYPE_UNSIGNED (type)
1655 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1656 | TREE_OVERFLOW (arg1));
1657
1658 return t;
1659 }
1660
1661 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1662 to another floating point type. */
1663
1664 static tree
fold_convert_const_real_from_real(tree type,const_tree arg1)1665 fold_convert_const_real_from_real (tree type, const_tree arg1)
1666 {
1667 REAL_VALUE_TYPE value;
1668 tree t;
1669
1670 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1671 t = build_real (type, value);
1672
1673 /* If converting an infinity or NAN to a representation that doesn't
1674 have one, set the overflow bit so that we can produce some kind of
1675 error message at the appropriate point if necessary. It's not the
1676 most user-friendly message, but it's better than nothing. */
1677 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1678 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1679 TREE_OVERFLOW (t) = 1;
1680 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1681 && !MODE_HAS_NANS (TYPE_MODE (type)))
1682 TREE_OVERFLOW (t) = 1;
1683 /* Regular overflow, conversion produced an infinity in a mode that
1684 can't represent them. */
1685 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1686 && REAL_VALUE_ISINF (value)
1687 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1688 TREE_OVERFLOW (t) = 1;
1689 else
1690 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1691 return t;
1692 }
1693
1694 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1695 to a floating point type. */
1696
1697 static tree
fold_convert_const_real_from_fixed(tree type,const_tree arg1)1698 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1699 {
1700 REAL_VALUE_TYPE value;
1701 tree t;
1702
1703 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1704 t = build_real (type, value);
1705
1706 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1707 return t;
1708 }
1709
1710 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1711 to another fixed-point type. */
1712
1713 static tree
fold_convert_const_fixed_from_fixed(tree type,const_tree arg1)1714 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1715 {
1716 FIXED_VALUE_TYPE value;
1717 tree t;
1718 bool overflow_p;
1719
1720 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1721 TYPE_SATURATING (type));
1722 t = build_fixed (type, value);
1723
1724 /* Propagate overflow flags. */
1725 if (overflow_p | TREE_OVERFLOW (arg1))
1726 TREE_OVERFLOW (t) = 1;
1727 return t;
1728 }
1729
1730 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1731 to a fixed-point type. */
1732
1733 static tree
fold_convert_const_fixed_from_int(tree type,const_tree arg1)1734 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1735 {
1736 FIXED_VALUE_TYPE value;
1737 tree t;
1738 bool overflow_p;
1739
1740 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1741 TREE_INT_CST (arg1),
1742 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1743 TYPE_SATURATING (type));
1744 t = build_fixed (type, value);
1745
1746 /* Propagate overflow flags. */
1747 if (overflow_p | TREE_OVERFLOW (arg1))
1748 TREE_OVERFLOW (t) = 1;
1749 return t;
1750 }
1751
1752 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1753 to a fixed-point type. */
1754
1755 static tree
fold_convert_const_fixed_from_real(tree type,const_tree arg1)1756 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1757 {
1758 FIXED_VALUE_TYPE value;
1759 tree t;
1760 bool overflow_p;
1761
1762 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1763 &TREE_REAL_CST (arg1),
1764 TYPE_SATURATING (type));
1765 t = build_fixed (type, value);
1766
1767 /* Propagate overflow flags. */
1768 if (overflow_p | TREE_OVERFLOW (arg1))
1769 TREE_OVERFLOW (t) = 1;
1770 return t;
1771 }
1772
1773 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1774 type TYPE. If no simplification can be done return NULL_TREE. */
1775
1776 static tree
fold_convert_const(enum tree_code code,tree type,tree arg1)1777 fold_convert_const (enum tree_code code, tree type, tree arg1)
1778 {
1779 if (TREE_TYPE (arg1) == type)
1780 return arg1;
1781
1782 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1783 || TREE_CODE (type) == OFFSET_TYPE)
1784 {
1785 if (TREE_CODE (arg1) == INTEGER_CST)
1786 return fold_convert_const_int_from_int (type, arg1);
1787 else if (TREE_CODE (arg1) == REAL_CST)
1788 return fold_convert_const_int_from_real (code, type, arg1);
1789 else if (TREE_CODE (arg1) == FIXED_CST)
1790 return fold_convert_const_int_from_fixed (type, arg1);
1791 }
1792 else if (TREE_CODE (type) == REAL_TYPE)
1793 {
1794 if (TREE_CODE (arg1) == INTEGER_CST)
1795 return build_real_from_int_cst (type, arg1);
1796 else if (TREE_CODE (arg1) == REAL_CST)
1797 return fold_convert_const_real_from_real (type, arg1);
1798 else if (TREE_CODE (arg1) == FIXED_CST)
1799 return fold_convert_const_real_from_fixed (type, arg1);
1800 }
1801 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1802 {
1803 if (TREE_CODE (arg1) == FIXED_CST)
1804 return fold_convert_const_fixed_from_fixed (type, arg1);
1805 else if (TREE_CODE (arg1) == INTEGER_CST)
1806 return fold_convert_const_fixed_from_int (type, arg1);
1807 else if (TREE_CODE (arg1) == REAL_CST)
1808 return fold_convert_const_fixed_from_real (type, arg1);
1809 }
1810 return NULL_TREE;
1811 }
1812
1813 /* Construct a vector of zero elements of vector type TYPE. */
1814
1815 static tree
1816 build_zero_vector (tree type)
1817 {
1818 tree t;
1819
1820 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1821 return build_vector_from_val (type, t);
1822 }
1823
1824 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
1825
1826 bool
1827 fold_convertible_p (const_tree type, const_tree arg)
1828 {
1829 tree orig = TREE_TYPE (arg);
1830
1831 if (type == orig)
1832 return true;
1833
1834 if (TREE_CODE (arg) == ERROR_MARK
1835 || TREE_CODE (type) == ERROR_MARK
1836 || TREE_CODE (orig) == ERROR_MARK)
1837 return false;
1838
1839 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1840 return true;
1841
1842 switch (TREE_CODE (type))
1843 {
1844 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1845 case POINTER_TYPE: case REFERENCE_TYPE:
1846 case OFFSET_TYPE:
1847 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1848 || TREE_CODE (orig) == OFFSET_TYPE)
1849 return true;
1850 return (TREE_CODE (orig) == VECTOR_TYPE
1851 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1852
1853 case REAL_TYPE:
1854 case FIXED_POINT_TYPE:
1855 case COMPLEX_TYPE:
1856 case VECTOR_TYPE:
1857 case VOID_TYPE:
1858 return TREE_CODE (type) == TREE_CODE (orig);
1859
1860 default:
1861 return false;
1862 }
1863 }
1864
1865 /* Convert expression ARG to type TYPE. Used by the middle-end for
1866 simple conversions in preference to calling the front-end's convert. */
1867
1868 tree
1869 fold_convert_loc (location_t loc, tree type, tree arg)
1870 {
1871 tree orig = TREE_TYPE (arg);
1872 tree tem;
1873
1874 if (type == orig)
1875 return arg;
1876
1877 if (TREE_CODE (arg) == ERROR_MARK
1878 || TREE_CODE (type) == ERROR_MARK
1879 || TREE_CODE (orig) == ERROR_MARK)
1880 return error_mark_node;
1881
1882 switch (TREE_CODE (type))
1883 {
1884 case POINTER_TYPE:
1885 case REFERENCE_TYPE:
1886 /* Handle conversions between pointers to different address spaces. */
1887 if (POINTER_TYPE_P (orig)
1888 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1889 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1890 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1891 /* fall through */
1892
1893 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1894 case OFFSET_TYPE:
1895 if (TREE_CODE (arg) == INTEGER_CST)
1896 {
1897 tem = fold_convert_const (NOP_EXPR, type, arg);
1898 if (tem != NULL_TREE)
1899 return tem;
1900 }
1901 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1902 || TREE_CODE (orig) == OFFSET_TYPE)
1903 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1904 if (TREE_CODE (orig) == COMPLEX_TYPE)
1905 return fold_convert_loc (loc, type,
1906 fold_build1_loc (loc, REALPART_EXPR,
1907 TREE_TYPE (orig), arg));
1908 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1909 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1910 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1911
1912 case REAL_TYPE:
1913 if (TREE_CODE (arg) == INTEGER_CST)
1914 {
1915 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1916 if (tem != NULL_TREE)
1917 return tem;
1918 }
1919 else if (TREE_CODE (arg) == REAL_CST)
1920 {
1921 tem = fold_convert_const (NOP_EXPR, type, arg);
1922 if (tem != NULL_TREE)
1923 return tem;
1924 }
1925 else if (TREE_CODE (arg) == FIXED_CST)
1926 {
1927 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1928 if (tem != NULL_TREE)
1929 return tem;
1930 }
1931
1932 switch (TREE_CODE (orig))
1933 {
1934 case INTEGER_TYPE:
1935 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1936 case POINTER_TYPE: case REFERENCE_TYPE:
1937 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1938
1939 case REAL_TYPE:
1940 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1941
1942 case FIXED_POINT_TYPE:
1943 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1944
1945 case COMPLEX_TYPE:
1946 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1947 return fold_convert_loc (loc, type, tem);
1948
1949 default:
1950 gcc_unreachable ();
1951 }
1952
1953 case FIXED_POINT_TYPE:
1954 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1955 || TREE_CODE (arg) == REAL_CST)
1956 {
1957 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1958 if (tem != NULL_TREE)
1959 goto fold_convert_exit;
1960 }
1961
1962 switch (TREE_CODE (orig))
1963 {
1964 case FIXED_POINT_TYPE:
1965 case INTEGER_TYPE:
1966 case ENUMERAL_TYPE:
1967 case BOOLEAN_TYPE:
1968 case REAL_TYPE:
1969 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1970
1971 case COMPLEX_TYPE:
1972 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1973 return fold_convert_loc (loc, type, tem);
1974
1975 default:
1976 gcc_unreachable ();
1977 }
1978
1979 case COMPLEX_TYPE:
1980 switch (TREE_CODE (orig))
1981 {
1982 case INTEGER_TYPE:
1983 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1984 case POINTER_TYPE: case REFERENCE_TYPE:
1985 case REAL_TYPE:
1986 case FIXED_POINT_TYPE:
1987 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1988 fold_convert_loc (loc, TREE_TYPE (type), arg),
1989 fold_convert_loc (loc, TREE_TYPE (type),
1990 integer_zero_node));
1991 case COMPLEX_TYPE:
1992 {
1993 tree rpart, ipart;
1994
1995 if (TREE_CODE (arg) == COMPLEX_EXPR)
1996 {
1997 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1998 TREE_OPERAND (arg, 0));
1999 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2000 TREE_OPERAND (arg, 1));
2001 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2002 }
2003
2004 arg = save_expr (arg);
2005 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2006 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2007 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2008 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2009 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2010 }
2011
2012 default:
2013 gcc_unreachable ();
2014 }
2015
2016 case VECTOR_TYPE:
2017 if (integer_zerop (arg))
2018 return build_zero_vector (type);
2019 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2020 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2021 || TREE_CODE (orig) == VECTOR_TYPE);
2022 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2023
2024 case VOID_TYPE:
2025 tem = fold_ignored_result (arg);
2026 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2027
2028 default:
2029 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2030 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2031 gcc_unreachable ();
2032 }
2033 fold_convert_exit:
2034 protected_set_expr_location_unshare (tem, loc);
2035 return tem;
2036 }
2037
2038 /* Return false if expr can be assumed not to be an lvalue, true
2039 otherwise. */
2040
2041 static bool
2042 maybe_lvalue_p (const_tree x)
2043 {
2044 /* We only need to wrap lvalue tree codes. */
2045 switch (TREE_CODE (x))
2046 {
2047 case VAR_DECL:
2048 case PARM_DECL:
2049 case RESULT_DECL:
2050 case LABEL_DECL:
2051 case FUNCTION_DECL:
2052 case SSA_NAME:
2053
2054 case COMPONENT_REF:
2055 case MEM_REF:
2056 case INDIRECT_REF:
2057 case ARRAY_REF:
2058 case ARRAY_RANGE_REF:
2059 case BIT_FIELD_REF:
2060 case OBJ_TYPE_REF:
2061
2062 case REALPART_EXPR:
2063 case IMAGPART_EXPR:
2064 case PREINCREMENT_EXPR:
2065 case PREDECREMENT_EXPR:
2066 case SAVE_EXPR:
2067 case TRY_CATCH_EXPR:
2068 case WITH_CLEANUP_EXPR:
2069 case COMPOUND_EXPR:
2070 case MODIFY_EXPR:
2071 case TARGET_EXPR:
2072 case COND_EXPR:
2073 case BIND_EXPR:
2074 break;
2075
2076 default:
2077 /* Assume the worst for front-end tree codes. */
2078 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2079 break;
2080 return false;
2081 }
2082
2083 return true;
2084 }
2085
2086 /* Return an expr equal to X but certainly not valid as an lvalue. */
2087
2088 tree
2089 non_lvalue_loc (location_t loc, tree x)
2090 {
2091 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2092 us. */
2093 if (in_gimple_form)
2094 return x;
2095
2096 if (! maybe_lvalue_p (x))
2097 return x;
2098 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2099 }
2100
2101 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2102 Zero means allow extended lvalues. */
2103
2104 int pedantic_lvalues;
2105
2106 /* When pedantic, return an expr equal to X but certainly not valid as a
2107 pedantic lvalue. Otherwise, return X. */
2108
2109 static tree
2110 pedantic_non_lvalue_loc (location_t loc, tree x)
2111 {
2112 if (pedantic_lvalues)
2113 return non_lvalue_loc (loc, x);
2114
2115 return protected_set_expr_location_unshare (x, loc);
2116 }
2117
2118 /* Given a tree comparison code, return the code that is the logical inverse.
2119 It is generally not safe to do this for floating-point comparisons, except
2120 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2121 ERROR_MARK in this case. */
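/* For example, with NaNs honored the inverse of LT_EXPR is UNGE_EXPR
   rather than GE_EXPR, since !(x < y) must also be true when either
   operand is a NaN.  */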
2122
2123 enum tree_code
2124 invert_tree_comparison (enum tree_code code, bool honor_nans)
2125 {
2126 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2127 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2128 return ERROR_MARK;
2129
2130 switch (code)
2131 {
2132 case EQ_EXPR:
2133 return NE_EXPR;
2134 case NE_EXPR:
2135 return EQ_EXPR;
2136 case GT_EXPR:
2137 return honor_nans ? UNLE_EXPR : LE_EXPR;
2138 case GE_EXPR:
2139 return honor_nans ? UNLT_EXPR : LT_EXPR;
2140 case LT_EXPR:
2141 return honor_nans ? UNGE_EXPR : GE_EXPR;
2142 case LE_EXPR:
2143 return honor_nans ? UNGT_EXPR : GT_EXPR;
2144 case LTGT_EXPR:
2145 return UNEQ_EXPR;
2146 case UNEQ_EXPR:
2147 return LTGT_EXPR;
2148 case UNGT_EXPR:
2149 return LE_EXPR;
2150 case UNGE_EXPR:
2151 return LT_EXPR;
2152 case UNLT_EXPR:
2153 return GE_EXPR;
2154 case UNLE_EXPR:
2155 return GT_EXPR;
2156 case ORDERED_EXPR:
2157 return UNORDERED_EXPR;
2158 case UNORDERED_EXPR:
2159 return ORDERED_EXPR;
2160 default:
2161 gcc_unreachable ();
2162 }
2163 }
2164
2165 /* Similar, but return the comparison that results if the operands are
2166 swapped. This is safe for floating-point. */
2167
2168 enum tree_code
2169 swap_tree_comparison (enum tree_code code)
2170 {
2171 switch (code)
2172 {
2173 case EQ_EXPR:
2174 case NE_EXPR:
2175 case ORDERED_EXPR:
2176 case UNORDERED_EXPR:
2177 case LTGT_EXPR:
2178 case UNEQ_EXPR:
2179 return code;
2180 case GT_EXPR:
2181 return LT_EXPR;
2182 case GE_EXPR:
2183 return LE_EXPR;
2184 case LT_EXPR:
2185 return GT_EXPR;
2186 case LE_EXPR:
2187 return GE_EXPR;
2188 case UNGT_EXPR:
2189 return UNLT_EXPR;
2190 case UNGE_EXPR:
2191 return UNLE_EXPR;
2192 case UNLT_EXPR:
2193 return UNGT_EXPR;
2194 case UNLE_EXPR:
2195 return UNGE_EXPR;
2196 default:
2197 gcc_unreachable ();
2198 }
2199 }
2200
2201
2202 /* Convert a comparison tree code from an enum tree_code representation
2203 into a compcode bit-based encoding. This function is the inverse of
2204 compcode_to_comparison. */
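/* In this encoding the LT, EQ and GT conditions occupy separate bits,
   so COMPCODE_LE (3) is simply COMPCODE_LT (1) | COMPCODE_EQ (2);
   ANDing or ORing two compcodes therefore computes the conjunction or
   disjunction of the corresponding predicates directly.  */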
2205
2206 static enum comparison_code
2207 comparison_to_compcode (enum tree_code code)
2208 {
2209 switch (code)
2210 {
2211 case LT_EXPR:
2212 return COMPCODE_LT;
2213 case EQ_EXPR:
2214 return COMPCODE_EQ;
2215 case LE_EXPR:
2216 return COMPCODE_LE;
2217 case GT_EXPR:
2218 return COMPCODE_GT;
2219 case NE_EXPR:
2220 return COMPCODE_NE;
2221 case GE_EXPR:
2222 return COMPCODE_GE;
2223 case ORDERED_EXPR:
2224 return COMPCODE_ORD;
2225 case UNORDERED_EXPR:
2226 return COMPCODE_UNORD;
2227 case UNLT_EXPR:
2228 return COMPCODE_UNLT;
2229 case UNEQ_EXPR:
2230 return COMPCODE_UNEQ;
2231 case UNLE_EXPR:
2232 return COMPCODE_UNLE;
2233 case UNGT_EXPR:
2234 return COMPCODE_UNGT;
2235 case LTGT_EXPR:
2236 return COMPCODE_LTGT;
2237 case UNGE_EXPR:
2238 return COMPCODE_UNGE;
2239 default:
2240 gcc_unreachable ();
2241 }
2242 }
2243
2244 /* Convert a compcode bit-based encoding of a comparison operator back
2245 to GCC's enum tree_code representation. This function is the
2246 inverse of comparison_to_compcode. */
2247
2248 static enum tree_code
2249 compcode_to_comparison (enum comparison_code code)
2250 {
2251 switch (code)
2252 {
2253 case COMPCODE_LT:
2254 return LT_EXPR;
2255 case COMPCODE_EQ:
2256 return EQ_EXPR;
2257 case COMPCODE_LE:
2258 return LE_EXPR;
2259 case COMPCODE_GT:
2260 return GT_EXPR;
2261 case COMPCODE_NE:
2262 return NE_EXPR;
2263 case COMPCODE_GE:
2264 return GE_EXPR;
2265 case COMPCODE_ORD:
2266 return ORDERED_EXPR;
2267 case COMPCODE_UNORD:
2268 return UNORDERED_EXPR;
2269 case COMPCODE_UNLT:
2270 return UNLT_EXPR;
2271 case COMPCODE_UNEQ:
2272 return UNEQ_EXPR;
2273 case COMPCODE_UNLE:
2274 return UNLE_EXPR;
2275 case COMPCODE_UNGT:
2276 return UNGT_EXPR;
2277 case COMPCODE_LTGT:
2278 return LTGT_EXPR;
2279 case COMPCODE_UNGE:
2280 return UNGE_EXPR;
2281 default:
2282 gcc_unreachable ();
2283 }
2284 }
2285
2286 /* Return a tree for the comparison which is the combination of
2287 doing the AND or OR (depending on CODE) of the two operations LCODE
2288 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2289 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2290 if this makes the transformation invalid. */
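/* For example, (x < y) || (x == y) on the same operands combines to
   x <= y when that is safe, while under -ftrapping-math
   ORD (x, y) && (x < y) must be left alone: the short-circuited
   original never traps, so folding it to x < y could introduce a trap
   on NaN operands.  */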
2291
2292 tree
2293 combine_comparisons (location_t loc,
2294 enum tree_code code, enum tree_code lcode,
2295 enum tree_code rcode, tree truth_type,
2296 tree ll_arg, tree lr_arg)
2297 {
2298 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2299 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2300 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2301 int compcode;
2302
2303 switch (code)
2304 {
2305 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2306 compcode = lcompcode & rcompcode;
2307 break;
2308
2309 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2310 compcode = lcompcode | rcompcode;
2311 break;
2312
2313 default:
2314 return NULL_TREE;
2315 }
2316
2317 if (!honor_nans)
2318 {
2319 /* Eliminate unordered comparisons, as well as LTGT and ORD
2320 which are not used unless the mode has NaNs. */
2321 compcode &= ~COMPCODE_UNORD;
2322 if (compcode == COMPCODE_LTGT)
2323 compcode = COMPCODE_NE;
2324 else if (compcode == COMPCODE_ORD)
2325 compcode = COMPCODE_TRUE;
2326 }
2327 else if (flag_trapping_math)
2328 {
2329 /* Check that the original operation and the optimized ones will trap
2330 under the same condition. */
2331 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2332 && (lcompcode != COMPCODE_EQ)
2333 && (lcompcode != COMPCODE_ORD);
2334 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2335 && (rcompcode != COMPCODE_EQ)
2336 && (rcompcode != COMPCODE_ORD);
2337 bool trap = (compcode & COMPCODE_UNORD) == 0
2338 && (compcode != COMPCODE_EQ)
2339 && (compcode != COMPCODE_ORD);
2340
2341 /* In a short-circuited boolean expression the LHS might be
2342 such that the RHS, if evaluated, will never trap. For
2343 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2344 if neither x nor y is NaN. (This is a mixed blessing: for
2345 example, the expression above will never trap, hence
2346 optimizing it to x < y would be invalid). */
2347 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2348 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2349 rtrap = false;
2350
2351 /* If the comparison was short-circuited, and only the RHS
2352 trapped, we may now generate a spurious trap. */
2353 if (rtrap && !ltrap
2354 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2355 return NULL_TREE;
2356
2357 /* If we changed the conditions that cause a trap, we lose. */
2358 if ((ltrap || rtrap) != trap)
2359 return NULL_TREE;
2360 }
2361
2362 if (compcode == COMPCODE_TRUE)
2363 return constant_boolean_node (true, truth_type);
2364 else if (compcode == COMPCODE_FALSE)
2365 return constant_boolean_node (false, truth_type);
2366 else
2367 {
2368 enum tree_code tcode;
2369
2370 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2371 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2372 }
2373 }
2374
2375 /* Return nonzero if two operands (typically of the same tree node)
2376 are necessarily equal. If either argument has side-effects this
2377 function returns zero. FLAGS modifies behavior as follows:
2378
2379 If OEP_ONLY_CONST is set, only return nonzero for constants.
2380 This function tests whether the operands are indistinguishable;
2381 it does not test whether they are equal using C's == operation.
2382 The distinction is important for IEEE floating point, because
2383 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2384 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2385
2386 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2387 even though it may hold multiple values during a function.
2388 This is because a GCC tree node guarantees that nothing else is
2389 executed between the evaluation of its "operands" (which may often
2390 be evaluated in arbitrary order). Hence if the operands themselves
2391 have no side effects, the VAR_DECLs, PARM_DECLs etc. must hold the
2392 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2393 unset means assuming isochronic (or instantaneous) tree equivalence.
2394 Unless comparing arbitrary expression trees, such as from different
2395 statements, this flag can usually be left unset.
2396
2397 If OEP_PURE_SAME is set, then pure functions with identical arguments
2398 are considered the same. It is used when the caller has other ways
2399 to ensure that global memory is unchanged in between. */
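/* For instance, operand_equal_p (a + b, b + a, 0) returns nonzero
   because PLUS_EXPR is commutative and the swapped operands match,
   whereas an expression with side effects compares unequal even to
   itself unless both arguments are the same SAVE_EXPR.  */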
2400
2401 int
2402 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2403 {
2404 /* If either is ERROR_MARK, they aren't equal. */
2405 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2406 || TREE_TYPE (arg0) == error_mark_node
2407 || TREE_TYPE (arg1) == error_mark_node)
2408 return 0;
2409
2410 /* Similar, if either does not have a type (like a released SSA name),
2411 they aren't equal. */
2412 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2413 return 0;
2414
2415 /* Check equality of integer constants before bailing out due to
2416 precision differences. */
2417 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2418 return tree_int_cst_equal (arg0, arg1);
2419
2420 /* If both types don't have the same signedness, then we can't consider
2421 them equal. We must check this before the STRIP_NOPS calls
2422 because they may change the signedness of the arguments. As pointers
2423 strictly don't have a signedness, require either two pointers or
2424 two non-pointers as well. */
2425 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2426 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2427 return 0;
2428
2429 /* We cannot consider pointers to different address spaces equal. */
2430 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2431 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2432 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2433 return 0;
2434
2435 /* If both types don't have the same precision, then it is not safe
2436 to strip NOPs. */
2437 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2438 return 0;
2439
2440 STRIP_NOPS (arg0);
2441 STRIP_NOPS (arg1);
2442
2443 /* In case both args are comparisons but with different comparison
2444 code, try to swap the comparison operands of one arg to produce
2445 a match and compare that variant. */
2446 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2447 && COMPARISON_CLASS_P (arg0)
2448 && COMPARISON_CLASS_P (arg1))
2449 {
2450 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2451
2452 if (TREE_CODE (arg0) == swap_code)
2453 return operand_equal_p (TREE_OPERAND (arg0, 0),
2454 TREE_OPERAND (arg1, 1), flags)
2455 && operand_equal_p (TREE_OPERAND (arg0, 1),
2456 TREE_OPERAND (arg1, 0), flags);
2457 }
2458
2459 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2460 /* This is needed for conversions and for COMPONENT_REF.
2461 Might as well play it safe and always test this. */
2462 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2463 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2464 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2465 return 0;
2466
2467 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2468 We don't care about side effects in that case because the SAVE_EXPR
2469 takes care of that for us. In all other cases, two expressions are
2470 equal if they have no side effects. If we have two identical
2471 expressions with side effects that should be treated the same due
2472 to the only side effects being identical SAVE_EXPR's, that will
2473 be detected in the recursive calls below.
2474 If we are taking an invariant address of two identical objects
2475 they are necessarily equal as well. */
2476 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2477 && (TREE_CODE (arg0) == SAVE_EXPR
2478 || (flags & OEP_CONSTANT_ADDRESS_OF)
2479 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2480 return 1;
2481
2482 /* Next handle constant cases, those for which we can return 1 even
2483 if ONLY_CONST is set. */
2484 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2485 switch (TREE_CODE (arg0))
2486 {
2487 case INTEGER_CST:
2488 return tree_int_cst_equal (arg0, arg1);
2489
2490 case FIXED_CST:
2491 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2492 TREE_FIXED_CST (arg1));
2493
2494 case REAL_CST:
2495 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2496 TREE_REAL_CST (arg1)))
2497 return 1;
2498
2499
2500 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2501 {
2502 /* If we do not distinguish between signed and unsigned zero,
2503 consider them equal. */
2504 if (real_zerop (arg0) && real_zerop (arg1))
2505 return 1;
2506 }
2507 return 0;
2508
2509 case VECTOR_CST:
2510 {
2511 unsigned i;
2512
2513 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2514 return 0;
2515
2516 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2517 {
2518 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2519 VECTOR_CST_ELT (arg1, i), flags))
2520 return 0;
2521 }
2522 return 1;
2523 }
2524
2525 case COMPLEX_CST:
2526 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2527 flags)
2528 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2529 flags));
2530
2531 case STRING_CST:
2532 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2533 && ! memcmp (TREE_STRING_POINTER (arg0),
2534 TREE_STRING_POINTER (arg1),
2535 TREE_STRING_LENGTH (arg0)));
2536
2537 case ADDR_EXPR:
2538 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2539 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2540 ? OEP_CONSTANT_ADDRESS_OF : 0);
2541 default:
2542 break;
2543 }
2544
2545 if (flags & OEP_ONLY_CONST)
2546 return 0;
2547
2548 /* Define macros to test an operand from arg0 and arg1 for equality and a
2549 variant that allows null and views null as being different from any
2550 non-null value. In the latter case, if either is null, then both
2551 must be; otherwise, do the normal comparison. */
2552 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2553 TREE_OPERAND (arg1, N), flags)
2554
2555 #define OP_SAME_WITH_NULL(N) \
2556 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2557 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2558
2559 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2560 {
2561 case tcc_unary:
2562 /* Two conversions are equal only if signedness and modes match. */
2563 switch (TREE_CODE (arg0))
2564 {
2565 CASE_CONVERT:
2566 case FIX_TRUNC_EXPR:
2567 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2568 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2569 return 0;
2570 break;
2571 default:
2572 break;
2573 }
2574
2575 return OP_SAME (0);
2576
2577
2578 case tcc_comparison:
2579 case tcc_binary:
2580 if (OP_SAME (0) && OP_SAME (1))
2581 return 1;
2582
2583 /* For commutative ops, allow the other order. */
2584 return (commutative_tree_code (TREE_CODE (arg0))
2585 && operand_equal_p (TREE_OPERAND (arg0, 0),
2586 TREE_OPERAND (arg1, 1), flags)
2587 && operand_equal_p (TREE_OPERAND (arg0, 1),
2588 TREE_OPERAND (arg1, 0), flags));
2589
2590 case tcc_reference:
2591 /* If either of the pointer (or reference) expressions we are
2592 dereferencing contain a side effect, these cannot be equal,
2593 but their addresses can be. */
2594 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2595 && (TREE_SIDE_EFFECTS (arg0)
2596 || TREE_SIDE_EFFECTS (arg1)))
2597 return 0;
2598
2599 switch (TREE_CODE (arg0))
2600 {
2601 case INDIRECT_REF:
2602 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2603 return OP_SAME (0);
2604
2605 case REALPART_EXPR:
2606 case IMAGPART_EXPR:
2607 return OP_SAME (0);
2608
2609 case TARGET_MEM_REF:
2610 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2611 /* Require equal extra operands and then fall through to MEM_REF
2612 handling of the two common operands. */
2613 if (!OP_SAME_WITH_NULL (2)
2614 || !OP_SAME_WITH_NULL (3)
2615 || !OP_SAME_WITH_NULL (4))
2616 return 0;
2617 /* Fallthru. */
2618 case MEM_REF:
2619 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2620 /* Require equal access sizes, and similar pointer types.
2621 We can have incomplete types for array references of
2622 variable-sized arrays from the Fortran frontend
2623 though. */
2624 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2625 || (TYPE_SIZE (TREE_TYPE (arg0))
2626 && TYPE_SIZE (TREE_TYPE (arg1))
2627 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2628 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2629 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2630 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2631 && OP_SAME (0) && OP_SAME (1));
2632
2633 case ARRAY_REF:
2634 case ARRAY_RANGE_REF:
2635 /* Operands 2 and 3 may be null.
2636 Compare the array index by value if it is constant first as we
2637 may have different types but same value here. */
2638 if (!OP_SAME (0))
2639 return 0;
2640 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2641 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2642 TREE_OPERAND (arg1, 1))
2643 || OP_SAME (1))
2644 && OP_SAME_WITH_NULL (2)
2645 && OP_SAME_WITH_NULL (3));
2646
2647 case COMPONENT_REF:
2648 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2649 may be NULL when we're called to compare MEM_EXPRs. */
2650 if (!OP_SAME_WITH_NULL (0))
2651 return 0;
2652 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2653 return OP_SAME (1) && OP_SAME_WITH_NULL (2);
2654
2655 case BIT_FIELD_REF:
2656 if (!OP_SAME (0))
2657 return 0;
2658 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2659 return OP_SAME (1) && OP_SAME (2);
2660
2661 default:
2662 return 0;
2663 }
2664
2665 case tcc_expression:
2666 switch (TREE_CODE (arg0))
2667 {
2668 case ADDR_EXPR:
2669 case TRUTH_NOT_EXPR:
2670 return OP_SAME (0);
2671
2672 case TRUTH_ANDIF_EXPR:
2673 case TRUTH_ORIF_EXPR:
2674 return OP_SAME (0) && OP_SAME (1);
2675
2676 case FMA_EXPR:
2677 case WIDEN_MULT_PLUS_EXPR:
2678 case WIDEN_MULT_MINUS_EXPR:
2679 if (!OP_SAME (2))
2680 return 0;
2681 /* The multiplication operands are commutative. */
2682 /* FALLTHRU */
2683
2684 case TRUTH_AND_EXPR:
2685 case TRUTH_OR_EXPR:
2686 case TRUTH_XOR_EXPR:
2687 if (OP_SAME (0) && OP_SAME (1))
2688 return 1;
2689
2690 /* Otherwise take into account this is a commutative operation. */
2691 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2692 TREE_OPERAND (arg1, 1), flags)
2693 && operand_equal_p (TREE_OPERAND (arg0, 1),
2694 TREE_OPERAND (arg1, 0), flags));
2695
2696 case COND_EXPR:
2697 case VEC_COND_EXPR:
2698 case DOT_PROD_EXPR:
2699 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2700
2701 default:
2702 return 0;
2703 }
2704
2705 case tcc_vl_exp:
2706 switch (TREE_CODE (arg0))
2707 {
2708 case CALL_EXPR:
2709 /* If the CALL_EXPRs call different functions, then they
2710 clearly cannot be equal. */
2711 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2712 flags))
2713 return 0;
2714
2715 {
2716 unsigned int cef = call_expr_flags (arg0);
2717 if (flags & OEP_PURE_SAME)
2718 cef &= ECF_CONST | ECF_PURE;
2719 else
2720 cef &= ECF_CONST;
2721 if (!cef)
2722 return 0;
2723 }
2724
2725 /* Now see if all the arguments are the same. */
2726 {
2727 const_call_expr_arg_iterator iter0, iter1;
2728 const_tree a0, a1;
2729 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2730 a1 = first_const_call_expr_arg (arg1, &iter1);
2731 a0 && a1;
2732 a0 = next_const_call_expr_arg (&iter0),
2733 a1 = next_const_call_expr_arg (&iter1))
2734 if (! operand_equal_p (a0, a1, flags))
2735 return 0;
2736
2737 /* If we get here and both argument lists are exhausted
2738 then the CALL_EXPRs are equal. */
2739 return ! (a0 || a1);
2740 }
2741 default:
2742 return 0;
2743 }
2744
2745 case tcc_declaration:
2746 /* Consider __builtin_sqrt equal to sqrt. */
2747 return (TREE_CODE (arg0) == FUNCTION_DECL
2748 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2749 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2750 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2751
2752 default:
2753 return 0;
2754 }
2755
2756 #undef OP_SAME
2757 #undef OP_SAME_WITH_NULL
2758 }
2759
2760 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2761 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2762
2763 When in doubt, return 0. */
2764
2765 static int
2766 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2767 {
2768 int unsignedp1, unsignedpo;
2769 tree primarg0, primarg1, primother;
2770 unsigned int correct_width;
2771
2772 if (operand_equal_p (arg0, arg1, 0))
2773 return 1;
2774
2775 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2776 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2777 return 0;
2778
2779 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2780 and see if the inner values are the same. This removes any
2781 signedness comparison, which doesn't matter here. */
2782 primarg0 = arg0, primarg1 = arg1;
2783 STRIP_NOPS (primarg0);
2784 STRIP_NOPS (primarg1);
2785 if (operand_equal_p (primarg0, primarg1, 0))
2786 return 1;
2787
2788 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2789 actual comparison operand, ARG0.
2790
2791 First throw away any conversions to wider types
2792 already present in the operands. */
2793
2794 primarg1 = get_narrower (arg1, &unsignedp1);
2795 primother = get_narrower (other, &unsignedpo);
2796
2797 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2798 if (unsignedp1 == unsignedpo
2799 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2800 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2801 {
2802 tree type = TREE_TYPE (arg0);
2803
2804 /* Make sure shorter operand is extended the right way
2805 to match the longer operand. */
2806 primarg1 = fold_convert (signed_or_unsigned_type_for
2807 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2808
2809 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2810 return 1;
2811 }
2812
2813 return 0;
2814 }
2815
2816 /* See if ARG is an expression that is either a comparison or is performing
2817 arithmetic on comparisons. The comparisons must only be comparing
2818 two different values, which will be stored in *CVAL1 and *CVAL2; if
2819 they are nonzero it means that some operands have already been found.
2820 No variables may be used anywhere else in the expression except in the
2821 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2822 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2823
2824 If this is true, return 1. Otherwise, return zero. */
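/* For example, (a < b) && !(a == b) satisfies this predicate with
   *CVAL1 == a and *CVAL2 == b, whereas (a < b) && (b < c) does not,
   since it involves three distinct values.  */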
2825
2826 static int
2827 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2828 {
2829 enum tree_code code = TREE_CODE (arg);
2830 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2831
2832 /* We can handle some of the tcc_expression cases here. */
2833 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2834 tclass = tcc_unary;
2835 else if (tclass == tcc_expression
2836 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2837 || code == COMPOUND_EXPR))
2838 tclass = tcc_binary;
2839
2840 else if (tclass == tcc_expression && code == SAVE_EXPR
2841 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2842 {
2843 /* If we've already found a CVAL1 or CVAL2, this expression is
2844 too complex to handle. */
2845 if (*cval1 || *cval2)
2846 return 0;
2847
2848 tclass = tcc_unary;
2849 *save_p = 1;
2850 }
2851
2852 switch (tclass)
2853 {
2854 case tcc_unary:
2855 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2856
2857 case tcc_binary:
2858 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2859 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2860 cval1, cval2, save_p));
2861
2862 case tcc_constant:
2863 return 1;
2864
2865 case tcc_expression:
2866 if (code == COND_EXPR)
2867 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2868 cval1, cval2, save_p)
2869 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2870 cval1, cval2, save_p)
2871 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2872 cval1, cval2, save_p));
2873 return 0;
2874
2875 case tcc_comparison:
2876 /* First see if we can handle the first operand, then the second. For
2877 the second operand, we know *CVAL1 can't be zero. It must be that
2878 one side of the comparison is each of the values; test for the
2879 case where this isn't true by failing if the two operands
2880 are the same. */
2881
2882 if (operand_equal_p (TREE_OPERAND (arg, 0),
2883 TREE_OPERAND (arg, 1), 0))
2884 return 0;
2885
2886 if (*cval1 == 0)
2887 *cval1 = TREE_OPERAND (arg, 0);
2888 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2889 ;
2890 else if (*cval2 == 0)
2891 *cval2 = TREE_OPERAND (arg, 0);
2892 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2893 ;
2894 else
2895 return 0;
2896
2897 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2898 ;
2899 else if (*cval2 == 0)
2900 *cval2 = TREE_OPERAND (arg, 1);
2901 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2902 ;
2903 else
2904 return 0;
2905
2906 return 1;
2907
2908 default:
2909 return 0;
2910 }
2911 }
2912
2913 /* ARG is a tree that is known to contain just arithmetic operations and
2914 comparisons. Evaluate the operations in the tree substituting NEW0 for
2915 any occurrence of OLD0 as an operand of a comparison and likewise for
2916 NEW1 and OLD1. */
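/* For example, with OLD0 == a, NEW0 == x, OLD1 == b and NEW1 == y,
   the tree (a < b) && !(a == b) is rewritten to (x < y) && !(x == y),
   rebuilding each node with fold_build*_loc as it goes.  */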
2917
2918 static tree
2919 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2920 tree old1, tree new1)
2921 {
2922 tree type = TREE_TYPE (arg);
2923 enum tree_code code = TREE_CODE (arg);
2924 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2925
2926 /* We can handle some of the tcc_expression cases here. */
2927 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2928 tclass = tcc_unary;
2929 else if (tclass == tcc_expression
2930 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2931 tclass = tcc_binary;
2932
2933 switch (tclass)
2934 {
2935 case tcc_unary:
2936 return fold_build1_loc (loc, code, type,
2937 eval_subst (loc, TREE_OPERAND (arg, 0),
2938 old0, new0, old1, new1));
2939
2940 case tcc_binary:
2941 return fold_build2_loc (loc, code, type,
2942 eval_subst (loc, TREE_OPERAND (arg, 0),
2943 old0, new0, old1, new1),
2944 eval_subst (loc, TREE_OPERAND (arg, 1),
2945 old0, new0, old1, new1));
2946
2947 case tcc_expression:
2948 switch (code)
2949 {
2950 case SAVE_EXPR:
2951 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2952 old1, new1);
2953
2954 case COMPOUND_EXPR:
2955 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2956 old1, new1);
2957
2958 case COND_EXPR:
2959 return fold_build3_loc (loc, code, type,
2960 eval_subst (loc, TREE_OPERAND (arg, 0),
2961 old0, new0, old1, new1),
2962 eval_subst (loc, TREE_OPERAND (arg, 1),
2963 old0, new0, old1, new1),
2964 eval_subst (loc, TREE_OPERAND (arg, 2),
2965 old0, new0, old1, new1));
2966 default:
2967 break;
2968 }
2969 /* Fall through - ??? */
2970
2971 case tcc_comparison:
2972 {
2973 tree arg0 = TREE_OPERAND (arg, 0);
2974 tree arg1 = TREE_OPERAND (arg, 1);
2975
2976 /* We need to check both for exact equality and tree equality. The
2977 former will be true if the operand has a side-effect. In that
2978 case, we know the operand occurred exactly once. */
2979
2980 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2981 arg0 = new0;
2982 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2983 arg0 = new1;
2984
2985 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2986 arg1 = new0;
2987 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2988 arg1 = new1;
2989
2990 return fold_build2_loc (loc, code, type, arg0, arg1);
2991 }
2992
2993 default:
2994 return arg;
2995 }
2996 }
2997
2998 /* Return a tree for the case when the result of an expression is RESULT
2999 converted to TYPE and OMITTED was previously an operand of the expression
3000 but is now not needed (e.g., we folded OMITTED * 0).
3001
3002 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3003 the conversion of RESULT to TYPE. */
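/* For example, when f () * 0 is folded to 0, RESULT is 0 and OMITTED
   is the call f (); since the call has side effects, the result here
   is the COMPOUND_EXPR (f (), 0) rather than a bare 0.  */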
3004
3005 tree
3006 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3007 {
3008 tree t = fold_convert_loc (loc, type, result);
3009
3010 /* If the resulting operand is an empty statement, just return the omitted
3011 statement cast to void. */
3012 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3013 return build1_loc (loc, NOP_EXPR, void_type_node,
3014 fold_ignored_result (omitted));
3015
3016 if (TREE_SIDE_EFFECTS (omitted))
3017 return build2_loc (loc, COMPOUND_EXPR, type,
3018 fold_ignored_result (omitted), t);
3019
3020 return non_lvalue_loc (loc, t);
3021 }
3022
3023 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3024
3025 static tree
3026 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3027 tree omitted)
3028 {
3029 tree t = fold_convert_loc (loc, type, result);
3030
3031 /* If the resulting operand is an empty statement, just return the omitted
3032 statement cast to void. */
3033 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3034 return build1_loc (loc, NOP_EXPR, void_type_node,
3035 fold_ignored_result (omitted));
3036
3037 if (TREE_SIDE_EFFECTS (omitted))
3038 return build2_loc (loc, COMPOUND_EXPR, type,
3039 fold_ignored_result (omitted), t);
3040
3041 return pedantic_non_lvalue_loc (loc, t);
3042 }
3043
3044 /* Return a tree for the case when the result of an expression is RESULT
3045 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3046 of the expression but are now not needed.
3047
3048 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3049 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3050 evaluated before OMITTED2. Otherwise, if neither has side effects,
3051 just do the conversion of RESULT to TYPE. */
3052
3053 tree
3054 omit_two_operands_loc (location_t loc, tree type, tree result,
3055 tree omitted1, tree omitted2)
3056 {
3057 tree t = fold_convert_loc (loc, type, result);
3058
3059 if (TREE_SIDE_EFFECTS (omitted2))
3060 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3061 if (TREE_SIDE_EFFECTS (omitted1))
3062 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3063
3064 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3065 }
3066
3067
3068 /* Return a simplified tree node for the truth-negation of ARG. This
3069 never alters ARG itself. We assume that ARG is an operation that
3070 returns a truth value (0 or 1).
3071
3072 FIXME: one would think we would fold the result, but it causes
3073 problems with the dominator optimizer. */
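/* For example, a TRUTH_AND_EXPR is negated by De Morgan's law:
   !(a && b) becomes !a || !b, each operand being inverted recursively
   through invert_truthvalue_loc.  */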
3074
3075 tree
3076 fold_truth_not_expr (location_t loc, tree arg)
3077 {
3078 tree type = TREE_TYPE (arg);
3079 enum tree_code code = TREE_CODE (arg);
3080 location_t loc1, loc2;
3081
3082 /* If this is a comparison, we can simply invert it, except for
3083 floating-point non-equality comparisons, in which case we just
3084 enclose a TRUTH_NOT_EXPR around what we have. */
3085
3086 if (TREE_CODE_CLASS (code) == tcc_comparison)
3087 {
3088 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3089 if (FLOAT_TYPE_P (op_type)
3090 && flag_trapping_math
3091 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3092 && code != NE_EXPR && code != EQ_EXPR)
3093 return NULL_TREE;
3094
3095 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3096 if (code == ERROR_MARK)
3097 return NULL_TREE;
3098
3099 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3100 TREE_OPERAND (arg, 1));
3101 }
3102
3103 switch (code)
3104 {
3105 case INTEGER_CST:
3106 return constant_boolean_node (integer_zerop (arg), type);
3107
3108 case TRUTH_AND_EXPR:
3109 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3110 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3111 return build2_loc (loc, TRUTH_OR_EXPR, type,
3112 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3113 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3114
3115 case TRUTH_OR_EXPR:
3116 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3117 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3118 return build2_loc (loc, TRUTH_AND_EXPR, type,
3119 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3120 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3121
3122 case TRUTH_XOR_EXPR:
3123 /* Here we can invert either operand. We invert the first operand
3124 unless the second operand is a TRUTH_NOT_EXPR in which case our
3125 result is the XOR of the first operand with the inside of the
3126 negation of the second operand. */
3127
3128 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3129 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3130 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3131 else
3132 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3133 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3134 TREE_OPERAND (arg, 1));
3135
3136 case TRUTH_ANDIF_EXPR:
3137 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3138 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3139 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3140 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3141 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3142
3143 case TRUTH_ORIF_EXPR:
3144 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3145 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3146 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3147 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3148 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3149
3150 case TRUTH_NOT_EXPR:
3151 return TREE_OPERAND (arg, 0);
3152
3153 case COND_EXPR:
3154 {
3155 tree arg1 = TREE_OPERAND (arg, 1);
3156 tree arg2 = TREE_OPERAND (arg, 2);
3157
3158 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3159 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3160
3161 /* A COND_EXPR may have a throw as one operand, which
3162 then has void type. Just leave void operands
3163 as they are. */
3164 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3165 VOID_TYPE_P (TREE_TYPE (arg1))
3166 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3167 VOID_TYPE_P (TREE_TYPE (arg2))
3168 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3169 }
3170
3171 case COMPOUND_EXPR:
3172 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3173 return build2_loc (loc, COMPOUND_EXPR, type,
3174 TREE_OPERAND (arg, 0),
3175 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3176
3177 case NON_LVALUE_EXPR:
3178 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3179 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3180
3181 CASE_CONVERT:
3182 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3183 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3184
3185 /* ... fall through ... */
3186
3187 case FLOAT_EXPR:
3188 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3189 return build1_loc (loc, TREE_CODE (arg), type,
3190 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3191
3192 case BIT_AND_EXPR:
3193 if (!integer_onep (TREE_OPERAND (arg, 1)))
3194 return NULL_TREE;
3195 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3196
3197 case SAVE_EXPR:
3198 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3199
3200 case CLEANUP_POINT_EXPR:
3201 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3202 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3203 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3204
3205 default:
3206 return NULL_TREE;
3207 }
3208 }
3209
3210 /* Return a simplified tree node for the truth-negation of ARG. This
3211 never alters ARG itself. We assume that ARG is an operation that
3212 returns a truth value (0 or 1).
3213
3214 FIXME: one would think we would fold the result, but it causes
3215 problems with the dominator optimizer. */
3216
3217 tree
3218 invert_truthvalue_loc (location_t loc, tree arg)
3219 {
3220 tree tem;
3221
3222 if (TREE_CODE (arg) == ERROR_MARK)
3223 return arg;
3224
3225 tem = fold_truth_not_expr (loc, arg);
3226 if (!tem)
3227 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3228
3229 return tem;
3230 }
3231
3232 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3233 operands are another bit-wise operation with a common input. If so,
3234 distribute the bit operations to save an operation and possibly two if
3235 constants are involved. For example, convert
3236 (A | B) & (A | C) into A | (B & C)
3237 Further simplification will occur if B and C are constants.
3238
3239 If this optimization cannot be done, 0 will be returned. */
3240
3241 static tree
3242 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3243 tree arg0, tree arg1)
3244 {
3245 tree common;
3246 tree left, right;
3247
3248 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3249 || TREE_CODE (arg0) == code
3250 || (TREE_CODE (arg0) != BIT_AND_EXPR
3251 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3252 return 0;
3253
3254 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3255 {
3256 common = TREE_OPERAND (arg0, 0);
3257 left = TREE_OPERAND (arg0, 1);
3258 right = TREE_OPERAND (arg1, 1);
3259 }
3260 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3261 {
3262 common = TREE_OPERAND (arg0, 0);
3263 left = TREE_OPERAND (arg0, 1);
3264 right = TREE_OPERAND (arg1, 0);
3265 }
3266 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3267 {
3268 common = TREE_OPERAND (arg0, 1);
3269 left = TREE_OPERAND (arg0, 0);
3270 right = TREE_OPERAND (arg1, 1);
3271 }
3272 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3273 {
3274 common = TREE_OPERAND (arg0, 1);
3275 left = TREE_OPERAND (arg0, 0);
3276 right = TREE_OPERAND (arg1, 0);
3277 }
3278 else
3279 return 0;
3280
3281 common = fold_convert_loc (loc, type, common);
3282 left = fold_convert_loc (loc, type, left);
3283 right = fold_convert_loc (loc, type, right);
3284 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3285 fold_build2_loc (loc, code, type, left, right));
3286 }
3287
3288 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3289 with code CODE. This optimization is unsafe. */
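/* For example, x/3.0 + y/3.0 becomes (x + y)/3.0 and x/c1 + x/c2
   becomes x * (1/c1 + 1/c2); both may round differently under IEEE
   arithmetic, which is why the transformation is considered unsafe.  */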
3290 static tree
3291 distribute_real_division (location_t loc, enum tree_code code, tree type,
3292 tree arg0, tree arg1)
3293 {
3294 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3295 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3296
3297 /* (A / C) +- (B / C) -> (A +- B) / C. */
3298 if (mul0 == mul1
3299 && operand_equal_p (TREE_OPERAND (arg0, 1),
3300 TREE_OPERAND (arg1, 1), 0))
3301 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3302 fold_build2_loc (loc, code, type,
3303 TREE_OPERAND (arg0, 0),
3304 TREE_OPERAND (arg1, 0)),
3305 TREE_OPERAND (arg0, 1));
3306
3307 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3308 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3309 TREE_OPERAND (arg1, 0), 0)
3310 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3311 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3312 {
3313 REAL_VALUE_TYPE r0, r1;
3314 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3315 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3316 if (!mul0)
3317 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3318 if (!mul1)
3319 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3320 real_arithmetic (&r0, code, &r0, &r1);
3321 return fold_build2_loc (loc, MULT_EXPR, type,
3322 TREE_OPERAND (arg0, 0),
3323 build_real (type, r0));
3324 }
3325
3326 return NULL_TREE;
3327 }
3328
3329 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3330 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3331
3332 static tree
3333 make_bit_field_ref (location_t loc, tree inner, tree type,
3334 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3335 {
3336 tree result, bftype;
3337
3338 if (bitpos == 0)
3339 {
3340 tree size = TYPE_SIZE (TREE_TYPE (inner));
3341 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3342 || POINTER_TYPE_P (TREE_TYPE (inner)))
3343 && host_integerp (size, 0)
3344 && tree_low_cst (size, 0) == bitsize)
3345 return fold_convert_loc (loc, type, inner);
3346 }
3347
3348 bftype = type;
3349 if (TYPE_PRECISION (bftype) != bitsize
3350 || TYPE_UNSIGNED (bftype) == !unsignedp)
3351 bftype = build_nonstandard_integer_type (bitsize, 0);
3352
3353 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3354 size_int (bitsize), bitsize_int (bitpos));
3355
3356 if (bftype != type)
3357 result = fold_convert_loc (loc, type, result);
3358
3359 return result;
3360 }
3361
3362 /* Optimize a bit-field compare.
3363
3364 There are two cases: First is a compare against a constant and the
3365 second is a comparison of two items where the fields are at the same
3366 bit position relative to the start of a chunk (byte, halfword, word)
3367 large enough to contain it. In these cases we can avoid the shift
3368 implicit in bitfield extractions.
3369
3370 For constants, we emit a compare of the shifted constant with the
3371 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3372 compared. For two fields at the same position, we do the ANDs with the
3373 similar mask and compare the result of the ANDs.
3374
3375 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3376 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3377 are the left and right operands of the comparison, respectively.
3378
3379 If the optimization described above can be done, we return the resulting
3380 tree. Otherwise we return zero. */
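/* As a sketch of the constant case: given struct { unsigned f : 3; } s,
   a test such as s.f == 5 can become (WORD & MASK) == (5 << SHIFT),
   where WORD is a mode-sized load covering the field, MASK selects the
   field's bits and SHIFT is the field's position, avoiding the shift a
   plain bit-field extraction would need.  */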
3381
3382 static tree
3383 optimize_bit_field_compare (location_t loc, enum tree_code code,
3384 tree compare_type, tree lhs, tree rhs)
3385 {
3386 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3387 tree type = TREE_TYPE (lhs);
3388 tree signed_type, unsigned_type;
3389 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3390 enum machine_mode lmode, rmode, nmode;
3391 int lunsignedp, runsignedp;
3392 int lvolatilep = 0, rvolatilep = 0;
3393 tree linner, rinner = NULL_TREE;
3394 tree mask;
3395 tree offset;
3396
3397 /* In the strict volatile bitfields case, doing code changes here may prevent
3398 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3399 if (flag_strict_volatile_bitfields > 0)
3400 return 0;
3401
3402 /* Get all the information about the extractions being done. If the bit size
3403 is the same as the size of the underlying object, we aren't doing an
3404 extraction at all and so can do nothing. We also don't want to
3405 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3406 then will no longer be able to replace it. */
3407 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3408 &lunsignedp, &lvolatilep, false);
3409 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3410 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3411 return 0;
3412
3413 if (!const_p)
3414 {
3415 /* If this is not a constant, we can only do something if bit positions,
3416 sizes, and signedness are the same. */
3417 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3418 &runsignedp, &rvolatilep, false);
3419
3420 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3421 || lunsignedp != runsignedp || offset != 0
3422 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3423 return 0;
3424 }
3425
3426 /* See if we can find a mode to refer to this field. We should be able to,
3427 but fail if we can't. */
3428 if (lvolatilep
3429 && GET_MODE_BITSIZE (lmode) > 0
3430 && flag_strict_volatile_bitfields > 0)
3431 nmode = lmode;
3432 else
3433 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3434 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3435 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3436 TYPE_ALIGN (TREE_TYPE (rinner))),
3437 word_mode, lvolatilep || rvolatilep);
3438 if (nmode == VOIDmode)
3439 return 0;
3440
3441 /* Set signed and unsigned types of the precision of this mode for the
3442 shifts below. */
3443 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3444 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3445
3446 /* Compute the bit position and size for the new reference and our offset
3447 within it. If the new reference is the same size as the original, we
3448 won't optimize anything, so return zero. */
3449 nbitsize = GET_MODE_BITSIZE (nmode);
3450 nbitpos = lbitpos & ~ (nbitsize - 1);
3451 lbitpos -= nbitpos;
3452 if (nbitsize == lbitsize)
3453 return 0;
3454
3455 if (BYTES_BIG_ENDIAN)
3456 lbitpos = nbitsize - lbitsize - lbitpos;
3457
3458 /* Make the mask to be used against the extracted field. */
3459 mask = build_int_cst_type (unsigned_type, -1);
3460 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3461 mask = const_binop (RSHIFT_EXPR, mask,
3462 size_int (nbitsize - lbitsize - lbitpos));
3463
3464 if (! const_p)
3465 /* If not comparing with constant, just rework the comparison
3466 and return. */
3467 return fold_build2_loc (loc, code, compare_type,
3468 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3469 make_bit_field_ref (loc, linner,
3470 unsigned_type,
3471 nbitsize, nbitpos,
3472 1),
3473 mask),
3474 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3475 make_bit_field_ref (loc, rinner,
3476 unsigned_type,
3477 nbitsize, nbitpos,
3478 1),
3479 mask));
3480
3481 /* Otherwise, we are handling the constant case. See if the constant is too
3482 big for the field. Warn and return a tree for 0 (false) if so. We do
3483 this not only for its own sake, but to avoid having to test for this
3484 error case below. If we didn't, we might generate wrong code.
3485
3486 For unsigned fields, the constant shifted right by the field length should
3487 be all zero. For signed fields, the high-order bits should agree with
3488 the sign bit. */
3489
3490 if (lunsignedp)
3491 {
3492 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3493 fold_convert_loc (loc,
3494 unsigned_type, rhs),
3495 size_int (lbitsize))))
3496 {
3497 warning (0, "comparison is always %d due to width of bit-field",
3498 code == NE_EXPR);
3499 return constant_boolean_node (code == NE_EXPR, compare_type);
3500 }
3501 }
3502 else
3503 {
3504 tree tem = const_binop (RSHIFT_EXPR,
3505 fold_convert_loc (loc, signed_type, rhs),
3506 size_int (lbitsize - 1));
3507 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3508 {
3509 warning (0, "comparison is always %d due to width of bit-field",
3510 code == NE_EXPR);
3511 return constant_boolean_node (code == NE_EXPR, compare_type);
3512 }
3513 }
3514
3515 /* Single-bit compares should always be against zero. */
3516 if (lbitsize == 1 && ! integer_zerop (rhs))
3517 {
3518 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3519 rhs = build_int_cst (type, 0);
3520 }
3521
3522 /* Make a new bitfield reference, shift the constant over the
3523 appropriate number of bits and mask it with the computed mask
3524 (in case this was a signed field). If we changed it, make a new one. */
3525 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3526 if (lvolatilep)
3527 {
3528 TREE_SIDE_EFFECTS (lhs) = 1;
3529 TREE_THIS_VOLATILE (lhs) = 1;
3530 }
3531
3532 rhs = const_binop (BIT_AND_EXPR,
3533 const_binop (LSHIFT_EXPR,
3534 fold_convert_loc (loc, unsigned_type, rhs),
3535 size_int (lbitpos)),
3536 mask);
3537
3538 lhs = build2_loc (loc, code, compare_type,
3539 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3540 return lhs;
3541 }
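
/* A minimal source-level sketch of the effect of the folding above (the
   struct, field and constant are illustrative assumptions, and MASK and
   SHIFT stand for the values computed by this function):

     struct s { unsigned a : 3; unsigned b : 5; };
     ...
     p->b == 6
     =>
     (containing_word & MASK) == (6 << SHIFT)

   so the bit-field extraction disappears in favour of a single masked
   compare on the word containing the field.  */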
3542
3543 /* Subroutine for fold_truth_andor_1: decode a field reference.
3544
3545 If EXP is a comparison reference, we return the innermost reference.
3546
3547 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3548 set to the starting bit number.
3549
3550 If the innermost field can be completely contained in a mode-sized
3551 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3552
3553 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3554 otherwise it is not changed.
3555
3556 *PUNSIGNEDP is set to the signedness of the field.
3557
3558 *PMASK is set to the mask used. This is either contained in a
3559 BIT_AND_EXPR or derived from the width of the field.
3560
3561 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3562
3563 Return 0 if this is not a component reference or is one that we can't
3564 do anything with. */
3565
3566 static tree
3567 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3568 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3569 int *punsignedp, int *pvolatilep,
3570 tree *pmask, tree *pand_mask)
3571 {
3572 tree outer_type = 0;
3573 tree and_mask = 0;
3574 tree mask, inner, offset;
3575 tree unsigned_type;
3576 unsigned int precision;
3577
3578 /* All the optimizations using this function assume integer fields.
3579 There are problems with FP fields since the type_for_size call
3580 below can fail for, e.g., XFmode. */
3581 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3582 return 0;
3583
3584 /* We are interested in the bare arrangement of bits, so strip everything
3585 that doesn't affect the machine mode. However, record the type of the
3586 outermost expression if it may matter below. */
3587 if (CONVERT_EXPR_P (exp)
3588 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3589 outer_type = TREE_TYPE (exp);
3590 STRIP_NOPS (exp);
3591
3592 if (TREE_CODE (exp) == BIT_AND_EXPR)
3593 {
3594 and_mask = TREE_OPERAND (exp, 1);
3595 exp = TREE_OPERAND (exp, 0);
3596 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3597 if (TREE_CODE (and_mask) != INTEGER_CST)
3598 return 0;
3599 }
3600
3601 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3602 punsignedp, pvolatilep, false);
3603 if ((inner == exp && and_mask == 0)
3604 || *pbitsize < 0 || offset != 0
3605 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3606 return 0;
3607
3608 /* If the number of bits in the reference is the same as the bitsize of
3609 the outer type, then the outer type gives the signedness. Otherwise
3610 (in case of a small bitfield) the signedness is unchanged. */
3611 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3612 *punsignedp = TYPE_UNSIGNED (outer_type);
3613
3614 /* Compute the mask to access the bitfield. */
3615 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3616 precision = TYPE_PRECISION (unsigned_type);
3617
3618 mask = build_int_cst_type (unsigned_type, -1);
3619
3620 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3621 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3622
3623 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3624 if (and_mask != 0)
3625 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3626 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3627
3628 *pmask = mask;
3629 *pand_mask = and_mask;
3630 return inner;
3631 }
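
/* For example (values illustrative, not target-specific): a 5-bit
   unsigned bit-field yields *PBITSIZE == 5, and the two logical shifts
   leave *PMASK == 0x1f, the five low-order bits; an explicit "& 3"
   around the field would further narrow *PMASK to 0x03.  */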
3632
3633 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3634 bit positions. */
3635
3636 static int
3637 all_ones_mask_p (const_tree mask, int size)
3638 {
3639 tree type = TREE_TYPE (mask);
3640 unsigned int precision = TYPE_PRECISION (type);
3641 tree tmask;
3642
3643 tmask = build_int_cst_type (signed_type_for (type), -1);
3644
3645 return
3646 tree_int_cst_equal (mask,
3647 const_binop (RSHIFT_EXPR,
3648 const_binop (LSHIFT_EXPR, tmask,
3649 size_int (precision - size)),
3650 size_int (precision - size)));
3651 }
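
/* E.g. for SIZE == 8 this asks whether MASK is 0xff; masks such as 0x7f
   or 0x1fe do not qualify.  */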
3652
3653 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3654 represents the sign bit of EXP's type. If EXP represents a sign
3655 or zero extension, also test VAL against the unextended type.
3656 The return value is the (sub)expression whose sign bit is VAL,
3657 or NULL_TREE otherwise. */
3658
3659 static tree
3660 sign_bit_p (tree exp, const_tree val)
3661 {
3662 unsigned HOST_WIDE_INT mask_lo, lo;
3663 HOST_WIDE_INT mask_hi, hi;
3664 int width;
3665 tree t;
3666
3667 /* Tree EXP must have an integral type. */
3668 t = TREE_TYPE (exp);
3669 if (! INTEGRAL_TYPE_P (t))
3670 return NULL_TREE;
3671
3672 /* Tree VAL must be an integer constant. */
3673 if (TREE_CODE (val) != INTEGER_CST
3674 || TREE_OVERFLOW (val))
3675 return NULL_TREE;
3676
3677 width = TYPE_PRECISION (t);
3678 if (width > HOST_BITS_PER_WIDE_INT)
3679 {
3680 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3681 lo = 0;
3682
3683 mask_hi = ((unsigned HOST_WIDE_INT) -1
3684 >> (HOST_BITS_PER_DOUBLE_INT - width));
3685 mask_lo = -1;
3686 }
3687 else
3688 {
3689 hi = 0;
3690 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3691
3692 mask_hi = 0;
3693 mask_lo = ((unsigned HOST_WIDE_INT) -1
3694 >> (HOST_BITS_PER_WIDE_INT - width));
3695 }
3696
3697 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3698 treat VAL as if it were unsigned. */
3699 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3700 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3701 return exp;
3702
3703 /* Handle extension from a narrower type. */
3704 if (TREE_CODE (exp) == NOP_EXPR
3705 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3706 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3707
3708 return NULL_TREE;
3709 }
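
/* For instance, with a 32-bit EXP the matching VAL is 0x80000000 (bits
   beyond the precision are masked off before comparing); for
   (int) (short) X, VAL is also tested against the 16-bit sign bit
   0x8000 of the narrower operand.  */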
3710
3711 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3712 to be evaluated unconditionally. */
3713
3714 static int
3715 simple_operand_p (const_tree exp)
3716 {
3717 /* Strip any conversions that don't change the machine mode. */
3718 STRIP_NOPS (exp);
3719
3720 return (CONSTANT_CLASS_P (exp)
3721 || TREE_CODE (exp) == SSA_NAME
3722 || (DECL_P (exp)
3723 && ! TREE_ADDRESSABLE (exp)
3724 && ! TREE_THIS_VOLATILE (exp)
3725 && ! DECL_NONLOCAL (exp)
3726 /* Don't regard global variables as simple. They may be
3727 allocated in ways unknown to the compiler (shared memory,
3728 #pragma weak, etc). */
3729 && ! TREE_PUBLIC (exp)
3730 && ! DECL_EXTERNAL (exp)
3731 /* Loading a static variable is unduly expensive, but global
3732 registers aren't expensive. */
3733 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3734 }
3735
3736 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3737 to be evaluated unconditionally.
3738 In addition to simple_operand_p, we assume that comparisons, conversions,
3739 and logic-not operations are simple, if their operands are simple, too. */
3740
3741 static bool
3742 simple_operand_p_2 (tree exp)
3743 {
3744 enum tree_code code;
3745
3746 if (TREE_SIDE_EFFECTS (exp)
3747 || tree_could_trap_p (exp))
3748 return false;
3749
3750 while (CONVERT_EXPR_P (exp))
3751 exp = TREE_OPERAND (exp, 0);
3752
3753 code = TREE_CODE (exp);
3754
3755 if (TREE_CODE_CLASS (code) == tcc_comparison)
3756 return (simple_operand_p (TREE_OPERAND (exp, 0))
3757 && simple_operand_p (TREE_OPERAND (exp, 1)));
3758
3759 if (code == TRUTH_NOT_EXPR)
3760 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3761
3762 return simple_operand_p (exp);
3763 }
3764
3765
3766 /* The following functions are subroutines to fold_range_test and allow it to
3767 try to change a logical combination of comparisons into a range test.
3768
3769 For example, both
3770 X == 2 || X == 3 || X == 4 || X == 5
3771 and
3772 X >= 2 && X <= 5
3773 are converted to
3774 (unsigned) (X - 2) <= 3
3775
3776 We describe each set of comparisons as being either inside or outside
3777 a range, using a variable named like IN_P, and then describe the
3778 range with a lower and upper bound. If one of the bounds is omitted,
3779 it represents either the highest or lowest value of the type.
3780
3781 In the comments below, we represent a range by two numbers in brackets
3782 preceded by a "+" to designate being inside that range, or a "-" to
3783 designate being outside that range, so the condition can be inverted by
3784 flipping the prefix. An omitted bound is represented by a "-". For
3785 example, "- [-, 10]" means being outside the range starting at the lowest
3786 possible value and ending at 10, in other words, being greater than 10.
3787 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3788 always false.
3789
3790 We set up things so that the missing bounds are handled in a consistent
3791 manner so neither a missing bound nor "true" and "false" need to be
3792 handled using a special case. */
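
/* Reading the example above in this notation: X >= 2 && X <= 5 is
   + [2, 5], its negation X < 2 || X > 5 is - [2, 5], and the unsigned
   test (unsigned) (X - 2) <= 3 is + [2, 5] rewritten with a zero lower
   bound.  */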
3793
3794 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3795 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3796 and UPPER1_P are nonzero if the respective argument is an upper bound
3797 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3798 must be specified for a comparison. ARG1 will be converted to ARG0's
3799 type if both are specified. */
3800
3801 static tree
3802 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3803 tree arg1, int upper1_p)
3804 {
3805 tree tem;
3806 int result;
3807 int sgn0, sgn1;
3808
3809 /* If neither arg represents infinity, do the normal operation.
3810 Else, if not a comparison, return infinity. Else handle the special
3811 comparison rules. Note that most of the cases below won't occur, but
3812 are handled for consistency. */
3813
3814 if (arg0 != 0 && arg1 != 0)
3815 {
3816 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3817 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3818 STRIP_NOPS (tem);
3819 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3820 }
3821
3822 if (TREE_CODE_CLASS (code) != tcc_comparison)
3823 return 0;
3824
3825 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3826 for neither. In real maths, we cannot assume open ended ranges are
3827 the same. But, this is computer arithmetic, where numbers are finite.
3828 We can therefore make the transformation of any unbounded range with
3829 the value Z, Z being greater than any representable number. This permits
3830 us to treat unbounded ranges as equal. */
3831 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3832 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3833 switch (code)
3834 {
3835 case EQ_EXPR:
3836 result = sgn0 == sgn1;
3837 break;
3838 case NE_EXPR:
3839 result = sgn0 != sgn1;
3840 break;
3841 case LT_EXPR:
3842 result = sgn0 < sgn1;
3843 break;
3844 case LE_EXPR:
3845 result = sgn0 <= sgn1;
3846 break;
3847 case GT_EXPR:
3848 result = sgn0 > sgn1;
3849 break;
3850 case GE_EXPR:
3851 result = sgn0 >= sgn1;
3852 break;
3853 default:
3854 gcc_unreachable ();
3855 }
3856
3857 return constant_boolean_node (result, type);
3858 }
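
/* Under this scheme a finite bound (SGN 0) compares less than a missing
   upper bound (SGN 1) and greater than a missing lower bound (SGN -1):
   e.g. range_binop (LT_EXPR, integer_type_node, five, 0, NULL_TREE, 1),
   with "five" standing for a constant-5 node, yields boolean true.  */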
3859
3860 /* Helper routine for make_range. Perform one step for it, return
3861 new expression if the loop should continue or NULL_TREE if it should
3862 stop. */
3863
3864 tree
3865 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3866 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3867 bool *strict_overflow_p)
3868 {
3869 tree arg0_type = TREE_TYPE (arg0);
3870 tree n_low, n_high, low = *p_low, high = *p_high;
3871 int in_p = *p_in_p, n_in_p;
3872
3873 switch (code)
3874 {
3875 case TRUTH_NOT_EXPR:
3876 /* We can only do something if the range is testing for zero. */
3877 if (low == NULL_TREE || high == NULL_TREE
3878 || ! integer_zerop (low) || ! integer_zerop (high))
3879 return NULL_TREE;
3880 *p_in_p = ! in_p;
3881 return arg0;
3882
3883 case EQ_EXPR: case NE_EXPR:
3884 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3885 /* We can only do something if the range is testing for zero
3886 and if the second operand is an integer constant. Note that
3887 saying something is "in" the range we make is done by
3888 complementing IN_P since it will set in the initial case of
3889 being not equal to zero; "out" is leaving it alone. */
3890 if (low == NULL_TREE || high == NULL_TREE
3891 || ! integer_zerop (low) || ! integer_zerop (high)
3892 || TREE_CODE (arg1) != INTEGER_CST)
3893 return NULL_TREE;
3894
3895 switch (code)
3896 {
3897 case NE_EXPR: /* - [c, c] */
3898 low = high = arg1;
3899 break;
3900 case EQ_EXPR: /* + [c, c] */
3901 in_p = ! in_p, low = high = arg1;
3902 break;
3903 case GT_EXPR: /* - [-, c] */
3904 low = 0, high = arg1;
3905 break;
3906 case GE_EXPR: /* + [c, -] */
3907 in_p = ! in_p, low = arg1, high = 0;
3908 break;
3909 case LT_EXPR: /* - [c, -] */
3910 low = arg1, high = 0;
3911 break;
3912 case LE_EXPR: /* + [-, c] */
3913 in_p = ! in_p, low = 0, high = arg1;
3914 break;
3915 default:
3916 gcc_unreachable ();
3917 }
3918
3919 /* If this is an unsigned comparison, we also know that EXP is
3920 greater than or equal to zero. We base the range tests we make
3921 on that fact, so we record it here so we can parse existing
3922 range tests. We test arg0_type since often the return type
3923 of, e.g. EQ_EXPR, is boolean. */
3924 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3925 {
3926 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3927 in_p, low, high, 1,
3928 build_int_cst (arg0_type, 0),
3929 NULL_TREE))
3930 return NULL_TREE;
3931
3932 in_p = n_in_p, low = n_low, high = n_high;
3933
3934 /* If the high bound is missing, but we have a nonzero low
3935 bound, reverse the range so it goes from zero to the low bound
3936 minus 1. */
3937 if (high == 0 && low && ! integer_zerop (low))
3938 {
3939 in_p = ! in_p;
3940 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3941 integer_one_node, 0);
3942 low = build_int_cst (arg0_type, 0);
3943 }
3944 }
3945
3946 *p_low = low;
3947 *p_high = high;
3948 *p_in_p = in_p;
3949 return arg0;
3950
3951 case NEGATE_EXPR:
3952 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3953 low and high are non-NULL, then normalize will DTRT. */
3954 if (!TYPE_UNSIGNED (arg0_type)
3955 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3956 {
3957 if (low == NULL_TREE)
3958 low = TYPE_MIN_VALUE (arg0_type);
3959 if (high == NULL_TREE)
3960 high = TYPE_MAX_VALUE (arg0_type);
3961 }
3962
3963 /* (-x) IN [a,b] -> x in [-b, -a] */
3964 n_low = range_binop (MINUS_EXPR, exp_type,
3965 build_int_cst (exp_type, 0),
3966 0, high, 1);
3967 n_high = range_binop (MINUS_EXPR, exp_type,
3968 build_int_cst (exp_type, 0),
3969 0, low, 0);
3970 if (n_high != 0 && TREE_OVERFLOW (n_high))
3971 return NULL_TREE;
3972 goto normalize;
3973
3974 case BIT_NOT_EXPR:
3975 /* ~ X -> -X - 1 */
3976 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
3977 build_int_cst (exp_type, 1));
3978
3979 case PLUS_EXPR:
3980 case MINUS_EXPR:
3981 if (TREE_CODE (arg1) != INTEGER_CST)
3982 return NULL_TREE;
3983
3984 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3985 move a constant to the other side. */
3986 if (!TYPE_UNSIGNED (arg0_type)
3987 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3988 return NULL_TREE;
3989
3990 /* If EXP is signed, any overflow in the computation is undefined,
3991 so we don't worry about it so long as our computations on
3992 the bounds don't overflow. For unsigned, overflow is defined
3993 and this is exactly the right thing. */
3994 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3995 arg0_type, low, 0, arg1, 0);
3996 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3997 arg0_type, high, 1, arg1, 0);
3998 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3999 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4000 return NULL_TREE;
4001
4002 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4003 *strict_overflow_p = true;
4004
4005 normalize:
4006 /* Check for an unsigned range which has wrapped around the maximum
4007 value thus making n_high < n_low, and normalize it. */
4008 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4009 {
4010 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4011 integer_one_node, 0);
4012 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4013 integer_one_node, 0);
4014
4015 /* If the range is of the form +/- [ x+1, x ], we won't
4016 be able to normalize it. But then, it represents the
4017 whole range or the empty set, so make it
4018 +/- [ -, - ]. */
4019 if (tree_int_cst_equal (n_low, low)
4020 && tree_int_cst_equal (n_high, high))
4021 low = high = 0;
4022 else
4023 in_p = ! in_p;
4024 }
4025 else
4026 low = n_low, high = n_high;
4027
4028 *p_low = low;
4029 *p_high = high;
4030 *p_in_p = in_p;
4031 return arg0;
4032
4033 CASE_CONVERT:
4034 case NON_LVALUE_EXPR:
4035 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4036 return NULL_TREE;
4037
4038 if (! INTEGRAL_TYPE_P (arg0_type)
4039 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4040 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4041 return NULL_TREE;
4042
4043 n_low = low, n_high = high;
4044
4045 if (n_low != 0)
4046 n_low = fold_convert_loc (loc, arg0_type, n_low);
4047
4048 if (n_high != 0)
4049 n_high = fold_convert_loc (loc, arg0_type, n_high);
4050
4051 /* If we're converting arg0 from an unsigned type to EXP's
4052 signed type, we will be doing the comparison as unsigned.
4053 The tests above have already verified that LOW and HIGH
4054 are both positive.
4055
4056 So we have to ensure that we will handle large unsigned
4057 values the same way that the current signed bounds treat
4058 negative values. */
4059
4060 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4061 {
4062 tree high_positive;
4063 tree equiv_type;
4064 /* For fixed-point modes, we need to pass the saturating flag
4065 as the 2nd parameter. */
4066 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4067 equiv_type
4068 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4069 TYPE_SATURATING (arg0_type));
4070 else
4071 equiv_type
4072 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4073
4074 /* A range without an upper bound is, naturally, unbounded.
4075 Since convert would have cropped a very large value, use
4076 the max value for the destination type. */
4077 high_positive
4078 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4079 : TYPE_MAX_VALUE (arg0_type);
4080
4081 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4082 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4083 fold_convert_loc (loc, arg0_type,
4084 high_positive),
4085 build_int_cst (arg0_type, 1));
4086
4087 /* If the low bound is specified, "and" the range with the
4088 range for which the original unsigned value will be
4089 positive. */
4090 if (low != 0)
4091 {
4092 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4093 1, fold_convert_loc (loc, arg0_type,
4094 integer_zero_node),
4095 high_positive))
4096 return NULL_TREE;
4097
4098 in_p = (n_in_p == in_p);
4099 }
4100 else
4101 {
4102 /* Otherwise, "or" the range with the range of the input
4103 that will be interpreted as negative. */
4104 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4105 1, fold_convert_loc (loc, arg0_type,
4106 integer_zero_node),
4107 high_positive))
4108 return NULL_TREE;
4109
4110 in_p = (in_p != n_in_p);
4111 }
4112 }
4113
4114 *p_low = n_low;
4115 *p_high = n_high;
4116 *p_in_p = in_p;
4117 return arg0;
4118
4119 default:
4120 return NULL_TREE;
4121 }
4122 }
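
/* A worked instance of the PLUS_EXPR case (type and constants assumed
   for illustration): testing (unsigned char) X + 10 against + [0, 250]
   gives n_low == 0 - 10 == 246 and n_high == 250 - 10 == 240 in
   unsigned char; since 240 < 246 the normalize step flips IN_P and
   yields X with the range - [241, 245], and indeed X + 10 wraps outside
   [0, 250] exactly for X in [241, 245].  */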
4123
4124 /* Given EXP, a logical expression, set the range it is testing into
4125 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4126 actually being tested. *PLOW and *PHIGH will be made of the same
4127 type as the returned expression. If EXP is not a comparison, we
4128 will most likely not be returning a useful value and range. Set
4129 *STRICT_OVERFLOW_P to true if the return value is only valid
4130 because signed overflow is undefined; otherwise, do not change
4131 *STRICT_OVERFLOW_P. */
4132
4133 tree
4134 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4135 bool *strict_overflow_p)
4136 {
4137 enum tree_code code;
4138 tree arg0, arg1 = NULL_TREE;
4139 tree exp_type, nexp;
4140 int in_p;
4141 tree low, high;
4142 location_t loc = EXPR_LOCATION (exp);
4143
4144 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4145 and see if we can refine the range. Some of the cases below may not
4146 happen, but it doesn't seem worth worrying about this. We "continue"
4147 the outer loop when we've changed something; otherwise we "break"
4148 the switch, which will "break" the while. */
4149
4150 in_p = 0;
4151 low = high = build_int_cst (TREE_TYPE (exp), 0);
4152
4153 while (1)
4154 {
4155 code = TREE_CODE (exp);
4156 exp_type = TREE_TYPE (exp);
4157 arg0 = NULL_TREE;
4158
4159 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4160 {
4161 if (TREE_OPERAND_LENGTH (exp) > 0)
4162 arg0 = TREE_OPERAND (exp, 0);
4163 if (TREE_CODE_CLASS (code) == tcc_binary
4164 || TREE_CODE_CLASS (code) == tcc_comparison
4165 || (TREE_CODE_CLASS (code) == tcc_expression
4166 && TREE_OPERAND_LENGTH (exp) > 1))
4167 arg1 = TREE_OPERAND (exp, 1);
4168 }
4169 if (arg0 == NULL_TREE)
4170 break;
4171
4172 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4173 &high, &in_p, strict_overflow_p);
4174 if (nexp == NULL_TREE)
4175 break;
4176 exp = nexp;
4177 }
4178
4179 /* If EXP is a constant, we can evaluate whether this is true or false. */
4180 if (TREE_CODE (exp) == INTEGER_CST)
4181 {
4182 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4183 exp, 0, low, 0))
4184 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4185 exp, 1, high, 1)));
4186 low = high = 0;
4187 exp = 0;
4188 }
4189
4190 *pin_p = in_p, *plow = low, *phigh = high;
4191 return exp;
4192 }
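
/* For example, applied to X > 5 this returns X with *PIN_P == 0,
   *PLOW == NULL and *PHIGH == 5, i.e. the range - [-, 5] in the
   notation introduced above.  */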
4193
4194 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4195 type, TYPE, return an expression to test if EXP is in (or out of, depending
4196 on IN_P) the range. Return 0 if the test couldn't be created. */
4197
4198 tree
4199 build_range_check (location_t loc, tree type, tree exp, int in_p,
4200 tree low, tree high)
4201 {
4202 tree etype = TREE_TYPE (exp), value;
4203
4204 #ifdef HAVE_canonicalize_funcptr_for_compare
4205 /* Disable this optimization for function pointer expressions
4206 on targets that require function pointer canonicalization. */
4207 if (HAVE_canonicalize_funcptr_for_compare
4208 && TREE_CODE (etype) == POINTER_TYPE
4209 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4210 return NULL_TREE;
4211 #endif
4212
4213 if (! in_p)
4214 {
4215 value = build_range_check (loc, type, exp, 1, low, high);
4216 if (value != 0)
4217 return invert_truthvalue_loc (loc, value);
4218
4219 return 0;
4220 }
4221
4222 if (low == 0 && high == 0)
4223 return build_int_cst (type, 1);
4224
4225 if (low == 0)
4226 return fold_build2_loc (loc, LE_EXPR, type, exp,
4227 fold_convert_loc (loc, etype, high));
4228
4229 if (high == 0)
4230 return fold_build2_loc (loc, GE_EXPR, type, exp,
4231 fold_convert_loc (loc, etype, low));
4232
4233 if (operand_equal_p (low, high, 0))
4234 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4235 fold_convert_loc (loc, etype, low));
4236
4237 if (integer_zerop (low))
4238 {
4239 if (! TYPE_UNSIGNED (etype))
4240 {
4241 etype = unsigned_type_for (etype);
4242 high = fold_convert_loc (loc, etype, high);
4243 exp = fold_convert_loc (loc, etype, exp);
4244 }
4245 return build_range_check (loc, type, exp, 1, 0, high);
4246 }
4247
4248 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4249 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4250 {
4251 unsigned HOST_WIDE_INT lo;
4252 HOST_WIDE_INT hi;
4253 int prec;
4254
4255 prec = TYPE_PRECISION (etype);
4256 if (prec <= HOST_BITS_PER_WIDE_INT)
4257 {
4258 hi = 0;
4259 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4260 }
4261 else
4262 {
4263 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4264 lo = (unsigned HOST_WIDE_INT) -1;
4265 }
4266
4267 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4268 {
4269 if (TYPE_UNSIGNED (etype))
4270 {
4271 tree signed_etype = signed_type_for (etype);
4272 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4273 etype
4274 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4275 else
4276 etype = signed_etype;
4277 exp = fold_convert_loc (loc, etype, exp);
4278 }
4279 return fold_build2_loc (loc, GT_EXPR, type, exp,
4280 build_int_cst (etype, 0));
4281 }
4282 }
4283
4284 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4285 This requires wrap-around arithmetic for the type of the expression.
4286 First make sure that arithmetic in this type is valid, then make sure
4287 that it wraps around. */
4288 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4289 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4290 TYPE_UNSIGNED (etype));
4291
4292 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4293 {
4294 tree utype, minv, maxv;
4295
4296 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4297 for the type in question, as we rely on this here. */
4298 utype = unsigned_type_for (etype);
4299 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4300 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4301 integer_one_node, 1);
4302 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4303
4304 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4305 minv, 1, maxv, 1)))
4306 etype = utype;
4307 else
4308 return 0;
4309 }
4310
4311 high = fold_convert_loc (loc, etype, high);
4312 low = fold_convert_loc (loc, etype, low);
4313 exp = fold_convert_loc (loc, etype, exp);
4314
4315 value = const_binop (MINUS_EXPR, high, low);
4316
4317
4318 if (POINTER_TYPE_P (etype))
4319 {
4320 if (value != 0 && !TREE_OVERFLOW (value))
4321 {
4322 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4323 return build_range_check (loc, type,
4324 fold_build_pointer_plus_loc (loc, exp, low),
4325 1, build_int_cst (etype, 0), value);
4326 }
4327 return 0;
4328 }
4329
4330 if (value != 0 && !TREE_OVERFLOW (value))
4331 return build_range_check (loc, type,
4332 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4333 1, build_int_cst (etype, 0), value);
4334
4335 return 0;
4336 }
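
/* E.g. checking X against + [2, 5] in a wrapping unsigned type builds
   (X - 2) <= 3 via the recursive call with a zero lower bound, matching
   the canonical range-test example given earlier.  */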
4337
4338 /* Return the predecessor of VAL in its type, handling the infinite case. */
4339
4340 static tree
4341 range_predecessor (tree val)
4342 {
4343 tree type = TREE_TYPE (val);
4344
4345 if (INTEGRAL_TYPE_P (type)
4346 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4347 return 0;
4348 else
4349 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4350 }
4351
4352 /* Return the successor of VAL in its type, handling the infinite case. */
4353
4354 static tree
4355 range_successor (tree val)
4356 {
4357 tree type = TREE_TYPE (val);
4358
4359 if (INTEGRAL_TYPE_P (type)
4360 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4361 return 0;
4362 else
4363 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4364 }
4365
4366 /* Given two ranges, see if we can merge them into one. Return 1 if we
4367 can, 0 if we can't. Set the output range into the specified parameters. */
4368
4369 bool
4370 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4371 tree high0, int in1_p, tree low1, tree high1)
4372 {
4373 int no_overlap;
4374 int subset;
4375 int temp;
4376 tree tem;
4377 int in_p;
4378 tree low, high;
4379 int lowequal = ((low0 == 0 && low1 == 0)
4380 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4381 low0, 0, low1, 0)));
4382 int highequal = ((high0 == 0 && high1 == 0)
4383 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4384 high0, 1, high1, 1)));
4385
4386 /* Make range 0 be the range that starts first, or ends last if they
4387 start at the same value. Swap them if it isn't. */
4388 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4389 low0, 0, low1, 0))
4390 || (lowequal
4391 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4392 high1, 1, high0, 1))))
4393 {
4394 temp = in0_p, in0_p = in1_p, in1_p = temp;
4395 tem = low0, low0 = low1, low1 = tem;
4396 tem = high0, high0 = high1, high1 = tem;
4397 }
4398
4399 /* Now flag two cases, whether the ranges are disjoint or whether the
4400 second range is totally subsumed in the first. Note that the tests
4401 below are simplified by the ones above. */
4402 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4403 high0, 1, low1, 0));
4404 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4405 high1, 1, high0, 1));
4406
4407 /* We now have four cases, depending on whether we are including or
4408 excluding the two ranges. */
4409 if (in0_p && in1_p)
4410 {
4411 /* If they don't overlap, the result is false. If the second range
4412 is a subset it is the result. Otherwise, the range is from the start
4413 of the second to the end of the first. */
4414 if (no_overlap)
4415 in_p = 0, low = high = 0;
4416 else if (subset)
4417 in_p = 1, low = low1, high = high1;
4418 else
4419 in_p = 1, low = low1, high = high0;
4420 }
4421
4422 else if (in0_p && ! in1_p)
4423 {
4424 /* If they don't overlap, the result is the first range. If they are
4425 equal, the result is false. If the second range is a subset of the
4426 first, and the ranges begin at the same place, we go from just after
4427 the end of the second range to the end of the first. If the second
4428 range is not a subset of the first, or if it is a subset and both
4429 ranges end at the same place, the range starts at the start of the
4430 first range and ends just before the second range.
4431 Otherwise, we can't describe this as a single range. */
4432 if (no_overlap)
4433 in_p = 1, low = low0, high = high0;
4434 else if (lowequal && highequal)
4435 in_p = 0, low = high = 0;
4436 else if (subset && lowequal)
4437 {
4438 low = range_successor (high1);
4439 high = high0;
4440 in_p = 1;
4441 if (low == 0)
4442 {
4443 /* We are in the weird situation where high0 > high1 but
4444 high1 has no successor. Punt. */
4445 return 0;
4446 }
4447 }
4448 else if (! subset || highequal)
4449 {
4450 low = low0;
4451 high = range_predecessor (low1);
4452 in_p = 1;
4453 if (high == 0)
4454 {
4455 /* low0 < low1 but low1 has no predecessor. Punt. */
4456 return 0;
4457 }
4458 }
4459 else
4460 return 0;
4461 }
4462
4463 else if (! in0_p && in1_p)
4464 {
4465 /* If they don't overlap, the result is the second range. If the second
4466 is a subset of the first, the result is false. Otherwise,
4467 the range starts just after the first range and ends at the
4468 end of the second. */
4469 if (no_overlap)
4470 in_p = 1, low = low1, high = high1;
4471 else if (subset || highequal)
4472 in_p = 0, low = high = 0;
4473 else
4474 {
4475 low = range_successor (high0);
4476 high = high1;
4477 in_p = 1;
4478 if (low == 0)
4479 {
4480 /* high1 > high0 but high0 has no successor. Punt. */
4481 return 0;
4482 }
4483 }
4484 }
4485
4486 else
4487 {
4488 /* The case where we are excluding both ranges. Here the complex case
4489 is if they don't overlap. In that case, the only time we have a
4490 range is if they are adjacent. If the second is a subset of the
4491 first, the result is the first. Otherwise, the range to exclude
4492 starts at the beginning of the first range and ends at the end of the
4493 second. */
4494 if (no_overlap)
4495 {
4496 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4497 range_successor (high0),
4498 1, low1, 0)))
4499 in_p = 0, low = low0, high = high1;
4500 else
4501 {
4502 /* Canonicalize - [min, x] into - [-, x]. */
4503 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4504 switch (TREE_CODE (TREE_TYPE (low0)))
4505 {
4506 case ENUMERAL_TYPE:
4507 if (TYPE_PRECISION (TREE_TYPE (low0))
4508 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4509 break;
4510 /* FALLTHROUGH */
4511 case INTEGER_TYPE:
4512 if (tree_int_cst_equal (low0,
4513 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4514 low0 = 0;
4515 break;
4516 case POINTER_TYPE:
4517 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4518 && integer_zerop (low0))
4519 low0 = 0;
4520 break;
4521 default:
4522 break;
4523 }
4524
4525 /* Canonicalize - [x, max] into - [x, -]. */
4526 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4527 switch (TREE_CODE (TREE_TYPE (high1)))
4528 {
4529 case ENUMERAL_TYPE:
4530 if (TYPE_PRECISION (TREE_TYPE (high1))
4531 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4532 break;
4533 /* FALLTHROUGH */
4534 case INTEGER_TYPE:
4535 if (tree_int_cst_equal (high1,
4536 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4537 high1 = 0;
4538 break;
4539 case POINTER_TYPE:
4540 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4541 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4542 high1, 1,
4543 integer_one_node, 1)))
4544 high1 = 0;
4545 break;
4546 default:
4547 break;
4548 }
4549
4550 /* The ranges might be also adjacent between the maximum and
4551 minimum values of the given type. For
4552 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4553 return + [x + 1, y - 1]. */
4554 if (low0 == 0 && high1 == 0)
4555 {
4556 low = range_successor (high0);
4557 high = range_predecessor (low1);
4558 if (low == 0 || high == 0)
4559 return 0;
4560
4561 in_p = 1;
4562 }
4563 else
4564 return 0;
4565 }
4566 }
4567 else if (subset)
4568 in_p = 0, low = low0, high = high0;
4569 else
4570 in_p = 0, low = low0, high = high1;
4571 }
4572
4573 *pin_p = in_p, *plow = low, *phigh = high;
4574 return 1;
4575 }
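
/* For instance, merging + [2, 5] with + [4, 9] (both included,
   partially overlapping) gives the intersection + [4, 5], while merging
   the adjacent exclusions - [0, 3] and - [4, 9] gives - [0, 9].  */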
4576
4577
4578 /* Subroutine of fold, looking inside expressions of the form
4579 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4580 of the COND_EXPR. This function is being used also to optimize
4581 A op B ? C : A, by reversing the comparison first.
4582
4583 Return a folded expression whose code is not a COND_EXPR
4584 anymore, or NULL_TREE if no folding opportunity is found. */
4585
4586 static tree
4587 fold_cond_expr_with_comparison (location_t loc, tree type,
4588 tree arg0, tree arg1, tree arg2)
4589 {
4590 enum tree_code comp_code = TREE_CODE (arg0);
4591 tree arg00 = TREE_OPERAND (arg0, 0);
4592 tree arg01 = TREE_OPERAND (arg0, 1);
4593 tree arg1_type = TREE_TYPE (arg1);
4594 tree tem;
4595
4596 STRIP_NOPS (arg1);
4597 STRIP_NOPS (arg2);
4598
4599 /* If we have A op 0 ? A : -A, consider applying the following
4600 transformations:
4601
4602 A == 0? A : -A same as -A
4603 A != 0? A : -A same as A
4604 A >= 0? A : -A same as abs (A)
4605 A > 0? A : -A same as abs (A)
4606 A <= 0? A : -A same as -abs (A)
4607 A < 0? A : -A same as -abs (A)
4608
4609 None of these transformations work for modes with signed
4610 zeros. If A is +/-0, the first two transformations will
4611 change the sign of the result (from +0 to -0, or vice
4612 versa). The last four will fix the sign of the result,
4613 even though the original expressions could be positive or
4614 negative, depending on the sign of A.
4615
4616 Note that all these transformations are correct if A is
4617 NaN, since the two alternatives (A and -A) are also NaNs. */
4618 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4619 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4620 ? real_zerop (arg01)
4621 : integer_zerop (arg01))
4622 && ((TREE_CODE (arg2) == NEGATE_EXPR
4623 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4624 /* In the case that A is of the form X-Y, '-A' (arg2) may
4625 have already been folded to Y-X, check for that. */
4626 || (TREE_CODE (arg1) == MINUS_EXPR
4627 && TREE_CODE (arg2) == MINUS_EXPR
4628 && operand_equal_p (TREE_OPERAND (arg1, 0),
4629 TREE_OPERAND (arg2, 1), 0)
4630 && operand_equal_p (TREE_OPERAND (arg1, 1),
4631 TREE_OPERAND (arg2, 0), 0))))
4632 switch (comp_code)
4633 {
4634 case EQ_EXPR:
4635 case UNEQ_EXPR:
4636 tem = fold_convert_loc (loc, arg1_type, arg1);
4637 return pedantic_non_lvalue_loc (loc,
4638 fold_convert_loc (loc, type,
4639 negate_expr (tem)));
4640 case NE_EXPR:
4641 case LTGT_EXPR:
4642 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4643 case UNGE_EXPR:
4644 case UNGT_EXPR:
4645 if (flag_trapping_math)
4646 break;
4647 /* Fall through. */
4648 case GE_EXPR:
4649 case GT_EXPR:
4650 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4651 arg1 = fold_convert_loc (loc, signed_type_for
4652 (TREE_TYPE (arg1)), arg1);
4653 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4654 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4655 case UNLE_EXPR:
4656 case UNLT_EXPR:
4657 if (flag_trapping_math)
4658 break;
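/* Fall through. */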
4659 case LE_EXPR:
4660 case LT_EXPR:
4661 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4662 arg1 = fold_convert_loc (loc, signed_type_for
4663 (TREE_TYPE (arg1)), arg1);
4664 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4665 return negate_expr (fold_convert_loc (loc, type, tem));
4666 default:
4667 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4668 break;
4669 }
4670
4671 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4672 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4673 both transformations are correct when A is NaN: A != 0
4674 is then true, and A == 0 is false. */
4675
4676 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4677 && integer_zerop (arg01) && integer_zerop (arg2))
4678 {
4679 if (comp_code == NE_EXPR)
4680 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4681 else if (comp_code == EQ_EXPR)
4682 return build_int_cst (type, 0);
4683 }
4684
4685 /* Try some transformations of A op B ? A : B.
4686
4687 A == B? A : B same as B
4688 A != B? A : B same as A
4689 A >= B? A : B same as max (A, B)
4690 A > B? A : B same as max (B, A)
4691 A <= B? A : B same as min (A, B)
4692 A < B? A : B same as min (B, A)
4693
4694 As above, these transformations don't work in the presence
4695 of signed zeros. For example, if A and B are zeros of
4696 opposite sign, the first two transformations will change
4697 the sign of the result. In the last four, the original
4698 expressions give different results for (A=+0, B=-0) and
4699 (A=-0, B=+0), but the transformed expressions do not.
4700
4701 The first two transformations are correct if either A or B
4702 is a NaN. In the first transformation, the condition will
4703 be false, and B will indeed be chosen. In the case of the
4704 second transformation, the condition A != B will be true,
4705 and A will be chosen.
4706
4707 The conversions to max() and min() are not correct if B is
4708 a number and A is not. The conditions in the original
4709 expressions will be false, so all four give B. The min()
4710 and max() versions would give a NaN instead. */
4711 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4712 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4713 /* Avoid these transformations if the COND_EXPR may be used
4714 as an lvalue in the C++ front-end. PR c++/19199. */
4715 && (in_gimple_form
4716 || (strcmp (lang_hooks.name, "GNU C++") != 0
4717 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4718 || ! maybe_lvalue_p (arg1)
4719 || ! maybe_lvalue_p (arg2)))
4720 {
4721 tree comp_op0 = arg00;
4722 tree comp_op1 = arg01;
4723 tree comp_type = TREE_TYPE (comp_op0);
4724
4725 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4726 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4727 {
4728 comp_type = type;
4729 comp_op0 = arg1;
4730 comp_op1 = arg2;
4731 }
4732
4733 switch (comp_code)
4734 {
4735 case EQ_EXPR:
4736 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4737 case NE_EXPR:
4738 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4739 case LE_EXPR:
4740 case LT_EXPR:
4741 case UNLE_EXPR:
4742 case UNLT_EXPR:
4743 /* In C++ a ?: expression can be an lvalue, so put the
4744 operand which will be used if they are equal first
4745 so that we can convert this back to the
4746 corresponding COND_EXPR. */
4747 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4748 {
4749 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4750 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4751 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4752 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4753 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4754 comp_op1, comp_op0);
4755 return pedantic_non_lvalue_loc (loc,
4756 fold_convert_loc (loc, type, tem));
4757 }
4758 break;
4759 case GE_EXPR:
4760 case GT_EXPR:
4761 case UNGE_EXPR:
4762 case UNGT_EXPR:
4763 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4764 {
4765 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4766 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4767 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4768 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4769 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4770 comp_op1, comp_op0);
4771 return pedantic_non_lvalue_loc (loc,
4772 fold_convert_loc (loc, type, tem));
4773 }
4774 break;
4775 case UNEQ_EXPR:
4776 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4777 return pedantic_non_lvalue_loc (loc,
4778 fold_convert_loc (loc, type, arg2));
4779 break;
4780 case LTGT_EXPR:
4781 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4782 return pedantic_non_lvalue_loc (loc,
4783 fold_convert_loc (loc, type, arg1));
4784 break;
4785 default:
4786 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4787 break;
4788 }
4789 }
4790
4791 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4792 we might still be able to simplify this. For example,
4793 if C1 is one less or one more than C2, this might have started
4794 out as a MIN or MAX and been transformed by this function.
4795 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4796
4797 if (INTEGRAL_TYPE_P (type)
4798 && TREE_CODE (arg01) == INTEGER_CST
4799 && TREE_CODE (arg2) == INTEGER_CST)
4800 switch (comp_code)
4801 {
4802 case EQ_EXPR:
4803 if (TREE_CODE (arg1) == INTEGER_CST)
4804 break;
4805 /* We can replace A with C1 in this case. */
4806 arg1 = fold_convert_loc (loc, type, arg01);
4807 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4808
4809 case LT_EXPR:
4810 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4811 MIN_EXPR, to preserve the signedness of the comparison. */
4812 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4813 OEP_ONLY_CONST)
4814 && operand_equal_p (arg01,
4815 const_binop (PLUS_EXPR, arg2,
4816 build_int_cst (type, 1)),
4817 OEP_ONLY_CONST))
4818 {
4819 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4820 fold_convert_loc (loc, TREE_TYPE (arg00),
4821 arg2));
4822 return pedantic_non_lvalue_loc (loc,
4823 fold_convert_loc (loc, type, tem));
4824 }
4825 break;
4826
4827 case LE_EXPR:
4828 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4829 as above. */
4830 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4831 OEP_ONLY_CONST)
4832 && operand_equal_p (arg01,
4833 const_binop (MINUS_EXPR, arg2,
4834 build_int_cst (type, 1)),
4835 OEP_ONLY_CONST))
4836 {
4837 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4838 fold_convert_loc (loc, TREE_TYPE (arg00),
4839 arg2));
4840 return pedantic_non_lvalue_loc (loc,
4841 fold_convert_loc (loc, type, tem));
4842 }
4843 break;
4844
4845 case GT_EXPR:
4846 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4847 MAX_EXPR, to preserve the signedness of the comparison. */
4848 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4849 OEP_ONLY_CONST)
4850 && operand_equal_p (arg01,
4851 const_binop (MINUS_EXPR, arg2,
4852 build_int_cst (type, 1)),
4853 OEP_ONLY_CONST))
4854 {
4855 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4856 fold_convert_loc (loc, TREE_TYPE (arg00),
4857 arg2));
4858 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4859 }
4860 break;
4861
4862 case GE_EXPR:
4863 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4864 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4865 OEP_ONLY_CONST)
4866 && operand_equal_p (arg01,
4867 const_binop (PLUS_EXPR, arg2,
4868 build_int_cst (type, 1)),
4869 OEP_ONLY_CONST))
4870 {
4871 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4872 fold_convert_loc (loc, TREE_TYPE (arg00),
4873 arg2));
4874 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4875 }
4876 break;
4877 case NE_EXPR:
4878 break;
4879 default:
4880 gcc_unreachable ();
4881 }
4882
4883 return NULL_TREE;
4884 }
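
/* An instance of the constant case above: X < 48 ? X : 47 has
   C1 == C2 + 1 and so is recognized as MIN_EXPR (X, 47); symmetrically,
   X > 47 ? X : 48 becomes MAX_EXPR (X, 48).  */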
4885
4886
4887
4888 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4889 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4890 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4891 false) >= 2)
4892 #endif
4893
4894 /* EXP is some logical combination of boolean tests. See if we can
4895 merge it into some range test. Return the new tree if so. */
4896
4897 static tree
4898 fold_range_test (location_t loc, enum tree_code code, tree type,
4899 tree op0, tree op1)
4900 {
4901 int or_op = (code == TRUTH_ORIF_EXPR
4902 || code == TRUTH_OR_EXPR);
4903 int in0_p, in1_p, in_p;
4904 tree low0, low1, low, high0, high1, high;
4905 bool strict_overflow_p = false;
4906 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4907 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4908 tree tem;
4909 const char * const warnmsg = G_("assuming signed overflow does not occur "
4910 "when simplifying range test");
4911
4912 /* If this is an OR operation, invert both sides; we will invert
4913 again at the end. */
4914 if (or_op)
4915 in0_p = ! in0_p, in1_p = ! in1_p;
4916
4917 /* If both expressions are the same, if we can merge the ranges, and we
4918 can build the range test, return it or it inverted. If one of the
4919 ranges is always true or always false, consider it to be the same
4920 expression as the other. */
4921 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4922 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4923 in1_p, low1, high1)
4924 && 0 != (tem = (build_range_check (loc, type,
4925 lhs != 0 ? lhs
4926 : rhs != 0 ? rhs : integer_zero_node,
4927 in_p, low, high))))
4928 {
4929 if (strict_overflow_p)
4930 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4931 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4932 }
4933
4934 /* On machines where the branch cost is expensive, if this is a
4935 short-circuited branch and the underlying object on both sides
4936 is the same, make a non-short-circuit operation. */
4937 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4938 && lhs != 0 && rhs != 0
4939 && (code == TRUTH_ANDIF_EXPR
4940 || code == TRUTH_ORIF_EXPR)
4941 && operand_equal_p (lhs, rhs, 0))
4942 {
4943 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4944 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4945 which cases we can't do this. */
4946 if (simple_operand_p (lhs))
4947 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4948 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4949 type, op0, op1);
4950
4951 else if (!lang_hooks.decls.global_bindings_p ()
4952 && !CONTAINS_PLACEHOLDER_P (lhs))
4953 {
4954 tree common = save_expr (lhs);
4955
4956 if (0 != (lhs = build_range_check (loc, type, common,
4957 or_op ? ! in0_p : in0_p,
4958 low0, high0))
4959 && (0 != (rhs = build_range_check (loc, type, common,
4960 or_op ? ! in1_p : in1_p,
4961 low1, high1))))
4962 {
4963 if (strict_overflow_p)
4964 fold_overflow_warning (warnmsg,
4965 WARN_STRICT_OVERFLOW_COMPARISON);
4966 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4967 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4968 type, lhs, rhs);
4969 }
4970 }
4971 }
4972
4973 return 0;
4974 }
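
/* E.g. for X == 2 || X == 3 the two ranges are + [2, 2] and + [3, 3];
   both are inverted for the OR, merged into - [2, 3], and the final
   inversion produces the single test (unsigned) (X - 2) <= 1.  */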
4975
4976 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
4977 bit value. Arrange things so the extra bits will be set to zero if and
4978 only if C is sign-extended to its full width. If MASK is nonzero,
4979 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4980
4981 static tree
4982 unextend (tree c, int p, int unsignedp, tree mask)
4983 {
4984 tree type = TREE_TYPE (c);
4985 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4986 tree temp;
4987
4988 if (p == modesize || unsignedp)
4989 return c;
4990
4991 /* We work by getting just the sign bit into the low-order bit, then
4992 into the high-order bit, then sign-extend. We then XOR that value
4993 with C. */
4994 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
4995 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
4996
4997 /* We must use a signed type in order to get an arithmetic right shift.
4998 However, we must also avoid introducing accidental overflows, so that
4999 a subsequent call to integer_zerop will work. Hence we must
5000 do the type conversion here. At this point, the constant is either
5001 zero or one, and the conversion to a signed type can never overflow.
5002 We could get an overflow if this conversion is done anywhere else. */
5003 if (TYPE_UNSIGNED (type))
5004 temp = fold_convert (signed_type_for (type), temp);
5005
5006 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5007 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5008 if (mask != 0)
5009 temp = const_binop (BIT_AND_EXPR, temp,
5010 fold_convert (TREE_TYPE (c), mask));
5011 /* If necessary, convert the type back to match the type of C. */
5012 if (TYPE_UNSIGNED (type))
5013 temp = fold_convert (type, temp);
5014
5015 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5016 }
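
/* A worked instance (widths assumed for illustration): with P == 8,
   MODESIZE == 32 and C == 0xfffffffa, i.e. -6 already sign-extended,
   TEMP comes out as 0xffffff00 and C ^ TEMP == 0x000000fa, so the extra
   bits are clear; for C == 0x000000fa they would come out set.  */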
5017
5018 /* For an expression that has the form
5019 (A && B) || ~B
5020 or
5021 (A || B) && ~B,
5022 we can drop one of the inner expressions and simplify to
5023 A || ~B
5024 or
5025 A && ~B
5026 LOC is the location of the resulting expression. OP is the inner
5027 logical operation; the left-hand side in the examples above, while CMPOP
5028 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5029 removing a condition that guards another, as in
5030 (A != NULL && A->...) || A == NULL
5031 which we must not transform. If RHS_ONLY is true, only eliminate the
5032 right-most operand of the inner logical operation. */
5033
5034 static tree
5035 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5036 bool rhs_only)
5037 {
5038 tree type = TREE_TYPE (cmpop);
5039 enum tree_code code = TREE_CODE (cmpop);
5040 enum tree_code truthop_code = TREE_CODE (op);
5041 tree lhs = TREE_OPERAND (op, 0);
5042 tree rhs = TREE_OPERAND (op, 1);
5043 tree orig_lhs = lhs, orig_rhs = rhs;
5044 enum tree_code rhs_code = TREE_CODE (rhs);
5045 enum tree_code lhs_code = TREE_CODE (lhs);
5046 enum tree_code inv_code;
5047
5048 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5049 return NULL_TREE;
5050
5051 if (TREE_CODE_CLASS (code) != tcc_comparison)
5052 return NULL_TREE;
5053
5054 if (rhs_code == truthop_code)
5055 {
5056 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5057 if (newrhs != NULL_TREE)
5058 {
5059 rhs = newrhs;
5060 rhs_code = TREE_CODE (rhs);
5061 }
5062 }
5063 if (lhs_code == truthop_code && !rhs_only)
5064 {
5065 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5066 if (newlhs != NULL_TREE)
5067 {
5068 lhs = newlhs;
5069 lhs_code = TREE_CODE (lhs);
5070 }
5071 }
5072
5073 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5074 if (inv_code == rhs_code
5075 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5076 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5077 return lhs;
5078 if (!rhs_only && inv_code == lhs_code
5079 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5080 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5081 return rhs;
5082 if (rhs != orig_rhs || lhs != orig_lhs)
5083 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5084 lhs, rhs);
5085 return NULL_TREE;
5086 }
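
/* For example (operands hypothetical): with OP == (a < 5 && b > 2) and
   CMPOP == (b <= 2), the inverse of LE_EXPR is GT_EXPR and matches the
   inner comparison b > 2, so a < 5 is returned and the caller can
   simplify (a < 5 && b > 2) || b <= 2 to a < 5 || b <= 2.  */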
5087
5088 /* Find ways of folding logical expressions of LHS and RHS:
5089 Try to merge two comparisons to the same innermost item.
5090 Look for range tests like "ch >= '0' && ch <= '9'".
5091 Look for combinations of simple terms on machines with expensive branches
5092 and evaluate the RHS unconditionally.
5093
5094 For example, if we have p->a == 2 && p->b == 4 and we can make an
5095 object large enough to span both A and B, we can do this with a comparison
5096 against the object ANDed with the a mask.
5097
5098 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5099 operations to do this with one comparison.
5100
5101 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5102 function and the one above.
5103
5104 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5105 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5106
5107 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5108 two operands.
5109
5110 We return the simplified tree or 0 if no optimization is possible. */
5111
5112 static tree
5113 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5114 tree lhs, tree rhs)
5115 {
5116 /* If this is the "or" of two comparisons, we can do something if
5117 the comparisons are NE_EXPR. If this is the "and", we can do something
5118 if the comparisons are EQ_EXPR. I.e.,
5119 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5120
5121 WANTED_CODE is this operation code. For single bit fields, we can
5122 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5123 comparison for one-bit fields. */
5124
5125 enum tree_code wanted_code;
5126 enum tree_code lcode, rcode;
5127 tree ll_arg, lr_arg, rl_arg, rr_arg;
5128 tree ll_inner, lr_inner, rl_inner, rr_inner;
5129 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5130 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5131 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5132 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5133 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5134 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5135 enum machine_mode lnmode, rnmode;
5136 tree ll_mask, lr_mask, rl_mask, rr_mask;
5137 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5138 tree l_const, r_const;
5139 tree lntype, rntype, result;
5140 HOST_WIDE_INT first_bit, end_bit;
5141 int volatilep;
5142
5143 /* Start by getting the comparison codes. Fail if anything is volatile.
5144 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5145 it were surrounded with a NE_EXPR. */
5146
5147 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5148 return 0;
5149
5150 lcode = TREE_CODE (lhs);
5151 rcode = TREE_CODE (rhs);
5152
5153 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5154 {
5155 lhs = build2 (NE_EXPR, truth_type, lhs,
5156 build_int_cst (TREE_TYPE (lhs), 0));
5157 lcode = NE_EXPR;
5158 }
5159
5160 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5161 {
5162 rhs = build2 (NE_EXPR, truth_type, rhs,
5163 build_int_cst (TREE_TYPE (rhs), 0));
5164 rcode = NE_EXPR;
5165 }
5166
5167 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5168 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5169 return 0;
5170
5171 ll_arg = TREE_OPERAND (lhs, 0);
5172 lr_arg = TREE_OPERAND (lhs, 1);
5173 rl_arg = TREE_OPERAND (rhs, 0);
5174 rr_arg = TREE_OPERAND (rhs, 1);
5175
5176 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5177 if (simple_operand_p (ll_arg)
5178 && simple_operand_p (lr_arg))
5179 {
5180 if (operand_equal_p (ll_arg, rl_arg, 0)
5181 && operand_equal_p (lr_arg, rr_arg, 0))
5182 {
5183 result = combine_comparisons (loc, code, lcode, rcode,
5184 truth_type, ll_arg, lr_arg);
5185 if (result)
5186 return result;
5187 }
5188 else if (operand_equal_p (ll_arg, rr_arg, 0)
5189 && operand_equal_p (lr_arg, rl_arg, 0))
5190 {
5191 result = combine_comparisons (loc, code, lcode,
5192 swap_tree_comparison (rcode),
5193 truth_type, ll_arg, lr_arg);
5194 if (result)
5195 return result;
5196 }
5197 }
5198
5199 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5200 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5201
5202 /* If the RHS can be evaluated unconditionally and its operands are
5203 simple, it wins to evaluate the RHS unconditionally on machines
5204 with expensive branches. In this case, this isn't a comparison
5205 that can be merged. */
5206
5207 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5208 false) >= 2
5209 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5210 && simple_operand_p (rl_arg)
5211 && simple_operand_p (rr_arg))
5212 {
5213 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5214 if (code == TRUTH_OR_EXPR
5215 && lcode == NE_EXPR && integer_zerop (lr_arg)
5216 && rcode == NE_EXPR && integer_zerop (rr_arg)
5217 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5218 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5219 return build2_loc (loc, NE_EXPR, truth_type,
5220 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5221 ll_arg, rl_arg),
5222 build_int_cst (TREE_TYPE (ll_arg), 0));
5223
5224 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5225 if (code == TRUTH_AND_EXPR
5226 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5227 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5228 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5229 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5230 return build2_loc (loc, EQ_EXPR, truth_type,
5231 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5232 ll_arg, rl_arg),
5233 build_int_cst (TREE_TYPE (ll_arg), 0));
5234 }
5235
5236 /* See if the comparisons can be merged. Then get all the parameters for
5237 each side. */
5238
5239 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5240 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5241 return 0;
5242
5243 volatilep = 0;
5244 ll_inner = decode_field_reference (loc, ll_arg,
5245 &ll_bitsize, &ll_bitpos, &ll_mode,
5246 &ll_unsignedp, &volatilep, &ll_mask,
5247 &ll_and_mask);
5248 lr_inner = decode_field_reference (loc, lr_arg,
5249 &lr_bitsize, &lr_bitpos, &lr_mode,
5250 &lr_unsignedp, &volatilep, &lr_mask,
5251 &lr_and_mask);
5252 rl_inner = decode_field_reference (loc, rl_arg,
5253 &rl_bitsize, &rl_bitpos, &rl_mode,
5254 &rl_unsignedp, &volatilep, &rl_mask,
5255 &rl_and_mask);
5256 rr_inner = decode_field_reference (loc, rr_arg,
5257 &rr_bitsize, &rr_bitpos, &rr_mode,
5258 &rr_unsignedp, &volatilep, &rr_mask,
5259 &rr_and_mask);
5260
5261 /* The inner operation on the lhs of each comparison must be the same
5262 if we are to be able to do anything. Then see if we have
5263 constants. If not, the same must be true for
5264 the rhs's. */
5265 if (volatilep || ll_inner == 0 || rl_inner == 0
5266 || ! operand_equal_p (ll_inner, rl_inner, 0))
5267 return 0;
5268
5269 if (TREE_CODE (lr_arg) == INTEGER_CST
5270 && TREE_CODE (rr_arg) == INTEGER_CST)
5271 l_const = lr_arg, r_const = rr_arg;
5272 else if (lr_inner == 0 || rr_inner == 0
5273 || ! operand_equal_p (lr_inner, rr_inner, 0))
5274 return 0;
5275 else
5276 l_const = r_const = 0;
5277
5278 /* If either comparison code is not correct for our logical operation,
5279 fail. However, we can convert a one-bit comparison against zero into
5280 the opposite comparison against that bit being set in the field. */
5281
5282 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5283 if (lcode != wanted_code)
5284 {
5285 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5286 {
5287 /* Make the left operand unsigned, since we are only interested
5288 in the value of one bit. Otherwise we are doing the wrong
5289 thing below. */
5290 ll_unsignedp = 1;
5291 l_const = ll_mask;
5292 }
5293 else
5294 return 0;
5295 }
5296
5297 /* This is analogous to the code for l_const above. */
5298 if (rcode != wanted_code)
5299 {
5300 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5301 {
5302 rl_unsignedp = 1;
5303 r_const = rl_mask;
5304 }
5305 else
5306 return 0;
5307 }
5308
5309 /* See if we can find a mode that contains both fields being compared on
5310 the left. If we can't, fail. Otherwise, update all constants and masks
5311 to be relative to a field of that size. */
5312 first_bit = MIN (ll_bitpos, rl_bitpos);
5313 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5314 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5315 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5316 volatilep);
5317 if (lnmode == VOIDmode)
5318 return 0;
5319
5320 lnbitsize = GET_MODE_BITSIZE (lnmode);
5321 lnbitpos = first_bit & ~ (lnbitsize - 1);
5322 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5323 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5324
5325 if (BYTES_BIG_ENDIAN)
5326 {
5327 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5328 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5329 }
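/* For instance, with lnbitsize == 32 an 8-bit field at little-endian
   bit position 0 ends up at big-endian bit position 32 - 0 - 8 == 24.  */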
5330
5331 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5332 size_int (xll_bitpos));
5333 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5334 size_int (xrl_bitpos));
5335
5336 if (l_const)
5337 {
5338 l_const = fold_convert_loc (loc, lntype, l_const);
5339 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5340 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5341 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5342 fold_build1_loc (loc, BIT_NOT_EXPR,
5343 lntype, ll_mask))))
5344 {
5345 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5346
5347 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5348 }
5349 }
5350 if (r_const)
5351 {
5352 r_const = fold_convert_loc (loc, lntype, r_const);
5353 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5354 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5355 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5356 fold_build1_loc (loc, BIT_NOT_EXPR,
5357 lntype, rl_mask))))
5358 {
5359 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5360
5361 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5362 }
5363 }
5364
5365 /* If the right sides are not constant, do the same for them. Also,
5366 disallow this optimization if a size or signedness mismatch occurs
5367 between the left and right sides. */
5368 if (l_const == 0)
5369 {
5370 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5371 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5372 /* Make sure the two fields on the right
5373 correspond to the left without being swapped. */
5374 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5375 return 0;
5376
5377 first_bit = MIN (lr_bitpos, rr_bitpos);
5378 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5379 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5380 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5381 volatilep);
5382 if (rnmode == VOIDmode)
5383 return 0;
5384
5385 rnbitsize = GET_MODE_BITSIZE (rnmode);
5386 rnbitpos = first_bit & ~ (rnbitsize - 1);
5387 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5388 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5389
5390 if (BYTES_BIG_ENDIAN)
5391 {
5392 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5393 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5394 }
5395
5396 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5397 rntype, lr_mask),
5398 size_int (xlr_bitpos));
5399 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5400 rntype, rr_mask),
5401 size_int (xrr_bitpos));
5402
5403 /* Make a mask that corresponds to both fields being compared.
5404 Do this for both items being compared. If the operands are the
5405 same size and the bits being compared are in the same position
5406 then we can do this by masking both and comparing the masked
5407 results. */
5408 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5409 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5410 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5411 {
5412 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5413 ll_unsignedp || rl_unsignedp);
5414 if (! all_ones_mask_p (ll_mask, lnbitsize))
5415 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5416
5417 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5418 lr_unsignedp || rr_unsignedp);
5419 if (! all_ones_mask_p (lr_mask, rnbitsize))
5420 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5421
5422 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5423 }
5424
5425 /* There is still another way we can do something: If both pairs of
5426 fields being compared are adjacent, we may be able to make a wider
5427 field containing them both.
5428
5429 Note that we still must mask the lhs/rhs expressions. Furthermore,
5430 the mask must be shifted to account for the shift done by
5431 make_bit_field_ref. */
5432 if ((ll_bitsize + ll_bitpos == rl_bitpos
5433 && lr_bitsize + lr_bitpos == rr_bitpos)
5434 || (ll_bitpos == rl_bitpos + rl_bitsize
5435 && lr_bitpos == rr_bitpos + rr_bitsize))
5436 {
5437 tree type;
5438
5439 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5440 ll_bitsize + rl_bitsize,
5441 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5442 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5443 lr_bitsize + rr_bitsize,
5444 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5445
5446 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5447 size_int (MIN (xll_bitpos, xrl_bitpos)));
5448 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5449 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5450
5451 /* Convert to the smaller type before masking out unwanted bits. */
5452 type = lntype;
5453 if (lntype != rntype)
5454 {
5455 if (lnbitsize > rnbitsize)
5456 {
5457 lhs = fold_convert_loc (loc, rntype, lhs);
5458 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5459 type = rntype;
5460 }
5461 else if (lnbitsize < rnbitsize)
5462 {
5463 rhs = fold_convert_loc (loc, lntype, rhs);
5464 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5465 type = lntype;
5466 }
5467 }
5468
5469 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5470 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5471
5472 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5473 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5474
5475 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5476 }
5477
5478 return 0;
5479 }
5480
5481 /* Handle the case of comparisons with constants. If there is something in
5482 common between the masks, those bits of the constants must be the same.
5483 If not, the condition is always false. Test for this to avoid generating
5484 incorrect code below. */
5485 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5486 if (! integer_zerop (result)
5487 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5488 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5489 {
5490 if (wanted_code == NE_EXPR)
5491 {
5492 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5493 return constant_boolean_node (true, truth_type);
5494 }
5495 else
5496 {
5497 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5498 return constant_boolean_node (false, truth_type);
5499 }
5500 }
5501
5502 /* Construct the expression we will return. First get the component
5503 reference we will make. Unless the mask is all ones the width of
5504 that field, perform the mask operation. Then compare with the
5505 merged constant. */
5506 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5507 ll_unsignedp || rl_unsignedp);
5508
5509 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5510 if (! all_ones_mask_p (ll_mask, lnbitsize))
5511 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5512
5513 return build2_loc (loc, wanted_code, truth_type, result,
5514 const_binop (BIT_IOR_EXPR, l_const, r_const));
5515 }
5516
5517 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5518 constant. */
5519
5520 static tree
5521 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5522 tree op0, tree op1)
5523 {
5524 tree arg0 = op0;
5525 enum tree_code op_code;
5526 tree comp_const;
5527 tree minmax_const;
5528 int consts_equal, consts_lt;
5529 tree inner;
5530
5531 STRIP_SIGN_NOPS (arg0);
5532
5533 op_code = TREE_CODE (arg0);
5534 minmax_const = TREE_OPERAND (arg0, 1);
5535 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5536 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5537 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5538 inner = TREE_OPERAND (arg0, 0);
5539
5540 /* If something does not permit us to optimize, return the original tree. */
5541 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5542 || TREE_CODE (comp_const) != INTEGER_CST
5543 || TREE_OVERFLOW (comp_const)
5544 || TREE_CODE (minmax_const) != INTEGER_CST
5545 || TREE_OVERFLOW (minmax_const))
5546 return NULL_TREE;
5547
5548 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5549 and GT_EXPR, doing the rest with recursive calls using logical
5550 simplifications. */
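/* For instance, a LT_EXPR query is answered by simplifying the inverted
   GE_EXPR form and then inverting the truth value of the result
   (illustration).  */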
5551 switch (code)
5552 {
5553 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5554 {
5555 tree tem
5556 = optimize_minmax_comparison (loc,
5557 invert_tree_comparison (code, false),
5558 type, op0, op1);
5559 if (tem)
5560 return invert_truthvalue_loc (loc, tem);
5561 return NULL_TREE;
5562 }
5563
5564 case GE_EXPR:
5565 return
5566 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5567 optimize_minmax_comparison
5568 (loc, EQ_EXPR, type, arg0, comp_const),
5569 optimize_minmax_comparison
5570 (loc, GT_EXPR, type, arg0, comp_const));
5571
5572 case EQ_EXPR:
5573 if (op_code == MAX_EXPR && consts_equal)
5574 /* MAX (X, 0) == 0 -> X <= 0 */
5575 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5576
5577 else if (op_code == MAX_EXPR && consts_lt)
5578 /* MAX (X, 0) == 5 -> X == 5 */
5579 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5580
5581 else if (op_code == MAX_EXPR)
5582 /* MAX (X, 0) == -1 -> false */
5583 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5584
5585 else if (consts_equal)
5586 /* MIN (X, 0) == 0 -> X >= 0 */
5587 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5588
5589 else if (consts_lt)
5590 /* MIN (X, 0) == 5 -> false */
5591 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5592
5593 else
5594 /* MIN (X, 0) == -1 -> X == -1 */
5595 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5596
5597 case GT_EXPR:
5598 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5599 /* MAX (X, 0) > 0 -> X > 0
5600 MAX (X, 0) > 5 -> X > 5 */
5601 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5602
5603 else if (op_code == MAX_EXPR)
5604 /* MAX (X, 0) > -1 -> true */
5605 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5606
5607 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5608 /* MIN (X, 0) > 0 -> false
5609 MIN (X, 0) > 5 -> false */
5610 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5611
5612 else
5613 /* MIN (X, 0) > -1 -> X > -1 */
5614 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5615
5616 default:
5617 return NULL_TREE;
5618 }
5619 }
5620
5621 /* T is an integer expression that is being multiplied, divided, or taken a
5622 modulus (CODE says which and what kind of divide or modulus) by a
5623 constant C. See if we can eliminate that operation by folding it with
5624 other operations already in T. WIDE_TYPE, if non-null, is a type that
5625 should be used for the computation if wider than our type.
5626
5627 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5628 (X * 2) + (Y * 4). We must, however, be assured that either the original
5629 expression would not overflow or that overflow is undefined for the type
5630 in the language in question.
5631
5632 If we return a non-null expression, it is an equivalent form of the
5633 original computation, but need not be in the original type.
5634
5635 We set *STRICT_OVERFLOW_P to true if the return value depends on
5636 signed overflow being undefined. Otherwise we do not change
5637 *STRICT_OVERFLOW_P. */
5638
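/* For instance (assuming 32-bit int), folding (X * 8) / 4 into X * 2
   requires that X * 8 cannot wrap: for X == 0x30000000 the wrapped
   product divided by 4 is negative, while X * 2 is 0x60000000.  */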
5639 static tree
5640 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5641 bool *strict_overflow_p)
5642 {
5643 /* To avoid exponential search depth, refuse to allow recursion past
5644 three levels. Beyond that (1) it's highly unlikely that we'll find
5645 something interesting and (2) we've probably processed it before
5646 when we built the inner expression. */
5647
5648 static int depth;
5649 tree ret;
5650
5651 if (depth > 3)
5652 return NULL;
5653
5654 depth++;
5655 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5656 depth--;
5657
5658 return ret;
5659 }
5660
5661 static tree
5662 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5663 bool *strict_overflow_p)
5664 {
5665 tree type = TREE_TYPE (t);
5666 enum tree_code tcode = TREE_CODE (t);
5667 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5668 > GET_MODE_SIZE (TYPE_MODE (type)))
5669 ? wide_type : type);
5670 tree t1, t2;
5671 int same_p = tcode == code;
5672 tree op0 = NULL_TREE, op1 = NULL_TREE;
5673 bool sub_strict_overflow_p;
5674
5675 /* Don't deal with constants of zero here; they confuse the code below. */
5676 if (integer_zerop (c))
5677 return NULL_TREE;
5678
5679 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5680 op0 = TREE_OPERAND (t, 0);
5681
5682 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5683 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5684
5685 /* Note that we need not handle conditional operations here since fold
5686 already handles those cases. So just do arithmetic here. */
5687 switch (tcode)
5688 {
5689 case INTEGER_CST:
5690 /* For a constant, we can always simplify if we are a multiply
5691 or (for divide and modulus) if it is a multiple of our constant. */
5692 if (code == MULT_EXPR
5693 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5694 return const_binop (code, fold_convert (ctype, t),
5695 fold_convert (ctype, c));
5696 break;
5697
5698 CASE_CONVERT: case NON_LVALUE_EXPR:
5699 /* If op0 is an expression ... */
5700 if ((COMPARISON_CLASS_P (op0)
5701 || UNARY_CLASS_P (op0)
5702 || BINARY_CLASS_P (op0)
5703 || VL_EXP_CLASS_P (op0)
5704 || EXPRESSION_CLASS_P (op0))
5705 /* ... and has wrapping overflow, and its type is smaller
5706 than ctype, then we cannot pass through as widening. */
5707 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5708 && (TYPE_PRECISION (ctype)
5709 > TYPE_PRECISION (TREE_TYPE (op0))))
5710 /* ... or this is a truncation (t is narrower than op0),
5711 then we cannot pass through this narrowing. */
5712 || (TYPE_PRECISION (type)
5713 < TYPE_PRECISION (TREE_TYPE (op0)))
5714 /* ... or signedness changes for division or modulus,
5715 then we cannot pass through this conversion. */
5716 || (code != MULT_EXPR
5717 && (TYPE_UNSIGNED (ctype)
5718 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5719 /* ... or has undefined overflow while the type converted to
5720 has not, we cannot do the operation in the inner type
5721 as that would introduce undefined overflow. */
5722 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5723 && !TYPE_OVERFLOW_UNDEFINED (type))))
5724 break;
5725
5726 /* Pass the constant down and see if we can make a simplification. If
5727 we can, replace this expression with the inner simplification for
5728 possible later conversion to our or some other type. */
5729 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5730 && TREE_CODE (t2) == INTEGER_CST
5731 && !TREE_OVERFLOW (t2)
5732 && (0 != (t1 = extract_muldiv (op0, t2, code,
5733 code == MULT_EXPR
5734 ? ctype : NULL_TREE,
5735 strict_overflow_p))))
5736 return t1;
5737 break;
5738
5739 case ABS_EXPR:
5740 /* If widening the type changes it from signed to unsigned, then we
5741 must avoid building ABS_EXPR itself as unsigned. */
5742 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5743 {
5744 tree cstype = (*signed_type_for) (ctype);
5745 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5746 != 0)
5747 {
5748 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5749 return fold_convert (ctype, t1);
5750 }
5751 break;
5752 }
5753 /* If the constant is negative, we cannot simplify this. */
5754 if (tree_int_cst_sgn (c) == -1)
5755 break;
5756 /* FALLTHROUGH */
5757 case NEGATE_EXPR:
5758 /* For division and modulus, type can't be unsigned, as e.g.
5759 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5760 For signed types, even with wrapping overflow, this is fine. */
5761 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5762 break;
5763 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5764 != 0)
5765 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5766 break;
5767
5768 case MIN_EXPR: case MAX_EXPR:
5769 /* If widening the type changes the signedness, then we can't perform
5770 this optimization as that changes the result. */
5771 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5772 break;
5773
5774 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5775 sub_strict_overflow_p = false;
5776 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5777 &sub_strict_overflow_p)) != 0
5778 && (t2 = extract_muldiv (op1, c, code, wide_type,
5779 &sub_strict_overflow_p)) != 0)
5780 {
5781 if (tree_int_cst_sgn (c) < 0)
5782 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5783 if (sub_strict_overflow_p)
5784 *strict_overflow_p = true;
5785 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5786 fold_convert (ctype, t2));
5787 }
5788 break;
5789
5790 case LSHIFT_EXPR: case RSHIFT_EXPR:
5791 /* If the second operand is constant, this is a multiplication
5792 or floor division, by a power of two, so we can treat it that
5793 way unless the multiplier or divisor overflows. Signed
5794 left-shift overflow is implementation-defined rather than
5795 undefined in C90, so do not convert signed left shift into
5796 multiplication. */
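/* E.g. an unsigned X << 3 is handled as X * 8, and X >> 3 as the
   floor division X / 8 (illustration).  */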
5797 if (TREE_CODE (op1) == INTEGER_CST
5798 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5799 /* const_binop may not detect overflow correctly,
5800 so check for it explicitly here. */
5801 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5802 && TREE_INT_CST_HIGH (op1) == 0
5803 && 0 != (t1 = fold_convert (ctype,
5804 const_binop (LSHIFT_EXPR,
5805 size_one_node,
5806 op1)))
5807 && !TREE_OVERFLOW (t1))
5808 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5809 ? MULT_EXPR : FLOOR_DIV_EXPR,
5810 ctype,
5811 fold_convert (ctype, op0),
5812 t1),
5813 c, code, wide_type, strict_overflow_p);
5814 break;
5815
5816 case PLUS_EXPR: case MINUS_EXPR:
5817 /* See if we can eliminate the operation on both sides. If we can, we
5818 can return a new PLUS or MINUS. If we can't, the only remaining
5819 cases where we can do anything are if the second operand is a
5820 constant. */
5821 sub_strict_overflow_p = false;
5822 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5823 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5824 if (t1 != 0 && t2 != 0
5825 && (code == MULT_EXPR
5826 /* If not multiplication, we can only do this if both operands
5827 are divisible by c. */
5828 || (multiple_of_p (ctype, op0, c)
5829 && multiple_of_p (ctype, op1, c))))
5830 {
5831 if (sub_strict_overflow_p)
5832 *strict_overflow_p = true;
5833 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5834 fold_convert (ctype, t2));
5835 }
5836
5837 /* If this was a subtraction, negate OP1 and set it to be an addition.
5838 This simplifies the logic below. */
5839 if (tcode == MINUS_EXPR)
5840 {
5841 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5842 /* If OP1 was not easily negatable, the constant may be OP0. */
5843 if (TREE_CODE (op0) == INTEGER_CST)
5844 {
5845 tree tem = op0;
5846 op0 = op1;
5847 op1 = tem;
5848 tem = t1;
5849 t1 = t2;
5850 t2 = tem;
5851 }
5852 }
5853
5854 if (TREE_CODE (op1) != INTEGER_CST)
5855 break;
5856
5857 /* If either OP1 or C are negative, this optimization is not safe for
5858 some of the division and remainder types while for others we need
5859 to change the code. */
5860 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5861 {
5862 if (code == CEIL_DIV_EXPR)
5863 code = FLOOR_DIV_EXPR;
5864 else if (code == FLOOR_DIV_EXPR)
5865 code = CEIL_DIV_EXPR;
5866 else if (code != MULT_EXPR
5867 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5868 break;
5869 }
5870
5871 /* If it's a multiply or a division/modulus operation of a multiple
5872 of our constant, do the operation and verify it doesn't overflow. */
5873 if (code == MULT_EXPR
5874 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5875 {
5876 op1 = const_binop (code, fold_convert (ctype, op1),
5877 fold_convert (ctype, c));
5878 /* We allow the constant to overflow with wrapping semantics. */
5879 if (op1 == 0
5880 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5881 break;
5882 }
5883 else
5884 break;
5885
5886 /* If we have an unsigned type, we cannot widen the operation since it
5887 will change the result if the original computation overflowed. */
5888 if (TYPE_UNSIGNED (ctype) && ctype != type)
5889 break;
5890
5891 /* If we were able to eliminate our operation from the first side,
5892 apply our operation to the second side and reform the PLUS. */
5893 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5894 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5895
5896 /* The last case is if we are a multiply. In that case, we can
5897 apply the distributive law to commute the multiply and addition
5898 if the multiplication of the constants doesn't overflow
5899 and overflow is defined. With undefined overflow
5900 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
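/* Concretely (assuming 32-bit int and c == 2): for op0 == 0x40000000
   and orig_op1 == -1, (op0 - 1) * 2 fits, but op0 * 2 overflows, so
   with undefined overflow the distribution would be invalid.  */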
5901 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5902 return fold_build2 (tcode, ctype,
5903 fold_build2 (code, ctype,
5904 fold_convert (ctype, op0),
5905 fold_convert (ctype, c)),
5906 op1);
5907
5908 break;
5909
5910 case MULT_EXPR:
5911 /* We have a special case here if we are doing something like
5912 (C * 8) % 4 since we know that's zero. */
5913 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5914 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5915 /* If the multiplication can overflow we cannot optimize this. */
5916 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5917 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5918 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5919 {
5920 *strict_overflow_p = true;
5921 return omit_one_operand (type, integer_zero_node, op0);
5922 }
5923
5924 /* ... fall through ... */
5925
5926 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5927 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5928 /* If we can extract our operation from the LHS, do so and return a
5929 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5930 do something only if the second operand is a constant. */
5931 if (same_p
5932 && (t1 = extract_muldiv (op0, c, code, wide_type,
5933 strict_overflow_p)) != 0)
5934 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5935 fold_convert (ctype, op1));
5936 else if (tcode == MULT_EXPR && code == MULT_EXPR
5937 && (t1 = extract_muldiv (op1, c, code, wide_type,
5938 strict_overflow_p)) != 0)
5939 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5940 fold_convert (ctype, t1));
5941 else if (TREE_CODE (op1) != INTEGER_CST)
5942 return 0;
5943
5944 /* If these are the same operation types, we can associate them
5945 assuming no overflow. */
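/* E.g. (X * 4) * 6 becomes X * 24, provided the product 24 is
   representable in CTYPE without overflow (illustration).  */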
5946 if (tcode == code)
5947 {
5948 double_int mul;
5949 bool overflow_p;
5950 unsigned prec = TYPE_PRECISION (ctype);
5951 bool uns = TYPE_UNSIGNED (ctype);
5952 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5953 double_int dic = tree_to_double_int (c).ext (prec, uns);
5954 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5955 overflow_p = ((!uns && overflow_p)
5956 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5957 if (!double_int_fits_to_tree_p (ctype, mul)
5958 && ((uns && tcode != MULT_EXPR) || !uns))
5959 overflow_p = 1;
5960 if (!overflow_p)
5961 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5962 double_int_to_tree (ctype, mul));
5963 }
5964
5965 /* If these operations "cancel" each other, we have the main
5966 optimizations of this pass, which occur when either constant is a
5967 multiple of the other, in which case we replace this with either an
5968 operation of CODE or TCODE.
5969
5970 If we have an unsigned type, we cannot do this since it will change
5971 the result if the original computation overflowed. */
5972 if (TYPE_OVERFLOW_UNDEFINED (ctype)
5973 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5974 || (tcode == MULT_EXPR
5975 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5976 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
5977 && code != MULT_EXPR)))
5978 {
5979 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5980 {
5981 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5982 *strict_overflow_p = true;
5983 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5984 fold_convert (ctype,
5985 const_binop (TRUNC_DIV_EXPR,
5986 op1, c)));
5987 }
5988 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
5989 {
5990 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5991 *strict_overflow_p = true;
5992 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5993 fold_convert (ctype,
5994 const_binop (TRUNC_DIV_EXPR,
5995 c, op1)));
5996 }
5997 }
5998 break;
5999
6000 default:
6001 break;
6002 }
6003
6004 return 0;
6005 }
6006
6007 /* Return a node which has the indicated constant VALUE (either 0 or
6008 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6009 and is of the indicated TYPE. */
6010
6011 tree
6012 constant_boolean_node (bool value, tree type)
6013 {
6014 if (type == integer_type_node)
6015 return value ? integer_one_node : integer_zero_node;
6016 else if (type == boolean_type_node)
6017 return value ? boolean_true_node : boolean_false_node;
6018 else if (TREE_CODE (type) == VECTOR_TYPE)
6019 return build_vector_from_val (type,
6020 build_int_cst (TREE_TYPE (type),
6021 value ? -1 : 0));
6022 else
6023 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6024 }
6025
6026
6027 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6028 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6029 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6030 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6031 COND is the first argument to CODE; otherwise (as in the example
6032 given here), it is the second argument. TYPE is the type of the
6033 original expression. Return NULL_TREE if no simplification is
6034 possible. */
6035
6036 static tree
6037 fold_binary_op_with_conditional_arg (location_t loc,
6038 enum tree_code code,
6039 tree type, tree op0, tree op1,
6040 tree cond, tree arg, int cond_first_p)
6041 {
6042 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6043 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6044 tree test, true_value, false_value;
6045 tree lhs = NULL_TREE;
6046 tree rhs = NULL_TREE;
6047 enum tree_code cond_code = COND_EXPR;
6048
6049 if (TREE_CODE (cond) == COND_EXPR
6050 || TREE_CODE (cond) == VEC_COND_EXPR)
6051 {
6052 test = TREE_OPERAND (cond, 0);
6053 true_value = TREE_OPERAND (cond, 1);
6054 false_value = TREE_OPERAND (cond, 2);
6055 /* If this operand throws an exception (and so has void type), then
6056 it does not make sense to try to perform a logical or arithmetic
6057 operation involving it. */
6058 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6059 lhs = true_value;
6060 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6061 rhs = false_value;
6062 }
6063 else
6064 {
6065 tree testtype = TREE_TYPE (cond);
6066 test = cond;
6067 true_value = constant_boolean_node (true, testtype);
6068 false_value = constant_boolean_node (false, testtype);
6069 }
6070
6071 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6072 cond_code = VEC_COND_EXPR;
6073
6074 /* This transformation is only worthwhile if we don't have to wrap ARG
6075 in a SAVE_EXPR and the operation can be simplified without recursing
6076 on at least one of the branches once it's pushed inside the COND_EXPR. */
6077 if (!TREE_CONSTANT (arg)
6078 && (TREE_SIDE_EFFECTS (arg)
6079 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6080 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6081 return NULL_TREE;
6082
6083 arg = fold_convert_loc (loc, arg_type, arg);
6084 if (lhs == 0)
6085 {
6086 true_value = fold_convert_loc (loc, cond_type, true_value);
6087 if (cond_first_p)
6088 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6089 else
6090 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6091 }
6092 if (rhs == 0)
6093 {
6094 false_value = fold_convert_loc (loc, cond_type, false_value);
6095 if (cond_first_p)
6096 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6097 else
6098 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6099 }
6100
6101 /* Check that we have simplified at least one of the branches. */
6102 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6103 return NULL_TREE;
6104
6105 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6106 }
6107
6108
6109 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6110
6111 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6112 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6113 ADDEND is the same as X.
6114
6115 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6116 and finite. The problematic cases are when X is zero, and its mode
6117 has signed zeros. In the case of rounding towards -infinity,
6118 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6119 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6120
6121 bool
6122 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6123 {
6124 if (!real_zerop (addend))
6125 return false;
6126
6127 /* Don't allow the fold with -fsignaling-nans. */
6128 if (HONOR_SNANS (TYPE_MODE (type)))
6129 return false;
6130
6131 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6132 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6133 return true;
6134
6135 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6136 if (TREE_CODE (addend) == REAL_CST
6137 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6138 negate = !negate;
6139
6140 /* The mode has signed zeros, and we have to honor their sign.
6141 In this situation, there is only one case we can return true for.
6142 X - 0 is the same as X unless rounding towards -infinity is
6143 supported. */
6144 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6145 }
6146
6147 /* Subroutine of fold() that checks comparisons of built-in math
6148 functions against real constants.
6149
6150 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6151 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6152 is the type of the result and ARG0 and ARG1 are the operands of the
6153 comparison. ARG1 must be a TREE_REAL_CST.
6154
6155 The function returns the constant folded tree if a simplification
6156 can be made, and NULL_TREE otherwise. */
6157
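/* For example, sqrt(x) > 2.0 can become x > 4.0: for negative x,
   sqrt yields NaN and both forms are false, and squaring both sides
   preserves the ordering for nonnegative x (illustration).  */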
6158 static tree
6159 fold_mathfn_compare (location_t loc,
6160 enum built_in_function fcode, enum tree_code code,
6161 tree type, tree arg0, tree arg1)
6162 {
6163 REAL_VALUE_TYPE c;
6164
6165 if (BUILTIN_SQRT_P (fcode))
6166 {
6167 tree arg = CALL_EXPR_ARG (arg0, 0);
6168 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6169
6170 c = TREE_REAL_CST (arg1);
6171 if (REAL_VALUE_NEGATIVE (c))
6172 {
6173 /* sqrt(x) < y is always false, if y is negative. */
6174 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6175 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6176
6177 /* sqrt(x) > y is always true, if y is negative and we
6178 don't care about NaNs, i.e. negative values of x. */
6179 if (code == NE_EXPR || !HONOR_NANS (mode))
6180 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6181
6182 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6183 return fold_build2_loc (loc, GE_EXPR, type, arg,
6184 build_real (TREE_TYPE (arg), dconst0));
6185 }
6186 else if (code == GT_EXPR || code == GE_EXPR)
6187 {
6188 REAL_VALUE_TYPE c2;
6189
6190 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6191 real_convert (&c2, mode, &c2);
6192
6193 if (REAL_VALUE_ISINF (c2))
6194 {
6195 /* sqrt(x) > y is x == +Inf, when y is very large. */
6196 if (HONOR_INFINITIES (mode))
6197 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6198 build_real (TREE_TYPE (arg), c2));
6199
6200 /* sqrt(x) > y is always false, when y is very large
6201 and we don't care about infinities. */
6202 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6203 }
6204
6205 /* sqrt(x) > c is the same as x > c*c. */
6206 return fold_build2_loc (loc, code, type, arg,
6207 build_real (TREE_TYPE (arg), c2));
6208 }
6209 else if (code == LT_EXPR || code == LE_EXPR)
6210 {
6211 REAL_VALUE_TYPE c2;
6212
6213 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6214 real_convert (&c2, mode, &c2);
6215
6216 if (REAL_VALUE_ISINF (c2))
6217 {
6218 /* sqrt(x) < y is always true, when y is a very large
6219 value and we don't care about NaNs or Infinities. */
6220 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6221 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6222
6223 /* sqrt(x) < y is x != +Inf when y is very large and we
6224 don't care about NaNs. */
6225 if (! HONOR_NANS (mode))
6226 return fold_build2_loc (loc, NE_EXPR, type, arg,
6227 build_real (TREE_TYPE (arg), c2));
6228
6229 /* sqrt(x) < y is x >= 0 when y is very large and we
6230 don't care about Infinities. */
6231 if (! HONOR_INFINITIES (mode))
6232 return fold_build2_loc (loc, GE_EXPR, type, arg,
6233 build_real (TREE_TYPE (arg), dconst0));
6234
6235 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6236 arg = save_expr (arg);
6237 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6238 fold_build2_loc (loc, GE_EXPR, type, arg,
6239 build_real (TREE_TYPE (arg),
6240 dconst0)),
6241 fold_build2_loc (loc, NE_EXPR, type, arg,
6242 build_real (TREE_TYPE (arg),
6243 c2)));
6244 }
6245
6246 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6247 if (! HONOR_NANS (mode))
6248 return fold_build2_loc (loc, code, type, arg,
6249 build_real (TREE_TYPE (arg), c2));
6250
6251 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6252 arg = save_expr (arg);
6253 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6254 fold_build2_loc (loc, GE_EXPR, type, arg,
6255 build_real (TREE_TYPE (arg),
6256 dconst0)),
6257 fold_build2_loc (loc, code, type, arg,
6258 build_real (TREE_TYPE (arg),
6259 c2)));
6260 }
6261 }
6262
6263 return NULL_TREE;
6264 }
6265
6266 /* Subroutine of fold() that optimizes comparisons against Infinities,
6267 either +Inf or -Inf.
6268
6269 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6270 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6271 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6272
6273 The function returns the constant folded tree if a simplification
6274 can be made, and NULL_TREE otherwise. */
6275
6276 static tree
6277 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6278 tree arg0, tree arg1)
6279 {
6280 enum machine_mode mode;
6281 REAL_VALUE_TYPE max;
6282 tree temp;
6283 bool neg;
6284
6285 mode = TYPE_MODE (TREE_TYPE (arg0));
6286
6287 /* For negative infinity swap the sense of the comparison. */
6288 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6289 if (neg)
6290 code = swap_tree_comparison (code);
6291
6292 switch (code)
6293 {
6294 case GT_EXPR:
6295 /* x > +Inf is always false, if we ignore sNaNs. */
6296 if (HONOR_SNANS (mode))
6297 return NULL_TREE;
6298 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6299
6300 case LE_EXPR:
6301 /* x <= +Inf is always true, if we don't care about NaNs. */
6302 if (! HONOR_NANS (mode))
6303 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6304
6305 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6306 arg0 = save_expr (arg0);
6307 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6308
6309 case EQ_EXPR:
6310 case GE_EXPR:
6311 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6312 real_maxval (&max, neg, mode);
6313 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6314 arg0, build_real (TREE_TYPE (arg0), max));
6315
6316 case LT_EXPR:
6317 /* x < +Inf is always equal to x <= DBL_MAX. */
6318 real_maxval (&max, neg, mode);
6319 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6320 arg0, build_real (TREE_TYPE (arg0), max));
6321
6322 case NE_EXPR:
6323 /* x != +Inf is always equal to !(x > DBL_MAX). */
6324 real_maxval (&max, neg, mode);
6325 if (! HONOR_NANS (mode))
6326 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6327 arg0, build_real (TREE_TYPE (arg0), max));
6328
6329 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6330 arg0, build_real (TREE_TYPE (arg0), max));
6331 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6332
6333 default:
6334 break;
6335 }
6336
6337 return NULL_TREE;
6338 }
6339
6340 /* Subroutine of fold() that optimizes comparisons of a division by
6341 a nonzero integer constant against an integer constant, i.e.
6342 X/C1 op C2.
6343
6344 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6345 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6346 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6347
6348 The function returns the constant folded tree if a simplification
6349 can be made, and NULL_TREE otherwise. */
6350
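/* For instance, with truncating signed division x/4 == 2 holds exactly
   for x in [8, 11], so the comparison becomes the range check
   8 <= x && x <= 11 (illustration).  */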
6351 static tree
6352 fold_div_compare (location_t loc,
6353 enum tree_code code, tree type, tree arg0, tree arg1)
6354 {
6355 tree prod, tmp, hi, lo;
6356 tree arg00 = TREE_OPERAND (arg0, 0);
6357 tree arg01 = TREE_OPERAND (arg0, 1);
6358 double_int val;
6359 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6360 bool neg_overflow;
6361 bool overflow;
6362
6363 /* We have to do this the hard way to detect unsigned overflow.
6364 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6365 val = TREE_INT_CST (arg01)
6366 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6367 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6368 neg_overflow = false;
6369
6370 if (unsigned_p)
6371 {
6372 tmp = int_const_binop (MINUS_EXPR, arg01,
6373 build_int_cst (TREE_TYPE (arg01), 1));
6374 lo = prod;
6375
6376 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6377 val = TREE_INT_CST (prod)
6378 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6379 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6380 -1, overflow | TREE_OVERFLOW (prod));
6381 }
6382 else if (tree_int_cst_sgn (arg01) >= 0)
6383 {
6384 tmp = int_const_binop (MINUS_EXPR, arg01,
6385 build_int_cst (TREE_TYPE (arg01), 1));
6386 switch (tree_int_cst_sgn (arg1))
6387 {
6388 case -1:
6389 neg_overflow = true;
6390 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6391 hi = prod;
6392 break;
6393
6394 case 0:
6395 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6396 hi = tmp;
6397 break;
6398
6399 case 1:
6400 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6401 lo = prod;
6402 break;
6403
6404 default:
6405 gcc_unreachable ();
6406 }
6407 }
6408 else
6409 {
6410 /* A negative divisor reverses the relational operators. */
6411 code = swap_tree_comparison (code);
6412
6413 tmp = int_const_binop (PLUS_EXPR, arg01,
6414 build_int_cst (TREE_TYPE (arg01), 1));
6415 switch (tree_int_cst_sgn (arg1))
6416 {
6417 case -1:
6418 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6419 lo = prod;
6420 break;
6421
6422 case 0:
6423 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6424 lo = tmp;
6425 break;
6426
6427 case 1:
6428 neg_overflow = true;
6429 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6430 hi = prod;
6431 break;
6432
6433 default:
6434 gcc_unreachable ();
6435 }
6436 }
6437
6438 switch (code)
6439 {
6440 case EQ_EXPR:
6441 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6442 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6443 if (TREE_OVERFLOW (hi))
6444 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6445 if (TREE_OVERFLOW (lo))
6446 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6447 return build_range_check (loc, type, arg00, 1, lo, hi);
6448
6449 case NE_EXPR:
6450 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6451 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6452 if (TREE_OVERFLOW (hi))
6453 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6454 if (TREE_OVERFLOW (lo))
6455 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6456 return build_range_check (loc, type, arg00, 0, lo, hi);
6457
6458 case LT_EXPR:
6459 if (TREE_OVERFLOW (lo))
6460 {
6461 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6462 return omit_one_operand_loc (loc, type, tmp, arg00);
6463 }
6464 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6465
6466 case LE_EXPR:
6467 if (TREE_OVERFLOW (hi))
6468 {
6469 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6470 return omit_one_operand_loc (loc, type, tmp, arg00);
6471 }
6472 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6473
6474 case GT_EXPR:
6475 if (TREE_OVERFLOW (hi))
6476 {
6477 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6478 return omit_one_operand_loc (loc, type, tmp, arg00);
6479 }
6480 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6481
6482 case GE_EXPR:
6483 if (TREE_OVERFLOW (lo))
6484 {
6485 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6486 return omit_one_operand_loc (loc, type, tmp, arg00);
6487 }
6488 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6489
6490 default:
6491 break;
6492 }
6493
6494 return NULL_TREE;
6495 }
6496
6497
6498 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6499 equality/inequality test, then return a simplified form of the test
6500 using a sign testing. Otherwise return NULL. TYPE is the desired
6501 result type. */
6502
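/* For example, for a 32-bit int A, (A & 0x80000000) != 0 tests the
   sign bit and becomes A < 0, while (A & 0x80000000) == 0 becomes
   A >= 0 (illustration).  */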
6503 static tree
6504 fold_single_bit_test_into_sign_test (location_t loc,
6505 enum tree_code code, tree arg0, tree arg1,
6506 tree result_type)
6507 {
6508 /* If this is testing a single bit, we can optimize the test. */
6509 if ((code == NE_EXPR || code == EQ_EXPR)
6510 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6511 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6512 {
6513 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6514 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6515 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6516
6517 if (arg00 != NULL_TREE
6518 /* This is only a win if casting to a signed type is cheap,
6519 i.e. when arg00's type is not a partial mode. */
6520 && TYPE_PRECISION (TREE_TYPE (arg00))
6521 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6522 {
6523 tree stype = signed_type_for (TREE_TYPE (arg00));
6524 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6525 result_type,
6526 fold_convert_loc (loc, stype, arg00),
6527 build_int_cst (stype, 0));
6528 }
6529 }
6530
6531 return NULL_TREE;
6532 }
6533
6534 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6535 equality/inequality test, then return a simplified form of
6536 the test using shifts and logical operations. Otherwise return
6537 NULL. TYPE is the desired result type. */
6538
6539 tree
6540 fold_single_bit_test (location_t loc, enum tree_code code,
6541 tree arg0, tree arg1, tree result_type)
6542 {
6543 /* If this is testing a single bit, we can optimize the test. */
6544 if ((code == NE_EXPR || code == EQ_EXPR)
6545 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6546 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6547 {
6548 tree inner = TREE_OPERAND (arg0, 0);
6549 tree type = TREE_TYPE (arg0);
6550 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6551 enum machine_mode operand_mode = TYPE_MODE (type);
6552 int ops_unsigned;
6553 tree signed_type, unsigned_type, intermediate_type;
6554 tree tem, one;
6555
6556 /* First, see if we can fold the single bit test into a sign-bit
6557 test. */
6558 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6559 result_type);
6560 if (tem)
6561 return tem;
6562
6563 /* Otherwise we have (A & C) != 0 where C is a single bit,
6564 convert that into ((A >> C2) & 1), where C2 = log2(C).
6565 Similarly for (A & C) == 0. */
6566
6567 /* If INNER is a right shift of a constant and it plus BITNUM does
6568 not overflow, adjust BITNUM and INNER. */
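/* E.g. testing bit 2 of (X >> 3) is the same as testing bit 5 of X
   (illustration).  */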
6569 if (TREE_CODE (inner) == RSHIFT_EXPR
6570 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6571 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6572 && bitnum < TYPE_PRECISION (type)
6573 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6574 bitnum - TYPE_PRECISION (type)))
6575 {
6576 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6577 inner = TREE_OPERAND (inner, 0);
6578 }
6579
6580 /* If we are going to be able to omit the AND below, we must do our
6581 operations as unsigned. If we must use the AND, we have a choice.
6582 Normally unsigned is faster, but for some machines signed is. */
6583 #ifdef LOAD_EXTEND_OP
6584 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6585 && !flag_syntax_only) ? 0 : 1;
6586 #else
6587 ops_unsigned = 1;
6588 #endif
6589
6590 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6591 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6592 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6593 inner = fold_convert_loc (loc, intermediate_type, inner);
6594
6595 if (bitnum != 0)
6596 inner = build2 (RSHIFT_EXPR, intermediate_type,
6597 inner, size_int (bitnum));
6598
6599 one = build_int_cst (intermediate_type, 1);
6600
6601 if (code == EQ_EXPR)
6602 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6603
6604 /* Put the AND last so it can combine with more things. */
6605 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6606
6607 /* Make sure to return the proper type. */
6608 inner = fold_convert_loc (loc, result_type, inner);
6609
6610 return inner;
6611 }
6612 return NULL_TREE;
6613 }
6614
6615 /* Check whether we are allowed to reorder operands arg0 and arg1,
6616 such that the evaluation of arg1 occurs before arg0. */
6617
6618 static bool
6619 reorder_operands_p (const_tree arg0, const_tree arg1)
6620 {
6621 if (! flag_evaluation_order)
6622 return true;
6623 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6624 return true;
6625 return ! TREE_SIDE_EFFECTS (arg0)
6626 && ! TREE_SIDE_EFFECTS (arg1);
6627 }
6628
6629 /* Test whether it is preferable to swap two operands, ARG0 and
6630 ARG1, for example because ARG0 is an integer constant and ARG1
6631 isn't. If REORDER is true, only recommend swapping if we can
6632 evaluate the operands in reverse order. */
6633
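/* For instance, putting the constant second canonicalizes 1 + a as
   a + 1, so later matching only needs to consider one ordering
   (illustration).  */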
6634 bool
6635 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6636 {
6637 STRIP_SIGN_NOPS (arg0);
6638 STRIP_SIGN_NOPS (arg1);
6639
6640 if (TREE_CODE (arg1) == INTEGER_CST)
6641 return 0;
6642 if (TREE_CODE (arg0) == INTEGER_CST)
6643 return 1;
6644
6645 if (TREE_CODE (arg1) == REAL_CST)
6646 return 0;
6647 if (TREE_CODE (arg0) == REAL_CST)
6648 return 1;
6649
6650 if (TREE_CODE (arg1) == FIXED_CST)
6651 return 0;
6652 if (TREE_CODE (arg0) == FIXED_CST)
6653 return 1;
6654
6655 if (TREE_CODE (arg1) == COMPLEX_CST)
6656 return 0;
6657 if (TREE_CODE (arg0) == COMPLEX_CST)
6658 return 1;
6659
6660 if (TREE_CONSTANT (arg1))
6661 return 0;
6662 if (TREE_CONSTANT (arg0))
6663 return 1;
6664
6665 if (optimize_function_for_size_p (cfun))
6666 return 0;
6667
6668 if (reorder && flag_evaluation_order
6669 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6670 return 0;
6671
6672 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6673 for commutative and comparison operators. Ensuring a canonical
6674 form allows the optimizers to find additional redundancies without
6675 having to explicitly check for both orderings. */
6676 if (TREE_CODE (arg0) == SSA_NAME
6677 && TREE_CODE (arg1) == SSA_NAME
6678 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6679 return 1;
6680
6681 /* Put SSA_NAMEs last. */
6682 if (TREE_CODE (arg1) == SSA_NAME)
6683 return 0;
6684 if (TREE_CODE (arg0) == SSA_NAME)
6685 return 1;
6686
6687 /* Put variables last. */
6688 if (DECL_P (arg1))
6689 return 0;
6690 if (DECL_P (arg0))
6691 return 1;
6692
6693 return 0;
6694 }
6695
6696 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6697 ARG0 is extended to a wider type. */
6698
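/* E.g. if C has type unsigned char, the test (int) C < 300 is known to
   be true, because 300 exceeds 255, the maximum value representable in
   the shorter type (illustration).  */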
6699 static tree
6700 fold_widened_comparison (location_t loc, enum tree_code code,
6701 tree type, tree arg0, tree arg1)
6702 {
6703 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6704 tree arg1_unw;
6705 tree shorter_type, outer_type;
6706 tree min, max;
6707 bool above, below;
6708
6709 if (arg0_unw == arg0)
6710 return NULL_TREE;
6711 shorter_type = TREE_TYPE (arg0_unw);
6712
6713 #ifdef HAVE_canonicalize_funcptr_for_compare
6714 /* Disable this optimization if we're casting a function pointer
6715 type on targets that require function pointer canonicalization. */
6716 if (HAVE_canonicalize_funcptr_for_compare
6717 && TREE_CODE (shorter_type) == POINTER_TYPE
6718 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6719 return NULL_TREE;
6720 #endif
6721
6722 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6723 return NULL_TREE;
6724
6725 arg1_unw = get_unwidened (arg1, NULL_TREE);
6726
6727 /* If possible, express the comparison in the shorter mode. */
6728 if ((code == EQ_EXPR || code == NE_EXPR
6729 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6730 && (TREE_TYPE (arg1_unw) == shorter_type
6731 || ((TYPE_PRECISION (shorter_type)
6732 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6733 && (TYPE_UNSIGNED (shorter_type)
6734 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6735 || (TREE_CODE (arg1_unw) == INTEGER_CST
6736 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6737 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6738 && int_fits_type_p (arg1_unw, shorter_type))))
6739 return fold_build2_loc (loc, code, type, arg0_unw,
6740 fold_convert_loc (loc, shorter_type, arg1_unw));
6741
6742 if (TREE_CODE (arg1_unw) != INTEGER_CST
6743 || TREE_CODE (shorter_type) != INTEGER_TYPE
6744       || !int_fits_type_p (arg1_unw, TREE_TYPE (arg0)))
6745 return NULL_TREE;
6746
6747   /* If we are comparing with an integer that does not fit into the range
6748      of the shorter type, the result is known.  */
6749 outer_type = TREE_TYPE (arg1_unw);
6750 min = lower_bound_in_type (outer_type, shorter_type);
6751 max = upper_bound_in_type (outer_type, shorter_type);
6752
6753 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6754 max, arg1_unw));
6755 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6756 arg1_unw, min));
6757
6758 switch (code)
6759 {
6760 case EQ_EXPR:
6761 if (above || below)
6762 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6763 break;
6764
6765 case NE_EXPR:
6766 if (above || below)
6767 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6768 break;
6769
6770 case LT_EXPR:
6771 case LE_EXPR:
6772 if (above)
6773 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6774 else if (below)
6775 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6776
6777 case GT_EXPR:
6778 case GE_EXPR:
6779 if (above)
6780 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6781 else if (below)
6782 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6783
6784 default:
6785 break;
6786 }
6787
6788 return NULL_TREE;
6789 }
6790
6791 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6792 ARG0 just the signedness is changed. */
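
/* For instance, with U of type unsigned int, the comparison
   (int) U == 5 becomes U == 5U: the conversion changes only the
   signedness, and equality is insensitive to the sign interpretation
   of the bits.  */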
6793
6794 static tree
6795 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6796 tree arg0, tree arg1)
6797 {
6798 tree arg0_inner;
6799 tree inner_type, outer_type;
6800
6801 if (!CONVERT_EXPR_P (arg0))
6802 return NULL_TREE;
6803
6804 outer_type = TREE_TYPE (arg0);
6805 arg0_inner = TREE_OPERAND (arg0, 0);
6806 inner_type = TREE_TYPE (arg0_inner);
6807
6808 #ifdef HAVE_canonicalize_funcptr_for_compare
6809 /* Disable this optimization if we're casting a function pointer
6810 type on targets that require function pointer canonicalization. */
6811 if (HAVE_canonicalize_funcptr_for_compare
6812 && TREE_CODE (inner_type) == POINTER_TYPE
6813 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6814 return NULL_TREE;
6815 #endif
6816
6817 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6818 return NULL_TREE;
6819
6820 if (TREE_CODE (arg1) != INTEGER_CST
6821 && !(CONVERT_EXPR_P (arg1)
6822 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6823 return NULL_TREE;
6824
6825 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6826 && code != NE_EXPR
6827 && code != EQ_EXPR)
6828 return NULL_TREE;
6829
6830 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6831 return NULL_TREE;
6832
6833 if (TREE_CODE (arg1) == INTEGER_CST)
6834 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6835 0, TREE_OVERFLOW (arg1));
6836 else
6837 arg1 = fold_convert_loc (loc, inner_type, arg1);
6838
6839 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6840 }
6841
6842 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6843    the step of the array.  Reconstructs s and delta in the case of s *
6844 delta being an integer constant (and thus already folded). ADDR is
6845 the address. MULT is the multiplicative expression. If the
6846 function succeeds, the new address expression is returned.
6847 Otherwise NULL_TREE is returned. LOC is the location of the
6848 resulting expression. */
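
/* For instance, with int a[10], the expression &a[1] p+ 4 * d (where
   4 == sizeof (int) is the array step) can be rewritten as &a[1 + d],
   saving the multiplication.  */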
6849
6850 static tree
6851 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6852 {
6853 tree s, delta, step;
6854 tree ref = TREE_OPERAND (addr, 0), pref;
6855 tree ret, pos;
6856 tree itype;
6857 bool mdim = false;
6858
6859 /* Strip the nops that might be added when converting op1 to sizetype. */
6860 STRIP_NOPS (op1);
6861
6862 /* Canonicalize op1 into a possibly non-constant delta
6863 and an INTEGER_CST s. */
6864 if (TREE_CODE (op1) == MULT_EXPR)
6865 {
6866 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6867
6868 STRIP_NOPS (arg0);
6869 STRIP_NOPS (arg1);
6870
6871 if (TREE_CODE (arg0) == INTEGER_CST)
6872 {
6873 s = arg0;
6874 delta = arg1;
6875 }
6876 else if (TREE_CODE (arg1) == INTEGER_CST)
6877 {
6878 s = arg1;
6879 delta = arg0;
6880 }
6881 else
6882 return NULL_TREE;
6883 }
6884 else if (TREE_CODE (op1) == INTEGER_CST)
6885 {
6886 delta = op1;
6887 s = NULL_TREE;
6888 }
6889 else
6890 {
6891       /* Treat op1 as delta * 1.  */
6892 delta = op1;
6893 s = integer_one_node;
6894 }
6895
6896 /* Handle &x.array the same as we would handle &x.array[0]. */
6897 if (TREE_CODE (ref) == COMPONENT_REF
6898 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6899 {
6900 tree domain;
6901
6902 /* Remember if this was a multi-dimensional array. */
6903 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6904 mdim = true;
6905
6906 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6907 if (! domain)
6908 goto cont;
6909 itype = TREE_TYPE (domain);
6910
6911 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6912 if (TREE_CODE (step) != INTEGER_CST)
6913 goto cont;
6914
6915 if (s)
6916 {
6917 if (! tree_int_cst_equal (step, s))
6918 goto cont;
6919 }
6920 else
6921 {
6922 	  /* Check whether delta is a multiple of the array step.  */
6923 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6924 if (! tmp)
6925 goto cont;
6926 delta = tmp;
6927 }
6928
6929 /* Only fold here if we can verify we do not overflow one
6930 dimension of a multi-dimensional array. */
6931 if (mdim)
6932 {
6933 tree tmp;
6934
6935 if (!TYPE_MIN_VALUE (domain)
6936 || !TYPE_MAX_VALUE (domain)
6937 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6938 goto cont;
6939
6940 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6941 fold_convert_loc (loc, itype,
6942 TYPE_MIN_VALUE (domain)),
6943 fold_convert_loc (loc, itype, delta));
6944 if (TREE_CODE (tmp) != INTEGER_CST
6945 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6946 goto cont;
6947 }
6948
6949 /* We found a suitable component reference. */
6950
6951 pref = TREE_OPERAND (addr, 0);
6952 ret = copy_node (pref);
6953 SET_EXPR_LOCATION (ret, loc);
6954
6955 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6956 fold_build2_loc
6957 (loc, PLUS_EXPR, itype,
6958 fold_convert_loc (loc, itype,
6959 TYPE_MIN_VALUE
6960 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6961 fold_convert_loc (loc, itype, delta)),
6962 NULL_TREE, NULL_TREE);
6963 return build_fold_addr_expr_loc (loc, ret);
6964 }
6965
6966 cont:
6967
6968 for (;; ref = TREE_OPERAND (ref, 0))
6969 {
6970 if (TREE_CODE (ref) == ARRAY_REF)
6971 {
6972 tree domain;
6973
6974 /* Remember if this was a multi-dimensional array. */
6975 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6976 mdim = true;
6977
6978 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6979 if (! domain)
6980 continue;
6981 itype = TREE_TYPE (domain);
6982
6983 step = array_ref_element_size (ref);
6984 if (TREE_CODE (step) != INTEGER_CST)
6985 continue;
6986
6987 if (s)
6988 {
6989 if (! tree_int_cst_equal (step, s))
6990 continue;
6991 }
6992 else
6993 {
6994 	      /* Check whether delta is a multiple of the array step.  */
6995 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6996 if (! tmp)
6997 continue;
6998 delta = tmp;
6999 }
7000
7001 /* Only fold here if we can verify we do not overflow one
7002 dimension of a multi-dimensional array. */
7003 if (mdim)
7004 {
7005 tree tmp;
7006
7007 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7008 || !TYPE_MAX_VALUE (domain)
7009 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7010 continue;
7011
7012 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7013 fold_convert_loc (loc, itype,
7014 TREE_OPERAND (ref, 1)),
7015 fold_convert_loc (loc, itype, delta));
7016 if (!tmp
7017 || TREE_CODE (tmp) != INTEGER_CST
7018 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7019 continue;
7020 }
7021
7022 break;
7023 }
7024 else
7025 mdim = false;
7026
7027 if (!handled_component_p (ref))
7028 return NULL_TREE;
7029 }
7030
7031   /* We found a suitable array reference.  So copy everything up to it,
7032 and replace the index. */
7033
7034 pref = TREE_OPERAND (addr, 0);
7035 ret = copy_node (pref);
7036 SET_EXPR_LOCATION (ret, loc);
7037 pos = ret;
7038
7039 while (pref != ref)
7040 {
7041 pref = TREE_OPERAND (pref, 0);
7042 TREE_OPERAND (pos, 0) = copy_node (pref);
7043 pos = TREE_OPERAND (pos, 0);
7044 }
7045
7046 TREE_OPERAND (pos, 1)
7047 = fold_build2_loc (loc, PLUS_EXPR, itype,
7048 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7049 fold_convert_loc (loc, itype, delta));
7050 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7051 }
7052
7053
7054 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7055 means A >= Y && A != MAX, but in this case we know that
7056 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
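
/* For instance, for an unsigned index I, the test
   I < LEN && I + 1 > START (INEQ == I + 1 > START, BOUND == I < LEN)
   becomes I < LEN && I >= START: the bound guarantees that I + 1
   cannot wrap around, so the non-sharp form is safe.  */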
7057
7058 static tree
7059 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7060 {
7061 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7062
7063 if (TREE_CODE (bound) == LT_EXPR)
7064 a = TREE_OPERAND (bound, 0);
7065 else if (TREE_CODE (bound) == GT_EXPR)
7066 a = TREE_OPERAND (bound, 1);
7067 else
7068 return NULL_TREE;
7069
7070 typea = TREE_TYPE (a);
7071 if (!INTEGRAL_TYPE_P (typea)
7072 && !POINTER_TYPE_P (typea))
7073 return NULL_TREE;
7074
7075 if (TREE_CODE (ineq) == LT_EXPR)
7076 {
7077 a1 = TREE_OPERAND (ineq, 1);
7078 y = TREE_OPERAND (ineq, 0);
7079 }
7080 else if (TREE_CODE (ineq) == GT_EXPR)
7081 {
7082 a1 = TREE_OPERAND (ineq, 0);
7083 y = TREE_OPERAND (ineq, 1);
7084 }
7085 else
7086 return NULL_TREE;
7087
7088 if (TREE_TYPE (a1) != typea)
7089 return NULL_TREE;
7090
7091 if (POINTER_TYPE_P (typea))
7092 {
7093       /* Convert the pointer types to integers before taking the difference.  */
7094 tree ta = fold_convert_loc (loc, ssizetype, a);
7095 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7096 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7097 }
7098 else
7099 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7100
7101 if (!diff || !integer_onep (diff))
7102 return NULL_TREE;
7103
7104 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7105 }
7106
7107 /* Fold a sum or difference of at least one multiplication.
7108 Returns the folded tree or NULL if no simplification could be made. */
7109
7110 static tree
7111 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7112 tree arg0, tree arg1)
7113 {
7114 tree arg00, arg01, arg10, arg11;
7115 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7116
7117 /* (A * C) +- (B * C) -> (A+-B) * C.
7118 (A * C) +- A -> A * (C+-1).
7119 We are most concerned about the case where C is a constant,
7120 but other combinations show up during loop reduction. Since
7121 it is not difficult, try all four possibilities. */
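
  /* E.g. X * 4 + Y * 4 becomes (X + Y) * 4, X * 8 - X becomes X * 7,
     and, via the common power-of-two factor handling below,
     X * 8 + Y * 4 becomes (X * 2 + Y) * 4.  */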
7122
7123 if (TREE_CODE (arg0) == MULT_EXPR)
7124 {
7125 arg00 = TREE_OPERAND (arg0, 0);
7126 arg01 = TREE_OPERAND (arg0, 1);
7127 }
7128 else if (TREE_CODE (arg0) == INTEGER_CST)
7129 {
7130 arg00 = build_one_cst (type);
7131 arg01 = arg0;
7132 }
7133 else
7134 {
7135 /* We cannot generate constant 1 for fract. */
7136 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7137 return NULL_TREE;
7138 arg00 = arg0;
7139 arg01 = build_one_cst (type);
7140 }
7141 if (TREE_CODE (arg1) == MULT_EXPR)
7142 {
7143 arg10 = TREE_OPERAND (arg1, 0);
7144 arg11 = TREE_OPERAND (arg1, 1);
7145 }
7146 else if (TREE_CODE (arg1) == INTEGER_CST)
7147 {
7148 arg10 = build_one_cst (type);
7149       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7150 	 the purpose of this canonicalization.  */
7151 if (TREE_INT_CST_HIGH (arg1) == -1
7152 && negate_expr_p (arg1)
7153 && code == PLUS_EXPR)
7154 {
7155 arg11 = negate_expr (arg1);
7156 code = MINUS_EXPR;
7157 }
7158 else
7159 arg11 = arg1;
7160 }
7161 else
7162 {
7163 /* We cannot generate constant 1 for fract. */
7164 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7165 return NULL_TREE;
7166 arg10 = arg1;
7167 arg11 = build_one_cst (type);
7168 }
7169 same = NULL_TREE;
7170
7171 if (operand_equal_p (arg01, arg11, 0))
7172 same = arg01, alt0 = arg00, alt1 = arg10;
7173 else if (operand_equal_p (arg00, arg10, 0))
7174 same = arg00, alt0 = arg01, alt1 = arg11;
7175 else if (operand_equal_p (arg00, arg11, 0))
7176 same = arg00, alt0 = arg01, alt1 = arg10;
7177 else if (operand_equal_p (arg01, arg10, 0))
7178 same = arg01, alt0 = arg00, alt1 = arg11;
7179
7180 /* No identical multiplicands; see if we can find a common
7181 power-of-two factor in non-power-of-two multiplies. This
7182 can help in multi-dimensional array access. */
7183 else if (host_integerp (arg01, 0)
7184 && host_integerp (arg11, 0))
7185 {
7186 HOST_WIDE_INT int01, int11, tmp;
7187 bool swap = false;
7188 tree maybe_same;
7189 int01 = TREE_INT_CST_LOW (arg01);
7190 int11 = TREE_INT_CST_LOW (arg11);
7191
7192 /* Move min of absolute values to int11. */
7193 if (absu_hwi (int01) < absu_hwi (int11))
7194 {
7195 tmp = int01, int01 = int11, int11 = tmp;
7196 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7197 maybe_same = arg01;
7198 swap = true;
7199 }
7200 else
7201 maybe_same = arg11;
7202
7203 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7204 	  /* The remainder should not be a constant, otherwise we
7205 	     end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7206 	     increases the number of multiplications necessary.  */
7207 && TREE_CODE (arg10) != INTEGER_CST)
7208 {
7209 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7210 build_int_cst (TREE_TYPE (arg00),
7211 int01 / int11));
7212 alt1 = arg10;
7213 same = maybe_same;
7214 if (swap)
7215 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7216 }
7217 }
7218
7219 if (same)
7220 return fold_build2_loc (loc, MULT_EXPR, type,
7221 fold_build2_loc (loc, code, type,
7222 fold_convert_loc (loc, type, alt0),
7223 fold_convert_loc (loc, type, alt1)),
7224 fold_convert_loc (loc, type, same));
7225
7226 return NULL_TREE;
7227 }
7228
7229 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7230 specified by EXPR into the buffer PTR of length LEN bytes.
7231 Return the number of bytes placed in the buffer, or zero
7232 upon failure. */
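
/* For instance, the SImode constant 0x11223344 is encoded as the
   bytes 44 33 22 11 on a little-endian target and as 11 22 33 44 on
   a big-endian one (assuming 8-bit bytes and a word size of at least
   four bytes).  */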
7233
7234 static int
7235 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7236 {
7237 tree type = TREE_TYPE (expr);
7238 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7239 int byte, offset, word, words;
7240 unsigned char value;
7241
7242 if (total_bytes > len)
7243 return 0;
7244 words = total_bytes / UNITS_PER_WORD;
7245
7246 for (byte = 0; byte < total_bytes; byte++)
7247 {
7248 int bitpos = byte * BITS_PER_UNIT;
7249 if (bitpos < HOST_BITS_PER_WIDE_INT)
7250 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7251 else
7252 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7253 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7254
7255 if (total_bytes > UNITS_PER_WORD)
7256 {
7257 word = byte / UNITS_PER_WORD;
7258 if (WORDS_BIG_ENDIAN)
7259 word = (words - 1) - word;
7260 offset = word * UNITS_PER_WORD;
7261 if (BYTES_BIG_ENDIAN)
7262 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7263 else
7264 offset += byte % UNITS_PER_WORD;
7265 }
7266 else
7267 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7268 ptr[offset] = value;
7269 }
7270 return total_bytes;
7271 }
7272
7273
7274 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7275 specified by EXPR into the buffer PTR of length LEN bytes.
7276 Return the number of bytes placed in the buffer, or zero
7277 upon failure. */
7278
7279 static int
7280 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7281 {
7282 tree type = TREE_TYPE (expr);
7283 enum machine_mode mode = TYPE_MODE (type);
7284 int total_bytes = GET_MODE_SIZE (mode);
7285 FIXED_VALUE_TYPE value;
7286 tree i_value, i_type;
7287
7288 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7289 return 0;
7290
7291 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7292
7293 if (NULL_TREE == i_type
7294       || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7295 return 0;
7296
7297 value = TREE_FIXED_CST (expr);
7298 i_value = double_int_to_tree (i_type, value.data);
7299
7300 return native_encode_int (i_value, ptr, len);
7301 }
7302
7303
7304 /* Subroutine of native_encode_expr. Encode the REAL_CST
7305 specified by EXPR into the buffer PTR of length LEN bytes.
7306 Return the number of bytes placed in the buffer, or zero
7307 upon failure. */
7308
7309 static int
7310 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7311 {
7312 tree type = TREE_TYPE (expr);
7313 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7314 int byte, offset, word, words, bitpos;
7315 unsigned char value;
7316
7317   /* There are always 32 bits in each long, no matter the size of
7318      the host's long.  We handle floating point representations with
7319      up to 192 bits.  */
7320 long tmp[6];
7321
7322 if (total_bytes > len)
7323 return 0;
7324 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7325
7326 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7327
7328 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7329 bitpos += BITS_PER_UNIT)
7330 {
7331 byte = (bitpos / BITS_PER_UNIT) & 3;
7332 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7333
7334 if (UNITS_PER_WORD < 4)
7335 {
7336 word = byte / UNITS_PER_WORD;
7337 if (WORDS_BIG_ENDIAN)
7338 word = (words - 1) - word;
7339 offset = word * UNITS_PER_WORD;
7340 if (BYTES_BIG_ENDIAN)
7341 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7342 else
7343 offset += byte % UNITS_PER_WORD;
7344 }
7345 else
7346 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7347 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7348 }
7349 return total_bytes;
7350 }
7351
7352 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7353 specified by EXPR into the buffer PTR of length LEN bytes.
7354 Return the number of bytes placed in the buffer, or zero
7355 upon failure. */
7356
7357 static int
7358 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7359 {
7360 int rsize, isize;
7361 tree part;
7362
7363 part = TREE_REALPART (expr);
7364 rsize = native_encode_expr (part, ptr, len);
7365 if (rsize == 0)
7366 return 0;
7367 part = TREE_IMAGPART (expr);
7368 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7369 if (isize != rsize)
7370 return 0;
7371 return rsize + isize;
7372 }
7373
7374
7375 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7376 specified by EXPR into the buffer PTR of length LEN bytes.
7377 Return the number of bytes placed in the buffer, or zero
7378 upon failure. */
7379
7380 static int
7381 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7382 {
7383 unsigned i, count;
7384 int size, offset;
7385 tree itype, elem;
7386
7387 offset = 0;
7388 count = VECTOR_CST_NELTS (expr);
7389 itype = TREE_TYPE (TREE_TYPE (expr));
7390 size = GET_MODE_SIZE (TYPE_MODE (itype));
7391 for (i = 0; i < count; i++)
7392 {
7393 elem = VECTOR_CST_ELT (expr, i);
7394 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7395 return 0;
7396 offset += size;
7397 }
7398 return offset;
7399 }
7400
7401
7402 /* Subroutine of native_encode_expr. Encode the STRING_CST
7403 specified by EXPR into the buffer PTR of length LEN bytes.
7404 Return the number of bytes placed in the buffer, or zero
7405 upon failure. */
7406
7407 static int
7408 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7409 {
7410 tree type = TREE_TYPE (expr);
7411 HOST_WIDE_INT total_bytes;
7412
7413 if (TREE_CODE (type) != ARRAY_TYPE
7414 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7415 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7416 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7417 return 0;
7418 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7419 if (total_bytes > len)
7420 return 0;
7421 if (TREE_STRING_LENGTH (expr) < total_bytes)
7422 {
7423 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7424 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7425 total_bytes - TREE_STRING_LENGTH (expr));
7426 }
7427 else
7428 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7429 return total_bytes;
7430 }
7431
7432
7433 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7434 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7435 buffer PTR of length LEN bytes. Return the number of bytes
7436 placed in the buffer, or zero upon failure. */
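
/* A typical use, as in fold_view_convert_expr below, is to fold a
   VIEW_CONVERT_EXPR by encoding a constant into a byte buffer and
   re-interpreting the bytes in the destination type:

     unsigned char buf[64];
     int len = native_encode_expr (expr, buf, sizeof (buf));
     if (len != 0)
       result = native_interpret_expr (type, buf, len);

   where a zero return value means the encoding failed.  */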
7437
7438 int
7439 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7440 {
7441 switch (TREE_CODE (expr))
7442 {
7443 case INTEGER_CST:
7444 return native_encode_int (expr, ptr, len);
7445
7446 case REAL_CST:
7447 return native_encode_real (expr, ptr, len);
7448
7449 case FIXED_CST:
7450 return native_encode_fixed (expr, ptr, len);
7451
7452 case COMPLEX_CST:
7453 return native_encode_complex (expr, ptr, len);
7454
7455 case VECTOR_CST:
7456 return native_encode_vector (expr, ptr, len);
7457
7458 case STRING_CST:
7459 return native_encode_string (expr, ptr, len);
7460
7461 default:
7462 return 0;
7463 }
7464 }
7465
7466
7467 /* Subroutine of native_interpret_expr. Interpret the contents of
7468 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7469 If the buffer cannot be interpreted, return NULL_TREE. */
7470
7471 static tree
7472 native_interpret_int (tree type, const unsigned char *ptr, int len)
7473 {
7474 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7475 double_int result;
7476
7477 if (total_bytes > len
7478 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7479 return NULL_TREE;
7480
7481 result = double_int::from_buffer (ptr, total_bytes);
7482
7483 return double_int_to_tree (type, result);
7484 }
7485
7486
7487 /* Subroutine of native_interpret_expr. Interpret the contents of
7488 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7489 If the buffer cannot be interpreted, return NULL_TREE. */
7490
7491 static tree
7492 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7493 {
7494 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7495 double_int result;
7496 FIXED_VALUE_TYPE fixed_value;
7497
7498 if (total_bytes > len
7499 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7500 return NULL_TREE;
7501
7502 result = double_int::from_buffer (ptr, total_bytes);
7503 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7504
7505 return build_fixed (type, fixed_value);
7506 }
7507
7508
7509 /* Subroutine of native_interpret_expr. Interpret the contents of
7510 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7511 If the buffer cannot be interpreted, return NULL_TREE. */
7512
7513 static tree
7514 native_interpret_real (tree type, const unsigned char *ptr, int len)
7515 {
7516 enum machine_mode mode = TYPE_MODE (type);
7517 int total_bytes = GET_MODE_SIZE (mode);
7518 int byte, offset, word, words, bitpos;
7519 unsigned char value;
7520   /* There are always 32 bits in each long, no matter the size of
7521      the host's long.  We handle floating point representations with
7522      up to 192 bits.  */
7523 REAL_VALUE_TYPE r;
7524 long tmp[6];
7525
7527 if (total_bytes > len || total_bytes > 24)
7528 return NULL_TREE;
7529 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7530
7531 memset (tmp, 0, sizeof (tmp));
7532 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7533 bitpos += BITS_PER_UNIT)
7534 {
7535 byte = (bitpos / BITS_PER_UNIT) & 3;
7536 if (UNITS_PER_WORD < 4)
7537 {
7538 word = byte / UNITS_PER_WORD;
7539 if (WORDS_BIG_ENDIAN)
7540 word = (words - 1) - word;
7541 offset = word * UNITS_PER_WORD;
7542 if (BYTES_BIG_ENDIAN)
7543 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7544 else
7545 offset += byte % UNITS_PER_WORD;
7546 }
7547 else
7548 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7549 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7550
7551 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7552 }
7553
7554 real_from_target (&r, tmp, mode);
7555 return build_real (type, r);
7556 }
7557
7558
7559 /* Subroutine of native_interpret_expr. Interpret the contents of
7560 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7561 If the buffer cannot be interpreted, return NULL_TREE. */
7562
7563 static tree
7564 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7565 {
7566 tree etype, rpart, ipart;
7567 int size;
7568
7569 etype = TREE_TYPE (type);
7570 size = GET_MODE_SIZE (TYPE_MODE (etype));
7571 if (size * 2 > len)
7572 return NULL_TREE;
7573 rpart = native_interpret_expr (etype, ptr, size);
7574 if (!rpart)
7575 return NULL_TREE;
7576 ipart = native_interpret_expr (etype, ptr+size, size);
7577 if (!ipart)
7578 return NULL_TREE;
7579 return build_complex (type, rpart, ipart);
7580 }
7581
7582
7583 /* Subroutine of native_interpret_expr. Interpret the contents of
7584 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7585 If the buffer cannot be interpreted, return NULL_TREE. */
7586
7587 static tree
7588 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7589 {
7590 tree etype, elem;
7591 int i, size, count;
7592 tree *elements;
7593
7594 etype = TREE_TYPE (type);
7595 size = GET_MODE_SIZE (TYPE_MODE (etype));
7596 count = TYPE_VECTOR_SUBPARTS (type);
7597 if (size * count > len)
7598 return NULL_TREE;
7599
7600 elements = XALLOCAVEC (tree, count);
7601 for (i = count - 1; i >= 0; i--)
7602 {
7603 elem = native_interpret_expr (etype, ptr+(i*size), size);
7604 if (!elem)
7605 return NULL_TREE;
7606 elements[i] = elem;
7607 }
7608 return build_vector (type, elements);
7609 }
7610
7611
7612 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7613 the buffer PTR of length LEN as a constant of type TYPE. For
7614 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7615 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7616 return NULL_TREE. */
7617
7618 tree
7619 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7620 {
7621 switch (TREE_CODE (type))
7622 {
7623 case INTEGER_TYPE:
7624 case ENUMERAL_TYPE:
7625 case BOOLEAN_TYPE:
7626 case POINTER_TYPE:
7627 case REFERENCE_TYPE:
7628 return native_interpret_int (type, ptr, len);
7629
7630 case REAL_TYPE:
7631 return native_interpret_real (type, ptr, len);
7632
7633 case FIXED_POINT_TYPE:
7634 return native_interpret_fixed (type, ptr, len);
7635
7636 case COMPLEX_TYPE:
7637 return native_interpret_complex (type, ptr, len);
7638
7639 case VECTOR_TYPE:
7640 return native_interpret_vector (type, ptr, len);
7641
7642 default:
7643 return NULL_TREE;
7644 }
7645 }
7646
7647 /* Returns true if we can interpret the contents of a native encoding
7648 as TYPE. */
7649
7650 static bool
7651 can_native_interpret_type_p (tree type)
7652 {
7653 switch (TREE_CODE (type))
7654 {
7655 case INTEGER_TYPE:
7656 case ENUMERAL_TYPE:
7657 case BOOLEAN_TYPE:
7658 case POINTER_TYPE:
7659 case REFERENCE_TYPE:
7660 case FIXED_POINT_TYPE:
7661 case REAL_TYPE:
7662 case COMPLEX_TYPE:
7663 case VECTOR_TYPE:
7664 return true;
7665 default:
7666 return false;
7667 }
7668 }
7669
7670 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7671 TYPE at compile-time. If we're unable to perform the conversion
7672 return NULL_TREE. */
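
/* For instance, on a target using IEEE single precision floats,
   VIEW_CONVERT_EXPR<int>(1.0f) folds to the INTEGER_CST 0x3f800000
   at compile time.  */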
7673
7674 static tree
7675 fold_view_convert_expr (tree type, tree expr)
7676 {
7677 /* We support up to 512-bit values (for V8DFmode). */
7678 unsigned char buffer[64];
7679 int len;
7680
7681 /* Check that the host and target are sane. */
7682 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7683 return NULL_TREE;
7684
7685 len = native_encode_expr (expr, buffer, sizeof (buffer));
7686 if (len == 0)
7687 return NULL_TREE;
7688
7689 return native_interpret_expr (type, buffer, len);
7690 }
7691
7692 /* Build an expression for the address of T. Folds away INDIRECT_REF
7693 to avoid confusing the gimplify process. */
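
/* For instance, &*p simplifies to p (with a NOP_EXPR if the pointer
   types differ), and &MEM_REF[p, 0] simplifies back to p.  */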
7694
7695 tree
7696 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7697 {
7698 /* The size of the object is not relevant when talking about its address. */
7699 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7700 t = TREE_OPERAND (t, 0);
7701
7702 if (TREE_CODE (t) == INDIRECT_REF)
7703 {
7704 t = TREE_OPERAND (t, 0);
7705
7706 if (TREE_TYPE (t) != ptrtype)
7707 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7708 }
7709 else if (TREE_CODE (t) == MEM_REF
7710 && integer_zerop (TREE_OPERAND (t, 1)))
7711 return TREE_OPERAND (t, 0);
7712 else if (TREE_CODE (t) == MEM_REF
7713 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7714 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7715 TREE_OPERAND (t, 0),
7716 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7717 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7718 {
7719 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7720
7721 if (TREE_TYPE (t) != ptrtype)
7722 t = fold_convert_loc (loc, ptrtype, t);
7723 }
7724 else
7725 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7726
7727 return t;
7728 }
7729
7730 /* Build an expression for the address of T. */
7731
7732 tree
7733 build_fold_addr_expr_loc (location_t loc, tree t)
7734 {
7735 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7736
7737 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7738 }
7739
7740 static bool vec_cst_ctor_to_array (tree, tree *);
7741
7742 /* Fold a unary expression of code CODE and type TYPE with operand
7743 OP0. Return the folded expression if folding is successful.
7744 Otherwise, return NULL_TREE. */
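
/* For instance, fold_unary_loc (loc, NEGATE_EXPR, integer_type_node,
   build_int_cst (integer_type_node, 5)) yields the INTEGER_CST -5,
   and fold_unary_loc (loc, BIT_NOT_EXPR, type, op) with op itself a
   BIT_NOT_EXPR yields the original operand.  */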
7745
7746 tree
7747 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7748 {
7749 tree tem;
7750 tree arg0;
7751 enum tree_code_class kind = TREE_CODE_CLASS (code);
7752
7753 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7754 && TREE_CODE_LENGTH (code) == 1);
7755
7756 arg0 = op0;
7757 if (arg0)
7758 {
7759 if (CONVERT_EXPR_CODE_P (code)
7760 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7761 {
7762 /* Don't use STRIP_NOPS, because signedness of argument type
7763 matters. */
7764 STRIP_SIGN_NOPS (arg0);
7765 }
7766 else
7767 {
7768 /* Strip any conversions that don't change the mode. This
7769 is safe for every expression, except for a comparison
7770 expression because its signedness is derived from its
7771 operands.
7772
7773 Note that this is done as an internal manipulation within
7774 the constant folder, in order to find the simplest
7775 representation of the arguments so that their form can be
7776 	     studied.  In any case, the appropriate type conversions
7777 should be put back in the tree that will get out of the
7778 constant folder. */
7779 STRIP_NOPS (arg0);
7780 }
7781 }
7782
7783 if (TREE_CODE_CLASS (code) == tcc_unary)
7784 {
7785 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7786 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7787 fold_build1_loc (loc, code, type,
7788 fold_convert_loc (loc, TREE_TYPE (op0),
7789 TREE_OPERAND (arg0, 1))));
7790 else if (TREE_CODE (arg0) == COND_EXPR)
7791 {
7792 tree arg01 = TREE_OPERAND (arg0, 1);
7793 tree arg02 = TREE_OPERAND (arg0, 2);
7794 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7795 arg01 = fold_build1_loc (loc, code, type,
7796 fold_convert_loc (loc,
7797 TREE_TYPE (op0), arg01));
7798 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7799 arg02 = fold_build1_loc (loc, code, type,
7800 fold_convert_loc (loc,
7801 TREE_TYPE (op0), arg02));
7802 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7803 arg01, arg02);
7804
7805 	  /* If this was a conversion, and all we did was to move it
7806 	     inside the COND_EXPR, bring it back out.  But leave it if
7807 it is a conversion from integer to integer and the
7808 result precision is no wider than a word since such a
7809 conversion is cheap and may be optimized away by combine,
7810 while it couldn't if it were outside the COND_EXPR. Then return
7811 so we don't get into an infinite recursion loop taking the
7812 conversion out and then back in. */
7813
7814 if ((CONVERT_EXPR_CODE_P (code)
7815 || code == NON_LVALUE_EXPR)
7816 && TREE_CODE (tem) == COND_EXPR
7817 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7818 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7819 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7820 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7821 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7822 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7823 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7824 && (INTEGRAL_TYPE_P
7825 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7826 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7827 || flag_syntax_only))
7828 tem = build1_loc (loc, code, type,
7829 build3 (COND_EXPR,
7830 TREE_TYPE (TREE_OPERAND
7831 (TREE_OPERAND (tem, 1), 0)),
7832 TREE_OPERAND (tem, 0),
7833 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7834 TREE_OPERAND (TREE_OPERAND (tem, 2),
7835 0)));
7836 return tem;
7837 }
7838 }
7839
7840 switch (code)
7841 {
7842 case PAREN_EXPR:
7843 /* Re-association barriers around constants and other re-association
7844 barriers can be removed. */
7845 if (CONSTANT_CLASS_P (op0)
7846 || TREE_CODE (op0) == PAREN_EXPR)
7847 return fold_convert_loc (loc, type, op0);
7848 return NULL_TREE;
7849
7850 CASE_CONVERT:
7851 case FLOAT_EXPR:
7852 case FIX_TRUNC_EXPR:
7853 if (TREE_TYPE (op0) == type)
7854 return op0;
7855
7856 if (COMPARISON_CLASS_P (op0))
7857 {
7858 /* If we have (type) (a CMP b) and type is an integral type, return
7859 new expression involving the new type. Canonicalize
7860 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7861 non-integral type.
7862 	     Do not fold the result as that would not simplify further;
7863 	     folding it again would also lead to infinite recursion.  */
7864 if (TREE_CODE (type) == BOOLEAN_TYPE)
7865 return build2_loc (loc, TREE_CODE (op0), type,
7866 TREE_OPERAND (op0, 0),
7867 TREE_OPERAND (op0, 1));
7868 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7869 && TREE_CODE (type) != VECTOR_TYPE)
7870 return build3_loc (loc, COND_EXPR, type, op0,
7871 constant_boolean_node (true, type),
7872 constant_boolean_node (false, type));
7873 }
7874
7875 /* Handle cases of two conversions in a row. */
7876 if (CONVERT_EXPR_P (op0))
7877 {
7878 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7879 tree inter_type = TREE_TYPE (op0);
7880 int inside_int = INTEGRAL_TYPE_P (inside_type);
7881 int inside_ptr = POINTER_TYPE_P (inside_type);
7882 int inside_float = FLOAT_TYPE_P (inside_type);
7883 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7884 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7885 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7886 int inter_int = INTEGRAL_TYPE_P (inter_type);
7887 int inter_ptr = POINTER_TYPE_P (inter_type);
7888 int inter_float = FLOAT_TYPE_P (inter_type);
7889 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7890 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7891 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7892 int final_int = INTEGRAL_TYPE_P (type);
7893 int final_ptr = POINTER_TYPE_P (type);
7894 int final_float = FLOAT_TYPE_P (type);
7895 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7896 unsigned int final_prec = TYPE_PRECISION (type);
7897 int final_unsignedp = TYPE_UNSIGNED (type);
7898
7899 /* In addition to the cases of two conversions in a row
7900 handled below, if we are converting something to its own
7901 type via an object of identical or wider precision, neither
7902 conversion is needed. */
7903 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7904 && (((inter_int || inter_ptr) && final_int)
7905 || (inter_float && final_float))
7906 && inter_prec >= final_prec)
7907 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7908
7909 /* Likewise, if the intermediate and initial types are either both
7910 float or both integer, we don't need the middle conversion if the
7911 former is wider than the latter and doesn't change the signedness
7912 (for integers). Avoid this if the final type is a pointer since
7913 then we sometimes need the middle conversion. Likewise if the
7914 final type has a precision not equal to the size of its mode. */
7915 if (((inter_int && inside_int)
7916 || (inter_float && inside_float)
7917 || (inter_vec && inside_vec))
7918 && inter_prec >= inside_prec
7919 && (inter_float || inter_vec
7920 || inter_unsignedp == inside_unsignedp)
7921 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7922 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7923 && ! final_ptr
7924 && (! final_vec || inter_prec == inside_prec))
7925 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7926
7927 /* If we have a sign-extension of a zero-extended value, we can
7928 replace that by a single zero-extension. Likewise if the
7929 final conversion does not change precision we can drop the
7930 intermediate conversion. */
7931 if (inside_int && inter_int && final_int
7932 && ((inside_prec < inter_prec && inter_prec < final_prec
7933 && inside_unsignedp && !inter_unsignedp)
7934 || final_prec == inter_prec))
7935 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7936
7937 /* Two conversions in a row are not needed unless:
7938 - some conversion is floating-point (overstrict for now), or
7939 - some conversion is a vector (overstrict for now), or
7940 - the intermediate type is narrower than both initial and
7941 final, or
7942 - the intermediate type and innermost type differ in signedness,
7943 and the outermost type is wider than the intermediate, or
7944 - the initial type is a pointer type and the precisions of the
7945 intermediate and final types differ, or
7946 - the final type is a pointer type and the precisions of the
7947 initial and intermediate types differ. */
7948 if (! inside_float && ! inter_float && ! final_float
7949 && ! inside_vec && ! inter_vec && ! final_vec
7950 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7951 && ! (inside_int && inter_int
7952 && inter_unsignedp != inside_unsignedp
7953 && inter_prec < final_prec)
7954 && ((inter_unsignedp && inter_prec > inside_prec)
7955 == (final_unsignedp && final_prec > inter_prec))
7956 && ! (inside_ptr && inter_prec != final_prec)
7957 && ! (final_ptr && inside_prec != inter_prec)
7958 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7959 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7960 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7961 }
7962
7963 /* Handle (T *)&A.B.C for A being of type T and B and C
7964 living at offset zero. This occurs frequently in
7965 C++ upcasting and then accessing the base. */
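
      /* E.g. given struct A { struct B b; } a;, the expression
	 (struct A *) &a.b folds to &a, since the member b lives at
	 offset zero within a.  */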
7966 if (TREE_CODE (op0) == ADDR_EXPR
7967 && POINTER_TYPE_P (type)
7968 && handled_component_p (TREE_OPERAND (op0, 0)))
7969 {
7970 HOST_WIDE_INT bitsize, bitpos;
7971 tree offset;
7972 enum machine_mode mode;
7973 int unsignedp, volatilep;
7974 tree base = TREE_OPERAND (op0, 0);
7975 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7976 &mode, &unsignedp, &volatilep, false);
7977 /* If the reference was to a (constant) zero offset, we can use
7978 the address of the base if it has the same base type
7979 as the result type and the pointer type is unqualified. */
7980 if (! offset && bitpos == 0
7981 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7982 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7983 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7984 return fold_convert_loc (loc, type,
7985 build_fold_addr_expr_loc (loc, base));
7986 }
7987
7988 if (TREE_CODE (op0) == MODIFY_EXPR
7989 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7990 /* Detect assigning a bitfield. */
7991 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7992 && DECL_BIT_FIELD
7993 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7994 {
7995 /* Don't leave an assignment inside a conversion
7996 unless assigning a bitfield. */
7997 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7998 /* First do the assignment, then return converted constant. */
7999 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8000 TREE_NO_WARNING (tem) = 1;
8001 TREE_USED (tem) = 1;
8002 return tem;
8003 }
8004
8005       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8006 	 constant (if x has signed type, the sign bit cannot be set
8007 in c). This folds extension into the BIT_AND_EXPR.
8008 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8009 very likely don't have maximal range for their precision and this
8010 transformation effectively doesn't preserve non-maximal ranges. */
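
      /* E.g. with a signed char C, (unsigned int) (C & 0x7f) becomes
	 (unsigned int) C & 0x7f: the mask clears the sign bit, so the
	 widening conversion can be folded into the BIT_AND_EXPR.  */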
8011 if (TREE_CODE (type) == INTEGER_TYPE
8012 && TREE_CODE (op0) == BIT_AND_EXPR
8013 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8014 {
8015 tree and_expr = op0;
8016 tree and0 = TREE_OPERAND (and_expr, 0);
8017 tree and1 = TREE_OPERAND (and_expr, 1);
8018 int change = 0;
8019
8020 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8021 || (TYPE_PRECISION (type)
8022 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8023 change = 1;
8024 else if (TYPE_PRECISION (TREE_TYPE (and1))
8025 <= HOST_BITS_PER_WIDE_INT
8026 && host_integerp (and1, 1))
8027 {
8028 unsigned HOST_WIDE_INT cst;
8029
8030 cst = tree_low_cst (and1, 1);
8031 cst &= (HOST_WIDE_INT) -1
8032 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8033 change = (cst == 0);
8034 #ifdef LOAD_EXTEND_OP
8035 if (change
8036 && !flag_syntax_only
8037 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8038 == ZERO_EXTEND))
8039 {
8040 tree uns = unsigned_type_for (TREE_TYPE (and0));
8041 and0 = fold_convert_loc (loc, uns, and0);
8042 and1 = fold_convert_loc (loc, uns, and1);
8043 }
8044 #endif
8045 }
8046 if (change)
8047 {
8048 tem = force_fit_type_double (type, tree_to_double_int (and1),
8049 0, TREE_OVERFLOW (and1));
8050 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8051 fold_convert_loc (loc, type, and0), tem);
8052 }
8053 }
8054
8055 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8056 when one of the new casts will fold away. Conservatively we assume
8057 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8058 if (POINTER_TYPE_P (type)
8059 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8060 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8061 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8062 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8063 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8064 {
8065 tree arg00 = TREE_OPERAND (arg0, 0);
8066 tree arg01 = TREE_OPERAND (arg0, 1);
8067
8068 return fold_build_pointer_plus_loc
8069 (loc, fold_convert_loc (loc, type, arg00), arg01);
8070 }
8071
8072 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8073 of the same precision, and X is an integer type not narrower than
8074 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8075 if (INTEGRAL_TYPE_P (type)
8076 && TREE_CODE (op0) == BIT_NOT_EXPR
8077 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8078 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8079 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8080 {
8081 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8082 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8083 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8084 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8085 fold_convert_loc (loc, type, tem));
8086 }
8087
8088 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8089 type of X and Y (integer types only). */
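
      /* E.g. (short) (x * y) with int operands X and Y can be
	 computed as (short) ((unsigned short) x * (unsigned short) y);
	 the unsigned intermediate type keeps the narrowed multiply
	 from introducing overflow that the original did not have.  */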
8090 if (INTEGRAL_TYPE_P (type)
8091 && TREE_CODE (op0) == MULT_EXPR
8092 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8093 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8094 {
8095 /* Be careful not to introduce new overflows. */
8096 tree mult_type;
8097 if (TYPE_OVERFLOW_WRAPS (type))
8098 mult_type = type;
8099 else
8100 mult_type = unsigned_type_for (type);
8101
8102 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8103 {
8104 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8105 fold_convert_loc (loc, mult_type,
8106 TREE_OPERAND (op0, 0)),
8107 fold_convert_loc (loc, mult_type,
8108 TREE_OPERAND (op0, 1)));
8109 return fold_convert_loc (loc, type, tem);
8110 }
8111 }
8112
8113 tem = fold_convert_const (code, type, op0);
8114 return tem ? tem : NULL_TREE;
8115
8116 case ADDR_SPACE_CONVERT_EXPR:
8117 if (integer_zerop (arg0))
8118 return fold_convert_const (code, type, arg0);
8119 return NULL_TREE;
8120
8121 case FIXED_CONVERT_EXPR:
8122 tem = fold_convert_const (code, type, arg0);
8123 return tem ? tem : NULL_TREE;
8124
8125 case VIEW_CONVERT_EXPR:
8126 if (TREE_TYPE (op0) == type)
8127 return op0;
8128 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8129 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8130 type, TREE_OPERAND (op0, 0));
8131 if (TREE_CODE (op0) == MEM_REF)
8132 return fold_build2_loc (loc, MEM_REF, type,
8133 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8134
8135 /* For integral conversions with the same precision or pointer
8136 conversions use a NOP_EXPR instead. */
8137 if ((INTEGRAL_TYPE_P (type)
8138 || POINTER_TYPE_P (type))
8139 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8140 || POINTER_TYPE_P (TREE_TYPE (op0)))
8141 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8142 return fold_convert_loc (loc, type, op0);
8143
8144 /* Strip inner integral conversions that do not change the precision. */
8145 if (CONVERT_EXPR_P (op0)
8146 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8147 || POINTER_TYPE_P (TREE_TYPE (op0)))
8148 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8149 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8150 && (TYPE_PRECISION (TREE_TYPE (op0))
8151 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8152 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8153 type, TREE_OPERAND (op0, 0));
8154
8155 return fold_view_convert_expr (type, op0);
8156
8157 case NEGATE_EXPR:
8158 tem = fold_negate_expr (loc, arg0);
8159 if (tem)
8160 return fold_convert_loc (loc, type, tem);
8161 return NULL_TREE;
8162
8163 case ABS_EXPR:
8164 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8165 return fold_abs_const (arg0, type);
8166 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8167 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8168 /* Convert fabs((double)float) into (double)fabsf(float). */
8169 else if (TREE_CODE (arg0) == NOP_EXPR
8170 && TREE_CODE (type) == REAL_TYPE)
8171 {
8172 tree targ0 = strip_float_extensions (arg0);
8173 if (targ0 != arg0)
8174 return fold_convert_loc (loc, type,
8175 fold_build1_loc (loc, ABS_EXPR,
8176 TREE_TYPE (targ0),
8177 targ0));
8178 }
8179 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8180 else if (TREE_CODE (arg0) == ABS_EXPR)
8181 return arg0;
8182 else if (tree_expr_nonnegative_p (arg0))
8183 return arg0;
8184
8185 /* Strip sign ops from argument. */
8186 if (TREE_CODE (type) == REAL_TYPE)
8187 {
8188 tem = fold_strip_sign_ops (arg0);
8189 if (tem)
8190 return fold_build1_loc (loc, ABS_EXPR, type,
8191 fold_convert_loc (loc, type, tem));
8192 }
8193 return NULL_TREE;
8194
8195 case CONJ_EXPR:
8196 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8197 return fold_convert_loc (loc, type, arg0);
8198 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8199 {
8200 tree itype = TREE_TYPE (type);
8201 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8202 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8203 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8204 negate_expr (ipart));
8205 }
8206 if (TREE_CODE (arg0) == COMPLEX_CST)
8207 {
8208 tree itype = TREE_TYPE (type);
8209 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8210 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8211 return build_complex (type, rpart, negate_expr (ipart));
8212 }
8213 if (TREE_CODE (arg0) == CONJ_EXPR)
8214 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8215 return NULL_TREE;
8216
8217 case BIT_NOT_EXPR:
8218 if (TREE_CODE (arg0) == INTEGER_CST)
8219 return fold_not_const (arg0, type);
8220 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8221 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8222 /* Convert ~ (-A) to A - 1. */
8223 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8224 return fold_build2_loc (loc, MINUS_EXPR, type,
8225 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8226 build_int_cst (type, 1));
8227 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8228 else if (INTEGRAL_TYPE_P (type)
8229 && ((TREE_CODE (arg0) == MINUS_EXPR
8230 && integer_onep (TREE_OPERAND (arg0, 1)))
8231 || (TREE_CODE (arg0) == PLUS_EXPR
8232 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8233 return fold_build1_loc (loc, NEGATE_EXPR, type,
8234 fold_convert_loc (loc, type,
8235 TREE_OPERAND (arg0, 0)));
8236 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8237 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8238 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8239 fold_convert_loc (loc, type,
8240 TREE_OPERAND (arg0, 0)))))
8241 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8242 fold_convert_loc (loc, type,
8243 TREE_OPERAND (arg0, 1)));
8244 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8245 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8246 fold_convert_loc (loc, type,
8247 TREE_OPERAND (arg0, 1)))))
8248 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8249 fold_convert_loc (loc, type,
8250 TREE_OPERAND (arg0, 0)), tem);
8251 /* Perform BIT_NOT_EXPR on each element individually. */
8252 else if (TREE_CODE (arg0) == VECTOR_CST)
8253 {
8254 tree *elements;
8255 tree elem;
8256 unsigned count = VECTOR_CST_NELTS (arg0), i;
8257
8258 elements = XALLOCAVEC (tree, count);
8259 for (i = 0; i < count; i++)
8260 {
8261 elem = VECTOR_CST_ELT (arg0, i);
8262 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8263 if (elem == NULL_TREE)
8264 break;
8265 elements[i] = elem;
8266 }
8267 if (i == count)
8268 return build_vector (type, elements);
8269 }
8270
8271 return NULL_TREE;
8272
8273 case TRUTH_NOT_EXPR:
8274 /* The argument to invert_truthvalue must have Boolean type. */
8275 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8276 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8277
8278 /* Note that the operand of this must be an int
8279 and its values must be 0 or 1.
8280 ("true" is a fixed value perhaps depending on the language,
8281 but we don't handle values other than 1 correctly yet.) */
8282 tem = fold_truth_not_expr (loc, arg0);
8283 if (!tem)
8284 return NULL_TREE;
8285 return fold_convert_loc (loc, type, tem);
8286
8287 case REALPART_EXPR:
8288 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8289 return fold_convert_loc (loc, type, arg0);
8290 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8291 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8292 TREE_OPERAND (arg0, 1));
8293 if (TREE_CODE (arg0) == COMPLEX_CST)
8294 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8295 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8296 {
8297 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8298 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8299 fold_build1_loc (loc, REALPART_EXPR, itype,
8300 TREE_OPERAND (arg0, 0)),
8301 fold_build1_loc (loc, REALPART_EXPR, itype,
8302 TREE_OPERAND (arg0, 1)));
8303 return fold_convert_loc (loc, type, tem);
8304 }
8305 if (TREE_CODE (arg0) == CONJ_EXPR)
8306 {
8307 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8308 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8309 TREE_OPERAND (arg0, 0));
8310 return fold_convert_loc (loc, type, tem);
8311 }
8312 if (TREE_CODE (arg0) == CALL_EXPR)
8313 {
8314 tree fn = get_callee_fndecl (arg0);
8315 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8316 switch (DECL_FUNCTION_CODE (fn))
8317 {
8318 CASE_FLT_FN (BUILT_IN_CEXPI):
8319 fn = mathfn_built_in (type, BUILT_IN_COS);
8320 if (fn)
8321 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8322 break;
8323
8324 default:
8325 break;
8326 }
8327 }
8328 return NULL_TREE;
8329
8330 case IMAGPART_EXPR:
8331 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8332 return build_zero_cst (type);
8333 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8334 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8335 TREE_OPERAND (arg0, 0));
8336 if (TREE_CODE (arg0) == COMPLEX_CST)
8337 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8338 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8339 {
8340 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8341 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8342 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8343 TREE_OPERAND (arg0, 0)),
8344 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8345 TREE_OPERAND (arg0, 1)));
8346 return fold_convert_loc (loc, type, tem);
8347 }
8348 if (TREE_CODE (arg0) == CONJ_EXPR)
8349 {
8350 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8351 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8352 return fold_convert_loc (loc, type, negate_expr (tem));
8353 }
8354 if (TREE_CODE (arg0) == CALL_EXPR)
8355 {
8356 tree fn = get_callee_fndecl (arg0);
8357 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8358 switch (DECL_FUNCTION_CODE (fn))
8359 {
8360 CASE_FLT_FN (BUILT_IN_CEXPI):
8361 fn = mathfn_built_in (type, BUILT_IN_SIN);
8362 if (fn)
8363 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8364 break;
8365
8366 default:
8367 break;
8368 }
8369 }
8370 return NULL_TREE;
8371
8372 case INDIRECT_REF:
8373 /* Fold *&X to X if X is an lvalue. */
8374 if (TREE_CODE (op0) == ADDR_EXPR)
8375 {
8376 tree op00 = TREE_OPERAND (op0, 0);
8377 if ((TREE_CODE (op00) == VAR_DECL
8378 || TREE_CODE (op00) == PARM_DECL
8379 || TREE_CODE (op00) == RESULT_DECL)
8380 && !TREE_READONLY (op00))
8381 return op00;
8382 }
8383 return NULL_TREE;
8384
8385 case VEC_UNPACK_LO_EXPR:
8386 case VEC_UNPACK_HI_EXPR:
8387 case VEC_UNPACK_FLOAT_LO_EXPR:
8388 case VEC_UNPACK_FLOAT_HI_EXPR:
8389 {
8390 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8391 tree *elts;
8392 enum tree_code subcode;
8393
8394 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8395 if (TREE_CODE (arg0) != VECTOR_CST)
8396 return NULL_TREE;
8397
8398 elts = XALLOCAVEC (tree, nelts * 2);
8399 if (!vec_cst_ctor_to_array (arg0, elts))
8400 return NULL_TREE;
8401
8402 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8403 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8404 elts += nelts;
8405
8406 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8407 subcode = NOP_EXPR;
8408 else
8409 subcode = FLOAT_EXPR;
8410
8411 for (i = 0; i < nelts; i++)
8412 {
8413 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8414 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8415 return NULL_TREE;
8416 }
8417
8418 return build_vector (type, elts);
8419 }
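/* Added worked example:  VEC_UNPACK_LO_EXPR from a four-element
   vector constant {1, 2, 3, 4} to a two-element wider type picks
   the low half and widens each element, yielding {1, 2} on a
   little-endian target (which half is taken depends on
   BYTES_BIG_ENDIAN).  */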
8420
8421 case REDUC_MIN_EXPR:
8422 case REDUC_MAX_EXPR:
8423 case REDUC_PLUS_EXPR:
8424 {
8425 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8426 tree *elts;
8427 enum tree_code subcode;
8428
8429 if (TREE_CODE (op0) != VECTOR_CST)
8430 return NULL_TREE;
8431
8432 elts = XALLOCAVEC (tree, nelts);
8433 if (!vec_cst_ctor_to_array (op0, elts))
8434 return NULL_TREE;
8435
8436 switch (code)
8437 {
8438 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8439 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8440 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8441 default: gcc_unreachable ();
8442 }
8443
8444 for (i = 1; i < nelts; i++)
8445 {
8446 elts[0] = const_binop (subcode, elts[0], elts[i]);
8447 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8448 return NULL_TREE;
8449 elts[i] = build_zero_cst (TREE_TYPE (type));
8450 }
8451
8452 return build_vector (type, elts);
8453 }
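/* Added worked example:  REDUC_PLUS_EXPR on {1, 2, 3, 4} folds to
   the vector {10, 0, 0, 0}: the sum accumulates into element 0 and
   the remaining elements are zeroed.  */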
8454
8455 default:
8456 return NULL_TREE;
8457 } /* switch (code) */
8458 }
8459
8460
8461 /* If the operation was a conversion, do _not_ mark a resulting constant
8462 with TREE_OVERFLOW if the original constant was not.  These conversions
8463 have implementation-defined behavior and retaining the TREE_OVERFLOW
8464 flag here would confuse later passes such as VRP. */
8465 tree
8466 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8467 tree type, tree op0)
8468 {
8469 tree res = fold_unary_loc (loc, code, type, op0);
8470 if (res
8471 && TREE_CODE (res) == INTEGER_CST
8472 && TREE_CODE (op0) == INTEGER_CST
8473 && CONVERT_EXPR_CODE_P (code))
8474 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8475
8476 return res;
8477 }
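/* Illustrative sketch (an added example, not from the original source;
   'c' and 'utype' are hypothetical):  if C is an INTEGER_CST whose
   TREE_OVERFLOW flag is clear, then

     tree r = fold_unary_ignore_overflow_loc (loc, NOP_EXPR, utype, c);

   returns a constant with TREE_OVERFLOW (r) == TREE_OVERFLOW (c), even
   when fold_unary_loc alone would have set the flag on the converted
   result.  */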
8478
8479 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8480 operands OP0 and OP1. LOC is the location of the resulting expression.
8481 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8482 Return the folded expression if folding is successful. Otherwise,
8483 return NULL_TREE. */
8484 static tree
8485 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8486 tree arg0, tree arg1, tree op0, tree op1)
8487 {
8488 tree tem;
8489
8490 /* We only do these simplifications if we are optimizing. */
8491 if (!optimize)
8492 return NULL_TREE;
8493
8494 /* Check for things like (A || B) && (A || C). We can convert this
8495 to A || (B && C). Note that either operator can be any of the four
8496 truth and/or operations and the transformation will still be
8497 valid. Also note that we only care about order for the
8498 ANDIF and ORIF operators. If B contains side effects, this
8499 might change the truth-value of A. */
8500 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8501 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8502 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8503 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8504 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8505 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8506 {
8507 tree a00 = TREE_OPERAND (arg0, 0);
8508 tree a01 = TREE_OPERAND (arg0, 1);
8509 tree a10 = TREE_OPERAND (arg1, 0);
8510 tree a11 = TREE_OPERAND (arg1, 1);
8511 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8512 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8513 && (code == TRUTH_AND_EXPR
8514 || code == TRUTH_OR_EXPR));
8515
8516 if (operand_equal_p (a00, a10, 0))
8517 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8518 fold_build2_loc (loc, code, type, a01, a11));
8519 else if (commutative && operand_equal_p (a00, a11, 0))
8520 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8521 fold_build2_loc (loc, code, type, a01, a10));
8522 else if (commutative && operand_equal_p (a01, a10, 0))
8523 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8524 fold_build2_loc (loc, code, type, a00, a11));
8525
8526 /* This case is tricky because we must either have commutative
8527 operators or else A10 must not have side-effects. */
8528
8529 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8530 && operand_equal_p (a01, a11, 0))
8531 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8532 fold_build2_loc (loc, code, type, a00, a10),
8533 a01);
8534 }
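/* Added worked example of the distribution above:
     (a || b) && (a || c)  ->  a || (b && c).
   The commutative matches, e.g. (b OR a) AND (a OR c) -> a OR (b AND c),
   apply only when both operators are the non-short-circuiting
   TRUTH_AND_EXPR/TRUTH_OR_EXPR forms.  */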
8535
8536 /* See if we can build a range comparison. */
8537 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8538 return tem;
8539
8540 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8541 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8542 {
8543 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8544 if (tem)
8545 return fold_build2_loc (loc, code, type, tem, arg1);
8546 }
8547
8548 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8549 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8550 {
8551 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8552 if (tem)
8553 return fold_build2_loc (loc, code, type, arg0, tem);
8554 }
8555
8556 /* Check for the possibility of merging component references. If our
8557 lhs is another similar operation, try to merge its rhs with our
8558 rhs. Then try to merge our lhs and rhs. */
8559 if (TREE_CODE (arg0) == code
8560 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8561 TREE_OPERAND (arg0, 1), arg1)))
8562 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8563
8564 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8565 return tem;
8566
8567 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8568 && (code == TRUTH_AND_EXPR
8569 || code == TRUTH_ANDIF_EXPR
8570 || code == TRUTH_OR_EXPR
8571 || code == TRUTH_ORIF_EXPR))
8572 {
8573 enum tree_code ncode, icode;
8574
8575 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8576 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8577 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8578
8579 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8580 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8581 We don't want to pack more than two leaves into a non-IF AND/OR
8582 expression.
8583 If the tree code of the left-hand operand isn't an AND/OR-IF code and
8584 isn't equal to IF-CODE, then we don't want to add the right-hand operand.
8585 If the inner right-hand side of the left-hand operand has
8586 side-effects, or isn't simple, then we can't add to it,
8587 as otherwise we might destroy the if-sequence. */
8588 if (TREE_CODE (arg0) == icode
8589 && simple_operand_p_2 (arg1)
8590 /* Needed for sequence points to handle traps, and
8591 side-effects. */
8592 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8593 {
8594 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8595 arg1);
8596 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8597 tem);
8598 }
8599 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8600 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8601 else if (TREE_CODE (arg1) == icode
8602 && simple_operand_p_2 (arg0)
8603 /* Needed for sequence points to handle traps, and
8604 side-effects. */
8605 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8606 {
8607 tem = fold_build2_loc (loc, ncode, type,
8608 arg0, TREE_OPERAND (arg1, 0));
8609 return fold_build2_loc (loc, icode, type, tem,
8610 TREE_OPERAND (arg1, 1));
8611 }
8612 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8613 into (A OR B).
8614 For sequence point consistency, we need to check for trapping,
8615 and side-effects. */
8616 else if (code == icode && simple_operand_p_2 (arg0)
8617 && simple_operand_p_2 (arg1))
8618 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8619 }
8620
8621 return NULL_TREE;
8622 }
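/* Added illustration of the LOGICAL_OP_NON_SHORT_CIRCUIT packing above:
   with A, B and C simple and trap-free,
     (A AND-IF B) AND-IF C  ->  A AND-IF (B AND C)
   so at most two leaves end up under the non-IF TRUTH_AND_EXPR.  */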
8623
8624 /* Fold a binary expression of code CODE and type TYPE with operands
8625 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8626 Return the folded expression if folding is successful. Otherwise,
8627 return NULL_TREE. */
8628
8629 static tree
8630 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8631 {
8632 enum tree_code compl_code;
8633
8634 if (code == MIN_EXPR)
8635 compl_code = MAX_EXPR;
8636 else if (code == MAX_EXPR)
8637 compl_code = MIN_EXPR;
8638 else
8639 gcc_unreachable ();
8640
8641 /* MIN (MAX (a, b), b) == b. */
8642 if (TREE_CODE (op0) == compl_code
8643 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8644 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8645
8646 /* MIN (MAX (b, a), b) == b. */
8647 if (TREE_CODE (op0) == compl_code
8648 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8649 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8650 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8651
8652 /* MIN (a, MAX (a, b)) == a. */
8653 if (TREE_CODE (op1) == compl_code
8654 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8655 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8656 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8657
8658 /* MIN (a, MAX (b, a)) == a. */
8659 if (TREE_CODE (op1) == compl_code
8660 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8661 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8662 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8663
8664 return NULL_TREE;
8665 }
8666
8667 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8668 by changing CODE to reduce the magnitude of constants involved in
8669 ARG0 of the comparison.
8670 Returns a canonicalized comparison tree if a simplification was
8671 possible, otherwise returns NULL_TREE.
8672 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8673 valid if signed overflow is undefined. */
8674
8675 static tree
8676 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8677 tree arg0, tree arg1,
8678 bool *strict_overflow_p)
8679 {
8680 enum tree_code code0 = TREE_CODE (arg0);
8681 tree t, cst0 = NULL_TREE;
8682 int sgn0;
8683 bool swap = false;
8684
8685 /* Match A +- CST code arg1 and CST code arg1. We can change the
8686 first form only if overflow is undefined. */
8687 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8688 /* In principle pointers also have undefined overflow behavior,
8689 but that causes problems elsewhere. */
8690 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8691 && (code0 == MINUS_EXPR
8692 || code0 == PLUS_EXPR)
8693 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8694 || code0 == INTEGER_CST))
8695 return NULL_TREE;
8696
8697 /* Identify the constant in arg0 and its sign. */
8698 if (code0 == INTEGER_CST)
8699 cst0 = arg0;
8700 else
8701 cst0 = TREE_OPERAND (arg0, 1);
8702 sgn0 = tree_int_cst_sgn (cst0);
8703
8704 /* Overflowed constants and zero will cause problems. */
8705 if (integer_zerop (cst0)
8706 || TREE_OVERFLOW (cst0))
8707 return NULL_TREE;
8708
8709 /* See if we can reduce the magnitude of the constant in
8710 arg0 by changing the comparison code. */
8711 if (code0 == INTEGER_CST)
8712 {
8713 /* CST <= arg1 -> CST-1 < arg1. */
8714 if (code == LE_EXPR && sgn0 == 1)
8715 code = LT_EXPR;
8716 /* -CST < arg1 -> -CST-1 <= arg1. */
8717 else if (code == LT_EXPR && sgn0 == -1)
8718 code = LE_EXPR;
8719 /* CST > arg1 -> CST-1 >= arg1. */
8720 else if (code == GT_EXPR && sgn0 == 1)
8721 code = GE_EXPR;
8722 /* -CST >= arg1 -> -CST-1 > arg1. */
8723 else if (code == GE_EXPR && sgn0 == -1)
8724 code = GT_EXPR;
8725 else
8726 return NULL_TREE;
8727 /* arg1 code' CST' might be more canonical. */
8728 swap = true;
8729 }
8730 else
8731 {
8732 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8733 if (code == LT_EXPR
8734 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8735 code = LE_EXPR;
8736 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8737 else if (code == GT_EXPR
8738 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8739 code = GE_EXPR;
8740 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8741 else if (code == LE_EXPR
8742 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8743 code = LT_EXPR;
8744 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8745 else if (code == GE_EXPR
8746 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8747 code = GT_EXPR;
8748 else
8749 return NULL_TREE;
8750 *strict_overflow_p = true;
8751 }
8752
8753 /* Now build the constant reduced in magnitude. But not if that
8754 would produce one outside of its type's range. */
8755 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8756 && ((sgn0 == 1
8757 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8758 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8759 || (sgn0 == -1
8760 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8761 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8762 /* We cannot swap the comparison here as that would cause us to
8763 endlessly recurse. */
8764 return NULL_TREE;
8765
8766 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8767 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8768 if (code0 != INTEGER_CST)
8769 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8770 t = fold_convert (TREE_TYPE (arg1), t);
8771
8772 /* If swapping might yield a more canonical form, do so. */
8773 if (swap)
8774 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8775 else
8776 return fold_build2_loc (loc, code, type, t, arg1);
8777 }
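/* Added worked examples of the canonicalization above (signed
   operands, overflow assumed undefined):
     X - 2 <  Y  ->  X - 1 <= Y
     X + 2 >  Y  ->  X + 1 >= Y
     2 <= Y      ->  Y > 1        (sole-constant form, then swapped).  */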
8778
8779 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8780 overflow further. Try to decrease the magnitude of constants involved
8781 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8782 and put sole constants at the second argument position.
8783 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8784
8785 static tree
8786 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8787 tree arg0, tree arg1)
8788 {
8789 tree t;
8790 bool strict_overflow_p;
8791 const char * const warnmsg = G_("assuming signed overflow does not occur "
8792 "when reducing constant in comparison");
8793
8794 /* Try canonicalization by simplifying arg0. */
8795 strict_overflow_p = false;
8796 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8797 &strict_overflow_p);
8798 if (t)
8799 {
8800 if (strict_overflow_p)
8801 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8802 return t;
8803 }
8804
8805 /* Try canonicalization by simplifying arg1 using the swapped
8806 comparison. */
8807 code = swap_tree_comparison (code);
8808 strict_overflow_p = false;
8809 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8810 &strict_overflow_p);
8811 if (t && strict_overflow_p)
8812 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8813 return t;
8814 }
8815
8816 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8817 space. This is used to avoid issuing overflow warnings for
8818 expressions like &p->x which cannot wrap. */
8819
8820 static bool
8821 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8822 {
8823 double_int di_offset, total;
8824
8825 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8826 return true;
8827
8828 if (bitpos < 0)
8829 return true;
8830
8831 if (offset == NULL_TREE)
8832 di_offset = double_int_zero;
8833 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8834 return true;
8835 else
8836 di_offset = TREE_INT_CST (offset);
8837
8838 bool overflow;
8839 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8840 total = di_offset.add_with_sign (units, true, &overflow);
8841 if (overflow)
8842 return true;
8843
8844 if (total.high != 0)
8845 return true;
8846
8847 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8848 if (size <= 0)
8849 return true;
8850
8851 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8852 array. */
8853 if (TREE_CODE (base) == ADDR_EXPR)
8854 {
8855 HOST_WIDE_INT base_size;
8856
8857 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8858 if (base_size > 0 && size < base_size)
8859 size = base_size;
8860 }
8861
8862 return total.low > (unsigned HOST_WIDE_INT) size;
8863 }
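/* Added illustration (hypothetical 'p'):  for 'struct S *p' with
   sizeof (struct S) == 16, a byte offset of 8 with bitpos 0 gives
   total == 8 <= 16, so this returns false and an address like
   &p->field is treated as non-wrapping.  */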
8864
8865 /* Subroutine of fold_binary. This routine performs all of the
8866 transformations that are common to the equality/inequality
8867 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8868 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8869 fold_binary should call fold_binary rather than this routine
8870 directly. Fold a comparison with tree code CODE and type TYPE
8871 with operands OP0 and OP1. Return the folded comparison or NULL_TREE. */
8872
8873 static tree
8874 fold_comparison (location_t loc, enum tree_code code, tree type,
8875 tree op0, tree op1)
8876 {
8877 tree arg0, arg1, tem;
8878
8879 arg0 = op0;
8880 arg1 = op1;
8881
8882 STRIP_SIGN_NOPS (arg0);
8883 STRIP_SIGN_NOPS (arg1);
8884
8885 tem = fold_relational_const (code, type, arg0, arg1);
8886 if (tem != NULL_TREE)
8887 return tem;
8888
8889 /* If one arg is a real or integer constant, put it last. */
8890 if (tree_swap_operands_p (arg0, arg1, true))
8891 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8892
8893 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8894 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8895 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8896 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8897 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8898 && (TREE_CODE (arg1) == INTEGER_CST
8899 && !TREE_OVERFLOW (arg1)))
8900 {
8901 tree const1 = TREE_OPERAND (arg0, 1);
8902 tree const2 = arg1;
8903 tree variable = TREE_OPERAND (arg0, 0);
8904 tree lhs;
8905 int lhs_add;
8906 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8907
8908 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8909 TREE_TYPE (arg1), const2, const1);
8910
8911 /* If the constant operation overflowed this can be
8912 simplified as a comparison against INT_MAX/INT_MIN. */
8913 if (TREE_CODE (lhs) == INTEGER_CST
8914 && TREE_OVERFLOW (lhs))
8915 {
8916 int const1_sgn = tree_int_cst_sgn (const1);
8917 enum tree_code code2 = code;
8918
8919 /* Get the sign of the constant on the lhs if the
8920 operation were VARIABLE + CONST1. */
8921 if (TREE_CODE (arg0) == MINUS_EXPR)
8922 const1_sgn = -const1_sgn;
8923
8924 /* The sign of the constant determines if we overflowed
8925 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8926 Canonicalize to the INT_MIN overflow by swapping the comparison
8927 if necessary. */
8928 if (const1_sgn == -1)
8929 code2 = swap_tree_comparison (code);
8930
8931 /* We now can look at the canonicalized case
8932 VARIABLE + 1 CODE2 INT_MIN
8933 and decide on the result. */
8934 if (code2 == LT_EXPR
8935 || code2 == LE_EXPR
8936 || code2 == EQ_EXPR)
8937 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8938 else if (code2 == NE_EXPR
8939 || code2 == GE_EXPR
8940 || code2 == GT_EXPR)
8941 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8942 }
8943
8944 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8945 && (TREE_CODE (lhs) != INTEGER_CST
8946 || !TREE_OVERFLOW (lhs)))
8947 {
8948 if (code != EQ_EXPR && code != NE_EXPR)
8949 fold_overflow_warning ("assuming signed overflow does not occur "
8950 "when changing X +- C1 cmp C2 to "
8951 "X cmp C1 +- C2",
8952 WARN_STRICT_OVERFLOW_COMPARISON);
8953 return fold_build2_loc (loc, code, type, variable, lhs);
8954 }
8955 }
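/* Added worked examples of the transform above:
     X + 5 < 10       ->  X < 5
     X + 1 < INT_MIN  ->  false  (the folded constant overflowed,
                                  so the range is empty).  */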
8956
8957 /* For comparisons of pointers we can decompose it to a compile time
8958 comparison of the base objects and the offsets into the object.
8959 This requires at least one operand being an ADDR_EXPR or a
8960 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8961 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8962 && (TREE_CODE (arg0) == ADDR_EXPR
8963 || TREE_CODE (arg1) == ADDR_EXPR
8964 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8965 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8966 {
8967 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8968 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8969 enum machine_mode mode;
8970 int volatilep, unsignedp;
8971 bool indirect_base0 = false, indirect_base1 = false;
8972
8973 /* Get base and offset for the access. Strip ADDR_EXPR for
8974 get_inner_reference, but put it back by stripping INDIRECT_REF
8975 off the base object if possible. indirect_baseN will be true
8976 if baseN is not an address but refers to the object itself. */
8977 base0 = arg0;
8978 if (TREE_CODE (arg0) == ADDR_EXPR)
8979 {
8980 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8981 &bitsize, &bitpos0, &offset0, &mode,
8982 &unsignedp, &volatilep, false);
8983 if (TREE_CODE (base0) == INDIRECT_REF)
8984 base0 = TREE_OPERAND (base0, 0);
8985 else
8986 indirect_base0 = true;
8987 }
8988 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8989 {
8990 base0 = TREE_OPERAND (arg0, 0);
8991 STRIP_SIGN_NOPS (base0);
8992 if (TREE_CODE (base0) == ADDR_EXPR)
8993 {
8994 base0 = TREE_OPERAND (base0, 0);
8995 indirect_base0 = true;
8996 }
8997 offset0 = TREE_OPERAND (arg0, 1);
8998 if (host_integerp (offset0, 0))
8999 {
9000 HOST_WIDE_INT off = size_low_cst (offset0);
9001 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9002 * BITS_PER_UNIT)
9003 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9004 {
9005 bitpos0 = off * BITS_PER_UNIT;
9006 offset0 = NULL_TREE;
9007 }
9008 }
9009 }
9010
9011 base1 = arg1;
9012 if (TREE_CODE (arg1) == ADDR_EXPR)
9013 {
9014 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9015 &bitsize, &bitpos1, &offset1, &mode,
9016 &unsignedp, &volatilep, false);
9017 if (TREE_CODE (base1) == INDIRECT_REF)
9018 base1 = TREE_OPERAND (base1, 0);
9019 else
9020 indirect_base1 = true;
9021 }
9022 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9023 {
9024 base1 = TREE_OPERAND (arg1, 0);
9025 STRIP_SIGN_NOPS (base1);
9026 if (TREE_CODE (base1) == ADDR_EXPR)
9027 {
9028 base1 = TREE_OPERAND (base1, 0);
9029 indirect_base1 = true;
9030 }
9031 offset1 = TREE_OPERAND (arg1, 1);
9032 if (host_integerp (offset1, 0))
9033 {
9034 HOST_WIDE_INT off = size_low_cst (offset1);
9035 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9036 * BITS_PER_UNIT)
9037 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9038 {
9039 bitpos1 = off * BITS_PER_UNIT;
9040 offset1 = NULL_TREE;
9041 }
9042 }
9043 }
9044
9045 /* A local variable can never be pointed to by
9046 the default SSA name of an incoming parameter. */
9047 if ((TREE_CODE (arg0) == ADDR_EXPR
9048 && indirect_base0
9049 && TREE_CODE (base0) == VAR_DECL
9050 && auto_var_in_fn_p (base0, current_function_decl)
9051 && !indirect_base1
9052 && TREE_CODE (base1) == SSA_NAME
9053 && SSA_NAME_IS_DEFAULT_DEF (base1)
9054 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9055 || (TREE_CODE (arg1) == ADDR_EXPR
9056 && indirect_base1
9057 && TREE_CODE (base1) == VAR_DECL
9058 && auto_var_in_fn_p (base1, current_function_decl)
9059 && !indirect_base0
9060 && TREE_CODE (base0) == SSA_NAME
9061 && SSA_NAME_IS_DEFAULT_DEF (base0)
9062 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9063 {
9064 if (code == NE_EXPR)
9065 return constant_boolean_node (1, type);
9066 else if (code == EQ_EXPR)
9067 return constant_boolean_node (0, type);
9068 }
9069 /* If we have equivalent bases we might be able to simplify. */
9070 else if (indirect_base0 == indirect_base1
9071 && operand_equal_p (base0, base1, 0))
9072 {
9073 /* We can fold this expression to a constant if the non-constant
9074 offset parts are equal. */
9075 if ((offset0 == offset1
9076 || (offset0 && offset1
9077 && operand_equal_p (offset0, offset1, 0)))
9078 && (code == EQ_EXPR
9079 || code == NE_EXPR
9080 || (indirect_base0 && DECL_P (base0))
9081 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9082
9083 {
9084 if (code != EQ_EXPR
9085 && code != NE_EXPR
9086 && bitpos0 != bitpos1
9087 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9088 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9089 fold_overflow_warning (("assuming pointer wraparound does not "
9090 "occur when comparing P +- C1 with "
9091 "P +- C2"),
9092 WARN_STRICT_OVERFLOW_CONDITIONAL);
9093
9094 switch (code)
9095 {
9096 case EQ_EXPR:
9097 return constant_boolean_node (bitpos0 == bitpos1, type);
9098 case NE_EXPR:
9099 return constant_boolean_node (bitpos0 != bitpos1, type);
9100 case LT_EXPR:
9101 return constant_boolean_node (bitpos0 < bitpos1, type);
9102 case LE_EXPR:
9103 return constant_boolean_node (bitpos0 <= bitpos1, type);
9104 case GE_EXPR:
9105 return constant_boolean_node (bitpos0 >= bitpos1, type);
9106 case GT_EXPR:
9107 return constant_boolean_node (bitpos0 > bitpos1, type);
9108 default:;
9109 }
9110 }
9111 /* We can simplify the comparison to a comparison of the variable
9112 offset parts if the constant offset parts are equal.
9113 Be careful to use signed sizetype here because otherwise we
9114 mess with array offsets in the wrong way. This is possible
9115 because pointer arithmetic is restricted to remain within an
9116 object and overflow on pointer differences is undefined as of
9117 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9118 else if (bitpos0 == bitpos1
9119 && ((code == EQ_EXPR || code == NE_EXPR)
9120 || (indirect_base0 && DECL_P (base0))
9121 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9122 {
9123 /* By converting to signed sizetype we cover middle-end pointer
9124 arithmetic which operates on unsigned pointer types of size
9125 type size and ARRAY_REF offsets which are properly sign or
9126 zero extended from their type in case it is narrower than
9127 sizetype. */
9128 if (offset0 == NULL_TREE)
9129 offset0 = build_int_cst (ssizetype, 0);
9130 else
9131 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9132 if (offset1 == NULL_TREE)
9133 offset1 = build_int_cst (ssizetype, 0);
9134 else
9135 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9136
9137 if (code != EQ_EXPR
9138 && code != NE_EXPR
9139 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9140 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9141 fold_overflow_warning (("assuming pointer wraparound does not "
9142 "occur when comparing P +- C1 with "
9143 "P +- C2"),
9144 WARN_STRICT_OVERFLOW_COMPARISON);
9145
9146 return fold_build2_loc (loc, code, type, offset0, offset1);
9147 }
9148 }
9149 /* For non-equal bases we can simplify if they are addresses
9150 of local binding decls or constants. */
9151 else if (indirect_base0 && indirect_base1
9152 /* We know that !operand_equal_p (base0, base1, 0)
9153 because the if condition was false. But make
9154 sure two decls are not the same. */
9155 && base0 != base1
9156 && TREE_CODE (arg0) == ADDR_EXPR
9157 && TREE_CODE (arg1) == ADDR_EXPR
9158 && (((TREE_CODE (base0) == VAR_DECL
9159 || TREE_CODE (base0) == PARM_DECL)
9160 && (targetm.binds_local_p (base0)
9161 || CONSTANT_CLASS_P (base1)))
9162 || CONSTANT_CLASS_P (base0))
9163 && (((TREE_CODE (base1) == VAR_DECL
9164 || TREE_CODE (base1) == PARM_DECL)
9165 && (targetm.binds_local_p (base1)
9166 || CONSTANT_CLASS_P (base0)))
9167 || CONSTANT_CLASS_P (base1)))
9168 {
9169 if (code == EQ_EXPR)
9170 return omit_two_operands_loc (loc, type, boolean_false_node,
9171 arg0, arg1);
9172 else if (code == NE_EXPR)
9173 return omit_two_operands_loc (loc, type, boolean_true_node,
9174 arg0, arg1);
9175 }
9176 /* For equal offsets we can simplify to a comparison of the
9177 base addresses. */
9178 else if (bitpos0 == bitpos1
9179 && (indirect_base0
9180 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9181 && (indirect_base1
9182 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9183 && ((offset0 == offset1)
9184 || (offset0 && offset1
9185 && operand_equal_p (offset0, offset1, 0))))
9186 {
9187 if (indirect_base0)
9188 base0 = build_fold_addr_expr_loc (loc, base0);
9189 if (indirect_base1)
9190 base1 = build_fold_addr_expr_loc (loc, base1);
9191 return fold_build2_loc (loc, code, type, base0, base1);
9192 }
9193 }
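/* Added worked instances of the pointer decomposition above:
     &s.a != &s.b     ->  true   (equal base, different bit positions)
     &x == &y         ->  false  (distinct locally-bound decls)
     p +p 4 < p +p 8  ->  true   (equal bases; assumes pointer
                                  overflow is undefined).  */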
9194
9195 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9196 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9197 the resulting offset is smaller in absolute value than the
9198 original one. */
9199 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9200 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9201 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9202 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9203 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9204 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9205 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9206 {
9207 tree const1 = TREE_OPERAND (arg0, 1);
9208 tree const2 = TREE_OPERAND (arg1, 1);
9209 tree variable1 = TREE_OPERAND (arg0, 0);
9210 tree variable2 = TREE_OPERAND (arg1, 0);
9211 tree cst;
9212 const char * const warnmsg = G_("assuming signed overflow does not "
9213 "occur when combining constants around "
9214 "a comparison");
9215
9216 /* Put the constant on the side where it doesn't overflow and is
9217 of lower absolute value than before. */
9218 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9219 ? MINUS_EXPR : PLUS_EXPR,
9220 const2, const1);
9221 if (!TREE_OVERFLOW (cst)
9222 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9223 {
9224 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9225 return fold_build2_loc (loc, code, type,
9226 variable1,
9227 fold_build2_loc (loc,
9228 TREE_CODE (arg1), TREE_TYPE (arg1),
9229 variable2, cst));
9230 }
9231
9232 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9233 ? MINUS_EXPR : PLUS_EXPR,
9234 const1, const2);
9235 if (!TREE_OVERFLOW (cst)
9236 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9237 {
9238 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9239 return fold_build2_loc (loc, code, type,
9240 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9241 variable1, cst),
9242 variable2);
9243 }
9244 }
9245
9246 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9247 signed arithmetic case. That form is created by the compiler
9248 often enough for folding it to be of value. One example is in
9249 computing loop trip counts after Operator Strength Reduction. */
9250 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9251 && TREE_CODE (arg0) == MULT_EXPR
9252 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9253 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9254 && integer_zerop (arg1))
9255 {
9256 tree const1 = TREE_OPERAND (arg0, 1);
9257 tree const2 = arg1; /* zero */
9258 tree variable1 = TREE_OPERAND (arg0, 0);
9259 enum tree_code cmp_code = code;
9260
9261 /* Handle unfolded multiplication by zero. */
9262 if (integer_zerop (const1))
9263 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9264
9265 fold_overflow_warning (("assuming signed overflow does not occur when "
9266 "eliminating multiplication in comparison "
9267 "with zero"),
9268 WARN_STRICT_OVERFLOW_COMPARISON);
9269
9270 /* If const1 is negative we swap the sense of the comparison. */
9271 if (tree_int_cst_sgn (const1) < 0)
9272 cmp_code = swap_tree_comparison (cmp_code);
9273
9274 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9275 }
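/* Added worked examples:  with signed X,
     X * 4 > 0   ->  X > 0
     X * -4 > 0  ->  X < 0   (a negative multiplier swaps the sense).  */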
9276
9277 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9278 if (tem)
9279 return tem;
9280
9281 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9282 {
9283 tree targ0 = strip_float_extensions (arg0);
9284 tree targ1 = strip_float_extensions (arg1);
9285 tree newtype = TREE_TYPE (targ0);
9286
9287 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9288 newtype = TREE_TYPE (targ1);
9289
9290 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9291 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9292 return fold_build2_loc (loc, code, type,
9293 fold_convert_loc (loc, newtype, targ0),
9294 fold_convert_loc (loc, newtype, targ1));
9295
9296 /* (-a) CMP (-b) -> b CMP a */
9297 if (TREE_CODE (arg0) == NEGATE_EXPR
9298 && TREE_CODE (arg1) == NEGATE_EXPR)
9299 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9300 TREE_OPERAND (arg0, 0));
9301
9302 if (TREE_CODE (arg1) == REAL_CST)
9303 {
9304 REAL_VALUE_TYPE cst;
9305 cst = TREE_REAL_CST (arg1);
9306
9307 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9308 if (TREE_CODE (arg0) == NEGATE_EXPR)
9309 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9310 TREE_OPERAND (arg0, 0),
9311 build_real (TREE_TYPE (arg1),
9312 real_value_negate (&cst)));
9313
9314 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9315 /* a CMP (-0) -> a CMP 0 */
9316 if (REAL_VALUE_MINUS_ZERO (cst))
9317 return fold_build2_loc (loc, code, type, arg0,
9318 build_real (TREE_TYPE (arg1), dconst0));
9319
9320 /* x != NaN is always true, other ops are always false. */
9321 if (REAL_VALUE_ISNAN (cst)
9322 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9323 {
9324 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9325 return omit_one_operand_loc (loc, type, tem, arg0);
9326 }
9327
9328 /* Fold comparisons against infinity. */
9329 if (REAL_VALUE_ISINF (cst)
9330 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9331 {
9332 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9333 if (tem != NULL_TREE)
9334 return tem;
9335 }
9336 }
9337
9338 /* If this is a comparison of a real constant with a PLUS_EXPR
9339 or a MINUS_EXPR of a real constant, we can convert it into a
9340 comparison with a revised real constant as long as no overflow
9341 occurs when unsafe_math_optimizations are enabled. */
9342 if (flag_unsafe_math_optimizations
9343 && TREE_CODE (arg1) == REAL_CST
9344 && (TREE_CODE (arg0) == PLUS_EXPR
9345 || TREE_CODE (arg0) == MINUS_EXPR)
9346 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9347 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9348 ? MINUS_EXPR : PLUS_EXPR,
9349 arg1, TREE_OPERAND (arg0, 1)))
9350 && !TREE_OVERFLOW (tem))
9351 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9352
9353 /* Likewise, we can simplify a comparison of a real constant with
9354 a MINUS_EXPR whose first operand is also a real constant, i.e.
9355 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9356 floating-point types only if -fassociative-math is set. */
9357 if (flag_associative_math
9358 && TREE_CODE (arg1) == REAL_CST
9359 && TREE_CODE (arg0) == MINUS_EXPR
9360 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9361 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9362 arg1))
9363 && !TREE_OVERFLOW (tem))
9364 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9365 TREE_OPERAND (arg0, 1), tem);
9366
9367 /* Fold comparisons against built-in math functions. */
9368 if (TREE_CODE (arg1) == REAL_CST
9369 && flag_unsafe_math_optimizations
9370 && ! flag_errno_math)
9371 {
9372 enum built_in_function fcode = builtin_mathfn_code (arg0);
9373
9374 if (fcode != END_BUILTINS)
9375 {
9376 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9377 if (tem != NULL_TREE)
9378 return tem;
9379 }
9380 }
9381 }
9382
9383 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9384 && CONVERT_EXPR_P (arg0))
9385 {
9386 /* If we are widening one operand of an integer comparison,
9387 see if the other operand is similarly being widened. Perhaps we
9388 can do the comparison in the narrower type. */
9389 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9390 if (tem)
9391 return tem;
9392
9393 /* Or if we are changing signedness. */
9394 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9395 if (tem)
9396 return tem;
9397 }
9398
9399 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9400 constant, we can simplify it. */
9401 if (TREE_CODE (arg1) == INTEGER_CST
9402 && (TREE_CODE (arg0) == MIN_EXPR
9403 || TREE_CODE (arg0) == MAX_EXPR)
9404 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9405 {
9406 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9407 if (tem)
9408 return tem;
9409 }
9410
9411 /* Simplify comparison of something with itself. (For IEEE
9412 floating-point, we can only do some of these simplifications.) */
9413 if (operand_equal_p (arg0, arg1, 0))
9414 {
9415 switch (code)
9416 {
9417 case EQ_EXPR:
9418 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9419 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9420 return constant_boolean_node (1, type);
9421 break;
9422
9423 case GE_EXPR:
9424 case LE_EXPR:
9425 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9426 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9427 return constant_boolean_node (1, type);
9428 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9429
9430 case NE_EXPR:
9431 /* For NE, we can only do this simplification if integer
9432 or we don't honor IEEE floating point NaNs. */
9433 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9434 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9435 break;
9436 /* ... fall through ... */
9437 case GT_EXPR:
9438 case LT_EXPR:
9439 return constant_boolean_node (0, type);
9440 default:
9441 gcc_unreachable ();
9442 }
9443 }
9444
9445 /* If we are comparing an expression that just has comparisons
9446 of two integer values, arithmetic expressions of those comparisons,
9447 and constants, we can simplify it. There are only three cases
9448 to check: the two values can either be equal, the first can be
9449 greater, or the second can be greater. Fold the expression for
9450 those three values. Since each value must be 0 or 1, we have
9451 eight possibilities, each of which corresponds to the constant 0
9452 or 1 or one of the six possible comparisons.
9453
9454 This handles common cases like (a > b) == 0 but also handles
9455 expressions like ((x > y) - (y > x)) > 0, which supposedly
9456 occur in macroized code. */
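/* Added worked example of the scheme above:  for
   ((x > y) - (y > x)) > 0, evaluating at the three orderings gives
   high_result = 1, equal_result = 0, low_result = 0, i.e. mask 4,
   so the whole expression folds to x > y.  */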
9457
9458 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9459 {
9460 tree cval1 = 0, cval2 = 0;
9461 int save_p = 0;
9462
9463 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9464 /* Don't handle degenerate cases here; they should already
9465 have been handled anyway. */
9466 && cval1 != 0 && cval2 != 0
9467 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9468 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9469 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9470 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9471 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9472 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9473 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9474 {
9475 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9476 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9477
9478 /* We can't just pass T to eval_subst in case cval1 or cval2
9479 was the same as ARG1. */
9480
9481 tree high_result
9482 = fold_build2_loc (loc, code, type,
9483 eval_subst (loc, arg0, cval1, maxval,
9484 cval2, minval),
9485 arg1);
9486 tree equal_result
9487 = fold_build2_loc (loc, code, type,
9488 eval_subst (loc, arg0, cval1, maxval,
9489 cval2, maxval),
9490 arg1);
9491 tree low_result
9492 = fold_build2_loc (loc, code, type,
9493 eval_subst (loc, arg0, cval1, minval,
9494 cval2, maxval),
9495 arg1);
9496
9497 /* All three of these results should be 0 or 1. Confirm they are.
9498 Then use those values to select the proper code to use. */
9499
9500 if (TREE_CODE (high_result) == INTEGER_CST
9501 && TREE_CODE (equal_result) == INTEGER_CST
9502 && TREE_CODE (low_result) == INTEGER_CST)
9503 {
9504 /* Make a 3-bit mask with the high-order bit being the
9505 value for `>', the next for '=', and the low for '<'. */
9506 switch ((integer_onep (high_result) * 4)
9507 + (integer_onep (equal_result) * 2)
9508 + integer_onep (low_result))
9509 {
9510 case 0:
9511 /* Always false. */
9512 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9513 case 1:
9514 code = LT_EXPR;
9515 break;
9516 case 2:
9517 code = EQ_EXPR;
9518 break;
9519 case 3:
9520 code = LE_EXPR;
9521 break;
9522 case 4:
9523 code = GT_EXPR;
9524 break;
9525 case 5:
9526 code = NE_EXPR;
9527 break;
9528 case 6:
9529 code = GE_EXPR;
9530 break;
9531 case 7:
9532 /* Always true. */
9533 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9534 }
9535
9536 if (save_p)
9537 {
9538 tem = save_expr (build2 (code, type, cval1, cval2));
9539 SET_EXPR_LOCATION (tem, loc);
9540 return tem;
9541 }
9542 return fold_build2_loc (loc, code, type, cval1, cval2);
9543 }
9544 }
9545 }
9546
9547 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9548 into a single range test. */
9549 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9550 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9551 && TREE_CODE (arg1) == INTEGER_CST
9552 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9553 && !integer_zerop (TREE_OPERAND (arg0, 1))
9554 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9555 && !TREE_OVERFLOW (arg1))
9556 {
9557 tem = fold_div_compare (loc, code, type, arg0, arg1);
9558 if (tem != NULL_TREE)
9559 return tem;
9560 }
9561
9562 /* Fold ~X op ~Y as Y op X. */
9563 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9564 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9565 {
9566 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9567 return fold_build2_loc (loc, code, type,
9568 fold_convert_loc (loc, cmp_type,
9569 TREE_OPERAND (arg1, 0)),
9570 TREE_OPERAND (arg0, 0));
9571 }
9572
9573 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9574 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9575 && TREE_CODE (arg1) == INTEGER_CST)
9576 {
9577 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9578 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9579 TREE_OPERAND (arg0, 0),
9580 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9581 fold_convert_loc (loc, cmp_type, arg1)));
9582 }
9583
9584 return NULL_TREE;
9585 }
9586
9587
9588 /* Subroutine of fold_binary. Optimize complex multiplications of the
9589 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9590 argument EXPR represents the expression "z" of type TYPE. */
9591
9592 static tree
9593 fold_mult_zconjz (location_t loc, tree type, tree expr)
9594 {
9595 tree itype = TREE_TYPE (type);
9596 tree rpart, ipart, tem;
9597
9598 if (TREE_CODE (expr) == COMPLEX_EXPR)
9599 {
9600 rpart = TREE_OPERAND (expr, 0);
9601 ipart = TREE_OPERAND (expr, 1);
9602 }
9603 else if (TREE_CODE (expr) == COMPLEX_CST)
9604 {
9605 rpart = TREE_REALPART (expr);
9606 ipart = TREE_IMAGPART (expr);
9607 }
9608 else
9609 {
9610 expr = save_expr (expr);
9611 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9612 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9613 }
9614
9615 rpart = save_expr (rpart);
9616 ipart = save_expr (ipart);
9617 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9618 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9619 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9620 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9621 build_zero_cst (itype));
9622 }
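/* Added worked example:  for z = 3.0 + 4.0i,
     z * conj (z)  ->  COMPLEX_EXPR <3.0*3.0 + 4.0*4.0, 0.0>
                    =  25.0 + 0.0i,
   i.e. the squared magnitude with a zero imaginary part.  */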
9623
9624
9625 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9626 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9627 guarantees that P and N have the same least significant log2(M) bits.
9628 N is not otherwise constrained. In particular, N is not normalized to
9629 0 <= N < M as is common. In general, the precise value of P is unknown.
9630 M is chosen as large as possible such that constant N can be determined.
9631
9632 Returns M and sets *RESIDUE to N.
9633
9634 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9635 account. This is not always possible due to PR 35705.
9636 */
9637
9638 static unsigned HOST_WIDE_INT
9639 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9640 bool allow_func_align)
9641 {
9642 enum tree_code code;
9643
9644 *residue = 0;
9645
9646 code = TREE_CODE (expr);
9647 if (code == ADDR_EXPR)
9648 {
9649 unsigned int bitalign;
9650 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9651 *residue /= BITS_PER_UNIT;
9652 return bitalign / BITS_PER_UNIT;
9653 }
9654 else if (code == POINTER_PLUS_EXPR)
9655 {
9656 tree op0, op1;
9657 unsigned HOST_WIDE_INT modulus;
9658 enum tree_code inner_code;
9659
9660 op0 = TREE_OPERAND (expr, 0);
9661 STRIP_NOPS (op0);
9662 modulus = get_pointer_modulus_and_residue (op0, residue,
9663 allow_func_align);
9664
9665 op1 = TREE_OPERAND (expr, 1);
9666 STRIP_NOPS (op1);
9667 inner_code = TREE_CODE (op1);
9668 if (inner_code == INTEGER_CST)
9669 {
9670 *residue += TREE_INT_CST_LOW (op1);
9671 return modulus;
9672 }
9673 else if (inner_code == MULT_EXPR)
9674 {
9675 op1 = TREE_OPERAND (op1, 1);
9676 if (TREE_CODE (op1) == INTEGER_CST)
9677 {
9678 unsigned HOST_WIDE_INT align;
9679
9680 /* Compute the greatest power-of-2 divisor of op1. */
9681 align = TREE_INT_CST_LOW (op1);
9682 align &= -align;
9683
9684 /* If align is non-zero and less than modulus, replace
9685 modulus with align. If align is 0, then either op1 is 0
9686 or the greatest power-of-2 divisor of op1 doesn't fit in an
9687 unsigned HOST_WIDE_INT. In either case, no additional
9688 constraint is imposed. */
9689 if (align)
9690 modulus = MIN (modulus, align);
9691
9692 return modulus;
9693 }
9694 }
9695 }
9696
9697 /* If we get here, we were unable to determine anything useful about the
9698 expression. */
9699 return 1;
9700 }
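/* Added illustration (hypothetical operands):  if EXPR is '&buf + 4'
   with 'buf' known to be 16-byte aligned, this returns 16 with
   *RESIDUE == 4; for 'p + i * 8' it returns MIN (modulus of p, 8),
   8 being the largest power of two dividing the variable term.  */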
9701
9702 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9703 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9704
9705 static bool
9706 vec_cst_ctor_to_array (tree arg, tree *elts)
9707 {
9708 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9709
9710 if (TREE_CODE (arg) == VECTOR_CST)
9711 {
9712 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9713 elts[i] = VECTOR_CST_ELT (arg, i);
9714 }
9715 else if (TREE_CODE (arg) == CONSTRUCTOR)
9716 {
9717 constructor_elt *elt;
9718
9719 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9720 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9721 return false;
9722 else
9723 elts[i] = elt->value;
9724 }
9725 else
9726 return false;
9727 for (; i < nelts; i++)
9728 elts[i]
9729 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9730 return true;
9731 }
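/* Added example:  for a four-element integer vector type, a
   CONSTRUCTOR {1, 2} fills ELTS as {1, 2, 0, 0}; trailing elements
   default to zero, as in a partial initializer.  */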
9732
9733 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9734 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9735 NULL_TREE otherwise. */
9736
9737 static tree
9738 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9739 {
9740 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9741 tree *elts;
9742 bool need_ctor = false;
9743
9744 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9745 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9746 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9747 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9748 return NULL_TREE;
9749
9750 elts = XALLOCAVEC (tree, nelts * 3);
9751 if (!vec_cst_ctor_to_array (arg0, elts)
9752 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9753 return NULL_TREE;
9754
9755 for (i = 0; i < nelts; i++)
9756 {
9757 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9758 need_ctor = true;
9759 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9760 }
9761
9762 if (need_ctor)
9763 {
9764 vec<constructor_elt, va_gc> *v;
9765 vec_alloc (v, nelts);
9766 for (i = 0; i < nelts; i++)
9767 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9768 return build_constructor (type, v);
9769 }
9770 else
9771 return build_vector (type, &elts[2 * nelts]);
9772 }
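/* Added worked example:  with ARG0 = {a, b}, ARG1 = {c, d} and
   SEL = {0, 3}, the concatenated element array is {a, b, c, d} and
   the result is {a, d} -- a VECTOR_CST if both are constants,
   otherwise a CONSTRUCTOR.  */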
9773
9774 /* Try to fold a pointer difference of type TYPE between two address
9775 expressions of array references AREF0 and AREF1 using location LOC.
9776 Return a simplified expression for the difference or NULL_TREE. */
9777
9778 static tree
9779 fold_addr_of_array_ref_difference (location_t loc, tree type,
9780 tree aref0, tree aref1)
9781 {
9782 tree base0 = TREE_OPERAND (aref0, 0);
9783 tree base1 = TREE_OPERAND (aref1, 0);
9784 tree base_offset = build_int_cst (type, 0);
9785
9786 /* If the bases are array references as well, recurse. If the bases
9787 are pointer indirections compute the difference of the pointers.
9788 If the bases are equal, we are set. */
9789 if ((TREE_CODE (base0) == ARRAY_REF
9790 && TREE_CODE (base1) == ARRAY_REF
9791 && (base_offset
9792 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9793 || (INDIRECT_REF_P (base0)
9794 && INDIRECT_REF_P (base1)
9795 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9796 TREE_OPERAND (base0, 0),
9797 TREE_OPERAND (base1, 0))))
9798 || operand_equal_p (base0, base1, 0))
9799 {
9800 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9801 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9802 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9803 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9804 return fold_build2_loc (loc, PLUS_EXPR, type,
9805 base_offset,
9806 fold_build2_loc (loc, MULT_EXPR, type,
9807 diff, esz));
9808 }
9809 return NULL_TREE;
9810 }
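/* Added worked example:  for 'int a[N]',
     &a[i] - &a[j]  ->  0 + (i - j) * sizeof (int)
   and the recursion extends this to nested references such as
   &a[i][k] - &a[j][l].  */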
9811
9812 /* If the real or vector real constant CST of type TYPE has an exact
9813 inverse, return it, else return NULL. */
9814
9815 static tree
9816 exact_inverse (tree type, tree cst)
9817 {
9818 REAL_VALUE_TYPE r;
9819 tree unit_type, *elts;
9820 enum machine_mode mode;
9821 unsigned vec_nelts, i;
9822
9823 switch (TREE_CODE (cst))
9824 {
9825 case REAL_CST:
9826 r = TREE_REAL_CST (cst);
9827
9828 if (exact_real_inverse (TYPE_MODE (type), &r))
9829 return build_real (type, r);
9830
9831 return NULL_TREE;
9832
9833 case VECTOR_CST:
9834 vec_nelts = VECTOR_CST_NELTS (cst);
9835 elts = XALLOCAVEC (tree, vec_nelts);
9836 unit_type = TREE_TYPE (type);
9837 mode = TYPE_MODE (unit_type);
9838
9839 for (i = 0; i < vec_nelts; i++)
9840 {
9841 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9842 if (!exact_real_inverse (mode, &r))
9843 return NULL_TREE;
9844 elts[i] = build_real (unit_type, r);
9845 }
9846
9847 return build_vector (type, elts);
9848
9849 default:
9850 return NULL_TREE;
9851 }
9852 }
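/* Added examples:  exact_inverse returns 0.25 for 4.0 (exactly
   representable in binary), but NULL_TREE for 3.0, since 1/3 has no
   exact binary floating-point representation.  */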
9853
9854 /* Fold a binary expression of code CODE and type TYPE with operands
9855 OP0 and OP1. LOC is the location of the resulting expression.
9856 Return the folded expression if folding is successful. Otherwise,
9857 return NULL_TREE. */
9858
9859 tree
9860 fold_binary_loc (location_t loc,
9861 enum tree_code code, tree type, tree op0, tree op1)
9862 {
9863 enum tree_code_class kind = TREE_CODE_CLASS (code);
9864 tree arg0, arg1, tem;
9865 tree t1 = NULL_TREE;
9866 bool strict_overflow_p;
9867
9868 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9869 && TREE_CODE_LENGTH (code) == 2
9870 && op0 != NULL_TREE
9871 && op1 != NULL_TREE);
9872
9873 arg0 = op0;
9874 arg1 = op1;
9875
9876 /* Strip any conversions that don't change the mode. This is
9877 safe for every expression, except for a comparison expression
9878 because its signedness is derived from its operands. So, in
9879 the latter case, only strip conversions that don't change the
9880 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9881 preserved.
9882
9883 Note that this is done as an internal manipulation within the
9884 constant folder, in order to find the simplest representation
9885 of the arguments so that their form can be studied. In any
9886 case, the appropriate type conversions should be put back in
9887 the tree that will get out of the constant folder. */
9888
9889 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9890 {
9891 STRIP_SIGN_NOPS (arg0);
9892 STRIP_SIGN_NOPS (arg1);
9893 }
9894 else
9895 {
9896 STRIP_NOPS (arg0);
9897 STRIP_NOPS (arg1);
9898 }
9899
9900 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9901 constant but we can't do arithmetic on them. */
9902 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9903 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9904 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9905 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9906 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9907 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9908 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9909 {
9910 if (kind == tcc_binary)
9911 {
9912 /* Make sure type and arg0 have the same saturating flag. */
9913 gcc_assert (TYPE_SATURATING (type)
9914 == TYPE_SATURATING (TREE_TYPE (arg0)));
9915 tem = const_binop (code, arg0, arg1);
9916 }
9917 else if (kind == tcc_comparison)
9918 tem = fold_relational_const (code, type, arg0, arg1);
9919 else
9920 tem = NULL_TREE;
9921
9922 if (tem != NULL_TREE)
9923 {
9924 if (TREE_TYPE (tem) != type)
9925 tem = fold_convert_loc (loc, type, tem);
9926 return tem;
9927 }
9928 }
9929
9930 /* If this is a commutative operation, and ARG0 is a constant, move it
9931 to ARG1 to reduce the number of tests below. */
9932 if (commutative_tree_code (code)
9933 && tree_swap_operands_p (arg0, arg1, true))
9934 return fold_build2_loc (loc, code, type, op1, op0);
9935
9936 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9937
9938 First check for cases where an arithmetic operation is applied to a
9939 compound, conditional, or comparison operation. Push the arithmetic
9940 operation inside the compound or conditional to see if any folding
9941 can then be done. Convert comparison to conditional for this purpose.
9942 This also optimizes non-constant cases that used to be done in
9943 expand_expr.
9944
9945 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9946 one of the operands is a comparison and the other is a comparison, a
9947 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9948 code below would make the expression more complex. Change it to a
9949 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9950 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9951
9952 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9953 || code == EQ_EXPR || code == NE_EXPR)
9954 && TREE_CODE (type) != VECTOR_TYPE
9955 && ((truth_value_p (TREE_CODE (arg0))
9956 && (truth_value_p (TREE_CODE (arg1))
9957 || (TREE_CODE (arg1) == BIT_AND_EXPR
9958 && integer_onep (TREE_OPERAND (arg1, 1)))))
9959 || (truth_value_p (TREE_CODE (arg1))
9960 && (truth_value_p (TREE_CODE (arg0))
9961 || (TREE_CODE (arg0) == BIT_AND_EXPR
9962 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9963 {
9964 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9965 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9966 : TRUTH_XOR_EXPR,
9967 boolean_type_node,
9968 fold_convert_loc (loc, boolean_type_node, arg0),
9969 fold_convert_loc (loc, boolean_type_node, arg1));
9970
9971 if (code == EQ_EXPR)
9972 tem = invert_truthvalue_loc (loc, tem);
9973
9974 return fold_convert_loc (loc, type, tem);
9975 }
9976
9977 if (TREE_CODE_CLASS (code) == tcc_binary
9978 || TREE_CODE_CLASS (code) == tcc_comparison)
9979 {
9980 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9981 {
9982 tem = fold_build2_loc (loc, code, type,
9983 fold_convert_loc (loc, TREE_TYPE (op0),
9984 TREE_OPERAND (arg0, 1)), op1);
9985 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9986 tem);
9987 }
9988 if (TREE_CODE (arg1) == COMPOUND_EXPR
9989 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9990 {
9991 tem = fold_build2_loc (loc, code, type, op0,
9992 fold_convert_loc (loc, TREE_TYPE (op1),
9993 TREE_OPERAND (arg1, 1)));
9994 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9995 tem);
9996 }
9997
9998 if (TREE_CODE (arg0) == COND_EXPR
9999 || TREE_CODE (arg0) == VEC_COND_EXPR
10000 || COMPARISON_CLASS_P (arg0))
10001 {
10002 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10003 arg0, arg1,
10004 /*cond_first_p=*/1);
10005 if (tem != NULL_TREE)
10006 return tem;
10007 }
10008
10009 if (TREE_CODE (arg1) == COND_EXPR
10010 || TREE_CODE (arg1) == VEC_COND_EXPR
10011 || COMPARISON_CLASS_P (arg1))
10012 {
10013 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10014 arg1, arg0,
10015 /*cond_first_p=*/0);
10016 if (tem != NULL_TREE)
10017 return tem;
10018 }
10019 }
10020
10021 switch (code)
10022 {
10023 case MEM_REF:
10024 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
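      /* For example, MEM[&MEM[p, 4], 8] becomes MEM[p, 12].  */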
10025 if (TREE_CODE (arg0) == ADDR_EXPR
10026 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10027 {
10028 tree iref = TREE_OPERAND (arg0, 0);
10029 return fold_build2 (MEM_REF, type,
10030 TREE_OPERAND (iref, 0),
10031 int_const_binop (PLUS_EXPR, arg1,
10032 TREE_OPERAND (iref, 1)));
10033 }
10034
10035 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10036 if (TREE_CODE (arg0) == ADDR_EXPR
10037 && handled_component_p (TREE_OPERAND (arg0, 0)))
10038 {
10039 tree base;
10040 HOST_WIDE_INT coffset;
10041 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10042 &coffset);
10043 if (!base)
10044 return NULL_TREE;
10045 return fold_build2 (MEM_REF, type,
10046 build_fold_addr_expr (base),
10047 int_const_binop (PLUS_EXPR, arg1,
10048 size_int (coffset)));
10049 }
10050
10051 return NULL_TREE;
10052
10053 case POINTER_PLUS_EXPR:
10054 /* 0 +p index -> (type)index */
10055 if (integer_zerop (arg0))
10056 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10057
10058 /* PTR +p 0 -> PTR */
10059 if (integer_zerop (arg1))
10060 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10061
10062 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10063 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10064 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10065 return fold_convert_loc (loc, type,
10066 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10067 fold_convert_loc (loc, sizetype,
10068 arg1),
10069 fold_convert_loc (loc, sizetype,
10070 arg0)));
10071
10072 /* (PTR +p B) +p A -> PTR +p (B + A) */
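      /* For example, (p p+ 4) p+ 8 becomes p p+ 12.  */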
10073 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10074 {
10075 tree inner;
10076 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10077 tree arg00 = TREE_OPERAND (arg0, 0);
10078 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10079 arg01, fold_convert_loc (loc, sizetype, arg1));
10080 return fold_convert_loc (loc, type,
10081 fold_build_pointer_plus_loc (loc,
10082 arg00, inner));
10083 }
10084
      /* PTR_CST +p CST -> CST1, i.e. fold the two constants into one.  */
10086 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10087 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10088 fold_convert_loc (loc, type, arg1));
10089
      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
	 of the array.  The loop optimizer sometimes produces this type of
	 expression.  */
10093 if (TREE_CODE (arg0) == ADDR_EXPR)
10094 {
10095 tem = try_move_mult_to_index (loc, arg0,
10096 fold_convert_loc (loc,
10097 ssizetype, arg1));
10098 if (tem)
10099 return fold_convert_loc (loc, type, tem);
10100 }
10101
10102 return NULL_TREE;
10103
10104 case PLUS_EXPR:
10105 /* A + (-B) -> A - B */
10106 if (TREE_CODE (arg1) == NEGATE_EXPR)
10107 return fold_build2_loc (loc, MINUS_EXPR, type,
10108 fold_convert_loc (loc, type, arg0),
10109 fold_convert_loc (loc, type,
10110 TREE_OPERAND (arg1, 0)));
10111 /* (-A) + B -> B - A */
10112 if (TREE_CODE (arg0) == NEGATE_EXPR
10113 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10114 return fold_build2_loc (loc, MINUS_EXPR, type,
10115 fold_convert_loc (loc, type, arg1),
10116 fold_convert_loc (loc, type,
10117 TREE_OPERAND (arg0, 0)));
10118
10119 if (INTEGRAL_TYPE_P (type))
10120 {
10121 /* Convert ~A + 1 to -A. */
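	  /* In two's complement arithmetic, ~a + 1 equals -a.  */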
10122 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10123 && integer_onep (arg1))
10124 return fold_build1_loc (loc, NEGATE_EXPR, type,
10125 fold_convert_loc (loc, type,
10126 TREE_OPERAND (arg0, 0)));
10127
10128 /* ~X + X is -1. */
10129 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10130 && !TYPE_OVERFLOW_TRAPS (type))
10131 {
10132 tree tem = TREE_OPERAND (arg0, 0);
10133
10134 STRIP_NOPS (tem);
10135 if (operand_equal_p (tem, arg1, 0))
10136 {
10137 t1 = build_int_cst_type (type, -1);
10138 return omit_one_operand_loc (loc, type, t1, arg1);
10139 }
10140 }
10141
10142 /* X + ~X is -1. */
10143 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10144 && !TYPE_OVERFLOW_TRAPS (type))
10145 {
10146 tree tem = TREE_OPERAND (arg1, 0);
10147
10148 STRIP_NOPS (tem);
10149 if (operand_equal_p (arg0, tem, 0))
10150 {
10151 t1 = build_int_cst_type (type, -1);
10152 return omit_one_operand_loc (loc, type, t1, arg0);
10153 }
10154 }
10155
10156 /* X + (X / CST) * -CST is X % CST. */
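	  /* For example, x + (x / 16) * -16 folds to x % 16.  */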
10157 if (TREE_CODE (arg1) == MULT_EXPR
10158 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10159 && operand_equal_p (arg0,
10160 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10161 {
10162 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10163 tree cst1 = TREE_OPERAND (arg1, 1);
10164 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10165 cst1, cst0);
10166 if (sum && integer_zerop (sum))
10167 return fold_convert_loc (loc, type,
10168 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10169 TREE_TYPE (arg0), arg0,
10170 cst0));
10171 }
10172 }
10173
      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same
	 or one of them being the constant 1.  Make sure the type is not
	 saturating and has the signedness of the stripped operands, as
	 fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
10178 if ((TREE_CODE (arg0) == MULT_EXPR
10179 || TREE_CODE (arg1) == MULT_EXPR)
10180 && !TYPE_SATURATING (type)
10181 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10182 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10183 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10184 {
10185 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10186 if (tem)
10187 return tem;
10188 }
10189
10190 if (! FLOAT_TYPE_P (type))
10191 {
10192 if (integer_zerop (arg1))
10193 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10194
10195 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10196 with a constant, and the two constants have no bits in common,
10197 we should treat this as a BIT_IOR_EXPR since this may produce more
10198 simplifications. */
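	  /* For example, (a & 0xF0) + (b & 0x0F) is treated as
	     (a & 0xF0) | (b & 0x0F), since the masks share no bits.  */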
10199 if (TREE_CODE (arg0) == BIT_AND_EXPR
10200 && TREE_CODE (arg1) == BIT_AND_EXPR
10201 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10202 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10203 && integer_zerop (const_binop (BIT_AND_EXPR,
10204 TREE_OPERAND (arg0, 1),
10205 TREE_OPERAND (arg1, 1))))
10206 {
10207 code = BIT_IOR_EXPR;
10208 goto bit_ior;
10209 }
10210
10211 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10212 (plus (plus (mult) (mult)) (foo)) so that we can
10213 take advantage of the factoring cases below. */
10214 if (TYPE_OVERFLOW_WRAPS (type)
10215 && (((TREE_CODE (arg0) == PLUS_EXPR
10216 || TREE_CODE (arg0) == MINUS_EXPR)
10217 && TREE_CODE (arg1) == MULT_EXPR)
10218 || ((TREE_CODE (arg1) == PLUS_EXPR
10219 || TREE_CODE (arg1) == MINUS_EXPR)
10220 && TREE_CODE (arg0) == MULT_EXPR)))
10221 {
10222 tree parg0, parg1, parg, marg;
10223 enum tree_code pcode;
10224
10225 if (TREE_CODE (arg1) == MULT_EXPR)
10226 parg = arg0, marg = arg1;
10227 else
10228 parg = arg1, marg = arg0;
10229 pcode = TREE_CODE (parg);
10230 parg0 = TREE_OPERAND (parg, 0);
10231 parg1 = TREE_OPERAND (parg, 1);
10232 STRIP_NOPS (parg0);
10233 STRIP_NOPS (parg1);
10234
10235 if (TREE_CODE (parg0) == MULT_EXPR
10236 && TREE_CODE (parg1) != MULT_EXPR)
10237 return fold_build2_loc (loc, pcode, type,
10238 fold_build2_loc (loc, PLUS_EXPR, type,
10239 fold_convert_loc (loc, type,
10240 parg0),
10241 fold_convert_loc (loc, type,
10242 marg)),
10243 fold_convert_loc (loc, type, parg1));
10244 if (TREE_CODE (parg0) != MULT_EXPR
10245 && TREE_CODE (parg1) == MULT_EXPR)
10246 return
10247 fold_build2_loc (loc, PLUS_EXPR, type,
10248 fold_convert_loc (loc, type, parg0),
10249 fold_build2_loc (loc, pcode, type,
10250 fold_convert_loc (loc, type, marg),
10251 fold_convert_loc (loc, type,
10252 parg1)));
10253 }
10254 }
10255 else
10256 {
10257 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10258 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10259 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10260
10261 /* Likewise if the operands are reversed. */
10262 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10263 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10264
10265 /* Convert X + -C into X - C. */
10266 if (TREE_CODE (arg1) == REAL_CST
10267 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10268 {
10269 tem = fold_negate_const (arg1, type);
10270 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10271 return fold_build2_loc (loc, MINUS_EXPR, type,
10272 fold_convert_loc (loc, type, arg0),
10273 fold_convert_loc (loc, type, tem));
10274 }
10275
10276 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10277 to __complex__ ( x, y ). This is not the same for SNaNs or
10278 if signed zeros are involved. */
10279 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10280 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10281 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10282 {
10283 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10284 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10285 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10286 bool arg0rz = false, arg0iz = false;
10287 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10288 || (arg0i && (arg0iz = real_zerop (arg0i))))
10289 {
10290 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10291 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10292 if (arg0rz && arg1i && real_zerop (arg1i))
10293 {
10294 tree rp = arg1r ? arg1r
10295 : build1 (REALPART_EXPR, rtype, arg1);
10296 tree ip = arg0i ? arg0i
10297 : build1 (IMAGPART_EXPR, rtype, arg0);
10298 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10299 }
10300 else if (arg0iz && arg1r && real_zerop (arg1r))
10301 {
10302 tree rp = arg0r ? arg0r
10303 : build1 (REALPART_EXPR, rtype, arg0);
10304 tree ip = arg1i ? arg1i
10305 : build1 (IMAGPART_EXPR, rtype, arg1);
10306 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10307 }
10308 }
10309 }
10310
10311 if (flag_unsafe_math_optimizations
10312 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10313 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10314 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10315 return tem;
10316
10317 /* Convert x+x into x*2.0. */
10318 if (operand_equal_p (arg0, arg1, 0)
10319 && SCALAR_FLOAT_TYPE_P (type))
10320 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10321 build_real (type, dconst2));
10322
10323 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10324 We associate floats only if the user has specified
10325 -fassociative-math. */
10326 if (flag_associative_math
10327 && TREE_CODE (arg1) == PLUS_EXPR
10328 && TREE_CODE (arg0) != MULT_EXPR)
10329 {
10330 tree tree10 = TREE_OPERAND (arg1, 0);
10331 tree tree11 = TREE_OPERAND (arg1, 1);
10332 if (TREE_CODE (tree11) == MULT_EXPR
10333 && TREE_CODE (tree10) == MULT_EXPR)
10334 {
10335 tree tree0;
10336 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10337 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10338 }
10339 }
	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).
	     We associate floats only if the user has specified
	     -fassociative-math.  */
10343 if (flag_associative_math
10344 && TREE_CODE (arg0) == PLUS_EXPR
10345 && TREE_CODE (arg1) != MULT_EXPR)
10346 {
10347 tree tree00 = TREE_OPERAND (arg0, 0);
10348 tree tree01 = TREE_OPERAND (arg0, 1);
10349 if (TREE_CODE (tree01) == MULT_EXPR
10350 && TREE_CODE (tree00) == MULT_EXPR)
10351 {
10352 tree tree0;
10353 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10354 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10355 }
10356 }
10357 }
10358
10359 bit_rotate:
  /* (A << C1) + (A >> C2) is a rotate of A by C1 bits if A is unsigned
     and C1 + C2 is the size of A.  */
  /* (A << B) + (A >> (Z - B)) is a rotate of A by B bits if A is
     unsigned and Z is the size of A.  */
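  /* For example, with a 32-bit unsigned int x, (x << 3) + (x >> 29)
     and (x << n) + (x >> (32 - n)) both become a rotate of x by the
     shift count.  */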
10364 {
10365 enum tree_code code0, code1;
10366 tree rtype;
10367 code0 = TREE_CODE (arg0);
10368 code1 = TREE_CODE (arg1);
10369 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10370 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10371 && operand_equal_p (TREE_OPERAND (arg0, 0),
10372 TREE_OPERAND (arg1, 0), 0)
10373 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10374 TYPE_UNSIGNED (rtype))
10375 /* Only create rotates in complete modes. Other cases are not
10376 expanded properly. */
10377 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10378 {
10379 tree tree01, tree11;
10380 enum tree_code code01, code11;
10381
10382 tree01 = TREE_OPERAND (arg0, 1);
10383 tree11 = TREE_OPERAND (arg1, 1);
10384 STRIP_NOPS (tree01);
10385 STRIP_NOPS (tree11);
10386 code01 = TREE_CODE (tree01);
10387 code11 = TREE_CODE (tree11);
10388 if (code01 == INTEGER_CST
10389 && code11 == INTEGER_CST
10390 && TREE_INT_CST_HIGH (tree01) == 0
10391 && TREE_INT_CST_HIGH (tree11) == 0
10392 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10393 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10394 {
10395 tem = build2_loc (loc, LROTATE_EXPR,
10396 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10397 TREE_OPERAND (arg0, 0),
10398 code0 == LSHIFT_EXPR ? tree01 : tree11);
10399 return fold_convert_loc (loc, type, tem);
10400 }
10401 else if (code11 == MINUS_EXPR)
10402 {
10403 tree tree110, tree111;
10404 tree110 = TREE_OPERAND (tree11, 0);
10405 tree111 = TREE_OPERAND (tree11, 1);
10406 STRIP_NOPS (tree110);
10407 STRIP_NOPS (tree111);
10408 if (TREE_CODE (tree110) == INTEGER_CST
10409 && 0 == compare_tree_int (tree110,
10410 TYPE_PRECISION
10411 (TREE_TYPE (TREE_OPERAND
10412 (arg0, 0))))
10413 && operand_equal_p (tree01, tree111, 0))
10414 return
10415 fold_convert_loc (loc, type,
10416 build2 ((code0 == LSHIFT_EXPR
10417 ? LROTATE_EXPR
10418 : RROTATE_EXPR),
10419 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10420 TREE_OPERAND (arg0, 0), tree01));
10421 }
10422 else if (code01 == MINUS_EXPR)
10423 {
10424 tree tree010, tree011;
10425 tree010 = TREE_OPERAND (tree01, 0);
10426 tree011 = TREE_OPERAND (tree01, 1);
10427 STRIP_NOPS (tree010);
10428 STRIP_NOPS (tree011);
10429 if (TREE_CODE (tree010) == INTEGER_CST
10430 && 0 == compare_tree_int (tree010,
10431 TYPE_PRECISION
10432 (TREE_TYPE (TREE_OPERAND
10433 (arg0, 0))))
10434 && operand_equal_p (tree11, tree011, 0))
10435 return fold_convert_loc
10436 (loc, type,
10437 build2 ((code0 != LSHIFT_EXPR
10438 ? LROTATE_EXPR
10439 : RROTATE_EXPR),
10440 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10441 TREE_OPERAND (arg0, 0), tree11));
10442 }
10443 }
10444 }
10445
10446 associate:
  /* In most languages, we can't associate operations on floats through
     parentheses.  Rather than remember where the parentheses were, we
     don't associate floats at all, unless the user has specified
     -fassociative-math.
     And we need to make sure the type is not saturating.  */
10452
10453 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10454 && !TYPE_SATURATING (type))
10455 {
10456 tree var0, con0, lit0, minus_lit0;
10457 tree var1, con1, lit1, minus_lit1;
10458 tree atype = type;
10459 bool ok = true;
10460
10461 /* Split both trees into variables, constants, and literals. Then
10462 associate each group together, the constants with literals,
10463 then the result with variables. This increases the chances of
10464 literals being recombined later and of generating relocatable
10465 expressions for the sum of a constant and literal. */
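      /* For example, (x + 3) + (y + 5) splits into variables x and y
	 and literals 3 and 5, which recombine as (x + y) + 8.  */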
10466 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10467 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10468 code == MINUS_EXPR);
10469
10470 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10471 if (code == MINUS_EXPR)
10472 code = PLUS_EXPR;
10473
10474 /* With undefined overflow prefer doing association in a type
10475 which wraps on overflow, if that is one of the operand types. */
10476 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10477 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10478 {
10479 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10480 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10481 atype = TREE_TYPE (arg0);
10482 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10483 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10484 atype = TREE_TYPE (arg1);
10485 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10486 }
10487
10488 /* With undefined overflow we can only associate constants with one
10489 variable, and constants whose association doesn't overflow. */
10490 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10491 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10492 {
10493 if (var0 && var1)
10494 {
10495 tree tmp0 = var0;
10496 tree tmp1 = var1;
10497
10498 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10499 tmp0 = TREE_OPERAND (tmp0, 0);
10500 if (CONVERT_EXPR_P (tmp0)
10501 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10502 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10503 <= TYPE_PRECISION (atype)))
10504 tmp0 = TREE_OPERAND (tmp0, 0);
10505 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10506 tmp1 = TREE_OPERAND (tmp1, 0);
10507 if (CONVERT_EXPR_P (tmp1)
10508 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10509 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10510 <= TYPE_PRECISION (atype)))
10511 tmp1 = TREE_OPERAND (tmp1, 0);
10512 /* The only case we can still associate with two variables
10513 is if they are the same, modulo negation and bit-pattern
10514 preserving conversions. */
10515 if (!operand_equal_p (tmp0, tmp1, 0))
10516 ok = false;
10517 }
10518 }
10519
10520 /* Only do something if we found more than two objects. Otherwise,
10521 nothing has changed and we risk infinite recursion. */
10522 if (ok
10523 && (2 < ((var0 != 0) + (var1 != 0)
10524 + (con0 != 0) + (con1 != 0)
10525 + (lit0 != 0) + (lit1 != 0)
10526 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10527 {
10528 bool any_overflows = false;
10529 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10530 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10531 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10532 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10533 var0 = associate_trees (loc, var0, var1, code, atype);
10534 con0 = associate_trees (loc, con0, con1, code, atype);
10535 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10536 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10537 code, atype);
10538
	  /* Preserve the MINUS_EXPR if the negative part of the literal is
	     greater than the positive part.  Otherwise, the multiplicative
	     folding code (i.e. extract_muldiv) may be fooled when unsigned
	     constants are subtracted, as in the following example:
	     ((X*2 + 4) - 8U)/2.  */
10544 if (minus_lit0 && lit0)
10545 {
10546 if (TREE_CODE (lit0) == INTEGER_CST
10547 && TREE_CODE (minus_lit0) == INTEGER_CST
10548 && tree_int_cst_lt (lit0, minus_lit0))
10549 {
10550 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10551 MINUS_EXPR, atype);
10552 lit0 = 0;
10553 }
10554 else
10555 {
10556 lit0 = associate_trees (loc, lit0, minus_lit0,
10557 MINUS_EXPR, atype);
10558 minus_lit0 = 0;
10559 }
10560 }
10561
10562 /* Don't introduce overflows through reassociation. */
10563 if (!any_overflows
10564 && ((lit0 && TREE_OVERFLOW (lit0))
10565 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10566 return NULL_TREE;
10567
10568 if (minus_lit0)
10569 {
10570 if (con0 == 0)
10571 return
10572 fold_convert_loc (loc, type,
10573 associate_trees (loc, var0, minus_lit0,
10574 MINUS_EXPR, atype));
10575 else
10576 {
10577 con0 = associate_trees (loc, con0, minus_lit0,
10578 MINUS_EXPR, atype);
10579 return
10580 fold_convert_loc (loc, type,
10581 associate_trees (loc, var0, con0,
10582 PLUS_EXPR, atype));
10583 }
10584 }
10585
10586 con0 = associate_trees (loc, con0, lit0, code, atype);
10587 return
10588 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10589 code, atype));
10590 }
10591 }
10592
10593 return NULL_TREE;
10594
10595 case MINUS_EXPR:
10596 /* Pointer simplifications for subtraction, simple reassociations. */
10597 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10598 {
10599 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
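	  /* For example, (p p+ 4) - (p p+ 12) can simplify all the way
	     to the constant -8.  */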
10600 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10601 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10602 {
10603 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10604 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10605 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10606 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10607 return fold_build2_loc (loc, PLUS_EXPR, type,
10608 fold_build2_loc (loc, MINUS_EXPR, type,
10609 arg00, arg10),
10610 fold_build2_loc (loc, MINUS_EXPR, type,
10611 arg01, arg11));
10612 }
10613 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10614 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10615 {
10616 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10617 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10618 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10619 fold_convert_loc (loc, type, arg1));
10620 if (tmp)
10621 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10622 }
10623 }
10624 /* A - (-B) -> A + B */
10625 if (TREE_CODE (arg1) == NEGATE_EXPR)
10626 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10627 fold_convert_loc (loc, type,
10628 TREE_OPERAND (arg1, 0)));
10629 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10630 if (TREE_CODE (arg0) == NEGATE_EXPR
10631 && (FLOAT_TYPE_P (type)
10632 || INTEGRAL_TYPE_P (type))
10633 && negate_expr_p (arg1)
10634 && reorder_operands_p (arg0, arg1))
10635 return fold_build2_loc (loc, MINUS_EXPR, type,
10636 fold_convert_loc (loc, type,
10637 negate_expr (arg1)),
10638 fold_convert_loc (loc, type,
10639 TREE_OPERAND (arg0, 0)));
10640 /* Convert -A - 1 to ~A. */
10641 if (INTEGRAL_TYPE_P (type)
10642 && TREE_CODE (arg0) == NEGATE_EXPR
10643 && integer_onep (arg1)
10644 && !TYPE_OVERFLOW_TRAPS (type))
10645 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10646 fold_convert_loc (loc, type,
10647 TREE_OPERAND (arg0, 0)));
10648
10649 /* Convert -1 - A to ~A. */
10650 if (INTEGRAL_TYPE_P (type)
10651 && integer_all_onesp (arg0))
10652 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10653
10654
10655 /* X - (X / CST) * CST is X % CST. */
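      /* For example, x - (x / 16) * 16 folds to x % 16.  */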
10656 if (INTEGRAL_TYPE_P (type)
10657 && TREE_CODE (arg1) == MULT_EXPR
10658 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10659 && operand_equal_p (arg0,
10660 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10661 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10662 TREE_OPERAND (arg1, 1), 0))
10663 return
10664 fold_convert_loc (loc, type,
10665 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10666 arg0, TREE_OPERAND (arg1, 1)));
10667
10668 if (! FLOAT_TYPE_P (type))
10669 {
10670 if (integer_zerop (arg0))
10671 return negate_expr (fold_convert_loc (loc, type, arg1));
10672 if (integer_zerop (arg1))
10673 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10674
10675 /* Fold A - (A & B) into ~B & A. */
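	  /* For example, a - (a & b) becomes ~b & a.  */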
10676 if (!TREE_SIDE_EFFECTS (arg0)
10677 && TREE_CODE (arg1) == BIT_AND_EXPR)
10678 {
10679 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10680 {
10681 tree arg10 = fold_convert_loc (loc, type,
10682 TREE_OPERAND (arg1, 0));
10683 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10684 fold_build1_loc (loc, BIT_NOT_EXPR,
10685 type, arg10),
10686 fold_convert_loc (loc, type, arg0));
10687 }
10688 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10689 {
10690 tree arg11 = fold_convert_loc (loc,
10691 type, TREE_OPERAND (arg1, 1));
10692 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10693 fold_build1_loc (loc, BIT_NOT_EXPR,
10694 type, arg11),
10695 fold_convert_loc (loc, type, arg0));
10696 }
10697 }
10698
10699 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10700 any power of 2 minus 1. */
10701 if (TREE_CODE (arg0) == BIT_AND_EXPR
10702 && TREE_CODE (arg1) == BIT_AND_EXPR
10703 && operand_equal_p (TREE_OPERAND (arg0, 0),
10704 TREE_OPERAND (arg1, 0), 0))
10705 {
10706 tree mask0 = TREE_OPERAND (arg0, 1);
10707 tree mask1 = TREE_OPERAND (arg1, 1);
10708 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10709
10710 if (operand_equal_p (tem, mask1, 0))
10711 {
10712 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10713 TREE_OPERAND (arg0, 0), mask1);
10714 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10715 }
10716 }
10717 }
10718
10719 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10720 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10721 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10722
10723 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10724 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10725 (-ARG1 + ARG0) reduces to -ARG1. */
10726 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10727 return negate_expr (fold_convert_loc (loc, type, arg1));
10728
10729 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10730 __complex__ ( x, -y ). This is not the same for SNaNs or if
10731 signed zeros are involved. */
10732 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10733 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10734 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10735 {
10736 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10737 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10738 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10739 bool arg0rz = false, arg0iz = false;
10740 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10741 || (arg0i && (arg0iz = real_zerop (arg0i))))
10742 {
10743 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10744 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10745 if (arg0rz && arg1i && real_zerop (arg1i))
10746 {
10747 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10748 arg1r ? arg1r
10749 : build1 (REALPART_EXPR, rtype, arg1));
10750 tree ip = arg0i ? arg0i
10751 : build1 (IMAGPART_EXPR, rtype, arg0);
10752 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10753 }
10754 else if (arg0iz && arg1r && real_zerop (arg1r))
10755 {
10756 tree rp = arg0r ? arg0r
10757 : build1 (REALPART_EXPR, rtype, arg0);
10758 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10759 arg1i ? arg1i
10760 : build1 (IMAGPART_EXPR, rtype, arg1));
10761 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10762 }
10763 }
10764 }
10765
      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it gives the wrong result for
	 NaNs.  Also note that operand_equal_p is always false if an
	 operand is volatile.  */
10771
10772 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10773 && operand_equal_p (arg0, arg1, 0))
10774 return build_zero_cst (type);
10775
10776 /* A - B -> A + (-B) if B is easily negatable. */
10777 if (negate_expr_p (arg1)
10778 && ((FLOAT_TYPE_P (type)
10779 /* Avoid this transformation if B is a positive REAL_CST. */
10780 && (TREE_CODE (arg1) != REAL_CST
10781 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10782 || INTEGRAL_TYPE_P (type)))
10783 return fold_build2_loc (loc, PLUS_EXPR, type,
10784 fold_convert_loc (loc, type, arg0),
10785 fold_convert_loc (loc, type,
10786 negate_expr (arg1)));
10787
10788 /* Try folding difference of addresses. */
10789 {
10790 HOST_WIDE_INT diff;
10791
10792 if ((TREE_CODE (arg0) == ADDR_EXPR
10793 || TREE_CODE (arg1) == ADDR_EXPR)
10794 && ptr_difference_const (arg0, arg1, &diff))
10795 return build_int_cst_type (type, diff);
10796 }
10797
10798 /* Fold &a[i] - &a[j] to i-j. */
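      /* For example, &a[i] - &a[j] folds to the difference of the
	 offsets, even when i and j are not constants.  */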
10799 if (TREE_CODE (arg0) == ADDR_EXPR
10800 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10801 && TREE_CODE (arg1) == ADDR_EXPR
10802 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10803 {
10804 tree tem = fold_addr_of_array_ref_difference (loc, type,
10805 TREE_OPERAND (arg0, 0),
10806 TREE_OPERAND (arg1, 0));
10807 if (tem)
10808 return tem;
10809 }
10810
10811 if (FLOAT_TYPE_P (type)
10812 && flag_unsafe_math_optimizations
10813 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10814 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10815 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10816 return tem;
10817
      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same
	 or one of them being the constant 1.  Make sure the type is not
	 saturating and has the signedness of the stripped operands, as
	 fold_plusminus_mult_expr will re-associate.
	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
10822 if ((TREE_CODE (arg0) == MULT_EXPR
10823 || TREE_CODE (arg1) == MULT_EXPR)
10824 && !TYPE_SATURATING (type)
10825 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10826 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10827 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10828 {
10829 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10830 if (tem)
10831 return tem;
10832 }
10833
10834 goto associate;
10835
10836 case MULT_EXPR:
10837 /* (-A) * (-B) -> A * B */
10838 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10839 return fold_build2_loc (loc, MULT_EXPR, type,
10840 fold_convert_loc (loc, type,
10841 TREE_OPERAND (arg0, 0)),
10842 fold_convert_loc (loc, type,
10843 negate_expr (arg1)));
10844 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10845 return fold_build2_loc (loc, MULT_EXPR, type,
10846 fold_convert_loc (loc, type,
10847 negate_expr (arg0)),
10848 fold_convert_loc (loc, type,
10849 TREE_OPERAND (arg1, 0)));
10850
10851 if (! FLOAT_TYPE_P (type))
10852 {
10853 if (integer_zerop (arg1))
10854 return omit_one_operand_loc (loc, type, arg1, arg0);
10855 if (integer_onep (arg1))
10856 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10857 /* Transform x * -1 into -x. Make sure to do the negation
10858 on the original operand with conversions not stripped
10859 because we can only strip non-sign-changing conversions. */
10860 if (integer_all_onesp (arg1))
10861 return fold_convert_loc (loc, type, negate_expr (op0));
10862 /* Transform x * -C into -x * C if x is easily negatable. */
10863 if (TREE_CODE (arg1) == INTEGER_CST
10864 && tree_int_cst_sgn (arg1) == -1
10865 && negate_expr_p (arg0)
10866 && (tem = negate_expr (arg1)) != arg1
10867 && !TREE_OVERFLOW (tem))
10868 return fold_build2_loc (loc, MULT_EXPR, type,
10869 fold_convert_loc (loc, type,
10870 negate_expr (arg0)),
10871 tem);
10872
10873 /* (a * (1 << b)) is (a << b) */
10874 if (TREE_CODE (arg1) == LSHIFT_EXPR
10875 && integer_onep (TREE_OPERAND (arg1, 0)))
10876 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10877 TREE_OPERAND (arg1, 1));
10878 if (TREE_CODE (arg0) == LSHIFT_EXPR
10879 && integer_onep (TREE_OPERAND (arg0, 0)))
10880 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10881 TREE_OPERAND (arg0, 1));
10882
10883 /* (A + A) * C -> A * 2 * C */
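	  /* For example, (a + a) * 5 becomes a * 10.  */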
10884 if (TREE_CODE (arg0) == PLUS_EXPR
10885 && TREE_CODE (arg1) == INTEGER_CST
10886 && operand_equal_p (TREE_OPERAND (arg0, 0),
10887 TREE_OPERAND (arg0, 1), 0))
10888 return fold_build2_loc (loc, MULT_EXPR, type,
10889 omit_one_operand_loc (loc, type,
10890 TREE_OPERAND (arg0, 0),
10891 TREE_OPERAND (arg0, 1)),
10892 fold_build2_loc (loc, MULT_EXPR, type,
				     build_int_cst (type, 2), arg1));
10894
10895 strict_overflow_p = false;
10896 if (TREE_CODE (arg1) == INTEGER_CST
10897 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10898 &strict_overflow_p)))
10899 {
10900 if (strict_overflow_p)
10901 fold_overflow_warning (("assuming signed overflow does not "
10902 "occur when simplifying "
10903 "multiplication"),
10904 WARN_STRICT_OVERFLOW_MISC);
10905 return fold_convert_loc (loc, type, tem);
10906 }
10907
10908 /* Optimize z * conj(z) for integer complex numbers. */
10909 if (TREE_CODE (arg0) == CONJ_EXPR
10910 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10911 return fold_mult_zconjz (loc, type, arg1);
10912 if (TREE_CODE (arg1) == CONJ_EXPR
10913 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10914 return fold_mult_zconjz (loc, type, arg0);
10915 }
10916 else
10917 {
10918 /* Maybe fold x * 0 to 0. The expressions aren't the same
10919 when x is NaN, since x * 0 is also NaN. Nor are they the
10920 same in modes with signed zeros, since multiplying a
10921 negative value by 0 gives -0, not +0. */
10922 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10923 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10924 && real_zerop (arg1))
10925 return omit_one_operand_loc (loc, type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for SNaNs.
	     Likewise for complex arithmetic with signed zeros.  */
10928 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10929 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10930 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10931 && real_onep (arg1))
10932 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10933
10934 /* Transform x * -1.0 into -x. */
10935 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10936 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10937 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10938 && real_minus_onep (arg1))
10939 return fold_convert_loc (loc, type, negate_expr (arg0));
10940
	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
	     the result for floating point types due to rounding, so it is
	     applied only if -fassociative-math was specified.  */
10944 if (flag_associative_math
10945 && TREE_CODE (arg0) == RDIV_EXPR
10946 && TREE_CODE (arg1) == REAL_CST
10947 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10948 {
10949 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10950 arg1);
10951 if (tem)
10952 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10953 TREE_OPERAND (arg0, 1));
10954 }
10955
10956 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10957 if (operand_equal_p (arg0, arg1, 0))
10958 {
10959 tree tem = fold_strip_sign_ops (arg0);
10960 if (tem != NULL_TREE)
10961 {
10962 tem = fold_convert_loc (loc, type, tem);
10963 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10964 }
10965 }
10966
10967 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10968 This is not the same for NaNs or if signed zeros are
10969 involved. */
10970 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10971 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10972 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10973 && TREE_CODE (arg1) == COMPLEX_CST
10974 && real_zerop (TREE_REALPART (arg1)))
10975 {
10976 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10977 if (real_onep (TREE_IMAGPART (arg1)))
10978 return
10979 fold_build2_loc (loc, COMPLEX_EXPR, type,
10980 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10981 rtype, arg0)),
10982 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10983 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10984 return
10985 fold_build2_loc (loc, COMPLEX_EXPR, type,
10986 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10987 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10988 rtype, arg0)));
10989 }
10990
10991 /* Optimize z * conj(z) for floating point complex numbers.
10992 Guarded by flag_unsafe_math_optimizations as non-finite
10993 imaginary components don't produce scalar results. */
10994 if (flag_unsafe_math_optimizations
10995 && TREE_CODE (arg0) == CONJ_EXPR
10996 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10997 return fold_mult_zconjz (loc, type, arg1);
10998 if (flag_unsafe_math_optimizations
10999 && TREE_CODE (arg1) == CONJ_EXPR
11000 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11001 return fold_mult_zconjz (loc, type, arg0);
11002
11003 if (flag_unsafe_math_optimizations)
11004 {
11005 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11006 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11007
11008 /* Optimizations of root(...)*root(...). */
11009 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11010 {
11011 tree rootfn, arg;
11012 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11013 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11014
11015 /* Optimize sqrt(x)*sqrt(x) as x. */
11016 if (BUILTIN_SQRT_P (fcode0)
11017 && operand_equal_p (arg00, arg10, 0)
11018 && ! HONOR_SNANS (TYPE_MODE (type)))
11019 return arg00;
11020
11021 /* Optimize root(x)*root(y) as root(x*y). */
11022 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11023 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11024 return build_call_expr_loc (loc, rootfn, 1, arg);
11025 }
11026
11027 /* Optimize expN(x)*expN(y) as expN(x+y). */
11028 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11029 {
11030 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11031 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11032 CALL_EXPR_ARG (arg0, 0),
11033 CALL_EXPR_ARG (arg1, 0));
11034 return build_call_expr_loc (loc, expfn, 1, arg);
11035 }
11036
11037 /* Optimizations of pow(...)*pow(...). */
11038 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11039 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11040 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11041 {
11042 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11043 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11044 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11045 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11046
11047 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11048 if (operand_equal_p (arg01, arg11, 0))
11049 {
11050 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11051 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11052 arg00, arg10);
11053 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11054 }
11055
11056 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
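		  /* For example, pow (x, 2.0) * pow (x, 3.0) becomes
		     pow (x, 5.0).  */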
11057 if (operand_equal_p (arg00, arg10, 0))
11058 {
11059 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11060 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11061 arg01, arg11);
11062 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11063 }
11064 }
11065
11066 /* Optimize tan(x)*cos(x) as sin(x). */
11067 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11068 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11069 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11070 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11071 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11072 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11073 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11074 CALL_EXPR_ARG (arg1, 0), 0))
11075 {
11076 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11077
11078 if (sinfn != NULL_TREE)
11079 return build_call_expr_loc (loc, sinfn, 1,
11080 CALL_EXPR_ARG (arg0, 0));
11081 }
11082
11083 /* Optimize x*pow(x,c) as pow(x,c+1). */
11084 if (fcode1 == BUILT_IN_POW
11085 || fcode1 == BUILT_IN_POWF
11086 || fcode1 == BUILT_IN_POWL)
11087 {
11088 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11089 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11090 if (TREE_CODE (arg11) == REAL_CST
11091 && !TREE_OVERFLOW (arg11)
11092 && operand_equal_p (arg0, arg10, 0))
11093 {
11094 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11095 REAL_VALUE_TYPE c;
11096 tree arg;
11097
11098 c = TREE_REAL_CST (arg11);
11099 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11100 arg = build_real (type, c);
11101 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11102 }
11103 }
11104
11105 /* Optimize pow(x,c)*x as pow(x,c+1). */
11106 if (fcode0 == BUILT_IN_POW
11107 || fcode0 == BUILT_IN_POWF
11108 || fcode0 == BUILT_IN_POWL)
11109 {
11110 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11111 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11112 if (TREE_CODE (arg01) == REAL_CST
11113 && !TREE_OVERFLOW (arg01)
11114 && operand_equal_p (arg1, arg00, 0))
11115 {
11116 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11117 REAL_VALUE_TYPE c;
11118 tree arg;
11119
11120 c = TREE_REAL_CST (arg01);
11121 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11122 arg = build_real (type, c);
11123 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11124 }
11125 }
11126
11127 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11128 if (!in_gimple_form
11129 && optimize
11130 && operand_equal_p (arg0, arg1, 0))
11131 {
11132 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11133
11134 if (powfn)
11135 {
11136 tree arg = build_real (type, dconst2);
11137 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11138 }
11139 }
11140 }
11141 }
11142 goto associate;
11143
11144 case BIT_IOR_EXPR:
11145 bit_ior:
11146 if (integer_all_onesp (arg1))
11147 return omit_one_operand_loc (loc, type, arg1, arg0);
11148 if (integer_zerop (arg1))
11149 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11150 if (operand_equal_p (arg0, arg1, 0))
11151 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11152
11153 /* ~X | X is -1. */
11154 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11155 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11156 {
11157 t1 = build_zero_cst (type);
11158 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11159 return omit_one_operand_loc (loc, type, t1, arg1);
11160 }
11161
11162 /* X | ~X is -1. */
11163 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11164 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11165 {
11166 t1 = build_zero_cst (type);
11167 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11168 return omit_one_operand_loc (loc, type, t1, arg0);
11169 }
11170
11171 /* Canonicalize (X & C1) | C2. */
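      /* For example, (x & 0x0F) | 0xFF becomes 0xFF, and
	 (x & 0x3F) | 0x0F becomes (x & 0x30) | 0x0F.  */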
11172 if (TREE_CODE (arg0) == BIT_AND_EXPR
11173 && TREE_CODE (arg1) == INTEGER_CST
11174 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11175 {
11176 double_int c1, c2, c3, msk;
11177 int width = TYPE_PRECISION (type), w;
11178 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11179 c2 = tree_to_double_int (arg1);
11180
11181 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11182 if ((c1 & c2) == c1)
11183 return omit_one_operand_loc (loc, type, arg1,
11184 TREE_OPERAND (arg0, 0));
11185
11186 msk = double_int::mask (width);
11187
11188 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11189 if (msk.and_not (c1 | c2).is_zero ())
11190 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11191 TREE_OPERAND (arg0, 0), arg1);
11192
11193 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11194 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11195 mode which allows further optimizations. */
11196 c1 &= msk;
11197 c2 &= msk;
11198 c3 = c1.and_not (c2);
11199 for (w = BITS_PER_UNIT;
11200 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11201 w <<= 1)
11202 {
11203 unsigned HOST_WIDE_INT mask
11204 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11205 if (((c1.low | c2.low) & mask) == mask
11206 && (c1.low & ~mask) == 0 && c1.high == 0)
11207 {
11208 c3 = double_int::from_uhwi (mask);
11209 break;
11210 }
11211 }
11212 if (c3 != c1)
11213 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11214 fold_build2_loc (loc, BIT_AND_EXPR, type,
11215 TREE_OPERAND (arg0, 0),
11216 double_int_to_tree (type,
11217 c3)),
11218 arg1);
11219 }
11220
11221 /* (X & Y) | Y is (X, Y). */
11222 if (TREE_CODE (arg0) == BIT_AND_EXPR
11223 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11224 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11225 /* (X & Y) | X is (Y, X). */
11226 if (TREE_CODE (arg0) == BIT_AND_EXPR
11227 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11228 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11229 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11230 /* X | (X & Y) is (Y, X). */
11231 if (TREE_CODE (arg1) == BIT_AND_EXPR
11232 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11233 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11234 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11235 /* X | (Y & X) is (Y, X). */
11236 if (TREE_CODE (arg1) == BIT_AND_EXPR
11237 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11238 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11239 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11240
11241 /* (X & ~Y) | (~X & Y) is X ^ Y */
11242 if (TREE_CODE (arg0) == BIT_AND_EXPR
11243 && TREE_CODE (arg1) == BIT_AND_EXPR)
11244 {
11245 tree a0, a1, l0, l1, n0, n1;
11246
11247 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11248 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11249
11250 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11251 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11252
11253 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11254 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11255
11256 if ((operand_equal_p (n0, a0, 0)
11257 && operand_equal_p (n1, a1, 0))
11258 || (operand_equal_p (n0, a1, 0)
11259 && operand_equal_p (n1, a0, 0)))
11260 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11261 }
11262
11263 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11264 if (t1 != NULL_TREE)
11265 return t1;
11266
11267 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11268
11269 This results in more efficient code for machines without a NAND
11270 instruction. Combine will canonicalize to the first form
11271 which will allow use of NAND instructions provided by the
11272 backend if they exist. */
11273 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11274 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11275 {
11276 return
11277 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11278 build2 (BIT_AND_EXPR, type,
11279 fold_convert_loc (loc, type,
11280 TREE_OPERAND (arg0, 0)),
11281 fold_convert_loc (loc, type,
11282 TREE_OPERAND (arg1, 0))));
11283 }
11284
11285 /* See if this can be simplified into a rotate first. If that
11286 is unsuccessful continue in the association code. */
11287 goto bit_rotate;
11288
11289 case BIT_XOR_EXPR:
11290 if (integer_zerop (arg1))
11291 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11292 if (integer_all_onesp (arg1))
11293 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11294 if (operand_equal_p (arg0, arg1, 0))
11295 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11296
11297 /* ~X ^ X is -1. */
11298 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11299 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11300 {
11301 t1 = build_zero_cst (type);
11302 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11303 return omit_one_operand_loc (loc, type, t1, arg1);
11304 }
11305
11306 /* X ^ ~X is -1. */
11307 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11308 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11309 {
11310 t1 = build_zero_cst (type);
11311 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11312 return omit_one_operand_loc (loc, type, t1, arg0);
11313 }
11314
11315 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11316 with a constant, and the two constants have no bits in common,
11317 we should treat this as a BIT_IOR_EXPR since this may produce more
11318 simplifications. */
11319 if (TREE_CODE (arg0) == BIT_AND_EXPR
11320 && TREE_CODE (arg1) == BIT_AND_EXPR
11321 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11322 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11323 && integer_zerop (const_binop (BIT_AND_EXPR,
11324 TREE_OPERAND (arg0, 1),
11325 TREE_OPERAND (arg1, 1))))
11326 {
11327 code = BIT_IOR_EXPR;
11328 goto bit_ior;
11329 }
11330
      /* (X | Y) ^ X -> Y & ~X.  */
11332 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11333 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11334 {
11335 tree t2 = TREE_OPERAND (arg0, 1);
11336 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11337 arg1);
11338 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11339 fold_convert_loc (loc, type, t2),
11340 fold_convert_loc (loc, type, t1));
11341 return t1;
11342 }
11343
      /* (Y | X) ^ X -> Y & ~X.  */
11345 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11346 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11347 {
11348 tree t2 = TREE_OPERAND (arg0, 0);
11349 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11350 arg1);
11351 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11352 fold_convert_loc (loc, type, t2),
11353 fold_convert_loc (loc, type, t1));
11354 return t1;
11355 }
11356
      /* X ^ (X | Y) -> Y & ~X.  */
11358 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11359 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11360 {
11361 tree t2 = TREE_OPERAND (arg1, 1);
11362 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11363 arg0);
11364 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11365 fold_convert_loc (loc, type, t2),
11366 fold_convert_loc (loc, type, t1));
11367 return t1;
11368 }
11369
      /* X ^ (Y | X) -> Y & ~X.  */
11371 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11372 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11373 {
11374 tree t2 = TREE_OPERAND (arg1, 0);
11375 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11376 arg0);
11377 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11378 fold_convert_loc (loc, type, t2),
11379 fold_convert_loc (loc, type, t1));
11380 return t1;
11381 }
11382
11383 /* Convert ~X ^ ~Y to X ^ Y. */
11384 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11385 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11386 return fold_build2_loc (loc, code, type,
11387 fold_convert_loc (loc, type,
11388 TREE_OPERAND (arg0, 0)),
11389 fold_convert_loc (loc, type,
11390 TREE_OPERAND (arg1, 0)));
11391
11392 /* Convert ~X ^ C to X ^ ~C. */
11393 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11394 && TREE_CODE (arg1) == INTEGER_CST)
11395 return fold_build2_loc (loc, code, type,
11396 fold_convert_loc (loc, type,
11397 TREE_OPERAND (arg0, 0)),
11398 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11399
11400 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11401 if (TREE_CODE (arg0) == BIT_AND_EXPR
11402 && integer_onep (TREE_OPERAND (arg0, 1))
11403 && integer_onep (arg1))
11404 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11405 build_zero_cst (TREE_TYPE (arg0)));
11406
11407 /* Fold (X & Y) ^ Y as ~X & Y. */
11408 if (TREE_CODE (arg0) == BIT_AND_EXPR
11409 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11410 {
11411 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11412 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11413 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11414 fold_convert_loc (loc, type, arg1));
11415 }
11416 /* Fold (X & Y) ^ X as ~Y & X. */
11417 if (TREE_CODE (arg0) == BIT_AND_EXPR
11418 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11419 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11420 {
11421 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11422 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11423 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11424 fold_convert_loc (loc, type, arg1));
11425 }
11426 /* Fold X ^ (X & Y) as X & ~Y. */
11427 if (TREE_CODE (arg1) == BIT_AND_EXPR
11428 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11429 {
11430 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11431 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11432 fold_convert_loc (loc, type, arg0),
11433 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11434 }
11435 /* Fold X ^ (Y & X) as ~Y & X. */
11436 if (TREE_CODE (arg1) == BIT_AND_EXPR
11437 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11438 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11439 {
11440 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11441 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11442 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11443 fold_convert_loc (loc, type, arg0));
11444 }
11445
11446 /* See if this can be simplified into a rotate first. If that
11447 is unsuccessful continue in the association code. */
11448 goto bit_rotate;
11449
11450 case BIT_AND_EXPR:
11451 if (integer_all_onesp (arg1))
11452 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11453 if (integer_zerop (arg1))
11454 return omit_one_operand_loc (loc, type, arg1, arg0);
11455 if (operand_equal_p (arg0, arg1, 0))
11456 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11457
11458 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11459 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11460 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11461 || (TREE_CODE (arg0) == EQ_EXPR
11462 && integer_zerop (TREE_OPERAND (arg0, 1))))
11463 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11464 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11465
      /* X & ~X, X & (X == 0), and X & !X are always zero.  */
11467 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11468 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11469 || (TREE_CODE (arg1) == EQ_EXPR
11470 && integer_zerop (TREE_OPERAND (arg1, 1))))
11471 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11472 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11473
11474 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11475 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11476 && TREE_CODE (arg1) == INTEGER_CST
11477 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11478 {
11479 tree tmp1 = fold_convert_loc (loc, type, arg1);
11480 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11481 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11482 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11483 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11484 return
11485 fold_convert_loc (loc, type,
11486 fold_build2_loc (loc, BIT_IOR_EXPR,
11487 type, tmp2, tmp3));
11488 }
11489
11490 /* (X | Y) & Y is (X, Y). */
11491 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11492 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11493 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11494 /* (X | Y) & X is (Y, X). */
11495 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11496 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11497 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11498 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11499 /* X & (X | Y) is (Y, X). */
11500 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11501 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11502 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11503 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11504 /* X & (Y | X) is (Y, X). */
11505 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11506 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11507 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11508 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11509
11510 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11511 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11512 && integer_onep (TREE_OPERAND (arg0, 1))
11513 && integer_onep (arg1))
11514 {
11515 tree tem2;
11516 tem = TREE_OPERAND (arg0, 0);
11517 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11518 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11519 tem, tem2);
11520 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11521 build_zero_cst (TREE_TYPE (tem)));
11522 }
11523 /* Fold ~X & 1 as (X & 1) == 0. */
11524 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11525 && integer_onep (arg1))
11526 {
11527 tree tem2;
11528 tem = TREE_OPERAND (arg0, 0);
11529 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11530 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11531 tem, tem2);
11532 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11533 build_zero_cst (TREE_TYPE (tem)));
11534 }
11535 /* Fold !X & 1 as X == 0. */
11536 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11537 && integer_onep (arg1))
11538 {
11539 tem = TREE_OPERAND (arg0, 0);
11540 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11541 build_zero_cst (TREE_TYPE (tem)));
11542 }
11543
11544 /* Fold (X ^ Y) & Y as ~X & Y. */
11545 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11546 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11547 {
11548 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11549 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11550 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11551 fold_convert_loc (loc, type, arg1));
11552 }
11553 /* Fold (X ^ Y) & X as ~Y & X. */
11554 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11555 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11556 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11557 {
11558 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11559 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11560 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11561 fold_convert_loc (loc, type, arg1));
11562 }
11563 /* Fold X & (X ^ Y) as X & ~Y. */
11564 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11565 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11566 {
11567 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11568 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11569 fold_convert_loc (loc, type, arg0),
11570 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11571 }
11572 /* Fold X & (Y ^ X) as ~Y & X. */
11573 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11574 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11575 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11576 {
11577 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11578 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11579 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11580 fold_convert_loc (loc, type, arg0));
11581 }
11582
11583 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11584 multiple of 1 << CST. */
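/* Illustrative example (values assumed): (X * 8) & -8 folds to
   X * 8, because X * 8 is always a multiple of 8 == 1 << 3 and the
   mask -8 == -(1 << 3) only clears the low three bits, which are
   already zero.  */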
11585 if (TREE_CODE (arg1) == INTEGER_CST)
11586 {
11587 double_int cst1 = tree_to_double_int (arg1);
11588 double_int ncst1 = (-cst1).ext(TYPE_PRECISION (TREE_TYPE (arg1)),
11589 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11590 if ((cst1 & ncst1) == ncst1
11591 && multiple_of_p (type, arg0,
11592 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11593 return fold_convert_loc (loc, type, arg0);
11594 }
11595
11596 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11597 bits from CST2. */
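/* Illustrative examples (constants assumed): (X * 4) & 3 is
   always 0, since X * 4 has two trailing zero bits; and
   (X * 4) & 7 can drop the known-zero low bits from the mask
   and become (X * 4) & 4.  */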
11598 if (TREE_CODE (arg1) == INTEGER_CST
11599 && TREE_CODE (arg0) == MULT_EXPR
11600 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11601 {
11602 int arg1tz
11603 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11604 if (arg1tz > 0)
11605 {
11606 double_int arg1mask, masked;
11607 arg1mask = ~double_int::mask (arg1tz);
11608 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11609 TYPE_UNSIGNED (type));
11610 masked = arg1mask & tree_to_double_int (arg1);
11611 if (masked.is_zero ())
11612 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11613 arg0, arg1);
11614 else if (masked != tree_to_double_int (arg1))
11615 return fold_build2_loc (loc, code, type, op0,
11616 double_int_to_tree (type, masked));
11617 }
11618 }
11619
11620 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11621 ((A & N) + B) & M -> (A + B) & M
11622 Similarly if (N & M) == 0,
11623 ((A | N) + B) & M -> (A + B) & M
11624 and for - instead of + (or unary - instead of +)
11625 and/or ^ instead of |.
11626 If B is constant and (B & M) == 0, fold into A & M. */
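/* Illustrative example (constants assumed): with M == 0xFF and
   N == 0x1FF we have (N & M) == M, so ((A & 0x1FF) + B) & 0xFF
   becomes (A + B) & 0xFF; the bits N may clear lie outside M and
   can only influence the sum through carries, which propagate
   upward, away from the bits M keeps.  */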
11627 if (host_integerp (arg1, 1))
11628 {
11629 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11630 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11631 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11632 && (TREE_CODE (arg0) == PLUS_EXPR
11633 || TREE_CODE (arg0) == MINUS_EXPR
11634 || TREE_CODE (arg0) == NEGATE_EXPR)
11635 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11636 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11637 {
11638 tree pmop[2];
11639 int which = 0;
11640 unsigned HOST_WIDE_INT cst0;
11641
/* Now we know that arg0 is (C + D) or (C - D) or -C and
   that arg1 (M) == (1LL << cst) - 1.
   Store C into PMOP[0] and D into PMOP[1].  */
11645 pmop[0] = TREE_OPERAND (arg0, 0);
11646 pmop[1] = NULL;
11647 if (TREE_CODE (arg0) != NEGATE_EXPR)
11648 {
11649 pmop[1] = TREE_OPERAND (arg0, 1);
11650 which = 1;
11651 }
11652
11653 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11654 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11655 & cst1) != cst1)
11656 which = -1;
11657
11658 for (; which >= 0; which--)
11659 switch (TREE_CODE (pmop[which]))
11660 {
11661 case BIT_AND_EXPR:
11662 case BIT_IOR_EXPR:
11663 case BIT_XOR_EXPR:
11664 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11665 != INTEGER_CST)
11666 break;
/* tree_low_cst is not used here because we don't care
   about the upper bits.  */
11669 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11670 cst0 &= cst1;
11671 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11672 {
11673 if (cst0 != cst1)
11674 break;
11675 }
11676 else if (cst0 != 0)
11677 break;
11678 /* If C or D is of the form (A & N) where
11679 (N & M) == M, or of the form (A | N) or
11680 (A ^ N) where (N & M) == 0, replace it with A. */
11681 pmop[which] = TREE_OPERAND (pmop[which], 0);
11682 break;
11683 case INTEGER_CST:
/* If C or D is a constant N where (N & M) == 0, it can be
   omitted (treated as 0).  */
11686 if ((TREE_CODE (arg0) == PLUS_EXPR
11687 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11688 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11689 pmop[which] = NULL;
11690 break;
11691 default:
11692 break;
11693 }
11694
11695 /* Only build anything new if we optimized one or both arguments
11696 above. */
11697 if (pmop[0] != TREE_OPERAND (arg0, 0)
11698 || (TREE_CODE (arg0) != NEGATE_EXPR
11699 && pmop[1] != TREE_OPERAND (arg0, 1)))
11700 {
11701 tree utype = TREE_TYPE (arg0);
11702 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11703 {
11704 /* Perform the operations in a type that has defined
11705 overflow behavior. */
11706 utype = unsigned_type_for (TREE_TYPE (arg0));
11707 if (pmop[0] != NULL)
11708 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11709 if (pmop[1] != NULL)
11710 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11711 }
11712
11713 if (TREE_CODE (arg0) == NEGATE_EXPR)
11714 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11715 else if (TREE_CODE (arg0) == PLUS_EXPR)
11716 {
11717 if (pmop[0] != NULL && pmop[1] != NULL)
11718 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11719 pmop[0], pmop[1]);
11720 else if (pmop[0] != NULL)
11721 tem = pmop[0];
11722 else if (pmop[1] != NULL)
11723 tem = pmop[1];
11724 else
11725 return build_int_cst (type, 0);
11726 }
11727 else if (pmop[0] == NULL)
11728 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11729 else
11730 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11731 pmop[0], pmop[1]);
11732 /* TEM is now the new binary +, - or unary - replacement. */
11733 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11734 fold_convert_loc (loc, utype, arg1));
11735 return fold_convert_loc (loc, type, tem);
11736 }
11737 }
11738 }
11739
11740 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11741 if (t1 != NULL_TREE)
11742 return t1;
11743 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11744 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11745 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11746 {
11747 unsigned int prec
11748 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11749
11750 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11751 && (~TREE_INT_CST_LOW (arg1)
11752 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11753 return
11754 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11755 }
11756
11757 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11758
11759 This results in more efficient code for machines without a NOR
instruction.  Combine will canonicalize to the first form,
which allows use of NOR instructions provided by the
backend if they exist.  */
11763 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11764 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11765 {
11766 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11767 build2 (BIT_IOR_EXPR, type,
11768 fold_convert_loc (loc, type,
11769 TREE_OPERAND (arg0, 0)),
11770 fold_convert_loc (loc, type,
11771 TREE_OPERAND (arg1, 0))));
11772 }
11773
11774 /* If arg0 is derived from the address of an object or function, we may
11775 be able to fold this expression using the object or function's
11776 alignment. */
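/* Illustrative example (alignment assumed): if ARG0 is the
   address of a variable known to be 8-byte aligned, then
   modulus == 8 and residue == 0, so ARG0 & 7 folds to the
   constant 0.  */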
11777 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11778 {
11779 unsigned HOST_WIDE_INT modulus, residue;
11780 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11781
11782 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11783 integer_onep (arg1));
11784
11785 /* This works because modulus is a power of 2. If this weren't the
11786 case, we'd have to replace it by its greatest power-of-2
11787 divisor: modulus & -modulus. */
11788 if (low < modulus)
11789 return build_int_cst (type, residue & low);
11790 }
11791
11792 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11793 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11794 if the new mask might be further optimized. */
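/* Illustrative example (constants assumed): in (X << 2) & 0xFC
   the low two bits of the result are known zero, so the mask may
   be widened to 0xFC | 3 == 0xFF, a mode-sized mask that is more
   likely to combine with surrounding code.  */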
11795 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11796 || TREE_CODE (arg0) == RSHIFT_EXPR)
11797 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11798 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11799 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11800 < TYPE_PRECISION (TREE_TYPE (arg0))
11801 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11802 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11803 {
11804 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11805 unsigned HOST_WIDE_INT mask
11806 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11807 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11808 tree shift_type = TREE_TYPE (arg0);
11809
11810 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11811 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11812 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11813 && TYPE_PRECISION (TREE_TYPE (arg0))
11814 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11815 {
11816 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11817 tree arg00 = TREE_OPERAND (arg0, 0);
11818 /* See if more bits can be proven as zero because of
11819 zero extension. */
11820 if (TREE_CODE (arg00) == NOP_EXPR
11821 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11822 {
11823 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11824 if (TYPE_PRECISION (inner_type)
11825 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11826 && TYPE_PRECISION (inner_type) < prec)
11827 {
11828 prec = TYPE_PRECISION (inner_type);
11829 /* See if we can shorten the right shift. */
11830 if (shiftc < prec)
11831 shift_type = inner_type;
11832 }
11833 }
11834 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11835 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11836 zerobits <<= prec - shiftc;
11837 /* For arithmetic shift if sign bit could be set, zerobits
11838 can contain actually sign bits, so no transformation is
11839 possible, unless MASK masks them all away. In that
11840 case the shift needs to be converted into logical shift. */
11841 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11842 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11843 {
11844 if ((mask & zerobits) == 0)
11845 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11846 else
11847 zerobits = 0;
11848 }
11849 }
11850
11851 /* ((X << 16) & 0xff00) is (X, 0). */
11852 if ((mask & zerobits) == mask)
11853 return omit_one_operand_loc (loc, type,
11854 build_int_cst (type, 0), arg0);
11855
11856 newmask = mask | zerobits;
11857 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11858 {
11859 unsigned int prec;
11860
11861 /* Only do the transformation if NEWMASK is some integer
11862 mode's mask. */
11863 for (prec = BITS_PER_UNIT;
11864 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11865 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11866 break;
11867 if (prec < HOST_BITS_PER_WIDE_INT
11868 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11869 {
11870 tree newmaskt;
11871
11872 if (shift_type != TREE_TYPE (arg0))
11873 {
11874 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11875 fold_convert_loc (loc, shift_type,
11876 TREE_OPERAND (arg0, 0)),
11877 TREE_OPERAND (arg0, 1));
11878 tem = fold_convert_loc (loc, type, tem);
11879 }
11880 else
11881 tem = op0;
11882 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11883 if (!tree_int_cst_equal (newmaskt, arg1))
11884 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11885 }
11886 }
11887 }
11888
11889 goto associate;
11890
11891 case RDIV_EXPR:
11892 /* Don't touch a floating-point divide by zero unless the mode
11893 of the constant can represent infinity. */
11894 if (TREE_CODE (arg1) == REAL_CST
11895 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11896 && real_zerop (arg1))
11897 return NULL_TREE;
11898
11899 /* Optimize A / A to 1.0 if we don't care about
11900 NaNs or Infinities. Skip the transformation
11901 for non-real operands. */
11902 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11903 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11904 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11905 && operand_equal_p (arg0, arg1, 0))
11906 {
11907 tree r = build_real (TREE_TYPE (arg0), dconst1);
11908
11909 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11910 }
11911
11912 /* The complex version of the above A / A optimization. */
11913 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11914 && operand_equal_p (arg0, arg1, 0))
11915 {
11916 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11917 if (! HONOR_NANS (TYPE_MODE (elem_type))
11918 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11919 {
11920 tree r = build_real (elem_type, dconst1);
11921 /* omit_two_operands will call fold_convert for us. */
11922 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11923 }
11924 }
11925
11926 /* (-A) / (-B) -> A / B */
11927 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11928 return fold_build2_loc (loc, RDIV_EXPR, type,
11929 TREE_OPERAND (arg0, 0),
11930 negate_expr (arg1));
11931 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11932 return fold_build2_loc (loc, RDIV_EXPR, type,
11933 negate_expr (arg0),
11934 TREE_OPERAND (arg1, 0));
11935
11936 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11937 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11938 && real_onep (arg1))
11939 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11940
11941 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11942 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11943 && real_minus_onep (arg1))
11944 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11945 negate_expr (arg0)));
11946
11947 /* If ARG1 is a constant, we can convert this to a multiply by the
11948 reciprocal. This does not have the same rounding properties,
11949 so only do this if -freciprocal-math. We can actually
11950 always safely do it if ARG1 is a power of two, but it's hard to
11951 tell if it is or not in a portable manner. */
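/* Illustrative examples (constants assumed): when optimizing,
   X / 2.0 becomes X * 0.5, since 0.5 is an exact inverse;
   X / 3.0 becomes X * (1.0/3.0) only under -freciprocal-math,
   since 1.0/3.0 is not exactly representable.  */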
11952 if (optimize
11953 && (TREE_CODE (arg1) == REAL_CST
11954 || (TREE_CODE (arg1) == COMPLEX_CST
11955 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11956 || (TREE_CODE (arg1) == VECTOR_CST
11957 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11958 {
11959 if (flag_reciprocal_math
11960 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11961 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11962 /* Find the reciprocal if optimizing and the result is exact.
11963 TODO: Complex reciprocal not implemented. */
11964 if (TREE_CODE (arg1) != COMPLEX_CST)
11965 {
11966 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
11967
11968 if (inverse)
11969 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
11970 }
11971 }
11972 /* Convert A/B/C to A/(B*C). */
11973 if (flag_reciprocal_math
11974 && TREE_CODE (arg0) == RDIV_EXPR)
11975 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11976 fold_build2_loc (loc, MULT_EXPR, type,
11977 TREE_OPERAND (arg0, 1), arg1));
11978
11979 /* Convert A/(B/C) to (A/B)*C. */
11980 if (flag_reciprocal_math
11981 && TREE_CODE (arg1) == RDIV_EXPR)
11982 return fold_build2_loc (loc, MULT_EXPR, type,
11983 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11984 TREE_OPERAND (arg1, 0)),
11985 TREE_OPERAND (arg1, 1));
11986
11987 /* Convert C1/(X*C2) into (C1/C2)/X. */
11988 if (flag_reciprocal_math
11989 && TREE_CODE (arg1) == MULT_EXPR
11990 && TREE_CODE (arg0) == REAL_CST
11991 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11992 {
11993 tree tem = const_binop (RDIV_EXPR, arg0,
11994 TREE_OPERAND (arg1, 1));
11995 if (tem)
11996 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11997 TREE_OPERAND (arg1, 0));
11998 }
11999
12000 if (flag_unsafe_math_optimizations)
12001 {
12002 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12003 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12004
12005 /* Optimize sin(x)/cos(x) as tan(x). */
12006 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12007 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12008 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12009 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12010 CALL_EXPR_ARG (arg1, 0), 0))
12011 {
12012 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12013
12014 if (tanfn != NULL_TREE)
12015 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12016 }
12017
12018 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12019 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12020 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12021 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12022 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12023 CALL_EXPR_ARG (arg1, 0), 0))
12024 {
12025 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12026
12027 if (tanfn != NULL_TREE)
12028 {
12029 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12030 CALL_EXPR_ARG (arg0, 0));
12031 return fold_build2_loc (loc, RDIV_EXPR, type,
12032 build_real (type, dconst1), tmp);
12033 }
12034 }
12035
12036 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12037 NaNs or Infinities. */
12038 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12039 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12040 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12041 {
12042 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12043 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12044
12045 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12046 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12047 && operand_equal_p (arg00, arg01, 0))
12048 {
12049 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12050
12051 if (cosfn != NULL_TREE)
12052 return build_call_expr_loc (loc, cosfn, 1, arg00);
12053 }
12054 }
12055
12056 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12057 NaNs or Infinities. */
12058 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12059 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12060 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12061 {
12062 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12063 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12064
12065 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12066 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12067 && operand_equal_p (arg00, arg01, 0))
12068 {
12069 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12070
12071 if (cosfn != NULL_TREE)
12072 {
12073 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12074 return fold_build2_loc (loc, RDIV_EXPR, type,
12075 build_real (type, dconst1),
12076 tmp);
12077 }
12078 }
12079 }
12080
12081 /* Optimize pow(x,c)/x as pow(x,c-1). */
12082 if (fcode0 == BUILT_IN_POW
12083 || fcode0 == BUILT_IN_POWF
12084 || fcode0 == BUILT_IN_POWL)
12085 {
12086 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12087 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12088 if (TREE_CODE (arg01) == REAL_CST
12089 && !TREE_OVERFLOW (arg01)
12090 && operand_equal_p (arg1, arg00, 0))
12091 {
12092 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12093 REAL_VALUE_TYPE c;
12094 tree arg;
12095
12096 c = TREE_REAL_CST (arg01);
12097 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12098 arg = build_real (type, c);
12099 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12100 }
12101 }
12102
12103 /* Optimize a/root(b/c) into a*root(c/b). */
12104 if (BUILTIN_ROOT_P (fcode1))
12105 {
12106 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12107
12108 if (TREE_CODE (rootarg) == RDIV_EXPR)
12109 {
12110 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12111 tree b = TREE_OPERAND (rootarg, 0);
12112 tree c = TREE_OPERAND (rootarg, 1);
12113
12114 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12115
12116 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12117 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12118 }
12119 }
12120
12121 /* Optimize x/expN(y) into x*expN(-y). */
12122 if (BUILTIN_EXPONENT_P (fcode1))
12123 {
12124 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12125 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12126 arg1 = build_call_expr_loc (loc,
12127 expfn, 1,
12128 fold_convert_loc (loc, type, arg));
12129 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12130 }
12131
12132 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12133 if (fcode1 == BUILT_IN_POW
12134 || fcode1 == BUILT_IN_POWF
12135 || fcode1 == BUILT_IN_POWL)
12136 {
12137 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12138 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12139 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12140 tree neg11 = fold_convert_loc (loc, type,
12141 negate_expr (arg11));
12142 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12143 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12144 }
12145 }
12146 return NULL_TREE;
12147
12148 case TRUNC_DIV_EXPR:
/* Optimize (X & (-A)) / A where A is a power of 2,
   to X >> log2(A).  */
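/* Illustrative example (constants assumed): for signed X,
   (X & -8) / 8 becomes X >> 3; the AND has already discarded the
   low three bits, so the division is an exact arithmetic
   shift.  */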
12151 if (TREE_CODE (arg0) == BIT_AND_EXPR
12152 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12153 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12154 {
12155 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12156 arg1, TREE_OPERAND (arg0, 1));
if (sum && integer_zerop (sum))
  {
12158 unsigned long pow2;
12159
12160 if (TREE_INT_CST_LOW (arg1))
12161 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12162 else
12163 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12164 + HOST_BITS_PER_WIDE_INT;
12165
12166 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12167 TREE_OPERAND (arg0, 0),
12168 build_int_cst (integer_type_node, pow2));
12169 }
12170 }
12171
12172 /* Fall through */
12173
12174 case FLOOR_DIV_EXPR:
12175 /* Simplify A / (B << N) where A and B are positive and B is
12176 a power of 2, to A >> (N + log2(B)). */
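/* Illustrative example (constants assumed): with B == 4 we have
   log2(B) == 2, so nonnegative A / (4 << N) becomes
   A >> (N + 2).  */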
12177 strict_overflow_p = false;
12178 if (TREE_CODE (arg1) == LSHIFT_EXPR
12179 && (TYPE_UNSIGNED (type)
12180 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12181 {
12182 tree sval = TREE_OPERAND (arg1, 0);
12183 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12184 {
12185 tree sh_cnt = TREE_OPERAND (arg1, 1);
12186 unsigned long pow2;
12187
12188 if (TREE_INT_CST_LOW (sval))
12189 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12190 else
12191 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12192 + HOST_BITS_PER_WIDE_INT;
12193
12194 if (strict_overflow_p)
12195 fold_overflow_warning (("assuming signed overflow does not "
12196 "occur when simplifying A / (B << N)"),
12197 WARN_STRICT_OVERFLOW_MISC);
12198
12199 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12200 sh_cnt,
12201 build_int_cst (TREE_TYPE (sh_cnt),
12202 pow2));
12203 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12204 fold_convert_loc (loc, type, arg0), sh_cnt);
12205 }
12206 }
12207
12208 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12209 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12210 if (INTEGRAL_TYPE_P (type)
12211 && TYPE_UNSIGNED (type)
12212 && code == FLOOR_DIV_EXPR)
12213 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12214
12215 /* Fall through */
12216
12217 case ROUND_DIV_EXPR:
12218 case CEIL_DIV_EXPR:
12219 case EXACT_DIV_EXPR:
12220 if (integer_onep (arg1))
12221 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12222 if (integer_zerop (arg1))
12223 return NULL_TREE;
12224 /* X / -1 is -X. */
12225 if (!TYPE_UNSIGNED (type)
12226 && TREE_CODE (arg1) == INTEGER_CST
12227 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12228 && TREE_INT_CST_HIGH (arg1) == -1)
12229 return fold_convert_loc (loc, type, negate_expr (arg0));
12230
12231 /* Convert -A / -B to A / B when the type is signed and overflow is
12232 undefined. */
12233 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12234 && TREE_CODE (arg0) == NEGATE_EXPR
12235 && negate_expr_p (arg1))
12236 {
12237 if (INTEGRAL_TYPE_P (type))
12238 fold_overflow_warning (("assuming signed overflow does not occur "
12239 "when distributing negation across "
12240 "division"),
12241 WARN_STRICT_OVERFLOW_MISC);
12242 return fold_build2_loc (loc, code, type,
12243 fold_convert_loc (loc, type,
12244 TREE_OPERAND (arg0, 0)),
12245 fold_convert_loc (loc, type,
12246 negate_expr (arg1)));
12247 }
12248 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12249 && TREE_CODE (arg1) == NEGATE_EXPR
12250 && negate_expr_p (arg0))
12251 {
12252 if (INTEGRAL_TYPE_P (type))
12253 fold_overflow_warning (("assuming signed overflow does not occur "
12254 "when distributing negation across "
12255 "division"),
12256 WARN_STRICT_OVERFLOW_MISC);
12257 return fold_build2_loc (loc, code, type,
12258 fold_convert_loc (loc, type,
12259 negate_expr (arg0)),
12260 fold_convert_loc (loc, type,
12261 TREE_OPERAND (arg1, 0)));
12262 }
12263
12264 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12265 operation, EXACT_DIV_EXPR.
12266
Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
At one time others generated faster code, but it's not clear whether
they still do after the last round of changes to the DIV code
in expmed.c.  */
12270 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12271 && multiple_of_p (type, arg0, arg1))
12272 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12273
12274 strict_overflow_p = false;
12275 if (TREE_CODE (arg1) == INTEGER_CST
12276 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12277 &strict_overflow_p)))
12278 {
12279 if (strict_overflow_p)
12280 fold_overflow_warning (("assuming signed overflow does not occur "
12281 "when simplifying division"),
12282 WARN_STRICT_OVERFLOW_MISC);
12283 return fold_convert_loc (loc, type, tem);
12284 }
12285
12286 return NULL_TREE;
12287
12288 case CEIL_MOD_EXPR:
12289 case FLOOR_MOD_EXPR:
12290 case ROUND_MOD_EXPR:
12291 case TRUNC_MOD_EXPR:
12292 /* X % 1 is always zero, but be sure to preserve any side
12293 effects in X. */
12294 if (integer_onep (arg1))
12295 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12296
/* For X % 0, return X % 0 unchanged so that we can emit the
   proper warnings and errors.  */
12299 if (integer_zerop (arg1))
12300 return NULL_TREE;
12301
12302 /* 0 % X is always zero, but be sure to preserve any side
12303 effects in X. Place this after checking for X == 0. */
12304 if (integer_zerop (arg0))
12305 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12306
12307 /* X % -1 is zero. */
12308 if (!TYPE_UNSIGNED (type)
12309 && TREE_CODE (arg1) == INTEGER_CST
12310 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12311 && TREE_INT_CST_HIGH (arg1) == -1)
12312 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12313
12314 /* X % -C is the same as X % C. */
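/* With truncating division the remainder takes the sign of the
   dividend, so e.g. 7 % -3 == 7 % 3 == 1 and
   -7 % -3 == -7 % 3 == -1 (values assumed for illustration).  */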
12315 if (code == TRUNC_MOD_EXPR
12316 && !TYPE_UNSIGNED (type)
12317 && TREE_CODE (arg1) == INTEGER_CST
12318 && !TREE_OVERFLOW (arg1)
12319 && TREE_INT_CST_HIGH (arg1) < 0
12320 && !TYPE_OVERFLOW_TRAPS (type)
12321 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12322 && !sign_bit_p (arg1, arg1))
12323 return fold_build2_loc (loc, code, type,
12324 fold_convert_loc (loc, type, arg0),
12325 fold_convert_loc (loc, type,
12326 negate_expr (arg1)));
12327
12328 /* X % -Y is the same as X % Y. */
12329 if (code == TRUNC_MOD_EXPR
12330 && !TYPE_UNSIGNED (type)
12331 && TREE_CODE (arg1) == NEGATE_EXPR
12332 && !TYPE_OVERFLOW_TRAPS (type))
12333 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12334 fold_convert_loc (loc, type,
12335 TREE_OPERAND (arg1, 0)));
12336
12337 strict_overflow_p = false;
12338 if (TREE_CODE (arg1) == INTEGER_CST
12339 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12340 &strict_overflow_p)))
12341 {
12342 if (strict_overflow_p)
12343 fold_overflow_warning (("assuming signed overflow does not occur "
12344 "when simplifying modulus"),
12345 WARN_STRICT_OVERFLOW_MISC);
12346 return fold_convert_loc (loc, type, tem);
12347 }
12348
12349 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12350 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12351 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12352 && (TYPE_UNSIGNED (type)
12353 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12354 {
12355 tree c = arg1;
12356 /* Also optimize A % (C << N) where C is a power of 2,
12357 to A & ((C << N) - 1). */
12358 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12359 c = TREE_OPERAND (arg1, 0);
12360
12361 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12362 {
12363 tree mask
12364 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12365 build_int_cst (TREE_TYPE (arg1), 1));
12366 if (strict_overflow_p)
12367 fold_overflow_warning (("assuming signed overflow does not "
12368 "occur when simplifying "
12369 "X % (power of two)"),
12370 WARN_STRICT_OVERFLOW_MISC);
12371 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12372 fold_convert_loc (loc, type, arg0),
12373 fold_convert_loc (loc, type, mask));
12374 }
12375 }
12376
12377 return NULL_TREE;
12378
12379 case LROTATE_EXPR:
12380 case RROTATE_EXPR:
12381 if (integer_all_onesp (arg0))
12382 return omit_one_operand_loc (loc, type, arg0, arg1);
12383 goto shift;
12384
12385 case RSHIFT_EXPR:
12386 /* Optimize -1 >> x for arithmetic right shifts. */
12387 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12388 && tree_expr_nonnegative_p (arg1))
12389 return omit_one_operand_loc (loc, type, arg0, arg1);
12390 /* ... fall through ... */
12391
12392 case LSHIFT_EXPR:
12393 shift:
12394 if (integer_zerop (arg1))
12395 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12396 if (integer_zerop (arg0))
12397 return omit_one_operand_loc (loc, type, arg0, arg1);
12398
/* Since a negative shift count is not well-defined,
   don't try to compute it in the compiler.  */
12401 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12402 return NULL_TREE;
12403
12404 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
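/* Illustrative example (counts assumed): (x << 3) << 4 becomes
   x << 7, provided 7 is still below the precision of the
   type.  */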
12405 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12406 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12407 && host_integerp (TREE_OPERAND (arg0, 1), false)
12408 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12409 {
12410 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12411 + TREE_INT_CST_LOW (arg1));
12412
12413 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12414 being well defined. */
12415 if (low >= TYPE_PRECISION (type))
12416 {
12417 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12418 low = low % TYPE_PRECISION (type);
12419 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12420 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12421 TREE_OPERAND (arg0, 0));
12422 else
12423 low = TYPE_PRECISION (type) - 1;
12424 }
12425
12426 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12427 build_int_cst (type, low));
12428 }
12429
12430 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12431 into x & ((unsigned)-1 >> c) for unsigned types. */
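/* Illustrative examples (a 32-bit type assumed): (x >> 4) << 4
   becomes x & 0xFFFFFFF0, and for unsigned x, (x << 4) >> 4
   becomes x & 0x0FFFFFFF.  */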
12432 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12433 || (TYPE_UNSIGNED (type)
12434 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12435 && host_integerp (arg1, false)
12436 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12437 && host_integerp (TREE_OPERAND (arg0, 1), false)
12438 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12439 {
12440 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12441 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12442 tree lshift;
12443 tree arg00;
12444
12445 if (low0 == low1)
12446 {
12447 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12448
12449 lshift = build_int_cst (type, -1);
12450 lshift = int_const_binop (code, lshift, arg1);
12451
12452 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12453 }
12454 }
12455
12456 /* Rewrite an LROTATE_EXPR by a constant into an
12457 RROTATE_EXPR by a new constant. */
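/* Illustrative example (a 32-bit type assumed): a left rotate
   by 3 becomes a right rotate by 32 - 3 == 29.  */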
12458 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12459 {
12460 tree tem = build_int_cst (TREE_TYPE (arg1),
12461 TYPE_PRECISION (type));
12462 tem = const_binop (MINUS_EXPR, tem, arg1);
12463 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12464 }
12465
12466 /* If we have a rotate of a bit operation with the rotate count and
12467 the second operand of the bit operation both constant,
12468 permute the two operations. */
12469 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12470 && (TREE_CODE (arg0) == BIT_AND_EXPR
12471 || TREE_CODE (arg0) == BIT_IOR_EXPR
12472 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12473 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12474 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12475 fold_build2_loc (loc, code, type,
12476 TREE_OPERAND (arg0, 0), arg1),
12477 fold_build2_loc (loc, code, type,
12478 TREE_OPERAND (arg0, 1), arg1));
12479
12480 /* Two consecutive rotates adding up to the precision of the
12481 type can be ignored. */
12482 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12483 && TREE_CODE (arg0) == RROTATE_EXPR
12484 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12485 && TREE_INT_CST_HIGH (arg1) == 0
12486 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12487 && ((TREE_INT_CST_LOW (arg1)
12488 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12489 == (unsigned int) TYPE_PRECISION (type)))
12490 return TREE_OPERAND (arg0, 0);
12491
12492 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12493 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12494 if the latter can be further optimized. */
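/* Illustrative example (constants assumed): (x & 0xFF) << 8 is
   tried as (x << 8) & 0xFF00; the rewrite is kept only when the
   resulting AND folds further.  */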
12495 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12496 && TREE_CODE (arg0) == BIT_AND_EXPR
12497 && TREE_CODE (arg1) == INTEGER_CST
12498 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12499 {
12500 tree mask = fold_build2_loc (loc, code, type,
12501 fold_convert_loc (loc, type,
12502 TREE_OPERAND (arg0, 1)),
12503 arg1);
12504 tree shift = fold_build2_loc (loc, code, type,
12505 fold_convert_loc (loc, type,
12506 TREE_OPERAND (arg0, 0)),
12507 arg1);
12508 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12509 if (tem)
12510 return tem;
12511 }
12512
12513 return NULL_TREE;
12514
12515 case MIN_EXPR:
12516 if (operand_equal_p (arg0, arg1, 0))
12517 return omit_one_operand_loc (loc, type, arg0, arg1);
12518 if (INTEGRAL_TYPE_P (type)
12519 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12520 return omit_one_operand_loc (loc, type, arg1, arg0);
12521 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12522 if (tem)
12523 return tem;
12524 goto associate;
12525
12526 case MAX_EXPR:
12527 if (operand_equal_p (arg0, arg1, 0))
12528 return omit_one_operand_loc (loc, type, arg0, arg1);
12529 if (INTEGRAL_TYPE_P (type)
12530 && TYPE_MAX_VALUE (type)
12531 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12532 return omit_one_operand_loc (loc, type, arg1, arg0);
12533 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12534 if (tem)
12535 return tem;
12536 goto associate;
12537
12538 case TRUTH_ANDIF_EXPR:
12539 /* Note that the operands of this must be ints
12540 and their values must be 0 or 1.
12541 ("true" is a fixed value perhaps depending on the language.) */
12542 /* If first arg is constant zero, return it. */
12543 if (integer_zerop (arg0))
12544 return fold_convert_loc (loc, type, arg0);
12545 case TRUTH_AND_EXPR:
12546 /* If either arg is constant true, drop it. */
12547 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12548 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12549 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12550 /* Preserve sequence points. */
12551 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12552 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12553 /* If second arg is constant zero, result is zero, but first arg
12554 must be evaluated. */
12555 if (integer_zerop (arg1))
12556 return omit_one_operand_loc (loc, type, arg1, arg0);
12557 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12558 case will be handled here. */
12559 if (integer_zerop (arg0))
12560 return omit_one_operand_loc (loc, type, arg0, arg1);
12561
12562 /* !X && X is always false. */
12563 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12564 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12565 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12566 /* X && !X is always false. */
12567 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12568 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12569 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12570
12571 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12572 means A >= Y && A != MAX, but in this case we know that
12573 A < X <= MAX. */
12574
12575 if (!TREE_SIDE_EFFECTS (arg0)
12576 && !TREE_SIDE_EFFECTS (arg1))
12577 {
12578 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12579 if (tem && !operand_equal_p (tem, arg0, 0))
12580 return fold_build2_loc (loc, code, type, tem, arg1);
12581
12582 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12583 if (tem && !operand_equal_p (tem, arg1, 0))
12584 return fold_build2_loc (loc, code, type, arg0, tem);
12585 }
12586
12587 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12588 != NULL_TREE)
12589 return tem;
12590
12591 return NULL_TREE;
12592
12593 case TRUTH_ORIF_EXPR:
12594 /* Note that the operands of this must be ints
12595 and their values must be 0 or true.
12596 ("true" is a fixed value perhaps depending on the language.) */
12597 /* If first arg is constant true, return it. */
12598 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12599 return fold_convert_loc (loc, type, arg0);
12600 case TRUTH_OR_EXPR:
12601 /* If either arg is constant zero, drop it. */
12602 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12603 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12604 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12605 /* Preserve sequence points. */
12606 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12607 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12608 /* If second arg is constant true, result is true, but we must
12609 evaluate first arg. */
12610 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12611 return omit_one_operand_loc (loc, type, arg1, arg0);
12612 /* Likewise for first arg, but note this only occurs here for
12613 TRUTH_OR_EXPR. */
12614 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12615 return omit_one_operand_loc (loc, type, arg0, arg1);
12616
12617 /* !X || X is always true. */
12618 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12619 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12620 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12621 /* X || !X is always true. */
12622 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12623 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12624 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12625
12626 /* (X && !Y) || (!X && Y) is X ^ Y */
12627 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12628 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12629 {
12630 tree a0, a1, l0, l1, n0, n1;
12631
12632 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12633 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12634
12635 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12636 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12637
12638 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12639 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12640
12641 if ((operand_equal_p (n0, a0, 0)
12642 && operand_equal_p (n1, a1, 0))
12643 || (operand_equal_p (n0, a1, 0)
12644 && operand_equal_p (n1, a0, 0)))
12645 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12646 }
12647
12648 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12649 != NULL_TREE)
12650 return tem;
12651
12652 return NULL_TREE;
12653
12654 case TRUTH_XOR_EXPR:
12655 /* If the second arg is constant zero, drop it. */
12656 if (integer_zerop (arg1))
12657 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12658 /* If the second arg is constant true, this is a logical inversion. */
12659 if (integer_onep (arg1))
12660 {
12661 /* Only call invert_truthvalue if operand is a truth value. */
12662 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12663 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12664 else
12665 tem = invert_truthvalue_loc (loc, arg0);
12666 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12667 }
12668 /* Identical arguments cancel to zero. */
12669 if (operand_equal_p (arg0, arg1, 0))
12670 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12671
12672 /* !X ^ X is always true. */
12673 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12674 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12675 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12676
12677 /* X ^ !X is always true. */
12678 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12679 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12680 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12681
12682 return NULL_TREE;
12683
12684 case EQ_EXPR:
12685 case NE_EXPR:
12686 STRIP_NOPS (arg0);
12687 STRIP_NOPS (arg1);
12688
12689 tem = fold_comparison (loc, code, type, op0, op1);
12690 if (tem != NULL_TREE)
12691 return tem;
12692
12693 /* bool_var != 0 becomes bool_var. */
12694 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12695 && code == NE_EXPR)
12696 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12697
12698 /* bool_var == 1 becomes bool_var. */
12699 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12700 && code == EQ_EXPR)
12701 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12702
12703 /* bool_var != 1 becomes !bool_var. */
12704 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12705 && code == NE_EXPR)
12706 return fold_convert_loc (loc, type,
12707 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12708 TREE_TYPE (arg0), arg0));
12709
12710 /* bool_var == 0 becomes !bool_var. */
12711 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12712 && code == EQ_EXPR)
12713 return fold_convert_loc (loc, type,
12714 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12715 TREE_TYPE (arg0), arg0));
12716
12717 /* !exp != 0 becomes !exp */
12718 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12719 && code == NE_EXPR)
12720 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12721
12722 /* If this is an equality comparison of the address of two non-weak,
12723 unaliased symbols neither of which are extern (since we do not
12724 have access to attributes for externs), then we know the result. */
12725 if (TREE_CODE (arg0) == ADDR_EXPR
12726 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12727 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12728 && ! lookup_attribute ("alias",
12729 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12730 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12731 && TREE_CODE (arg1) == ADDR_EXPR
12732 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12733 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12734 && ! lookup_attribute ("alias",
12735 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12736 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12737 {
12738 /* We know that we're looking at the address of two
12739 non-weak, unaliased, static _DECL nodes.
12740
12741 It is both wasteful and incorrect to call operand_equal_p
12742 to compare the two ADDR_EXPR nodes. It is wasteful in that
12743 all we need to do is test pointer equality for the arguments
12744 to the two ADDR_EXPR nodes. It is incorrect to use
12745 operand_equal_p as that function is NOT equivalent to a
12746 C equality test. It can in fact return false for two
12747 objects which would test as equal using the C equality
12748 operator. */
12749 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12750 return constant_boolean_node (equal
12751 ? code == EQ_EXPR : code != EQ_EXPR,
12752 type);
12753 }
12754
12755 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12756 a MINUS_EXPR of a constant, we can convert it into a comparison with
12757 a revised constant as long as no overflow occurs. */
12758 if (TREE_CODE (arg1) == INTEGER_CST
12759 && (TREE_CODE (arg0) == PLUS_EXPR
12760 || TREE_CODE (arg0) == MINUS_EXPR)
12761 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12762 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12763 ? MINUS_EXPR : PLUS_EXPR,
12764 fold_convert_loc (loc, TREE_TYPE (arg0),
12765 arg1),
12766 TREE_OPERAND (arg0, 1)))
12767 && !TREE_OVERFLOW (tem))
12768 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12769
12770 /* Similarly for a NEGATE_EXPR. */
12771 if (TREE_CODE (arg0) == NEGATE_EXPR
12772 && TREE_CODE (arg1) == INTEGER_CST
12773 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12774 arg1)))
12775 && TREE_CODE (tem) == INTEGER_CST
12776 && !TREE_OVERFLOW (tem))
12777 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12778
12779 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
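/* Illustrative example (constants assumed): (x ^ 5) == 3
   becomes x == (5 ^ 3), i.e. x == 6.  */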
12780 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12781 && TREE_CODE (arg1) == INTEGER_CST
12782 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12783 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12784 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12785 fold_convert_loc (loc,
12786 TREE_TYPE (arg0),
12787 arg1),
12788 TREE_OPERAND (arg0, 1)));
12789
12790 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12791 if ((TREE_CODE (arg0) == PLUS_EXPR
12792 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12793 || TREE_CODE (arg0) == MINUS_EXPR)
12794 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12795 0)),
12796 arg1, 0)
12797 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12798 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12799 {
12800 tree val = TREE_OPERAND (arg0, 1);
12801 return omit_two_operands_loc (loc, type,
12802 fold_build2_loc (loc, code, type,
12803 val,
12804 build_int_cst (TREE_TYPE (val),
12805 0)),
12806 TREE_OPERAND (arg0, 0), arg1);
12807 }
12808
12809 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
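/* If C is odd, C - X == X would require C == 2 * X, which is
   even, so the comparison is known at compile time: != folds to
   true and == folds to false, keeping any side effects.  */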
12810 if (TREE_CODE (arg0) == MINUS_EXPR
12811 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12812 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12813 1)),
12814 arg1, 0)
12815 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12816 {
12817 return omit_two_operands_loc (loc, type,
12818 code == NE_EXPR
12819 ? boolean_true_node : boolean_false_node,
12820 TREE_OPERAND (arg0, 1), arg1);
12821 }
12822
12823 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12824 for !=. Don't do this for ordered comparisons due to overflow. */
12825 if (TREE_CODE (arg0) == MINUS_EXPR
12826 && integer_zerop (arg1))
12827 return fold_build2_loc (loc, code, type,
12828 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12829
12830 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12831 if (TREE_CODE (arg0) == ABS_EXPR
12832 && (integer_zerop (arg1) || real_zerop (arg1)))
12833 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12834
12835 /* If this is an EQ or NE comparison with zero and ARG0 is
12836 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12837 two operations, but the latter can be done in one less insn
12838 on machines that have only two-operand insns or on which a
12839 constant cannot be the first operand. */
12840 if (TREE_CODE (arg0) == BIT_AND_EXPR
12841 && integer_zerop (arg1))
12842 {
12843 tree arg00 = TREE_OPERAND (arg0, 0);
12844 tree arg01 = TREE_OPERAND (arg0, 1);
12845 if (TREE_CODE (arg00) == LSHIFT_EXPR
12846 && integer_onep (TREE_OPERAND (arg00, 0)))
12847 {
12848 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12849 arg01, TREE_OPERAND (arg00, 1));
12850 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12851 build_int_cst (TREE_TYPE (arg0), 1));
12852 return fold_build2_loc (loc, code, type,
12853 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12854 arg1);
12855 }
12856 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12857 && integer_onep (TREE_OPERAND (arg01, 0)))
12858 {
12859 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12860 arg00, TREE_OPERAND (arg01, 1));
12861 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12862 build_int_cst (TREE_TYPE (arg0), 1));
12863 return fold_build2_loc (loc, code, type,
12864 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12865 arg1);
12866 }
12867 }
12868
12869 /* If this is an NE or EQ comparison of zero against the result of a
12870 signed MOD operation whose second operand is a power of 2, make
12871 the MOD operation unsigned since it is simpler and equivalent. */
12872 if (integer_zerop (arg1)
12873 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12874 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12875 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12876 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12877 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12878 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12879 {
12880 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12881 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12882 fold_convert_loc (loc, newtype,
12883 TREE_OPERAND (arg0, 0)),
12884 fold_convert_loc (loc, newtype,
12885 TREE_OPERAND (arg0, 1)));
12886
12887 return fold_build2_loc (loc, code, type, newmod,
12888 fold_convert_loc (loc, newtype, arg1));
12889 }
12890
12891 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12892 C1 is a valid shift constant, and C2 is a power of two, i.e.
12893 a single bit. */
12894 if (TREE_CODE (arg0) == BIT_AND_EXPR
12895 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12896 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12897 == INTEGER_CST
12898 && integer_pow2p (TREE_OPERAND (arg0, 1))
12899 && integer_zerop (arg1))
12900 {
12901 tree itype = TREE_TYPE (arg0);
12902 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12903 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12904
12905 /* Check for a valid shift count. */
12906 if (TREE_INT_CST_HIGH (arg001) == 0
12907 && TREE_INT_CST_LOW (arg001) < prec)
12908 {
12909 tree arg01 = TREE_OPERAND (arg0, 1);
12910 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12911 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12912 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12913 can be rewritten as (X & (C2 << C1)) != 0. */
12914 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12915 {
12916 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12917 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12918 return fold_build2_loc (loc, code, type, tem,
12919 fold_convert_loc (loc, itype, arg1));
12920 }
12921 /* Otherwise, for signed (arithmetic) shifts,
12922 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12923 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12924 else if (!TYPE_UNSIGNED (itype))
12925 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12926 arg000, build_int_cst (itype, 0));
/* Otherwise, for unsigned (logical) shifts,
12928 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12929 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12930 else
12931 return omit_one_operand_loc (loc, type,
12932 code == EQ_EXPR ? integer_one_node
12933 : integer_zero_node,
12934 arg000);
12935 }
12936 }
12937
12938 /* If we have (A & C) == C where C is a power of 2, convert this into
12939 (A & C) != 0. Similarly for NE_EXPR. */
12940 if (TREE_CODE (arg0) == BIT_AND_EXPR
12941 && integer_pow2p (TREE_OPERAND (arg0, 1))
12942 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12943 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12944 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12945 integer_zero_node));
12946
12947 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12948 bit, then fold the expression into A < 0 or A >= 0. */
12949 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12950 if (tem)
12951 return tem;
12952
12953 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12954 Similarly for NE_EXPR. */
12955 if (TREE_CODE (arg0) == BIT_AND_EXPR
12956 && TREE_CODE (arg1) == INTEGER_CST
12957 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12958 {
12959 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12960 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12961 TREE_OPERAND (arg0, 1));
12962 tree dandnotc
12963 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12964 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12965 notc);
12966 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12967 if (integer_nonzerop (dandnotc))
12968 return omit_one_operand_loc (loc, type, rslt, arg0);
12969 }
12970
12971 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12972 Similarly for NE_EXPR. */
12973 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12974 && TREE_CODE (arg1) == INTEGER_CST
12975 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12976 {
12977 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12978 tree candnotd
12979 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12980 TREE_OPERAND (arg0, 1),
12981 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12982 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12983 if (integer_nonzerop (candnotd))
12984 return omit_one_operand_loc (loc, type, rslt, arg0);
12985 }
12986
12987 /* If this is a comparison of a field, we may be able to simplify it. */
12988 if ((TREE_CODE (arg0) == COMPONENT_REF
12989 || TREE_CODE (arg0) == BIT_FIELD_REF)
12990 /* Handle the constant case even without -O
12991 to make sure the warnings are given. */
12992 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12993 {
12994 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12995 if (t1)
12996 return t1;
12997 }
12998
12999 /* Optimize comparisons of strlen vs zero to a compare of the
13000 first character of the string vs zero. To wit,
13001 strlen(ptr) == 0 => *ptr == 0
13002 strlen(ptr) != 0 => *ptr != 0
13003 Other cases should reduce to one of these two (or a constant)
13004 due to the return value of strlen being unsigned. */
13005 if (TREE_CODE (arg0) == CALL_EXPR
13006 && integer_zerop (arg1))
13007 {
13008 tree fndecl = get_callee_fndecl (arg0);
13009
13010 if (fndecl
13011 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13012 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13013 && call_expr_nargs (arg0) == 1
13014 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13015 {
13016 tree iref = build_fold_indirect_ref_loc (loc,
13017 CALL_EXPR_ARG (arg0, 0));
13018 return fold_build2_loc (loc, code, type, iref,
13019 build_int_cst (TREE_TYPE (iref), 0));
13020 }
13021 }
13022
13023 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13024 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13025 if (TREE_CODE (arg0) == RSHIFT_EXPR
13026 && integer_zerop (arg1)
13027 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13028 {
13029 tree arg00 = TREE_OPERAND (arg0, 0);
13030 tree arg01 = TREE_OPERAND (arg0, 1);
13031 tree itype = TREE_TYPE (arg00);
13032 if (TREE_INT_CST_HIGH (arg01) == 0
13033 && TREE_INT_CST_LOW (arg01)
13034 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13035 {
13036 if (TYPE_UNSIGNED (itype))
13037 {
13038 itype = signed_type_for (itype);
13039 arg00 = fold_convert_loc (loc, itype, arg00);
13040 }
13041 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13042 type, arg00, build_zero_cst (itype));
13043 }
13044 }
13045
13046 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13047 if (integer_zerop (arg1)
13048 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13049 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13050 TREE_OPERAND (arg0, 1));
13051
13052 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13053 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13054 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13055 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13056 build_zero_cst (TREE_TYPE (arg0)));
13057 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13058 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13059 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13060 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13061 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13062 build_zero_cst (TREE_TYPE (arg0)));
13063
13064 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
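/* For instance, (x ^ 5) == 3 becomes x == 6, since 5 ^ 3 is 6. */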
13065 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13066 && TREE_CODE (arg1) == INTEGER_CST
13067 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13068 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13069 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13070 TREE_OPERAND (arg0, 1), arg1));
13071
13072 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13073 (X & C) == 0 when C is a single bit. */
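/* For instance, (~x & 4) == 0 becomes (x & 4) != 0. */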
13074 if (TREE_CODE (arg0) == BIT_AND_EXPR
13075 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13076 && integer_zerop (arg1)
13077 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13078 {
13079 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13080 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13081 TREE_OPERAND (arg0, 1));
13082 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13083 type, tem,
13084 fold_convert_loc (loc, TREE_TYPE (arg0),
13085 arg1));
13086 }
13087
13088 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13089 constant C is a power of two, i.e. a single bit. */
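/* For instance, ((x & 4) ^ 4) == 0 becomes (x & 4) != 0. */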
13090 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13091 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13092 && integer_zerop (arg1)
13093 && integer_pow2p (TREE_OPERAND (arg0, 1))
13094 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13095 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13096 {
13097 tree arg00 = TREE_OPERAND (arg0, 0);
13098 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13099 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13100 }
13101
13102 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13103 when C is a power of two, i.e. a single bit. */
13104 if (TREE_CODE (arg0) == BIT_AND_EXPR
13105 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13106 && integer_zerop (arg1)
13107 && integer_pow2p (TREE_OPERAND (arg0, 1))
13108 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13109 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13110 {
13111 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13112 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13113 arg000, TREE_OPERAND (arg0, 1));
13114 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13115 tem, build_int_cst (TREE_TYPE (tem), 0));
13116 }
13117
13118 if (integer_zerop (arg1)
13119 && tree_expr_nonzero_p (arg0))
13120 {
13121 tree res = constant_boolean_node (code == NE_EXPR, type);
13122 return omit_one_operand_loc (loc, type, res, arg0);
13123 }
13124
13125 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13126 if (TREE_CODE (arg0) == NEGATE_EXPR
13127 && TREE_CODE (arg1) == NEGATE_EXPR)
13128 return fold_build2_loc (loc, code, type,
13129 TREE_OPERAND (arg0, 0),
13130 fold_convert_loc (loc, TREE_TYPE (arg0),
13131 TREE_OPERAND (arg1, 0)));
13132
13133 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
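/* For instance, (x & m) == (y & m) becomes ((x ^ y) & m) == 0, testing
that x and y agree in the bits selected by m. */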
13134 if (TREE_CODE (arg0) == BIT_AND_EXPR
13135 && TREE_CODE (arg1) == BIT_AND_EXPR)
13136 {
13137 tree arg00 = TREE_OPERAND (arg0, 0);
13138 tree arg01 = TREE_OPERAND (arg0, 1);
13139 tree arg10 = TREE_OPERAND (arg1, 0);
13140 tree arg11 = TREE_OPERAND (arg1, 1);
13141 tree itype = TREE_TYPE (arg0);
13142
13143 if (operand_equal_p (arg01, arg11, 0))
13144 return fold_build2_loc (loc, code, type,
13145 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13146 fold_build2_loc (loc,
13147 BIT_XOR_EXPR, itype,
13148 arg00, arg10),
13149 arg01),
13150 build_zero_cst (itype));
13151
13152 if (operand_equal_p (arg01, arg10, 0))
13153 return fold_build2_loc (loc, code, type,
13154 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13155 fold_build2_loc (loc,
13156 BIT_XOR_EXPR, itype,
13157 arg00, arg11),
13158 arg01),
13159 build_zero_cst (itype));
13160
13161 if (operand_equal_p (arg00, arg11, 0))
13162 return fold_build2_loc (loc, code, type,
13163 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13164 fold_build2_loc (loc,
13165 BIT_XOR_EXPR, itype,
13166 arg01, arg10),
13167 arg00),
13168 build_zero_cst (itype));
13169
13170 if (operand_equal_p (arg00, arg10, 0))
13171 return fold_build2_loc (loc, code, type,
13172 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13173 fold_build2_loc (loc,
13174 BIT_XOR_EXPR, itype,
13175 arg01, arg11),
13176 arg00),
13177 build_zero_cst (itype));
13178 }
13179
13180 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13181 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13182 {
13183 tree arg00 = TREE_OPERAND (arg0, 0);
13184 tree arg01 = TREE_OPERAND (arg0, 1);
13185 tree arg10 = TREE_OPERAND (arg1, 0);
13186 tree arg11 = TREE_OPERAND (arg1, 1);
13187 tree itype = TREE_TYPE (arg0);
13188
13189 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13190 operand_equal_p guarantees no side-effects so we don't need
13191 to use omit_one_operand on Z. */
13192 if (operand_equal_p (arg01, arg11, 0))
13193 return fold_build2_loc (loc, code, type, arg00,
13194 fold_convert_loc (loc, TREE_TYPE (arg00),
13195 arg10));
13196 if (operand_equal_p (arg01, arg10, 0))
13197 return fold_build2_loc (loc, code, type, arg00,
13198 fold_convert_loc (loc, TREE_TYPE (arg00),
13199 arg11));
13200 if (operand_equal_p (arg00, arg11, 0))
13201 return fold_build2_loc (loc, code, type, arg01,
13202 fold_convert_loc (loc, TREE_TYPE (arg01),
13203 arg10));
13204 if (operand_equal_p (arg00, arg10, 0))
13205 return fold_build2_loc (loc, code, type, arg01,
13206 fold_convert_loc (loc, TREE_TYPE (arg01),
13207 arg11));
13208
13209 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
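/* For instance, (x ^ 1) == (y ^ 3) becomes (x ^ 2) == y, since
1 ^ 3 is 2. */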
13210 if (TREE_CODE (arg01) == INTEGER_CST
13211 && TREE_CODE (arg11) == INTEGER_CST)
13212 {
13213 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13214 fold_convert_loc (loc, itype, arg11));
13215 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13216 return fold_build2_loc (loc, code, type, tem,
13217 fold_convert_loc (loc, itype, arg10));
13218 }
13219 }
13220
13221 /* Attempt to simplify equality/inequality comparisons of complex
13222 values. Only lower the comparison if the result is known or
13223 can be simplified to a single scalar comparison. */
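/* For instance, when the real parts are the same expression,
COMPLEX_EXPR <a, b> == COMPLEX_EXPR <a, c> reduces to b == c. */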
13224 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13225 || TREE_CODE (arg0) == COMPLEX_CST)
13226 && (TREE_CODE (arg1) == COMPLEX_EXPR
13227 || TREE_CODE (arg1) == COMPLEX_CST))
13228 {
13229 tree real0, imag0, real1, imag1;
13230 tree rcond, icond;
13231
13232 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13233 {
13234 real0 = TREE_OPERAND (arg0, 0);
13235 imag0 = TREE_OPERAND (arg0, 1);
13236 }
13237 else
13238 {
13239 real0 = TREE_REALPART (arg0);
13240 imag0 = TREE_IMAGPART (arg0);
13241 }
13242
13243 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13244 {
13245 real1 = TREE_OPERAND (arg1, 0);
13246 imag1 = TREE_OPERAND (arg1, 1);
13247 }
13248 else
13249 {
13250 real1 = TREE_REALPART (arg1);
13251 imag1 = TREE_IMAGPART (arg1);
13252 }
13253
13254 rcond = fold_binary_loc (loc, code, type, real0, real1);
13255 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13256 {
13257 if (integer_zerop (rcond))
13258 {
13259 if (code == EQ_EXPR)
13260 return omit_two_operands_loc (loc, type, boolean_false_node,
13261 imag0, imag1);
13262 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13263 }
13264 else
13265 {
13266 if (code == NE_EXPR)
13267 return omit_two_operands_loc (loc, type, boolean_true_node,
13268 imag0, imag1);
13269 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13270 }
13271 }
13272
13273 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13274 if (icond && TREE_CODE (icond) == INTEGER_CST)
13275 {
13276 if (integer_zerop (icond))
13277 {
13278 if (code == EQ_EXPR)
13279 return omit_two_operands_loc (loc, type, boolean_false_node,
13280 real0, real1);
13281 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13282 }
13283 else
13284 {
13285 if (code == NE_EXPR)
13286 return omit_two_operands_loc (loc, type, boolean_true_node,
13287 real0, real1);
13288 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13289 }
13290 }
13291 }
13292
13293 return NULL_TREE;
13294
13295 case LT_EXPR:
13296 case GT_EXPR:
13297 case LE_EXPR:
13298 case GE_EXPR:
13299 tem = fold_comparison (loc, code, type, op0, op1);
13300 if (tem != NULL_TREE)
13301 return tem;
13302
13303 /* Transform comparisons of the form X +- C CMP X. */
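/* For instance, for signed x with undefined overflow, (x - 1) > x folds
to false and (x + 1) > x folds to true, each emitting a strict-overflow
warning when one is requested. */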
13304 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13305 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13306 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13307 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13308 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13309 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13310 {
13311 tree arg01 = TREE_OPERAND (arg0, 1);
13312 enum tree_code code0 = TREE_CODE (arg0);
13313 int is_positive;
13314
13315 if (TREE_CODE (arg01) == REAL_CST)
13316 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13317 else
13318 is_positive = tree_int_cst_sgn (arg01);
13319
13320 /* (X - c) > X becomes false. */
13321 if (code == GT_EXPR
13322 && ((code0 == MINUS_EXPR && is_positive >= 0)
13323 || (code0 == PLUS_EXPR && is_positive <= 0)))
13324 {
13325 if (TREE_CODE (arg01) == INTEGER_CST
13326 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13327 fold_overflow_warning (("assuming signed overflow does not "
13328 "occur when assuming that (X - c) > X "
13329 "is always false"),
13330 WARN_STRICT_OVERFLOW_ALL);
13331 return constant_boolean_node (0, type);
13332 }
13333
13334 /* Likewise (X + c) < X becomes false. */
13335 if (code == LT_EXPR
13336 && ((code0 == PLUS_EXPR && is_positive >= 0)
13337 || (code0 == MINUS_EXPR && is_positive <= 0)))
13338 {
13339 if (TREE_CODE (arg01) == INTEGER_CST
13340 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13341 fold_overflow_warning (("assuming signed overflow does not "
13342 "occur when assuming that "
13343 "(X + c) < X is always false"),
13344 WARN_STRICT_OVERFLOW_ALL);
13345 return constant_boolean_node (0, type);
13346 }
13347
13348 /* Convert (X - c) <= X to true. */
13349 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13350 && code == LE_EXPR
13351 && ((code0 == MINUS_EXPR && is_positive >= 0)
13352 || (code0 == PLUS_EXPR && is_positive <= 0)))
13353 {
13354 if (TREE_CODE (arg01) == INTEGER_CST
13355 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13356 fold_overflow_warning (("assuming signed overflow does not "
13357 "occur when assuming that "
13358 "(X - c) <= X is always true"),
13359 WARN_STRICT_OVERFLOW_ALL);
13360 return constant_boolean_node (1, type);
13361 }
13362
13363 /* Convert (X + c) >= X to true. */
13364 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13365 && code == GE_EXPR
13366 && ((code0 == PLUS_EXPR && is_positive >= 0)
13367 || (code0 == MINUS_EXPR && is_positive <= 0)))
13368 {
13369 if (TREE_CODE (arg01) == INTEGER_CST
13370 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13371 fold_overflow_warning (("assuming signed overflow does not "
13372 "occur when assuming that "
13373 "(X + c) >= X is always true"),
13374 WARN_STRICT_OVERFLOW_ALL);
13375 return constant_boolean_node (1, type);
13376 }
13377
13378 if (TREE_CODE (arg01) == INTEGER_CST)
13379 {
13380 /* Convert X + c > X and X - c < X to true for integers. */
13381 if (code == GT_EXPR
13382 && ((code0 == PLUS_EXPR && is_positive > 0)
13383 || (code0 == MINUS_EXPR && is_positive < 0)))
13384 {
13385 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13386 fold_overflow_warning (("assuming signed overflow does "
13387 "not occur when assuming that "
13388 "(X + c) > X is always true"),
13389 WARN_STRICT_OVERFLOW_ALL);
13390 return constant_boolean_node (1, type);
13391 }
13392
13393 if (code == LT_EXPR
13394 && ((code0 == MINUS_EXPR && is_positive > 0)
13395 || (code0 == PLUS_EXPR && is_positive < 0)))
13396 {
13397 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13398 fold_overflow_warning (("assuming signed overflow does "
13399 "not occur when assuming that "
13400 "(X - c) < X is always true"),
13401 WARN_STRICT_OVERFLOW_ALL);
13402 return constant_boolean_node (1, type);
13403 }
13404
13405 /* Convert X + c <= X and X - c >= X to false for integers. */
13406 if (code == LE_EXPR
13407 && ((code0 == PLUS_EXPR && is_positive > 0)
13408 || (code0 == MINUS_EXPR && is_positive < 0)))
13409 {
13410 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13411 fold_overflow_warning (("assuming signed overflow does "
13412 "not occur when assuming that "
13413 "(X + c) <= X is always false"),
13414 WARN_STRICT_OVERFLOW_ALL);
13415 return constant_boolean_node (0, type);
13416 }
13417
13418 if (code == GE_EXPR
13419 && ((code0 == MINUS_EXPR && is_positive > 0)
13420 || (code0 == PLUS_EXPR && is_positive < 0)))
13421 {
13422 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13423 fold_overflow_warning (("assuming signed overflow does "
13424 "not occur when assuming that "
13425 "(X - c) >= X is always false"),
13426 WARN_STRICT_OVERFLOW_ALL);
13427 return constant_boolean_node (0, type);
13428 }
13429 }
13430 }
13431
13432 /* Comparisons with the highest or lowest possible integer of
13433 the specified precision will have known values. */
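/* For instance, if x and the constant both have type unsigned char,
x > 255 folds to false, x <= 255 folds to true, and x >= 255 becomes
x == 255. */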
13434 {
13435 tree arg1_type = TREE_TYPE (arg1);
13436 unsigned int width = TYPE_PRECISION (arg1_type);
13437
13438 if (TREE_CODE (arg1) == INTEGER_CST
13439 && width <= HOST_BITS_PER_DOUBLE_INT
13440 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13441 {
13442 HOST_WIDE_INT signed_max_hi;
13443 unsigned HOST_WIDE_INT signed_max_lo;
13444 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13445
13446 if (width <= HOST_BITS_PER_WIDE_INT)
13447 {
13448 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13449 - 1;
13450 signed_max_hi = 0;
13451 max_hi = 0;
13452
13453 if (TYPE_UNSIGNED (arg1_type))
13454 {
13455 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13456 min_lo = 0;
13457 min_hi = 0;
13458 }
13459 else
13460 {
13461 max_lo = signed_max_lo;
13462 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13463 min_hi = -1;
13464 }
13465 }
13466 else
13467 {
13468 width -= HOST_BITS_PER_WIDE_INT;
13469 signed_max_lo = -1;
13470 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13471 - 1;
13472 max_lo = -1;
13473 min_lo = 0;
13474
13475 if (TYPE_UNSIGNED (arg1_type))
13476 {
13477 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13478 min_hi = 0;
13479 }
13480 else
13481 {
13482 max_hi = signed_max_hi;
13483 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13484 }
13485 }
13486
13487 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13488 && TREE_INT_CST_LOW (arg1) == max_lo)
13489 switch (code)
13490 {
13491 case GT_EXPR:
13492 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13493
13494 case GE_EXPR:
13495 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13496
13497 case LE_EXPR:
13498 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13499
13500 case LT_EXPR:
13501 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13502
13503 /* The GE_EXPR and LT_EXPR cases above are not normally
13504 reached because of previous transformations. */
13505
13506 default:
13507 break;
13508 }
13509 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13510 == max_hi
13511 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13512 switch (code)
13513 {
13514 case GT_EXPR:
13515 arg1 = const_binop (PLUS_EXPR, arg1,
13516 build_int_cst (TREE_TYPE (arg1), 1));
13517 return fold_build2_loc (loc, EQ_EXPR, type,
13518 fold_convert_loc (loc,
13519 TREE_TYPE (arg1), arg0),
13520 arg1);
13521 case LE_EXPR:
13522 arg1 = const_binop (PLUS_EXPR, arg1,
13523 build_int_cst (TREE_TYPE (arg1), 1));
13524 return fold_build2_loc (loc, NE_EXPR, type,
13525 fold_convert_loc (loc, TREE_TYPE (arg1),
13526 arg0),
13527 arg1);
13528 default:
13529 break;
13530 }
13531 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13532 == min_hi
13533 && TREE_INT_CST_LOW (arg1) == min_lo)
13534 switch (code)
13535 {
13536 case LT_EXPR:
13537 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13538
13539 case LE_EXPR:
13540 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13541
13542 case GE_EXPR:
13543 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13544
13545 case GT_EXPR:
13546 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13547
13548 default:
13549 break;
13550 }
13551 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13552 == min_hi
13553 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13554 switch (code)
13555 {
13556 case GE_EXPR:
13557 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13558 return fold_build2_loc (loc, NE_EXPR, type,
13559 fold_convert_loc (loc,
13560 TREE_TYPE (arg1), arg0),
13561 arg1);
13562 case LT_EXPR:
13563 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13564 return fold_build2_loc (loc, EQ_EXPR, type,
13565 fold_convert_loc (loc, TREE_TYPE (arg1),
13566 arg0),
13567 arg1);
13568 default:
13569 break;
13570 }
13571
13572 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13573 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13574 && TYPE_UNSIGNED (arg1_type)
13575 /* We will flip the signedness of the comparison operator
13576 associated with the mode of arg1, so the sign bit is
13577 specified by this mode. Check that arg1 is the signed
13578 max associated with this sign bit. */
13579 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13580 /* signed_type does not work on pointer types. */
13581 && INTEGRAL_TYPE_P (arg1_type))
13582 {
13583 /* The following case also applies to X < signed_max+1
13584 and X >= signed_max+1 because of previous transformations. */
13585 if (code == LE_EXPR || code == GT_EXPR)
13586 {
13587 tree st;
13588 st = signed_type_for (TREE_TYPE (arg1));
13589 return fold_build2_loc (loc,
13590 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13591 type, fold_convert_loc (loc, st, arg0),
13592 build_int_cst (st, 0));
13593 }
13594 }
13595 }
13596 }
13597
13598 /* If we are comparing an ABS_EXPR with a constant, we can
13599 convert all the cases into explicit comparisons, but they may
13600 well not be faster than doing the ABS and one comparison.
13601 But ABS (X) <= C is a range comparison, which becomes a subtraction
13602 and a comparison, and is probably faster. */
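/* For instance, abs (x) <= 5 becomes x >= -5 && x <= 5. */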
13603 if (code == LE_EXPR
13604 && TREE_CODE (arg1) == INTEGER_CST
13605 && TREE_CODE (arg0) == ABS_EXPR
13606 && ! TREE_SIDE_EFFECTS (arg0)
13607 && (0 != (tem = negate_expr (arg1)))
13608 && TREE_CODE (tem) == INTEGER_CST
13609 && !TREE_OVERFLOW (tem))
13610 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13611 build2 (GE_EXPR, type,
13612 TREE_OPERAND (arg0, 0), tem),
13613 build2 (LE_EXPR, type,
13614 TREE_OPERAND (arg0, 0), arg1));
13615
13616 /* Convert ABS_EXPR<x> >= 0 to true. */
13617 strict_overflow_p = false;
13618 if (code == GE_EXPR
13619 && (integer_zerop (arg1)
13620 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13621 && real_zerop (arg1)))
13622 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13623 {
13624 if (strict_overflow_p)
13625 fold_overflow_warning (("assuming signed overflow does not occur "
13626 "when simplifying comparison of "
13627 "absolute value and zero"),
13628 WARN_STRICT_OVERFLOW_CONDITIONAL);
13629 return omit_one_operand_loc (loc, type,
13630 constant_boolean_node (true, type),
13631 arg0);
13632 }
13633
13634 /* Convert ABS_EXPR<x> < 0 to false. */
13635 strict_overflow_p = false;
13636 if (code == LT_EXPR
13637 && (integer_zerop (arg1) || real_zerop (arg1))
13638 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13639 {
13640 if (strict_overflow_p)
13641 fold_overflow_warning (("assuming signed overflow does not occur "
13642 "when simplifying comparison of "
13643 "absolute value and zero"),
13644 WARN_STRICT_OVERFLOW_CONDITIONAL);
13645 return omit_one_operand_loc (loc, type,
13646 constant_boolean_node (false, type),
13647 arg0);
13648 }
13649
13650 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13651 and similarly for >= into !=. */
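/* For instance, for unsigned x, x < (1 << y) becomes (x >> y) == 0 and
x >= (1 << y) becomes (x >> y) != 0. */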
13652 if ((code == LT_EXPR || code == GE_EXPR)
13653 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13654 && TREE_CODE (arg1) == LSHIFT_EXPR
13655 && integer_onep (TREE_OPERAND (arg1, 0)))
13656 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13657 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13658 TREE_OPERAND (arg1, 1)),
13659 build_zero_cst (TREE_TYPE (arg0)));
13660
13661 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13662 otherwise Y might be >= # of bits in X's type and thus e.g.
13663 (unsigned char) (1 << Y) for Y == 15 might be 0.
13664 If the cast is widening, then 1 << Y should have unsigned type,
13665 otherwise if Y is the number of bits in the signed shift type minus 1,
13666 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y == 31
13667 might be 0xffffffff80000000. */
13668 if ((code == LT_EXPR || code == GE_EXPR)
13669 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13670 && CONVERT_EXPR_P (arg1)
13671 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13672 && (TYPE_PRECISION (TREE_TYPE (arg1))
13673 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13674 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13675 || (TYPE_PRECISION (TREE_TYPE (arg1))
13676 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13677 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13678 {
13679 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13680 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13681 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13682 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13683 build_zero_cst (TREE_TYPE (arg0)));
13684 }
13685
13686 return NULL_TREE;
13687
13688 case UNORDERED_EXPR:
13689 case ORDERED_EXPR:
13690 case UNLT_EXPR:
13691 case UNLE_EXPR:
13692 case UNGT_EXPR:
13693 case UNGE_EXPR:
13694 case UNEQ_EXPR:
13695 case LTGT_EXPR:
13696 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13697 {
13698 t1 = fold_relational_const (code, type, arg0, arg1);
13699 if (t1 != NULL_TREE)
13700 return t1;
13701 }
13702
13703 /* If the first operand is NaN, the result is constant. */
13704 if (TREE_CODE (arg0) == REAL_CST
13705 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13706 && (code != LTGT_EXPR || ! flag_trapping_math))
13707 {
13708 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13709 ? integer_zero_node
13710 : integer_one_node;
13711 return omit_one_operand_loc (loc, type, t1, arg1);
13712 }
13713
13714 /* If the second operand is NaN, the result is constant. */
13715 if (TREE_CODE (arg1) == REAL_CST
13716 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13717 && (code != LTGT_EXPR || ! flag_trapping_math))
13718 {
13719 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13720 ? integer_zero_node
13721 : integer_one_node;
13722 return omit_one_operand_loc (loc, type, t1, arg0);
13723 }
13724
13725 /* Simplify unordered comparison of something with itself. */
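/* For instance, x UNLE x, x UNGE x and x UNEQ x all fold to true whether
or not x is a NaN, while x LTGT x folds to false when -ftrapping-math is
disabled. */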
13726 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13727 && operand_equal_p (arg0, arg1, 0))
13728 return constant_boolean_node (1, type);
13729
13730 if (code == LTGT_EXPR
13731 && !flag_trapping_math
13732 && operand_equal_p (arg0, arg1, 0))
13733 return constant_boolean_node (0, type);
13734
13735 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13736 {
13737 tree targ0 = strip_float_extensions (arg0);
13738 tree targ1 = strip_float_extensions (arg1);
13739 tree newtype = TREE_TYPE (targ0);
13740
13741 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13742 newtype = TREE_TYPE (targ1);
13743
13744 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13745 return fold_build2_loc (loc, code, type,
13746 fold_convert_loc (loc, newtype, targ0),
13747 fold_convert_loc (loc, newtype, targ1));
13748 }
13749
13750 return NULL_TREE;
13751
13752 case COMPOUND_EXPR:
13753 /* When pedantic, a compound expression can be neither an lvalue
13754 nor an integer constant expression. */
13755 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13756 return NULL_TREE;
13757 /* Don't let (0, 0) be a null pointer constant. */
13758 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13759 : fold_convert_loc (loc, type, arg1);
13760 return pedantic_non_lvalue_loc (loc, tem);
13761
13762 case COMPLEX_EXPR:
13763 if ((TREE_CODE (arg0) == REAL_CST
13764 && TREE_CODE (arg1) == REAL_CST)
13765 || (TREE_CODE (arg0) == INTEGER_CST
13766 && TREE_CODE (arg1) == INTEGER_CST))
13767 return build_complex (type, arg0, arg1);
13768 if (TREE_CODE (arg0) == REALPART_EXPR
13769 && TREE_CODE (arg1) == IMAGPART_EXPR
13770 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13771 && operand_equal_p (TREE_OPERAND (arg0, 0),
13772 TREE_OPERAND (arg1, 0), 0))
13773 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13774 TREE_OPERAND (arg1, 0));
13775 return NULL_TREE;
13776
13777 case ASSERT_EXPR:
13778 /* An ASSERT_EXPR should never be passed to fold_binary. */
13779 gcc_unreachable ();
13780
13781 case VEC_PACK_TRUNC_EXPR:
13782 case VEC_PACK_FIX_TRUNC_EXPR:
13783 {
13784 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13785 tree *elts;
13786
13787 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13788 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13789 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13790 return NULL_TREE;
13791
13792 elts = XALLOCAVEC (tree, nelts);
13793 if (!vec_cst_ctor_to_array (arg0, elts)
13794 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13795 return NULL_TREE;
13796
13797 for (i = 0; i < nelts; i++)
13798 {
13799 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13800 ? NOP_EXPR : FIX_TRUNC_EXPR,
13801 TREE_TYPE (type), elts[i]);
13802 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13803 return NULL_TREE;
13804 }
13805
13806 return build_vector (type, elts);
13807 }
13808
13809 case VEC_WIDEN_MULT_LO_EXPR:
13810 case VEC_WIDEN_MULT_HI_EXPR:
13811 case VEC_WIDEN_MULT_EVEN_EXPR:
13812 case VEC_WIDEN_MULT_ODD_EXPR:
13813 {
13814 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13815 unsigned int out, ofs, scale;
13816 tree *elts;
13817
13818 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13819 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13820 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13821 return NULL_TREE;
13822
13823 elts = XALLOCAVEC (tree, nelts * 4);
13824 if (!vec_cst_ctor_to_array (arg0, elts)
13825 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13826 return NULL_TREE;
13827
13828 if (code == VEC_WIDEN_MULT_LO_EXPR)
13829 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13830 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13831 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13832 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13833 scale = 1, ofs = 0;
13834 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13835 scale = 1, ofs = 1;
13836
13837 for (out = 0; out < nelts; out++)
13838 {
13839 unsigned int in1 = (out << scale) + ofs;
13840 unsigned int in2 = in1 + nelts * 2;
13841 tree t1, t2;
13842
13843 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13844 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13845
13846 if (t1 == NULL_TREE || t2 == NULL_TREE)
13847 return NULL_TREE;
13848 elts[out] = const_binop (MULT_EXPR, t1, t2);
13849 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13850 return NULL_TREE;
13851 }
13852
13853 return build_vector (type, elts);
13854 }
13855
13856 default:
13857 return NULL_TREE;
13858 } /* switch (code) */
13859 }
13860
13861 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13862 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13863 of GOTO_EXPR. */
13864
13865 static tree
13866 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13867 {
13868 switch (TREE_CODE (*tp))
13869 {
13870 case LABEL_EXPR:
13871 return *tp;
13872
13873 case GOTO_EXPR:
13874 *walk_subtrees = 0;
13875
13876 /* ... fall through ... */
13877
13878 default:
13879 return NULL_TREE;
13880 }
13881 }
13882
13883 /* Return whether the sub-tree ST contains a label which is accessible from
13884 outside the sub-tree. */
13885
13886 static bool
13887 contains_label_p (tree st)
13888 {
13889 return
13890 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13891 }
13892
13893 /* Fold a ternary expression of code CODE and type TYPE with operands
13894 OP0, OP1, and OP2. Return the folded expression if folding is
13895 successful. Otherwise, return NULL_TREE. */
13896
13897 tree
13898 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13899 tree op0, tree op1, tree op2)
13900 {
13901 tree tem;
13902 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13903 enum tree_code_class kind = TREE_CODE_CLASS (code);
13904
13905 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13906 && TREE_CODE_LENGTH (code) == 3);
13907
13908 /* Strip any conversions that don't change the mode. This is safe
13909 for every expression, except for a comparison expression because
13910 its signedness is derived from its operands. So, in the latter
13911 case, only strip conversions that don't change the signedness.
13912
13913 Note that this is done as an internal manipulation within the
13914 constant folder, in order to find the simplest representation of
13915 the arguments so that their form can be studied. In any case,
13916 the appropriate type conversions should be put back in the tree
13917 that will get out of the constant folder. */
13918 if (op0)
13919 {
13920 arg0 = op0;
13921 STRIP_NOPS (arg0);
13922 }
13923
13924 if (op1)
13925 {
13926 arg1 = op1;
13927 STRIP_NOPS (arg1);
13928 }
13929
13930 if (op2)
13931 {
13932 arg2 = op2;
13933 STRIP_NOPS (arg2);
13934 }
13935
13936 switch (code)
13937 {
13938 case COMPONENT_REF:
13939 if (TREE_CODE (arg0) == CONSTRUCTOR
13940 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13941 {
13942 unsigned HOST_WIDE_INT idx;
13943 tree field, value;
13944 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13945 if (field == arg1)
13946 return value;
13947 }
13948 return NULL_TREE;
13949
13950 case COND_EXPR:
13951 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13952 so all simple results must be passed through pedantic_non_lvalue. */
13953 if (TREE_CODE (arg0) == INTEGER_CST)
13954 {
13955 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13956 tem = integer_zerop (arg0) ? op2 : op1;
13957 /* Only optimize constant conditions when the selected branch
13958 has the same type as the COND_EXPR. This avoids optimizing
13959 away "c ? x : throw", where the throw has a void type.
13960 Avoid throwing away an operand that contains a label. */
13961 if ((!TREE_SIDE_EFFECTS (unused_op)
13962 || !contains_label_p (unused_op))
13963 && (! VOID_TYPE_P (TREE_TYPE (tem))
13964 || VOID_TYPE_P (type)))
13965 return pedantic_non_lvalue_loc (loc, tem);
13966 return NULL_TREE;
13967 }
13968 if (operand_equal_p (arg1, op2, 0))
13969 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13970
13971 /* If we have A op B ? A : C, we may be able to convert this to a
13972 simpler expression, depending on the operation and the values
13973 of B and C. Signed zeros prevent all of these transformations,
13974 for reasons given above each one.
13975
13976 Also try swapping the arguments and inverting the conditional. */
13977 if (COMPARISON_CLASS_P (arg0)
13978 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13979 arg1, TREE_OPERAND (arg0, 1))
13980 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13981 {
13982 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13983 if (tem)
13984 return tem;
13985 }
13986
13987 if (COMPARISON_CLASS_P (arg0)
13988 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13989 op2,
13990 TREE_OPERAND (arg0, 1))
13991 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13992 {
13993 location_t loc0 = expr_location_or (arg0, loc);
13994 tem = fold_truth_not_expr (loc0, arg0);
13995 if (tem && COMPARISON_CLASS_P (tem))
13996 {
13997 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13998 if (tem)
13999 return tem;
14000 }
14001 }
14002
14003 /* If the second operand is simpler than the third, swap them
14004 since that produces better jump optimization results. */
14005 if (truth_value_p (TREE_CODE (arg0))
14006 && tree_swap_operands_p (op1, op2, false))
14007 {
14008 location_t loc0 = expr_location_or (arg0, loc);
14009 /* See if this can be inverted. If it can't, possibly because
14010 it was a floating-point inequality comparison, don't do
14011 anything. */
14012 tem = fold_truth_not_expr (loc0, arg0);
14013 if (tem)
14014 return fold_build3_loc (loc, code, type, tem, op2, op1);
14015 }
14016
14017 /* Convert A ? 1 : 0 to simply A. */
14018 if (integer_onep (op1)
14019 && integer_zerop (op2)
14020 /* If we try to convert OP0 to our type, the
14021 call to fold will try to move the conversion inside
14022 a COND, which will recurse. In that case, the COND_EXPR
14023 is probably the best choice, so leave it alone. */
14024 && type == TREE_TYPE (arg0))
14025 return pedantic_non_lvalue_loc (loc, arg0);
14026
14027 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14028 over COND_EXPR in cases such as floating point comparisons. */
14029 if (integer_zerop (op1)
14030 && integer_onep (op2)
14031 && truth_value_p (TREE_CODE (arg0)))
14032 return pedantic_non_lvalue_loc (loc,
14033 fold_convert_loc (loc, type,
14034 invert_truthvalue_loc (loc,
14035 arg0)));
14036
14037 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
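/* For instance, for a 32-bit int x, x < 0 ? INT_MIN : 0 folds to
x & INT_MIN, INT_MIN being the sign-bit constant. */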
14038 if (TREE_CODE (arg0) == LT_EXPR
14039 && integer_zerop (TREE_OPERAND (arg0, 1))
14040 && integer_zerop (op2)
14041 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14042 {
14043 /* sign_bit_p only checks ARG1 bits within A's precision.
14044 If <sign bit of A> has wider type than A, bits outside
14045 of A's precision in <sign bit of A> need to be checked.
14046 If they are all 0, this optimization needs to be done
14047 in unsigned A's type; if they are all 1, in signed A's type;
14048 otherwise this can't be done. */
14049 if (TYPE_PRECISION (TREE_TYPE (tem))
14050 < TYPE_PRECISION (TREE_TYPE (arg1))
14051 && TYPE_PRECISION (TREE_TYPE (tem))
14052 < TYPE_PRECISION (type))
14053 {
14054 unsigned HOST_WIDE_INT mask_lo;
14055 HOST_WIDE_INT mask_hi;
14056 int inner_width, outer_width;
14057 tree tem_type;
14058
14059 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14060 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14061 if (outer_width > TYPE_PRECISION (type))
14062 outer_width = TYPE_PRECISION (type);
14063
14064 if (outer_width > HOST_BITS_PER_WIDE_INT)
14065 {
14066 mask_hi = ((unsigned HOST_WIDE_INT) -1
14067 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14068 mask_lo = -1;
14069 }
14070 else
14071 {
14072 mask_hi = 0;
14073 mask_lo = ((unsigned HOST_WIDE_INT) -1
14074 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14075 }
14076 if (inner_width > HOST_BITS_PER_WIDE_INT)
14077 {
14078 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
14079 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14080 mask_lo = 0;
14081 }
14082 else
14083 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
14084 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14085
14086 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14087 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14088 {
14089 tem_type = signed_type_for (TREE_TYPE (tem));
14090 tem = fold_convert_loc (loc, tem_type, tem);
14091 }
14092 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14093 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14094 {
14095 tem_type = unsigned_type_for (TREE_TYPE (tem));
14096 tem = fold_convert_loc (loc, tem_type, tem);
14097 }
14098 else
14099 tem = NULL;
14100 }
14101
14102 if (tem)
14103 return
14104 fold_convert_loc (loc, type,
14105 fold_build2_loc (loc, BIT_AND_EXPR,
14106 TREE_TYPE (tem), tem,
14107 fold_convert_loc (loc,
14108 TREE_TYPE (tem),
14109 arg1)));
14110 }
14111
14112 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14113 already handled above. */
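/* For instance, ((a >> 3) & 1) ? 8 : 0 becomes a & 8. */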
14114 if (TREE_CODE (arg0) == BIT_AND_EXPR
14115 && integer_onep (TREE_OPERAND (arg0, 1))
14116 && integer_zerop (op2)
14117 && integer_pow2p (arg1))
14118 {
14119 tree tem = TREE_OPERAND (arg0, 0);
14120 STRIP_NOPS (tem);
14121 if (TREE_CODE (tem) == RSHIFT_EXPR
14122 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14123 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14124 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14125 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14126 TREE_OPERAND (tem, 0), arg1);
14127 }
14128
14129 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14130 is probably obsolete because the first operand should be a
14131 truth value (that's why we have the two cases above), but let's
14132 leave it in until we can confirm this for all front-ends. */
14133 if (integer_zerop (op2)
14134 && TREE_CODE (arg0) == NE_EXPR
14135 && integer_zerop (TREE_OPERAND (arg0, 1))
14136 && integer_pow2p (arg1)
14137 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14138 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14139 arg1, OEP_ONLY_CONST))
14140 return pedantic_non_lvalue_loc (loc,
14141 fold_convert_loc (loc, type,
14142 TREE_OPERAND (arg0, 0)));
14143
14144 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14145 if (integer_zerop (op2)
14146 && truth_value_p (TREE_CODE (arg0))
14147 && truth_value_p (TREE_CODE (arg1)))
14148 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14149 fold_convert_loc (loc, type, arg0),
14150 arg1);
14151
14152 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14153 if (integer_onep (op2)
14154 && truth_value_p (TREE_CODE (arg0))
14155 && truth_value_p (TREE_CODE (arg1)))
14156 {
14157 location_t loc0 = expr_location_or (arg0, loc);
14158 /* Only perform transformation if ARG0 is easily inverted. */
14159 tem = fold_truth_not_expr (loc0, arg0);
14160 if (tem)
14161 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14162 fold_convert_loc (loc, type, tem),
14163 arg1);
14164 }
14165
14166 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14167 if (integer_zerop (arg1)
14168 && truth_value_p (TREE_CODE (arg0))
14169 && truth_value_p (TREE_CODE (op2)))
14170 {
14171 location_t loc0 = expr_location_or (arg0, loc);
14172 /* Only perform transformation if ARG0 is easily inverted. */
14173 tem = fold_truth_not_expr (loc0, arg0);
14174 if (tem)
14175 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14176 fold_convert_loc (loc, type, tem),
14177 op2);
14178 }
14179
14180 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14181 if (integer_onep (arg1)
14182 && truth_value_p (TREE_CODE (arg0))
14183 && truth_value_p (TREE_CODE (op2)))
14184 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14185 fold_convert_loc (loc, type, arg0),
14186 op2);
14187
14188 return NULL_TREE;
14189
14190 case VEC_COND_EXPR:
14191 if (TREE_CODE (arg0) == VECTOR_CST)
14192 {
14193 if (integer_all_onesp (arg0) && !TREE_SIDE_EFFECTS (op2))
14194 return pedantic_non_lvalue_loc (loc, op1);
14195 if (integer_zerop (arg0) && !TREE_SIDE_EFFECTS (op1))
14196 return pedantic_non_lvalue_loc (loc, op2);
14197 }
14198 return NULL_TREE;
14199
14200 case CALL_EXPR:
14201 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14202 of fold_ternary on them. */
14203 gcc_unreachable ();
14204
14205 case BIT_FIELD_REF:
14206 if ((TREE_CODE (arg0) == VECTOR_CST
14207 || (TREE_CODE (arg0) == CONSTRUCTOR
14208 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14209 && (type == TREE_TYPE (TREE_TYPE (arg0))
14210 || (TREE_CODE (type) == VECTOR_TYPE
14211 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14212 {
14213 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14214 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14215 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14216 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14217
14218 if (n != 0
14219 && (idx % width) == 0
14220 && (n % width) == 0
14221 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14222 {
14223 idx = idx / width;
14224 n = n / width;
14225
14226 if (TREE_CODE (arg0) == VECTOR_CST)
14227 {
14228 if (n == 1)
14229 return VECTOR_CST_ELT (arg0, idx);
14230
14231 tree *vals = XALLOCAVEC (tree, n);
14232 for (unsigned i = 0; i < n; ++i)
14233 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14234 return build_vector (type, vals);
14235 }
14236
14237 /* Constructor elements can be subvectors. */
14238 unsigned HOST_WIDE_INT k = 1;
14239 if (CONSTRUCTOR_NELTS (arg0) != 0)
14240 {
14241 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14242 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14243 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14244 }
14245
14246 /* We keep an exact subset of the constructor elements. */
14247 if ((idx % k) == 0 && (n % k) == 0)
14248 {
14249 if (CONSTRUCTOR_NELTS (arg0) == 0)
14250 return build_constructor (type, NULL);
14251 idx /= k;
14252 n /= k;
14253 if (n == 1)
14254 {
14255 if (idx < CONSTRUCTOR_NELTS (arg0))
14256 return CONSTRUCTOR_ELT (arg0, idx)->value;
14257 return build_zero_cst (type);
14258 }
14259
14260 vec<constructor_elt, va_gc> *vals;
14261 vec_alloc (vals, n);
14262 for (unsigned i = 0;
14263 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14264 ++i)
14265 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14266 CONSTRUCTOR_ELT
14267 (arg0, idx + i)->value);
14268 return build_constructor (type, vals);
14269 }
14270 /* The bitfield references a single constructor element. */
14271 else if (idx + n <= (idx / k + 1) * k)
14272 {
14273 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14274 return build_zero_cst (type);
14275 else if (n == k)
14276 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14277 else
14278 return fold_build3_loc (loc, code, type,
14279 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14280 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14281 }
14282 }
14283 }
14284
14285 /* A bit-field-ref that referenced the full argument can be stripped. */
14286 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14287 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14288 && integer_zerop (op2))
14289 return fold_convert_loc (loc, type, arg0);
14290
14291 /* On constants we can use native encode/interpret to constant
14292 fold (nearly) all BIT_FIELD_REFs. */
14293 if (CONSTANT_CLASS_P (arg0)
14294 && can_native_interpret_type_p (type)
14295 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14296 /* This limitation should not be necessary; we just need to
14297 round this up to the mode size. */
14298 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14299 /* Need bit-shifting of the buffer to relax the following. */
14300 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14301 {
14302 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14303 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14304 unsigned HOST_WIDE_INT clen;
14305 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14306 /* ??? We cannot tell native_encode_expr to start at
14307 some random byte only. So limit us to a reasonable amount
14308 of work. */
14309 if (clen <= 4096)
14310 {
14311 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14312 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14313 if (len > 0
14314 && len * BITS_PER_UNIT >= bitpos + bitsize)
14315 {
14316 tree v = native_interpret_expr (type,
14317 b + bitpos / BITS_PER_UNIT,
14318 bitsize / BITS_PER_UNIT);
14319 if (v)
14320 return v;
14321 }
14322 }
14323 }
14324
14325 return NULL_TREE;
14326
14327 case FMA_EXPR:
14328 /* For integers we can decompose the FMA if possible. */
14329 if (TREE_CODE (arg0) == INTEGER_CST
14330 && TREE_CODE (arg1) == INTEGER_CST)
14331 return fold_build2_loc (loc, PLUS_EXPR, type,
14332 const_binop (MULT_EXPR, arg0, arg1), arg2);
14333 if (integer_zerop (arg2))
14334 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
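/* For instance, FMA_EXPR <2, 3, z> folds to 6 + z, and
FMA_EXPR <x, y, 0> folds to x * y. */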
14335
14336 return fold_fma (loc, type, arg0, arg1, arg2);
14337
14338 case VEC_PERM_EXPR:
14339 if (TREE_CODE (arg2) == VECTOR_CST)
14340 {
14341 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14342 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14343 tree t;
14344 bool need_mask_canon = false;
14345 bool all_in_vec0 = true;
14346 bool all_in_vec1 = true;
14347 bool maybe_identity = true;
14348 bool single_arg = (op0 == op1);
14349 bool changed = false;
14350
14351 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14352 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14353 for (i = 0; i < nelts; i++)
14354 {
14355 tree val = VECTOR_CST_ELT (arg2, i);
14356 if (TREE_CODE (val) != INTEGER_CST)
14357 return NULL_TREE;
14358
14359 sel[i] = TREE_INT_CST_LOW (val) & mask;
14360 if (TREE_INT_CST_HIGH (val)
14361 || ((unsigned HOST_WIDE_INT)
14362 TREE_INT_CST_LOW (val) != sel[i]))
14363 need_mask_canon = true;
14364
14365 if (sel[i] < nelts)
14366 all_in_vec1 = false;
14367 else
14368 all_in_vec0 = false;
14369
14370 if ((sel[i] & (nelts-1)) != i)
14371 maybe_identity = false;
14372 }
14373
14374 if (maybe_identity)
14375 {
14376 if (all_in_vec0)
14377 return op0;
14378 if (all_in_vec1)
14379 return op1;
14380 }
14381
14382 if (all_in_vec0)
14383 op1 = op0;
14384 else if (all_in_vec1)
14385 {
14386 op0 = op1;
14387 for (i = 0; i < nelts; i++)
14388 sel[i] -= nelts;
14389 need_mask_canon = true;
14390 }
14391
14392 if ((TREE_CODE (op0) == VECTOR_CST
14393 || TREE_CODE (op0) == CONSTRUCTOR)
14394 && (TREE_CODE (op1) == VECTOR_CST
14395 || TREE_CODE (op1) == CONSTRUCTOR))
14396 {
14397 t = fold_vec_perm (type, op0, op1, sel);
14398 if (t != NULL_TREE)
14399 return t;
14400 }
14401
14402 if (op0 == op1 && !single_arg)
14403 changed = true;
14404
14405 if (need_mask_canon && arg2 == op2)
14406 {
14407 tree *tsel = XALLOCAVEC (tree, nelts);
14408 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14409 for (i = 0; i < nelts; i++)
14410 tsel[i] = build_int_cst (eltype, sel[i]);
14411 op2 = build_vector (TREE_TYPE (arg2), tsel);
14412 changed = true;
14413 }
14414
14415 if (changed)
14416 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14417 }
14418 return NULL_TREE;
14419
14420 default:
14421 return NULL_TREE;
14422 } /* switch (code) */
14423 }
14424
14425 /* Perform constant folding and related simplification of EXPR.
14426 The related simplifications include x*1 => x, x*0 => 0, etc.,
14427 and application of the associative law.
14428 NOP_EXPR conversions may be removed freely (as long as we
14429 are careful not to change the type of the overall expression).
14430 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14431 but we can constant-fold them if they have constant operands. */
14432
14433 #ifdef ENABLE_FOLD_CHECKING
14434 # define fold(x) fold_1 (x)
14435 static tree fold_1 (tree);
14436 static
14437 #endif
14438 tree
14439 fold (tree expr)
14440 {
14441 const tree t = expr;
14442 enum tree_code code = TREE_CODE (t);
14443 enum tree_code_class kind = TREE_CODE_CLASS (code);
14444 tree tem;
14445 location_t loc = EXPR_LOCATION (expr);
14446
14447 /* Return right away if a constant. */
14448 if (kind == tcc_constant)
14449 return t;
14450
14451 /* CALL_EXPR-like objects with variable numbers of operands are
14452 treated specially. */
14453 if (kind == tcc_vl_exp)
14454 {
14455 if (code == CALL_EXPR)
14456 {
14457 tem = fold_call_expr (loc, expr, false);
14458 return tem ? tem : expr;
14459 }
14460 return expr;
14461 }
14462
14463 if (IS_EXPR_CODE_CLASS (kind))
14464 {
14465 tree type = TREE_TYPE (t);
14466 tree op0, op1, op2;
14467
14468 switch (TREE_CODE_LENGTH (code))
14469 {
14470 case 1:
14471 op0 = TREE_OPERAND (t, 0);
14472 tem = fold_unary_loc (loc, code, type, op0);
14473 return tem ? tem : expr;
14474 case 2:
14475 op0 = TREE_OPERAND (t, 0);
14476 op1 = TREE_OPERAND (t, 1);
14477 tem = fold_binary_loc (loc, code, type, op0, op1);
14478 return tem ? tem : expr;
14479 case 3:
14480 op0 = TREE_OPERAND (t, 0);
14481 op1 = TREE_OPERAND (t, 1);
14482 op2 = TREE_OPERAND (t, 2);
14483 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14484 return tem ? tem : expr;
14485 default:
14486 break;
14487 }
14488 }
14489
14490 switch (code)
14491 {
14492 case ARRAY_REF:
14493 {
14494 tree op0 = TREE_OPERAND (t, 0);
14495 tree op1 = TREE_OPERAND (t, 1);
14496
14497 if (TREE_CODE (op1) == INTEGER_CST
14498 && TREE_CODE (op0) == CONSTRUCTOR
14499 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14500 {
14501 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14502 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14503 unsigned HOST_WIDE_INT begin = 0;
14504
14505 /* Find a matching index by means of a binary search. */
14506 while (begin != end)
14507 {
14508 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14509 tree index = (*elts)[middle].index;
14510
14511 if (TREE_CODE (index) == INTEGER_CST
14512 && tree_int_cst_lt (index, op1))
14513 begin = middle + 1;
14514 else if (TREE_CODE (index) == INTEGER_CST
14515 && tree_int_cst_lt (op1, index))
14516 end = middle;
14517 else if (TREE_CODE (index) == RANGE_EXPR
14518 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14519 begin = middle + 1;
14520 else if (TREE_CODE (index) == RANGE_EXPR
14521 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14522 end = middle;
14523 else
14524 return (*elts)[middle].value;
14525 }
14526 }
14527
14528 return t;
14529 }
14530
14531 /* Return a VECTOR_CST if possible. */
14532 case CONSTRUCTOR:
14533 {
14534 tree type = TREE_TYPE (t);
14535 if (TREE_CODE (type) != VECTOR_TYPE)
14536 return t;
14537
14538 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14539 unsigned HOST_WIDE_INT idx, pos = 0;
14540 tree value;
14541
14542 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14543 {
14544 if (!CONSTANT_CLASS_P (value))
14545 return t;
14546 if (TREE_CODE (value) == VECTOR_CST)
14547 {
14548 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14549 vec[pos++] = VECTOR_CST_ELT (value, i);
14550 }
14551 else
14552 vec[pos++] = value;
14553 }
14554 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14555 vec[pos] = build_zero_cst (TREE_TYPE (type));
14556
14557 return build_vector (type, vec);
14558 }
14559
14560 case CONST_DECL:
14561 return fold (DECL_INITIAL (t));
14562
14563 default:
14564 return t;
14565 } /* switch (code) */
14566 }
14567
14568 #ifdef ENABLE_FOLD_CHECKING
14569 #undef fold
14570
14571 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14572 hash_table <pointer_hash <tree_node> >);
14573 static void fold_check_failed (const_tree, const_tree);
14574 void print_fold_checksum (const_tree);
14575
14576 /* When --enable-checking=fold, compute a digest of expr before
14577 and after the actual fold call to verify that fold did not
14578 accidentally change the original expr. */
14579
14580 tree
14581 fold (tree expr)
14582 {
14583 tree ret;
14584 struct md5_ctx ctx;
14585 unsigned char checksum_before[16], checksum_after[16];
14586 hash_table <pointer_hash <tree_node> > ht;
14587
14588 ht.create (32);
14589 md5_init_ctx (&ctx);
14590 fold_checksum_tree (expr, &ctx, ht);
14591 md5_finish_ctx (&ctx, checksum_before);
14592 ht.empty ();
14593
14594 ret = fold_1 (expr);
14595
14596 md5_init_ctx (&ctx);
14597 fold_checksum_tree (expr, &ctx, ht);
14598 md5_finish_ctx (&ctx, checksum_after);
14599 ht.dispose ();
14600
14601 if (memcmp (checksum_before, checksum_after, 16))
14602 fold_check_failed (expr, ret);
14603
14604 return ret;
14605 }
14606
14607 void
14608 print_fold_checksum (const_tree expr)
14609 {
14610 struct md5_ctx ctx;
14611 unsigned char checksum[16], cnt;
14612 hash_table <pointer_hash <tree_node> > ht;
14613
14614 ht.create (32);
14615 md5_init_ctx (&ctx);
14616 fold_checksum_tree (expr, &ctx, ht);
14617 md5_finish_ctx (&ctx, checksum);
14618 ht.dispose ();
14619 for (cnt = 0; cnt < 16; ++cnt)
14620 fprintf (stderr, "%02x", checksum[cnt]);
14621 putc ('\n', stderr);
14622 }
14623
14624 static void
14625 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14626 {
14627 internal_error ("fold check: original tree changed by fold");
14628 }
14629
14630 static void
14631 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14632 hash_table <pointer_hash <tree_node> > ht)
14633 {
14634 tree_node **slot;
14635 enum tree_code code;
14636 union tree_node buf;
14637 int i, len;
14638
14639 recursive_label:
14640 if (expr == NULL)
14641 return;
14642 slot = ht.find_slot (expr, INSERT);
14643 if (*slot != NULL)
14644 return;
14645 *slot = CONST_CAST_TREE (expr);
14646 code = TREE_CODE (expr);
14647 if (TREE_CODE_CLASS (code) == tcc_declaration
14648 && DECL_ASSEMBLER_NAME_SET_P (expr))
14649 {
14650 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14651 memcpy ((char *) &buf, expr, tree_size (expr));
14652 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14653 expr = (tree) &buf;
14654 }
14655 else if (TREE_CODE_CLASS (code) == tcc_type
14656 && (TYPE_POINTER_TO (expr)
14657 || TYPE_REFERENCE_TO (expr)
14658 || TYPE_CACHED_VALUES_P (expr)
14659 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14660 || TYPE_NEXT_VARIANT (expr)))
14661 {
14662 /* Allow these fields to be modified. */
14663 tree tmp;
14664 memcpy ((char *) &buf, expr, tree_size (expr));
14665 expr = tmp = (tree) &buf;
14666 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14667 TYPE_POINTER_TO (tmp) = NULL;
14668 TYPE_REFERENCE_TO (tmp) = NULL;
14669 TYPE_NEXT_VARIANT (tmp) = NULL;
14670 if (TYPE_CACHED_VALUES_P (tmp))
14671 {
14672 TYPE_CACHED_VALUES_P (tmp) = 0;
14673 TYPE_CACHED_VALUES (tmp) = NULL;
14674 }
14675 }
14676 md5_process_bytes (expr, tree_size (expr), ctx);
14677 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14678 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14679 if (TREE_CODE_CLASS (code) != tcc_type
14680 && TREE_CODE_CLASS (code) != tcc_declaration
14681 && code != TREE_LIST
14682 && code != SSA_NAME
14683 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14684 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14685 switch (TREE_CODE_CLASS (code))
14686 {
14687 case tcc_constant:
14688 switch (code)
14689 {
14690 case STRING_CST:
14691 md5_process_bytes (TREE_STRING_POINTER (expr),
14692 TREE_STRING_LENGTH (expr), ctx);
14693 break;
14694 case COMPLEX_CST:
14695 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14696 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14697 break;
14698 case VECTOR_CST:
14699 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14700 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14701 break;
14702 default:
14703 break;
14704 }
14705 break;
14706 case tcc_exceptional:
14707 switch (code)
14708 {
14709 case TREE_LIST:
14710 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14711 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14712 expr = TREE_CHAIN (expr);
14713 goto recursive_label;
14714 break;
14715 case TREE_VEC:
14716 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14717 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14718 break;
14719 default:
14720 break;
14721 }
14722 break;
14723 case tcc_expression:
14724 case tcc_reference:
14725 case tcc_comparison:
14726 case tcc_unary:
14727 case tcc_binary:
14728 case tcc_statement:
14729 case tcc_vl_exp:
14730 len = TREE_OPERAND_LENGTH (expr);
14731 for (i = 0; i < len; ++i)
14732 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14733 break;
14734 case tcc_declaration:
14735 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14736 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14737 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14738 {
14739 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14740 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14741 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14742 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14743 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14744 }
14745 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14746 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14747
14748 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14749 {
14750 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14751 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14752 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14753 }
14754 break;
14755 case tcc_type:
14756 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14757 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14758 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14759 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14760 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14761 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14762 if (INTEGRAL_TYPE_P (expr)
14763 || SCALAR_FLOAT_TYPE_P (expr))
14764 {
14765 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14766 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14767 }
14768 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14769 if (TREE_CODE (expr) == RECORD_TYPE
14770 || TREE_CODE (expr) == UNION_TYPE
14771 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14772 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14773 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14774 break;
14775 default:
14776 break;
14777 }
14778 }
14779
14780 /* Helper function for outputting the checksum of a tree T. When
14781 debugging with gdb, you can "define mynext" to be "next" followed
14782 by "call debug_fold_checksum (op0)", then just trace down till the
14783 outputs differ. */
14784
14785 DEBUG_FUNCTION void
14786 debug_fold_checksum (const_tree t)
14787 {
14788 int i;
14789 unsigned char checksum[16];
14790 struct md5_ctx ctx;
14791 hash_table <pointer_hash <tree_node> > ht;
14792 ht.create (32);
14793
14794 md5_init_ctx (&ctx);
14795 fold_checksum_tree (t, &ctx, ht);
14796 md5_finish_ctx (&ctx, checksum);
14797 ht.empty ();
14798
14799 for (i = 0; i < 16; i++)
14800 fprintf (stderr, "%d ", checksum[i]);
14801
14802 fprintf (stderr, "\n");
14803 }
14804
14805 #endif
14806
14807 /* Fold a unary tree expression with code CODE of type TYPE with an
14808 operand OP0. LOC is the location of the resulting expression.
14809 Return a folded expression if successful. Otherwise, return a tree
14810 expression with code CODE of type TYPE with an operand OP0. */
14811
14812 tree
14813 fold_build1_stat_loc (location_t loc,
14814 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14815 {
14816 tree tem;
14817 #ifdef ENABLE_FOLD_CHECKING
14818 unsigned char checksum_before[16], checksum_after[16];
14819 struct md5_ctx ctx;
14820 hash_table <pointer_hash <tree_node> > ht;
14821
14822 ht.create (32);
14823 md5_init_ctx (&ctx);
14824 fold_checksum_tree (op0, &ctx, ht);
14825 md5_finish_ctx (&ctx, checksum_before);
14826 ht.empty ();
14827 #endif
14828
14829 tem = fold_unary_loc (loc, code, type, op0);
14830 if (!tem)
14831 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14832
14833 #ifdef ENABLE_FOLD_CHECKING
14834 md5_init_ctx (&ctx);
14835 fold_checksum_tree (op0, &ctx, ht);
14836 md5_finish_ctx (&ctx, checksum_after);
14837 ht.dispose ();
14838
14839 if (memcmp (checksum_before, checksum_after, 16))
14840 fold_check_failed (op0, tem);
14841 #endif
14842 return tem;
14843 }
14844
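/* Illustrative sketch, not part of the original file: a typical use of
   the machinery above through the fold_build1_loc macro.  The helper
   name and the constant are hypothetical.  */
#if 0
static tree
example_fold_negate (location_t loc)
{
  tree five = build_int_cst (integer_type_node, 5);

  /* fold_unary_loc simplifies NEGATE_EXPR of an INTEGER_CST, so this
     returns the constant -5 rather than a NEGATE_EXPR node.  */
  return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, five);
}
#endif
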
14845 /* Fold a binary tree expression with code CODE of type TYPE with
14846 operands OP0 and OP1. LOC is the location of the resulting
14847 expression. Return a folded expression if successful. Otherwise,
14848 return a tree expression with code CODE of type TYPE with operands
14849 OP0 and OP1. */
14850
14851 tree
14852 fold_build2_stat_loc (location_t loc,
14853 enum tree_code code, tree type, tree op0, tree op1
14854 MEM_STAT_DECL)
14855 {
14856 tree tem;
14857 #ifdef ENABLE_FOLD_CHECKING
14858 unsigned char checksum_before_op0[16],
14859 checksum_before_op1[16],
14860 checksum_after_op0[16],
14861 checksum_after_op1[16];
14862 struct md5_ctx ctx;
14863 hash_table <pointer_hash <tree_node> > ht;
14864
14865 ht.create (32);
14866 md5_init_ctx (&ctx);
14867 fold_checksum_tree (op0, &ctx, ht);
14868 md5_finish_ctx (&ctx, checksum_before_op0);
14869 ht.empty ();
14870
14871 md5_init_ctx (&ctx);
14872 fold_checksum_tree (op1, &ctx, ht);
14873 md5_finish_ctx (&ctx, checksum_before_op1);
14874 ht.empty ();
14875 #endif
14876
14877 tem = fold_binary_loc (loc, code, type, op0, op1);
14878 if (!tem)
14879 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14880
14881 #ifdef ENABLE_FOLD_CHECKING
14882 md5_init_ctx (&ctx);
14883 fold_checksum_tree (op0, &ctx, ht);
14884 md5_finish_ctx (&ctx, checksum_after_op0);
14885 ht.empty ();
14886
14887 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14888 fold_check_failed (op0, tem);
14889
14890 md5_init_ctx (&ctx);
14891 fold_checksum_tree (op1, &ctx, ht);
14892 md5_finish_ctx (&ctx, checksum_after_op1);
14893 ht.dispose ();
14894
14895 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14896 fold_check_failed (op1, tem);
14897 #endif
14898 return tem;
14899 }
14900
14901 /* Fold a ternary tree expression with code CODE of type TYPE with
14902 operands OP0, OP1, and OP2. Return a folded expression if
14903 successful. Otherwise, return a tree expression with code CODE of
14904 type TYPE with operands OP0, OP1, and OP2. */
14905
14906 tree
14907 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14908 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14909 {
14910 tree tem;
14911 #ifdef ENABLE_FOLD_CHECKING
14912 unsigned char checksum_before_op0[16],
14913 checksum_before_op1[16],
14914 checksum_before_op2[16],
14915 checksum_after_op0[16],
14916 checksum_after_op1[16],
14917 checksum_after_op2[16];
14918 struct md5_ctx ctx;
14919 hash_table <pointer_hash <tree_node> > ht;
14920
14921 ht.create (32);
14922 md5_init_ctx (&ctx);
14923 fold_checksum_tree (op0, &ctx, ht);
14924 md5_finish_ctx (&ctx, checksum_before_op0);
14925 ht.empty ();
14926
14927 md5_init_ctx (&ctx);
14928 fold_checksum_tree (op1, &ctx, ht);
14929 md5_finish_ctx (&ctx, checksum_before_op1);
14930 ht.empty ();
14931
14932 md5_init_ctx (&ctx);
14933 fold_checksum_tree (op2, &ctx, ht);
14934 md5_finish_ctx (&ctx, checksum_before_op2);
14935 ht.empty ();
14936 #endif
14937
14938 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14939 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14940 if (!tem)
14941 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14942
14943 #ifdef ENABLE_FOLD_CHECKING
14944 md5_init_ctx (&ctx);
14945 fold_checksum_tree (op0, &ctx, ht);
14946 md5_finish_ctx (&ctx, checksum_after_op0);
14947 ht.empty ();
14948
14949 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14950 fold_check_failed (op0, tem);
14951
14952 md5_init_ctx (&ctx);
14953 fold_checksum_tree (op1, &ctx, ht);
14954 md5_finish_ctx (&ctx, checksum_after_op1);
14955 ht.empty ();
14956
14957 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14958 fold_check_failed (op1, tem);
14959
14960 md5_init_ctx (&ctx);
14961 fold_checksum_tree (op2, &ctx, ht);
14962 md5_finish_ctx (&ctx, checksum_after_op2);
14963 ht.dispose ();
14964
14965 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14966 fold_check_failed (op2, tem);
14967 #endif
14968 return tem;
14969 }
14970
14971 /* Fold a CALL_EXPR of type TYPE calling function FN with the NARGS
14972    arguments in ARGARRAY, and a null static chain.
14973 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14974 of type TYPE from the given operands as constructed by build_call_array. */
14975
14976 tree
14977 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14978 int nargs, tree *argarray)
14979 {
14980 tree tem;
14981 #ifdef ENABLE_FOLD_CHECKING
14982 unsigned char checksum_before_fn[16],
14983 checksum_before_arglist[16],
14984 checksum_after_fn[16],
14985 checksum_after_arglist[16];
14986 struct md5_ctx ctx;
14987 hash_table <pointer_hash <tree_node> > ht;
14988 int i;
14989
14990 ht.create (32);
14991 md5_init_ctx (&ctx);
14992 fold_checksum_tree (fn, &ctx, ht);
14993 md5_finish_ctx (&ctx, checksum_before_fn);
14994 ht.empty ();
14995
14996 md5_init_ctx (&ctx);
14997 for (i = 0; i < nargs; i++)
14998 fold_checksum_tree (argarray[i], &ctx, ht);
14999 md5_finish_ctx (&ctx, checksum_before_arglist);
15000 ht.empty ();
15001 #endif
15002
15003 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15004
15005 #ifdef ENABLE_FOLD_CHECKING
15006 md5_init_ctx (&ctx);
15007 fold_checksum_tree (fn, &ctx, ht);
15008 md5_finish_ctx (&ctx, checksum_after_fn);
15009 ht.empty ();
15010
15011 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15012 fold_check_failed (fn, tem);
15013
15014 md5_init_ctx (&ctx);
15015 for (i = 0; i < nargs; i++)
15016 fold_checksum_tree (argarray[i], &ctx, ht);
15017 md5_finish_ctx (&ctx, checksum_after_arglist);
15018 ht.dispose ();
15019
15020 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15021 fold_check_failed (NULL_TREE, tem);
15022 #endif
15023 return tem;
15024 }
15025
15026 /* Perform constant folding and related simplification of initializer
15027 expression EXPR. These behave identically to "fold_buildN" but ignore
15028 potential run-time traps and exceptions that fold must preserve. */
15029
15030 #define START_FOLD_INIT \
15031 int saved_signaling_nans = flag_signaling_nans;\
15032 int saved_trapping_math = flag_trapping_math;\
15033 int saved_rounding_math = flag_rounding_math;\
15034 int saved_trapv = flag_trapv;\
15035 int saved_folding_initializer = folding_initializer;\
15036 flag_signaling_nans = 0;\
15037 flag_trapping_math = 0;\
15038 flag_rounding_math = 0;\
15039 flag_trapv = 0;\
15040 folding_initializer = 1;
15041
15042 #define END_FOLD_INIT \
15043 flag_signaling_nans = saved_signaling_nans;\
15044 flag_trapping_math = saved_trapping_math;\
15045 flag_rounding_math = saved_rounding_math;\
15046 flag_trapv = saved_trapv;\
15047 folding_initializer = saved_folding_initializer;
15048
15049 tree
15050 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15051 tree type, tree op)
15052 {
15053 tree result;
15054 START_FOLD_INIT;
15055
15056 result = fold_build1_loc (loc, code, type, op);
15057
15058 END_FOLD_INIT;
15059 return result;
15060 }
15061
15062 tree
15063 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15064 tree type, tree op0, tree op1)
15065 {
15066 tree result;
15067 START_FOLD_INIT;
15068
15069 result = fold_build2_loc (loc, code, type, op0, op1);
15070
15071 END_FOLD_INIT;
15072 return result;
15073 }
15074
15075 tree
15076 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15077 tree type, tree op0, tree op1, tree op2)
15078 {
15079 tree result;
15080 START_FOLD_INIT;
15081
15082 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15083
15084 END_FOLD_INIT;
15085 return result;
15086 }
15087
15088 tree
15089 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15090 int nargs, tree *argarray)
15091 {
15092 tree result;
15093 START_FOLD_INIT;
15094
15095 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15096
15097 END_FOLD_INIT;
15098 return result;
15099 }
15100
15101 #undef START_FOLD_INIT
15102 #undef END_FOLD_INIT
15103
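/* Illustrative sketch, not part of the original file: the initializer
   variants clear flag_trapping_math and friends around the fold, so an
   operation that fold would otherwise keep for its run-time trap can
   still be simplified.  The helper name is hypothetical.  */
#if 0
static tree
example_fold_initializer_rdiv (location_t loc)
{
  tree one = build_real (double_type_node, dconst1);
  tree zero = build_real (double_type_node, dconst0);

  /* With -ftrapping-math, fold_build2_loc keeps 1.0/0.0 as an
     RDIV_EXPR because it may trap at run time; the initializer
     variant folds it (to +Inf) since a static initializer is
     evaluated at compile time anyway.  */
  return fold_build2_initializer_loc (loc, RDIV_EXPR, double_type_node,
				      one, zero);
}
#endif
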
15104 /* Determine if first argument is a multiple of second argument. Return 0 if
15105    it is not, or we cannot easily determine it to be.
15106
15107 An example of the sort of thing we care about (at this point; this routine
15108 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15109 fold cases do now) is discovering that
15110
15111 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15112
15113 is a multiple of
15114
15115 SAVE_EXPR (J * 8)
15116
15117 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15118
15119 This code also handles discovering that
15120
15121 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15122
15123 is a multiple of 8 so we don't have to worry about dealing with a
15124 possible remainder.
15125
15126 Note that we *look* inside a SAVE_EXPR only to determine how it was
15127 calculated; it is not safe for fold to do much of anything else with the
15128 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15129 at run time. For example, the latter example above *cannot* be implemented
15130 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15131 evaluation time of the original SAVE_EXPR is not necessarily the same at
15132 the time the new expression is evaluated. The only optimization of this
15133 sort that would be valid is changing
15134
15135 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15136
15137 divided by 8 to
15138
15139 SAVE_EXPR (I) * SAVE_EXPR (J)
15140
15141 (where the same SAVE_EXPR (J) is used in the original and the
15142 transformed version). */
15143
15144 int
15145 multiple_of_p (tree type, const_tree top, const_tree bottom)
15146 {
15147 if (operand_equal_p (top, bottom, 0))
15148 return 1;
15149
15150 if (TREE_CODE (type) != INTEGER_TYPE)
15151 return 0;
15152
15153 switch (TREE_CODE (top))
15154 {
15155 case BIT_AND_EXPR:
15156 /* Bitwise and provides a power of two multiple. If the mask is
15157 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15158 if (!integer_pow2p (bottom))
15159 return 0;
15160 /* FALLTHRU */
15161
15162 case MULT_EXPR:
15163 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15164 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15165
15166 case PLUS_EXPR:
15167 case MINUS_EXPR:
15168 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15169 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15170
15171 case LSHIFT_EXPR:
15172 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15173 {
15174 tree op1, t1;
15175
15176 op1 = TREE_OPERAND (top, 1);
15177 /* const_binop may not detect overflow correctly,
15178 so check for it explicitly here. */
15179 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15180 > TREE_INT_CST_LOW (op1)
15181 && TREE_INT_CST_HIGH (op1) == 0
15182 && 0 != (t1 = fold_convert (type,
15183 const_binop (LSHIFT_EXPR,
15184 size_one_node,
15185 op1)))
15186 && !TREE_OVERFLOW (t1))
15187 return multiple_of_p (type, t1, bottom);
15188 }
15189 return 0;
15190
15191 case NOP_EXPR:
15192 /* Can't handle conversions from non-integral or wider integral type. */
15193 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15194 || (TYPE_PRECISION (type)
15195 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15196 return 0;
15197
15198 /* .. fall through ... */
15199
15200 case SAVE_EXPR:
15201 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15202
15203 case COND_EXPR:
15204 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15205 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15206
15207 case INTEGER_CST:
15208 if (TREE_CODE (bottom) != INTEGER_CST
15209 || integer_zerop (bottom)
15210 || (TYPE_UNSIGNED (type)
15211 && (tree_int_cst_sgn (top) < 0
15212 || tree_int_cst_sgn (bottom) < 0)))
15213 return 0;
15214 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15215 top, bottom));
15216
15217 default:
15218 return 0;
15219 }
15220 }
15221
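/* Illustrative sketch, not part of the original file: querying
   multiple_of_p.  I stands for an arbitrary sizetype expression; the
   helper name is hypothetical.  */
#if 0
static int
example_multiple_of_8 (tree i)
{
  /* I * 8 is a multiple of 8 through the MULT_EXPR case above, no
     matter what value I has.  */
  tree top = fold_build2 (MULT_EXPR, sizetype, i, size_int (8));
  return multiple_of_p (sizetype, top, size_int (8));	/* 1 */
}
#endif
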
15222 /* Return true if CODE or TYPE is known to be non-negative. */
15223
15224 static bool
15225 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15226 {
15227 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15228 && truth_value_p (code))
15229 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15230    have a signed:1 type (where the values are -1 and 0).  */
15231 return true;
15232 return false;
15233 }
15234
15235 /* Return true if (CODE OP0) is known to be non-negative. If the return
15236 value is based on the assumption that signed overflow is undefined,
15237 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15238 *STRICT_OVERFLOW_P. */
15239
15240 bool
15241 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15242 bool *strict_overflow_p)
15243 {
15244 if (TYPE_UNSIGNED (type))
15245 return true;
15246
15247 switch (code)
15248 {
15249 case ABS_EXPR:
15250 /* We can't return 1 if flag_wrapv is set because
15251 ABS_EXPR<INT_MIN> = INT_MIN. */
15252 if (!INTEGRAL_TYPE_P (type))
15253 return true;
15254 if (TYPE_OVERFLOW_UNDEFINED (type))
15255 {
15256 *strict_overflow_p = true;
15257 return true;
15258 }
15259 break;
15260
15261 case NON_LVALUE_EXPR:
15262 case FLOAT_EXPR:
15263 case FIX_TRUNC_EXPR:
15264 return tree_expr_nonnegative_warnv_p (op0,
15265 strict_overflow_p);
15266
15267 case NOP_EXPR:
15268 {
15269 tree inner_type = TREE_TYPE (op0);
15270 tree outer_type = type;
15271
15272 if (TREE_CODE (outer_type) == REAL_TYPE)
15273 {
15274 if (TREE_CODE (inner_type) == REAL_TYPE)
15275 return tree_expr_nonnegative_warnv_p (op0,
15276 strict_overflow_p);
15277 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15278 {
15279 if (TYPE_UNSIGNED (inner_type))
15280 return true;
15281 return tree_expr_nonnegative_warnv_p (op0,
15282 strict_overflow_p);
15283 }
15284 }
15285 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15286 {
15287 if (TREE_CODE (inner_type) == REAL_TYPE)
15288 return tree_expr_nonnegative_warnv_p (op0,
15289 strict_overflow_p);
15290 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15291 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15292 && TYPE_UNSIGNED (inner_type);
15293 }
15294 }
15295 break;
15296
15297 default:
15298 return tree_simple_nonnegative_warnv_p (code, type);
15299 }
15300
15301 /* We don't know sign of `t', so be conservative and return false. */
15302 return false;
15303 }
15304
15305 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15306 value is based on the assumption that signed overflow is undefined,
15307 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15308 *STRICT_OVERFLOW_P. */
15309
15310 bool
15311 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15312 tree op1, bool *strict_overflow_p)
15313 {
15314 if (TYPE_UNSIGNED (type))
15315 return true;
15316
15317 switch (code)
15318 {
15319 case POINTER_PLUS_EXPR:
15320 case PLUS_EXPR:
15321 if (FLOAT_TYPE_P (type))
15322 return (tree_expr_nonnegative_warnv_p (op0,
15323 strict_overflow_p)
15324 && tree_expr_nonnegative_warnv_p (op1,
15325 strict_overflow_p));
15326
15327 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15328 both unsigned and at least 2 bits shorter than the result. */
15329 if (TREE_CODE (type) == INTEGER_TYPE
15330 && TREE_CODE (op0) == NOP_EXPR
15331 && TREE_CODE (op1) == NOP_EXPR)
15332 {
15333 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15334 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15335 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15336 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15337 {
15338 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15339 TYPE_PRECISION (inner2)) + 1;
15340 return prec < TYPE_PRECISION (type);
15341 }
15342 }
15343 break;
15344
15345 case MULT_EXPR:
15346 if (FLOAT_TYPE_P (type))
15347 {
15348 /* x * x for floating point x is always non-negative. */
15349 if (operand_equal_p (op0, op1, 0))
15350 return true;
15351 return (tree_expr_nonnegative_warnv_p (op0,
15352 strict_overflow_p)
15353 && tree_expr_nonnegative_warnv_p (op1,
15354 strict_overflow_p));
15355 }
15356
15357 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15358 both unsigned and their total bits is shorter than the result. */
15359 if (TREE_CODE (type) == INTEGER_TYPE
15360 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15361 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15362 {
15363 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15364 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15365 : TREE_TYPE (op0);
15366 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15367 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15368 : TREE_TYPE (op1);
15369
15370 bool unsigned0 = TYPE_UNSIGNED (inner0);
15371 bool unsigned1 = TYPE_UNSIGNED (inner1);
15372
15373 if (TREE_CODE (op0) == INTEGER_CST)
15374 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15375
15376 if (TREE_CODE (op1) == INTEGER_CST)
15377 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15378
15379 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15380 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15381 {
15382 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15383 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15384 : TYPE_PRECISION (inner0);
15385
15386 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15387 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15388 : TYPE_PRECISION (inner1);
15389
15390 return precision0 + precision1 < TYPE_PRECISION (type);
15391 }
15392 }
15393 return false;
15394
15395 case BIT_AND_EXPR:
15396 case MAX_EXPR:
15397 return (tree_expr_nonnegative_warnv_p (op0,
15398 strict_overflow_p)
15399 || tree_expr_nonnegative_warnv_p (op1,
15400 strict_overflow_p));
15401
15402 case BIT_IOR_EXPR:
15403 case BIT_XOR_EXPR:
15404 case MIN_EXPR:
15405 case RDIV_EXPR:
15406 case TRUNC_DIV_EXPR:
15407 case CEIL_DIV_EXPR:
15408 case FLOOR_DIV_EXPR:
15409 case ROUND_DIV_EXPR:
15410 return (tree_expr_nonnegative_warnv_p (op0,
15411 strict_overflow_p)
15412 && tree_expr_nonnegative_warnv_p (op1,
15413 strict_overflow_p));
15414
15415 case TRUNC_MOD_EXPR:
15416 case CEIL_MOD_EXPR:
15417 case FLOOR_MOD_EXPR:
15418 case ROUND_MOD_EXPR:
15419 return tree_expr_nonnegative_warnv_p (op0,
15420 strict_overflow_p);
15421 default:
15422 return tree_simple_nonnegative_warnv_p (code, type);
15423 }
15424
15425 /* We don't know sign of `t', so be conservative and return false. */
15426 return false;
15427 }
15428
15429 /* Return true if T is known to be non-negative. If the return
15430 value is based on the assumption that signed overflow is undefined,
15431 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15432 *STRICT_OVERFLOW_P. */
15433
15434 bool
15435 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15436 {
15437 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15438 return true;
15439
15440 switch (TREE_CODE (t))
15441 {
15442 case INTEGER_CST:
15443 return tree_int_cst_sgn (t) >= 0;
15444
15445 case REAL_CST:
15446 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15447
15448 case FIXED_CST:
15449 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15450
15451 case COND_EXPR:
15452 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15453 strict_overflow_p)
15454 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15455 strict_overflow_p));
15456 default:
15457 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15458 TREE_TYPE (t));
15459 }
15460 /* We don't know sign of `t', so be conservative and return false. */
15461 return false;
15462 }
15463
15464 /* Return true if T is known to be non-negative. If the return
15465 value is based on the assumption that signed overflow is undefined,
15466 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15467 *STRICT_OVERFLOW_P. */
15468
15469 bool
15470 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15471 tree arg0, tree arg1, bool *strict_overflow_p)
15472 {
15473 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15474 switch (DECL_FUNCTION_CODE (fndecl))
15475 {
15476 CASE_FLT_FN (BUILT_IN_ACOS):
15477 CASE_FLT_FN (BUILT_IN_ACOSH):
15478 CASE_FLT_FN (BUILT_IN_CABS):
15479 CASE_FLT_FN (BUILT_IN_COSH):
15480 CASE_FLT_FN (BUILT_IN_ERFC):
15481 CASE_FLT_FN (BUILT_IN_EXP):
15482 CASE_FLT_FN (BUILT_IN_EXP10):
15483 CASE_FLT_FN (BUILT_IN_EXP2):
15484 CASE_FLT_FN (BUILT_IN_FABS):
15485 CASE_FLT_FN (BUILT_IN_FDIM):
15486 CASE_FLT_FN (BUILT_IN_HYPOT):
15487 CASE_FLT_FN (BUILT_IN_POW10):
15488 CASE_INT_FN (BUILT_IN_FFS):
15489 CASE_INT_FN (BUILT_IN_PARITY):
15490 CASE_INT_FN (BUILT_IN_POPCOUNT):
15491 case BUILT_IN_BSWAP32:
15492 case BUILT_IN_BSWAP64:
15493 /* Always true. */
15494 return true;
15495
15496 CASE_FLT_FN (BUILT_IN_SQRT):
15497 /* sqrt(-0.0) is -0.0. */
15498 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15499 return true;
15500 return tree_expr_nonnegative_warnv_p (arg0,
15501 strict_overflow_p);
15502
15503 CASE_FLT_FN (BUILT_IN_ASINH):
15504 CASE_FLT_FN (BUILT_IN_ATAN):
15505 CASE_FLT_FN (BUILT_IN_ATANH):
15506 CASE_FLT_FN (BUILT_IN_CBRT):
15507 CASE_FLT_FN (BUILT_IN_CEIL):
15508 CASE_FLT_FN (BUILT_IN_ERF):
15509 CASE_FLT_FN (BUILT_IN_EXPM1):
15510 CASE_FLT_FN (BUILT_IN_FLOOR):
15511 CASE_FLT_FN (BUILT_IN_FMOD):
15512 CASE_FLT_FN (BUILT_IN_FREXP):
15513 CASE_FLT_FN (BUILT_IN_ICEIL):
15514 CASE_FLT_FN (BUILT_IN_IFLOOR):
15515 CASE_FLT_FN (BUILT_IN_IRINT):
15516 CASE_FLT_FN (BUILT_IN_IROUND):
15517 CASE_FLT_FN (BUILT_IN_LCEIL):
15518 CASE_FLT_FN (BUILT_IN_LDEXP):
15519 CASE_FLT_FN (BUILT_IN_LFLOOR):
15520 CASE_FLT_FN (BUILT_IN_LLCEIL):
15521 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15522 CASE_FLT_FN (BUILT_IN_LLRINT):
15523 CASE_FLT_FN (BUILT_IN_LLROUND):
15524 CASE_FLT_FN (BUILT_IN_LRINT):
15525 CASE_FLT_FN (BUILT_IN_LROUND):
15526 CASE_FLT_FN (BUILT_IN_MODF):
15527 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15528 CASE_FLT_FN (BUILT_IN_RINT):
15529 CASE_FLT_FN (BUILT_IN_ROUND):
15530 CASE_FLT_FN (BUILT_IN_SCALB):
15531 CASE_FLT_FN (BUILT_IN_SCALBLN):
15532 CASE_FLT_FN (BUILT_IN_SCALBN):
15533 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15534 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15535 CASE_FLT_FN (BUILT_IN_SINH):
15536 CASE_FLT_FN (BUILT_IN_TANH):
15537 CASE_FLT_FN (BUILT_IN_TRUNC):
15538 /* True if the 1st argument is nonnegative. */
15539 return tree_expr_nonnegative_warnv_p (arg0,
15540 strict_overflow_p);
15541
15542 CASE_FLT_FN (BUILT_IN_FMAX):
15543 /* True if the 1st OR 2nd arguments are nonnegative. */
15544 return (tree_expr_nonnegative_warnv_p (arg0,
15545 strict_overflow_p)
15546 || (tree_expr_nonnegative_warnv_p (arg1,
15547 strict_overflow_p)));
15548
15549 CASE_FLT_FN (BUILT_IN_FMIN):
15550 /* True if the 1st AND 2nd arguments are nonnegative. */
15551 return (tree_expr_nonnegative_warnv_p (arg0,
15552 strict_overflow_p)
15553 && (tree_expr_nonnegative_warnv_p (arg1,
15554 strict_overflow_p)));
15555
15556 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15557 /* True if the 2nd argument is nonnegative. */
15558 return tree_expr_nonnegative_warnv_p (arg1,
15559 strict_overflow_p);
15560
15561 CASE_FLT_FN (BUILT_IN_POWI):
15562 /* True if the 1st argument is nonnegative or the second
15563 argument is an even integer. */
15564 if (TREE_CODE (arg1) == INTEGER_CST
15565 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15566 return true;
15567 return tree_expr_nonnegative_warnv_p (arg0,
15568 strict_overflow_p);
15569
15570 CASE_FLT_FN (BUILT_IN_POW):
15571 /* True if the 1st argument is nonnegative or the second
15572 argument is an even integer valued real. */
15573 if (TREE_CODE (arg1) == REAL_CST)
15574 {
15575 REAL_VALUE_TYPE c;
15576 HOST_WIDE_INT n;
15577
15578 c = TREE_REAL_CST (arg1);
15579 n = real_to_integer (&c);
15580 if ((n & 1) == 0)
15581 {
15582 REAL_VALUE_TYPE cint;
15583 real_from_integer (&cint, VOIDmode, n,
15584 n < 0 ? -1 : 0, 0);
15585 if (real_identical (&c, &cint))
15586 return true;
15587 }
15588 }
15589 return tree_expr_nonnegative_warnv_p (arg0,
15590 strict_overflow_p);
15591
15592 default:
15593 break;
15594 }
15595 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15596 type);
15597 }
15598
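/* Illustrative sketch, not part of the original file: the POW case
   above in action.  ARG is an arbitrary double expression; the helper
   name is hypothetical.  */
#if 0
static bool
example_pow_even_nonnegative (tree arg)
{
  bool strict_overflow_p = false;
  tree two = build_real (double_type_node, dconst2);

  /* pow (x, 2.0) is non-negative whatever the sign of x, because the
     exponent is an even integer valued real.  */
  return tree_call_nonnegative_warnv_p (double_type_node,
					builtin_decl_explicit (BUILT_IN_POW),
					arg, two, &strict_overflow_p);
}
#endif
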
15599 /* Return true if T is known to be non-negative. If the return
15600 value is based on the assumption that signed overflow is undefined,
15601 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15602 *STRICT_OVERFLOW_P. */
15603
15604 bool
15605 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15606 {
15607 enum tree_code code = TREE_CODE (t);
15608 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15609 return true;
15610
15611 switch (code)
15612 {
15613 case TARGET_EXPR:
15614 {
15615 tree temp = TARGET_EXPR_SLOT (t);
15616 t = TARGET_EXPR_INITIAL (t);
15617
15618 /* If the initializer is non-void, then it's a normal expression
15619 that will be assigned to the slot. */
15620 if (!VOID_TYPE_P (t))
15621 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15622
15623 /* Otherwise, the initializer sets the slot in some way. One common
15624 way is an assignment statement at the end of the initializer. */
15625 while (1)
15626 {
15627 if (TREE_CODE (t) == BIND_EXPR)
15628 t = expr_last (BIND_EXPR_BODY (t));
15629 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15630 || TREE_CODE (t) == TRY_CATCH_EXPR)
15631 t = expr_last (TREE_OPERAND (t, 0));
15632 else if (TREE_CODE (t) == STATEMENT_LIST)
15633 t = expr_last (t);
15634 else
15635 break;
15636 }
15637 if (TREE_CODE (t) == MODIFY_EXPR
15638 && TREE_OPERAND (t, 0) == temp)
15639 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15640 strict_overflow_p);
15641
15642 return false;
15643 }
15644
15645 case CALL_EXPR:
15646 {
15647 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15648 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15649
15650 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15651 get_callee_fndecl (t),
15652 arg0,
15653 arg1,
15654 strict_overflow_p);
15655 }
15656 case COMPOUND_EXPR:
15657 case MODIFY_EXPR:
15658 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15659 strict_overflow_p);
15660 case BIND_EXPR:
15661 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15662 strict_overflow_p);
15663 case SAVE_EXPR:
15664 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15665 strict_overflow_p);
15666
15667 default:
15668 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15669 TREE_TYPE (t));
15670 }
15671
15672 /* We don't know sign of `t', so be conservative and return false. */
15673 return false;
15674 }
15675
15676 /* Return true if T is known to be non-negative. If the return
15677 value is based on the assumption that signed overflow is undefined,
15678 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15679 *STRICT_OVERFLOW_P. */
15680
15681 bool
15682 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15683 {
15684 enum tree_code code;
15685 if (t == error_mark_node)
15686 return false;
15687
15688 code = TREE_CODE (t);
15689 switch (TREE_CODE_CLASS (code))
15690 {
15691 case tcc_binary:
15692 case tcc_comparison:
15693 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15694 TREE_TYPE (t),
15695 TREE_OPERAND (t, 0),
15696 TREE_OPERAND (t, 1),
15697 strict_overflow_p);
15698
15699 case tcc_unary:
15700 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15701 TREE_TYPE (t),
15702 TREE_OPERAND (t, 0),
15703 strict_overflow_p);
15704
15705 case tcc_constant:
15706 case tcc_declaration:
15707 case tcc_reference:
15708 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15709
15710 default:
15711 break;
15712 }
15713
15714 switch (code)
15715 {
15716 case TRUTH_AND_EXPR:
15717 case TRUTH_OR_EXPR:
15718 case TRUTH_XOR_EXPR:
15719 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15720 TREE_TYPE (t),
15721 TREE_OPERAND (t, 0),
15722 TREE_OPERAND (t, 1),
15723 strict_overflow_p);
15724 case TRUTH_NOT_EXPR:
15725 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15726 TREE_TYPE (t),
15727 TREE_OPERAND (t, 0),
15728 strict_overflow_p);
15729
15730 case COND_EXPR:
15731 case CONSTRUCTOR:
15732 case OBJ_TYPE_REF:
15733 case ASSERT_EXPR:
15734 case ADDR_EXPR:
15735 case WITH_SIZE_EXPR:
15736 case SSA_NAME:
15737 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15738
15739 default:
15740 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15741 }
15742 }
15743
15744 /* Return true if `t' is known to be non-negative. Handle warnings
15745 about undefined signed overflow. */
15746
15747 bool
15748 tree_expr_nonnegative_p (tree t)
15749 {
15750 bool ret, strict_overflow_p;
15751
15752 strict_overflow_p = false;
15753 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15754 if (strict_overflow_p)
15755 fold_overflow_warning (("assuming signed overflow does not occur when "
15756 "determining that expression is always "
15757 "non-negative"),
15758 WARN_STRICT_OVERFLOW_MISC);
15759 return ret;
15760 }
15761
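/* Illustrative sketch, not part of the original file: the difference
   between the plain and the _warnv_p interface.  X is an arbitrary
   signed int expression; the helper name is hypothetical.  */
#if 0
static bool
example_abs_nonnegative (tree x)
{
  bool strict_overflow_p = false;
  tree abs_x = fold_build1 (ABS_EXPR, integer_type_node, x);

  /* With -fstrict-overflow this returns true and sets
     strict_overflow_p, because ABS_EXPR <INT_MIN> wraps back to
     INT_MIN when signed overflow is defined.  */
  return tree_expr_nonnegative_warnv_p (abs_x, &strict_overflow_p)
	 && strict_overflow_p;
}
#endif
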
15762
15763 /* Return true when the unary expression (CODE OP0) is known to be nonzero.
15764    For floating point we further ensure that the value is not denormal.
15765    Similar logic is present in nonzero_address in rtlanal.c.
15766
15767 If the return value is based on the assumption that signed overflow
15768 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15769 change *STRICT_OVERFLOW_P. */
15770
15771 bool
15772 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15773 bool *strict_overflow_p)
15774 {
15775 switch (code)
15776 {
15777 case ABS_EXPR:
15778 return tree_expr_nonzero_warnv_p (op0,
15779 strict_overflow_p);
15780
15781 case NOP_EXPR:
15782 {
15783 tree inner_type = TREE_TYPE (op0);
15784 tree outer_type = type;
15785
15786 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15787 && tree_expr_nonzero_warnv_p (op0,
15788 strict_overflow_p));
15789 }
15790 break;
15791
15792 case NON_LVALUE_EXPR:
15793 return tree_expr_nonzero_warnv_p (op0,
15794 strict_overflow_p);
15795
15796 default:
15797 break;
15798 }
15799
15800 return false;
15801 }
15802
15803 /* Return true when the binary expression (CODE OP0 OP1) is known to be
15804    nonzero.  For floating point we further ensure that the value is not
15805    denormal.  Similar logic is present in nonzero_address in rtlanal.c.
15806
15807 If the return value is based on the assumption that signed overflow
15808 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15809 change *STRICT_OVERFLOW_P. */
15810
15811 bool
15812 tree_binary_nonzero_warnv_p (enum tree_code code,
15813 tree type,
15814 tree op0,
15815 tree op1, bool *strict_overflow_p)
15816 {
15817 bool sub_strict_overflow_p;
15818 switch (code)
15819 {
15820 case POINTER_PLUS_EXPR:
15821 case PLUS_EXPR:
15822 if (TYPE_OVERFLOW_UNDEFINED (type))
15823 {
15824 /* In the presence of negative values it is hard to say
15825    anything definite.  */
15826 sub_strict_overflow_p = false;
15827 if (!tree_expr_nonnegative_warnv_p (op0,
15828 &sub_strict_overflow_p)
15829 || !tree_expr_nonnegative_warnv_p (op1,
15830 &sub_strict_overflow_p))
15831 return false;
15832 /* One of the operands must be positive and the other non-negative.  */
15833 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15834 overflows, on a twos-complement machine the sum of two
15835 nonnegative numbers can never be zero. */
15836 return (tree_expr_nonzero_warnv_p (op0,
15837 strict_overflow_p)
15838 || tree_expr_nonzero_warnv_p (op1,
15839 strict_overflow_p));
15840 }
15841 break;
15842
15843 case MULT_EXPR:
15844 if (TYPE_OVERFLOW_UNDEFINED (type))
15845 {
15846 if (tree_expr_nonzero_warnv_p (op0,
15847 strict_overflow_p)
15848 && tree_expr_nonzero_warnv_p (op1,
15849 strict_overflow_p))
15850 {
15851 *strict_overflow_p = true;
15852 return true;
15853 }
15854 }
15855 break;
15856
15857 case MIN_EXPR:
15858 sub_strict_overflow_p = false;
15859 if (tree_expr_nonzero_warnv_p (op0,
15860 &sub_strict_overflow_p)
15861 && tree_expr_nonzero_warnv_p (op1,
15862 &sub_strict_overflow_p))
15863 {
15864 if (sub_strict_overflow_p)
15865 *strict_overflow_p = true;
      /* The MIN of two nonzero operands is one of them, hence nonzero.  */
      return true;
15866 }
15867 break;
15868
15869 case MAX_EXPR:
15870 sub_strict_overflow_p = false;
15871 if (tree_expr_nonzero_warnv_p (op0,
15872 &sub_strict_overflow_p))
15873 {
15874 if (sub_strict_overflow_p)
15875 *strict_overflow_p = true;
15876
15877 /* When both operands are nonzero, then MAX must be too. */
15878 if (tree_expr_nonzero_warnv_p (op1,
15879 strict_overflow_p))
15880 return true;
15881
15882 /* MAX where operand 0 is positive is positive. */
15883 return tree_expr_nonnegative_warnv_p (op0,
15884 strict_overflow_p);
15885 }
15886 /* MAX where operand 1 is positive is positive. */
15887 else if (tree_expr_nonzero_warnv_p (op1,
15888 &sub_strict_overflow_p)
15889 && tree_expr_nonnegative_warnv_p (op1,
15890 &sub_strict_overflow_p))
15891 {
15892 if (sub_strict_overflow_p)
15893 *strict_overflow_p = true;
15894 return true;
15895 }
15896 break;
15897
15898 case BIT_IOR_EXPR:
15899 return (tree_expr_nonzero_warnv_p (op1,
15900 strict_overflow_p)
15901 || tree_expr_nonzero_warnv_p (op0,
15902 strict_overflow_p));
15903
15904 default:
15905 break;
15906 }
15907
15908 return false;
15909 }
15910
15911 /* Return true when T is known to be nonzero.  For floating point we
15912    further ensure that T is not denormal.  Similar logic is present in
15913    nonzero_address in rtlanal.c.
15914
15915 If the return value is based on the assumption that signed overflow
15916 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15917 change *STRICT_OVERFLOW_P. */
15918
15919 bool
15920 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15921 {
15922 bool sub_strict_overflow_p;
15923 switch (TREE_CODE (t))
15924 {
15925 case INTEGER_CST:
15926 return !integer_zerop (t);
15927
15928 case ADDR_EXPR:
15929 {
15930 tree base = TREE_OPERAND (t, 0);
15931 if (!DECL_P (base))
15932 base = get_base_address (base);
15933
15934 if (!base)
15935 return false;
15936
15937 /* Weak declarations may link to NULL. Other things may also be NULL
15938 so protect with -fdelete-null-pointer-checks; but not variables
15939 allocated on the stack. */
15940 if (DECL_P (base)
15941 && (flag_delete_null_pointer_checks
15942 || (DECL_CONTEXT (base)
15943 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15944 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
15945 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15946
15947 /* Constants are never weak. */
15948 if (CONSTANT_CLASS_P (base))
15949 return true;
15950
15951 return false;
15952 }
15953
15954 case COND_EXPR:
15955 sub_strict_overflow_p = false;
15956 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15957 &sub_strict_overflow_p)
15958 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15959 &sub_strict_overflow_p))
15960 {
15961 if (sub_strict_overflow_p)
15962 *strict_overflow_p = true;
15963 return true;
15964 }
15965 break;
15966
15967 default:
15968 break;
15969 }
15970 return false;
15971 }
15972
15973 /* Return true when T is known to be nonzero.  For floating point we
15974    further ensure that T is not denormal.  Similar logic is present in
15975    nonzero_address in rtlanal.c.
15976
15977 If the return value is based on the assumption that signed overflow
15978 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15979 change *STRICT_OVERFLOW_P. */
15980
15981 bool
15982 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15983 {
15984 tree type = TREE_TYPE (t);
15985 enum tree_code code;
15986
15987 /* Doing something useful for floating point would need more work. */
15988 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15989 return false;
15990
15991 code = TREE_CODE (t);
15992 switch (TREE_CODE_CLASS (code))
15993 {
15994 case tcc_unary:
15995 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15996 strict_overflow_p);
15997 case tcc_binary:
15998 case tcc_comparison:
15999 return tree_binary_nonzero_warnv_p (code, type,
16000 TREE_OPERAND (t, 0),
16001 TREE_OPERAND (t, 1),
16002 strict_overflow_p);
16003 case tcc_constant:
16004 case tcc_declaration:
16005 case tcc_reference:
16006 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16007
16008 default:
16009 break;
16010 }
16011
16012 switch (code)
16013 {
16014 case TRUTH_NOT_EXPR:
16015 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16016 strict_overflow_p);
16017
16018 case TRUTH_AND_EXPR:
16019 case TRUTH_OR_EXPR:
16020 case TRUTH_XOR_EXPR:
16021 return tree_binary_nonzero_warnv_p (code, type,
16022 TREE_OPERAND (t, 0),
16023 TREE_OPERAND (t, 1),
16024 strict_overflow_p);
16025
16026 case COND_EXPR:
16027 case CONSTRUCTOR:
16028 case OBJ_TYPE_REF:
16029 case ASSERT_EXPR:
16030 case ADDR_EXPR:
16031 case WITH_SIZE_EXPR:
16032 case SSA_NAME:
16033 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16034
16035 case COMPOUND_EXPR:
16036 case MODIFY_EXPR:
16037 case BIND_EXPR:
16038 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16039 strict_overflow_p);
16040
16041 case SAVE_EXPR:
16042 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
16043 strict_overflow_p);
16044
16045 case CALL_EXPR:
16046 return alloca_call_p (t);
16047
16048 default:
16049 break;
16050 }
16051 return false;
16052 }
16053
16054 /* Return true when T is known to be nonzero.
16055 Handle warnings about undefined signed overflow. */
16056
16057 bool
16058 tree_expr_nonzero_p (tree t)
16059 {
16060 bool ret, strict_overflow_p;
16061
16062 strict_overflow_p = false;
16063 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
16064 if (strict_overflow_p)
16065 fold_overflow_warning (("assuming signed overflow does not occur when "
16066 "determining that expression is always "
16067 "non-zero"),
16068 WARN_STRICT_OVERFLOW_MISC);
16069 return ret;
16070 }
16071
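/* Illustrative sketch, not part of the original file: the ADDR_EXPR
   case of tree_single_nonzero_warnv_p.  DECL is assumed to be a local
   (non-weak, stack-allocated) VAR_DECL; the helper name is
   hypothetical.  */
#if 0
static bool
example_address_nonzero (tree decl)
{
  /* &decl cannot be a null pointer for an automatic variable, so this
     returns true even without -fdelete-null-pointer-checks.  */
  return tree_expr_nonzero_p (build_fold_addr_expr (decl));
}
#endif
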
16072 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16073 attempt to fold the expression to a constant without modifying TYPE,
16074 OP0 or OP1.
16075
16076 If the expression could be simplified to a constant, then return
16077 the constant. If the expression would not be simplified to a
16078 constant, then return NULL_TREE. */
16079
16080 tree
16081 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16082 {
16083 tree tem = fold_binary (code, type, op0, op1);
16084 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16085 }
16086
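/* Illustrative sketch, not part of the original file: unlike
   fold_binary, fold_binary_to_constant never yields a partially
   simplified tree.  VAR is an arbitrary non-constant int expression;
   the helper name is hypothetical.  */
#if 0
static void
example_fold_to_constant (tree var)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);

  /* Two constant operands fold to the INTEGER_CST 5.  */
  tree c = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				    two, three);
  /* A variable operand yields NULL_TREE, not the tree VAR + 2.  */
  tree n = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				    var, two);
  gcc_assert (c != NULL_TREE && n == NULL_TREE);
}
#endif
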
16087 /* Given the components of a unary expression CODE, TYPE and OP0,
16088 attempt to fold the expression to a constant without modifying
16089 TYPE or OP0.
16090
16091 If the expression could be simplified to a constant, then return
16092 the constant. If the expression would not be simplified to a
16093 constant, then return NULL_TREE. */
16094
16095 tree
16096 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16097 {
16098 tree tem = fold_unary (code, type, op0);
16099 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16100 }
16101
16102 /* If EXP represents referencing an element in a constant string
16103 (either via pointer arithmetic or array indexing), return the
16104 tree representing the value accessed, otherwise return NULL. */
16105
16106 tree
16107 fold_read_from_constant_string (tree exp)
16108 {
16109 if ((TREE_CODE (exp) == INDIRECT_REF
16110 || TREE_CODE (exp) == ARRAY_REF)
16111 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16112 {
16113 tree exp1 = TREE_OPERAND (exp, 0);
16114 tree index;
16115 tree string;
16116 location_t loc = EXPR_LOCATION (exp);
16117
16118 if (TREE_CODE (exp) == INDIRECT_REF)
16119 string = string_constant (exp1, &index);
16120 else
16121 {
16122 tree low_bound = array_ref_low_bound (exp);
16123 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16124
16125 /* Optimize the special-case of a zero lower bound.
16126
16127 We convert the low_bound to sizetype to avoid some problems
16128 with constant folding. (E.g. suppose the lower bound is 1,
16129 and its mode is QI. Without the conversion,l (ARRAY
16130 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16131 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16132 if (! integer_zerop (low_bound))
16133 index = size_diffop_loc (loc, index,
16134 fold_convert_loc (loc, sizetype, low_bound));
16135
16136 string = exp1;
16137 }
16138
16139 if (string
16140 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16141 && TREE_CODE (string) == STRING_CST
16142 && TREE_CODE (index) == INTEGER_CST
16143 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16144 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16145 == MODE_INT)
16146 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16147 return build_int_cst_type (TREE_TYPE (exp),
16148 (TREE_STRING_POINTER (string)
16149 [TREE_INT_CST_LOW (index)]));
16150 }
16151 return NULL;
16152 }
16153
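/* Illustrative sketch, not part of the original file: feeding an
   ARRAY_REF of a string literal to fold_read_from_constant_string.
   The helper name is hypothetical.  */
#if 0
static tree
example_read_from_string (void)
{
  /* Build the STRING_CST "abc" with type char[4] (terminating NUL
     included).  */
  tree domain = build_index_type (size_int (3));
  tree str = build_string (4, "abc");
  TREE_TYPE (str) = build_array_type (char_type_node, domain);

  /* "abc"[1] reads the constant 'b'.  */
  tree ref = build4 (ARRAY_REF, char_type_node, str, size_int (1),
		     NULL_TREE, NULL_TREE);
  return fold_read_from_constant_string (ref);
}
#endif
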
16154 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16155 an integer constant, real, or fixed-point constant.
16156
16157 TYPE is the type of the result. */
16158
16159 static tree
16160 fold_negate_const (tree arg0, tree type)
16161 {
16162 tree t = NULL_TREE;
16163
16164 switch (TREE_CODE (arg0))
16165 {
16166 case INTEGER_CST:
16167 {
16168 double_int val = tree_to_double_int (arg0);
16169 bool overflow;
16170 val = val.neg_with_overflow (&overflow);
16171 t = force_fit_type_double (type, val, 1,
16172 (overflow | TREE_OVERFLOW (arg0))
16173 && !TYPE_UNSIGNED (type));
16174 break;
16175 }
16176
16177 case REAL_CST:
16178 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16179 break;
16180
16181 case FIXED_CST:
16182 {
16183 FIXED_VALUE_TYPE f;
16184 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16185 &(TREE_FIXED_CST (arg0)), NULL,
16186 TYPE_SATURATING (type));
16187 t = build_fixed (type, f);
16188 /* Propagate overflow flags. */
16189 if (overflow_p | TREE_OVERFLOW (arg0))
16190 TREE_OVERFLOW (t) = 1;
16191 break;
16192 }
16193
16194 default:
16195 gcc_unreachable ();
16196 }
16197
16198 return t;
16199 }
16200
16201 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16202 an integer constant or real constant.
16203
16204 TYPE is the type of the result. */
16205
16206 tree
16207 fold_abs_const (tree arg0, tree type)
16208 {
16209 tree t = NULL_TREE;
16210
16211 switch (TREE_CODE (arg0))
16212 {
16213 case INTEGER_CST:
16214 {
16215 double_int val = tree_to_double_int (arg0);
16216
16217 /* If the value is unsigned or non-negative, then the absolute value
16218 is the same as the ordinary value. */
16219 if (TYPE_UNSIGNED (type)
16220 || !val.is_negative ())
16221 t = arg0;
16222
16223 /* If the value is negative, then the absolute value is
16224 its negation. */
16225 else
16226 {
16227 bool overflow;
16228 val = val.neg_with_overflow (&overflow);
16229 t = force_fit_type_double (type, val, -1,
16230 overflow | TREE_OVERFLOW (arg0));
16231 }
16232 }
16233 break;
16234
16235 case REAL_CST:
16236 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16237 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16238 else
16239 t = arg0;
16240 break;
16241
16242 default:
16243 gcc_unreachable ();
16244 }
16245
16246 return t;
16247 }
16248
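/* Illustrative sketch, not part of the original file: the INT_MIN
   corner of fold_abs_const.  The negated value does not fit in the
   type, so force_fit_type_double flags the result.  The helper name is
   hypothetical.  */
#if 0
static tree
example_abs_int_min (void)
{
  tree t = fold_abs_const (TYPE_MIN_VALUE (integer_type_node),
			   integer_type_node);

  /* |INT_MIN| is not representable as an int, so the returned
     INTEGER_CST has TREE_OVERFLOW set.  */
  gcc_assert (TREE_OVERFLOW (t));
  return t;
}
#endif
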
16249 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16250 constant. TYPE is the type of the result. */
16251
16252 static tree
16253 fold_not_const (const_tree arg0, tree type)
16254 {
16255 double_int val;
16256
16257 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16258
16259 val = ~tree_to_double_int (arg0);
16260 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16261 }
16262
16263 /* Given CODE, a relational operator, the target type, TYPE and two
16264 constant operands OP0 and OP1, return the result of the
16265 relational operation. If the result is not a compile time
16266 constant, then return NULL_TREE. */
16267
16268 static tree
16269 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16270 {
16271 int result, invert;
16272
16273 /* From here on, the only cases we handle are when the result is
16274 known to be a constant. */
16275
16276 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16277 {
16278 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16279 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16280
16281 /* Handle the cases where either operand is a NaN. */
16282 if (real_isnan (c0) || real_isnan (c1))
16283 {
16284 switch (code)
16285 {
16286 case EQ_EXPR:
16287 case ORDERED_EXPR:
16288 result = 0;
16289 break;
16290
16291 case NE_EXPR:
16292 case UNORDERED_EXPR:
16293 case UNLT_EXPR:
16294 case UNLE_EXPR:
16295 case UNGT_EXPR:
16296 case UNGE_EXPR:
16297 case UNEQ_EXPR:
16298 result = 1;
16299 break;
16300
16301 case LT_EXPR:
16302 case LE_EXPR:
16303 case GT_EXPR:
16304 case GE_EXPR:
16305 case LTGT_EXPR:
16306 if (flag_trapping_math)
16307 return NULL_TREE;
16308 result = 0;
16309 break;
16310
16311 default:
16312 gcc_unreachable ();
16313 }
16314
16315 return constant_boolean_node (result, type);
16316 }
16317
16318 return constant_boolean_node (real_compare (code, c0, c1), type);
16319 }
16320
16321 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16322 {
16323 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16324 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16325 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16326 }
16327
16328 /* Handle equality/inequality of complex constants. */
16329 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16330 {
16331 tree rcond = fold_relational_const (code, type,
16332 TREE_REALPART (op0),
16333 TREE_REALPART (op1));
16334 tree icond = fold_relational_const (code, type,
16335 TREE_IMAGPART (op0),
16336 TREE_IMAGPART (op1));
16337 if (code == EQ_EXPR)
16338 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16339 else if (code == NE_EXPR)
16340 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16341 else
16342 return NULL_TREE;
16343 }
16344
16345 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16346 {
16347 unsigned count = VECTOR_CST_NELTS (op0);
16348 tree *elts = XALLOCAVEC (tree, count);
16349 gcc_assert (VECTOR_CST_NELTS (op1) == count
16350 && TYPE_VECTOR_SUBPARTS (type) == count);
16351
16352 for (unsigned i = 0; i < count; i++)
16353 {
16354 tree elem_type = TREE_TYPE (type);
16355 tree elem0 = VECTOR_CST_ELT (op0, i);
16356 tree elem1 = VECTOR_CST_ELT (op1, i);
16357
16358 tree tem = fold_relational_const (code, elem_type,
16359 elem0, elem1);
16360
16361 if (tem == NULL_TREE)
16362 return NULL_TREE;
16363
16364 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16365 }
16366
16367 return build_vector (type, elts);
16368 }
16369
16370 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16371
16372 To compute GT, swap the arguments and do LT.
16373 To compute GE, do LT and invert the result.
16374 To compute LE, swap the arguments, do LT and invert the result.
16375 To compute NE, do EQ and invert the result.
16376
16377 Therefore, the code below must handle only EQ and LT. */
16378
16379 if (code == LE_EXPR || code == GT_EXPR)
16380 {
16381 tree tem = op0;
16382 op0 = op1;
16383 op1 = tem;
16384 code = swap_tree_comparison (code);
16385 }
16386
16387 /* Note that it is safe to invert for real values here because we
16388 have already handled the one case that it matters. */
16389
16390 invert = 0;
16391 if (code == NE_EXPR || code == GE_EXPR)
16392 {
16393 invert = 1;
16394 code = invert_tree_comparison (code, false);
16395 }
16396
16397 /* Compute a result for LT or EQ if args permit;
16398    otherwise return NULL_TREE.  */
16399 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16400 {
16401 if (code == EQ_EXPR)
16402 result = tree_int_cst_equal (op0, op1);
16403 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16404 result = INT_CST_LT_UNSIGNED (op0, op1);
16405 else
16406 result = INT_CST_LT (op0, op1);
16407 }
16408 else
16409 return NULL_TREE;
16410
16411 if (invert)
16412 result ^= 1;
16413 return constant_boolean_node (result, type);
16414 }
16415
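/* Illustrative sketch, not part of the original file: the NaN handling
   at the top of fold_relational_const.  The helper name is
   hypothetical.  */
#if 0
static tree
example_nan_compare (void)
{
  REAL_VALUE_TYPE rnan;
  real_nan (&rnan, "", 1, TYPE_MODE (double_type_node));

  tree nan = build_real (double_type_node, rnan);
  tree zero = build_real (double_type_node, dconst0);

  /* NaN != 0.0 folds to true.  NaN < 0.0, by contrast, would raise
     INVALID, so with -ftrapping-math the LT_EXPR case above returns
     NULL_TREE instead of a constant.  */
  return fold_relational_const (NE_EXPR, boolean_type_node, nan, zero);
}
#endif
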
16416 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16417 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16418 itself. */
16419
16420 tree
16421 fold_build_cleanup_point_expr (tree type, tree expr)
16422 {
16423 /* If the expression does not have side effects then we don't have to wrap
16424 it with a cleanup point expression. */
16425 if (!TREE_SIDE_EFFECTS (expr))
16426 return expr;
16427
16428 /* If the expression is a return, check whether the expression inside the
16429    return, or the right hand side of the MODIFY_EXPR inside the return,
16430    has side effects.  If either one has none, we don't need to wrap the
16431    expression in a cleanup point expression.  Note we don't check the
16432    left hand side of the modify because it should always be a return decl. */
16433 if (TREE_CODE (expr) == RETURN_EXPR)
16434 {
16435 tree op = TREE_OPERAND (expr, 0);
16436 if (!op || !TREE_SIDE_EFFECTS (op))
16437 return expr;
16438 op = TREE_OPERAND (op, 1);
16439 if (!TREE_SIDE_EFFECTS (op))
16440 return expr;
16441 }
16442
16443 return build1 (CLEANUP_POINT_EXPR, type, expr);
16444 }
16445
16446 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16447 of an indirection through OP0, or NULL_TREE if no simplification is
16448 possible. */
16449
16450 tree
16451 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16452 {
16453 tree sub = op0;
16454 tree subtype;
16455
16456 STRIP_NOPS (sub);
16457 subtype = TREE_TYPE (sub);
16458 if (!POINTER_TYPE_P (subtype))
16459 return NULL_TREE;
16460
16461 if (TREE_CODE (sub) == ADDR_EXPR)
16462 {
16463 tree op = TREE_OPERAND (sub, 0);
16464 tree optype = TREE_TYPE (op);
16465 /* *&CONST_DECL -> to the value of the const decl. */
      if (TREE_CODE (op) == CONST_DECL)
        return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype)
               && (!in_gimple_form
                   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (in_gimple_form
              && TREE_CODE (min_val) != INTEGER_CST)
            return NULL_TREE;
          return build4_loc (loc, ARRAY_REF, type, op, min_val,
                             NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && type == TREE_TYPE (optype))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3_loc (loc, BIT_FIELD_REF, type, op,
                                  part_width, index);
        }
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
        {
          tree op00type;
          op00 = TREE_OPERAND (op00, 0);
          op00type = TREE_TYPE (op00);

          /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
          if (TREE_CODE (op00type) == VECTOR_TYPE
              && type == TREE_TYPE (op00type))
            {
              HOST_WIDE_INT offset = tree_low_cst (op01, 0);
              tree part_width = TYPE_SIZE (type);
              unsigned HOST_WIDE_INT part_widthi
                = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
              unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
              tree index = bitsize_int (indexi);

              /* Only fold when the index selects an element that is
                 actually inside the vector; valid element indices are
                 0 .. TYPE_VECTOR_SUBPARTS - 1.  */
              if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
                return fold_build3_loc (loc, BIT_FIELD_REF, type, op00,
                                        part_width, index);
            }
          /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
          else if (TREE_CODE (op00type) == COMPLEX_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree size = TYPE_SIZE_UNIT (type);
              if (tree_int_cst_equal (size, op01))
                return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
            }
          /* ((foo *)&fooarray)[1] => fooarray[1] */
          else if (TREE_CODE (op00type) == ARRAY_TYPE
                   && type == TREE_TYPE (op00type))
            {
              tree type_domain = TYPE_DOMAIN (op00type);
              tree min_val = size_zero_node;
              if (type_domain && TYPE_MIN_VALUE (type_domain))
                min_val = TYPE_MIN_VALUE (type_domain);
              op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
                                     TYPE_SIZE_UNIT (type));
              op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
              return build4_loc (loc, ARRAY_REF, type, op00, op01,
                                 NULL_TREE, NULL_TREE);
            }
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
          || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
          && TREE_CODE (min_val) != INTEGER_CST)
        return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
                         NULL_TREE);
    }

  return NULL_TREE;
}
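
/* Source-level view (kept out of the build with #if 0) of the main
   simplifications above; this is hypothetical user code, with the
   folded form noted next to each dereference.  */
#if 0
double a[4];
_Complex double z;
double (*ap)[4] = &a;

double r1 = *(double *) &a;      /* folded to a[0] */
double r2 = *(double *) &z;      /* folded to __real__ z */
double r3 = ((double *) &z)[1];  /* folded to __imag__ z */
double r4 = ((double *) &a)[1];  /* folded to a[1] */
double r5 = *(double *) ap;      /* folded to (*ap)[0] */
#endif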

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
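
/* Illustrative sketch (kept out of the build with #if 0); X, Y, TYPE
   and CALL are hypothetical trees, with CALL a side-effecting
   CALL_EXPR.  */
#if 0
  /* No side effects at all: the whole tree is dropped.  */
  fold_ignored_result (fold_build2 (PLUS_EXPR, type, x, y));
    /* => integer_zero_node */

  /* Binary node with one side-effecting operand: peel down to it.  */
  fold_ignored_result (fold_build2 (PLUS_EXPR, type, call, x));
    /* => call */

  /* Unary nodes are always stripped down to their operand.  */
  fold_ignored_result (fold_build1 (NEGATE_EXPR, type, call));
    /* => call */
#endif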

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this check when VALUE is not itself
     a constant, because for a constant the check is more expensive
     than simply doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          double_int val = tree_to_double_int (value);
          bool overflow_p;

          if ((val.low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val.low &= ~(divisor - 1);
          val.low += divisor;
          if (val.low == 0)
            {
              val.high++;
              if (val.high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), val,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this check when VALUE is not itself
     a constant, because for a constant the check is more expensive
     than simply doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
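
/* Standalone sketch (kept out of the build with #if 0) of the
   power-of-two fast paths above, on host integers instead of trees:
   rounding up is "add DIVISOR - 1, then mask", rounding down is just
   the mask.  The helper names are hypothetical.  */
#if 0
#include <stdint.h>

static uint64_t
round_up_pow2 (uint64_t value, uint64_t divisor)
{
  /* round_up_pow2 (13, 8) == 16; round_up_pow2 (16, 8) == 16.  */
  return (value + divisor - 1) & -divisor;
}

static uint64_t
round_down_pow2 (uint64_t value, uint64_t divisor)
{
  /* round_down_pow2 (13, 8) == 8; round_down_pow2 (16, 8) == 16.  */
  return value & -divisor;
}
#endif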

/* Return a pointer to the base of the object addressed by EXP and
   extract information about the offset of the access, storing it in
   *PBITPOS and *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
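
/* Illustrative sketch (kept out of the build with #if 0); ADDR is a
   hypothetical ADDR_EXPR.  For ADDR == &s.field the core comes back as
   &s, the constant part of the offset lands in BITPOS (in bits), and
   any variable part (e.g. from &a[i]) lands in OFFSET.  */
#if 0
  HOST_WIDE_INT bitpos;
  tree offset;
  tree core = split_address_to_core_and_offset (addr, &bitpos, &offset);
#endif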

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
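
/* Illustrative sketch (kept out of the build with #if 0): for two
   hypothetical addresses E1 == &a[10] and E2 == &a[4] into the same
   array of ints, the byte distance is a compile-time constant.  */
#if 0
  HOST_WIDE_INT diff;
  if (ptr_difference_const (e1, e2, &diff))
    /* diff == 6 * sizeof (int), i.e. the byte distance e1 - e2.  */;
#endif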

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc, COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
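
/* Source-level view (kept out of the build with #if 0) of the sign
   stripping above; hypothetical user code, valid only because each
   context makes the sign of the stripped subexpression irrelevant.  */
#if 0
#include <math.h>

double x, y;

double t1 = fabs (-x * y);          /* == fabs (x * y) */
double t2 = sin (-x) * sin (-x);    /* == sin (x) * sin (x); sin is odd */
double t3 = fabs (copysign (x, y)); /* == fabs (x); copysign stripped */
#endif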