1 /* Fold a constant sub-tree into a single node for C-compiler 2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 4 2012 Free Software Foundation, Inc. 5 6 This file is part of GCC. 7 8 GCC is free software; you can redistribute it and/or modify it under 9 the terms of the GNU General Public License as published by the Free 10 Software Foundation; either version 3, or (at your option) any later 11 version. 12 13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY 14 WARRANTY; without even the implied warranty of MERCHANTABILITY or 15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 16 for more details. 17 18 You should have received a copy of the GNU General Public License 19 along with GCC; see the file COPYING3. If not see 20 <http://www.gnu.org/licenses/>. */ 21 22 /*@@ This file should be rewritten to use an arbitrary precision 23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst". 24 @@ Perhaps the routines could also be used for bc/dc, and made a lib. 25 @@ The routines that translate from the ap rep should 26 @@ warn if precision et. al. is lost. 27 @@ This would also make life easier when this technology is used 28 @@ for cross-compilers. */ 29 30 /* The entry points in this file are fold, size_int_wide and size_binop. 31 32 fold takes a tree as argument and returns a simplified tree. 33 34 size_binop takes a tree code for an arithmetic operation 35 and two operands that are trees, and produces a tree for the 36 result, assuming the type comes from `sizetype'. 37 38 size_int takes an integer value, and creates a tree constant 39 with type from `sizetype'. 40 41 Note: Since the folders get called on non-gimple code as well as 42 gimple code, we need to handle GIMPLE tuples as well as their 43 corresponding tree equivalents. */ 44 45 #include "config.h" 46 #include "system.h" 47 #include "coretypes.h" 48 #include "tm.h" 49 #include "flags.h" 50 #include "tree.h" 51 #include "realmpfr.h" 52 #include "rtl.h" 53 #include "expr.h" 54 #include "tm_p.h" 55 #include "target.h" 56 #include "diagnostic-core.h" 57 #include "intl.h" 58 #include "ggc.h" 59 #include "hashtab.h" 60 #include "langhooks.h" 61 #include "md5.h" 62 #include "gimple.h" 63 #include "tree-flow.h" 64 65 /* Nonzero if we are folding constants inside an initializer; zero 66 otherwise. */ 67 int folding_initializer = 0; 68 69 /* The following constants represent a bit based encoding of GCC's 70 comparison operators. This encoding simplifies transformations 71 on relational comparison operators, such as AND and OR. */ 72 enum comparison_code { 73 COMPCODE_FALSE = 0, 74 COMPCODE_LT = 1, 75 COMPCODE_EQ = 2, 76 COMPCODE_LE = 3, 77 COMPCODE_GT = 4, 78 COMPCODE_LTGT = 5, 79 COMPCODE_GE = 6, 80 COMPCODE_ORD = 7, 81 COMPCODE_UNORD = 8, 82 COMPCODE_UNLT = 9, 83 COMPCODE_UNEQ = 10, 84 COMPCODE_UNLE = 11, 85 COMPCODE_UNGT = 12, 86 COMPCODE_NE = 13, 87 COMPCODE_UNGE = 14, 88 COMPCODE_TRUE = 15 89 }; 90 91 static bool negate_mathfn_p (enum built_in_function); 92 static bool negate_expr_p (tree); 93 static tree negate_expr (tree); 94 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int); 95 static tree associate_trees (location_t, tree, tree, enum tree_code, tree); 96 static tree const_binop (enum tree_code, tree, tree); 97 static enum comparison_code comparison_to_compcode (enum tree_code); 98 static enum tree_code compcode_to_comparison (enum comparison_code); 99 static int operand_equal_for_comparison_p (tree, tree, tree); 100 static int twoval_comparison_p (tree, tree *, tree *, int *); 101 static tree eval_subst (location_t, tree, tree, tree, tree, tree); 102 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree); 103 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree); 104 static tree make_bit_field_ref (location_t, tree, tree, 105 HOST_WIDE_INT, HOST_WIDE_INT, int); 106 static tree optimize_bit_field_compare (location_t, enum tree_code, 107 tree, tree, tree); 108 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *, 109 HOST_WIDE_INT *, 110 enum machine_mode *, int *, int *, 111 tree *, tree *); 112 static int all_ones_mask_p (const_tree, int); 113 static tree sign_bit_p (tree, const_tree); 114 static int simple_operand_p (const_tree); 115 static bool simple_operand_p_2 (tree); 116 static tree range_binop (enum tree_code, tree, tree, int, tree, int); 117 static tree range_predecessor (tree); 118 static tree range_successor (tree); 119 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree); 120 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree); 121 static tree unextend (tree, int, int, tree); 122 static tree optimize_minmax_comparison (location_t, enum tree_code, 123 tree, tree, tree); 124 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *); 125 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *); 126 static tree fold_binary_op_with_conditional_arg (location_t, 127 enum tree_code, tree, 128 tree, tree, 129 tree, tree, int); 130 static tree fold_mathfn_compare (location_t, 131 enum built_in_function, enum tree_code, 132 tree, tree, tree); 133 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree); 134 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree); 135 static bool reorder_operands_p (const_tree, const_tree); 136 static tree fold_negate_const (tree, tree); 137 static tree fold_not_const (const_tree, tree); 138 static tree fold_relational_const (enum tree_code, tree, tree, tree); 139 static tree fold_convert_const (enum tree_code, tree, tree); 140 141 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION. 142 Otherwise, return LOC. */ 143 144 static location_t 145 expr_location_or (tree t, location_t loc) 146 { 147 location_t tloc = EXPR_LOCATION (t); 148 return tloc != UNKNOWN_LOCATION ? tloc : loc; 149 } 150 151 /* Similar to protected_set_expr_location, but never modify x in place, 152 if location can and needs to be set, unshare it. */ 153 154 static inline tree 155 protected_set_expr_location_unshare (tree x, location_t loc) 156 { 157 if (CAN_HAVE_LOCATION_P (x) 158 && EXPR_LOCATION (x) != loc 159 && !(TREE_CODE (x) == SAVE_EXPR 160 || TREE_CODE (x) == TARGET_EXPR 161 || TREE_CODE (x) == BIND_EXPR)) 162 { 163 x = copy_node (x); 164 SET_EXPR_LOCATION (x, loc); 165 } 166 return x; 167 } 168 169 170 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring 171 overflow. Suppose A, B and SUM have the same respective signs as A1, B1, 172 and SUM1. Then this yields nonzero if overflow occurred during the 173 addition. 174 175 Overflow occurs if A and B have the same sign, but A and SUM differ in 176 sign. Use `^' to test whether signs differ, and `< 0' to isolate the 177 sign. */ 178 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0) 179 180 /* If ARG2 divides ARG1 with zero remainder, carries out the division 181 of type CODE and returns the quotient. 182 Otherwise returns NULL_TREE. */ 183 184 tree 185 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2) 186 { 187 double_int quo, rem; 188 int uns; 189 190 /* The sign of the division is according to operand two, that 191 does the correct thing for POINTER_PLUS_EXPR where we want 192 a signed division. */ 193 uns = TYPE_UNSIGNED (TREE_TYPE (arg2)); 194 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE 195 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2))) 196 uns = false; 197 198 quo = double_int_divmod (tree_to_double_int (arg1), 199 tree_to_double_int (arg2), 200 uns, code, &rem); 201 202 if (double_int_zero_p (rem)) 203 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high); 204 205 return NULL_TREE; 206 } 207 208 /* This is nonzero if we should defer warnings about undefined 209 overflow. This facility exists because these warnings are a 210 special case. The code to estimate loop iterations does not want 211 to issue any warnings, since it works with expressions which do not 212 occur in user code. Various bits of cleanup code call fold(), but 213 only use the result if it has certain characteristics (e.g., is a 214 constant); that code only wants to issue a warning if the result is 215 used. */ 216 217 static int fold_deferring_overflow_warnings; 218 219 /* If a warning about undefined overflow is deferred, this is the 220 warning. Note that this may cause us to turn two warnings into 221 one, but that is fine since it is sufficient to only give one 222 warning per expression. */ 223 224 static const char* fold_deferred_overflow_warning; 225 226 /* If a warning about undefined overflow is deferred, this is the 227 level at which the warning should be emitted. */ 228 229 static enum warn_strict_overflow_code fold_deferred_overflow_code; 230 231 /* Start deferring overflow warnings. We could use a stack here to 232 permit nested calls, but at present it is not necessary. */ 233 234 void 235 fold_defer_overflow_warnings (void) 236 { 237 ++fold_deferring_overflow_warnings; 238 } 239 240 /* Stop deferring overflow warnings. If there is a pending warning, 241 and ISSUE is true, then issue the warning if appropriate. STMT is 242 the statement with which the warning should be associated (used for 243 location information); STMT may be NULL. CODE is the level of the 244 warning--a warn_strict_overflow_code value. This function will use 245 the smaller of CODE and the deferred code when deciding whether to 246 issue the warning. CODE may be zero to mean to always use the 247 deferred code. */ 248 249 void 250 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code) 251 { 252 const char *warnmsg; 253 location_t locus; 254 255 gcc_assert (fold_deferring_overflow_warnings > 0); 256 --fold_deferring_overflow_warnings; 257 if (fold_deferring_overflow_warnings > 0) 258 { 259 if (fold_deferred_overflow_warning != NULL 260 && code != 0 261 && code < (int) fold_deferred_overflow_code) 262 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code; 263 return; 264 } 265 266 warnmsg = fold_deferred_overflow_warning; 267 fold_deferred_overflow_warning = NULL; 268 269 if (!issue || warnmsg == NULL) 270 return; 271 272 if (gimple_no_warning_p (stmt)) 273 return; 274 275 /* Use the smallest code level when deciding to issue the 276 warning. */ 277 if (code == 0 || code > (int) fold_deferred_overflow_code) 278 code = fold_deferred_overflow_code; 279 280 if (!issue_strict_overflow_warning (code)) 281 return; 282 283 if (stmt == NULL) 284 locus = input_location; 285 else 286 locus = gimple_location (stmt); 287 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg); 288 } 289 290 /* Stop deferring overflow warnings, ignoring any deferred 291 warnings. */ 292 293 void 294 fold_undefer_and_ignore_overflow_warnings (void) 295 { 296 fold_undefer_overflow_warnings (false, NULL, 0); 297 } 298 299 /* Whether we are deferring overflow warnings. */ 300 301 bool 302 fold_deferring_overflow_warnings_p (void) 303 { 304 return fold_deferring_overflow_warnings > 0; 305 } 306 307 /* This is called when we fold something based on the fact that signed 308 overflow is undefined. */ 309 310 static void 311 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc) 312 { 313 if (fold_deferring_overflow_warnings > 0) 314 { 315 if (fold_deferred_overflow_warning == NULL 316 || wc < fold_deferred_overflow_code) 317 { 318 fold_deferred_overflow_warning = gmsgid; 319 fold_deferred_overflow_code = wc; 320 } 321 } 322 else if (issue_strict_overflow_warning (wc)) 323 warning (OPT_Wstrict_overflow, gmsgid); 324 } 325 326 /* Return true if the built-in mathematical function specified by CODE 327 is odd, i.e. -f(x) == f(-x). */ 328 329 static bool 330 negate_mathfn_p (enum built_in_function code) 331 { 332 switch (code) 333 { 334 CASE_FLT_FN (BUILT_IN_ASIN): 335 CASE_FLT_FN (BUILT_IN_ASINH): 336 CASE_FLT_FN (BUILT_IN_ATAN): 337 CASE_FLT_FN (BUILT_IN_ATANH): 338 CASE_FLT_FN (BUILT_IN_CASIN): 339 CASE_FLT_FN (BUILT_IN_CASINH): 340 CASE_FLT_FN (BUILT_IN_CATAN): 341 CASE_FLT_FN (BUILT_IN_CATANH): 342 CASE_FLT_FN (BUILT_IN_CBRT): 343 CASE_FLT_FN (BUILT_IN_CPROJ): 344 CASE_FLT_FN (BUILT_IN_CSIN): 345 CASE_FLT_FN (BUILT_IN_CSINH): 346 CASE_FLT_FN (BUILT_IN_CTAN): 347 CASE_FLT_FN (BUILT_IN_CTANH): 348 CASE_FLT_FN (BUILT_IN_ERF): 349 CASE_FLT_FN (BUILT_IN_LLROUND): 350 CASE_FLT_FN (BUILT_IN_LROUND): 351 CASE_FLT_FN (BUILT_IN_ROUND): 352 CASE_FLT_FN (BUILT_IN_SIN): 353 CASE_FLT_FN (BUILT_IN_SINH): 354 CASE_FLT_FN (BUILT_IN_TAN): 355 CASE_FLT_FN (BUILT_IN_TANH): 356 CASE_FLT_FN (BUILT_IN_TRUNC): 357 return true; 358 359 CASE_FLT_FN (BUILT_IN_LLRINT): 360 CASE_FLT_FN (BUILT_IN_LRINT): 361 CASE_FLT_FN (BUILT_IN_NEARBYINT): 362 CASE_FLT_FN (BUILT_IN_RINT): 363 return !flag_rounding_math; 364 365 default: 366 break; 367 } 368 return false; 369 } 370 371 /* Check whether we may negate an integer constant T without causing 372 overflow. */ 373 374 bool 375 may_negate_without_overflow_p (const_tree t) 376 { 377 unsigned HOST_WIDE_INT val; 378 unsigned int prec; 379 tree type; 380 381 gcc_assert (TREE_CODE (t) == INTEGER_CST); 382 383 type = TREE_TYPE (t); 384 if (TYPE_UNSIGNED (type)) 385 return false; 386 387 prec = TYPE_PRECISION (type); 388 if (prec > HOST_BITS_PER_WIDE_INT) 389 { 390 if (TREE_INT_CST_LOW (t) != 0) 391 return true; 392 prec -= HOST_BITS_PER_WIDE_INT; 393 val = TREE_INT_CST_HIGH (t); 394 } 395 else 396 val = TREE_INT_CST_LOW (t); 397 if (prec < HOST_BITS_PER_WIDE_INT) 398 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1; 399 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1)); 400 } 401 402 /* Determine whether an expression T can be cheaply negated using 403 the function negate_expr without introducing undefined overflow. */ 404 405 static bool 406 negate_expr_p (tree t) 407 { 408 tree type; 409 410 if (t == 0) 411 return false; 412 413 type = TREE_TYPE (t); 414 415 STRIP_SIGN_NOPS (t); 416 switch (TREE_CODE (t)) 417 { 418 case INTEGER_CST: 419 if (TYPE_OVERFLOW_WRAPS (type)) 420 return true; 421 422 /* Check that -CST will not overflow type. */ 423 return may_negate_without_overflow_p (t); 424 case BIT_NOT_EXPR: 425 return (INTEGRAL_TYPE_P (type) 426 && TYPE_OVERFLOW_WRAPS (type)); 427 428 case FIXED_CST: 429 case NEGATE_EXPR: 430 return true; 431 432 case REAL_CST: 433 /* We want to canonicalize to positive real constants. Pretend 434 that only negative ones can be easily negated. */ 435 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t)); 436 437 case COMPLEX_CST: 438 return negate_expr_p (TREE_REALPART (t)) 439 && negate_expr_p (TREE_IMAGPART (t)); 440 441 case COMPLEX_EXPR: 442 return negate_expr_p (TREE_OPERAND (t, 0)) 443 && negate_expr_p (TREE_OPERAND (t, 1)); 444 445 case CONJ_EXPR: 446 return negate_expr_p (TREE_OPERAND (t, 0)); 447 448 case PLUS_EXPR: 449 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)) 450 || HONOR_SIGNED_ZEROS (TYPE_MODE (type))) 451 return false; 452 /* -(A + B) -> (-B) - A. */ 453 if (negate_expr_p (TREE_OPERAND (t, 1)) 454 && reorder_operands_p (TREE_OPERAND (t, 0), 455 TREE_OPERAND (t, 1))) 456 return true; 457 /* -(A + B) -> (-A) - B. */ 458 return negate_expr_p (TREE_OPERAND (t, 0)); 459 460 case MINUS_EXPR: 461 /* We can't turn -(A-B) into B-A when we honor signed zeros. */ 462 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)) 463 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)) 464 && reorder_operands_p (TREE_OPERAND (t, 0), 465 TREE_OPERAND (t, 1)); 466 467 case MULT_EXPR: 468 if (TYPE_UNSIGNED (TREE_TYPE (t))) 469 break; 470 471 /* Fall through. */ 472 473 case RDIV_EXPR: 474 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t)))) 475 return negate_expr_p (TREE_OPERAND (t, 1)) 476 || negate_expr_p (TREE_OPERAND (t, 0)); 477 break; 478 479 case TRUNC_DIV_EXPR: 480 case ROUND_DIV_EXPR: 481 case FLOOR_DIV_EXPR: 482 case CEIL_DIV_EXPR: 483 case EXACT_DIV_EXPR: 484 /* In general we can't negate A / B, because if A is INT_MIN and 485 B is 1, we may turn this into INT_MIN / -1 which is undefined 486 and actually traps on some architectures. But if overflow is 487 undefined, we can negate, because - (INT_MIN / 1) is an 488 overflow. */ 489 if (INTEGRAL_TYPE_P (TREE_TYPE (t)) 490 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))) 491 break; 492 return negate_expr_p (TREE_OPERAND (t, 1)) 493 || negate_expr_p (TREE_OPERAND (t, 0)); 494 495 case NOP_EXPR: 496 /* Negate -((double)float) as (double)(-float). */ 497 if (TREE_CODE (type) == REAL_TYPE) 498 { 499 tree tem = strip_float_extensions (t); 500 if (tem != t) 501 return negate_expr_p (tem); 502 } 503 break; 504 505 case CALL_EXPR: 506 /* Negate -f(x) as f(-x). */ 507 if (negate_mathfn_p (builtin_mathfn_code (t))) 508 return negate_expr_p (CALL_EXPR_ARG (t, 0)); 509 break; 510 511 case RSHIFT_EXPR: 512 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */ 513 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST) 514 { 515 tree op1 = TREE_OPERAND (t, 1); 516 if (TREE_INT_CST_HIGH (op1) == 0 517 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1) 518 == TREE_INT_CST_LOW (op1)) 519 return true; 520 } 521 break; 522 523 default: 524 break; 525 } 526 return false; 527 } 528 529 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no 530 simplification is possible. 531 If negate_expr_p would return true for T, NULL_TREE will never be 532 returned. */ 533 534 static tree 535 fold_negate_expr (location_t loc, tree t) 536 { 537 tree type = TREE_TYPE (t); 538 tree tem; 539 540 switch (TREE_CODE (t)) 541 { 542 /* Convert - (~A) to A + 1. */ 543 case BIT_NOT_EXPR: 544 if (INTEGRAL_TYPE_P (type)) 545 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0), 546 build_int_cst (type, 1)); 547 break; 548 549 case INTEGER_CST: 550 tem = fold_negate_const (t, type); 551 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t) 552 || !TYPE_OVERFLOW_TRAPS (type)) 553 return tem; 554 break; 555 556 case REAL_CST: 557 tem = fold_negate_const (t, type); 558 /* Two's complement FP formats, such as c4x, may overflow. */ 559 if (!TREE_OVERFLOW (tem) || !flag_trapping_math) 560 return tem; 561 break; 562 563 case FIXED_CST: 564 tem = fold_negate_const (t, type); 565 return tem; 566 567 case COMPLEX_CST: 568 { 569 tree rpart = negate_expr (TREE_REALPART (t)); 570 tree ipart = negate_expr (TREE_IMAGPART (t)); 571 572 if ((TREE_CODE (rpart) == REAL_CST 573 && TREE_CODE (ipart) == REAL_CST) 574 || (TREE_CODE (rpart) == INTEGER_CST 575 && TREE_CODE (ipart) == INTEGER_CST)) 576 return build_complex (type, rpart, ipart); 577 } 578 break; 579 580 case COMPLEX_EXPR: 581 if (negate_expr_p (t)) 582 return fold_build2_loc (loc, COMPLEX_EXPR, type, 583 fold_negate_expr (loc, TREE_OPERAND (t, 0)), 584 fold_negate_expr (loc, TREE_OPERAND (t, 1))); 585 break; 586 587 case CONJ_EXPR: 588 if (negate_expr_p (t)) 589 return fold_build1_loc (loc, CONJ_EXPR, type, 590 fold_negate_expr (loc, TREE_OPERAND (t, 0))); 591 break; 592 593 case NEGATE_EXPR: 594 return TREE_OPERAND (t, 0); 595 596 case PLUS_EXPR: 597 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)) 598 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))) 599 { 600 /* -(A + B) -> (-B) - A. */ 601 if (negate_expr_p (TREE_OPERAND (t, 1)) 602 && reorder_operands_p (TREE_OPERAND (t, 0), 603 TREE_OPERAND (t, 1))) 604 { 605 tem = negate_expr (TREE_OPERAND (t, 1)); 606 return fold_build2_loc (loc, MINUS_EXPR, type, 607 tem, TREE_OPERAND (t, 0)); 608 } 609 610 /* -(A + B) -> (-A) - B. */ 611 if (negate_expr_p (TREE_OPERAND (t, 0))) 612 { 613 tem = negate_expr (TREE_OPERAND (t, 0)); 614 return fold_build2_loc (loc, MINUS_EXPR, type, 615 tem, TREE_OPERAND (t, 1)); 616 } 617 } 618 break; 619 620 case MINUS_EXPR: 621 /* - (A - B) -> B - A */ 622 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)) 623 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)) 624 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1))) 625 return fold_build2_loc (loc, MINUS_EXPR, type, 626 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0)); 627 break; 628 629 case MULT_EXPR: 630 if (TYPE_UNSIGNED (type)) 631 break; 632 633 /* Fall through. */ 634 635 case RDIV_EXPR: 636 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))) 637 { 638 tem = TREE_OPERAND (t, 1); 639 if (negate_expr_p (tem)) 640 return fold_build2_loc (loc, TREE_CODE (t), type, 641 TREE_OPERAND (t, 0), negate_expr (tem)); 642 tem = TREE_OPERAND (t, 0); 643 if (negate_expr_p (tem)) 644 return fold_build2_loc (loc, TREE_CODE (t), type, 645 negate_expr (tem), TREE_OPERAND (t, 1)); 646 } 647 break; 648 649 case TRUNC_DIV_EXPR: 650 case ROUND_DIV_EXPR: 651 case FLOOR_DIV_EXPR: 652 case CEIL_DIV_EXPR: 653 case EXACT_DIV_EXPR: 654 /* In general we can't negate A / B, because if A is INT_MIN and 655 B is 1, we may turn this into INT_MIN / -1 which is undefined 656 and actually traps on some architectures. But if overflow is 657 undefined, we can negate, because - (INT_MIN / 1) is an 658 overflow. */ 659 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type)) 660 { 661 const char * const warnmsg = G_("assuming signed overflow does not " 662 "occur when negating a division"); 663 tem = TREE_OPERAND (t, 1); 664 if (negate_expr_p (tem)) 665 { 666 if (INTEGRAL_TYPE_P (type) 667 && (TREE_CODE (tem) != INTEGER_CST 668 || integer_onep (tem))) 669 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC); 670 return fold_build2_loc (loc, TREE_CODE (t), type, 671 TREE_OPERAND (t, 0), negate_expr (tem)); 672 } 673 tem = TREE_OPERAND (t, 0); 674 if (negate_expr_p (tem)) 675 { 676 if (INTEGRAL_TYPE_P (type) 677 && (TREE_CODE (tem) != INTEGER_CST 678 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type)))) 679 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC); 680 return fold_build2_loc (loc, TREE_CODE (t), type, 681 negate_expr (tem), TREE_OPERAND (t, 1)); 682 } 683 } 684 break; 685 686 case NOP_EXPR: 687 /* Convert -((double)float) into (double)(-float). */ 688 if (TREE_CODE (type) == REAL_TYPE) 689 { 690 tem = strip_float_extensions (t); 691 if (tem != t && negate_expr_p (tem)) 692 return fold_convert_loc (loc, type, negate_expr (tem)); 693 } 694 break; 695 696 case CALL_EXPR: 697 /* Negate -f(x) as f(-x). */ 698 if (negate_mathfn_p (builtin_mathfn_code (t)) 699 && negate_expr_p (CALL_EXPR_ARG (t, 0))) 700 { 701 tree fndecl, arg; 702 703 fndecl = get_callee_fndecl (t); 704 arg = negate_expr (CALL_EXPR_ARG (t, 0)); 705 return build_call_expr_loc (loc, fndecl, 1, arg); 706 } 707 break; 708 709 case RSHIFT_EXPR: 710 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */ 711 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST) 712 { 713 tree op1 = TREE_OPERAND (t, 1); 714 if (TREE_INT_CST_HIGH (op1) == 0 715 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1) 716 == TREE_INT_CST_LOW (op1)) 717 { 718 tree ntype = TYPE_UNSIGNED (type) 719 ? signed_type_for (type) 720 : unsigned_type_for (type); 721 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0)); 722 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1); 723 return fold_convert_loc (loc, type, temp); 724 } 725 } 726 break; 727 728 default: 729 break; 730 } 731 732 return NULL_TREE; 733 } 734 735 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be 736 negated in a simpler way. Also allow for T to be NULL_TREE, in which case 737 return NULL_TREE. */ 738 739 static tree 740 negate_expr (tree t) 741 { 742 tree type, tem; 743 location_t loc; 744 745 if (t == NULL_TREE) 746 return NULL_TREE; 747 748 loc = EXPR_LOCATION (t); 749 type = TREE_TYPE (t); 750 STRIP_SIGN_NOPS (t); 751 752 tem = fold_negate_expr (loc, t); 753 if (!tem) 754 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t); 755 return fold_convert_loc (loc, type, tem); 756 } 757 758 /* Split a tree IN into a constant, literal and variable parts that could be 759 combined with CODE to make IN. "constant" means an expression with 760 TREE_CONSTANT but that isn't an actual constant. CODE must be a 761 commutative arithmetic operation. Store the constant part into *CONP, 762 the literal in *LITP and return the variable part. If a part isn't 763 present, set it to null. If the tree does not decompose in this way, 764 return the entire tree as the variable part and the other parts as null. 765 766 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that 767 case, we negate an operand that was subtracted. Except if it is a 768 literal for which we use *MINUS_LITP instead. 769 770 If NEGATE_P is true, we are negating all of IN, again except a literal 771 for which we use *MINUS_LITP instead. 772 773 If IN is itself a literal or constant, return it as appropriate. 774 775 Note that we do not guarantee that any of the three values will be the 776 same type as IN, but they will have the same signedness and mode. */ 777 778 static tree 779 split_tree (tree in, enum tree_code code, tree *conp, tree *litp, 780 tree *minus_litp, int negate_p) 781 { 782 tree var = 0; 783 784 *conp = 0; 785 *litp = 0; 786 *minus_litp = 0; 787 788 /* Strip any conversions that don't change the machine mode or signedness. */ 789 STRIP_SIGN_NOPS (in); 790 791 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST 792 || TREE_CODE (in) == FIXED_CST) 793 *litp = in; 794 else if (TREE_CODE (in) == code 795 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math) 796 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in)) 797 /* We can associate addition and subtraction together (even 798 though the C standard doesn't say so) for integers because 799 the value is not affected. For reals, the value might be 800 affected, so we can't. */ 801 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR) 802 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR)))) 803 { 804 tree op0 = TREE_OPERAND (in, 0); 805 tree op1 = TREE_OPERAND (in, 1); 806 int neg1_p = TREE_CODE (in) == MINUS_EXPR; 807 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0; 808 809 /* First see if either of the operands is a literal, then a constant. */ 810 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST 811 || TREE_CODE (op0) == FIXED_CST) 812 *litp = op0, op0 = 0; 813 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST 814 || TREE_CODE (op1) == FIXED_CST) 815 *litp = op1, neg_litp_p = neg1_p, op1 = 0; 816 817 if (op0 != 0 && TREE_CONSTANT (op0)) 818 *conp = op0, op0 = 0; 819 else if (op1 != 0 && TREE_CONSTANT (op1)) 820 *conp = op1, neg_conp_p = neg1_p, op1 = 0; 821 822 /* If we haven't dealt with either operand, this is not a case we can 823 decompose. Otherwise, VAR is either of the ones remaining, if any. */ 824 if (op0 != 0 && op1 != 0) 825 var = in; 826 else if (op0 != 0) 827 var = op0; 828 else 829 var = op1, neg_var_p = neg1_p; 830 831 /* Now do any needed negations. */ 832 if (neg_litp_p) 833 *minus_litp = *litp, *litp = 0; 834 if (neg_conp_p) 835 *conp = negate_expr (*conp); 836 if (neg_var_p) 837 var = negate_expr (var); 838 } 839 else if (TREE_CONSTANT (in)) 840 *conp = in; 841 else 842 var = in; 843 844 if (negate_p) 845 { 846 if (*litp) 847 *minus_litp = *litp, *litp = 0; 848 else if (*minus_litp) 849 *litp = *minus_litp, *minus_litp = 0; 850 *conp = negate_expr (*conp); 851 var = negate_expr (var); 852 } 853 854 return var; 855 } 856 857 /* Re-associate trees split by the above function. T1 and T2 are 858 either expressions to associate or null. Return the new 859 expression, if any. LOC is the location of the new expression. If 860 we build an operation, do it in TYPE and with CODE. */ 861 862 static tree 863 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type) 864 { 865 if (t1 == 0) 866 return t2; 867 else if (t2 == 0) 868 return t1; 869 870 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't 871 try to fold this since we will have infinite recursion. But do 872 deal with any NEGATE_EXPRs. */ 873 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code 874 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR) 875 { 876 if (code == PLUS_EXPR) 877 { 878 if (TREE_CODE (t1) == NEGATE_EXPR) 879 return build2_loc (loc, MINUS_EXPR, type, 880 fold_convert_loc (loc, type, t2), 881 fold_convert_loc (loc, type, 882 TREE_OPERAND (t1, 0))); 883 else if (TREE_CODE (t2) == NEGATE_EXPR) 884 return build2_loc (loc, MINUS_EXPR, type, 885 fold_convert_loc (loc, type, t1), 886 fold_convert_loc (loc, type, 887 TREE_OPERAND (t2, 0))); 888 else if (integer_zerop (t2)) 889 return fold_convert_loc (loc, type, t1); 890 } 891 else if (code == MINUS_EXPR) 892 { 893 if (integer_zerop (t2)) 894 return fold_convert_loc (loc, type, t1); 895 } 896 897 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1), 898 fold_convert_loc (loc, type, t2)); 899 } 900 901 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1), 902 fold_convert_loc (loc, type, t2)); 903 } 904 905 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable 906 for use in int_const_binop, size_binop and size_diffop. */ 907 908 static bool 909 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2) 910 { 911 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1)) 912 return false; 913 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2)) 914 return false; 915 916 switch (code) 917 { 918 case LSHIFT_EXPR: 919 case RSHIFT_EXPR: 920 case LROTATE_EXPR: 921 case RROTATE_EXPR: 922 return true; 923 924 default: 925 break; 926 } 927 928 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2) 929 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2) 930 && TYPE_MODE (type1) == TYPE_MODE (type2); 931 } 932 933 934 /* Combine two integer constants ARG1 and ARG2 under operation CODE 935 to produce a new constant. Return NULL_TREE if we don't know how 936 to evaluate CODE at compile-time. */ 937 938 tree 939 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2) 940 { 941 double_int op1, op2, res, tmp; 942 tree t; 943 tree type = TREE_TYPE (arg1); 944 bool uns = TYPE_UNSIGNED (type); 945 bool is_sizetype 946 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)); 947 bool overflow = false; 948 949 op1 = tree_to_double_int (arg1); 950 op2 = tree_to_double_int (arg2); 951 952 switch (code) 953 { 954 case BIT_IOR_EXPR: 955 res = double_int_ior (op1, op2); 956 break; 957 958 case BIT_XOR_EXPR: 959 res = double_int_xor (op1, op2); 960 break; 961 962 case BIT_AND_EXPR: 963 res = double_int_and (op1, op2); 964 break; 965 966 case RSHIFT_EXPR: 967 res = double_int_rshift (op1, double_int_to_shwi (op2), 968 TYPE_PRECISION (type), !uns); 969 break; 970 971 case LSHIFT_EXPR: 972 /* It's unclear from the C standard whether shifts can overflow. 973 The following code ignores overflow; perhaps a C standard 974 interpretation ruling is needed. */ 975 res = double_int_lshift (op1, double_int_to_shwi (op2), 976 TYPE_PRECISION (type), !uns); 977 break; 978 979 case RROTATE_EXPR: 980 res = double_int_rrotate (op1, double_int_to_shwi (op2), 981 TYPE_PRECISION (type)); 982 break; 983 984 case LROTATE_EXPR: 985 res = double_int_lrotate (op1, double_int_to_shwi (op2), 986 TYPE_PRECISION (type)); 987 break; 988 989 case PLUS_EXPR: 990 overflow = add_double (op1.low, op1.high, op2.low, op2.high, 991 &res.low, &res.high); 992 break; 993 994 case MINUS_EXPR: 995 neg_double (op2.low, op2.high, &res.low, &res.high); 996 add_double (op1.low, op1.high, res.low, res.high, 997 &res.low, &res.high); 998 overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high); 999 break; 1000 1001 case MULT_EXPR: 1002 overflow = mul_double (op1.low, op1.high, op2.low, op2.high, 1003 &res.low, &res.high); 1004 break; 1005 1006 case TRUNC_DIV_EXPR: 1007 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR: 1008 case EXACT_DIV_EXPR: 1009 /* This is a shortcut for a common special case. */ 1010 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0 1011 && !TREE_OVERFLOW (arg1) 1012 && !TREE_OVERFLOW (arg2) 1013 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0) 1014 { 1015 if (code == CEIL_DIV_EXPR) 1016 op1.low += op2.low - 1; 1017 1018 res.low = op1.low / op2.low, res.high = 0; 1019 break; 1020 } 1021 1022 /* ... fall through ... */ 1023 1024 case ROUND_DIV_EXPR: 1025 if (double_int_zero_p (op2)) 1026 return NULL_TREE; 1027 if (double_int_one_p (op2)) 1028 { 1029 res = op1; 1030 break; 1031 } 1032 if (double_int_equal_p (op1, op2) 1033 && ! double_int_zero_p (op1)) 1034 { 1035 res = double_int_one; 1036 break; 1037 } 1038 overflow = div_and_round_double (code, uns, 1039 op1.low, op1.high, op2.low, op2.high, 1040 &res.low, &res.high, 1041 &tmp.low, &tmp.high); 1042 break; 1043 1044 case TRUNC_MOD_EXPR: 1045 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR: 1046 /* This is a shortcut for a common special case. */ 1047 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0 1048 && !TREE_OVERFLOW (arg1) 1049 && !TREE_OVERFLOW (arg2) 1050 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0) 1051 { 1052 if (code == CEIL_MOD_EXPR) 1053 op1.low += op2.low - 1; 1054 res.low = op1.low % op2.low, res.high = 0; 1055 break; 1056 } 1057 1058 /* ... fall through ... */ 1059 1060 case ROUND_MOD_EXPR: 1061 if (double_int_zero_p (op2)) 1062 return NULL_TREE; 1063 overflow = div_and_round_double (code, uns, 1064 op1.low, op1.high, op2.low, op2.high, 1065 &tmp.low, &tmp.high, 1066 &res.low, &res.high); 1067 break; 1068 1069 case MIN_EXPR: 1070 res = double_int_min (op1, op2, uns); 1071 break; 1072 1073 case MAX_EXPR: 1074 res = double_int_max (op1, op2, uns); 1075 break; 1076 1077 default: 1078 return NULL_TREE; 1079 } 1080 1081 t = force_fit_type_double (TREE_TYPE (arg1), res, 1, 1082 ((!uns || is_sizetype) && overflow) 1083 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)); 1084 1085 return t; 1086 } 1087 1088 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new 1089 constant. We assume ARG1 and ARG2 have the same data type, or at least 1090 are the same kind of constant and the same machine mode. Return zero if 1091 combining the constants is not allowed in the current operating mode. */ 1092 1093 static tree 1094 const_binop (enum tree_code code, tree arg1, tree arg2) 1095 { 1096 /* Sanity check for the recursive cases. */ 1097 if (!arg1 || !arg2) 1098 return NULL_TREE; 1099 1100 STRIP_NOPS (arg1); 1101 STRIP_NOPS (arg2); 1102 1103 if (TREE_CODE (arg1) == INTEGER_CST) 1104 return int_const_binop (code, arg1, arg2); 1105 1106 if (TREE_CODE (arg1) == REAL_CST) 1107 { 1108 enum machine_mode mode; 1109 REAL_VALUE_TYPE d1; 1110 REAL_VALUE_TYPE d2; 1111 REAL_VALUE_TYPE value; 1112 REAL_VALUE_TYPE result; 1113 bool inexact; 1114 tree t, type; 1115 1116 /* The following codes are handled by real_arithmetic. */ 1117 switch (code) 1118 { 1119 case PLUS_EXPR: 1120 case MINUS_EXPR: 1121 case MULT_EXPR: 1122 case RDIV_EXPR: 1123 case MIN_EXPR: 1124 case MAX_EXPR: 1125 break; 1126 1127 default: 1128 return NULL_TREE; 1129 } 1130 1131 d1 = TREE_REAL_CST (arg1); 1132 d2 = TREE_REAL_CST (arg2); 1133 1134 type = TREE_TYPE (arg1); 1135 mode = TYPE_MODE (type); 1136 1137 /* Don't perform operation if we honor signaling NaNs and 1138 either operand is a NaN. */ 1139 if (HONOR_SNANS (mode) 1140 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2))) 1141 return NULL_TREE; 1142 1143 /* Don't perform operation if it would raise a division 1144 by zero exception. */ 1145 if (code == RDIV_EXPR 1146 && REAL_VALUES_EQUAL (d2, dconst0) 1147 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode))) 1148 return NULL_TREE; 1149 1150 /* If either operand is a NaN, just return it. Otherwise, set up 1151 for floating-point trap; we return an overflow. */ 1152 if (REAL_VALUE_ISNAN (d1)) 1153 return arg1; 1154 else if (REAL_VALUE_ISNAN (d2)) 1155 return arg2; 1156 1157 inexact = real_arithmetic (&value, code, &d1, &d2); 1158 real_convert (&result, mode, &value); 1159 1160 /* Don't constant fold this floating point operation if 1161 the result has overflowed and flag_trapping_math. */ 1162 if (flag_trapping_math 1163 && MODE_HAS_INFINITIES (mode) 1164 && REAL_VALUE_ISINF (result) 1165 && !REAL_VALUE_ISINF (d1) 1166 && !REAL_VALUE_ISINF (d2)) 1167 return NULL_TREE; 1168 1169 /* Don't constant fold this floating point operation if the 1170 result may dependent upon the run-time rounding mode and 1171 flag_rounding_math is set, or if GCC's software emulation 1172 is unable to accurately represent the result. */ 1173 if ((flag_rounding_math 1174 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations)) 1175 && (inexact || !real_identical (&result, &value))) 1176 return NULL_TREE; 1177 1178 t = build_real (type, result); 1179 1180 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2); 1181 return t; 1182 } 1183 1184 if (TREE_CODE (arg1) == FIXED_CST) 1185 { 1186 FIXED_VALUE_TYPE f1; 1187 FIXED_VALUE_TYPE f2; 1188 FIXED_VALUE_TYPE result; 1189 tree t, type; 1190 int sat_p; 1191 bool overflow_p; 1192 1193 /* The following codes are handled by fixed_arithmetic. */ 1194 switch (code) 1195 { 1196 case PLUS_EXPR: 1197 case MINUS_EXPR: 1198 case MULT_EXPR: 1199 case TRUNC_DIV_EXPR: 1200 f2 = TREE_FIXED_CST (arg2); 1201 break; 1202 1203 case LSHIFT_EXPR: 1204 case RSHIFT_EXPR: 1205 f2.data.high = TREE_INT_CST_HIGH (arg2); 1206 f2.data.low = TREE_INT_CST_LOW (arg2); 1207 f2.mode = SImode; 1208 break; 1209 1210 default: 1211 return NULL_TREE; 1212 } 1213 1214 f1 = TREE_FIXED_CST (arg1); 1215 type = TREE_TYPE (arg1); 1216 sat_p = TYPE_SATURATING (type); 1217 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p); 1218 t = build_fixed (type, result); 1219 /* Propagate overflow flags. */ 1220 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)) 1221 TREE_OVERFLOW (t) = 1; 1222 return t; 1223 } 1224 1225 if (TREE_CODE (arg1) == COMPLEX_CST) 1226 { 1227 tree type = TREE_TYPE (arg1); 1228 tree r1 = TREE_REALPART (arg1); 1229 tree i1 = TREE_IMAGPART (arg1); 1230 tree r2 = TREE_REALPART (arg2); 1231 tree i2 = TREE_IMAGPART (arg2); 1232 tree real, imag; 1233 1234 switch (code) 1235 { 1236 case PLUS_EXPR: 1237 case MINUS_EXPR: 1238 real = const_binop (code, r1, r2); 1239 imag = const_binop (code, i1, i2); 1240 break; 1241 1242 case MULT_EXPR: 1243 if (COMPLEX_FLOAT_TYPE_P (type)) 1244 return do_mpc_arg2 (arg1, arg2, type, 1245 /* do_nonfinite= */ folding_initializer, 1246 mpc_mul); 1247 1248 real = const_binop (MINUS_EXPR, 1249 const_binop (MULT_EXPR, r1, r2), 1250 const_binop (MULT_EXPR, i1, i2)); 1251 imag = const_binop (PLUS_EXPR, 1252 const_binop (MULT_EXPR, r1, i2), 1253 const_binop (MULT_EXPR, i1, r2)); 1254 break; 1255 1256 case RDIV_EXPR: 1257 if (COMPLEX_FLOAT_TYPE_P (type)) 1258 return do_mpc_arg2 (arg1, arg2, type, 1259 /* do_nonfinite= */ folding_initializer, 1260 mpc_div); 1261 /* Fallthru ... */ 1262 case TRUNC_DIV_EXPR: 1263 case CEIL_DIV_EXPR: 1264 case FLOOR_DIV_EXPR: 1265 case ROUND_DIV_EXPR: 1266 if (flag_complex_method == 0) 1267 { 1268 /* Keep this algorithm in sync with 1269 tree-complex.c:expand_complex_div_straight(). 1270 1271 Expand complex division to scalars, straightforward algorithm. 1272 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t) 1273 t = br*br + bi*bi 1274 */ 1275 tree magsquared 1276 = const_binop (PLUS_EXPR, 1277 const_binop (MULT_EXPR, r2, r2), 1278 const_binop (MULT_EXPR, i2, i2)); 1279 tree t1 1280 = const_binop (PLUS_EXPR, 1281 const_binop (MULT_EXPR, r1, r2), 1282 const_binop (MULT_EXPR, i1, i2)); 1283 tree t2 1284 = const_binop (MINUS_EXPR, 1285 const_binop (MULT_EXPR, i1, r2), 1286 const_binop (MULT_EXPR, r1, i2)); 1287 1288 real = const_binop (code, t1, magsquared); 1289 imag = const_binop (code, t2, magsquared); 1290 } 1291 else 1292 { 1293 /* Keep this algorithm in sync with 1294 tree-complex.c:expand_complex_div_wide(). 1295 1296 Expand complex division to scalars, modified algorithm to minimize 1297 overflow with wide input ranges. */ 1298 tree compare = fold_build2 (LT_EXPR, boolean_type_node, 1299 fold_abs_const (r2, TREE_TYPE (type)), 1300 fold_abs_const (i2, TREE_TYPE (type))); 1301 1302 if (integer_nonzerop (compare)) 1303 { 1304 /* In the TRUE branch, we compute 1305 ratio = br/bi; 1306 div = (br * ratio) + bi; 1307 tr = (ar * ratio) + ai; 1308 ti = (ai * ratio) - ar; 1309 tr = tr / div; 1310 ti = ti / div; */ 1311 tree ratio = const_binop (code, r2, i2); 1312 tree div = const_binop (PLUS_EXPR, i2, 1313 const_binop (MULT_EXPR, r2, ratio)); 1314 real = const_binop (MULT_EXPR, r1, ratio); 1315 real = const_binop (PLUS_EXPR, real, i1); 1316 real = const_binop (code, real, div); 1317 1318 imag = const_binop (MULT_EXPR, i1, ratio); 1319 imag = const_binop (MINUS_EXPR, imag, r1); 1320 imag = const_binop (code, imag, div); 1321 } 1322 else 1323 { 1324 /* In the FALSE branch, we compute 1325 ratio = d/c; 1326 divisor = (d * ratio) + c; 1327 tr = (b * ratio) + a; 1328 ti = b - (a * ratio); 1329 tr = tr / div; 1330 ti = ti / div; */ 1331 tree ratio = const_binop (code, i2, r2); 1332 tree div = const_binop (PLUS_EXPR, r2, 1333 const_binop (MULT_EXPR, i2, ratio)); 1334 1335 real = const_binop (MULT_EXPR, i1, ratio); 1336 real = const_binop (PLUS_EXPR, real, r1); 1337 real = const_binop (code, real, div); 1338 1339 imag = const_binop (MULT_EXPR, r1, ratio); 1340 imag = const_binop (MINUS_EXPR, i1, imag); 1341 imag = const_binop (code, imag, div); 1342 } 1343 } 1344 break; 1345 1346 default: 1347 return NULL_TREE; 1348 } 1349 1350 if (real && imag) 1351 return build_complex (type, real, imag); 1352 } 1353 1354 if (TREE_CODE (arg1) == VECTOR_CST) 1355 { 1356 tree type = TREE_TYPE(arg1); 1357 int count = TYPE_VECTOR_SUBPARTS (type), i; 1358 tree elements1, elements2, list = NULL_TREE; 1359 1360 if(TREE_CODE(arg2) != VECTOR_CST) 1361 return NULL_TREE; 1362 1363 elements1 = TREE_VECTOR_CST_ELTS (arg1); 1364 elements2 = TREE_VECTOR_CST_ELTS (arg2); 1365 1366 for (i = 0; i < count; i++) 1367 { 1368 tree elem1, elem2, elem; 1369 1370 /* The trailing elements can be empty and should be treated as 0 */ 1371 if(!elements1) 1372 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node); 1373 else 1374 { 1375 elem1 = TREE_VALUE(elements1); 1376 elements1 = TREE_CHAIN (elements1); 1377 } 1378 1379 if(!elements2) 1380 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node); 1381 else 1382 { 1383 elem2 = TREE_VALUE(elements2); 1384 elements2 = TREE_CHAIN (elements2); 1385 } 1386 1387 elem = const_binop (code, elem1, elem2); 1388 1389 /* It is possible that const_binop cannot handle the given 1390 code and return NULL_TREE */ 1391 if(elem == NULL_TREE) 1392 return NULL_TREE; 1393 1394 list = tree_cons (NULL_TREE, elem, list); 1395 } 1396 return build_vector(type, nreverse(list)); 1397 } 1398 return NULL_TREE; 1399 } 1400 1401 /* Create a size type INT_CST node with NUMBER sign extended. KIND 1402 indicates which particular sizetype to create. */ 1403 1404 tree 1405 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind) 1406 { 1407 return build_int_cst (sizetype_tab[(int) kind], number); 1408 } 1409 1410 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE 1411 is a tree code. The type of the result is taken from the operands. 1412 Both must be equivalent integer types, ala int_binop_types_match_p. 1413 If the operands are constant, so is the result. */ 1414 1415 tree 1416 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1) 1417 { 1418 tree type = TREE_TYPE (arg0); 1419 1420 if (arg0 == error_mark_node || arg1 == error_mark_node) 1421 return error_mark_node; 1422 1423 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0), 1424 TREE_TYPE (arg1))); 1425 1426 /* Handle the special case of two integer constants faster. */ 1427 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST) 1428 { 1429 /* And some specific cases even faster than that. */ 1430 if (code == PLUS_EXPR) 1431 { 1432 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0)) 1433 return arg1; 1434 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1)) 1435 return arg0; 1436 } 1437 else if (code == MINUS_EXPR) 1438 { 1439 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1)) 1440 return arg0; 1441 } 1442 else if (code == MULT_EXPR) 1443 { 1444 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0)) 1445 return arg1; 1446 } 1447 1448 /* Handle general case of two integer constants. */ 1449 return int_const_binop (code, arg0, arg1); 1450 } 1451 1452 return fold_build2_loc (loc, code, type, arg0, arg1); 1453 } 1454 1455 /* Given two values, either both of sizetype or both of bitsizetype, 1456 compute the difference between the two values. Return the value 1457 in signed type corresponding to the type of the operands. */ 1458 1459 tree 1460 size_diffop_loc (location_t loc, tree arg0, tree arg1) 1461 { 1462 tree type = TREE_TYPE (arg0); 1463 tree ctype; 1464 1465 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0), 1466 TREE_TYPE (arg1))); 1467 1468 /* If the type is already signed, just do the simple thing. */ 1469 if (!TYPE_UNSIGNED (type)) 1470 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1); 1471 1472 if (type == sizetype) 1473 ctype = ssizetype; 1474 else if (type == bitsizetype) 1475 ctype = sbitsizetype; 1476 else 1477 ctype = signed_type_for (type); 1478 1479 /* If either operand is not a constant, do the conversions to the signed 1480 type and subtract. The hardware will do the right thing with any 1481 overflow in the subtraction. */ 1482 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST) 1483 return size_binop_loc (loc, MINUS_EXPR, 1484 fold_convert_loc (loc, ctype, arg0), 1485 fold_convert_loc (loc, ctype, arg1)); 1486 1487 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE. 1488 Otherwise, subtract the other way, convert to CTYPE (we know that can't 1489 overflow) and negate (which can't either). Special-case a result 1490 of zero while we're here. */ 1491 if (tree_int_cst_equal (arg0, arg1)) 1492 return build_int_cst (ctype, 0); 1493 else if (tree_int_cst_lt (arg1, arg0)) 1494 return fold_convert_loc (loc, ctype, 1495 size_binop_loc (loc, MINUS_EXPR, arg0, arg1)); 1496 else 1497 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0), 1498 fold_convert_loc (loc, ctype, 1499 size_binop_loc (loc, 1500 MINUS_EXPR, 1501 arg1, arg0))); 1502 } 1503 1504 /* A subroutine of fold_convert_const handling conversions of an 1505 INTEGER_CST to another integer type. */ 1506 1507 static tree 1508 fold_convert_const_int_from_int (tree type, const_tree arg1) 1509 { 1510 tree t; 1511 1512 /* Given an integer constant, make new constant with new type, 1513 appropriately sign-extended or truncated. */ 1514 t = force_fit_type_double (type, tree_to_double_int (arg1), 1515 !POINTER_TYPE_P (TREE_TYPE (arg1)), 1516 (TREE_INT_CST_HIGH (arg1) < 0 1517 && (TYPE_UNSIGNED (type) 1518 < TYPE_UNSIGNED (TREE_TYPE (arg1)))) 1519 | TREE_OVERFLOW (arg1)); 1520 1521 return t; 1522 } 1523 1524 /* A subroutine of fold_convert_const handling conversions a REAL_CST 1525 to an integer type. */ 1526 1527 static tree 1528 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1) 1529 { 1530 int overflow = 0; 1531 tree t; 1532 1533 /* The following code implements the floating point to integer 1534 conversion rules required by the Java Language Specification, 1535 that IEEE NaNs are mapped to zero and values that overflow 1536 the target precision saturate, i.e. values greater than 1537 INT_MAX are mapped to INT_MAX, and values less than INT_MIN 1538 are mapped to INT_MIN. These semantics are allowed by the 1539 C and C++ standards that simply state that the behavior of 1540 FP-to-integer conversion is unspecified upon overflow. */ 1541 1542 double_int val; 1543 REAL_VALUE_TYPE r; 1544 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1); 1545 1546 switch (code) 1547 { 1548 case FIX_TRUNC_EXPR: 1549 real_trunc (&r, VOIDmode, &x); 1550 break; 1551 1552 default: 1553 gcc_unreachable (); 1554 } 1555 1556 /* If R is NaN, return zero and show we have an overflow. */ 1557 if (REAL_VALUE_ISNAN (r)) 1558 { 1559 overflow = 1; 1560 val = double_int_zero; 1561 } 1562 1563 /* See if R is less than the lower bound or greater than the 1564 upper bound. */ 1565 1566 if (! overflow) 1567 { 1568 tree lt = TYPE_MIN_VALUE (type); 1569 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt); 1570 if (REAL_VALUES_LESS (r, l)) 1571 { 1572 overflow = 1; 1573 val = tree_to_double_int (lt); 1574 } 1575 } 1576 1577 if (! overflow) 1578 { 1579 tree ut = TYPE_MAX_VALUE (type); 1580 if (ut) 1581 { 1582 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut); 1583 if (REAL_VALUES_LESS (u, r)) 1584 { 1585 overflow = 1; 1586 val = tree_to_double_int (ut); 1587 } 1588 } 1589 } 1590 1591 if (! overflow) 1592 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r); 1593 1594 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1)); 1595 return t; 1596 } 1597 1598 /* A subroutine of fold_convert_const handling conversions of a 1599 FIXED_CST to an integer type. */ 1600 1601 static tree 1602 fold_convert_const_int_from_fixed (tree type, const_tree arg1) 1603 { 1604 tree t; 1605 double_int temp, temp_trunc; 1606 unsigned int mode; 1607 1608 /* Right shift FIXED_CST to temp by fbit. */ 1609 temp = TREE_FIXED_CST (arg1).data; 1610 mode = TREE_FIXED_CST (arg1).mode; 1611 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT) 1612 { 1613 temp = double_int_rshift (temp, GET_MODE_FBIT (mode), 1614 HOST_BITS_PER_DOUBLE_INT, 1615 SIGNED_FIXED_POINT_MODE_P (mode)); 1616 1617 /* Left shift temp to temp_trunc by fbit. */ 1618 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode), 1619 HOST_BITS_PER_DOUBLE_INT, 1620 SIGNED_FIXED_POINT_MODE_P (mode)); 1621 } 1622 else 1623 { 1624 temp = double_int_zero; 1625 temp_trunc = double_int_zero; 1626 } 1627 1628 /* If FIXED_CST is negative, we need to round the value toward 0. 1629 By checking if the fractional bits are not zero to add 1 to temp. */ 1630 if (SIGNED_FIXED_POINT_MODE_P (mode) 1631 && double_int_negative_p (temp_trunc) 1632 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc)) 1633 temp = double_int_add (temp, double_int_one); 1634 1635 /* Given a fixed-point constant, make new constant with new type, 1636 appropriately sign-extended or truncated. */ 1637 t = force_fit_type_double (type, temp, -1, 1638 (double_int_negative_p (temp) 1639 && (TYPE_UNSIGNED (type) 1640 < TYPE_UNSIGNED (TREE_TYPE (arg1)))) 1641 | TREE_OVERFLOW (arg1)); 1642 1643 return t; 1644 } 1645 1646 /* A subroutine of fold_convert_const handling conversions a REAL_CST 1647 to another floating point type. */ 1648 1649 static tree 1650 fold_convert_const_real_from_real (tree type, const_tree arg1) 1651 { 1652 REAL_VALUE_TYPE value; 1653 tree t; 1654 1655 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1)); 1656 t = build_real (type, value); 1657 1658 /* If converting an infinity or NAN to a representation that doesn't 1659 have one, set the overflow bit so that we can produce some kind of 1660 error message at the appropriate point if necessary. It's not the 1661 most user-friendly message, but it's better than nothing. */ 1662 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1)) 1663 && !MODE_HAS_INFINITIES (TYPE_MODE (type))) 1664 TREE_OVERFLOW (t) = 1; 1665 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)) 1666 && !MODE_HAS_NANS (TYPE_MODE (type))) 1667 TREE_OVERFLOW (t) = 1; 1668 /* Regular overflow, conversion produced an infinity in a mode that 1669 can't represent them. */ 1670 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) 1671 && REAL_VALUE_ISINF (value) 1672 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1))) 1673 TREE_OVERFLOW (t) = 1; 1674 else 1675 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1); 1676 return t; 1677 } 1678 1679 /* A subroutine of fold_convert_const handling conversions a FIXED_CST 1680 to a floating point type. */ 1681 1682 static tree 1683 fold_convert_const_real_from_fixed (tree type, const_tree arg1) 1684 { 1685 REAL_VALUE_TYPE value; 1686 tree t; 1687 1688 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1)); 1689 t = build_real (type, value); 1690 1691 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1); 1692 return t; 1693 } 1694 1695 /* A subroutine of fold_convert_const handling conversions a FIXED_CST 1696 to another fixed-point type. */ 1697 1698 static tree 1699 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1) 1700 { 1701 FIXED_VALUE_TYPE value; 1702 tree t; 1703 bool overflow_p; 1704 1705 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1), 1706 TYPE_SATURATING (type)); 1707 t = build_fixed (type, value); 1708 1709 /* Propagate overflow flags. */ 1710 if (overflow_p | TREE_OVERFLOW (arg1)) 1711 TREE_OVERFLOW (t) = 1; 1712 return t; 1713 } 1714 1715 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST 1716 to a fixed-point type. */ 1717 1718 static tree 1719 fold_convert_const_fixed_from_int (tree type, const_tree arg1) 1720 { 1721 FIXED_VALUE_TYPE value; 1722 tree t; 1723 bool overflow_p; 1724 1725 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), 1726 TREE_INT_CST (arg1), 1727 TYPE_UNSIGNED (TREE_TYPE (arg1)), 1728 TYPE_SATURATING (type)); 1729 t = build_fixed (type, value); 1730 1731 /* Propagate overflow flags. */ 1732 if (overflow_p | TREE_OVERFLOW (arg1)) 1733 TREE_OVERFLOW (t) = 1; 1734 return t; 1735 } 1736 1737 /* A subroutine of fold_convert_const handling conversions a REAL_CST 1738 to a fixed-point type. */ 1739 1740 static tree 1741 fold_convert_const_fixed_from_real (tree type, const_tree arg1) 1742 { 1743 FIXED_VALUE_TYPE value; 1744 tree t; 1745 bool overflow_p; 1746 1747 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type), 1748 &TREE_REAL_CST (arg1), 1749 TYPE_SATURATING (type)); 1750 t = build_fixed (type, value); 1751 1752 /* Propagate overflow flags. */ 1753 if (overflow_p | TREE_OVERFLOW (arg1)) 1754 TREE_OVERFLOW (t) = 1; 1755 return t; 1756 } 1757 1758 /* Attempt to fold type conversion operation CODE of expression ARG1 to 1759 type TYPE. If no simplification can be done return NULL_TREE. */ 1760 1761 static tree 1762 fold_convert_const (enum tree_code code, tree type, tree arg1) 1763 { 1764 if (TREE_TYPE (arg1) == type) 1765 return arg1; 1766 1767 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type) 1768 || TREE_CODE (type) == OFFSET_TYPE) 1769 { 1770 if (TREE_CODE (arg1) == INTEGER_CST) 1771 return fold_convert_const_int_from_int (type, arg1); 1772 else if (TREE_CODE (arg1) == REAL_CST) 1773 return fold_convert_const_int_from_real (code, type, arg1); 1774 else if (TREE_CODE (arg1) == FIXED_CST) 1775 return fold_convert_const_int_from_fixed (type, arg1); 1776 } 1777 else if (TREE_CODE (type) == REAL_TYPE) 1778 { 1779 if (TREE_CODE (arg1) == INTEGER_CST) 1780 return build_real_from_int_cst (type, arg1); 1781 else if (TREE_CODE (arg1) == REAL_CST) 1782 return fold_convert_const_real_from_real (type, arg1); 1783 else if (TREE_CODE (arg1) == FIXED_CST) 1784 return fold_convert_const_real_from_fixed (type, arg1); 1785 } 1786 else if (TREE_CODE (type) == FIXED_POINT_TYPE) 1787 { 1788 if (TREE_CODE (arg1) == FIXED_CST) 1789 return fold_convert_const_fixed_from_fixed (type, arg1); 1790 else if (TREE_CODE (arg1) == INTEGER_CST) 1791 return fold_convert_const_fixed_from_int (type, arg1); 1792 else if (TREE_CODE (arg1) == REAL_CST) 1793 return fold_convert_const_fixed_from_real (type, arg1); 1794 } 1795 return NULL_TREE; 1796 } 1797 1798 /* Construct a vector of zero elements of vector type TYPE. */ 1799 1800 static tree 1801 build_zero_vector (tree type) 1802 { 1803 tree t; 1804 1805 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node); 1806 return build_vector_from_val (type, t); 1807 } 1808 1809 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */ 1810 1811 bool 1812 fold_convertible_p (const_tree type, const_tree arg) 1813 { 1814 tree orig = TREE_TYPE (arg); 1815 1816 if (type == orig) 1817 return true; 1818 1819 if (TREE_CODE (arg) == ERROR_MARK 1820 || TREE_CODE (type) == ERROR_MARK 1821 || TREE_CODE (orig) == ERROR_MARK) 1822 return false; 1823 1824 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)) 1825 return true; 1826 1827 switch (TREE_CODE (type)) 1828 { 1829 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: 1830 case POINTER_TYPE: case REFERENCE_TYPE: 1831 case OFFSET_TYPE: 1832 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig) 1833 || TREE_CODE (orig) == OFFSET_TYPE) 1834 return true; 1835 return (TREE_CODE (orig) == VECTOR_TYPE 1836 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig))); 1837 1838 case REAL_TYPE: 1839 case FIXED_POINT_TYPE: 1840 case COMPLEX_TYPE: 1841 case VECTOR_TYPE: 1842 case VOID_TYPE: 1843 return TREE_CODE (type) == TREE_CODE (orig); 1844 1845 default: 1846 return false; 1847 } 1848 } 1849 1850 /* Convert expression ARG to type TYPE. Used by the middle-end for 1851 simple conversions in preference to calling the front-end's convert. */ 1852 1853 tree 1854 fold_convert_loc (location_t loc, tree type, tree arg) 1855 { 1856 tree orig = TREE_TYPE (arg); 1857 tree tem; 1858 1859 if (type == orig) 1860 return arg; 1861 1862 if (TREE_CODE (arg) == ERROR_MARK 1863 || TREE_CODE (type) == ERROR_MARK 1864 || TREE_CODE (orig) == ERROR_MARK) 1865 return error_mark_node; 1866 1867 switch (TREE_CODE (type)) 1868 { 1869 case POINTER_TYPE: 1870 case REFERENCE_TYPE: 1871 /* Handle conversions between pointers to different address spaces. */ 1872 if (POINTER_TYPE_P (orig) 1873 && (TYPE_ADDR_SPACE (TREE_TYPE (type)) 1874 != TYPE_ADDR_SPACE (TREE_TYPE (orig)))) 1875 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg); 1876 /* fall through */ 1877 1878 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: 1879 case OFFSET_TYPE: 1880 if (TREE_CODE (arg) == INTEGER_CST) 1881 { 1882 tem = fold_convert_const (NOP_EXPR, type, arg); 1883 if (tem != NULL_TREE) 1884 return tem; 1885 } 1886 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig) 1887 || TREE_CODE (orig) == OFFSET_TYPE) 1888 return fold_build1_loc (loc, NOP_EXPR, type, arg); 1889 if (TREE_CODE (orig) == COMPLEX_TYPE) 1890 return fold_convert_loc (loc, type, 1891 fold_build1_loc (loc, REALPART_EXPR, 1892 TREE_TYPE (orig), arg)); 1893 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE 1894 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig))); 1895 return fold_build1_loc (loc, NOP_EXPR, type, arg); 1896 1897 case REAL_TYPE: 1898 if (TREE_CODE (arg) == INTEGER_CST) 1899 { 1900 tem = fold_convert_const (FLOAT_EXPR, type, arg); 1901 if (tem != NULL_TREE) 1902 return tem; 1903 } 1904 else if (TREE_CODE (arg) == REAL_CST) 1905 { 1906 tem = fold_convert_const (NOP_EXPR, type, arg); 1907 if (tem != NULL_TREE) 1908 return tem; 1909 } 1910 else if (TREE_CODE (arg) == FIXED_CST) 1911 { 1912 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg); 1913 if (tem != NULL_TREE) 1914 return tem; 1915 } 1916 1917 switch (TREE_CODE (orig)) 1918 { 1919 case INTEGER_TYPE: 1920 case BOOLEAN_TYPE: case ENUMERAL_TYPE: 1921 case POINTER_TYPE: case REFERENCE_TYPE: 1922 return fold_build1_loc (loc, FLOAT_EXPR, type, arg); 1923 1924 case REAL_TYPE: 1925 return fold_build1_loc (loc, NOP_EXPR, type, arg); 1926 1927 case FIXED_POINT_TYPE: 1928 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg); 1929 1930 case COMPLEX_TYPE: 1931 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg); 1932 return fold_convert_loc (loc, type, tem); 1933 1934 default: 1935 gcc_unreachable (); 1936 } 1937 1938 case FIXED_POINT_TYPE: 1939 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST 1940 || TREE_CODE (arg) == REAL_CST) 1941 { 1942 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg); 1943 if (tem != NULL_TREE) 1944 goto fold_convert_exit; 1945 } 1946 1947 switch (TREE_CODE (orig)) 1948 { 1949 case FIXED_POINT_TYPE: 1950 case INTEGER_TYPE: 1951 case ENUMERAL_TYPE: 1952 case BOOLEAN_TYPE: 1953 case REAL_TYPE: 1954 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg); 1955 1956 case COMPLEX_TYPE: 1957 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg); 1958 return fold_convert_loc (loc, type, tem); 1959 1960 default: 1961 gcc_unreachable (); 1962 } 1963 1964 case COMPLEX_TYPE: 1965 switch (TREE_CODE (orig)) 1966 { 1967 case INTEGER_TYPE: 1968 case BOOLEAN_TYPE: case ENUMERAL_TYPE: 1969 case POINTER_TYPE: case REFERENCE_TYPE: 1970 case REAL_TYPE: 1971 case FIXED_POINT_TYPE: 1972 return fold_build2_loc (loc, COMPLEX_EXPR, type, 1973 fold_convert_loc (loc, TREE_TYPE (type), arg), 1974 fold_convert_loc (loc, TREE_TYPE (type), 1975 integer_zero_node)); 1976 case COMPLEX_TYPE: 1977 { 1978 tree rpart, ipart; 1979 1980 if (TREE_CODE (arg) == COMPLEX_EXPR) 1981 { 1982 rpart = fold_convert_loc (loc, TREE_TYPE (type), 1983 TREE_OPERAND (arg, 0)); 1984 ipart = fold_convert_loc (loc, TREE_TYPE (type), 1985 TREE_OPERAND (arg, 1)); 1986 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart); 1987 } 1988 1989 arg = save_expr (arg); 1990 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg); 1991 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg); 1992 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart); 1993 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart); 1994 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart); 1995 } 1996 1997 default: 1998 gcc_unreachable (); 1999 } 2000 2001 case VECTOR_TYPE: 2002 if (integer_zerop (arg)) 2003 return build_zero_vector (type); 2004 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig))); 2005 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig) 2006 || TREE_CODE (orig) == VECTOR_TYPE); 2007 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg); 2008 2009 case VOID_TYPE: 2010 tem = fold_ignored_result (arg); 2011 return fold_build1_loc (loc, NOP_EXPR, type, tem); 2012 2013 default: 2014 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)) 2015 return fold_build1_loc (loc, NOP_EXPR, type, arg); 2016 gcc_unreachable (); 2017 } 2018 fold_convert_exit: 2019 protected_set_expr_location_unshare (tem, loc); 2020 return tem; 2021 } 2022 2023 /* Return false if expr can be assumed not to be an lvalue, true 2024 otherwise. */ 2025 2026 static bool 2027 maybe_lvalue_p (const_tree x) 2028 { 2029 /* We only need to wrap lvalue tree codes. */ 2030 switch (TREE_CODE (x)) 2031 { 2032 case VAR_DECL: 2033 case PARM_DECL: 2034 case RESULT_DECL: 2035 case LABEL_DECL: 2036 case FUNCTION_DECL: 2037 case SSA_NAME: 2038 2039 case COMPONENT_REF: 2040 case MEM_REF: 2041 case INDIRECT_REF: 2042 case ARRAY_REF: 2043 case ARRAY_RANGE_REF: 2044 case BIT_FIELD_REF: 2045 case OBJ_TYPE_REF: 2046 2047 case REALPART_EXPR: 2048 case IMAGPART_EXPR: 2049 case PREINCREMENT_EXPR: 2050 case PREDECREMENT_EXPR: 2051 case SAVE_EXPR: 2052 case TRY_CATCH_EXPR: 2053 case WITH_CLEANUP_EXPR: 2054 case COMPOUND_EXPR: 2055 case MODIFY_EXPR: 2056 case TARGET_EXPR: 2057 case COND_EXPR: 2058 case BIND_EXPR: 2059 break; 2060 2061 default: 2062 /* Assume the worst for front-end tree codes. */ 2063 if ((int)TREE_CODE (x) >= NUM_TREE_CODES) 2064 break; 2065 return false; 2066 } 2067 2068 return true; 2069 } 2070 2071 /* Return an expr equal to X but certainly not valid as an lvalue. */ 2072 2073 tree 2074 non_lvalue_loc (location_t loc, tree x) 2075 { 2076 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to 2077 us. */ 2078 if (in_gimple_form) 2079 return x; 2080 2081 if (! maybe_lvalue_p (x)) 2082 return x; 2083 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x); 2084 } 2085 2086 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C. 2087 Zero means allow extended lvalues. */ 2088 2089 int pedantic_lvalues; 2090 2091 /* When pedantic, return an expr equal to X but certainly not valid as a 2092 pedantic lvalue. Otherwise, return X. */ 2093 2094 static tree 2095 pedantic_non_lvalue_loc (location_t loc, tree x) 2096 { 2097 if (pedantic_lvalues) 2098 return non_lvalue_loc (loc, x); 2099 2100 return protected_set_expr_location_unshare (x, loc); 2101 } 2102 2103 /* Given a tree comparison code, return the code that is the logical inverse. 2104 It is generally not safe to do this for floating-point comparisons, except 2105 for EQ_EXPR and NE_EXPR, so we return ERROR_MARK in this case. */ 2106 2107 enum tree_code 2108 invert_tree_comparison (enum tree_code code, bool honor_nans) 2109 { 2110 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR) 2111 return ERROR_MARK; 2112 2113 switch (code) 2114 { 2115 case EQ_EXPR: 2116 return NE_EXPR; 2117 case NE_EXPR: 2118 return EQ_EXPR; 2119 case GT_EXPR: 2120 return honor_nans ? UNLE_EXPR : LE_EXPR; 2121 case GE_EXPR: 2122 return honor_nans ? UNLT_EXPR : LT_EXPR; 2123 case LT_EXPR: 2124 return honor_nans ? UNGE_EXPR : GE_EXPR; 2125 case LE_EXPR: 2126 return honor_nans ? UNGT_EXPR : GT_EXPR; 2127 case LTGT_EXPR: 2128 return UNEQ_EXPR; 2129 case UNEQ_EXPR: 2130 return LTGT_EXPR; 2131 case UNGT_EXPR: 2132 return LE_EXPR; 2133 case UNGE_EXPR: 2134 return LT_EXPR; 2135 case UNLT_EXPR: 2136 return GE_EXPR; 2137 case UNLE_EXPR: 2138 return GT_EXPR; 2139 case ORDERED_EXPR: 2140 return UNORDERED_EXPR; 2141 case UNORDERED_EXPR: 2142 return ORDERED_EXPR; 2143 default: 2144 gcc_unreachable (); 2145 } 2146 } 2147 2148 /* Similar, but return the comparison that results if the operands are 2149 swapped. This is safe for floating-point. */ 2150 2151 enum tree_code 2152 swap_tree_comparison (enum tree_code code) 2153 { 2154 switch (code) 2155 { 2156 case EQ_EXPR: 2157 case NE_EXPR: 2158 case ORDERED_EXPR: 2159 case UNORDERED_EXPR: 2160 case LTGT_EXPR: 2161 case UNEQ_EXPR: 2162 return code; 2163 case GT_EXPR: 2164 return LT_EXPR; 2165 case GE_EXPR: 2166 return LE_EXPR; 2167 case LT_EXPR: 2168 return GT_EXPR; 2169 case LE_EXPR: 2170 return GE_EXPR; 2171 case UNGT_EXPR: 2172 return UNLT_EXPR; 2173 case UNGE_EXPR: 2174 return UNLE_EXPR; 2175 case UNLT_EXPR: 2176 return UNGT_EXPR; 2177 case UNLE_EXPR: 2178 return UNGE_EXPR; 2179 default: 2180 gcc_unreachable (); 2181 } 2182 } 2183 2184 2185 /* Convert a comparison tree code from an enum tree_code representation 2186 into a compcode bit-based encoding. This function is the inverse of 2187 compcode_to_comparison. */ 2188 2189 static enum comparison_code 2190 comparison_to_compcode (enum tree_code code) 2191 { 2192 switch (code) 2193 { 2194 case LT_EXPR: 2195 return COMPCODE_LT; 2196 case EQ_EXPR: 2197 return COMPCODE_EQ; 2198 case LE_EXPR: 2199 return COMPCODE_LE; 2200 case GT_EXPR: 2201 return COMPCODE_GT; 2202 case NE_EXPR: 2203 return COMPCODE_NE; 2204 case GE_EXPR: 2205 return COMPCODE_GE; 2206 case ORDERED_EXPR: 2207 return COMPCODE_ORD; 2208 case UNORDERED_EXPR: 2209 return COMPCODE_UNORD; 2210 case UNLT_EXPR: 2211 return COMPCODE_UNLT; 2212 case UNEQ_EXPR: 2213 return COMPCODE_UNEQ; 2214 case UNLE_EXPR: 2215 return COMPCODE_UNLE; 2216 case UNGT_EXPR: 2217 return COMPCODE_UNGT; 2218 case LTGT_EXPR: 2219 return COMPCODE_LTGT; 2220 case UNGE_EXPR: 2221 return COMPCODE_UNGE; 2222 default: 2223 gcc_unreachable (); 2224 } 2225 } 2226 2227 /* Convert a compcode bit-based encoding of a comparison operator back 2228 to GCC's enum tree_code representation. This function is the 2229 inverse of comparison_to_compcode. */ 2230 2231 static enum tree_code 2232 compcode_to_comparison (enum comparison_code code) 2233 { 2234 switch (code) 2235 { 2236 case COMPCODE_LT: 2237 return LT_EXPR; 2238 case COMPCODE_EQ: 2239 return EQ_EXPR; 2240 case COMPCODE_LE: 2241 return LE_EXPR; 2242 case COMPCODE_GT: 2243 return GT_EXPR; 2244 case COMPCODE_NE: 2245 return NE_EXPR; 2246 case COMPCODE_GE: 2247 return GE_EXPR; 2248 case COMPCODE_ORD: 2249 return ORDERED_EXPR; 2250 case COMPCODE_UNORD: 2251 return UNORDERED_EXPR; 2252 case COMPCODE_UNLT: 2253 return UNLT_EXPR; 2254 case COMPCODE_UNEQ: 2255 return UNEQ_EXPR; 2256 case COMPCODE_UNLE: 2257 return UNLE_EXPR; 2258 case COMPCODE_UNGT: 2259 return UNGT_EXPR; 2260 case COMPCODE_LTGT: 2261 return LTGT_EXPR; 2262 case COMPCODE_UNGE: 2263 return UNGE_EXPR; 2264 default: 2265 gcc_unreachable (); 2266 } 2267 } 2268 2269 /* Return a tree for the comparison which is the combination of 2270 doing the AND or OR (depending on CODE) of the two operations LCODE 2271 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account 2272 the possibility of trapping if the mode has NaNs, and return NULL_TREE 2273 if this makes the transformation invalid. */ 2274 2275 tree 2276 combine_comparisons (location_t loc, 2277 enum tree_code code, enum tree_code lcode, 2278 enum tree_code rcode, tree truth_type, 2279 tree ll_arg, tree lr_arg) 2280 { 2281 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg))); 2282 enum comparison_code lcompcode = comparison_to_compcode (lcode); 2283 enum comparison_code rcompcode = comparison_to_compcode (rcode); 2284 int compcode; 2285 2286 switch (code) 2287 { 2288 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR: 2289 compcode = lcompcode & rcompcode; 2290 break; 2291 2292 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR: 2293 compcode = lcompcode | rcompcode; 2294 break; 2295 2296 default: 2297 return NULL_TREE; 2298 } 2299 2300 if (!honor_nans) 2301 { 2302 /* Eliminate unordered comparisons, as well as LTGT and ORD 2303 which are not used unless the mode has NaNs. */ 2304 compcode &= ~COMPCODE_UNORD; 2305 if (compcode == COMPCODE_LTGT) 2306 compcode = COMPCODE_NE; 2307 else if (compcode == COMPCODE_ORD) 2308 compcode = COMPCODE_TRUE; 2309 } 2310 else if (flag_trapping_math) 2311 { 2312 /* Check that the original operation and the optimized ones will trap 2313 under the same condition. */ 2314 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0 2315 && (lcompcode != COMPCODE_EQ) 2316 && (lcompcode != COMPCODE_ORD); 2317 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0 2318 && (rcompcode != COMPCODE_EQ) 2319 && (rcompcode != COMPCODE_ORD); 2320 bool trap = (compcode & COMPCODE_UNORD) == 0 2321 && (compcode != COMPCODE_EQ) 2322 && (compcode != COMPCODE_ORD); 2323 2324 /* In a short-circuited boolean expression the LHS might be 2325 such that the RHS, if evaluated, will never trap. For 2326 example, in ORD (x, y) && (x < y), we evaluate the RHS only 2327 if neither x nor y is NaN. (This is a mixed blessing: for 2328 example, the expression above will never trap, hence 2329 optimizing it to x < y would be invalid). */ 2330 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD)) 2331 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD))) 2332 rtrap = false; 2333 2334 /* If the comparison was short-circuited, and only the RHS 2335 trapped, we may now generate a spurious trap. */ 2336 if (rtrap && !ltrap 2337 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)) 2338 return NULL_TREE; 2339 2340 /* If we changed the conditions that cause a trap, we lose. */ 2341 if ((ltrap || rtrap) != trap) 2342 return NULL_TREE; 2343 } 2344 2345 if (compcode == COMPCODE_TRUE) 2346 return constant_boolean_node (true, truth_type); 2347 else if (compcode == COMPCODE_FALSE) 2348 return constant_boolean_node (false, truth_type); 2349 else 2350 { 2351 enum tree_code tcode; 2352 2353 tcode = compcode_to_comparison ((enum comparison_code) compcode); 2354 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg); 2355 } 2356 } 2357 2358 /* Return nonzero if two operands (typically of the same tree node) 2359 are necessarily equal. If either argument has side-effects this 2360 function returns zero. FLAGS modifies behavior as follows: 2361 2362 If OEP_ONLY_CONST is set, only return nonzero for constants. 2363 This function tests whether the operands are indistinguishable; 2364 it does not test whether they are equal using C's == operation. 2365 The distinction is important for IEEE floating point, because 2366 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and 2367 (2) two NaNs may be indistinguishable, but NaN!=NaN. 2368 2369 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself 2370 even though it may hold multiple values during a function. 2371 This is because a GCC tree node guarantees that nothing else is 2372 executed between the evaluation of its "operands" (which may often 2373 be evaluated in arbitrary order). Hence if the operands themselves 2374 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the 2375 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST 2376 unset means assuming isochronic (or instantaneous) tree equivalence. 2377 Unless comparing arbitrary expression trees, such as from different 2378 statements, this flag can usually be left unset. 2379 2380 If OEP_PURE_SAME is set, then pure functions with identical arguments 2381 are considered the same. It is used when the caller has other ways 2382 to ensure that global memory is unchanged in between. */ 2383 2384 int 2385 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags) 2386 { 2387 /* If either is ERROR_MARK, they aren't equal. */ 2388 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK 2389 || TREE_TYPE (arg0) == error_mark_node 2390 || TREE_TYPE (arg1) == error_mark_node) 2391 return 0; 2392 2393 /* Similar, if either does not have a type (like a released SSA name), 2394 they aren't equal. */ 2395 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1)) 2396 return 0; 2397 2398 /* Check equality of integer constants before bailing out due to 2399 precision differences. */ 2400 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST) 2401 return tree_int_cst_equal (arg0, arg1); 2402 2403 /* If both types don't have the same signedness, then we can't consider 2404 them equal. We must check this before the STRIP_NOPS calls 2405 because they may change the signedness of the arguments. As pointers 2406 strictly don't have a signedness, require either two pointers or 2407 two non-pointers as well. */ 2408 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)) 2409 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1))) 2410 return 0; 2411 2412 /* We cannot consider pointers to different address space equal. */ 2413 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1)) 2414 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))) 2415 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1))))) 2416 return 0; 2417 2418 /* If both types don't have the same precision, then it is not safe 2419 to strip NOPs. */ 2420 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1))) 2421 return 0; 2422 2423 STRIP_NOPS (arg0); 2424 STRIP_NOPS (arg1); 2425 2426 /* In case both args are comparisons but with different comparison 2427 code, try to swap the comparison operands of one arg to produce 2428 a match and compare that variant. */ 2429 if (TREE_CODE (arg0) != TREE_CODE (arg1) 2430 && COMPARISON_CLASS_P (arg0) 2431 && COMPARISON_CLASS_P (arg1)) 2432 { 2433 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1)); 2434 2435 if (TREE_CODE (arg0) == swap_code) 2436 return operand_equal_p (TREE_OPERAND (arg0, 0), 2437 TREE_OPERAND (arg1, 1), flags) 2438 && operand_equal_p (TREE_OPERAND (arg0, 1), 2439 TREE_OPERAND (arg1, 0), flags); 2440 } 2441 2442 if (TREE_CODE (arg0) != TREE_CODE (arg1) 2443 /* This is needed for conversions and for COMPONENT_REF. 2444 Might as well play it safe and always test this. */ 2445 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK 2446 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK 2447 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))) 2448 return 0; 2449 2450 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal. 2451 We don't care about side effects in that case because the SAVE_EXPR 2452 takes care of that for us. In all other cases, two expressions are 2453 equal if they have no side effects. If we have two identical 2454 expressions with side effects that should be treated the same due 2455 to the only side effects being identical SAVE_EXPR's, that will 2456 be detected in the recursive calls below. 2457 If we are taking an invariant address of two identical objects 2458 they are necessarily equal as well. */ 2459 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST) 2460 && (TREE_CODE (arg0) == SAVE_EXPR 2461 || (flags & OEP_CONSTANT_ADDRESS_OF) 2462 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1)))) 2463 return 1; 2464 2465 /* Next handle constant cases, those for which we can return 1 even 2466 if ONLY_CONST is set. */ 2467 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)) 2468 switch (TREE_CODE (arg0)) 2469 { 2470 case INTEGER_CST: 2471 return tree_int_cst_equal (arg0, arg1); 2472 2473 case FIXED_CST: 2474 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0), 2475 TREE_FIXED_CST (arg1)); 2476 2477 case REAL_CST: 2478 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0), 2479 TREE_REAL_CST (arg1))) 2480 return 1; 2481 2482 2483 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))) 2484 { 2485 /* If we do not distinguish between signed and unsigned zero, 2486 consider them equal. */ 2487 if (real_zerop (arg0) && real_zerop (arg1)) 2488 return 1; 2489 } 2490 return 0; 2491 2492 case VECTOR_CST: 2493 { 2494 tree v1, v2; 2495 2496 v1 = TREE_VECTOR_CST_ELTS (arg0); 2497 v2 = TREE_VECTOR_CST_ELTS (arg1); 2498 while (v1 && v2) 2499 { 2500 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2), 2501 flags)) 2502 return 0; 2503 v1 = TREE_CHAIN (v1); 2504 v2 = TREE_CHAIN (v2); 2505 } 2506 2507 return v1 == v2; 2508 } 2509 2510 case COMPLEX_CST: 2511 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1), 2512 flags) 2513 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1), 2514 flags)); 2515 2516 case STRING_CST: 2517 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1) 2518 && ! memcmp (TREE_STRING_POINTER (arg0), 2519 TREE_STRING_POINTER (arg1), 2520 TREE_STRING_LENGTH (arg0))); 2521 2522 case ADDR_EXPR: 2523 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 2524 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1) 2525 ? OEP_CONSTANT_ADDRESS_OF : 0); 2526 default: 2527 break; 2528 } 2529 2530 if (flags & OEP_ONLY_CONST) 2531 return 0; 2532 2533 /* Define macros to test an operand from arg0 and arg1 for equality and a 2534 variant that allows null and views null as being different from any 2535 non-null value. In the latter case, if either is null, the both 2536 must be; otherwise, do the normal comparison. */ 2537 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \ 2538 TREE_OPERAND (arg1, N), flags) 2539 2540 #define OP_SAME_WITH_NULL(N) \ 2541 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \ 2542 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N)) 2543 2544 switch (TREE_CODE_CLASS (TREE_CODE (arg0))) 2545 { 2546 case tcc_unary: 2547 /* Two conversions are equal only if signedness and modes match. */ 2548 switch (TREE_CODE (arg0)) 2549 { 2550 CASE_CONVERT: 2551 case FIX_TRUNC_EXPR: 2552 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) 2553 != TYPE_UNSIGNED (TREE_TYPE (arg1))) 2554 return 0; 2555 break; 2556 default: 2557 break; 2558 } 2559 2560 return OP_SAME (0); 2561 2562 2563 case tcc_comparison: 2564 case tcc_binary: 2565 if (OP_SAME (0) && OP_SAME (1)) 2566 return 1; 2567 2568 /* For commutative ops, allow the other order. */ 2569 return (commutative_tree_code (TREE_CODE (arg0)) 2570 && operand_equal_p (TREE_OPERAND (arg0, 0), 2571 TREE_OPERAND (arg1, 1), flags) 2572 && operand_equal_p (TREE_OPERAND (arg0, 1), 2573 TREE_OPERAND (arg1, 0), flags)); 2574 2575 case tcc_reference: 2576 /* If either of the pointer (or reference) expressions we are 2577 dereferencing contain a side effect, these cannot be equal. */ 2578 if (TREE_SIDE_EFFECTS (arg0) 2579 || TREE_SIDE_EFFECTS (arg1)) 2580 return 0; 2581 2582 switch (TREE_CODE (arg0)) 2583 { 2584 case INDIRECT_REF: 2585 case REALPART_EXPR: 2586 case IMAGPART_EXPR: 2587 return OP_SAME (0); 2588 2589 case MEM_REF: 2590 /* Require equal access sizes, and similar pointer types. 2591 We can have incomplete types for array references of 2592 variable-sized arrays from the Fortran frontent 2593 though. */ 2594 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1)) 2595 || (TYPE_SIZE (TREE_TYPE (arg0)) 2596 && TYPE_SIZE (TREE_TYPE (arg1)) 2597 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)), 2598 TYPE_SIZE (TREE_TYPE (arg1)), flags))) 2599 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1))) 2600 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1)))) 2601 && OP_SAME (0) && OP_SAME (1)); 2602 2603 case ARRAY_REF: 2604 case ARRAY_RANGE_REF: 2605 /* Operands 2 and 3 may be null. 2606 Compare the array index by value if it is constant first as we 2607 may have different types but same value here. */ 2608 return (OP_SAME (0) 2609 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1), 2610 TREE_OPERAND (arg1, 1)) 2611 || OP_SAME (1)) 2612 && OP_SAME_WITH_NULL (2) 2613 && OP_SAME_WITH_NULL (3)); 2614 2615 case COMPONENT_REF: 2616 /* Handle operand 2 the same as for ARRAY_REF. Operand 0 2617 may be NULL when we're called to compare MEM_EXPRs. */ 2618 return OP_SAME_WITH_NULL (0) 2619 && OP_SAME (1) 2620 && OP_SAME_WITH_NULL (2); 2621 2622 case BIT_FIELD_REF: 2623 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2); 2624 2625 default: 2626 return 0; 2627 } 2628 2629 case tcc_expression: 2630 switch (TREE_CODE (arg0)) 2631 { 2632 case ADDR_EXPR: 2633 case TRUTH_NOT_EXPR: 2634 return OP_SAME (0); 2635 2636 case TRUTH_ANDIF_EXPR: 2637 case TRUTH_ORIF_EXPR: 2638 return OP_SAME (0) && OP_SAME (1); 2639 2640 case FMA_EXPR: 2641 case WIDEN_MULT_PLUS_EXPR: 2642 case WIDEN_MULT_MINUS_EXPR: 2643 if (!OP_SAME (2)) 2644 return 0; 2645 /* The multiplcation operands are commutative. */ 2646 /* FALLTHRU */ 2647 2648 case TRUTH_AND_EXPR: 2649 case TRUTH_OR_EXPR: 2650 case TRUTH_XOR_EXPR: 2651 if (OP_SAME (0) && OP_SAME (1)) 2652 return 1; 2653 2654 /* Otherwise take into account this is a commutative operation. */ 2655 return (operand_equal_p (TREE_OPERAND (arg0, 0), 2656 TREE_OPERAND (arg1, 1), flags) 2657 && operand_equal_p (TREE_OPERAND (arg0, 1), 2658 TREE_OPERAND (arg1, 0), flags)); 2659 2660 case COND_EXPR: 2661 case VEC_COND_EXPR: 2662 case DOT_PROD_EXPR: 2663 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2); 2664 2665 default: 2666 return 0; 2667 } 2668 2669 case tcc_vl_exp: 2670 switch (TREE_CODE (arg0)) 2671 { 2672 case CALL_EXPR: 2673 /* If the CALL_EXPRs call different functions, then they 2674 clearly can not be equal. */ 2675 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1), 2676 flags)) 2677 return 0; 2678 2679 { 2680 unsigned int cef = call_expr_flags (arg0); 2681 if (flags & OEP_PURE_SAME) 2682 cef &= ECF_CONST | ECF_PURE; 2683 else 2684 cef &= ECF_CONST; 2685 if (!cef) 2686 return 0; 2687 } 2688 2689 /* Now see if all the arguments are the same. */ 2690 { 2691 const_call_expr_arg_iterator iter0, iter1; 2692 const_tree a0, a1; 2693 for (a0 = first_const_call_expr_arg (arg0, &iter0), 2694 a1 = first_const_call_expr_arg (arg1, &iter1); 2695 a0 && a1; 2696 a0 = next_const_call_expr_arg (&iter0), 2697 a1 = next_const_call_expr_arg (&iter1)) 2698 if (! operand_equal_p (a0, a1, flags)) 2699 return 0; 2700 2701 /* If we get here and both argument lists are exhausted 2702 then the CALL_EXPRs are equal. */ 2703 return ! (a0 || a1); 2704 } 2705 default: 2706 return 0; 2707 } 2708 2709 case tcc_declaration: 2710 /* Consider __builtin_sqrt equal to sqrt. */ 2711 return (TREE_CODE (arg0) == FUNCTION_DECL 2712 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1) 2713 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1) 2714 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1)); 2715 2716 default: 2717 return 0; 2718 } 2719 2720 #undef OP_SAME 2721 #undef OP_SAME_WITH_NULL 2722 } 2723 2724 /* Similar to operand_equal_p, but see if ARG0 might have been made by 2725 shorten_compare from ARG1 when ARG1 was being compared with OTHER. 2726 2727 When in doubt, return 0. */ 2728 2729 static int 2730 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other) 2731 { 2732 int unsignedp1, unsignedpo; 2733 tree primarg0, primarg1, primother; 2734 unsigned int correct_width; 2735 2736 if (operand_equal_p (arg0, arg1, 0)) 2737 return 1; 2738 2739 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)) 2740 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1))) 2741 return 0; 2742 2743 /* Discard any conversions that don't change the modes of ARG0 and ARG1 2744 and see if the inner values are the same. This removes any 2745 signedness comparison, which doesn't matter here. */ 2746 primarg0 = arg0, primarg1 = arg1; 2747 STRIP_NOPS (primarg0); 2748 STRIP_NOPS (primarg1); 2749 if (operand_equal_p (primarg0, primarg1, 0)) 2750 return 1; 2751 2752 /* Duplicate what shorten_compare does to ARG1 and see if that gives the 2753 actual comparison operand, ARG0. 2754 2755 First throw away any conversions to wider types 2756 already present in the operands. */ 2757 2758 primarg1 = get_narrower (arg1, &unsignedp1); 2759 primother = get_narrower (other, &unsignedpo); 2760 2761 correct_width = TYPE_PRECISION (TREE_TYPE (arg1)); 2762 if (unsignedp1 == unsignedpo 2763 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width 2764 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width) 2765 { 2766 tree type = TREE_TYPE (arg0); 2767 2768 /* Make sure shorter operand is extended the right way 2769 to match the longer operand. */ 2770 primarg1 = fold_convert (signed_or_unsigned_type_for 2771 (unsignedp1, TREE_TYPE (primarg1)), primarg1); 2772 2773 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0)) 2774 return 1; 2775 } 2776 2777 return 0; 2778 } 2779 2780 /* See if ARG is an expression that is either a comparison or is performing 2781 arithmetic on comparisons. The comparisons must only be comparing 2782 two different values, which will be stored in *CVAL1 and *CVAL2; if 2783 they are nonzero it means that some operands have already been found. 2784 No variables may be used anywhere else in the expression except in the 2785 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around 2786 the expression and save_expr needs to be called with CVAL1 and CVAL2. 2787 2788 If this is true, return 1. Otherwise, return zero. */ 2789 2790 static int 2791 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p) 2792 { 2793 enum tree_code code = TREE_CODE (arg); 2794 enum tree_code_class tclass = TREE_CODE_CLASS (code); 2795 2796 /* We can handle some of the tcc_expression cases here. */ 2797 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR) 2798 tclass = tcc_unary; 2799 else if (tclass == tcc_expression 2800 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR 2801 || code == COMPOUND_EXPR)) 2802 tclass = tcc_binary; 2803 2804 else if (tclass == tcc_expression && code == SAVE_EXPR 2805 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0))) 2806 { 2807 /* If we've already found a CVAL1 or CVAL2, this expression is 2808 two complex to handle. */ 2809 if (*cval1 || *cval2) 2810 return 0; 2811 2812 tclass = tcc_unary; 2813 *save_p = 1; 2814 } 2815 2816 switch (tclass) 2817 { 2818 case tcc_unary: 2819 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p); 2820 2821 case tcc_binary: 2822 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p) 2823 && twoval_comparison_p (TREE_OPERAND (arg, 1), 2824 cval1, cval2, save_p)); 2825 2826 case tcc_constant: 2827 return 1; 2828 2829 case tcc_expression: 2830 if (code == COND_EXPR) 2831 return (twoval_comparison_p (TREE_OPERAND (arg, 0), 2832 cval1, cval2, save_p) 2833 && twoval_comparison_p (TREE_OPERAND (arg, 1), 2834 cval1, cval2, save_p) 2835 && twoval_comparison_p (TREE_OPERAND (arg, 2), 2836 cval1, cval2, save_p)); 2837 return 0; 2838 2839 case tcc_comparison: 2840 /* First see if we can handle the first operand, then the second. For 2841 the second operand, we know *CVAL1 can't be zero. It must be that 2842 one side of the comparison is each of the values; test for the 2843 case where this isn't true by failing if the two operands 2844 are the same. */ 2845 2846 if (operand_equal_p (TREE_OPERAND (arg, 0), 2847 TREE_OPERAND (arg, 1), 0)) 2848 return 0; 2849 2850 if (*cval1 == 0) 2851 *cval1 = TREE_OPERAND (arg, 0); 2852 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0)) 2853 ; 2854 else if (*cval2 == 0) 2855 *cval2 = TREE_OPERAND (arg, 0); 2856 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0)) 2857 ; 2858 else 2859 return 0; 2860 2861 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0)) 2862 ; 2863 else if (*cval2 == 0) 2864 *cval2 = TREE_OPERAND (arg, 1); 2865 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0)) 2866 ; 2867 else 2868 return 0; 2869 2870 return 1; 2871 2872 default: 2873 return 0; 2874 } 2875 } 2876 2877 /* ARG is a tree that is known to contain just arithmetic operations and 2878 comparisons. Evaluate the operations in the tree substituting NEW0 for 2879 any occurrence of OLD0 as an operand of a comparison and likewise for 2880 NEW1 and OLD1. */ 2881 2882 static tree 2883 eval_subst (location_t loc, tree arg, tree old0, tree new0, 2884 tree old1, tree new1) 2885 { 2886 tree type = TREE_TYPE (arg); 2887 enum tree_code code = TREE_CODE (arg); 2888 enum tree_code_class tclass = TREE_CODE_CLASS (code); 2889 2890 /* We can handle some of the tcc_expression cases here. */ 2891 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR) 2892 tclass = tcc_unary; 2893 else if (tclass == tcc_expression 2894 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)) 2895 tclass = tcc_binary; 2896 2897 switch (tclass) 2898 { 2899 case tcc_unary: 2900 return fold_build1_loc (loc, code, type, 2901 eval_subst (loc, TREE_OPERAND (arg, 0), 2902 old0, new0, old1, new1)); 2903 2904 case tcc_binary: 2905 return fold_build2_loc (loc, code, type, 2906 eval_subst (loc, TREE_OPERAND (arg, 0), 2907 old0, new0, old1, new1), 2908 eval_subst (loc, TREE_OPERAND (arg, 1), 2909 old0, new0, old1, new1)); 2910 2911 case tcc_expression: 2912 switch (code) 2913 { 2914 case SAVE_EXPR: 2915 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0, 2916 old1, new1); 2917 2918 case COMPOUND_EXPR: 2919 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0, 2920 old1, new1); 2921 2922 case COND_EXPR: 2923 return fold_build3_loc (loc, code, type, 2924 eval_subst (loc, TREE_OPERAND (arg, 0), 2925 old0, new0, old1, new1), 2926 eval_subst (loc, TREE_OPERAND (arg, 1), 2927 old0, new0, old1, new1), 2928 eval_subst (loc, TREE_OPERAND (arg, 2), 2929 old0, new0, old1, new1)); 2930 default: 2931 break; 2932 } 2933 /* Fall through - ??? */ 2934 2935 case tcc_comparison: 2936 { 2937 tree arg0 = TREE_OPERAND (arg, 0); 2938 tree arg1 = TREE_OPERAND (arg, 1); 2939 2940 /* We need to check both for exact equality and tree equality. The 2941 former will be true if the operand has a side-effect. In that 2942 case, we know the operand occurred exactly once. */ 2943 2944 if (arg0 == old0 || operand_equal_p (arg0, old0, 0)) 2945 arg0 = new0; 2946 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0)) 2947 arg0 = new1; 2948 2949 if (arg1 == old0 || operand_equal_p (arg1, old0, 0)) 2950 arg1 = new0; 2951 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0)) 2952 arg1 = new1; 2953 2954 return fold_build2_loc (loc, code, type, arg0, arg1); 2955 } 2956 2957 default: 2958 return arg; 2959 } 2960 } 2961 2962 /* Return a tree for the case when the result of an expression is RESULT 2963 converted to TYPE and OMITTED was previously an operand of the expression 2964 but is now not needed (e.g., we folded OMITTED * 0). 2965 2966 If OMITTED has side effects, we must evaluate it. Otherwise, just do 2967 the conversion of RESULT to TYPE. */ 2968 2969 tree 2970 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted) 2971 { 2972 tree t = fold_convert_loc (loc, type, result); 2973 2974 /* If the resulting operand is an empty statement, just return the omitted 2975 statement casted to void. */ 2976 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted)) 2977 return build1_loc (loc, NOP_EXPR, void_type_node, 2978 fold_ignored_result (omitted)); 2979 2980 if (TREE_SIDE_EFFECTS (omitted)) 2981 return build2_loc (loc, COMPOUND_EXPR, type, 2982 fold_ignored_result (omitted), t); 2983 2984 return non_lvalue_loc (loc, t); 2985 } 2986 2987 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */ 2988 2989 static tree 2990 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result, 2991 tree omitted) 2992 { 2993 tree t = fold_convert_loc (loc, type, result); 2994 2995 /* If the resulting operand is an empty statement, just return the omitted 2996 statement casted to void. */ 2997 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted)) 2998 return build1_loc (loc, NOP_EXPR, void_type_node, 2999 fold_ignored_result (omitted)); 3000 3001 if (TREE_SIDE_EFFECTS (omitted)) 3002 return build2_loc (loc, COMPOUND_EXPR, type, 3003 fold_ignored_result (omitted), t); 3004 3005 return pedantic_non_lvalue_loc (loc, t); 3006 } 3007 3008 /* Return a tree for the case when the result of an expression is RESULT 3009 converted to TYPE and OMITTED1 and OMITTED2 were previously operands 3010 of the expression but are now not needed. 3011 3012 If OMITTED1 or OMITTED2 has side effects, they must be evaluated. 3013 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is 3014 evaluated before OMITTED2. Otherwise, if neither has side effects, 3015 just do the conversion of RESULT to TYPE. */ 3016 3017 tree 3018 omit_two_operands_loc (location_t loc, tree type, tree result, 3019 tree omitted1, tree omitted2) 3020 { 3021 tree t = fold_convert_loc (loc, type, result); 3022 3023 if (TREE_SIDE_EFFECTS (omitted2)) 3024 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t); 3025 if (TREE_SIDE_EFFECTS (omitted1)) 3026 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t); 3027 3028 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t; 3029 } 3030 3031 3032 /* Return a simplified tree node for the truth-negation of ARG. This 3033 never alters ARG itself. We assume that ARG is an operation that 3034 returns a truth value (0 or 1). 3035 3036 FIXME: one would think we would fold the result, but it causes 3037 problems with the dominator optimizer. */ 3038 3039 tree 3040 fold_truth_not_expr (location_t loc, tree arg) 3041 { 3042 tree type = TREE_TYPE (arg); 3043 enum tree_code code = TREE_CODE (arg); 3044 location_t loc1, loc2; 3045 3046 /* If this is a comparison, we can simply invert it, except for 3047 floating-point non-equality comparisons, in which case we just 3048 enclose a TRUTH_NOT_EXPR around what we have. */ 3049 3050 if (TREE_CODE_CLASS (code) == tcc_comparison) 3051 { 3052 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0)); 3053 if (FLOAT_TYPE_P (op_type) 3054 && flag_trapping_math 3055 && code != ORDERED_EXPR && code != UNORDERED_EXPR 3056 && code != NE_EXPR && code != EQ_EXPR) 3057 return NULL_TREE; 3058 3059 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type))); 3060 if (code == ERROR_MARK) 3061 return NULL_TREE; 3062 3063 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0), 3064 TREE_OPERAND (arg, 1)); 3065 } 3066 3067 switch (code) 3068 { 3069 case INTEGER_CST: 3070 return constant_boolean_node (integer_zerop (arg), type); 3071 3072 case TRUTH_AND_EXPR: 3073 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); 3074 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc); 3075 return build2_loc (loc, TRUTH_OR_EXPR, type, 3076 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), 3077 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); 3078 3079 case TRUTH_OR_EXPR: 3080 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); 3081 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc); 3082 return build2_loc (loc, TRUTH_AND_EXPR, type, 3083 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), 3084 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); 3085 3086 case TRUTH_XOR_EXPR: 3087 /* Here we can invert either operand. We invert the first operand 3088 unless the second operand is a TRUTH_NOT_EXPR in which case our 3089 result is the XOR of the first operand with the inside of the 3090 negation of the second operand. */ 3091 3092 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR) 3093 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0), 3094 TREE_OPERAND (TREE_OPERAND (arg, 1), 0)); 3095 else 3096 return build2_loc (loc, TRUTH_XOR_EXPR, type, 3097 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)), 3098 TREE_OPERAND (arg, 1)); 3099 3100 case TRUTH_ANDIF_EXPR: 3101 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); 3102 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc); 3103 return build2_loc (loc, TRUTH_ORIF_EXPR, type, 3104 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), 3105 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); 3106 3107 case TRUTH_ORIF_EXPR: 3108 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); 3109 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc); 3110 return build2_loc (loc, TRUTH_ANDIF_EXPR, type, 3111 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), 3112 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); 3113 3114 case TRUTH_NOT_EXPR: 3115 return TREE_OPERAND (arg, 0); 3116 3117 case COND_EXPR: 3118 { 3119 tree arg1 = TREE_OPERAND (arg, 1); 3120 tree arg2 = TREE_OPERAND (arg, 2); 3121 3122 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc); 3123 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc); 3124 3125 /* A COND_EXPR may have a throw as one operand, which 3126 then has void type. Just leave void operands 3127 as they are. */ 3128 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0), 3129 VOID_TYPE_P (TREE_TYPE (arg1)) 3130 ? arg1 : invert_truthvalue_loc (loc1, arg1), 3131 VOID_TYPE_P (TREE_TYPE (arg2)) 3132 ? arg2 : invert_truthvalue_loc (loc2, arg2)); 3133 } 3134 3135 case COMPOUND_EXPR: 3136 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc); 3137 return build2_loc (loc, COMPOUND_EXPR, type, 3138 TREE_OPERAND (arg, 0), 3139 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1))); 3140 3141 case NON_LVALUE_EXPR: 3142 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); 3143 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)); 3144 3145 CASE_CONVERT: 3146 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE) 3147 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg); 3148 3149 /* ... fall through ... */ 3150 3151 case FLOAT_EXPR: 3152 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); 3153 return build1_loc (loc, TREE_CODE (arg), type, 3154 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0))); 3155 3156 case BIT_AND_EXPR: 3157 if (!integer_onep (TREE_OPERAND (arg, 1))) 3158 return NULL_TREE; 3159 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0)); 3160 3161 case SAVE_EXPR: 3162 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg); 3163 3164 case CLEANUP_POINT_EXPR: 3165 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc); 3166 return build1_loc (loc, CLEANUP_POINT_EXPR, type, 3167 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0))); 3168 3169 default: 3170 return NULL_TREE; 3171 } 3172 } 3173 3174 /* Return a simplified tree node for the truth-negation of ARG. This 3175 never alters ARG itself. We assume that ARG is an operation that 3176 returns a truth value (0 or 1). 3177 3178 FIXME: one would think we would fold the result, but it causes 3179 problems with the dominator optimizer. */ 3180 3181 tree 3182 invert_truthvalue_loc (location_t loc, tree arg) 3183 { 3184 tree tem; 3185 3186 if (TREE_CODE (arg) == ERROR_MARK) 3187 return arg; 3188 3189 tem = fold_truth_not_expr (loc, arg); 3190 if (!tem) 3191 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg); 3192 3193 return tem; 3194 } 3195 3196 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both 3197 operands are another bit-wise operation with a common input. If so, 3198 distribute the bit operations to save an operation and possibly two if 3199 constants are involved. For example, convert 3200 (A | B) & (A | C) into A | (B & C) 3201 Further simplification will occur if B and C are constants. 3202 3203 If this optimization cannot be done, 0 will be returned. */ 3204 3205 static tree 3206 distribute_bit_expr (location_t loc, enum tree_code code, tree type, 3207 tree arg0, tree arg1) 3208 { 3209 tree common; 3210 tree left, right; 3211 3212 if (TREE_CODE (arg0) != TREE_CODE (arg1) 3213 || TREE_CODE (arg0) == code 3214 || (TREE_CODE (arg0) != BIT_AND_EXPR 3215 && TREE_CODE (arg0) != BIT_IOR_EXPR)) 3216 return 0; 3217 3218 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)) 3219 { 3220 common = TREE_OPERAND (arg0, 0); 3221 left = TREE_OPERAND (arg0, 1); 3222 right = TREE_OPERAND (arg1, 1); 3223 } 3224 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0)) 3225 { 3226 common = TREE_OPERAND (arg0, 0); 3227 left = TREE_OPERAND (arg0, 1); 3228 right = TREE_OPERAND (arg1, 0); 3229 } 3230 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0)) 3231 { 3232 common = TREE_OPERAND (arg0, 1); 3233 left = TREE_OPERAND (arg0, 0); 3234 right = TREE_OPERAND (arg1, 1); 3235 } 3236 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0)) 3237 { 3238 common = TREE_OPERAND (arg0, 1); 3239 left = TREE_OPERAND (arg0, 0); 3240 right = TREE_OPERAND (arg1, 0); 3241 } 3242 else 3243 return 0; 3244 3245 common = fold_convert_loc (loc, type, common); 3246 left = fold_convert_loc (loc, type, left); 3247 right = fold_convert_loc (loc, type, right); 3248 return fold_build2_loc (loc, TREE_CODE (arg0), type, common, 3249 fold_build2_loc (loc, code, type, left, right)); 3250 } 3251 3252 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation 3253 with code CODE. This optimization is unsafe. */ 3254 static tree 3255 distribute_real_division (location_t loc, enum tree_code code, tree type, 3256 tree arg0, tree arg1) 3257 { 3258 bool mul0 = TREE_CODE (arg0) == MULT_EXPR; 3259 bool mul1 = TREE_CODE (arg1) == MULT_EXPR; 3260 3261 /* (A / C) +- (B / C) -> (A +- B) / C. */ 3262 if (mul0 == mul1 3263 && operand_equal_p (TREE_OPERAND (arg0, 1), 3264 TREE_OPERAND (arg1, 1), 0)) 3265 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type, 3266 fold_build2_loc (loc, code, type, 3267 TREE_OPERAND (arg0, 0), 3268 TREE_OPERAND (arg1, 0)), 3269 TREE_OPERAND (arg0, 1)); 3270 3271 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */ 3272 if (operand_equal_p (TREE_OPERAND (arg0, 0), 3273 TREE_OPERAND (arg1, 0), 0) 3274 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST 3275 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST) 3276 { 3277 REAL_VALUE_TYPE r0, r1; 3278 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1)); 3279 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1)); 3280 if (!mul0) 3281 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0); 3282 if (!mul1) 3283 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1); 3284 real_arithmetic (&r0, code, &r0, &r1); 3285 return fold_build2_loc (loc, MULT_EXPR, type, 3286 TREE_OPERAND (arg0, 0), 3287 build_real (type, r0)); 3288 } 3289 3290 return NULL_TREE; 3291 } 3292 3293 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER 3294 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */ 3295 3296 static tree 3297 make_bit_field_ref (location_t loc, tree inner, tree type, 3298 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp) 3299 { 3300 tree result, bftype; 3301 3302 if (bitpos == 0) 3303 { 3304 tree size = TYPE_SIZE (TREE_TYPE (inner)); 3305 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner)) 3306 || POINTER_TYPE_P (TREE_TYPE (inner))) 3307 && host_integerp (size, 0) 3308 && tree_low_cst (size, 0) == bitsize) 3309 return fold_convert_loc (loc, type, inner); 3310 } 3311 3312 bftype = type; 3313 if (TYPE_PRECISION (bftype) != bitsize 3314 || TYPE_UNSIGNED (bftype) == !unsignedp) 3315 bftype = build_nonstandard_integer_type (bitsize, 0); 3316 3317 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner, 3318 size_int (bitsize), bitsize_int (bitpos)); 3319 3320 if (bftype != type) 3321 result = fold_convert_loc (loc, type, result); 3322 3323 return result; 3324 } 3325 3326 /* Optimize a bit-field compare. 3327 3328 There are two cases: First is a compare against a constant and the 3329 second is a comparison of two items where the fields are at the same 3330 bit position relative to the start of a chunk (byte, halfword, word) 3331 large enough to contain it. In these cases we can avoid the shift 3332 implicit in bitfield extractions. 3333 3334 For constants, we emit a compare of the shifted constant with the 3335 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being 3336 compared. For two fields at the same position, we do the ANDs with the 3337 similar mask and compare the result of the ANDs. 3338 3339 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR. 3340 COMPARE_TYPE is the type of the comparison, and LHS and RHS 3341 are the left and right operands of the comparison, respectively. 3342 3343 If the optimization described above can be done, we return the resulting 3344 tree. Otherwise we return zero. */ 3345 3346 static tree 3347 optimize_bit_field_compare (location_t loc, enum tree_code code, 3348 tree compare_type, tree lhs, tree rhs) 3349 { 3350 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize; 3351 tree type = TREE_TYPE (lhs); 3352 tree signed_type, unsigned_type; 3353 int const_p = TREE_CODE (rhs) == INTEGER_CST; 3354 enum machine_mode lmode, rmode, nmode; 3355 int lunsignedp, runsignedp; 3356 int lvolatilep = 0, rvolatilep = 0; 3357 tree linner, rinner = NULL_TREE; 3358 tree mask; 3359 tree offset; 3360 3361 /* Get all the information about the extractions being done. If the bit size 3362 if the same as the size of the underlying object, we aren't doing an 3363 extraction at all and so can do nothing. We also don't want to 3364 do anything if the inner expression is a PLACEHOLDER_EXPR since we 3365 then will no longer be able to replace it. */ 3366 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode, 3367 &lunsignedp, &lvolatilep, false); 3368 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0 3369 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR) 3370 return 0; 3371 3372 if (!const_p) 3373 { 3374 /* If this is not a constant, we can only do something if bit positions, 3375 sizes, and signedness are the same. */ 3376 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode, 3377 &runsignedp, &rvolatilep, false); 3378 3379 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize 3380 || lunsignedp != runsignedp || offset != 0 3381 || TREE_CODE (rinner) == PLACEHOLDER_EXPR) 3382 return 0; 3383 } 3384 3385 /* See if we can find a mode to refer to this field. We should be able to, 3386 but fail if we can't. */ 3387 if (lvolatilep 3388 && GET_MODE_BITSIZE (lmode) > 0 3389 && flag_strict_volatile_bitfields > 0) 3390 nmode = lmode; 3391 else 3392 nmode = get_best_mode (lbitsize, lbitpos, 0, 0, 3393 const_p ? TYPE_ALIGN (TREE_TYPE (linner)) 3394 : MIN (TYPE_ALIGN (TREE_TYPE (linner)), 3395 TYPE_ALIGN (TREE_TYPE (rinner))), 3396 word_mode, lvolatilep || rvolatilep); 3397 if (nmode == VOIDmode) 3398 return 0; 3399 3400 /* Set signed and unsigned types of the precision of this mode for the 3401 shifts below. */ 3402 signed_type = lang_hooks.types.type_for_mode (nmode, 0); 3403 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1); 3404 3405 /* Compute the bit position and size for the new reference and our offset 3406 within it. If the new reference is the same size as the original, we 3407 won't optimize anything, so return zero. */ 3408 nbitsize = GET_MODE_BITSIZE (nmode); 3409 nbitpos = lbitpos & ~ (nbitsize - 1); 3410 lbitpos -= nbitpos; 3411 if (nbitsize == lbitsize) 3412 return 0; 3413 3414 if (BYTES_BIG_ENDIAN) 3415 lbitpos = nbitsize - lbitsize - lbitpos; 3416 3417 /* Make the mask to be used against the extracted field. */ 3418 mask = build_int_cst_type (unsigned_type, -1); 3419 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize)); 3420 mask = const_binop (RSHIFT_EXPR, mask, 3421 size_int (nbitsize - lbitsize - lbitpos)); 3422 3423 if (! const_p) 3424 /* If not comparing with constant, just rework the comparison 3425 and return. */ 3426 return fold_build2_loc (loc, code, compare_type, 3427 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, 3428 make_bit_field_ref (loc, linner, 3429 unsigned_type, 3430 nbitsize, nbitpos, 3431 1), 3432 mask), 3433 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, 3434 make_bit_field_ref (loc, rinner, 3435 unsigned_type, 3436 nbitsize, nbitpos, 3437 1), 3438 mask)); 3439 3440 /* Otherwise, we are handling the constant case. See if the constant is too 3441 big for the field. Warn and return a tree of for 0 (false) if so. We do 3442 this not only for its own sake, but to avoid having to test for this 3443 error case below. If we didn't, we might generate wrong code. 3444 3445 For unsigned fields, the constant shifted right by the field length should 3446 be all zero. For signed fields, the high-order bits should agree with 3447 the sign bit. */ 3448 3449 if (lunsignedp) 3450 { 3451 if (! integer_zerop (const_binop (RSHIFT_EXPR, 3452 fold_convert_loc (loc, 3453 unsigned_type, rhs), 3454 size_int (lbitsize)))) 3455 { 3456 warning (0, "comparison is always %d due to width of bit-field", 3457 code == NE_EXPR); 3458 return constant_boolean_node (code == NE_EXPR, compare_type); 3459 } 3460 } 3461 else 3462 { 3463 tree tem = const_binop (RSHIFT_EXPR, 3464 fold_convert_loc (loc, signed_type, rhs), 3465 size_int (lbitsize - 1)); 3466 if (! integer_zerop (tem) && ! integer_all_onesp (tem)) 3467 { 3468 warning (0, "comparison is always %d due to width of bit-field", 3469 code == NE_EXPR); 3470 return constant_boolean_node (code == NE_EXPR, compare_type); 3471 } 3472 } 3473 3474 /* Single-bit compares should always be against zero. */ 3475 if (lbitsize == 1 && ! integer_zerop (rhs)) 3476 { 3477 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR; 3478 rhs = build_int_cst (type, 0); 3479 } 3480 3481 /* Make a new bitfield reference, shift the constant over the 3482 appropriate number of bits and mask it with the computed mask 3483 (in case this was a signed field). If we changed it, make a new one. */ 3484 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1); 3485 if (lvolatilep) 3486 { 3487 TREE_SIDE_EFFECTS (lhs) = 1; 3488 TREE_THIS_VOLATILE (lhs) = 1; 3489 } 3490 3491 rhs = const_binop (BIT_AND_EXPR, 3492 const_binop (LSHIFT_EXPR, 3493 fold_convert_loc (loc, unsigned_type, rhs), 3494 size_int (lbitpos)), 3495 mask); 3496 3497 lhs = build2_loc (loc, code, compare_type, 3498 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs); 3499 return lhs; 3500 } 3501 3502 /* Subroutine for fold_truth_andor_1: decode a field reference. 3503 3504 If EXP is a comparison reference, we return the innermost reference. 3505 3506 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is 3507 set to the starting bit number. 3508 3509 If the innermost field can be completely contained in a mode-sized 3510 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode. 3511 3512 *PVOLATILEP is set to 1 if the any expression encountered is volatile; 3513 otherwise it is not changed. 3514 3515 *PUNSIGNEDP is set to the signedness of the field. 3516 3517 *PMASK is set to the mask used. This is either contained in a 3518 BIT_AND_EXPR or derived from the width of the field. 3519 3520 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any. 3521 3522 Return 0 if this is not a component reference or is one that we can't 3523 do anything with. */ 3524 3525 static tree 3526 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize, 3527 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode, 3528 int *punsignedp, int *pvolatilep, 3529 tree *pmask, tree *pand_mask) 3530 { 3531 tree outer_type = 0; 3532 tree and_mask = 0; 3533 tree mask, inner, offset; 3534 tree unsigned_type; 3535 unsigned int precision; 3536 3537 /* All the optimizations using this function assume integer fields. 3538 There are problems with FP fields since the type_for_size call 3539 below can fail for, e.g., XFmode. */ 3540 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp))) 3541 return 0; 3542 3543 /* We are interested in the bare arrangement of bits, so strip everything 3544 that doesn't affect the machine mode. However, record the type of the 3545 outermost expression if it may matter below. */ 3546 if (CONVERT_EXPR_P (exp) 3547 || TREE_CODE (exp) == NON_LVALUE_EXPR) 3548 outer_type = TREE_TYPE (exp); 3549 STRIP_NOPS (exp); 3550 3551 if (TREE_CODE (exp) == BIT_AND_EXPR) 3552 { 3553 and_mask = TREE_OPERAND (exp, 1); 3554 exp = TREE_OPERAND (exp, 0); 3555 STRIP_NOPS (exp); STRIP_NOPS (and_mask); 3556 if (TREE_CODE (and_mask) != INTEGER_CST) 3557 return 0; 3558 } 3559 3560 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode, 3561 punsignedp, pvolatilep, false); 3562 if ((inner == exp && and_mask == 0) 3563 || *pbitsize < 0 || offset != 0 3564 || TREE_CODE (inner) == PLACEHOLDER_EXPR) 3565 return 0; 3566 3567 /* If the number of bits in the reference is the same as the bitsize of 3568 the outer type, then the outer type gives the signedness. Otherwise 3569 (in case of a small bitfield) the signedness is unchanged. */ 3570 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type)) 3571 *punsignedp = TYPE_UNSIGNED (outer_type); 3572 3573 /* Compute the mask to access the bitfield. */ 3574 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1); 3575 precision = TYPE_PRECISION (unsigned_type); 3576 3577 mask = build_int_cst_type (unsigned_type, -1); 3578 3579 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize)); 3580 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize)); 3581 3582 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */ 3583 if (and_mask != 0) 3584 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, 3585 fold_convert_loc (loc, unsigned_type, and_mask), mask); 3586 3587 *pmask = mask; 3588 *pand_mask = and_mask; 3589 return inner; 3590 } 3591 3592 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order 3593 bit positions. */ 3594 3595 static int 3596 all_ones_mask_p (const_tree mask, int size) 3597 { 3598 tree type = TREE_TYPE (mask); 3599 unsigned int precision = TYPE_PRECISION (type); 3600 tree tmask; 3601 3602 tmask = build_int_cst_type (signed_type_for (type), -1); 3603 3604 return 3605 tree_int_cst_equal (mask, 3606 const_binop (RSHIFT_EXPR, 3607 const_binop (LSHIFT_EXPR, tmask, 3608 size_int (precision - size)), 3609 size_int (precision - size))); 3610 } 3611 3612 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that 3613 represents the sign bit of EXP's type. If EXP represents a sign 3614 or zero extension, also test VAL against the unextended type. 3615 The return value is the (sub)expression whose sign bit is VAL, 3616 or NULL_TREE otherwise. */ 3617 3618 static tree 3619 sign_bit_p (tree exp, const_tree val) 3620 { 3621 unsigned HOST_WIDE_INT mask_lo, lo; 3622 HOST_WIDE_INT mask_hi, hi; 3623 int width; 3624 tree t; 3625 3626 /* Tree EXP must have an integral type. */ 3627 t = TREE_TYPE (exp); 3628 if (! INTEGRAL_TYPE_P (t)) 3629 return NULL_TREE; 3630 3631 /* Tree VAL must be an integer constant. */ 3632 if (TREE_CODE (val) != INTEGER_CST 3633 || TREE_OVERFLOW (val)) 3634 return NULL_TREE; 3635 3636 width = TYPE_PRECISION (t); 3637 if (width > HOST_BITS_PER_WIDE_INT) 3638 { 3639 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1); 3640 lo = 0; 3641 3642 mask_hi = ((unsigned HOST_WIDE_INT) -1 3643 >> (2 * HOST_BITS_PER_WIDE_INT - width)); 3644 mask_lo = -1; 3645 } 3646 else 3647 { 3648 hi = 0; 3649 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1); 3650 3651 mask_hi = 0; 3652 mask_lo = ((unsigned HOST_WIDE_INT) -1 3653 >> (HOST_BITS_PER_WIDE_INT - width)); 3654 } 3655 3656 /* We mask off those bits beyond TREE_TYPE (exp) so that we can 3657 treat VAL as if it were unsigned. */ 3658 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi 3659 && (TREE_INT_CST_LOW (val) & mask_lo) == lo) 3660 return exp; 3661 3662 /* Handle extension from a narrower type. */ 3663 if (TREE_CODE (exp) == NOP_EXPR 3664 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width) 3665 return sign_bit_p (TREE_OPERAND (exp, 0), val); 3666 3667 return NULL_TREE; 3668 } 3669 3670 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough 3671 to be evaluated unconditionally. */ 3672 3673 static int 3674 simple_operand_p (const_tree exp) 3675 { 3676 /* Strip any conversions that don't change the machine mode. */ 3677 STRIP_NOPS (exp); 3678 3679 return (CONSTANT_CLASS_P (exp) 3680 || TREE_CODE (exp) == SSA_NAME 3681 || (DECL_P (exp) 3682 && ! TREE_ADDRESSABLE (exp) 3683 && ! TREE_THIS_VOLATILE (exp) 3684 && ! DECL_NONLOCAL (exp) 3685 /* Don't regard global variables as simple. They may be 3686 allocated in ways unknown to the compiler (shared memory, 3687 #pragma weak, etc). */ 3688 && ! TREE_PUBLIC (exp) 3689 && ! DECL_EXTERNAL (exp) 3690 /* Loading a static variable is unduly expensive, but global 3691 registers aren't expensive. */ 3692 && (! TREE_STATIC (exp) || DECL_REGISTER (exp)))); 3693 } 3694 3695 /* Subroutine for fold_truth_andor: determine if an operand is simple enough 3696 to be evaluated unconditionally. 3697 I addition to simple_operand_p, we assume that comparisons, conversions, 3698 and logic-not operations are simple, if their operands are simple, too. */ 3699 3700 static bool 3701 simple_operand_p_2 (tree exp) 3702 { 3703 enum tree_code code; 3704 3705 if (TREE_SIDE_EFFECTS (exp) 3706 || tree_could_trap_p (exp)) 3707 return false; 3708 3709 while (CONVERT_EXPR_P (exp)) 3710 exp = TREE_OPERAND (exp, 0); 3711 3712 code = TREE_CODE (exp); 3713 3714 if (TREE_CODE_CLASS (code) == tcc_comparison) 3715 return (simple_operand_p (TREE_OPERAND (exp, 0)) 3716 && simple_operand_p (TREE_OPERAND (exp, 1))); 3717 3718 if (code == TRUTH_NOT_EXPR) 3719 return simple_operand_p_2 (TREE_OPERAND (exp, 0)); 3720 3721 return simple_operand_p (exp); 3722 } 3723 3724 3725 /* The following functions are subroutines to fold_range_test and allow it to 3726 try to change a logical combination of comparisons into a range test. 3727 3728 For example, both 3729 X == 2 || X == 3 || X == 4 || X == 5 3730 and 3731 X >= 2 && X <= 5 3732 are converted to 3733 (unsigned) (X - 2) <= 3 3734 3735 We describe each set of comparisons as being either inside or outside 3736 a range, using a variable named like IN_P, and then describe the 3737 range with a lower and upper bound. If one of the bounds is omitted, 3738 it represents either the highest or lowest value of the type. 3739 3740 In the comments below, we represent a range by two numbers in brackets 3741 preceded by a "+" to designate being inside that range, or a "-" to 3742 designate being outside that range, so the condition can be inverted by 3743 flipping the prefix. An omitted bound is represented by a "-". For 3744 example, "- [-, 10]" means being outside the range starting at the lowest 3745 possible value and ending at 10, in other words, being greater than 10. 3746 The range "+ [-, -]" is always true and hence the range "- [-, -]" is 3747 always false. 3748 3749 We set up things so that the missing bounds are handled in a consistent 3750 manner so neither a missing bound nor "true" and "false" need to be 3751 handled using a special case. */ 3752 3753 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case 3754 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P 3755 and UPPER1_P are nonzero if the respective argument is an upper bound 3756 and zero for a lower. TYPE, if nonzero, is the type of the result; it 3757 must be specified for a comparison. ARG1 will be converted to ARG0's 3758 type if both are specified. */ 3759 3760 static tree 3761 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p, 3762 tree arg1, int upper1_p) 3763 { 3764 tree tem; 3765 int result; 3766 int sgn0, sgn1; 3767 3768 /* If neither arg represents infinity, do the normal operation. 3769 Else, if not a comparison, return infinity. Else handle the special 3770 comparison rules. Note that most of the cases below won't occur, but 3771 are handled for consistency. */ 3772 3773 if (arg0 != 0 && arg1 != 0) 3774 { 3775 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0), 3776 arg0, fold_convert (TREE_TYPE (arg0), arg1)); 3777 STRIP_NOPS (tem); 3778 return TREE_CODE (tem) == INTEGER_CST ? tem : 0; 3779 } 3780 3781 if (TREE_CODE_CLASS (code) != tcc_comparison) 3782 return 0; 3783 3784 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0 3785 for neither. In real maths, we cannot assume open ended ranges are 3786 the same. But, this is computer arithmetic, where numbers are finite. 3787 We can therefore make the transformation of any unbounded range with 3788 the value Z, Z being greater than any representable number. This permits 3789 us to treat unbounded ranges as equal. */ 3790 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1); 3791 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1); 3792 switch (code) 3793 { 3794 case EQ_EXPR: 3795 result = sgn0 == sgn1; 3796 break; 3797 case NE_EXPR: 3798 result = sgn0 != sgn1; 3799 break; 3800 case LT_EXPR: 3801 result = sgn0 < sgn1; 3802 break; 3803 case LE_EXPR: 3804 result = sgn0 <= sgn1; 3805 break; 3806 case GT_EXPR: 3807 result = sgn0 > sgn1; 3808 break; 3809 case GE_EXPR: 3810 result = sgn0 >= sgn1; 3811 break; 3812 default: 3813 gcc_unreachable (); 3814 } 3815 3816 return constant_boolean_node (result, type); 3817 } 3818 3819 /* Helper routine for make_range. Perform one step for it, return 3820 new expression if the loop should continue or NULL_TREE if it should 3821 stop. */ 3822 3823 tree 3824 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1, 3825 tree exp_type, tree *p_low, tree *p_high, int *p_in_p, 3826 bool *strict_overflow_p) 3827 { 3828 tree arg0_type = TREE_TYPE (arg0); 3829 tree n_low, n_high, low = *p_low, high = *p_high; 3830 int in_p = *p_in_p, n_in_p; 3831 3832 switch (code) 3833 { 3834 case TRUTH_NOT_EXPR: 3835 /* We can only do something if the range is testing for zero. */ 3836 if (low == NULL_TREE || high == NULL_TREE 3837 || ! integer_zerop (low) || ! integer_zerop (high)) 3838 return NULL_TREE; 3839 *p_in_p = ! in_p; 3840 return arg0; 3841 3842 case EQ_EXPR: case NE_EXPR: 3843 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR: 3844 /* We can only do something if the range is testing for zero 3845 and if the second operand is an integer constant. Note that 3846 saying something is "in" the range we make is done by 3847 complementing IN_P since it will set in the initial case of 3848 being not equal to zero; "out" is leaving it alone. */ 3849 if (low == NULL_TREE || high == NULL_TREE 3850 || ! integer_zerop (low) || ! integer_zerop (high) 3851 || TREE_CODE (arg1) != INTEGER_CST) 3852 return NULL_TREE; 3853 3854 switch (code) 3855 { 3856 case NE_EXPR: /* - [c, c] */ 3857 low = high = arg1; 3858 break; 3859 case EQ_EXPR: /* + [c, c] */ 3860 in_p = ! in_p, low = high = arg1; 3861 break; 3862 case GT_EXPR: /* - [-, c] */ 3863 low = 0, high = arg1; 3864 break; 3865 case GE_EXPR: /* + [c, -] */ 3866 in_p = ! in_p, low = arg1, high = 0; 3867 break; 3868 case LT_EXPR: /* - [c, -] */ 3869 low = arg1, high = 0; 3870 break; 3871 case LE_EXPR: /* + [-, c] */ 3872 in_p = ! in_p, low = 0, high = arg1; 3873 break; 3874 default: 3875 gcc_unreachable (); 3876 } 3877 3878 /* If this is an unsigned comparison, we also know that EXP is 3879 greater than or equal to zero. We base the range tests we make 3880 on that fact, so we record it here so we can parse existing 3881 range tests. We test arg0_type since often the return type 3882 of, e.g. EQ_EXPR, is boolean. */ 3883 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0)) 3884 { 3885 if (! merge_ranges (&n_in_p, &n_low, &n_high, 3886 in_p, low, high, 1, 3887 build_int_cst (arg0_type, 0), 3888 NULL_TREE)) 3889 return NULL_TREE; 3890 3891 in_p = n_in_p, low = n_low, high = n_high; 3892 3893 /* If the high bound is missing, but we have a nonzero low 3894 bound, reverse the range so it goes from zero to the low bound 3895 minus 1. */ 3896 if (high == 0 && low && ! integer_zerop (low)) 3897 { 3898 in_p = ! in_p; 3899 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0, 3900 integer_one_node, 0); 3901 low = build_int_cst (arg0_type, 0); 3902 } 3903 } 3904 3905 *p_low = low; 3906 *p_high = high; 3907 *p_in_p = in_p; 3908 return arg0; 3909 3910 case NEGATE_EXPR: 3911 /* If flag_wrapv and ARG0_TYPE is signed, make sure 3912 low and high are non-NULL, then normalize will DTRT. */ 3913 if (!TYPE_UNSIGNED (arg0_type) 3914 && !TYPE_OVERFLOW_UNDEFINED (arg0_type)) 3915 { 3916 if (low == NULL_TREE) 3917 low = TYPE_MIN_VALUE (arg0_type); 3918 if (high == NULL_TREE) 3919 high = TYPE_MAX_VALUE (arg0_type); 3920 } 3921 3922 /* (-x) IN [a,b] -> x in [-b, -a] */ 3923 n_low = range_binop (MINUS_EXPR, exp_type, 3924 build_int_cst (exp_type, 0), 3925 0, high, 1); 3926 n_high = range_binop (MINUS_EXPR, exp_type, 3927 build_int_cst (exp_type, 0), 3928 0, low, 0); 3929 if (n_high != 0 && TREE_OVERFLOW (n_high)) 3930 return NULL_TREE; 3931 goto normalize; 3932 3933 case BIT_NOT_EXPR: 3934 /* ~ X -> -X - 1 */ 3935 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0), 3936 build_int_cst (exp_type, 1)); 3937 3938 case PLUS_EXPR: 3939 case MINUS_EXPR: 3940 if (TREE_CODE (arg1) != INTEGER_CST) 3941 return NULL_TREE; 3942 3943 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot 3944 move a constant to the other side. */ 3945 if (!TYPE_UNSIGNED (arg0_type) 3946 && !TYPE_OVERFLOW_UNDEFINED (arg0_type)) 3947 return NULL_TREE; 3948 3949 /* If EXP is signed, any overflow in the computation is undefined, 3950 so we don't worry about it so long as our computations on 3951 the bounds don't overflow. For unsigned, overflow is defined 3952 and this is exactly the right thing. */ 3953 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR, 3954 arg0_type, low, 0, arg1, 0); 3955 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR, 3956 arg0_type, high, 1, arg1, 0); 3957 if ((n_low != 0 && TREE_OVERFLOW (n_low)) 3958 || (n_high != 0 && TREE_OVERFLOW (n_high))) 3959 return NULL_TREE; 3960 3961 if (TYPE_OVERFLOW_UNDEFINED (arg0_type)) 3962 *strict_overflow_p = true; 3963 3964 normalize: 3965 /* Check for an unsigned range which has wrapped around the maximum 3966 value thus making n_high < n_low, and normalize it. */ 3967 if (n_low && n_high && tree_int_cst_lt (n_high, n_low)) 3968 { 3969 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0, 3970 integer_one_node, 0); 3971 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0, 3972 integer_one_node, 0); 3973 3974 /* If the range is of the form +/- [ x+1, x ], we won't 3975 be able to normalize it. But then, it represents the 3976 whole range or the empty set, so make it 3977 +/- [ -, - ]. */ 3978 if (tree_int_cst_equal (n_low, low) 3979 && tree_int_cst_equal (n_high, high)) 3980 low = high = 0; 3981 else 3982 in_p = ! in_p; 3983 } 3984 else 3985 low = n_low, high = n_high; 3986 3987 *p_low = low; 3988 *p_high = high; 3989 *p_in_p = in_p; 3990 return arg0; 3991 3992 CASE_CONVERT: 3993 case NON_LVALUE_EXPR: 3994 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type)) 3995 return NULL_TREE; 3996 3997 if (! INTEGRAL_TYPE_P (arg0_type) 3998 || (low != 0 && ! int_fits_type_p (low, arg0_type)) 3999 || (high != 0 && ! int_fits_type_p (high, arg0_type))) 4000 return NULL_TREE; 4001 4002 n_low = low, n_high = high; 4003 4004 if (n_low != 0) 4005 n_low = fold_convert_loc (loc, arg0_type, n_low); 4006 4007 if (n_high != 0) 4008 n_high = fold_convert_loc (loc, arg0_type, n_high); 4009 4010 /* If we're converting arg0 from an unsigned type, to exp, 4011 a signed type, we will be doing the comparison as unsigned. 4012 The tests above have already verified that LOW and HIGH 4013 are both positive. 4014 4015 So we have to ensure that we will handle large unsigned 4016 values the same way that the current signed bounds treat 4017 negative values. */ 4018 4019 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type)) 4020 { 4021 tree high_positive; 4022 tree equiv_type; 4023 /* For fixed-point modes, we need to pass the saturating flag 4024 as the 2nd parameter. */ 4025 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type))) 4026 equiv_type 4027 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 4028 TYPE_SATURATING (arg0_type)); 4029 else 4030 equiv_type 4031 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1); 4032 4033 /* A range without an upper bound is, naturally, unbounded. 4034 Since convert would have cropped a very large value, use 4035 the max value for the destination type. */ 4036 high_positive 4037 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type) 4038 : TYPE_MAX_VALUE (arg0_type); 4039 4040 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type)) 4041 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type, 4042 fold_convert_loc (loc, arg0_type, 4043 high_positive), 4044 build_int_cst (arg0_type, 1)); 4045 4046 /* If the low bound is specified, "and" the range with the 4047 range for which the original unsigned value will be 4048 positive. */ 4049 if (low != 0) 4050 { 4051 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high, 4052 1, fold_convert_loc (loc, arg0_type, 4053 integer_zero_node), 4054 high_positive)) 4055 return NULL_TREE; 4056 4057 in_p = (n_in_p == in_p); 4058 } 4059 else 4060 { 4061 /* Otherwise, "or" the range with the range of the input 4062 that will be interpreted as negative. */ 4063 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high, 4064 1, fold_convert_loc (loc, arg0_type, 4065 integer_zero_node), 4066 high_positive)) 4067 return NULL_TREE; 4068 4069 in_p = (in_p != n_in_p); 4070 } 4071 } 4072 4073 *p_low = n_low; 4074 *p_high = n_high; 4075 *p_in_p = in_p; 4076 return arg0; 4077 4078 default: 4079 return NULL_TREE; 4080 } 4081 } 4082 4083 /* Given EXP, a logical expression, set the range it is testing into 4084 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression 4085 actually being tested. *PLOW and *PHIGH will be made of the same 4086 type as the returned expression. If EXP is not a comparison, we 4087 will most likely not be returning a useful value and range. Set 4088 *STRICT_OVERFLOW_P to true if the return value is only valid 4089 because signed overflow is undefined; otherwise, do not change 4090 *STRICT_OVERFLOW_P. */ 4091 4092 tree 4093 make_range (tree exp, int *pin_p, tree *plow, tree *phigh, 4094 bool *strict_overflow_p) 4095 { 4096 enum tree_code code; 4097 tree arg0, arg1 = NULL_TREE; 4098 tree exp_type, nexp; 4099 int in_p; 4100 tree low, high; 4101 location_t loc = EXPR_LOCATION (exp); 4102 4103 /* Start with simply saying "EXP != 0" and then look at the code of EXP 4104 and see if we can refine the range. Some of the cases below may not 4105 happen, but it doesn't seem worth worrying about this. We "continue" 4106 the outer loop when we've changed something; otherwise we "break" 4107 the switch, which will "break" the while. */ 4108 4109 in_p = 0; 4110 low = high = build_int_cst (TREE_TYPE (exp), 0); 4111 4112 while (1) 4113 { 4114 code = TREE_CODE (exp); 4115 exp_type = TREE_TYPE (exp); 4116 arg0 = NULL_TREE; 4117 4118 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))) 4119 { 4120 if (TREE_OPERAND_LENGTH (exp) > 0) 4121 arg0 = TREE_OPERAND (exp, 0); 4122 if (TREE_CODE_CLASS (code) == tcc_binary 4123 || TREE_CODE_CLASS (code) == tcc_comparison 4124 || (TREE_CODE_CLASS (code) == tcc_expression 4125 && TREE_OPERAND_LENGTH (exp) > 1)) 4126 arg1 = TREE_OPERAND (exp, 1); 4127 } 4128 if (arg0 == NULL_TREE) 4129 break; 4130 4131 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low, 4132 &high, &in_p, strict_overflow_p); 4133 if (nexp == NULL_TREE) 4134 break; 4135 exp = nexp; 4136 } 4137 4138 /* If EXP is a constant, we can evaluate whether this is true or false. */ 4139 if (TREE_CODE (exp) == INTEGER_CST) 4140 { 4141 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node, 4142 exp, 0, low, 0)) 4143 && integer_onep (range_binop (LE_EXPR, integer_type_node, 4144 exp, 1, high, 1))); 4145 low = high = 0; 4146 exp = 0; 4147 } 4148 4149 *pin_p = in_p, *plow = low, *phigh = high; 4150 return exp; 4151 } 4152 4153 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result 4154 type, TYPE, return an expression to test if EXP is in (or out of, depending 4155 on IN_P) the range. Return 0 if the test couldn't be created. */ 4156 4157 tree 4158 build_range_check (location_t loc, tree type, tree exp, int in_p, 4159 tree low, tree high) 4160 { 4161 tree etype = TREE_TYPE (exp), value; 4162 4163 #ifdef HAVE_canonicalize_funcptr_for_compare 4164 /* Disable this optimization for function pointer expressions 4165 on targets that require function pointer canonicalization. */ 4166 if (HAVE_canonicalize_funcptr_for_compare 4167 && TREE_CODE (etype) == POINTER_TYPE 4168 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE) 4169 return NULL_TREE; 4170 #endif 4171 4172 if (! in_p) 4173 { 4174 value = build_range_check (loc, type, exp, 1, low, high); 4175 if (value != 0) 4176 return invert_truthvalue_loc (loc, value); 4177 4178 return 0; 4179 } 4180 4181 if (low == 0 && high == 0) 4182 return build_int_cst (type, 1); 4183 4184 if (low == 0) 4185 return fold_build2_loc (loc, LE_EXPR, type, exp, 4186 fold_convert_loc (loc, etype, high)); 4187 4188 if (high == 0) 4189 return fold_build2_loc (loc, GE_EXPR, type, exp, 4190 fold_convert_loc (loc, etype, low)); 4191 4192 if (operand_equal_p (low, high, 0)) 4193 return fold_build2_loc (loc, EQ_EXPR, type, exp, 4194 fold_convert_loc (loc, etype, low)); 4195 4196 if (integer_zerop (low)) 4197 { 4198 if (! TYPE_UNSIGNED (etype)) 4199 { 4200 etype = unsigned_type_for (etype); 4201 high = fold_convert_loc (loc, etype, high); 4202 exp = fold_convert_loc (loc, etype, exp); 4203 } 4204 return build_range_check (loc, type, exp, 1, 0, high); 4205 } 4206 4207 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */ 4208 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST) 4209 { 4210 unsigned HOST_WIDE_INT lo; 4211 HOST_WIDE_INT hi; 4212 int prec; 4213 4214 prec = TYPE_PRECISION (etype); 4215 if (prec <= HOST_BITS_PER_WIDE_INT) 4216 { 4217 hi = 0; 4218 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1; 4219 } 4220 else 4221 { 4222 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1; 4223 lo = (unsigned HOST_WIDE_INT) -1; 4224 } 4225 4226 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo) 4227 { 4228 if (TYPE_UNSIGNED (etype)) 4229 { 4230 tree signed_etype = signed_type_for (etype); 4231 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype)) 4232 etype 4233 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0); 4234 else 4235 etype = signed_etype; 4236 exp = fold_convert_loc (loc, etype, exp); 4237 } 4238 return fold_build2_loc (loc, GT_EXPR, type, exp, 4239 build_int_cst (etype, 0)); 4240 } 4241 } 4242 4243 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low). 4244 This requires wrap-around arithmetics for the type of the expression. 4245 First make sure that arithmetics in this type is valid, then make sure 4246 that it wraps around. */ 4247 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE) 4248 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 4249 TYPE_UNSIGNED (etype)); 4250 4251 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype)) 4252 { 4253 tree utype, minv, maxv; 4254 4255 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN 4256 for the type in question, as we rely on this here. */ 4257 utype = unsigned_type_for (etype); 4258 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype)); 4259 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1, 4260 integer_one_node, 1); 4261 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype)); 4262 4263 if (integer_zerop (range_binop (NE_EXPR, integer_type_node, 4264 minv, 1, maxv, 1))) 4265 etype = utype; 4266 else 4267 return 0; 4268 } 4269 4270 high = fold_convert_loc (loc, etype, high); 4271 low = fold_convert_loc (loc, etype, low); 4272 exp = fold_convert_loc (loc, etype, exp); 4273 4274 value = const_binop (MINUS_EXPR, high, low); 4275 4276 4277 if (POINTER_TYPE_P (etype)) 4278 { 4279 if (value != 0 && !TREE_OVERFLOW (value)) 4280 { 4281 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low); 4282 return build_range_check (loc, type, 4283 fold_build_pointer_plus_loc (loc, exp, low), 4284 1, build_int_cst (etype, 0), value); 4285 } 4286 return 0; 4287 } 4288 4289 if (value != 0 && !TREE_OVERFLOW (value)) 4290 return build_range_check (loc, type, 4291 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low), 4292 1, build_int_cst (etype, 0), value); 4293 4294 return 0; 4295 } 4296 4297 /* Return the predecessor of VAL in its type, handling the infinite case. */ 4298 4299 static tree 4300 range_predecessor (tree val) 4301 { 4302 tree type = TREE_TYPE (val); 4303 4304 if (INTEGRAL_TYPE_P (type) 4305 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0)) 4306 return 0; 4307 else 4308 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0); 4309 } 4310 4311 /* Return the successor of VAL in its type, handling the infinite case. */ 4312 4313 static tree 4314 range_successor (tree val) 4315 { 4316 tree type = TREE_TYPE (val); 4317 4318 if (INTEGRAL_TYPE_P (type) 4319 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0)) 4320 return 0; 4321 else 4322 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0); 4323 } 4324 4325 /* Given two ranges, see if we can merge them into one. Return 1 if we 4326 can, 0 if we can't. Set the output range into the specified parameters. */ 4327 4328 bool 4329 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0, 4330 tree high0, int in1_p, tree low1, tree high1) 4331 { 4332 int no_overlap; 4333 int subset; 4334 int temp; 4335 tree tem; 4336 int in_p; 4337 tree low, high; 4338 int lowequal = ((low0 == 0 && low1 == 0) 4339 || integer_onep (range_binop (EQ_EXPR, integer_type_node, 4340 low0, 0, low1, 0))); 4341 int highequal = ((high0 == 0 && high1 == 0) 4342 || integer_onep (range_binop (EQ_EXPR, integer_type_node, 4343 high0, 1, high1, 1))); 4344 4345 /* Make range 0 be the range that starts first, or ends last if they 4346 start at the same value. Swap them if it isn't. */ 4347 if (integer_onep (range_binop (GT_EXPR, integer_type_node, 4348 low0, 0, low1, 0)) 4349 || (lowequal 4350 && integer_onep (range_binop (GT_EXPR, integer_type_node, 4351 high1, 1, high0, 1)))) 4352 { 4353 temp = in0_p, in0_p = in1_p, in1_p = temp; 4354 tem = low0, low0 = low1, low1 = tem; 4355 tem = high0, high0 = high1, high1 = tem; 4356 } 4357 4358 /* Now flag two cases, whether the ranges are disjoint or whether the 4359 second range is totally subsumed in the first. Note that the tests 4360 below are simplified by the ones above. */ 4361 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node, 4362 high0, 1, low1, 0)); 4363 subset = integer_onep (range_binop (LE_EXPR, integer_type_node, 4364 high1, 1, high0, 1)); 4365 4366 /* We now have four cases, depending on whether we are including or 4367 excluding the two ranges. */ 4368 if (in0_p && in1_p) 4369 { 4370 /* If they don't overlap, the result is false. If the second range 4371 is a subset it is the result. Otherwise, the range is from the start 4372 of the second to the end of the first. */ 4373 if (no_overlap) 4374 in_p = 0, low = high = 0; 4375 else if (subset) 4376 in_p = 1, low = low1, high = high1; 4377 else 4378 in_p = 1, low = low1, high = high0; 4379 } 4380 4381 else if (in0_p && ! in1_p) 4382 { 4383 /* If they don't overlap, the result is the first range. If they are 4384 equal, the result is false. If the second range is a subset of the 4385 first, and the ranges begin at the same place, we go from just after 4386 the end of the second range to the end of the first. If the second 4387 range is not a subset of the first, or if it is a subset and both 4388 ranges end at the same place, the range starts at the start of the 4389 first range and ends just before the second range. 4390 Otherwise, we can't describe this as a single range. */ 4391 if (no_overlap) 4392 in_p = 1, low = low0, high = high0; 4393 else if (lowequal && highequal) 4394 in_p = 0, low = high = 0; 4395 else if (subset && lowequal) 4396 { 4397 low = range_successor (high1); 4398 high = high0; 4399 in_p = 1; 4400 if (low == 0) 4401 { 4402 /* We are in the weird situation where high0 > high1 but 4403 high1 has no successor. Punt. */ 4404 return 0; 4405 } 4406 } 4407 else if (! subset || highequal) 4408 { 4409 low = low0; 4410 high = range_predecessor (low1); 4411 in_p = 1; 4412 if (high == 0) 4413 { 4414 /* low0 < low1 but low1 has no predecessor. Punt. */ 4415 return 0; 4416 } 4417 } 4418 else 4419 return 0; 4420 } 4421 4422 else if (! in0_p && in1_p) 4423 { 4424 /* If they don't overlap, the result is the second range. If the second 4425 is a subset of the first, the result is false. Otherwise, 4426 the range starts just after the first range and ends at the 4427 end of the second. */ 4428 if (no_overlap) 4429 in_p = 1, low = low1, high = high1; 4430 else if (subset || highequal) 4431 in_p = 0, low = high = 0; 4432 else 4433 { 4434 low = range_successor (high0); 4435 high = high1; 4436 in_p = 1; 4437 if (low == 0) 4438 { 4439 /* high1 > high0 but high0 has no successor. Punt. */ 4440 return 0; 4441 } 4442 } 4443 } 4444 4445 else 4446 { 4447 /* The case where we are excluding both ranges. Here the complex case 4448 is if they don't overlap. In that case, the only time we have a 4449 range is if they are adjacent. If the second is a subset of the 4450 first, the result is the first. Otherwise, the range to exclude 4451 starts at the beginning of the first range and ends at the end of the 4452 second. */ 4453 if (no_overlap) 4454 { 4455 if (integer_onep (range_binop (EQ_EXPR, integer_type_node, 4456 range_successor (high0), 4457 1, low1, 0))) 4458 in_p = 0, low = low0, high = high1; 4459 else 4460 { 4461 /* Canonicalize - [min, x] into - [-, x]. */ 4462 if (low0 && TREE_CODE (low0) == INTEGER_CST) 4463 switch (TREE_CODE (TREE_TYPE (low0))) 4464 { 4465 case ENUMERAL_TYPE: 4466 if (TYPE_PRECISION (TREE_TYPE (low0)) 4467 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0)))) 4468 break; 4469 /* FALLTHROUGH */ 4470 case INTEGER_TYPE: 4471 if (tree_int_cst_equal (low0, 4472 TYPE_MIN_VALUE (TREE_TYPE (low0)))) 4473 low0 = 0; 4474 break; 4475 case POINTER_TYPE: 4476 if (TYPE_UNSIGNED (TREE_TYPE (low0)) 4477 && integer_zerop (low0)) 4478 low0 = 0; 4479 break; 4480 default: 4481 break; 4482 } 4483 4484 /* Canonicalize - [x, max] into - [x, -]. */ 4485 if (high1 && TREE_CODE (high1) == INTEGER_CST) 4486 switch (TREE_CODE (TREE_TYPE (high1))) 4487 { 4488 case ENUMERAL_TYPE: 4489 if (TYPE_PRECISION (TREE_TYPE (high1)) 4490 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1)))) 4491 break; 4492 /* FALLTHROUGH */ 4493 case INTEGER_TYPE: 4494 if (tree_int_cst_equal (high1, 4495 TYPE_MAX_VALUE (TREE_TYPE (high1)))) 4496 high1 = 0; 4497 break; 4498 case POINTER_TYPE: 4499 if (TYPE_UNSIGNED (TREE_TYPE (high1)) 4500 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE, 4501 high1, 1, 4502 integer_one_node, 1))) 4503 high1 = 0; 4504 break; 4505 default: 4506 break; 4507 } 4508 4509 /* The ranges might be also adjacent between the maximum and 4510 minimum values of the given type. For 4511 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y 4512 return + [x + 1, y - 1]. */ 4513 if (low0 == 0 && high1 == 0) 4514 { 4515 low = range_successor (high0); 4516 high = range_predecessor (low1); 4517 if (low == 0 || high == 0) 4518 return 0; 4519 4520 in_p = 1; 4521 } 4522 else 4523 return 0; 4524 } 4525 } 4526 else if (subset) 4527 in_p = 0, low = low0, high = high0; 4528 else 4529 in_p = 0, low = low0, high = high1; 4530 } 4531 4532 *pin_p = in_p, *plow = low, *phigh = high; 4533 return 1; 4534 } 4535 4536 4537 /* Subroutine of fold, looking inside expressions of the form 4538 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands 4539 of the COND_EXPR. This function is being used also to optimize 4540 A op B ? C : A, by reversing the comparison first. 4541 4542 Return a folded expression whose code is not a COND_EXPR 4543 anymore, or NULL_TREE if no folding opportunity is found. */ 4544 4545 static tree 4546 fold_cond_expr_with_comparison (location_t loc, tree type, 4547 tree arg0, tree arg1, tree arg2) 4548 { 4549 enum tree_code comp_code = TREE_CODE (arg0); 4550 tree arg00 = TREE_OPERAND (arg0, 0); 4551 tree arg01 = TREE_OPERAND (arg0, 1); 4552 tree arg1_type = TREE_TYPE (arg1); 4553 tree tem; 4554 4555 STRIP_NOPS (arg1); 4556 STRIP_NOPS (arg2); 4557 4558 /* If we have A op 0 ? A : -A, consider applying the following 4559 transformations: 4560 4561 A == 0? A : -A same as -A 4562 A != 0? A : -A same as A 4563 A >= 0? A : -A same as abs (A) 4564 A > 0? A : -A same as abs (A) 4565 A <= 0? A : -A same as -abs (A) 4566 A < 0? A : -A same as -abs (A) 4567 4568 None of these transformations work for modes with signed 4569 zeros. If A is +/-0, the first two transformations will 4570 change the sign of the result (from +0 to -0, or vice 4571 versa). The last four will fix the sign of the result, 4572 even though the original expressions could be positive or 4573 negative, depending on the sign of A. 4574 4575 Note that all these transformations are correct if A is 4576 NaN, since the two alternatives (A and -A) are also NaNs. */ 4577 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)) 4578 && (FLOAT_TYPE_P (TREE_TYPE (arg01)) 4579 ? real_zerop (arg01) 4580 : integer_zerop (arg01)) 4581 && ((TREE_CODE (arg2) == NEGATE_EXPR 4582 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0)) 4583 /* In the case that A is of the form X-Y, '-A' (arg2) may 4584 have already been folded to Y-X, check for that. */ 4585 || (TREE_CODE (arg1) == MINUS_EXPR 4586 && TREE_CODE (arg2) == MINUS_EXPR 4587 && operand_equal_p (TREE_OPERAND (arg1, 0), 4588 TREE_OPERAND (arg2, 1), 0) 4589 && operand_equal_p (TREE_OPERAND (arg1, 1), 4590 TREE_OPERAND (arg2, 0), 0)))) 4591 switch (comp_code) 4592 { 4593 case EQ_EXPR: 4594 case UNEQ_EXPR: 4595 tem = fold_convert_loc (loc, arg1_type, arg1); 4596 return pedantic_non_lvalue_loc (loc, 4597 fold_convert_loc (loc, type, 4598 negate_expr (tem))); 4599 case NE_EXPR: 4600 case LTGT_EXPR: 4601 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); 4602 case UNGE_EXPR: 4603 case UNGT_EXPR: 4604 if (flag_trapping_math) 4605 break; 4606 /* Fall through. */ 4607 case GE_EXPR: 4608 case GT_EXPR: 4609 if (TYPE_UNSIGNED (TREE_TYPE (arg1))) 4610 arg1 = fold_convert_loc (loc, signed_type_for 4611 (TREE_TYPE (arg1)), arg1); 4612 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1); 4613 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); 4614 case UNLE_EXPR: 4615 case UNLT_EXPR: 4616 if (flag_trapping_math) 4617 break; 4618 case LE_EXPR: 4619 case LT_EXPR: 4620 if (TYPE_UNSIGNED (TREE_TYPE (arg1))) 4621 arg1 = fold_convert_loc (loc, signed_type_for 4622 (TREE_TYPE (arg1)), arg1); 4623 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1); 4624 return negate_expr (fold_convert_loc (loc, type, tem)); 4625 default: 4626 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison); 4627 break; 4628 } 4629 4630 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise 4631 A == 0 ? A : 0 is always 0 unless A is -0. Note that 4632 both transformations are correct when A is NaN: A != 0 4633 is then true, and A == 0 is false. */ 4634 4635 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)) 4636 && integer_zerop (arg01) && integer_zerop (arg2)) 4637 { 4638 if (comp_code == NE_EXPR) 4639 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); 4640 else if (comp_code == EQ_EXPR) 4641 return build_int_cst (type, 0); 4642 } 4643 4644 /* Try some transformations of A op B ? A : B. 4645 4646 A == B? A : B same as B 4647 A != B? A : B same as A 4648 A >= B? A : B same as max (A, B) 4649 A > B? A : B same as max (B, A) 4650 A <= B? A : B same as min (A, B) 4651 A < B? A : B same as min (B, A) 4652 4653 As above, these transformations don't work in the presence 4654 of signed zeros. For example, if A and B are zeros of 4655 opposite sign, the first two transformations will change 4656 the sign of the result. In the last four, the original 4657 expressions give different results for (A=+0, B=-0) and 4658 (A=-0, B=+0), but the transformed expressions do not. 4659 4660 The first two transformations are correct if either A or B 4661 is a NaN. In the first transformation, the condition will 4662 be false, and B will indeed be chosen. In the case of the 4663 second transformation, the condition A != B will be true, 4664 and A will be chosen. 4665 4666 The conversions to max() and min() are not correct if B is 4667 a number and A is not. The conditions in the original 4668 expressions will be false, so all four give B. The min() 4669 and max() versions would give a NaN instead. */ 4670 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)) 4671 && operand_equal_for_comparison_p (arg01, arg2, arg00) 4672 /* Avoid these transformations if the COND_EXPR may be used 4673 as an lvalue in the C++ front-end. PR c++/19199. */ 4674 && (in_gimple_form 4675 || (strcmp (lang_hooks.name, "GNU C++") != 0 4676 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0) 4677 || ! maybe_lvalue_p (arg1) 4678 || ! maybe_lvalue_p (arg2))) 4679 { 4680 tree comp_op0 = arg00; 4681 tree comp_op1 = arg01; 4682 tree comp_type = TREE_TYPE (comp_op0); 4683 4684 /* Avoid adding NOP_EXPRs in case this is an lvalue. */ 4685 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type)) 4686 { 4687 comp_type = type; 4688 comp_op0 = arg1; 4689 comp_op1 = arg2; 4690 } 4691 4692 switch (comp_code) 4693 { 4694 case EQ_EXPR: 4695 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2)); 4696 case NE_EXPR: 4697 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); 4698 case LE_EXPR: 4699 case LT_EXPR: 4700 case UNLE_EXPR: 4701 case UNLT_EXPR: 4702 /* In C++ a ?: expression can be an lvalue, so put the 4703 operand which will be used if they are equal first 4704 so that we can convert this back to the 4705 corresponding COND_EXPR. */ 4706 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))) 4707 { 4708 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0); 4709 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1); 4710 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR) 4711 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1) 4712 : fold_build2_loc (loc, MIN_EXPR, comp_type, 4713 comp_op1, comp_op0); 4714 return pedantic_non_lvalue_loc (loc, 4715 fold_convert_loc (loc, type, tem)); 4716 } 4717 break; 4718 case GE_EXPR: 4719 case GT_EXPR: 4720 case UNGE_EXPR: 4721 case UNGT_EXPR: 4722 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))) 4723 { 4724 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0); 4725 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1); 4726 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR) 4727 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1) 4728 : fold_build2_loc (loc, MAX_EXPR, comp_type, 4729 comp_op1, comp_op0); 4730 return pedantic_non_lvalue_loc (loc, 4731 fold_convert_loc (loc, type, tem)); 4732 } 4733 break; 4734 case UNEQ_EXPR: 4735 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))) 4736 return pedantic_non_lvalue_loc (loc, 4737 fold_convert_loc (loc, type, arg2)); 4738 break; 4739 case LTGT_EXPR: 4740 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))) 4741 return pedantic_non_lvalue_loc (loc, 4742 fold_convert_loc (loc, type, arg1)); 4743 break; 4744 default: 4745 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison); 4746 break; 4747 } 4748 } 4749 4750 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers, 4751 we might still be able to simplify this. For example, 4752 if C1 is one less or one more than C2, this might have started 4753 out as a MIN or MAX and been transformed by this function. 4754 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */ 4755 4756 if (INTEGRAL_TYPE_P (type) 4757 && TREE_CODE (arg01) == INTEGER_CST 4758 && TREE_CODE (arg2) == INTEGER_CST) 4759 switch (comp_code) 4760 { 4761 case EQ_EXPR: 4762 if (TREE_CODE (arg1) == INTEGER_CST) 4763 break; 4764 /* We can replace A with C1 in this case. */ 4765 arg1 = fold_convert_loc (loc, type, arg01); 4766 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2); 4767 4768 case LT_EXPR: 4769 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for 4770 MIN_EXPR, to preserve the signedness of the comparison. */ 4771 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 4772 OEP_ONLY_CONST) 4773 && operand_equal_p (arg01, 4774 const_binop (PLUS_EXPR, arg2, 4775 build_int_cst (type, 1)), 4776 OEP_ONLY_CONST)) 4777 { 4778 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00, 4779 fold_convert_loc (loc, TREE_TYPE (arg00), 4780 arg2)); 4781 return pedantic_non_lvalue_loc (loc, 4782 fold_convert_loc (loc, type, tem)); 4783 } 4784 break; 4785 4786 case LE_EXPR: 4787 /* If C1 is C2 - 1, this is min(A, C2), with the same care 4788 as above. */ 4789 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 4790 OEP_ONLY_CONST) 4791 && operand_equal_p (arg01, 4792 const_binop (MINUS_EXPR, arg2, 4793 build_int_cst (type, 1)), 4794 OEP_ONLY_CONST)) 4795 { 4796 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00, 4797 fold_convert_loc (loc, TREE_TYPE (arg00), 4798 arg2)); 4799 return pedantic_non_lvalue_loc (loc, 4800 fold_convert_loc (loc, type, tem)); 4801 } 4802 break; 4803 4804 case GT_EXPR: 4805 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for 4806 MAX_EXPR, to preserve the signedness of the comparison. */ 4807 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 4808 OEP_ONLY_CONST) 4809 && operand_equal_p (arg01, 4810 const_binop (MINUS_EXPR, arg2, 4811 build_int_cst (type, 1)), 4812 OEP_ONLY_CONST)) 4813 { 4814 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00, 4815 fold_convert_loc (loc, TREE_TYPE (arg00), 4816 arg2)); 4817 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); 4818 } 4819 break; 4820 4821 case GE_EXPR: 4822 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */ 4823 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 4824 OEP_ONLY_CONST) 4825 && operand_equal_p (arg01, 4826 const_binop (PLUS_EXPR, arg2, 4827 build_int_cst (type, 1)), 4828 OEP_ONLY_CONST)) 4829 { 4830 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00, 4831 fold_convert_loc (loc, TREE_TYPE (arg00), 4832 arg2)); 4833 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); 4834 } 4835 break; 4836 case NE_EXPR: 4837 break; 4838 default: 4839 gcc_unreachable (); 4840 } 4841 4842 return NULL_TREE; 4843 } 4844 4845 4846 4847 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT 4848 #define LOGICAL_OP_NON_SHORT_CIRCUIT \ 4849 (BRANCH_COST (optimize_function_for_speed_p (cfun), \ 4850 false) >= 2) 4851 #endif 4852 4853 /* EXP is some logical combination of boolean tests. See if we can 4854 merge it into some range test. Return the new tree if so. */ 4855 4856 static tree 4857 fold_range_test (location_t loc, enum tree_code code, tree type, 4858 tree op0, tree op1) 4859 { 4860 int or_op = (code == TRUTH_ORIF_EXPR 4861 || code == TRUTH_OR_EXPR); 4862 int in0_p, in1_p, in_p; 4863 tree low0, low1, low, high0, high1, high; 4864 bool strict_overflow_p = false; 4865 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p); 4866 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p); 4867 tree tem; 4868 const char * const warnmsg = G_("assuming signed overflow does not occur " 4869 "when simplifying range test"); 4870 4871 /* If this is an OR operation, invert both sides; we will invert 4872 again at the end. */ 4873 if (or_op) 4874 in0_p = ! in0_p, in1_p = ! in1_p; 4875 4876 /* If both expressions are the same, if we can merge the ranges, and we 4877 can build the range test, return it or it inverted. If one of the 4878 ranges is always true or always false, consider it to be the same 4879 expression as the other. */ 4880 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0)) 4881 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0, 4882 in1_p, low1, high1) 4883 && 0 != (tem = (build_range_check (loc, type, 4884 lhs != 0 ? lhs 4885 : rhs != 0 ? rhs : integer_zero_node, 4886 in_p, low, high)))) 4887 { 4888 if (strict_overflow_p) 4889 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON); 4890 return or_op ? invert_truthvalue_loc (loc, tem) : tem; 4891 } 4892 4893 /* On machines where the branch cost is expensive, if this is a 4894 short-circuited branch and the underlying object on both sides 4895 is the same, make a non-short-circuit operation. */ 4896 else if (LOGICAL_OP_NON_SHORT_CIRCUIT 4897 && lhs != 0 && rhs != 0 4898 && (code == TRUTH_ANDIF_EXPR 4899 || code == TRUTH_ORIF_EXPR) 4900 && operand_equal_p (lhs, rhs, 0)) 4901 { 4902 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR 4903 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in 4904 which cases we can't do this. */ 4905 if (simple_operand_p (lhs)) 4906 return build2_loc (loc, code == TRUTH_ANDIF_EXPR 4907 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR, 4908 type, op0, op1); 4909 4910 else if (!lang_hooks.decls.global_bindings_p () 4911 && !CONTAINS_PLACEHOLDER_P (lhs)) 4912 { 4913 tree common = save_expr (lhs); 4914 4915 if (0 != (lhs = build_range_check (loc, type, common, 4916 or_op ? ! in0_p : in0_p, 4917 low0, high0)) 4918 && (0 != (rhs = build_range_check (loc, type, common, 4919 or_op ? ! in1_p : in1_p, 4920 low1, high1)))) 4921 { 4922 if (strict_overflow_p) 4923 fold_overflow_warning (warnmsg, 4924 WARN_STRICT_OVERFLOW_COMPARISON); 4925 return build2_loc (loc, code == TRUTH_ANDIF_EXPR 4926 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR, 4927 type, lhs, rhs); 4928 } 4929 } 4930 } 4931 4932 return 0; 4933 } 4934 4935 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P 4936 bit value. Arrange things so the extra bits will be set to zero if and 4937 only if C is signed-extended to its full width. If MASK is nonzero, 4938 it is an INTEGER_CST that should be AND'ed with the extra bits. */ 4939 4940 static tree 4941 unextend (tree c, int p, int unsignedp, tree mask) 4942 { 4943 tree type = TREE_TYPE (c); 4944 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type)); 4945 tree temp; 4946 4947 if (p == modesize || unsignedp) 4948 return c; 4949 4950 /* We work by getting just the sign bit into the low-order bit, then 4951 into the high-order bit, then sign-extend. We then XOR that value 4952 with C. */ 4953 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1)); 4954 temp = const_binop (BIT_AND_EXPR, temp, size_int (1)); 4955 4956 /* We must use a signed type in order to get an arithmetic right shift. 4957 However, we must also avoid introducing accidental overflows, so that 4958 a subsequent call to integer_zerop will work. Hence we must 4959 do the type conversion here. At this point, the constant is either 4960 zero or one, and the conversion to a signed type can never overflow. 4961 We could get an overflow if this conversion is done anywhere else. */ 4962 if (TYPE_UNSIGNED (type)) 4963 temp = fold_convert (signed_type_for (type), temp); 4964 4965 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1)); 4966 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1)); 4967 if (mask != 0) 4968 temp = const_binop (BIT_AND_EXPR, temp, 4969 fold_convert (TREE_TYPE (c), mask)); 4970 /* If necessary, convert the type back to match the type of C. */ 4971 if (TYPE_UNSIGNED (type)) 4972 temp = fold_convert (type, temp); 4973 4974 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp)); 4975 } 4976 4977 /* For an expression that has the form 4978 (A && B) || ~B 4979 or 4980 (A || B) && ~B, 4981 we can drop one of the inner expressions and simplify to 4982 A || ~B 4983 or 4984 A && ~B 4985 LOC is the location of the resulting expression. OP is the inner 4986 logical operation; the left-hand side in the examples above, while CMPOP 4987 is the right-hand side. RHS_ONLY is used to prevent us from accidentally 4988 removing a condition that guards another, as in 4989 (A != NULL && A->...) || A == NULL 4990 which we must not transform. If RHS_ONLY is true, only eliminate the 4991 right-most operand of the inner logical operation. */ 4992 4993 static tree 4994 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop, 4995 bool rhs_only) 4996 { 4997 tree type = TREE_TYPE (cmpop); 4998 enum tree_code code = TREE_CODE (cmpop); 4999 enum tree_code truthop_code = TREE_CODE (op); 5000 tree lhs = TREE_OPERAND (op, 0); 5001 tree rhs = TREE_OPERAND (op, 1); 5002 tree orig_lhs = lhs, orig_rhs = rhs; 5003 enum tree_code rhs_code = TREE_CODE (rhs); 5004 enum tree_code lhs_code = TREE_CODE (lhs); 5005 enum tree_code inv_code; 5006 5007 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop)) 5008 return NULL_TREE; 5009 5010 if (TREE_CODE_CLASS (code) != tcc_comparison) 5011 return NULL_TREE; 5012 5013 if (rhs_code == truthop_code) 5014 { 5015 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only); 5016 if (newrhs != NULL_TREE) 5017 { 5018 rhs = newrhs; 5019 rhs_code = TREE_CODE (rhs); 5020 } 5021 } 5022 if (lhs_code == truthop_code && !rhs_only) 5023 { 5024 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false); 5025 if (newlhs != NULL_TREE) 5026 { 5027 lhs = newlhs; 5028 lhs_code = TREE_CODE (lhs); 5029 } 5030 } 5031 5032 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type))); 5033 if (inv_code == rhs_code 5034 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0) 5035 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0)) 5036 return lhs; 5037 if (!rhs_only && inv_code == lhs_code 5038 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0) 5039 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0)) 5040 return rhs; 5041 if (rhs != orig_rhs || lhs != orig_lhs) 5042 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop), 5043 lhs, rhs); 5044 return NULL_TREE; 5045 } 5046 5047 /* Find ways of folding logical expressions of LHS and RHS: 5048 Try to merge two comparisons to the same innermost item. 5049 Look for range tests like "ch >= '0' && ch <= '9'". 5050 Look for combinations of simple terms on machines with expensive branches 5051 and evaluate the RHS unconditionally. 5052 5053 For example, if we have p->a == 2 && p->b == 4 and we can make an 5054 object large enough to span both A and B, we can do this with a comparison 5055 against the object ANDed with the a mask. 5056 5057 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking 5058 operations to do this with one comparison. 5059 5060 We check for both normal comparisons and the BIT_AND_EXPRs made this by 5061 function and the one above. 5062 5063 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR, 5064 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR. 5065 5066 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its 5067 two operands. 5068 5069 We return the simplified tree or 0 if no optimization is possible. */ 5070 5071 static tree 5072 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type, 5073 tree lhs, tree rhs) 5074 { 5075 /* If this is the "or" of two comparisons, we can do something if 5076 the comparisons are NE_EXPR. If this is the "and", we can do something 5077 if the comparisons are EQ_EXPR. I.e., 5078 (a->b == 2 && a->c == 4) can become (a->new == NEW). 5079 5080 WANTED_CODE is this operation code. For single bit fields, we can 5081 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong" 5082 comparison for one-bit fields. */ 5083 5084 enum tree_code wanted_code; 5085 enum tree_code lcode, rcode; 5086 tree ll_arg, lr_arg, rl_arg, rr_arg; 5087 tree ll_inner, lr_inner, rl_inner, rr_inner; 5088 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos; 5089 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos; 5090 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos; 5091 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos; 5092 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp; 5093 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode; 5094 enum machine_mode lnmode, rnmode; 5095 tree ll_mask, lr_mask, rl_mask, rr_mask; 5096 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask; 5097 tree l_const, r_const; 5098 tree lntype, rntype, result; 5099 HOST_WIDE_INT first_bit, end_bit; 5100 int volatilep; 5101 5102 /* Start by getting the comparison codes. Fail if anything is volatile. 5103 If one operand is a BIT_AND_EXPR with the constant one, treat it as if 5104 it were surrounded with a NE_EXPR. */ 5105 5106 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs)) 5107 return 0; 5108 5109 lcode = TREE_CODE (lhs); 5110 rcode = TREE_CODE (rhs); 5111 5112 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1))) 5113 { 5114 lhs = build2 (NE_EXPR, truth_type, lhs, 5115 build_int_cst (TREE_TYPE (lhs), 0)); 5116 lcode = NE_EXPR; 5117 } 5118 5119 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1))) 5120 { 5121 rhs = build2 (NE_EXPR, truth_type, rhs, 5122 build_int_cst (TREE_TYPE (rhs), 0)); 5123 rcode = NE_EXPR; 5124 } 5125 5126 if (TREE_CODE_CLASS (lcode) != tcc_comparison 5127 || TREE_CODE_CLASS (rcode) != tcc_comparison) 5128 return 0; 5129 5130 ll_arg = TREE_OPERAND (lhs, 0); 5131 lr_arg = TREE_OPERAND (lhs, 1); 5132 rl_arg = TREE_OPERAND (rhs, 0); 5133 rr_arg = TREE_OPERAND (rhs, 1); 5134 5135 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */ 5136 if (simple_operand_p (ll_arg) 5137 && simple_operand_p (lr_arg)) 5138 { 5139 if (operand_equal_p (ll_arg, rl_arg, 0) 5140 && operand_equal_p (lr_arg, rr_arg, 0)) 5141 { 5142 result = combine_comparisons (loc, code, lcode, rcode, 5143 truth_type, ll_arg, lr_arg); 5144 if (result) 5145 return result; 5146 } 5147 else if (operand_equal_p (ll_arg, rr_arg, 0) 5148 && operand_equal_p (lr_arg, rl_arg, 0)) 5149 { 5150 result = combine_comparisons (loc, code, lcode, 5151 swap_tree_comparison (rcode), 5152 truth_type, ll_arg, lr_arg); 5153 if (result) 5154 return result; 5155 } 5156 } 5157 5158 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR) 5159 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR); 5160 5161 /* If the RHS can be evaluated unconditionally and its operands are 5162 simple, it wins to evaluate the RHS unconditionally on machines 5163 with expensive branches. In this case, this isn't a comparison 5164 that can be merged. */ 5165 5166 if (BRANCH_COST (optimize_function_for_speed_p (cfun), 5167 false) >= 2 5168 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg)) 5169 && simple_operand_p (rl_arg) 5170 && simple_operand_p (rr_arg)) 5171 { 5172 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */ 5173 if (code == TRUTH_OR_EXPR 5174 && lcode == NE_EXPR && integer_zerop (lr_arg) 5175 && rcode == NE_EXPR && integer_zerop (rr_arg) 5176 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg) 5177 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg))) 5178 return build2_loc (loc, NE_EXPR, truth_type, 5179 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg), 5180 ll_arg, rl_arg), 5181 build_int_cst (TREE_TYPE (ll_arg), 0)); 5182 5183 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */ 5184 if (code == TRUTH_AND_EXPR 5185 && lcode == EQ_EXPR && integer_zerop (lr_arg) 5186 && rcode == EQ_EXPR && integer_zerop (rr_arg) 5187 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg) 5188 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg))) 5189 return build2_loc (loc, EQ_EXPR, truth_type, 5190 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg), 5191 ll_arg, rl_arg), 5192 build_int_cst (TREE_TYPE (ll_arg), 0)); 5193 } 5194 5195 /* See if the comparisons can be merged. Then get all the parameters for 5196 each side. */ 5197 5198 if ((lcode != EQ_EXPR && lcode != NE_EXPR) 5199 || (rcode != EQ_EXPR && rcode != NE_EXPR)) 5200 return 0; 5201 5202 volatilep = 0; 5203 ll_inner = decode_field_reference (loc, ll_arg, 5204 &ll_bitsize, &ll_bitpos, &ll_mode, 5205 &ll_unsignedp, &volatilep, &ll_mask, 5206 &ll_and_mask); 5207 lr_inner = decode_field_reference (loc, lr_arg, 5208 &lr_bitsize, &lr_bitpos, &lr_mode, 5209 &lr_unsignedp, &volatilep, &lr_mask, 5210 &lr_and_mask); 5211 rl_inner = decode_field_reference (loc, rl_arg, 5212 &rl_bitsize, &rl_bitpos, &rl_mode, 5213 &rl_unsignedp, &volatilep, &rl_mask, 5214 &rl_and_mask); 5215 rr_inner = decode_field_reference (loc, rr_arg, 5216 &rr_bitsize, &rr_bitpos, &rr_mode, 5217 &rr_unsignedp, &volatilep, &rr_mask, 5218 &rr_and_mask); 5219 5220 /* It must be true that the inner operation on the lhs of each 5221 comparison must be the same if we are to be able to do anything. 5222 Then see if we have constants. If not, the same must be true for 5223 the rhs's. */ 5224 if (volatilep || ll_inner == 0 || rl_inner == 0 5225 || ! operand_equal_p (ll_inner, rl_inner, 0)) 5226 return 0; 5227 5228 if (TREE_CODE (lr_arg) == INTEGER_CST 5229 && TREE_CODE (rr_arg) == INTEGER_CST) 5230 l_const = lr_arg, r_const = rr_arg; 5231 else if (lr_inner == 0 || rr_inner == 0 5232 || ! operand_equal_p (lr_inner, rr_inner, 0)) 5233 return 0; 5234 else 5235 l_const = r_const = 0; 5236 5237 /* If either comparison code is not correct for our logical operation, 5238 fail. However, we can convert a one-bit comparison against zero into 5239 the opposite comparison against that bit being set in the field. */ 5240 5241 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR); 5242 if (lcode != wanted_code) 5243 { 5244 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask)) 5245 { 5246 /* Make the left operand unsigned, since we are only interested 5247 in the value of one bit. Otherwise we are doing the wrong 5248 thing below. */ 5249 ll_unsignedp = 1; 5250 l_const = ll_mask; 5251 } 5252 else 5253 return 0; 5254 } 5255 5256 /* This is analogous to the code for l_const above. */ 5257 if (rcode != wanted_code) 5258 { 5259 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask)) 5260 { 5261 rl_unsignedp = 1; 5262 r_const = rl_mask; 5263 } 5264 else 5265 return 0; 5266 } 5267 5268 /* See if we can find a mode that contains both fields being compared on 5269 the left. If we can't, fail. Otherwise, update all constants and masks 5270 to be relative to a field of that size. */ 5271 first_bit = MIN (ll_bitpos, rl_bitpos); 5272 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize); 5273 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0, 5274 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode, 5275 volatilep); 5276 if (lnmode == VOIDmode) 5277 return 0; 5278 5279 lnbitsize = GET_MODE_BITSIZE (lnmode); 5280 lnbitpos = first_bit & ~ (lnbitsize - 1); 5281 lntype = lang_hooks.types.type_for_size (lnbitsize, 1); 5282 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos; 5283 5284 if (BYTES_BIG_ENDIAN) 5285 { 5286 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize; 5287 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize; 5288 } 5289 5290 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask), 5291 size_int (xll_bitpos)); 5292 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask), 5293 size_int (xrl_bitpos)); 5294 5295 if (l_const) 5296 { 5297 l_const = fold_convert_loc (loc, lntype, l_const); 5298 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask); 5299 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos)); 5300 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const, 5301 fold_build1_loc (loc, BIT_NOT_EXPR, 5302 lntype, ll_mask)))) 5303 { 5304 warning (0, "comparison is always %d", wanted_code == NE_EXPR); 5305 5306 return constant_boolean_node (wanted_code == NE_EXPR, truth_type); 5307 } 5308 } 5309 if (r_const) 5310 { 5311 r_const = fold_convert_loc (loc, lntype, r_const); 5312 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask); 5313 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos)); 5314 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const, 5315 fold_build1_loc (loc, BIT_NOT_EXPR, 5316 lntype, rl_mask)))) 5317 { 5318 warning (0, "comparison is always %d", wanted_code == NE_EXPR); 5319 5320 return constant_boolean_node (wanted_code == NE_EXPR, truth_type); 5321 } 5322 } 5323 5324 /* If the right sides are not constant, do the same for it. Also, 5325 disallow this optimization if a size or signedness mismatch occurs 5326 between the left and right sides. */ 5327 if (l_const == 0) 5328 { 5329 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize 5330 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp 5331 /* Make sure the two fields on the right 5332 correspond to the left without being swapped. */ 5333 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos) 5334 return 0; 5335 5336 first_bit = MIN (lr_bitpos, rr_bitpos); 5337 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize); 5338 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0, 5339 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode, 5340 volatilep); 5341 if (rnmode == VOIDmode) 5342 return 0; 5343 5344 rnbitsize = GET_MODE_BITSIZE (rnmode); 5345 rnbitpos = first_bit & ~ (rnbitsize - 1); 5346 rntype = lang_hooks.types.type_for_size (rnbitsize, 1); 5347 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos; 5348 5349 if (BYTES_BIG_ENDIAN) 5350 { 5351 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize; 5352 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize; 5353 } 5354 5355 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, 5356 rntype, lr_mask), 5357 size_int (xlr_bitpos)); 5358 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, 5359 rntype, rr_mask), 5360 size_int (xrr_bitpos)); 5361 5362 /* Make a mask that corresponds to both fields being compared. 5363 Do this for both items being compared. If the operands are the 5364 same size and the bits being compared are in the same position 5365 then we can do this by masking both and comparing the masked 5366 results. */ 5367 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask); 5368 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask); 5369 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos) 5370 { 5371 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos, 5372 ll_unsignedp || rl_unsignedp); 5373 if (! all_ones_mask_p (ll_mask, lnbitsize)) 5374 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask); 5375 5376 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos, 5377 lr_unsignedp || rr_unsignedp); 5378 if (! all_ones_mask_p (lr_mask, rnbitsize)) 5379 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask); 5380 5381 return build2_loc (loc, wanted_code, truth_type, lhs, rhs); 5382 } 5383 5384 /* There is still another way we can do something: If both pairs of 5385 fields being compared are adjacent, we may be able to make a wider 5386 field containing them both. 5387 5388 Note that we still must mask the lhs/rhs expressions. Furthermore, 5389 the mask must be shifted to account for the shift done by 5390 make_bit_field_ref. */ 5391 if ((ll_bitsize + ll_bitpos == rl_bitpos 5392 && lr_bitsize + lr_bitpos == rr_bitpos) 5393 || (ll_bitpos == rl_bitpos + rl_bitsize 5394 && lr_bitpos == rr_bitpos + rr_bitsize)) 5395 { 5396 tree type; 5397 5398 lhs = make_bit_field_ref (loc, ll_inner, lntype, 5399 ll_bitsize + rl_bitsize, 5400 MIN (ll_bitpos, rl_bitpos), ll_unsignedp); 5401 rhs = make_bit_field_ref (loc, lr_inner, rntype, 5402 lr_bitsize + rr_bitsize, 5403 MIN (lr_bitpos, rr_bitpos), lr_unsignedp); 5404 5405 ll_mask = const_binop (RSHIFT_EXPR, ll_mask, 5406 size_int (MIN (xll_bitpos, xrl_bitpos))); 5407 lr_mask = const_binop (RSHIFT_EXPR, lr_mask, 5408 size_int (MIN (xlr_bitpos, xrr_bitpos))); 5409 5410 /* Convert to the smaller type before masking out unwanted bits. */ 5411 type = lntype; 5412 if (lntype != rntype) 5413 { 5414 if (lnbitsize > rnbitsize) 5415 { 5416 lhs = fold_convert_loc (loc, rntype, lhs); 5417 ll_mask = fold_convert_loc (loc, rntype, ll_mask); 5418 type = rntype; 5419 } 5420 else if (lnbitsize < rnbitsize) 5421 { 5422 rhs = fold_convert_loc (loc, lntype, rhs); 5423 lr_mask = fold_convert_loc (loc, lntype, lr_mask); 5424 type = lntype; 5425 } 5426 } 5427 5428 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize)) 5429 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask); 5430 5431 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize)) 5432 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask); 5433 5434 return build2_loc (loc, wanted_code, truth_type, lhs, rhs); 5435 } 5436 5437 return 0; 5438 } 5439 5440 /* Handle the case of comparisons with constants. If there is something in 5441 common between the masks, those bits of the constants must be the same. 5442 If not, the condition is always false. Test for this to avoid generating 5443 incorrect code below. */ 5444 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask); 5445 if (! integer_zerop (result) 5446 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const), 5447 const_binop (BIT_AND_EXPR, result, r_const)) != 1) 5448 { 5449 if (wanted_code == NE_EXPR) 5450 { 5451 warning (0, "%<or%> of unmatched not-equal tests is always 1"); 5452 return constant_boolean_node (true, truth_type); 5453 } 5454 else 5455 { 5456 warning (0, "%<and%> of mutually exclusive equal-tests is always 0"); 5457 return constant_boolean_node (false, truth_type); 5458 } 5459 } 5460 5461 /* Construct the expression we will return. First get the component 5462 reference we will make. Unless the mask is all ones the width of 5463 that field, perform the mask operation. Then compare with the 5464 merged constant. */ 5465 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos, 5466 ll_unsignedp || rl_unsignedp); 5467 5468 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask); 5469 if (! all_ones_mask_p (ll_mask, lnbitsize)) 5470 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask); 5471 5472 return build2_loc (loc, wanted_code, truth_type, result, 5473 const_binop (BIT_IOR_EXPR, l_const, r_const)); 5474 } 5475 5476 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a 5477 constant. */ 5478 5479 static tree 5480 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type, 5481 tree op0, tree op1) 5482 { 5483 tree arg0 = op0; 5484 enum tree_code op_code; 5485 tree comp_const; 5486 tree minmax_const; 5487 int consts_equal, consts_lt; 5488 tree inner; 5489 5490 STRIP_SIGN_NOPS (arg0); 5491 5492 op_code = TREE_CODE (arg0); 5493 minmax_const = TREE_OPERAND (arg0, 1); 5494 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1); 5495 consts_equal = tree_int_cst_equal (minmax_const, comp_const); 5496 consts_lt = tree_int_cst_lt (minmax_const, comp_const); 5497 inner = TREE_OPERAND (arg0, 0); 5498 5499 /* If something does not permit us to optimize, return the original tree. */ 5500 if ((op_code != MIN_EXPR && op_code != MAX_EXPR) 5501 || TREE_CODE (comp_const) != INTEGER_CST 5502 || TREE_OVERFLOW (comp_const) 5503 || TREE_CODE (minmax_const) != INTEGER_CST 5504 || TREE_OVERFLOW (minmax_const)) 5505 return NULL_TREE; 5506 5507 /* Now handle all the various comparison codes. We only handle EQ_EXPR 5508 and GT_EXPR, doing the rest with recursive calls using logical 5509 simplifications. */ 5510 switch (code) 5511 { 5512 case NE_EXPR: case LT_EXPR: case LE_EXPR: 5513 { 5514 tree tem 5515 = optimize_minmax_comparison (loc, 5516 invert_tree_comparison (code, false), 5517 type, op0, op1); 5518 if (tem) 5519 return invert_truthvalue_loc (loc, tem); 5520 return NULL_TREE; 5521 } 5522 5523 case GE_EXPR: 5524 return 5525 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type, 5526 optimize_minmax_comparison 5527 (loc, EQ_EXPR, type, arg0, comp_const), 5528 optimize_minmax_comparison 5529 (loc, GT_EXPR, type, arg0, comp_const)); 5530 5531 case EQ_EXPR: 5532 if (op_code == MAX_EXPR && consts_equal) 5533 /* MAX (X, 0) == 0 -> X <= 0 */ 5534 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const); 5535 5536 else if (op_code == MAX_EXPR && consts_lt) 5537 /* MAX (X, 0) == 5 -> X == 5 */ 5538 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const); 5539 5540 else if (op_code == MAX_EXPR) 5541 /* MAX (X, 0) == -1 -> false */ 5542 return omit_one_operand_loc (loc, type, integer_zero_node, inner); 5543 5544 else if (consts_equal) 5545 /* MIN (X, 0) == 0 -> X >= 0 */ 5546 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const); 5547 5548 else if (consts_lt) 5549 /* MIN (X, 0) == 5 -> false */ 5550 return omit_one_operand_loc (loc, type, integer_zero_node, inner); 5551 5552 else 5553 /* MIN (X, 0) == -1 -> X == -1 */ 5554 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const); 5555 5556 case GT_EXPR: 5557 if (op_code == MAX_EXPR && (consts_equal || consts_lt)) 5558 /* MAX (X, 0) > 0 -> X > 0 5559 MAX (X, 0) > 5 -> X > 5 */ 5560 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const); 5561 5562 else if (op_code == MAX_EXPR) 5563 /* MAX (X, 0) > -1 -> true */ 5564 return omit_one_operand_loc (loc, type, integer_one_node, inner); 5565 5566 else if (op_code == MIN_EXPR && (consts_equal || consts_lt)) 5567 /* MIN (X, 0) > 0 -> false 5568 MIN (X, 0) > 5 -> false */ 5569 return omit_one_operand_loc (loc, type, integer_zero_node, inner); 5570 5571 else 5572 /* MIN (X, 0) > -1 -> X > -1 */ 5573 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const); 5574 5575 default: 5576 return NULL_TREE; 5577 } 5578 } 5579 5580 /* T is an integer expression that is being multiplied, divided, or taken a 5581 modulus (CODE says which and what kind of divide or modulus) by a 5582 constant C. See if we can eliminate that operation by folding it with 5583 other operations already in T. WIDE_TYPE, if non-null, is a type that 5584 should be used for the computation if wider than our type. 5585 5586 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return 5587 (X * 2) + (Y * 4). We must, however, be assured that either the original 5588 expression would not overflow or that overflow is undefined for the type 5589 in the language in question. 5590 5591 If we return a non-null expression, it is an equivalent form of the 5592 original computation, but need not be in the original type. 5593 5594 We set *STRICT_OVERFLOW_P to true if the return values depends on 5595 signed overflow being undefined. Otherwise we do not change 5596 *STRICT_OVERFLOW_P. */ 5597 5598 static tree 5599 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type, 5600 bool *strict_overflow_p) 5601 { 5602 /* To avoid exponential search depth, refuse to allow recursion past 5603 three levels. Beyond that (1) it's highly unlikely that we'll find 5604 something interesting and (2) we've probably processed it before 5605 when we built the inner expression. */ 5606 5607 static int depth; 5608 tree ret; 5609 5610 if (depth > 3) 5611 return NULL; 5612 5613 depth++; 5614 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p); 5615 depth--; 5616 5617 return ret; 5618 } 5619 5620 static tree 5621 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type, 5622 bool *strict_overflow_p) 5623 { 5624 tree type = TREE_TYPE (t); 5625 enum tree_code tcode = TREE_CODE (t); 5626 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type)) 5627 > GET_MODE_SIZE (TYPE_MODE (type))) 5628 ? wide_type : type); 5629 tree t1, t2; 5630 int same_p = tcode == code; 5631 tree op0 = NULL_TREE, op1 = NULL_TREE; 5632 bool sub_strict_overflow_p; 5633 5634 /* Don't deal with constants of zero here; they confuse the code below. */ 5635 if (integer_zerop (c)) 5636 return NULL_TREE; 5637 5638 if (TREE_CODE_CLASS (tcode) == tcc_unary) 5639 op0 = TREE_OPERAND (t, 0); 5640 5641 if (TREE_CODE_CLASS (tcode) == tcc_binary) 5642 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1); 5643 5644 /* Note that we need not handle conditional operations here since fold 5645 already handles those cases. So just do arithmetic here. */ 5646 switch (tcode) 5647 { 5648 case INTEGER_CST: 5649 /* For a constant, we can always simplify if we are a multiply 5650 or (for divide and modulus) if it is a multiple of our constant. */ 5651 if (code == MULT_EXPR 5652 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c))) 5653 return const_binop (code, fold_convert (ctype, t), 5654 fold_convert (ctype, c)); 5655 break; 5656 5657 CASE_CONVERT: case NON_LVALUE_EXPR: 5658 /* If op0 is an expression ... */ 5659 if ((COMPARISON_CLASS_P (op0) 5660 || UNARY_CLASS_P (op0) 5661 || BINARY_CLASS_P (op0) 5662 || VL_EXP_CLASS_P (op0) 5663 || EXPRESSION_CLASS_P (op0)) 5664 /* ... and has wrapping overflow, and its type is smaller 5665 than ctype, then we cannot pass through as widening. */ 5666 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)) 5667 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE 5668 && TYPE_IS_SIZETYPE (TREE_TYPE (op0))) 5669 && (TYPE_PRECISION (ctype) 5670 > TYPE_PRECISION (TREE_TYPE (op0)))) 5671 /* ... or this is a truncation (t is narrower than op0), 5672 then we cannot pass through this narrowing. */ 5673 || (TYPE_PRECISION (type) 5674 < TYPE_PRECISION (TREE_TYPE (op0))) 5675 /* ... or signedness changes for division or modulus, 5676 then we cannot pass through this conversion. */ 5677 || (code != MULT_EXPR 5678 && (TYPE_UNSIGNED (ctype) 5679 != TYPE_UNSIGNED (TREE_TYPE (op0)))) 5680 /* ... or has undefined overflow while the converted to 5681 type has not, we cannot do the operation in the inner type 5682 as that would introduce undefined overflow. */ 5683 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)) 5684 && !TYPE_OVERFLOW_UNDEFINED (type)))) 5685 break; 5686 5687 /* Pass the constant down and see if we can make a simplification. If 5688 we can, replace this expression with the inner simplification for 5689 possible later conversion to our or some other type. */ 5690 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0 5691 && TREE_CODE (t2) == INTEGER_CST 5692 && !TREE_OVERFLOW (t2) 5693 && (0 != (t1 = extract_muldiv (op0, t2, code, 5694 code == MULT_EXPR 5695 ? ctype : NULL_TREE, 5696 strict_overflow_p)))) 5697 return t1; 5698 break; 5699 5700 case ABS_EXPR: 5701 /* If widening the type changes it from signed to unsigned, then we 5702 must avoid building ABS_EXPR itself as unsigned. */ 5703 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type)) 5704 { 5705 tree cstype = (*signed_type_for) (ctype); 5706 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p)) 5707 != 0) 5708 { 5709 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1)); 5710 return fold_convert (ctype, t1); 5711 } 5712 break; 5713 } 5714 /* If the constant is negative, we cannot simplify this. */ 5715 if (tree_int_cst_sgn (c) == -1) 5716 break; 5717 /* FALLTHROUGH */ 5718 case NEGATE_EXPR: 5719 /* For division and modulus, type can't be unsigned, as e.g. 5720 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2. 5721 For signed types, even with wrapping overflow, this is fine. */ 5722 if (code != MULT_EXPR && TYPE_UNSIGNED (type)) 5723 break; 5724 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p)) 5725 != 0) 5726 return fold_build1 (tcode, ctype, fold_convert (ctype, t1)); 5727 break; 5728 5729 case MIN_EXPR: case MAX_EXPR: 5730 /* If widening the type changes the signedness, then we can't perform 5731 this optimization as that changes the result. */ 5732 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type)) 5733 break; 5734 5735 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */ 5736 sub_strict_overflow_p = false; 5737 if ((t1 = extract_muldiv (op0, c, code, wide_type, 5738 &sub_strict_overflow_p)) != 0 5739 && (t2 = extract_muldiv (op1, c, code, wide_type, 5740 &sub_strict_overflow_p)) != 0) 5741 { 5742 if (tree_int_cst_sgn (c) < 0) 5743 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR); 5744 if (sub_strict_overflow_p) 5745 *strict_overflow_p = true; 5746 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), 5747 fold_convert (ctype, t2)); 5748 } 5749 break; 5750 5751 case LSHIFT_EXPR: case RSHIFT_EXPR: 5752 /* If the second operand is constant, this is a multiplication 5753 or floor division, by a power of two, so we can treat it that 5754 way unless the multiplier or divisor overflows. Signed 5755 left-shift overflow is implementation-defined rather than 5756 undefined in C90, so do not convert signed left shift into 5757 multiplication. */ 5758 if (TREE_CODE (op1) == INTEGER_CST 5759 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0))) 5760 /* const_binop may not detect overflow correctly, 5761 so check for it explicitly here. */ 5762 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1) 5763 && TREE_INT_CST_HIGH (op1) == 0 5764 && 0 != (t1 = fold_convert (ctype, 5765 const_binop (LSHIFT_EXPR, 5766 size_one_node, 5767 op1))) 5768 && !TREE_OVERFLOW (t1)) 5769 return extract_muldiv (build2 (tcode == LSHIFT_EXPR 5770 ? MULT_EXPR : FLOOR_DIV_EXPR, 5771 ctype, 5772 fold_convert (ctype, op0), 5773 t1), 5774 c, code, wide_type, strict_overflow_p); 5775 break; 5776 5777 case PLUS_EXPR: case MINUS_EXPR: 5778 /* See if we can eliminate the operation on both sides. If we can, we 5779 can return a new PLUS or MINUS. If we can't, the only remaining 5780 cases where we can do anything are if the second operand is a 5781 constant. */ 5782 sub_strict_overflow_p = false; 5783 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p); 5784 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p); 5785 if (t1 != 0 && t2 != 0 5786 && (code == MULT_EXPR 5787 /* If not multiplication, we can only do this if both operands 5788 are divisible by c. */ 5789 || (multiple_of_p (ctype, op0, c) 5790 && multiple_of_p (ctype, op1, c)))) 5791 { 5792 if (sub_strict_overflow_p) 5793 *strict_overflow_p = true; 5794 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), 5795 fold_convert (ctype, t2)); 5796 } 5797 5798 /* If this was a subtraction, negate OP1 and set it to be an addition. 5799 This simplifies the logic below. */ 5800 if (tcode == MINUS_EXPR) 5801 { 5802 tcode = PLUS_EXPR, op1 = negate_expr (op1); 5803 /* If OP1 was not easily negatable, the constant may be OP0. */ 5804 if (TREE_CODE (op0) == INTEGER_CST) 5805 { 5806 tree tem = op0; 5807 op0 = op1; 5808 op1 = tem; 5809 tem = t1; 5810 t1 = t2; 5811 t2 = tem; 5812 } 5813 } 5814 5815 if (TREE_CODE (op1) != INTEGER_CST) 5816 break; 5817 5818 /* If either OP1 or C are negative, this optimization is not safe for 5819 some of the division and remainder types while for others we need 5820 to change the code. */ 5821 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0) 5822 { 5823 if (code == CEIL_DIV_EXPR) 5824 code = FLOOR_DIV_EXPR; 5825 else if (code == FLOOR_DIV_EXPR) 5826 code = CEIL_DIV_EXPR; 5827 else if (code != MULT_EXPR 5828 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR) 5829 break; 5830 } 5831 5832 /* If it's a multiply or a division/modulus operation of a multiple 5833 of our constant, do the operation and verify it doesn't overflow. */ 5834 if (code == MULT_EXPR 5835 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c))) 5836 { 5837 op1 = const_binop (code, fold_convert (ctype, op1), 5838 fold_convert (ctype, c)); 5839 /* We allow the constant to overflow with wrapping semantics. */ 5840 if (op1 == 0 5841 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype))) 5842 break; 5843 } 5844 else 5845 break; 5846 5847 /* If we have an unsigned type is not a sizetype, we cannot widen 5848 the operation since it will change the result if the original 5849 computation overflowed. */ 5850 if (TYPE_UNSIGNED (ctype) 5851 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)) 5852 && ctype != type) 5853 break; 5854 5855 /* If we were able to eliminate our operation from the first side, 5856 apply our operation to the second side and reform the PLUS. */ 5857 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR)) 5858 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1); 5859 5860 /* The last case is if we are a multiply. In that case, we can 5861 apply the distributive law to commute the multiply and addition 5862 if the multiplication of the constants doesn't overflow. */ 5863 if (code == MULT_EXPR) 5864 return fold_build2 (tcode, ctype, 5865 fold_build2 (code, ctype, 5866 fold_convert (ctype, op0), 5867 fold_convert (ctype, c)), 5868 op1); 5869 5870 break; 5871 5872 case MULT_EXPR: 5873 /* We have a special case here if we are doing something like 5874 (C * 8) % 4 since we know that's zero. */ 5875 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR 5876 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR) 5877 /* If the multiplication can overflow we cannot optimize this. 5878 ??? Until we can properly mark individual operations as 5879 not overflowing we need to treat sizetype special here as 5880 stor-layout relies on this opimization to make 5881 DECL_FIELD_BIT_OFFSET always a constant. */ 5882 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)) 5883 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE 5884 && TYPE_IS_SIZETYPE (TREE_TYPE (t)))) 5885 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST 5886 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c))) 5887 { 5888 *strict_overflow_p = true; 5889 return omit_one_operand (type, integer_zero_node, op0); 5890 } 5891 5892 /* ... fall through ... */ 5893 5894 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR: 5895 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR: 5896 /* If we can extract our operation from the LHS, do so and return a 5897 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise, 5898 do something only if the second operand is a constant. */ 5899 if (same_p 5900 && (t1 = extract_muldiv (op0, c, code, wide_type, 5901 strict_overflow_p)) != 0) 5902 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), 5903 fold_convert (ctype, op1)); 5904 else if (tcode == MULT_EXPR && code == MULT_EXPR 5905 && (t1 = extract_muldiv (op1, c, code, wide_type, 5906 strict_overflow_p)) != 0) 5907 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), 5908 fold_convert (ctype, t1)); 5909 else if (TREE_CODE (op1) != INTEGER_CST) 5910 return 0; 5911 5912 /* If these are the same operation types, we can associate them 5913 assuming no overflow. */ 5914 if (tcode == code) 5915 { 5916 double_int mul; 5917 int overflow_p; 5918 mul = double_int_mul_with_sign 5919 (double_int_ext 5920 (tree_to_double_int (op1), 5921 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)), 5922 double_int_ext 5923 (tree_to_double_int (c), 5924 TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)), 5925 false, &overflow_p); 5926 overflow_p = (((!TYPE_UNSIGNED (ctype) 5927 || (TREE_CODE (ctype) == INTEGER_TYPE 5928 && TYPE_IS_SIZETYPE (ctype))) 5929 && overflow_p) 5930 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1)); 5931 if (!double_int_fits_to_tree_p (ctype, mul) 5932 && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR) 5933 || !TYPE_UNSIGNED (ctype) 5934 || (TREE_CODE (ctype) == INTEGER_TYPE 5935 && TYPE_IS_SIZETYPE (ctype)))) 5936 overflow_p = 1; 5937 if (!overflow_p) 5938 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), 5939 double_int_to_tree (ctype, mul)); 5940 } 5941 5942 /* If these operations "cancel" each other, we have the main 5943 optimizations of this pass, which occur when either constant is a 5944 multiple of the other, in which case we replace this with either an 5945 operation or CODE or TCODE. 5946 5947 If we have an unsigned type that is not a sizetype, we cannot do 5948 this since it will change the result if the original computation 5949 overflowed. */ 5950 if ((TYPE_OVERFLOW_UNDEFINED (ctype) 5951 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))) 5952 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR) 5953 || (tcode == MULT_EXPR 5954 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR 5955 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR 5956 && code != MULT_EXPR))) 5957 { 5958 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c))) 5959 { 5960 if (TYPE_OVERFLOW_UNDEFINED (ctype)) 5961 *strict_overflow_p = true; 5962 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), 5963 fold_convert (ctype, 5964 const_binop (TRUNC_DIV_EXPR, 5965 op1, c))); 5966 } 5967 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1))) 5968 { 5969 if (TYPE_OVERFLOW_UNDEFINED (ctype)) 5970 *strict_overflow_p = true; 5971 return fold_build2 (code, ctype, fold_convert (ctype, op0), 5972 fold_convert (ctype, 5973 const_binop (TRUNC_DIV_EXPR, 5974 c, op1))); 5975 } 5976 } 5977 break; 5978 5979 default: 5980 break; 5981 } 5982 5983 return 0; 5984 } 5985 5986 /* Return a node which has the indicated constant VALUE (either 0 or 5987 1 for scalars or {-1,-1,..} or {0,0,...} for vectors), 5988 and is of the indicated TYPE. */ 5989 5990 tree 5991 constant_boolean_node (bool value, tree type) 5992 { 5993 if (type == integer_type_node) 5994 return value ? integer_one_node : integer_zero_node; 5995 else if (type == boolean_type_node) 5996 return value ? boolean_true_node : boolean_false_node; 5997 else if (TREE_CODE (type) == VECTOR_TYPE) 5998 return build_vector_from_val (type, 5999 build_int_cst (TREE_TYPE (type), 6000 value ? -1 : 0)); 6001 else 6002 return fold_convert (type, value ? integer_one_node : integer_zero_node); 6003 } 6004 6005 6006 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'. 6007 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here 6008 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)' 6009 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the 6010 COND is the first argument to CODE; otherwise (as in the example 6011 given here), it is the second argument. TYPE is the type of the 6012 original expression. Return NULL_TREE if no simplification is 6013 possible. */ 6014 6015 static tree 6016 fold_binary_op_with_conditional_arg (location_t loc, 6017 enum tree_code code, 6018 tree type, tree op0, tree op1, 6019 tree cond, tree arg, int cond_first_p) 6020 { 6021 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1); 6022 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0); 6023 tree test, true_value, false_value; 6024 tree lhs = NULL_TREE; 6025 tree rhs = NULL_TREE; 6026 6027 if (TREE_CODE (cond) == COND_EXPR) 6028 { 6029 test = TREE_OPERAND (cond, 0); 6030 true_value = TREE_OPERAND (cond, 1); 6031 false_value = TREE_OPERAND (cond, 2); 6032 /* If this operand throws an expression, then it does not make 6033 sense to try to perform a logical or arithmetic operation 6034 involving it. */ 6035 if (VOID_TYPE_P (TREE_TYPE (true_value))) 6036 lhs = true_value; 6037 if (VOID_TYPE_P (TREE_TYPE (false_value))) 6038 rhs = false_value; 6039 } 6040 else 6041 { 6042 tree testtype = TREE_TYPE (cond); 6043 test = cond; 6044 true_value = constant_boolean_node (true, testtype); 6045 false_value = constant_boolean_node (false, testtype); 6046 } 6047 6048 /* This transformation is only worthwhile if we don't have to wrap ARG 6049 in a SAVE_EXPR and the operation can be simplified without recursing 6050 on at least one of the branches once its pushed inside the COND_EXPR. */ 6051 if (!TREE_CONSTANT (arg) 6052 && (TREE_SIDE_EFFECTS (arg) 6053 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR 6054 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value))) 6055 return NULL_TREE; 6056 6057 arg = fold_convert_loc (loc, arg_type, arg); 6058 if (lhs == 0) 6059 { 6060 true_value = fold_convert_loc (loc, cond_type, true_value); 6061 if (cond_first_p) 6062 lhs = fold_build2_loc (loc, code, type, true_value, arg); 6063 else 6064 lhs = fold_build2_loc (loc, code, type, arg, true_value); 6065 } 6066 if (rhs == 0) 6067 { 6068 false_value = fold_convert_loc (loc, cond_type, false_value); 6069 if (cond_first_p) 6070 rhs = fold_build2_loc (loc, code, type, false_value, arg); 6071 else 6072 rhs = fold_build2_loc (loc, code, type, arg, false_value); 6073 } 6074 6075 /* Check that we have simplified at least one of the branches. */ 6076 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs)) 6077 return NULL_TREE; 6078 6079 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs); 6080 } 6081 6082 6083 /* Subroutine of fold() that checks for the addition of +/- 0.0. 6084 6085 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type 6086 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X - 6087 ADDEND is the same as X. 6088 6089 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero 6090 and finite. The problematic cases are when X is zero, and its mode 6091 has signed zeros. In the case of rounding towards -infinity, 6092 X - 0 is not the same as X because 0 - 0 is -0. In other rounding 6093 modes, X + 0 is not the same as X because -0 + 0 is 0. */ 6094 6095 bool 6096 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate) 6097 { 6098 if (!real_zerop (addend)) 6099 return false; 6100 6101 /* Don't allow the fold with -fsignaling-nans. */ 6102 if (HONOR_SNANS (TYPE_MODE (type))) 6103 return false; 6104 6105 /* Allow the fold if zeros aren't signed, or their sign isn't important. */ 6106 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))) 6107 return true; 6108 6109 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */ 6110 if (TREE_CODE (addend) == REAL_CST 6111 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend))) 6112 negate = !negate; 6113 6114 /* The mode has signed zeros, and we have to honor their sign. 6115 In this situation, there is only one case we can return true for. 6116 X - 0 is the same as X unless rounding towards -infinity is 6117 supported. */ 6118 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)); 6119 } 6120 6121 /* Subroutine of fold() that checks comparisons of built-in math 6122 functions against real constants. 6123 6124 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison 6125 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE 6126 is the type of the result and ARG0 and ARG1 are the operands of the 6127 comparison. ARG1 must be a TREE_REAL_CST. 6128 6129 The function returns the constant folded tree if a simplification 6130 can be made, and NULL_TREE otherwise. */ 6131 6132 static tree 6133 fold_mathfn_compare (location_t loc, 6134 enum built_in_function fcode, enum tree_code code, 6135 tree type, tree arg0, tree arg1) 6136 { 6137 REAL_VALUE_TYPE c; 6138 6139 if (BUILTIN_SQRT_P (fcode)) 6140 { 6141 tree arg = CALL_EXPR_ARG (arg0, 0); 6142 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0)); 6143 6144 c = TREE_REAL_CST (arg1); 6145 if (REAL_VALUE_NEGATIVE (c)) 6146 { 6147 /* sqrt(x) < y is always false, if y is negative. */ 6148 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR) 6149 return omit_one_operand_loc (loc, type, integer_zero_node, arg); 6150 6151 /* sqrt(x) > y is always true, if y is negative and we 6152 don't care about NaNs, i.e. negative values of x. */ 6153 if (code == NE_EXPR || !HONOR_NANS (mode)) 6154 return omit_one_operand_loc (loc, type, integer_one_node, arg); 6155 6156 /* sqrt(x) > y is the same as x >= 0, if y is negative. */ 6157 return fold_build2_loc (loc, GE_EXPR, type, arg, 6158 build_real (TREE_TYPE (arg), dconst0)); 6159 } 6160 else if (code == GT_EXPR || code == GE_EXPR) 6161 { 6162 REAL_VALUE_TYPE c2; 6163 6164 REAL_ARITHMETIC (c2, MULT_EXPR, c, c); 6165 real_convert (&c2, mode, &c2); 6166 6167 if (REAL_VALUE_ISINF (c2)) 6168 { 6169 /* sqrt(x) > y is x == +Inf, when y is very large. */ 6170 if (HONOR_INFINITIES (mode)) 6171 return fold_build2_loc (loc, EQ_EXPR, type, arg, 6172 build_real (TREE_TYPE (arg), c2)); 6173 6174 /* sqrt(x) > y is always false, when y is very large 6175 and we don't care about infinities. */ 6176 return omit_one_operand_loc (loc, type, integer_zero_node, arg); 6177 } 6178 6179 /* sqrt(x) > c is the same as x > c*c. */ 6180 return fold_build2_loc (loc, code, type, arg, 6181 build_real (TREE_TYPE (arg), c2)); 6182 } 6183 else if (code == LT_EXPR || code == LE_EXPR) 6184 { 6185 REAL_VALUE_TYPE c2; 6186 6187 REAL_ARITHMETIC (c2, MULT_EXPR, c, c); 6188 real_convert (&c2, mode, &c2); 6189 6190 if (REAL_VALUE_ISINF (c2)) 6191 { 6192 /* sqrt(x) < y is always true, when y is a very large 6193 value and we don't care about NaNs or Infinities. */ 6194 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode)) 6195 return omit_one_operand_loc (loc, type, integer_one_node, arg); 6196 6197 /* sqrt(x) < y is x != +Inf when y is very large and we 6198 don't care about NaNs. */ 6199 if (! HONOR_NANS (mode)) 6200 return fold_build2_loc (loc, NE_EXPR, type, arg, 6201 build_real (TREE_TYPE (arg), c2)); 6202 6203 /* sqrt(x) < y is x >= 0 when y is very large and we 6204 don't care about Infinities. */ 6205 if (! HONOR_INFINITIES (mode)) 6206 return fold_build2_loc (loc, GE_EXPR, type, arg, 6207 build_real (TREE_TYPE (arg), dconst0)); 6208 6209 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */ 6210 arg = save_expr (arg); 6211 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type, 6212 fold_build2_loc (loc, GE_EXPR, type, arg, 6213 build_real (TREE_TYPE (arg), 6214 dconst0)), 6215 fold_build2_loc (loc, NE_EXPR, type, arg, 6216 build_real (TREE_TYPE (arg), 6217 c2))); 6218 } 6219 6220 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */ 6221 if (! HONOR_NANS (mode)) 6222 return fold_build2_loc (loc, code, type, arg, 6223 build_real (TREE_TYPE (arg), c2)); 6224 6225 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */ 6226 arg = save_expr (arg); 6227 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type, 6228 fold_build2_loc (loc, GE_EXPR, type, arg, 6229 build_real (TREE_TYPE (arg), 6230 dconst0)), 6231 fold_build2_loc (loc, code, type, arg, 6232 build_real (TREE_TYPE (arg), 6233 c2))); 6234 } 6235 } 6236 6237 return NULL_TREE; 6238 } 6239 6240 /* Subroutine of fold() that optimizes comparisons against Infinities, 6241 either +Inf or -Inf. 6242 6243 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, 6244 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1 6245 are the operands of the comparison. ARG1 must be a TREE_REAL_CST. 6246 6247 The function returns the constant folded tree if a simplification 6248 can be made, and NULL_TREE otherwise. */ 6249 6250 static tree 6251 fold_inf_compare (location_t loc, enum tree_code code, tree type, 6252 tree arg0, tree arg1) 6253 { 6254 enum machine_mode mode; 6255 REAL_VALUE_TYPE max; 6256 tree temp; 6257 bool neg; 6258 6259 mode = TYPE_MODE (TREE_TYPE (arg0)); 6260 6261 /* For negative infinity swap the sense of the comparison. */ 6262 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)); 6263 if (neg) 6264 code = swap_tree_comparison (code); 6265 6266 switch (code) 6267 { 6268 case GT_EXPR: 6269 /* x > +Inf is always false, if with ignore sNANs. */ 6270 if (HONOR_SNANS (mode)) 6271 return NULL_TREE; 6272 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 6273 6274 case LE_EXPR: 6275 /* x <= +Inf is always true, if we don't case about NaNs. */ 6276 if (! HONOR_NANS (mode)) 6277 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 6278 6279 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */ 6280 arg0 = save_expr (arg0); 6281 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0); 6282 6283 case EQ_EXPR: 6284 case GE_EXPR: 6285 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */ 6286 real_maxval (&max, neg, mode); 6287 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type, 6288 arg0, build_real (TREE_TYPE (arg0), max)); 6289 6290 case LT_EXPR: 6291 /* x < +Inf is always equal to x <= DBL_MAX. */ 6292 real_maxval (&max, neg, mode); 6293 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type, 6294 arg0, build_real (TREE_TYPE (arg0), max)); 6295 6296 case NE_EXPR: 6297 /* x != +Inf is always equal to !(x > DBL_MAX). */ 6298 real_maxval (&max, neg, mode); 6299 if (! HONOR_NANS (mode)) 6300 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type, 6301 arg0, build_real (TREE_TYPE (arg0), max)); 6302 6303 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type, 6304 arg0, build_real (TREE_TYPE (arg0), max)); 6305 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp); 6306 6307 default: 6308 break; 6309 } 6310 6311 return NULL_TREE; 6312 } 6313 6314 /* Subroutine of fold() that optimizes comparisons of a division by 6315 a nonzero integer constant against an integer constant, i.e. 6316 X/C1 op C2. 6317 6318 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, 6319 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1 6320 are the operands of the comparison. ARG1 must be a TREE_REAL_CST. 6321 6322 The function returns the constant folded tree if a simplification 6323 can be made, and NULL_TREE otherwise. */ 6324 6325 static tree 6326 fold_div_compare (location_t loc, 6327 enum tree_code code, tree type, tree arg0, tree arg1) 6328 { 6329 tree prod, tmp, hi, lo; 6330 tree arg00 = TREE_OPERAND (arg0, 0); 6331 tree arg01 = TREE_OPERAND (arg0, 1); 6332 double_int val; 6333 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0)); 6334 bool neg_overflow; 6335 int overflow; 6336 6337 /* We have to do this the hard way to detect unsigned overflow. 6338 prod = int_const_binop (MULT_EXPR, arg01, arg1); */ 6339 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01), 6340 TREE_INT_CST_HIGH (arg01), 6341 TREE_INT_CST_LOW (arg1), 6342 TREE_INT_CST_HIGH (arg1), 6343 &val.low, &val.high, unsigned_p); 6344 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow); 6345 neg_overflow = false; 6346 6347 if (unsigned_p) 6348 { 6349 tmp = int_const_binop (MINUS_EXPR, arg01, 6350 build_int_cst (TREE_TYPE (arg01), 1)); 6351 lo = prod; 6352 6353 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */ 6354 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod), 6355 TREE_INT_CST_HIGH (prod), 6356 TREE_INT_CST_LOW (tmp), 6357 TREE_INT_CST_HIGH (tmp), 6358 &val.low, &val.high, unsigned_p); 6359 hi = force_fit_type_double (TREE_TYPE (arg00), val, 6360 -1, overflow | TREE_OVERFLOW (prod)); 6361 } 6362 else if (tree_int_cst_sgn (arg01) >= 0) 6363 { 6364 tmp = int_const_binop (MINUS_EXPR, arg01, 6365 build_int_cst (TREE_TYPE (arg01), 1)); 6366 switch (tree_int_cst_sgn (arg1)) 6367 { 6368 case -1: 6369 neg_overflow = true; 6370 lo = int_const_binop (MINUS_EXPR, prod, tmp); 6371 hi = prod; 6372 break; 6373 6374 case 0: 6375 lo = fold_negate_const (tmp, TREE_TYPE (arg0)); 6376 hi = tmp; 6377 break; 6378 6379 case 1: 6380 hi = int_const_binop (PLUS_EXPR, prod, tmp); 6381 lo = prod; 6382 break; 6383 6384 default: 6385 gcc_unreachable (); 6386 } 6387 } 6388 else 6389 { 6390 /* A negative divisor reverses the relational operators. */ 6391 code = swap_tree_comparison (code); 6392 6393 tmp = int_const_binop (PLUS_EXPR, arg01, 6394 build_int_cst (TREE_TYPE (arg01), 1)); 6395 switch (tree_int_cst_sgn (arg1)) 6396 { 6397 case -1: 6398 hi = int_const_binop (MINUS_EXPR, prod, tmp); 6399 lo = prod; 6400 break; 6401 6402 case 0: 6403 hi = fold_negate_const (tmp, TREE_TYPE (arg0)); 6404 lo = tmp; 6405 break; 6406 6407 case 1: 6408 neg_overflow = true; 6409 lo = int_const_binop (PLUS_EXPR, prod, tmp); 6410 hi = prod; 6411 break; 6412 6413 default: 6414 gcc_unreachable (); 6415 } 6416 } 6417 6418 switch (code) 6419 { 6420 case EQ_EXPR: 6421 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi)) 6422 return omit_one_operand_loc (loc, type, integer_zero_node, arg00); 6423 if (TREE_OVERFLOW (hi)) 6424 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo); 6425 if (TREE_OVERFLOW (lo)) 6426 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi); 6427 return build_range_check (loc, type, arg00, 1, lo, hi); 6428 6429 case NE_EXPR: 6430 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi)) 6431 return omit_one_operand_loc (loc, type, integer_one_node, arg00); 6432 if (TREE_OVERFLOW (hi)) 6433 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo); 6434 if (TREE_OVERFLOW (lo)) 6435 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi); 6436 return build_range_check (loc, type, arg00, 0, lo, hi); 6437 6438 case LT_EXPR: 6439 if (TREE_OVERFLOW (lo)) 6440 { 6441 tmp = neg_overflow ? integer_zero_node : integer_one_node; 6442 return omit_one_operand_loc (loc, type, tmp, arg00); 6443 } 6444 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo); 6445 6446 case LE_EXPR: 6447 if (TREE_OVERFLOW (hi)) 6448 { 6449 tmp = neg_overflow ? integer_zero_node : integer_one_node; 6450 return omit_one_operand_loc (loc, type, tmp, arg00); 6451 } 6452 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi); 6453 6454 case GT_EXPR: 6455 if (TREE_OVERFLOW (hi)) 6456 { 6457 tmp = neg_overflow ? integer_one_node : integer_zero_node; 6458 return omit_one_operand_loc (loc, type, tmp, arg00); 6459 } 6460 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi); 6461 6462 case GE_EXPR: 6463 if (TREE_OVERFLOW (lo)) 6464 { 6465 tmp = neg_overflow ? integer_one_node : integer_zero_node; 6466 return omit_one_operand_loc (loc, type, tmp, arg00); 6467 } 6468 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo); 6469 6470 default: 6471 break; 6472 } 6473 6474 return NULL_TREE; 6475 } 6476 6477 6478 /* If CODE with arguments ARG0 and ARG1 represents a single bit 6479 equality/inequality test, then return a simplified form of the test 6480 using a sign testing. Otherwise return NULL. TYPE is the desired 6481 result type. */ 6482 6483 static tree 6484 fold_single_bit_test_into_sign_test (location_t loc, 6485 enum tree_code code, tree arg0, tree arg1, 6486 tree result_type) 6487 { 6488 /* If this is testing a single bit, we can optimize the test. */ 6489 if ((code == NE_EXPR || code == EQ_EXPR) 6490 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1) 6491 && integer_pow2p (TREE_OPERAND (arg0, 1))) 6492 { 6493 /* If we have (A & C) != 0 where C is the sign bit of A, convert 6494 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */ 6495 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)); 6496 6497 if (arg00 != NULL_TREE 6498 /* This is only a win if casting to a signed type is cheap, 6499 i.e. when arg00's type is not a partial mode. */ 6500 && TYPE_PRECISION (TREE_TYPE (arg00)) 6501 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00)))) 6502 { 6503 tree stype = signed_type_for (TREE_TYPE (arg00)); 6504 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, 6505 result_type, 6506 fold_convert_loc (loc, stype, arg00), 6507 build_int_cst (stype, 0)); 6508 } 6509 } 6510 6511 return NULL_TREE; 6512 } 6513 6514 /* If CODE with arguments ARG0 and ARG1 represents a single bit 6515 equality/inequality test, then return a simplified form of 6516 the test using shifts and logical operations. Otherwise return 6517 NULL. TYPE is the desired result type. */ 6518 6519 tree 6520 fold_single_bit_test (location_t loc, enum tree_code code, 6521 tree arg0, tree arg1, tree result_type) 6522 { 6523 /* If this is testing a single bit, we can optimize the test. */ 6524 if ((code == NE_EXPR || code == EQ_EXPR) 6525 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1) 6526 && integer_pow2p (TREE_OPERAND (arg0, 1))) 6527 { 6528 tree inner = TREE_OPERAND (arg0, 0); 6529 tree type = TREE_TYPE (arg0); 6530 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1)); 6531 enum machine_mode operand_mode = TYPE_MODE (type); 6532 int ops_unsigned; 6533 tree signed_type, unsigned_type, intermediate_type; 6534 tree tem, one; 6535 6536 /* First, see if we can fold the single bit test into a sign-bit 6537 test. */ 6538 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, 6539 result_type); 6540 if (tem) 6541 return tem; 6542 6543 /* Otherwise we have (A & C) != 0 where C is a single bit, 6544 convert that into ((A >> C2) & 1). Where C2 = log2(C). 6545 Similarly for (A & C) == 0. */ 6546 6547 /* If INNER is a right shift of a constant and it plus BITNUM does 6548 not overflow, adjust BITNUM and INNER. */ 6549 if (TREE_CODE (inner) == RSHIFT_EXPR 6550 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST 6551 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0 6552 && bitnum < TYPE_PRECISION (type) 6553 && 0 > compare_tree_int (TREE_OPERAND (inner, 1), 6554 bitnum - TYPE_PRECISION (type))) 6555 { 6556 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1)); 6557 inner = TREE_OPERAND (inner, 0); 6558 } 6559 6560 /* If we are going to be able to omit the AND below, we must do our 6561 operations as unsigned. If we must use the AND, we have a choice. 6562 Normally unsigned is faster, but for some machines signed is. */ 6563 #ifdef LOAD_EXTEND_OP 6564 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND 6565 && !flag_syntax_only) ? 0 : 1; 6566 #else 6567 ops_unsigned = 1; 6568 #endif 6569 6570 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0); 6571 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1); 6572 intermediate_type = ops_unsigned ? unsigned_type : signed_type; 6573 inner = fold_convert_loc (loc, intermediate_type, inner); 6574 6575 if (bitnum != 0) 6576 inner = build2 (RSHIFT_EXPR, intermediate_type, 6577 inner, size_int (bitnum)); 6578 6579 one = build_int_cst (intermediate_type, 1); 6580 6581 if (code == EQ_EXPR) 6582 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one); 6583 6584 /* Put the AND last so it can combine with more things. */ 6585 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one); 6586 6587 /* Make sure to return the proper type. */ 6588 inner = fold_convert_loc (loc, result_type, inner); 6589 6590 return inner; 6591 } 6592 return NULL_TREE; 6593 } 6594 6595 /* Check whether we are allowed to reorder operands arg0 and arg1, 6596 such that the evaluation of arg1 occurs before arg0. */ 6597 6598 static bool 6599 reorder_operands_p (const_tree arg0, const_tree arg1) 6600 { 6601 if (! flag_evaluation_order) 6602 return true; 6603 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1)) 6604 return true; 6605 return ! TREE_SIDE_EFFECTS (arg0) 6606 && ! TREE_SIDE_EFFECTS (arg1); 6607 } 6608 6609 /* Test whether it is preferable two swap two operands, ARG0 and 6610 ARG1, for example because ARG0 is an integer constant and ARG1 6611 isn't. If REORDER is true, only recommend swapping if we can 6612 evaluate the operands in reverse order. */ 6613 6614 bool 6615 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder) 6616 { 6617 STRIP_SIGN_NOPS (arg0); 6618 STRIP_SIGN_NOPS (arg1); 6619 6620 if (TREE_CODE (arg1) == INTEGER_CST) 6621 return 0; 6622 if (TREE_CODE (arg0) == INTEGER_CST) 6623 return 1; 6624 6625 if (TREE_CODE (arg1) == REAL_CST) 6626 return 0; 6627 if (TREE_CODE (arg0) == REAL_CST) 6628 return 1; 6629 6630 if (TREE_CODE (arg1) == FIXED_CST) 6631 return 0; 6632 if (TREE_CODE (arg0) == FIXED_CST) 6633 return 1; 6634 6635 if (TREE_CODE (arg1) == COMPLEX_CST) 6636 return 0; 6637 if (TREE_CODE (arg0) == COMPLEX_CST) 6638 return 1; 6639 6640 if (TREE_CONSTANT (arg1)) 6641 return 0; 6642 if (TREE_CONSTANT (arg0)) 6643 return 1; 6644 6645 if (optimize_function_for_size_p (cfun)) 6646 return 0; 6647 6648 if (reorder && flag_evaluation_order 6649 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1))) 6650 return 0; 6651 6652 /* It is preferable to swap two SSA_NAME to ensure a canonical form 6653 for commutative and comparison operators. Ensuring a canonical 6654 form allows the optimizers to find additional redundancies without 6655 having to explicitly check for both orderings. */ 6656 if (TREE_CODE (arg0) == SSA_NAME 6657 && TREE_CODE (arg1) == SSA_NAME 6658 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1)) 6659 return 1; 6660 6661 /* Put SSA_NAMEs last. */ 6662 if (TREE_CODE (arg1) == SSA_NAME) 6663 return 0; 6664 if (TREE_CODE (arg0) == SSA_NAME) 6665 return 1; 6666 6667 /* Put variables last. */ 6668 if (DECL_P (arg1)) 6669 return 0; 6670 if (DECL_P (arg0)) 6671 return 1; 6672 6673 return 0; 6674 } 6675 6676 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where 6677 ARG0 is extended to a wider type. */ 6678 6679 static tree 6680 fold_widened_comparison (location_t loc, enum tree_code code, 6681 tree type, tree arg0, tree arg1) 6682 { 6683 tree arg0_unw = get_unwidened (arg0, NULL_TREE); 6684 tree arg1_unw; 6685 tree shorter_type, outer_type; 6686 tree min, max; 6687 bool above, below; 6688 6689 if (arg0_unw == arg0) 6690 return NULL_TREE; 6691 shorter_type = TREE_TYPE (arg0_unw); 6692 6693 #ifdef HAVE_canonicalize_funcptr_for_compare 6694 /* Disable this optimization if we're casting a function pointer 6695 type on targets that require function pointer canonicalization. */ 6696 if (HAVE_canonicalize_funcptr_for_compare 6697 && TREE_CODE (shorter_type) == POINTER_TYPE 6698 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE) 6699 return NULL_TREE; 6700 #endif 6701 6702 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type)) 6703 return NULL_TREE; 6704 6705 arg1_unw = get_unwidened (arg1, NULL_TREE); 6706 6707 /* If possible, express the comparison in the shorter mode. */ 6708 if ((code == EQ_EXPR || code == NE_EXPR 6709 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type)) 6710 && (TREE_TYPE (arg1_unw) == shorter_type 6711 || ((TYPE_PRECISION (shorter_type) 6712 >= TYPE_PRECISION (TREE_TYPE (arg1_unw))) 6713 && (TYPE_UNSIGNED (shorter_type) 6714 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw)))) 6715 || (TREE_CODE (arg1_unw) == INTEGER_CST 6716 && (TREE_CODE (shorter_type) == INTEGER_TYPE 6717 || TREE_CODE (shorter_type) == BOOLEAN_TYPE) 6718 && int_fits_type_p (arg1_unw, shorter_type)))) 6719 return fold_build2_loc (loc, code, type, arg0_unw, 6720 fold_convert_loc (loc, shorter_type, arg1_unw)); 6721 6722 if (TREE_CODE (arg1_unw) != INTEGER_CST 6723 || TREE_CODE (shorter_type) != INTEGER_TYPE 6724 || !int_fits_type_p (arg1_unw, shorter_type)) 6725 return NULL_TREE; 6726 6727 /* If we are comparing with the integer that does not fit into the range 6728 of the shorter type, the result is known. */ 6729 outer_type = TREE_TYPE (arg1_unw); 6730 min = lower_bound_in_type (outer_type, shorter_type); 6731 max = upper_bound_in_type (outer_type, shorter_type); 6732 6733 above = integer_nonzerop (fold_relational_const (LT_EXPR, type, 6734 max, arg1_unw)); 6735 below = integer_nonzerop (fold_relational_const (LT_EXPR, type, 6736 arg1_unw, min)); 6737 6738 switch (code) 6739 { 6740 case EQ_EXPR: 6741 if (above || below) 6742 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 6743 break; 6744 6745 case NE_EXPR: 6746 if (above || below) 6747 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 6748 break; 6749 6750 case LT_EXPR: 6751 case LE_EXPR: 6752 if (above) 6753 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 6754 else if (below) 6755 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 6756 6757 case GT_EXPR: 6758 case GE_EXPR: 6759 if (above) 6760 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 6761 else if (below) 6762 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 6763 6764 default: 6765 break; 6766 } 6767 6768 return NULL_TREE; 6769 } 6770 6771 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for 6772 ARG0 just the signedness is changed. */ 6773 6774 static tree 6775 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type, 6776 tree arg0, tree arg1) 6777 { 6778 tree arg0_inner; 6779 tree inner_type, outer_type; 6780 6781 if (!CONVERT_EXPR_P (arg0)) 6782 return NULL_TREE; 6783 6784 outer_type = TREE_TYPE (arg0); 6785 arg0_inner = TREE_OPERAND (arg0, 0); 6786 inner_type = TREE_TYPE (arg0_inner); 6787 6788 #ifdef HAVE_canonicalize_funcptr_for_compare 6789 /* Disable this optimization if we're casting a function pointer 6790 type on targets that require function pointer canonicalization. */ 6791 if (HAVE_canonicalize_funcptr_for_compare 6792 && TREE_CODE (inner_type) == POINTER_TYPE 6793 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE) 6794 return NULL_TREE; 6795 #endif 6796 6797 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type)) 6798 return NULL_TREE; 6799 6800 if (TREE_CODE (arg1) != INTEGER_CST 6801 && !(CONVERT_EXPR_P (arg1) 6802 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type)) 6803 return NULL_TREE; 6804 6805 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type) 6806 && code != NE_EXPR 6807 && code != EQ_EXPR) 6808 return NULL_TREE; 6809 6810 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type)) 6811 return NULL_TREE; 6812 6813 if (TREE_CODE (arg1) == INTEGER_CST) 6814 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1), 6815 0, TREE_OVERFLOW (arg1)); 6816 else 6817 arg1 = fold_convert_loc (loc, inner_type, arg1); 6818 6819 return fold_build2_loc (loc, code, type, arg0_inner, arg1); 6820 } 6821 6822 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is 6823 step of the array. Reconstructs s and delta in the case of s * 6824 delta being an integer constant (and thus already folded). ADDR is 6825 the address. MULT is the multiplicative expression. If the 6826 function succeeds, the new address expression is returned. 6827 Otherwise NULL_TREE is returned. LOC is the location of the 6828 resulting expression. */ 6829 6830 static tree 6831 try_move_mult_to_index (location_t loc, tree addr, tree op1) 6832 { 6833 tree s, delta, step; 6834 tree ref = TREE_OPERAND (addr, 0), pref; 6835 tree ret, pos; 6836 tree itype; 6837 bool mdim = false; 6838 6839 /* Strip the nops that might be added when converting op1 to sizetype. */ 6840 STRIP_NOPS (op1); 6841 6842 /* Canonicalize op1 into a possibly non-constant delta 6843 and an INTEGER_CST s. */ 6844 if (TREE_CODE (op1) == MULT_EXPR) 6845 { 6846 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1); 6847 6848 STRIP_NOPS (arg0); 6849 STRIP_NOPS (arg1); 6850 6851 if (TREE_CODE (arg0) == INTEGER_CST) 6852 { 6853 s = arg0; 6854 delta = arg1; 6855 } 6856 else if (TREE_CODE (arg1) == INTEGER_CST) 6857 { 6858 s = arg1; 6859 delta = arg0; 6860 } 6861 else 6862 return NULL_TREE; 6863 } 6864 else if (TREE_CODE (op1) == INTEGER_CST) 6865 { 6866 delta = op1; 6867 s = NULL_TREE; 6868 } 6869 else 6870 { 6871 /* Simulate we are delta * 1. */ 6872 delta = op1; 6873 s = integer_one_node; 6874 } 6875 6876 /* Handle &x.array the same as we would handle &x.array[0]. */ 6877 if (TREE_CODE (ref) == COMPONENT_REF 6878 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE) 6879 { 6880 tree domain; 6881 6882 /* Remember if this was a multi-dimensional array. */ 6883 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF) 6884 mdim = true; 6885 6886 domain = TYPE_DOMAIN (TREE_TYPE (ref)); 6887 if (! domain) 6888 goto cont; 6889 itype = TREE_TYPE (domain); 6890 6891 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref))); 6892 if (TREE_CODE (step) != INTEGER_CST) 6893 goto cont; 6894 6895 if (s) 6896 { 6897 if (! tree_int_cst_equal (step, s)) 6898 goto cont; 6899 } 6900 else 6901 { 6902 /* Try if delta is a multiple of step. */ 6903 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step); 6904 if (! tmp) 6905 goto cont; 6906 delta = tmp; 6907 } 6908 6909 /* Only fold here if we can verify we do not overflow one 6910 dimension of a multi-dimensional array. */ 6911 if (mdim) 6912 { 6913 tree tmp; 6914 6915 if (!TYPE_MIN_VALUE (domain) 6916 || !TYPE_MAX_VALUE (domain) 6917 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST) 6918 goto cont; 6919 6920 tmp = fold_binary_loc (loc, PLUS_EXPR, itype, 6921 fold_convert_loc (loc, itype, 6922 TYPE_MIN_VALUE (domain)), 6923 fold_convert_loc (loc, itype, delta)); 6924 if (TREE_CODE (tmp) != INTEGER_CST 6925 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp)) 6926 goto cont; 6927 } 6928 6929 /* We found a suitable component reference. */ 6930 6931 pref = TREE_OPERAND (addr, 0); 6932 ret = copy_node (pref); 6933 SET_EXPR_LOCATION (ret, loc); 6934 6935 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret, 6936 fold_build2_loc 6937 (loc, PLUS_EXPR, itype, 6938 fold_convert_loc (loc, itype, 6939 TYPE_MIN_VALUE 6940 (TYPE_DOMAIN (TREE_TYPE (ref)))), 6941 fold_convert_loc (loc, itype, delta)), 6942 NULL_TREE, NULL_TREE); 6943 return build_fold_addr_expr_loc (loc, ret); 6944 } 6945 6946 cont: 6947 6948 for (;; ref = TREE_OPERAND (ref, 0)) 6949 { 6950 if (TREE_CODE (ref) == ARRAY_REF) 6951 { 6952 tree domain; 6953 6954 /* Remember if this was a multi-dimensional array. */ 6955 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF) 6956 mdim = true; 6957 6958 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0))); 6959 if (! domain) 6960 continue; 6961 itype = TREE_TYPE (domain); 6962 6963 step = array_ref_element_size (ref); 6964 if (TREE_CODE (step) != INTEGER_CST) 6965 continue; 6966 6967 if (s) 6968 { 6969 if (! tree_int_cst_equal (step, s)) 6970 continue; 6971 } 6972 else 6973 { 6974 /* Try if delta is a multiple of step. */ 6975 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step); 6976 if (! tmp) 6977 continue; 6978 delta = tmp; 6979 } 6980 6981 /* Only fold here if we can verify we do not overflow one 6982 dimension of a multi-dimensional array. */ 6983 if (mdim) 6984 { 6985 tree tmp; 6986 6987 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST 6988 || !TYPE_MAX_VALUE (domain) 6989 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST) 6990 continue; 6991 6992 tmp = fold_binary_loc (loc, PLUS_EXPR, itype, 6993 fold_convert_loc (loc, itype, 6994 TREE_OPERAND (ref, 1)), 6995 fold_convert_loc (loc, itype, delta)); 6996 if (!tmp 6997 || TREE_CODE (tmp) != INTEGER_CST 6998 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp)) 6999 continue; 7000 } 7001 7002 break; 7003 } 7004 else 7005 mdim = false; 7006 7007 if (!handled_component_p (ref)) 7008 return NULL_TREE; 7009 } 7010 7011 /* We found the suitable array reference. So copy everything up to it, 7012 and replace the index. */ 7013 7014 pref = TREE_OPERAND (addr, 0); 7015 ret = copy_node (pref); 7016 SET_EXPR_LOCATION (ret, loc); 7017 pos = ret; 7018 7019 while (pref != ref) 7020 { 7021 pref = TREE_OPERAND (pref, 0); 7022 TREE_OPERAND (pos, 0) = copy_node (pref); 7023 pos = TREE_OPERAND (pos, 0); 7024 } 7025 7026 TREE_OPERAND (pos, 1) 7027 = fold_build2_loc (loc, PLUS_EXPR, itype, 7028 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)), 7029 fold_convert_loc (loc, itype, delta)); 7030 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret); 7031 } 7032 7033 7034 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y 7035 means A >= Y && A != MAX, but in this case we know that 7036 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */ 7037 7038 static tree 7039 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound) 7040 { 7041 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y; 7042 7043 if (TREE_CODE (bound) == LT_EXPR) 7044 a = TREE_OPERAND (bound, 0); 7045 else if (TREE_CODE (bound) == GT_EXPR) 7046 a = TREE_OPERAND (bound, 1); 7047 else 7048 return NULL_TREE; 7049 7050 typea = TREE_TYPE (a); 7051 if (!INTEGRAL_TYPE_P (typea) 7052 && !POINTER_TYPE_P (typea)) 7053 return NULL_TREE; 7054 7055 if (TREE_CODE (ineq) == LT_EXPR) 7056 { 7057 a1 = TREE_OPERAND (ineq, 1); 7058 y = TREE_OPERAND (ineq, 0); 7059 } 7060 else if (TREE_CODE (ineq) == GT_EXPR) 7061 { 7062 a1 = TREE_OPERAND (ineq, 0); 7063 y = TREE_OPERAND (ineq, 1); 7064 } 7065 else 7066 return NULL_TREE; 7067 7068 if (TREE_TYPE (a1) != typea) 7069 return NULL_TREE; 7070 7071 if (POINTER_TYPE_P (typea)) 7072 { 7073 /* Convert the pointer types into integer before taking the difference. */ 7074 tree ta = fold_convert_loc (loc, ssizetype, a); 7075 tree ta1 = fold_convert_loc (loc, ssizetype, a1); 7076 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta); 7077 } 7078 else 7079 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a); 7080 7081 if (!diff || !integer_onep (diff)) 7082 return NULL_TREE; 7083 7084 return fold_build2_loc (loc, GE_EXPR, type, a, y); 7085 } 7086 7087 /* Fold a sum or difference of at least one multiplication. 7088 Returns the folded tree or NULL if no simplification could be made. */ 7089 7090 static tree 7091 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type, 7092 tree arg0, tree arg1) 7093 { 7094 tree arg00, arg01, arg10, arg11; 7095 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same; 7096 7097 /* (A * C) +- (B * C) -> (A+-B) * C. 7098 (A * C) +- A -> A * (C+-1). 7099 We are most concerned about the case where C is a constant, 7100 but other combinations show up during loop reduction. Since 7101 it is not difficult, try all four possibilities. */ 7102 7103 if (TREE_CODE (arg0) == MULT_EXPR) 7104 { 7105 arg00 = TREE_OPERAND (arg0, 0); 7106 arg01 = TREE_OPERAND (arg0, 1); 7107 } 7108 else if (TREE_CODE (arg0) == INTEGER_CST) 7109 { 7110 arg00 = build_one_cst (type); 7111 arg01 = arg0; 7112 } 7113 else 7114 { 7115 /* We cannot generate constant 1 for fract. */ 7116 if (ALL_FRACT_MODE_P (TYPE_MODE (type))) 7117 return NULL_TREE; 7118 arg00 = arg0; 7119 arg01 = build_one_cst (type); 7120 } 7121 if (TREE_CODE (arg1) == MULT_EXPR) 7122 { 7123 arg10 = TREE_OPERAND (arg1, 0); 7124 arg11 = TREE_OPERAND (arg1, 1); 7125 } 7126 else if (TREE_CODE (arg1) == INTEGER_CST) 7127 { 7128 arg10 = build_one_cst (type); 7129 /* As we canonicalize A - 2 to A + -2 get rid of that sign for 7130 the purpose of this canonicalization. */ 7131 if (TREE_INT_CST_HIGH (arg1) == -1 7132 && negate_expr_p (arg1) 7133 && code == PLUS_EXPR) 7134 { 7135 arg11 = negate_expr (arg1); 7136 code = MINUS_EXPR; 7137 } 7138 else 7139 arg11 = arg1; 7140 } 7141 else 7142 { 7143 /* We cannot generate constant 1 for fract. */ 7144 if (ALL_FRACT_MODE_P (TYPE_MODE (type))) 7145 return NULL_TREE; 7146 arg10 = arg1; 7147 arg11 = build_one_cst (type); 7148 } 7149 same = NULL_TREE; 7150 7151 if (operand_equal_p (arg01, arg11, 0)) 7152 same = arg01, alt0 = arg00, alt1 = arg10; 7153 else if (operand_equal_p (arg00, arg10, 0)) 7154 same = arg00, alt0 = arg01, alt1 = arg11; 7155 else if (operand_equal_p (arg00, arg11, 0)) 7156 same = arg00, alt0 = arg01, alt1 = arg10; 7157 else if (operand_equal_p (arg01, arg10, 0)) 7158 same = arg01, alt0 = arg00, alt1 = arg11; 7159 7160 /* No identical multiplicands; see if we can find a common 7161 power-of-two factor in non-power-of-two multiplies. This 7162 can help in multi-dimensional array access. */ 7163 else if (host_integerp (arg01, 0) 7164 && host_integerp (arg11, 0)) 7165 { 7166 HOST_WIDE_INT int01, int11, tmp; 7167 bool swap = false; 7168 tree maybe_same; 7169 int01 = TREE_INT_CST_LOW (arg01); 7170 int11 = TREE_INT_CST_LOW (arg11); 7171 7172 /* Move min of absolute values to int11. */ 7173 if (absu_hwi (int01) < absu_hwi (int11)) 7174 { 7175 tmp = int01, int01 = int11, int11 = tmp; 7176 alt0 = arg00, arg00 = arg10, arg10 = alt0; 7177 maybe_same = arg01; 7178 swap = true; 7179 } 7180 else 7181 maybe_same = arg11; 7182 7183 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0 7184 /* The remainder should not be a constant, otherwise we 7185 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has 7186 increased the number of multiplications necessary. */ 7187 && TREE_CODE (arg10) != INTEGER_CST) 7188 { 7189 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00, 7190 build_int_cst (TREE_TYPE (arg00), 7191 int01 / int11)); 7192 alt1 = arg10; 7193 same = maybe_same; 7194 if (swap) 7195 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same; 7196 } 7197 } 7198 7199 if (same) 7200 return fold_build2_loc (loc, MULT_EXPR, type, 7201 fold_build2_loc (loc, code, type, 7202 fold_convert_loc (loc, type, alt0), 7203 fold_convert_loc (loc, type, alt1)), 7204 fold_convert_loc (loc, type, same)); 7205 7206 return NULL_TREE; 7207 } 7208 7209 /* Subroutine of native_encode_expr. Encode the INTEGER_CST 7210 specified by EXPR into the buffer PTR of length LEN bytes. 7211 Return the number of bytes placed in the buffer, or zero 7212 upon failure. */ 7213 7214 static int 7215 native_encode_int (const_tree expr, unsigned char *ptr, int len) 7216 { 7217 tree type = TREE_TYPE (expr); 7218 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); 7219 int byte, offset, word, words; 7220 unsigned char value; 7221 7222 if (total_bytes > len) 7223 return 0; 7224 words = total_bytes / UNITS_PER_WORD; 7225 7226 for (byte = 0; byte < total_bytes; byte++) 7227 { 7228 int bitpos = byte * BITS_PER_UNIT; 7229 if (bitpos < HOST_BITS_PER_WIDE_INT) 7230 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos); 7231 else 7232 value = (unsigned char) (TREE_INT_CST_HIGH (expr) 7233 >> (bitpos - HOST_BITS_PER_WIDE_INT)); 7234 7235 if (total_bytes > UNITS_PER_WORD) 7236 { 7237 word = byte / UNITS_PER_WORD; 7238 if (WORDS_BIG_ENDIAN) 7239 word = (words - 1) - word; 7240 offset = word * UNITS_PER_WORD; 7241 if (BYTES_BIG_ENDIAN) 7242 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD); 7243 else 7244 offset += byte % UNITS_PER_WORD; 7245 } 7246 else 7247 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte; 7248 ptr[offset] = value; 7249 } 7250 return total_bytes; 7251 } 7252 7253 7254 /* Subroutine of native_encode_expr. Encode the REAL_CST 7255 specified by EXPR into the buffer PTR of length LEN bytes. 7256 Return the number of bytes placed in the buffer, or zero 7257 upon failure. */ 7258 7259 static int 7260 native_encode_real (const_tree expr, unsigned char *ptr, int len) 7261 { 7262 tree type = TREE_TYPE (expr); 7263 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); 7264 int byte, offset, word, words, bitpos; 7265 unsigned char value; 7266 7267 /* There are always 32 bits in each long, no matter the size of 7268 the hosts long. We handle floating point representations with 7269 up to 192 bits. */ 7270 long tmp[6]; 7271 7272 if (total_bytes > len) 7273 return 0; 7274 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD; 7275 7276 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type)); 7277 7278 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT; 7279 bitpos += BITS_PER_UNIT) 7280 { 7281 byte = (bitpos / BITS_PER_UNIT) & 3; 7282 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31)); 7283 7284 if (UNITS_PER_WORD < 4) 7285 { 7286 word = byte / UNITS_PER_WORD; 7287 if (WORDS_BIG_ENDIAN) 7288 word = (words - 1) - word; 7289 offset = word * UNITS_PER_WORD; 7290 if (BYTES_BIG_ENDIAN) 7291 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD); 7292 else 7293 offset += byte % UNITS_PER_WORD; 7294 } 7295 else 7296 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte; 7297 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value; 7298 } 7299 return total_bytes; 7300 } 7301 7302 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST 7303 specified by EXPR into the buffer PTR of length LEN bytes. 7304 Return the number of bytes placed in the buffer, or zero 7305 upon failure. */ 7306 7307 static int 7308 native_encode_complex (const_tree expr, unsigned char *ptr, int len) 7309 { 7310 int rsize, isize; 7311 tree part; 7312 7313 part = TREE_REALPART (expr); 7314 rsize = native_encode_expr (part, ptr, len); 7315 if (rsize == 0) 7316 return 0; 7317 part = TREE_IMAGPART (expr); 7318 isize = native_encode_expr (part, ptr+rsize, len-rsize); 7319 if (isize != rsize) 7320 return 0; 7321 return rsize + isize; 7322 } 7323 7324 7325 /* Subroutine of native_encode_expr. Encode the VECTOR_CST 7326 specified by EXPR into the buffer PTR of length LEN bytes. 7327 Return the number of bytes placed in the buffer, or zero 7328 upon failure. */ 7329 7330 static int 7331 native_encode_vector (const_tree expr, unsigned char *ptr, int len) 7332 { 7333 int i, size, offset, count; 7334 tree itype, elem, elements; 7335 7336 offset = 0; 7337 elements = TREE_VECTOR_CST_ELTS (expr); 7338 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr)); 7339 itype = TREE_TYPE (TREE_TYPE (expr)); 7340 size = GET_MODE_SIZE (TYPE_MODE (itype)); 7341 for (i = 0; i < count; i++) 7342 { 7343 if (elements) 7344 { 7345 elem = TREE_VALUE (elements); 7346 elements = TREE_CHAIN (elements); 7347 } 7348 else 7349 elem = NULL_TREE; 7350 7351 if (elem) 7352 { 7353 if (native_encode_expr (elem, ptr+offset, len-offset) != size) 7354 return 0; 7355 } 7356 else 7357 { 7358 if (offset + size > len) 7359 return 0; 7360 memset (ptr+offset, 0, size); 7361 } 7362 offset += size; 7363 } 7364 return offset; 7365 } 7366 7367 7368 /* Subroutine of native_encode_expr. Encode the STRING_CST 7369 specified by EXPR into the buffer PTR of length LEN bytes. 7370 Return the number of bytes placed in the buffer, or zero 7371 upon failure. */ 7372 7373 static int 7374 native_encode_string (const_tree expr, unsigned char *ptr, int len) 7375 { 7376 tree type = TREE_TYPE (expr); 7377 HOST_WIDE_INT total_bytes; 7378 7379 if (TREE_CODE (type) != ARRAY_TYPE 7380 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE 7381 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT 7382 || !host_integerp (TYPE_SIZE_UNIT (type), 0)) 7383 return 0; 7384 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0); 7385 if (total_bytes > len) 7386 return 0; 7387 if (TREE_STRING_LENGTH (expr) < total_bytes) 7388 { 7389 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr)); 7390 memset (ptr + TREE_STRING_LENGTH (expr), 0, 7391 total_bytes - TREE_STRING_LENGTH (expr)); 7392 } 7393 else 7394 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes); 7395 return total_bytes; 7396 } 7397 7398 7399 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST, 7400 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the 7401 buffer PTR of length LEN bytes. Return the number of bytes 7402 placed in the buffer, or zero upon failure. */ 7403 7404 int 7405 native_encode_expr (const_tree expr, unsigned char *ptr, int len) 7406 { 7407 switch (TREE_CODE (expr)) 7408 { 7409 case INTEGER_CST: 7410 return native_encode_int (expr, ptr, len); 7411 7412 case REAL_CST: 7413 return native_encode_real (expr, ptr, len); 7414 7415 case COMPLEX_CST: 7416 return native_encode_complex (expr, ptr, len); 7417 7418 case VECTOR_CST: 7419 return native_encode_vector (expr, ptr, len); 7420 7421 case STRING_CST: 7422 return native_encode_string (expr, ptr, len); 7423 7424 default: 7425 return 0; 7426 } 7427 } 7428 7429 7430 /* Subroutine of native_interpret_expr. Interpret the contents of 7431 the buffer PTR of length LEN as an INTEGER_CST of type TYPE. 7432 If the buffer cannot be interpreted, return NULL_TREE. */ 7433 7434 static tree 7435 native_interpret_int (tree type, const unsigned char *ptr, int len) 7436 { 7437 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); 7438 int byte, offset, word, words; 7439 unsigned char value; 7440 double_int result; 7441 7442 if (total_bytes > len) 7443 return NULL_TREE; 7444 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT) 7445 return NULL_TREE; 7446 7447 result = double_int_zero; 7448 words = total_bytes / UNITS_PER_WORD; 7449 7450 for (byte = 0; byte < total_bytes; byte++) 7451 { 7452 int bitpos = byte * BITS_PER_UNIT; 7453 if (total_bytes > UNITS_PER_WORD) 7454 { 7455 word = byte / UNITS_PER_WORD; 7456 if (WORDS_BIG_ENDIAN) 7457 word = (words - 1) - word; 7458 offset = word * UNITS_PER_WORD; 7459 if (BYTES_BIG_ENDIAN) 7460 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD); 7461 else 7462 offset += byte % UNITS_PER_WORD; 7463 } 7464 else 7465 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte; 7466 value = ptr[offset]; 7467 7468 if (bitpos < HOST_BITS_PER_WIDE_INT) 7469 result.low |= (unsigned HOST_WIDE_INT) value << bitpos; 7470 else 7471 result.high |= (unsigned HOST_WIDE_INT) value 7472 << (bitpos - HOST_BITS_PER_WIDE_INT); 7473 } 7474 7475 return double_int_to_tree (type, result); 7476 } 7477 7478 7479 /* Subroutine of native_interpret_expr. Interpret the contents of 7480 the buffer PTR of length LEN as a REAL_CST of type TYPE. 7481 If the buffer cannot be interpreted, return NULL_TREE. */ 7482 7483 static tree 7484 native_interpret_real (tree type, const unsigned char *ptr, int len) 7485 { 7486 enum machine_mode mode = TYPE_MODE (type); 7487 int total_bytes = GET_MODE_SIZE (mode); 7488 int byte, offset, word, words, bitpos; 7489 unsigned char value; 7490 /* There are always 32 bits in each long, no matter the size of 7491 the hosts long. We handle floating point representations with 7492 up to 192 bits. */ 7493 REAL_VALUE_TYPE r; 7494 long tmp[6]; 7495 7496 total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); 7497 if (total_bytes > len || total_bytes > 24) 7498 return NULL_TREE; 7499 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD; 7500 7501 memset (tmp, 0, sizeof (tmp)); 7502 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT; 7503 bitpos += BITS_PER_UNIT) 7504 { 7505 byte = (bitpos / BITS_PER_UNIT) & 3; 7506 if (UNITS_PER_WORD < 4) 7507 { 7508 word = byte / UNITS_PER_WORD; 7509 if (WORDS_BIG_ENDIAN) 7510 word = (words - 1) - word; 7511 offset = word * UNITS_PER_WORD; 7512 if (BYTES_BIG_ENDIAN) 7513 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD); 7514 else 7515 offset += byte % UNITS_PER_WORD; 7516 } 7517 else 7518 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte; 7519 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)]; 7520 7521 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31); 7522 } 7523 7524 real_from_target (&r, tmp, mode); 7525 return build_real (type, r); 7526 } 7527 7528 7529 /* Subroutine of native_interpret_expr. Interpret the contents of 7530 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE. 7531 If the buffer cannot be interpreted, return NULL_TREE. */ 7532 7533 static tree 7534 native_interpret_complex (tree type, const unsigned char *ptr, int len) 7535 { 7536 tree etype, rpart, ipart; 7537 int size; 7538 7539 etype = TREE_TYPE (type); 7540 size = GET_MODE_SIZE (TYPE_MODE (etype)); 7541 if (size * 2 > len) 7542 return NULL_TREE; 7543 rpart = native_interpret_expr (etype, ptr, size); 7544 if (!rpart) 7545 return NULL_TREE; 7546 ipart = native_interpret_expr (etype, ptr+size, size); 7547 if (!ipart) 7548 return NULL_TREE; 7549 return build_complex (type, rpart, ipart); 7550 } 7551 7552 7553 /* Subroutine of native_interpret_expr. Interpret the contents of 7554 the buffer PTR of length LEN as a VECTOR_CST of type TYPE. 7555 If the buffer cannot be interpreted, return NULL_TREE. */ 7556 7557 static tree 7558 native_interpret_vector (tree type, const unsigned char *ptr, int len) 7559 { 7560 tree etype, elem, elements; 7561 int i, size, count; 7562 7563 etype = TREE_TYPE (type); 7564 size = GET_MODE_SIZE (TYPE_MODE (etype)); 7565 count = TYPE_VECTOR_SUBPARTS (type); 7566 if (size * count > len) 7567 return NULL_TREE; 7568 7569 elements = NULL_TREE; 7570 for (i = count - 1; i >= 0; i--) 7571 { 7572 elem = native_interpret_expr (etype, ptr+(i*size), size); 7573 if (!elem) 7574 return NULL_TREE; 7575 elements = tree_cons (NULL_TREE, elem, elements); 7576 } 7577 return build_vector (type, elements); 7578 } 7579 7580 7581 /* Subroutine of fold_view_convert_expr. Interpret the contents of 7582 the buffer PTR of length LEN as a constant of type TYPE. For 7583 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P 7584 we return a REAL_CST, etc... If the buffer cannot be interpreted, 7585 return NULL_TREE. */ 7586 7587 tree 7588 native_interpret_expr (tree type, const unsigned char *ptr, int len) 7589 { 7590 switch (TREE_CODE (type)) 7591 { 7592 case INTEGER_TYPE: 7593 case ENUMERAL_TYPE: 7594 case BOOLEAN_TYPE: 7595 return native_interpret_int (type, ptr, len); 7596 7597 case REAL_TYPE: 7598 return native_interpret_real (type, ptr, len); 7599 7600 case COMPLEX_TYPE: 7601 return native_interpret_complex (type, ptr, len); 7602 7603 case VECTOR_TYPE: 7604 return native_interpret_vector (type, ptr, len); 7605 7606 default: 7607 return NULL_TREE; 7608 } 7609 } 7610 7611 7612 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type 7613 TYPE at compile-time. If we're unable to perform the conversion 7614 return NULL_TREE. */ 7615 7616 static tree 7617 fold_view_convert_expr (tree type, tree expr) 7618 { 7619 /* We support up to 512-bit values (for V8DFmode). */ 7620 unsigned char buffer[64]; 7621 int len; 7622 7623 /* Check that the host and target are sane. */ 7624 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8) 7625 return NULL_TREE; 7626 7627 len = native_encode_expr (expr, buffer, sizeof (buffer)); 7628 if (len == 0) 7629 return NULL_TREE; 7630 7631 return native_interpret_expr (type, buffer, len); 7632 } 7633 7634 /* Build an expression for the address of T. Folds away INDIRECT_REF 7635 to avoid confusing the gimplify process. */ 7636 7637 tree 7638 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype) 7639 { 7640 /* The size of the object is not relevant when talking about its address. */ 7641 if (TREE_CODE (t) == WITH_SIZE_EXPR) 7642 t = TREE_OPERAND (t, 0); 7643 7644 if (TREE_CODE (t) == INDIRECT_REF) 7645 { 7646 t = TREE_OPERAND (t, 0); 7647 7648 if (TREE_TYPE (t) != ptrtype) 7649 t = build1_loc (loc, NOP_EXPR, ptrtype, t); 7650 } 7651 else if (TREE_CODE (t) == MEM_REF 7652 && integer_zerop (TREE_OPERAND (t, 1))) 7653 return TREE_OPERAND (t, 0); 7654 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR) 7655 { 7656 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0)); 7657 7658 if (TREE_TYPE (t) != ptrtype) 7659 t = fold_convert_loc (loc, ptrtype, t); 7660 } 7661 else 7662 t = build1_loc (loc, ADDR_EXPR, ptrtype, t); 7663 7664 return t; 7665 } 7666 7667 /* Build an expression for the address of T. */ 7668 7669 tree 7670 build_fold_addr_expr_loc (location_t loc, tree t) 7671 { 7672 tree ptrtype = build_pointer_type (TREE_TYPE (t)); 7673 7674 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype); 7675 } 7676 7677 static bool vec_cst_ctor_to_array (tree, tree *); 7678 7679 /* Fold a unary expression of code CODE and type TYPE with operand 7680 OP0. Return the folded expression if folding is successful. 7681 Otherwise, return NULL_TREE. */ 7682 7683 tree 7684 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0) 7685 { 7686 tree tem; 7687 tree arg0; 7688 enum tree_code_class kind = TREE_CODE_CLASS (code); 7689 7690 gcc_assert (IS_EXPR_CODE_CLASS (kind) 7691 && TREE_CODE_LENGTH (code) == 1); 7692 7693 arg0 = op0; 7694 if (arg0) 7695 { 7696 if (CONVERT_EXPR_CODE_P (code) 7697 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR) 7698 { 7699 /* Don't use STRIP_NOPS, because signedness of argument type 7700 matters. */ 7701 STRIP_SIGN_NOPS (arg0); 7702 } 7703 else 7704 { 7705 /* Strip any conversions that don't change the mode. This 7706 is safe for every expression, except for a comparison 7707 expression because its signedness is derived from its 7708 operands. 7709 7710 Note that this is done as an internal manipulation within 7711 the constant folder, in order to find the simplest 7712 representation of the arguments so that their form can be 7713 studied. In any cases, the appropriate type conversions 7714 should be put back in the tree that will get out of the 7715 constant folder. */ 7716 STRIP_NOPS (arg0); 7717 } 7718 } 7719 7720 if (TREE_CODE_CLASS (code) == tcc_unary) 7721 { 7722 if (TREE_CODE (arg0) == COMPOUND_EXPR) 7723 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), 7724 fold_build1_loc (loc, code, type, 7725 fold_convert_loc (loc, TREE_TYPE (op0), 7726 TREE_OPERAND (arg0, 1)))); 7727 else if (TREE_CODE (arg0) == COND_EXPR) 7728 { 7729 tree arg01 = TREE_OPERAND (arg0, 1); 7730 tree arg02 = TREE_OPERAND (arg0, 2); 7731 if (! VOID_TYPE_P (TREE_TYPE (arg01))) 7732 arg01 = fold_build1_loc (loc, code, type, 7733 fold_convert_loc (loc, 7734 TREE_TYPE (op0), arg01)); 7735 if (! VOID_TYPE_P (TREE_TYPE (arg02))) 7736 arg02 = fold_build1_loc (loc, code, type, 7737 fold_convert_loc (loc, 7738 TREE_TYPE (op0), arg02)); 7739 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0), 7740 arg01, arg02); 7741 7742 /* If this was a conversion, and all we did was to move into 7743 inside the COND_EXPR, bring it back out. But leave it if 7744 it is a conversion from integer to integer and the 7745 result precision is no wider than a word since such a 7746 conversion is cheap and may be optimized away by combine, 7747 while it couldn't if it were outside the COND_EXPR. Then return 7748 so we don't get into an infinite recursion loop taking the 7749 conversion out and then back in. */ 7750 7751 if ((CONVERT_EXPR_CODE_P (code) 7752 || code == NON_LVALUE_EXPR) 7753 && TREE_CODE (tem) == COND_EXPR 7754 && TREE_CODE (TREE_OPERAND (tem, 1)) == code 7755 && TREE_CODE (TREE_OPERAND (tem, 2)) == code 7756 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1)) 7757 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2)) 7758 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0)) 7759 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0))) 7760 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem)) 7761 && (INTEGRAL_TYPE_P 7762 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0)))) 7763 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD) 7764 || flag_syntax_only)) 7765 tem = build1_loc (loc, code, type, 7766 build3 (COND_EXPR, 7767 TREE_TYPE (TREE_OPERAND 7768 (TREE_OPERAND (tem, 1), 0)), 7769 TREE_OPERAND (tem, 0), 7770 TREE_OPERAND (TREE_OPERAND (tem, 1), 0), 7771 TREE_OPERAND (TREE_OPERAND (tem, 2), 7772 0))); 7773 return tem; 7774 } 7775 } 7776 7777 switch (code) 7778 { 7779 case PAREN_EXPR: 7780 /* Re-association barriers around constants and other re-association 7781 barriers can be removed. */ 7782 if (CONSTANT_CLASS_P (op0) 7783 || TREE_CODE (op0) == PAREN_EXPR) 7784 return fold_convert_loc (loc, type, op0); 7785 return NULL_TREE; 7786 7787 CASE_CONVERT: 7788 case FLOAT_EXPR: 7789 case FIX_TRUNC_EXPR: 7790 if (TREE_TYPE (op0) == type) 7791 return op0; 7792 7793 if (COMPARISON_CLASS_P (op0)) 7794 { 7795 /* If we have (type) (a CMP b) and type is an integral type, return 7796 new expression involving the new type. Canonicalize 7797 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for 7798 non-integral type. 7799 Do not fold the result as that would not simplify further, also 7800 folding again results in recursions. */ 7801 if (TREE_CODE (type) == BOOLEAN_TYPE) 7802 return build2_loc (loc, TREE_CODE (op0), type, 7803 TREE_OPERAND (op0, 0), 7804 TREE_OPERAND (op0, 1)); 7805 else if (!INTEGRAL_TYPE_P (type)) 7806 return build3_loc (loc, COND_EXPR, type, op0, 7807 constant_boolean_node (true, type), 7808 constant_boolean_node (false, type)); 7809 } 7810 7811 /* Handle cases of two conversions in a row. */ 7812 if (CONVERT_EXPR_P (op0)) 7813 { 7814 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0)); 7815 tree inter_type = TREE_TYPE (op0); 7816 int inside_int = INTEGRAL_TYPE_P (inside_type); 7817 int inside_ptr = POINTER_TYPE_P (inside_type); 7818 int inside_float = FLOAT_TYPE_P (inside_type); 7819 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE; 7820 unsigned int inside_prec = TYPE_PRECISION (inside_type); 7821 int inside_unsignedp = TYPE_UNSIGNED (inside_type); 7822 int inter_int = INTEGRAL_TYPE_P (inter_type); 7823 int inter_ptr = POINTER_TYPE_P (inter_type); 7824 int inter_float = FLOAT_TYPE_P (inter_type); 7825 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE; 7826 unsigned int inter_prec = TYPE_PRECISION (inter_type); 7827 int inter_unsignedp = TYPE_UNSIGNED (inter_type); 7828 int final_int = INTEGRAL_TYPE_P (type); 7829 int final_ptr = POINTER_TYPE_P (type); 7830 int final_float = FLOAT_TYPE_P (type); 7831 int final_vec = TREE_CODE (type) == VECTOR_TYPE; 7832 unsigned int final_prec = TYPE_PRECISION (type); 7833 int final_unsignedp = TYPE_UNSIGNED (type); 7834 7835 /* In addition to the cases of two conversions in a row 7836 handled below, if we are converting something to its own 7837 type via an object of identical or wider precision, neither 7838 conversion is needed. */ 7839 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type) 7840 && (((inter_int || inter_ptr) && final_int) 7841 || (inter_float && final_float)) 7842 && inter_prec >= final_prec) 7843 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0)); 7844 7845 /* Likewise, if the intermediate and initial types are either both 7846 float or both integer, we don't need the middle conversion if the 7847 former is wider than the latter and doesn't change the signedness 7848 (for integers). Avoid this if the final type is a pointer since 7849 then we sometimes need the middle conversion. Likewise if the 7850 final type has a precision not equal to the size of its mode. */ 7851 if (((inter_int && inside_int) 7852 || (inter_float && inside_float) 7853 || (inter_vec && inside_vec)) 7854 && inter_prec >= inside_prec 7855 && (inter_float || inter_vec 7856 || inter_unsignedp == inside_unsignedp) 7857 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type)) 7858 && TYPE_MODE (type) == TYPE_MODE (inter_type)) 7859 && ! final_ptr 7860 && (! final_vec || inter_prec == inside_prec)) 7861 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0)); 7862 7863 /* If we have a sign-extension of a zero-extended value, we can 7864 replace that by a single zero-extension. */ 7865 if (inside_int && inter_int && final_int 7866 && inside_prec < inter_prec && inter_prec < final_prec 7867 && inside_unsignedp && !inter_unsignedp) 7868 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0)); 7869 7870 /* Two conversions in a row are not needed unless: 7871 - some conversion is floating-point (overstrict for now), or 7872 - some conversion is a vector (overstrict for now), or 7873 - the intermediate type is narrower than both initial and 7874 final, or 7875 - the intermediate type and innermost type differ in signedness, 7876 and the outermost type is wider than the intermediate, or 7877 - the initial type is a pointer type and the precisions of the 7878 intermediate and final types differ, or 7879 - the final type is a pointer type and the precisions of the 7880 initial and intermediate types differ. */ 7881 if (! inside_float && ! inter_float && ! final_float 7882 && ! inside_vec && ! inter_vec && ! final_vec 7883 && (inter_prec >= inside_prec || inter_prec >= final_prec) 7884 && ! (inside_int && inter_int 7885 && inter_unsignedp != inside_unsignedp 7886 && inter_prec < final_prec) 7887 && ((inter_unsignedp && inter_prec > inside_prec) 7888 == (final_unsignedp && final_prec > inter_prec)) 7889 && ! (inside_ptr && inter_prec != final_prec) 7890 && ! (final_ptr && inside_prec != inter_prec) 7891 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type)) 7892 && TYPE_MODE (type) == TYPE_MODE (inter_type))) 7893 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0)); 7894 } 7895 7896 /* Handle (T *)&A.B.C for A being of type T and B and C 7897 living at offset zero. This occurs frequently in 7898 C++ upcasting and then accessing the base. */ 7899 if (TREE_CODE (op0) == ADDR_EXPR 7900 && POINTER_TYPE_P (type) 7901 && handled_component_p (TREE_OPERAND (op0, 0))) 7902 { 7903 HOST_WIDE_INT bitsize, bitpos; 7904 tree offset; 7905 enum machine_mode mode; 7906 int unsignedp, volatilep; 7907 tree base = TREE_OPERAND (op0, 0); 7908 base = get_inner_reference (base, &bitsize, &bitpos, &offset, 7909 &mode, &unsignedp, &volatilep, false); 7910 /* If the reference was to a (constant) zero offset, we can use 7911 the address of the base if it has the same base type 7912 as the result type and the pointer type is unqualified. */ 7913 if (! offset && bitpos == 0 7914 && (TYPE_MAIN_VARIANT (TREE_TYPE (type)) 7915 == TYPE_MAIN_VARIANT (TREE_TYPE (base))) 7916 && TYPE_QUALS (type) == TYPE_UNQUALIFIED) 7917 return fold_convert_loc (loc, type, 7918 build_fold_addr_expr_loc (loc, base)); 7919 } 7920 7921 if (TREE_CODE (op0) == MODIFY_EXPR 7922 && TREE_CONSTANT (TREE_OPERAND (op0, 1)) 7923 /* Detect assigning a bitfield. */ 7924 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF 7925 && DECL_BIT_FIELD 7926 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1)))) 7927 { 7928 /* Don't leave an assignment inside a conversion 7929 unless assigning a bitfield. */ 7930 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1)); 7931 /* First do the assignment, then return converted constant. */ 7932 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem); 7933 TREE_NO_WARNING (tem) = 1; 7934 TREE_USED (tem) = 1; 7935 return tem; 7936 } 7937 7938 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer 7939 constants (if x has signed type, the sign bit cannot be set 7940 in c). This folds extension into the BIT_AND_EXPR. 7941 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they 7942 very likely don't have maximal range for their precision and this 7943 transformation effectively doesn't preserve non-maximal ranges. */ 7944 if (TREE_CODE (type) == INTEGER_TYPE 7945 && TREE_CODE (op0) == BIT_AND_EXPR 7946 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST) 7947 { 7948 tree and_expr = op0; 7949 tree and0 = TREE_OPERAND (and_expr, 0); 7950 tree and1 = TREE_OPERAND (and_expr, 1); 7951 int change = 0; 7952 7953 if (TYPE_UNSIGNED (TREE_TYPE (and_expr)) 7954 || (TYPE_PRECISION (type) 7955 <= TYPE_PRECISION (TREE_TYPE (and_expr)))) 7956 change = 1; 7957 else if (TYPE_PRECISION (TREE_TYPE (and1)) 7958 <= HOST_BITS_PER_WIDE_INT 7959 && host_integerp (and1, 1)) 7960 { 7961 unsigned HOST_WIDE_INT cst; 7962 7963 cst = tree_low_cst (and1, 1); 7964 cst &= (HOST_WIDE_INT) -1 7965 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1); 7966 change = (cst == 0); 7967 #ifdef LOAD_EXTEND_OP 7968 if (change 7969 && !flag_syntax_only 7970 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0))) 7971 == ZERO_EXTEND)) 7972 { 7973 tree uns = unsigned_type_for (TREE_TYPE (and0)); 7974 and0 = fold_convert_loc (loc, uns, and0); 7975 and1 = fold_convert_loc (loc, uns, and1); 7976 } 7977 #endif 7978 } 7979 if (change) 7980 { 7981 tem = force_fit_type_double (type, tree_to_double_int (and1), 7982 0, TREE_OVERFLOW (and1)); 7983 return fold_build2_loc (loc, BIT_AND_EXPR, type, 7984 fold_convert_loc (loc, type, and0), tem); 7985 } 7986 } 7987 7988 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, 7989 when one of the new casts will fold away. Conservatively we assume 7990 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */ 7991 if (POINTER_TYPE_P (type) 7992 && TREE_CODE (arg0) == POINTER_PLUS_EXPR 7993 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0))) 7994 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 7995 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR 7996 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR)) 7997 { 7998 tree arg00 = TREE_OPERAND (arg0, 0); 7999 tree arg01 = TREE_OPERAND (arg0, 1); 8000 8001 return fold_build_pointer_plus_loc 8002 (loc, fold_convert_loc (loc, type, arg00), arg01); 8003 } 8004 8005 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types 8006 of the same precision, and X is an integer type not narrower than 8007 types T1 or T2, i.e. the cast (T2)X isn't an extension. */ 8008 if (INTEGRAL_TYPE_P (type) 8009 && TREE_CODE (op0) == BIT_NOT_EXPR 8010 && INTEGRAL_TYPE_P (TREE_TYPE (op0)) 8011 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0)) 8012 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))) 8013 { 8014 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0); 8015 if (INTEGRAL_TYPE_P (TREE_TYPE (tem)) 8016 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem))) 8017 return fold_build1_loc (loc, BIT_NOT_EXPR, type, 8018 fold_convert_loc (loc, type, tem)); 8019 } 8020 8021 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the 8022 type of X and Y (integer types only). */ 8023 if (INTEGRAL_TYPE_P (type) 8024 && TREE_CODE (op0) == MULT_EXPR 8025 && INTEGRAL_TYPE_P (TREE_TYPE (op0)) 8026 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0))) 8027 { 8028 /* Be careful not to introduce new overflows. */ 8029 tree mult_type; 8030 if (TYPE_OVERFLOW_WRAPS (type)) 8031 mult_type = type; 8032 else 8033 mult_type = unsigned_type_for (type); 8034 8035 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0))) 8036 { 8037 tem = fold_build2_loc (loc, MULT_EXPR, mult_type, 8038 fold_convert_loc (loc, mult_type, 8039 TREE_OPERAND (op0, 0)), 8040 fold_convert_loc (loc, mult_type, 8041 TREE_OPERAND (op0, 1))); 8042 return fold_convert_loc (loc, type, tem); 8043 } 8044 } 8045 8046 tem = fold_convert_const (code, type, op0); 8047 return tem ? tem : NULL_TREE; 8048 8049 case ADDR_SPACE_CONVERT_EXPR: 8050 if (integer_zerop (arg0)) 8051 return fold_convert_const (code, type, arg0); 8052 return NULL_TREE; 8053 8054 case FIXED_CONVERT_EXPR: 8055 tem = fold_convert_const (code, type, arg0); 8056 return tem ? tem : NULL_TREE; 8057 8058 case VIEW_CONVERT_EXPR: 8059 if (TREE_TYPE (op0) == type) 8060 return op0; 8061 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR) 8062 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, 8063 type, TREE_OPERAND (op0, 0)); 8064 if (TREE_CODE (op0) == MEM_REF) 8065 return fold_build2_loc (loc, MEM_REF, type, 8066 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1)); 8067 8068 /* For integral conversions with the same precision or pointer 8069 conversions use a NOP_EXPR instead. */ 8070 if ((INTEGRAL_TYPE_P (type) 8071 || POINTER_TYPE_P (type)) 8072 && (INTEGRAL_TYPE_P (TREE_TYPE (op0)) 8073 || POINTER_TYPE_P (TREE_TYPE (op0))) 8074 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))) 8075 return fold_convert_loc (loc, type, op0); 8076 8077 /* Strip inner integral conversions that do not change the precision. */ 8078 if (CONVERT_EXPR_P (op0) 8079 && (INTEGRAL_TYPE_P (TREE_TYPE (op0)) 8080 || POINTER_TYPE_P (TREE_TYPE (op0))) 8081 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))) 8082 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))) 8083 && (TYPE_PRECISION (TREE_TYPE (op0)) 8084 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0))))) 8085 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, 8086 type, TREE_OPERAND (op0, 0)); 8087 8088 return fold_view_convert_expr (type, op0); 8089 8090 case NEGATE_EXPR: 8091 tem = fold_negate_expr (loc, arg0); 8092 if (tem) 8093 return fold_convert_loc (loc, type, tem); 8094 return NULL_TREE; 8095 8096 case ABS_EXPR: 8097 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST) 8098 return fold_abs_const (arg0, type); 8099 else if (TREE_CODE (arg0) == NEGATE_EXPR) 8100 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0)); 8101 /* Convert fabs((double)float) into (double)fabsf(float). */ 8102 else if (TREE_CODE (arg0) == NOP_EXPR 8103 && TREE_CODE (type) == REAL_TYPE) 8104 { 8105 tree targ0 = strip_float_extensions (arg0); 8106 if (targ0 != arg0) 8107 return fold_convert_loc (loc, type, 8108 fold_build1_loc (loc, ABS_EXPR, 8109 TREE_TYPE (targ0), 8110 targ0)); 8111 } 8112 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */ 8113 else if (TREE_CODE (arg0) == ABS_EXPR) 8114 return arg0; 8115 else if (tree_expr_nonnegative_p (arg0)) 8116 return arg0; 8117 8118 /* Strip sign ops from argument. */ 8119 if (TREE_CODE (type) == REAL_TYPE) 8120 { 8121 tem = fold_strip_sign_ops (arg0); 8122 if (tem) 8123 return fold_build1_loc (loc, ABS_EXPR, type, 8124 fold_convert_loc (loc, type, tem)); 8125 } 8126 return NULL_TREE; 8127 8128 case CONJ_EXPR: 8129 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE) 8130 return fold_convert_loc (loc, type, arg0); 8131 if (TREE_CODE (arg0) == COMPLEX_EXPR) 8132 { 8133 tree itype = TREE_TYPE (type); 8134 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0)); 8135 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1)); 8136 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, 8137 negate_expr (ipart)); 8138 } 8139 if (TREE_CODE (arg0) == COMPLEX_CST) 8140 { 8141 tree itype = TREE_TYPE (type); 8142 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0)); 8143 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0)); 8144 return build_complex (type, rpart, negate_expr (ipart)); 8145 } 8146 if (TREE_CODE (arg0) == CONJ_EXPR) 8147 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 8148 return NULL_TREE; 8149 8150 case BIT_NOT_EXPR: 8151 if (TREE_CODE (arg0) == INTEGER_CST) 8152 return fold_not_const (arg0, type); 8153 else if (TREE_CODE (arg0) == BIT_NOT_EXPR) 8154 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 8155 /* Convert ~ (-A) to A - 1. */ 8156 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR) 8157 return fold_build2_loc (loc, MINUS_EXPR, type, 8158 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)), 8159 build_int_cst (type, 1)); 8160 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */ 8161 else if (INTEGRAL_TYPE_P (type) 8162 && ((TREE_CODE (arg0) == MINUS_EXPR 8163 && integer_onep (TREE_OPERAND (arg0, 1))) 8164 || (TREE_CODE (arg0) == PLUS_EXPR 8165 && integer_all_onesp (TREE_OPERAND (arg0, 1))))) 8166 return fold_build1_loc (loc, NEGATE_EXPR, type, 8167 fold_convert_loc (loc, type, 8168 TREE_OPERAND (arg0, 0))); 8169 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */ 8170 else if (TREE_CODE (arg0) == BIT_XOR_EXPR 8171 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type, 8172 fold_convert_loc (loc, type, 8173 TREE_OPERAND (arg0, 0))))) 8174 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem, 8175 fold_convert_loc (loc, type, 8176 TREE_OPERAND (arg0, 1))); 8177 else if (TREE_CODE (arg0) == BIT_XOR_EXPR 8178 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type, 8179 fold_convert_loc (loc, type, 8180 TREE_OPERAND (arg0, 1))))) 8181 return fold_build2_loc (loc, BIT_XOR_EXPR, type, 8182 fold_convert_loc (loc, type, 8183 TREE_OPERAND (arg0, 0)), tem); 8184 /* Perform BIT_NOT_EXPR on each element individually. */ 8185 else if (TREE_CODE (arg0) == VECTOR_CST) 8186 { 8187 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE; 8188 int count = TYPE_VECTOR_SUBPARTS (type), i; 8189 8190 for (i = 0; i < count; i++) 8191 { 8192 if (elements) 8193 { 8194 elem = TREE_VALUE (elements); 8195 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem); 8196 if (elem == NULL_TREE) 8197 break; 8198 elements = TREE_CHAIN (elements); 8199 } 8200 else 8201 elem = build_int_cst (TREE_TYPE (type), -1); 8202 list = tree_cons (NULL_TREE, elem, list); 8203 } 8204 if (i == count) 8205 return build_vector (type, nreverse (list)); 8206 } 8207 8208 return NULL_TREE; 8209 8210 case TRUTH_NOT_EXPR: 8211 /* The argument to invert_truthvalue must have Boolean type. */ 8212 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE) 8213 arg0 = fold_convert_loc (loc, boolean_type_node, arg0); 8214 8215 /* Note that the operand of this must be an int 8216 and its values must be 0 or 1. 8217 ("true" is a fixed value perhaps depending on the language, 8218 but we don't handle values other than 1 correctly yet.) */ 8219 tem = fold_truth_not_expr (loc, arg0); 8220 if (!tem) 8221 return NULL_TREE; 8222 return fold_convert_loc (loc, type, tem); 8223 8224 case REALPART_EXPR: 8225 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE) 8226 return fold_convert_loc (loc, type, arg0); 8227 if (TREE_CODE (arg0) == COMPLEX_EXPR) 8228 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0), 8229 TREE_OPERAND (arg0, 1)); 8230 if (TREE_CODE (arg0) == COMPLEX_CST) 8231 return fold_convert_loc (loc, type, TREE_REALPART (arg0)); 8232 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) 8233 { 8234 tree itype = TREE_TYPE (TREE_TYPE (arg0)); 8235 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype, 8236 fold_build1_loc (loc, REALPART_EXPR, itype, 8237 TREE_OPERAND (arg0, 0)), 8238 fold_build1_loc (loc, REALPART_EXPR, itype, 8239 TREE_OPERAND (arg0, 1))); 8240 return fold_convert_loc (loc, type, tem); 8241 } 8242 if (TREE_CODE (arg0) == CONJ_EXPR) 8243 { 8244 tree itype = TREE_TYPE (TREE_TYPE (arg0)); 8245 tem = fold_build1_loc (loc, REALPART_EXPR, itype, 8246 TREE_OPERAND (arg0, 0)); 8247 return fold_convert_loc (loc, type, tem); 8248 } 8249 if (TREE_CODE (arg0) == CALL_EXPR) 8250 { 8251 tree fn = get_callee_fndecl (arg0); 8252 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL) 8253 switch (DECL_FUNCTION_CODE (fn)) 8254 { 8255 CASE_FLT_FN (BUILT_IN_CEXPI): 8256 fn = mathfn_built_in (type, BUILT_IN_COS); 8257 if (fn) 8258 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0)); 8259 break; 8260 8261 default: 8262 break; 8263 } 8264 } 8265 return NULL_TREE; 8266 8267 case IMAGPART_EXPR: 8268 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE) 8269 return build_zero_cst (type); 8270 if (TREE_CODE (arg0) == COMPLEX_EXPR) 8271 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1), 8272 TREE_OPERAND (arg0, 0)); 8273 if (TREE_CODE (arg0) == COMPLEX_CST) 8274 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0)); 8275 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) 8276 { 8277 tree itype = TREE_TYPE (TREE_TYPE (arg0)); 8278 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype, 8279 fold_build1_loc (loc, IMAGPART_EXPR, itype, 8280 TREE_OPERAND (arg0, 0)), 8281 fold_build1_loc (loc, IMAGPART_EXPR, itype, 8282 TREE_OPERAND (arg0, 1))); 8283 return fold_convert_loc (loc, type, tem); 8284 } 8285 if (TREE_CODE (arg0) == CONJ_EXPR) 8286 { 8287 tree itype = TREE_TYPE (TREE_TYPE (arg0)); 8288 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0)); 8289 return fold_convert_loc (loc, type, negate_expr (tem)); 8290 } 8291 if (TREE_CODE (arg0) == CALL_EXPR) 8292 { 8293 tree fn = get_callee_fndecl (arg0); 8294 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL) 8295 switch (DECL_FUNCTION_CODE (fn)) 8296 { 8297 CASE_FLT_FN (BUILT_IN_CEXPI): 8298 fn = mathfn_built_in (type, BUILT_IN_SIN); 8299 if (fn) 8300 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0)); 8301 break; 8302 8303 default: 8304 break; 8305 } 8306 } 8307 return NULL_TREE; 8308 8309 case INDIRECT_REF: 8310 /* Fold *&X to X if X is an lvalue. */ 8311 if (TREE_CODE (op0) == ADDR_EXPR) 8312 { 8313 tree op00 = TREE_OPERAND (op0, 0); 8314 if ((TREE_CODE (op00) == VAR_DECL 8315 || TREE_CODE (op00) == PARM_DECL 8316 || TREE_CODE (op00) == RESULT_DECL) 8317 && !TREE_READONLY (op00)) 8318 return op00; 8319 } 8320 return NULL_TREE; 8321 8322 case VEC_UNPACK_LO_EXPR: 8323 case VEC_UNPACK_HI_EXPR: 8324 case VEC_UNPACK_FLOAT_LO_EXPR: 8325 case VEC_UNPACK_FLOAT_HI_EXPR: 8326 { 8327 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i; 8328 tree *elts, vals = NULL_TREE; 8329 enum tree_code subcode; 8330 8331 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2); 8332 if (TREE_CODE (arg0) != VECTOR_CST) 8333 return NULL_TREE; 8334 8335 elts = XALLOCAVEC (tree, nelts * 2); 8336 if (!vec_cst_ctor_to_array (arg0, elts)) 8337 return NULL_TREE; 8338 8339 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR 8340 || code == VEC_UNPACK_FLOAT_LO_EXPR)) 8341 elts += nelts; 8342 8343 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR) 8344 subcode = NOP_EXPR; 8345 else 8346 subcode = FLOAT_EXPR; 8347 8348 for (i = 0; i < nelts; i++) 8349 { 8350 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]); 8351 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i])) 8352 return NULL_TREE; 8353 } 8354 8355 for (i = 0; i < nelts; i++) 8356 vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals); 8357 return build_vector (type, vals); 8358 } 8359 8360 default: 8361 return NULL_TREE; 8362 } /* switch (code) */ 8363 } 8364 8365 8366 /* If the operation was a conversion do _not_ mark a resulting constant 8367 with TREE_OVERFLOW if the original constant was not. These conversions 8368 have implementation defined behavior and retaining the TREE_OVERFLOW 8369 flag here would confuse later passes such as VRP. */ 8370 tree 8371 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code, 8372 tree type, tree op0) 8373 { 8374 tree res = fold_unary_loc (loc, code, type, op0); 8375 if (res 8376 && TREE_CODE (res) == INTEGER_CST 8377 && TREE_CODE (op0) == INTEGER_CST 8378 && CONVERT_EXPR_CODE_P (code)) 8379 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0); 8380 8381 return res; 8382 } 8383 8384 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with 8385 operands OP0 and OP1. LOC is the location of the resulting expression. 8386 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1. 8387 Return the folded expression if folding is successful. Otherwise, 8388 return NULL_TREE. */ 8389 static tree 8390 fold_truth_andor (location_t loc, enum tree_code code, tree type, 8391 tree arg0, tree arg1, tree op0, tree op1) 8392 { 8393 tree tem; 8394 8395 /* We only do these simplifications if we are optimizing. */ 8396 if (!optimize) 8397 return NULL_TREE; 8398 8399 /* Check for things like (A || B) && (A || C). We can convert this 8400 to A || (B && C). Note that either operator can be any of the four 8401 truth and/or operations and the transformation will still be 8402 valid. Also note that we only care about order for the 8403 ANDIF and ORIF operators. If B contains side effects, this 8404 might change the truth-value of A. */ 8405 if (TREE_CODE (arg0) == TREE_CODE (arg1) 8406 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR 8407 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR 8408 || TREE_CODE (arg0) == TRUTH_AND_EXPR 8409 || TREE_CODE (arg0) == TRUTH_OR_EXPR) 8410 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1))) 8411 { 8412 tree a00 = TREE_OPERAND (arg0, 0); 8413 tree a01 = TREE_OPERAND (arg0, 1); 8414 tree a10 = TREE_OPERAND (arg1, 0); 8415 tree a11 = TREE_OPERAND (arg1, 1); 8416 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR 8417 || TREE_CODE (arg0) == TRUTH_AND_EXPR) 8418 && (code == TRUTH_AND_EXPR 8419 || code == TRUTH_OR_EXPR)); 8420 8421 if (operand_equal_p (a00, a10, 0)) 8422 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00, 8423 fold_build2_loc (loc, code, type, a01, a11)); 8424 else if (commutative && operand_equal_p (a00, a11, 0)) 8425 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00, 8426 fold_build2_loc (loc, code, type, a01, a10)); 8427 else if (commutative && operand_equal_p (a01, a10, 0)) 8428 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01, 8429 fold_build2_loc (loc, code, type, a00, a11)); 8430 8431 /* This case if tricky because we must either have commutative 8432 operators or else A10 must not have side-effects. */ 8433 8434 else if ((commutative || ! TREE_SIDE_EFFECTS (a10)) 8435 && operand_equal_p (a01, a11, 0)) 8436 return fold_build2_loc (loc, TREE_CODE (arg0), type, 8437 fold_build2_loc (loc, code, type, a00, a10), 8438 a01); 8439 } 8440 8441 /* See if we can build a range comparison. */ 8442 if (0 != (tem = fold_range_test (loc, code, type, op0, op1))) 8443 return tem; 8444 8445 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR) 8446 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR)) 8447 { 8448 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true); 8449 if (tem) 8450 return fold_build2_loc (loc, code, type, tem, arg1); 8451 } 8452 8453 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR) 8454 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR)) 8455 { 8456 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false); 8457 if (tem) 8458 return fold_build2_loc (loc, code, type, arg0, tem); 8459 } 8460 8461 /* Check for the possibility of merging component references. If our 8462 lhs is another similar operation, try to merge its rhs with our 8463 rhs. Then try to merge our lhs and rhs. */ 8464 if (TREE_CODE (arg0) == code 8465 && 0 != (tem = fold_truth_andor_1 (loc, code, type, 8466 TREE_OPERAND (arg0, 1), arg1))) 8467 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem); 8468 8469 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0) 8470 return tem; 8471 8472 if ((BRANCH_COST (optimize_function_for_speed_p (cfun), 8473 false) >= 2) 8474 && LOGICAL_OP_NON_SHORT_CIRCUIT 8475 && (code == TRUTH_AND_EXPR 8476 || code == TRUTH_ANDIF_EXPR 8477 || code == TRUTH_OR_EXPR 8478 || code == TRUTH_ORIF_EXPR)) 8479 { 8480 enum tree_code ncode, icode; 8481 8482 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR) 8483 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR; 8484 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR; 8485 8486 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)), 8487 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)) 8488 We don't want to pack more than two leafs to a non-IF AND/OR 8489 expression. 8490 If tree-code of left-hand operand isn't an AND/OR-IF code and not 8491 equal to IF-CODE, then we don't want to add right-hand operand. 8492 If the inner right-hand side of left-hand operand has 8493 side-effects, or isn't simple, then we can't add to it, 8494 as otherwise we might destroy if-sequence. */ 8495 if (TREE_CODE (arg0) == icode 8496 && simple_operand_p_2 (arg1) 8497 /* Needed for sequence points to handle trappings, and 8498 side-effects. */ 8499 && simple_operand_p_2 (TREE_OPERAND (arg0, 1))) 8500 { 8501 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1), 8502 arg1); 8503 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0), 8504 tem); 8505 } 8506 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C), 8507 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */ 8508 else if (TREE_CODE (arg1) == icode 8509 && simple_operand_p_2 (arg0) 8510 /* Needed for sequence points to handle trappings, and 8511 side-effects. */ 8512 && simple_operand_p_2 (TREE_OPERAND (arg1, 0))) 8513 { 8514 tem = fold_build2_loc (loc, ncode, type, 8515 arg0, TREE_OPERAND (arg1, 0)); 8516 return fold_build2_loc (loc, icode, type, tem, 8517 TREE_OPERAND (arg1, 1)); 8518 } 8519 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B) 8520 into (A OR B). 8521 For sequence point consistancy, we need to check for trapping, 8522 and side-effects. */ 8523 else if (code == icode && simple_operand_p_2 (arg0) 8524 && simple_operand_p_2 (arg1)) 8525 return fold_build2_loc (loc, ncode, type, arg0, arg1); 8526 } 8527 8528 return NULL_TREE; 8529 } 8530 8531 /* Fold a binary expression of code CODE and type TYPE with operands 8532 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination. 8533 Return the folded expression if folding is successful. Otherwise, 8534 return NULL_TREE. */ 8535 8536 static tree 8537 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1) 8538 { 8539 enum tree_code compl_code; 8540 8541 if (code == MIN_EXPR) 8542 compl_code = MAX_EXPR; 8543 else if (code == MAX_EXPR) 8544 compl_code = MIN_EXPR; 8545 else 8546 gcc_unreachable (); 8547 8548 /* MIN (MAX (a, b), b) == b. */ 8549 if (TREE_CODE (op0) == compl_code 8550 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0)) 8551 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0)); 8552 8553 /* MIN (MAX (b, a), b) == b. */ 8554 if (TREE_CODE (op0) == compl_code 8555 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0) 8556 && reorder_operands_p (TREE_OPERAND (op0, 1), op1)) 8557 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1)); 8558 8559 /* MIN (a, MAX (a, b)) == a. */ 8560 if (TREE_CODE (op1) == compl_code 8561 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0) 8562 && reorder_operands_p (op0, TREE_OPERAND (op1, 1))) 8563 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1)); 8564 8565 /* MIN (a, MAX (b, a)) == a. */ 8566 if (TREE_CODE (op1) == compl_code 8567 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0) 8568 && reorder_operands_p (op0, TREE_OPERAND (op1, 0))) 8569 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0)); 8570 8571 return NULL_TREE; 8572 } 8573 8574 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1 8575 by changing CODE to reduce the magnitude of constants involved in 8576 ARG0 of the comparison. 8577 Returns a canonicalized comparison tree if a simplification was 8578 possible, otherwise returns NULL_TREE. 8579 Set *STRICT_OVERFLOW_P to true if the canonicalization is only 8580 valid if signed overflow is undefined. */ 8581 8582 static tree 8583 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type, 8584 tree arg0, tree arg1, 8585 bool *strict_overflow_p) 8586 { 8587 enum tree_code code0 = TREE_CODE (arg0); 8588 tree t, cst0 = NULL_TREE; 8589 int sgn0; 8590 bool swap = false; 8591 8592 /* Match A +- CST code arg1 and CST code arg1. We can change the 8593 first form only if overflow is undefined. */ 8594 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)) 8595 /* In principle pointers also have undefined overflow behavior, 8596 but that causes problems elsewhere. */ 8597 && !POINTER_TYPE_P (TREE_TYPE (arg0)) 8598 && (code0 == MINUS_EXPR 8599 || code0 == PLUS_EXPR) 8600 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 8601 || code0 == INTEGER_CST)) 8602 return NULL_TREE; 8603 8604 /* Identify the constant in arg0 and its sign. */ 8605 if (code0 == INTEGER_CST) 8606 cst0 = arg0; 8607 else 8608 cst0 = TREE_OPERAND (arg0, 1); 8609 sgn0 = tree_int_cst_sgn (cst0); 8610 8611 /* Overflowed constants and zero will cause problems. */ 8612 if (integer_zerop (cst0) 8613 || TREE_OVERFLOW (cst0)) 8614 return NULL_TREE; 8615 8616 /* See if we can reduce the magnitude of the constant in 8617 arg0 by changing the comparison code. */ 8618 if (code0 == INTEGER_CST) 8619 { 8620 /* CST <= arg1 -> CST-1 < arg1. */ 8621 if (code == LE_EXPR && sgn0 == 1) 8622 code = LT_EXPR; 8623 /* -CST < arg1 -> -CST-1 <= arg1. */ 8624 else if (code == LT_EXPR && sgn0 == -1) 8625 code = LE_EXPR; 8626 /* CST > arg1 -> CST-1 >= arg1. */ 8627 else if (code == GT_EXPR && sgn0 == 1) 8628 code = GE_EXPR; 8629 /* -CST >= arg1 -> -CST-1 > arg1. */ 8630 else if (code == GE_EXPR && sgn0 == -1) 8631 code = GT_EXPR; 8632 else 8633 return NULL_TREE; 8634 /* arg1 code' CST' might be more canonical. */ 8635 swap = true; 8636 } 8637 else 8638 { 8639 /* A - CST < arg1 -> A - CST-1 <= arg1. */ 8640 if (code == LT_EXPR 8641 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR)) 8642 code = LE_EXPR; 8643 /* A + CST > arg1 -> A + CST-1 >= arg1. */ 8644 else if (code == GT_EXPR 8645 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR)) 8646 code = GE_EXPR; 8647 /* A + CST <= arg1 -> A + CST-1 < arg1. */ 8648 else if (code == LE_EXPR 8649 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR)) 8650 code = LT_EXPR; 8651 /* A - CST >= arg1 -> A - CST-1 > arg1. */ 8652 else if (code == GE_EXPR 8653 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR)) 8654 code = GT_EXPR; 8655 else 8656 return NULL_TREE; 8657 *strict_overflow_p = true; 8658 } 8659 8660 /* Now build the constant reduced in magnitude. But not if that 8661 would produce one outside of its types range. */ 8662 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0)) 8663 && ((sgn0 == 1 8664 && TYPE_MIN_VALUE (TREE_TYPE (cst0)) 8665 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0)))) 8666 || (sgn0 == -1 8667 && TYPE_MAX_VALUE (TREE_TYPE (cst0)) 8668 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0)))))) 8669 /* We cannot swap the comparison here as that would cause us to 8670 endlessly recurse. */ 8671 return NULL_TREE; 8672 8673 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR, 8674 cst0, build_int_cst (TREE_TYPE (cst0), 1)); 8675 if (code0 != INTEGER_CST) 8676 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t); 8677 t = fold_convert (TREE_TYPE (arg1), t); 8678 8679 /* If swapping might yield to a more canonical form, do so. */ 8680 if (swap) 8681 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t); 8682 else 8683 return fold_build2_loc (loc, code, type, t, arg1); 8684 } 8685 8686 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined 8687 overflow further. Try to decrease the magnitude of constants involved 8688 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa 8689 and put sole constants at the second argument position. 8690 Returns the canonicalized tree if changed, otherwise NULL_TREE. */ 8691 8692 static tree 8693 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type, 8694 tree arg0, tree arg1) 8695 { 8696 tree t; 8697 bool strict_overflow_p; 8698 const char * const warnmsg = G_("assuming signed overflow does not occur " 8699 "when reducing constant in comparison"); 8700 8701 /* Try canonicalization by simplifying arg0. */ 8702 strict_overflow_p = false; 8703 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1, 8704 &strict_overflow_p); 8705 if (t) 8706 { 8707 if (strict_overflow_p) 8708 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE); 8709 return t; 8710 } 8711 8712 /* Try canonicalization by simplifying arg1 using the swapped 8713 comparison. */ 8714 code = swap_tree_comparison (code); 8715 strict_overflow_p = false; 8716 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0, 8717 &strict_overflow_p); 8718 if (t && strict_overflow_p) 8719 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE); 8720 return t; 8721 } 8722 8723 /* Return whether BASE + OFFSET + BITPOS may wrap around the address 8724 space. This is used to avoid issuing overflow warnings for 8725 expressions like &p->x which can not wrap. */ 8726 8727 static bool 8728 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos) 8729 { 8730 unsigned HOST_WIDE_INT offset_low, total_low; 8731 HOST_WIDE_INT size, offset_high, total_high; 8732 8733 if (!POINTER_TYPE_P (TREE_TYPE (base))) 8734 return true; 8735 8736 if (bitpos < 0) 8737 return true; 8738 8739 if (offset == NULL_TREE) 8740 { 8741 offset_low = 0; 8742 offset_high = 0; 8743 } 8744 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset)) 8745 return true; 8746 else 8747 { 8748 offset_low = TREE_INT_CST_LOW (offset); 8749 offset_high = TREE_INT_CST_HIGH (offset); 8750 } 8751 8752 if (add_double_with_sign (offset_low, offset_high, 8753 bitpos / BITS_PER_UNIT, 0, 8754 &total_low, &total_high, 8755 true)) 8756 return true; 8757 8758 if (total_high != 0) 8759 return true; 8760 8761 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base))); 8762 if (size <= 0) 8763 return true; 8764 8765 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an 8766 array. */ 8767 if (TREE_CODE (base) == ADDR_EXPR) 8768 { 8769 HOST_WIDE_INT base_size; 8770 8771 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0))); 8772 if (base_size > 0 && size < base_size) 8773 size = base_size; 8774 } 8775 8776 return total_low > (unsigned HOST_WIDE_INT) size; 8777 } 8778 8779 /* Subroutine of fold_binary. This routine performs all of the 8780 transformations that are common to the equality/inequality 8781 operators (EQ_EXPR and NE_EXPR) and the ordering operators 8782 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than 8783 fold_binary should call fold_binary. Fold a comparison with 8784 tree code CODE and type TYPE with operands OP0 and OP1. Return 8785 the folded comparison or NULL_TREE. */ 8786 8787 static tree 8788 fold_comparison (location_t loc, enum tree_code code, tree type, 8789 tree op0, tree op1) 8790 { 8791 tree arg0, arg1, tem; 8792 8793 arg0 = op0; 8794 arg1 = op1; 8795 8796 STRIP_SIGN_NOPS (arg0); 8797 STRIP_SIGN_NOPS (arg1); 8798 8799 tem = fold_relational_const (code, type, arg0, arg1); 8800 if (tem != NULL_TREE) 8801 return tem; 8802 8803 /* If one arg is a real or integer constant, put it last. */ 8804 if (tree_swap_operands_p (arg0, arg1, true)) 8805 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0); 8806 8807 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */ 8808 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) 8809 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 8810 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)) 8811 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 8812 && (TREE_CODE (arg1) == INTEGER_CST 8813 && !TREE_OVERFLOW (arg1))) 8814 { 8815 tree const1 = TREE_OPERAND (arg0, 1); 8816 tree const2 = arg1; 8817 tree variable = TREE_OPERAND (arg0, 0); 8818 tree lhs; 8819 int lhs_add; 8820 lhs_add = TREE_CODE (arg0) != PLUS_EXPR; 8821 8822 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR, 8823 TREE_TYPE (arg1), const2, const1); 8824 8825 /* If the constant operation overflowed this can be 8826 simplified as a comparison against INT_MAX/INT_MIN. */ 8827 if (TREE_CODE (lhs) == INTEGER_CST 8828 && TREE_OVERFLOW (lhs)) 8829 { 8830 int const1_sgn = tree_int_cst_sgn (const1); 8831 enum tree_code code2 = code; 8832 8833 /* Get the sign of the constant on the lhs if the 8834 operation were VARIABLE + CONST1. */ 8835 if (TREE_CODE (arg0) == MINUS_EXPR) 8836 const1_sgn = -const1_sgn; 8837 8838 /* The sign of the constant determines if we overflowed 8839 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1). 8840 Canonicalize to the INT_MIN overflow by swapping the comparison 8841 if necessary. */ 8842 if (const1_sgn == -1) 8843 code2 = swap_tree_comparison (code); 8844 8845 /* We now can look at the canonicalized case 8846 VARIABLE + 1 CODE2 INT_MIN 8847 and decide on the result. */ 8848 if (code2 == LT_EXPR 8849 || code2 == LE_EXPR 8850 || code2 == EQ_EXPR) 8851 return omit_one_operand_loc (loc, type, boolean_false_node, variable); 8852 else if (code2 == NE_EXPR 8853 || code2 == GE_EXPR 8854 || code2 == GT_EXPR) 8855 return omit_one_operand_loc (loc, type, boolean_true_node, variable); 8856 } 8857 8858 if (TREE_CODE (lhs) == TREE_CODE (arg1) 8859 && (TREE_CODE (lhs) != INTEGER_CST 8860 || !TREE_OVERFLOW (lhs))) 8861 { 8862 if (code != EQ_EXPR && code != NE_EXPR) 8863 fold_overflow_warning ("assuming signed overflow does not occur " 8864 "when changing X +- C1 cmp C2 to " 8865 "X cmp C1 +- C2", 8866 WARN_STRICT_OVERFLOW_COMPARISON); 8867 return fold_build2_loc (loc, code, type, variable, lhs); 8868 } 8869 } 8870 8871 /* For comparisons of pointers we can decompose it to a compile time 8872 comparison of the base objects and the offsets into the object. 8873 This requires at least one operand being an ADDR_EXPR or a 8874 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */ 8875 if (POINTER_TYPE_P (TREE_TYPE (arg0)) 8876 && (TREE_CODE (arg0) == ADDR_EXPR 8877 || TREE_CODE (arg1) == ADDR_EXPR 8878 || TREE_CODE (arg0) == POINTER_PLUS_EXPR 8879 || TREE_CODE (arg1) == POINTER_PLUS_EXPR)) 8880 { 8881 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE; 8882 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0; 8883 enum machine_mode mode; 8884 int volatilep, unsignedp; 8885 bool indirect_base0 = false, indirect_base1 = false; 8886 8887 /* Get base and offset for the access. Strip ADDR_EXPR for 8888 get_inner_reference, but put it back by stripping INDIRECT_REF 8889 off the base object if possible. indirect_baseN will be true 8890 if baseN is not an address but refers to the object itself. */ 8891 base0 = arg0; 8892 if (TREE_CODE (arg0) == ADDR_EXPR) 8893 { 8894 base0 = get_inner_reference (TREE_OPERAND (arg0, 0), 8895 &bitsize, &bitpos0, &offset0, &mode, 8896 &unsignedp, &volatilep, false); 8897 if (TREE_CODE (base0) == INDIRECT_REF) 8898 base0 = TREE_OPERAND (base0, 0); 8899 else 8900 indirect_base0 = true; 8901 } 8902 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR) 8903 { 8904 base0 = TREE_OPERAND (arg0, 0); 8905 STRIP_SIGN_NOPS (base0); 8906 if (TREE_CODE (base0) == ADDR_EXPR) 8907 { 8908 base0 = TREE_OPERAND (base0, 0); 8909 indirect_base0 = true; 8910 } 8911 offset0 = TREE_OPERAND (arg0, 1); 8912 if (host_integerp (offset0, 0)) 8913 { 8914 HOST_WIDE_INT off = size_low_cst (offset0); 8915 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off) 8916 * BITS_PER_UNIT) 8917 / BITS_PER_UNIT == (HOST_WIDE_INT) off) 8918 { 8919 bitpos0 = off * BITS_PER_UNIT; 8920 offset0 = NULL_TREE; 8921 } 8922 } 8923 } 8924 8925 base1 = arg1; 8926 if (TREE_CODE (arg1) == ADDR_EXPR) 8927 { 8928 base1 = get_inner_reference (TREE_OPERAND (arg1, 0), 8929 &bitsize, &bitpos1, &offset1, &mode, 8930 &unsignedp, &volatilep, false); 8931 if (TREE_CODE (base1) == INDIRECT_REF) 8932 base1 = TREE_OPERAND (base1, 0); 8933 else 8934 indirect_base1 = true; 8935 } 8936 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR) 8937 { 8938 base1 = TREE_OPERAND (arg1, 0); 8939 STRIP_SIGN_NOPS (base1); 8940 if (TREE_CODE (base1) == ADDR_EXPR) 8941 { 8942 base1 = TREE_OPERAND (base1, 0); 8943 indirect_base1 = true; 8944 } 8945 offset1 = TREE_OPERAND (arg1, 1); 8946 if (host_integerp (offset1, 0)) 8947 { 8948 HOST_WIDE_INT off = size_low_cst (offset1); 8949 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off) 8950 * BITS_PER_UNIT) 8951 / BITS_PER_UNIT == (HOST_WIDE_INT) off) 8952 { 8953 bitpos1 = off * BITS_PER_UNIT; 8954 offset1 = NULL_TREE; 8955 } 8956 } 8957 } 8958 8959 /* A local variable can never be pointed to by 8960 the default SSA name of an incoming parameter. */ 8961 if ((TREE_CODE (arg0) == ADDR_EXPR 8962 && indirect_base0 8963 && TREE_CODE (base0) == VAR_DECL 8964 && auto_var_in_fn_p (base0, current_function_decl) 8965 && !indirect_base1 8966 && TREE_CODE (base1) == SSA_NAME 8967 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL 8968 && SSA_NAME_IS_DEFAULT_DEF (base1)) 8969 || (TREE_CODE (arg1) == ADDR_EXPR 8970 && indirect_base1 8971 && TREE_CODE (base1) == VAR_DECL 8972 && auto_var_in_fn_p (base1, current_function_decl) 8973 && !indirect_base0 8974 && TREE_CODE (base0) == SSA_NAME 8975 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL 8976 && SSA_NAME_IS_DEFAULT_DEF (base0))) 8977 { 8978 if (code == NE_EXPR) 8979 return constant_boolean_node (1, type); 8980 else if (code == EQ_EXPR) 8981 return constant_boolean_node (0, type); 8982 } 8983 /* If we have equivalent bases we might be able to simplify. */ 8984 else if (indirect_base0 == indirect_base1 8985 && operand_equal_p (base0, base1, 0)) 8986 { 8987 /* We can fold this expression to a constant if the non-constant 8988 offset parts are equal. */ 8989 if ((offset0 == offset1 8990 || (offset0 && offset1 8991 && operand_equal_p (offset0, offset1, 0))) 8992 && (code == EQ_EXPR 8993 || code == NE_EXPR 8994 || (indirect_base0 && DECL_P (base0)) 8995 || POINTER_TYPE_OVERFLOW_UNDEFINED)) 8996 8997 { 8998 if (code != EQ_EXPR 8999 && code != NE_EXPR 9000 && bitpos0 != bitpos1 9001 && (pointer_may_wrap_p (base0, offset0, bitpos0) 9002 || pointer_may_wrap_p (base1, offset1, bitpos1))) 9003 fold_overflow_warning (("assuming pointer wraparound does not " 9004 "occur when comparing P +- C1 with " 9005 "P +- C2"), 9006 WARN_STRICT_OVERFLOW_CONDITIONAL); 9007 9008 switch (code) 9009 { 9010 case EQ_EXPR: 9011 return constant_boolean_node (bitpos0 == bitpos1, type); 9012 case NE_EXPR: 9013 return constant_boolean_node (bitpos0 != bitpos1, type); 9014 case LT_EXPR: 9015 return constant_boolean_node (bitpos0 < bitpos1, type); 9016 case LE_EXPR: 9017 return constant_boolean_node (bitpos0 <= bitpos1, type); 9018 case GE_EXPR: 9019 return constant_boolean_node (bitpos0 >= bitpos1, type); 9020 case GT_EXPR: 9021 return constant_boolean_node (bitpos0 > bitpos1, type); 9022 default:; 9023 } 9024 } 9025 /* We can simplify the comparison to a comparison of the variable 9026 offset parts if the constant offset parts are equal. 9027 Be careful to use signed size type here because otherwise we 9028 mess with array offsets in the wrong way. This is possible 9029 because pointer arithmetic is restricted to retain within an 9030 object and overflow on pointer differences is undefined as of 9031 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */ 9032 else if (bitpos0 == bitpos1 9033 && ((code == EQ_EXPR || code == NE_EXPR) 9034 || (indirect_base0 && DECL_P (base0)) 9035 || POINTER_TYPE_OVERFLOW_UNDEFINED)) 9036 { 9037 /* By converting to signed size type we cover middle-end pointer 9038 arithmetic which operates on unsigned pointer types of size 9039 type size and ARRAY_REF offsets which are properly sign or 9040 zero extended from their type in case it is narrower than 9041 size type. */ 9042 if (offset0 == NULL_TREE) 9043 offset0 = build_int_cst (ssizetype, 0); 9044 else 9045 offset0 = fold_convert_loc (loc, ssizetype, offset0); 9046 if (offset1 == NULL_TREE) 9047 offset1 = build_int_cst (ssizetype, 0); 9048 else 9049 offset1 = fold_convert_loc (loc, ssizetype, offset1); 9050 9051 if (code != EQ_EXPR 9052 && code != NE_EXPR 9053 && (pointer_may_wrap_p (base0, offset0, bitpos0) 9054 || pointer_may_wrap_p (base1, offset1, bitpos1))) 9055 fold_overflow_warning (("assuming pointer wraparound does not " 9056 "occur when comparing P +- C1 with " 9057 "P +- C2"), 9058 WARN_STRICT_OVERFLOW_COMPARISON); 9059 9060 return fold_build2_loc (loc, code, type, offset0, offset1); 9061 } 9062 } 9063 /* For non-equal bases we can simplify if they are addresses 9064 of local binding decls or constants. */ 9065 else if (indirect_base0 && indirect_base1 9066 /* We know that !operand_equal_p (base0, base1, 0) 9067 because the if condition was false. But make 9068 sure two decls are not the same. */ 9069 && base0 != base1 9070 && TREE_CODE (arg0) == ADDR_EXPR 9071 && TREE_CODE (arg1) == ADDR_EXPR 9072 && (((TREE_CODE (base0) == VAR_DECL 9073 || TREE_CODE (base0) == PARM_DECL) 9074 && (targetm.binds_local_p (base0) 9075 || CONSTANT_CLASS_P (base1))) 9076 || CONSTANT_CLASS_P (base0)) 9077 && (((TREE_CODE (base1) == VAR_DECL 9078 || TREE_CODE (base1) == PARM_DECL) 9079 && (targetm.binds_local_p (base1) 9080 || CONSTANT_CLASS_P (base0))) 9081 || CONSTANT_CLASS_P (base1))) 9082 { 9083 if (code == EQ_EXPR) 9084 return omit_two_operands_loc (loc, type, boolean_false_node, 9085 arg0, arg1); 9086 else if (code == NE_EXPR) 9087 return omit_two_operands_loc (loc, type, boolean_true_node, 9088 arg0, arg1); 9089 } 9090 /* For equal offsets we can simplify to a comparison of the 9091 base addresses. */ 9092 else if (bitpos0 == bitpos1 9093 && (indirect_base0 9094 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0) 9095 && (indirect_base1 9096 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1) 9097 && ((offset0 == offset1) 9098 || (offset0 && offset1 9099 && operand_equal_p (offset0, offset1, 0)))) 9100 { 9101 if (indirect_base0) 9102 base0 = build_fold_addr_expr_loc (loc, base0); 9103 if (indirect_base1) 9104 base1 = build_fold_addr_expr_loc (loc, base1); 9105 return fold_build2_loc (loc, code, type, base0, base1); 9106 } 9107 } 9108 9109 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to 9110 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if 9111 the resulting offset is smaller in absolute value than the 9112 original one. */ 9113 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)) 9114 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) 9115 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 9116 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))) 9117 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR) 9118 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST 9119 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1)))) 9120 { 9121 tree const1 = TREE_OPERAND (arg0, 1); 9122 tree const2 = TREE_OPERAND (arg1, 1); 9123 tree variable1 = TREE_OPERAND (arg0, 0); 9124 tree variable2 = TREE_OPERAND (arg1, 0); 9125 tree cst; 9126 const char * const warnmsg = G_("assuming signed overflow does not " 9127 "occur when combining constants around " 9128 "a comparison"); 9129 9130 /* Put the constant on the side where it doesn't overflow and is 9131 of lower absolute value than before. */ 9132 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1) 9133 ? MINUS_EXPR : PLUS_EXPR, 9134 const2, const1); 9135 if (!TREE_OVERFLOW (cst) 9136 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)) 9137 { 9138 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON); 9139 return fold_build2_loc (loc, code, type, 9140 variable1, 9141 fold_build2_loc (loc, 9142 TREE_CODE (arg1), TREE_TYPE (arg1), 9143 variable2, cst)); 9144 } 9145 9146 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1) 9147 ? MINUS_EXPR : PLUS_EXPR, 9148 const1, const2); 9149 if (!TREE_OVERFLOW (cst) 9150 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)) 9151 { 9152 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON); 9153 return fold_build2_loc (loc, code, type, 9154 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0), 9155 variable1, cst), 9156 variable2); 9157 } 9158 } 9159 9160 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the 9161 signed arithmetic case. That form is created by the compiler 9162 often enough for folding it to be of value. One example is in 9163 computing loop trip counts after Operator Strength Reduction. */ 9164 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)) 9165 && TREE_CODE (arg0) == MULT_EXPR 9166 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 9167 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))) 9168 && integer_zerop (arg1)) 9169 { 9170 tree const1 = TREE_OPERAND (arg0, 1); 9171 tree const2 = arg1; /* zero */ 9172 tree variable1 = TREE_OPERAND (arg0, 0); 9173 enum tree_code cmp_code = code; 9174 9175 /* Handle unfolded multiplication by zero. */ 9176 if (integer_zerop (const1)) 9177 return fold_build2_loc (loc, cmp_code, type, const1, const2); 9178 9179 fold_overflow_warning (("assuming signed overflow does not occur when " 9180 "eliminating multiplication in comparison " 9181 "with zero"), 9182 WARN_STRICT_OVERFLOW_COMPARISON); 9183 9184 /* If const1 is negative we swap the sense of the comparison. */ 9185 if (tree_int_cst_sgn (const1) < 0) 9186 cmp_code = swap_tree_comparison (cmp_code); 9187 9188 return fold_build2_loc (loc, cmp_code, type, variable1, const2); 9189 } 9190 9191 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1); 9192 if (tem) 9193 return tem; 9194 9195 if (FLOAT_TYPE_P (TREE_TYPE (arg0))) 9196 { 9197 tree targ0 = strip_float_extensions (arg0); 9198 tree targ1 = strip_float_extensions (arg1); 9199 tree newtype = TREE_TYPE (targ0); 9200 9201 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype)) 9202 newtype = TREE_TYPE (targ1); 9203 9204 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */ 9205 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0))) 9206 return fold_build2_loc (loc, code, type, 9207 fold_convert_loc (loc, newtype, targ0), 9208 fold_convert_loc (loc, newtype, targ1)); 9209 9210 /* (-a) CMP (-b) -> b CMP a */ 9211 if (TREE_CODE (arg0) == NEGATE_EXPR 9212 && TREE_CODE (arg1) == NEGATE_EXPR) 9213 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0), 9214 TREE_OPERAND (arg0, 0)); 9215 9216 if (TREE_CODE (arg1) == REAL_CST) 9217 { 9218 REAL_VALUE_TYPE cst; 9219 cst = TREE_REAL_CST (arg1); 9220 9221 /* (-a) CMP CST -> a swap(CMP) (-CST) */ 9222 if (TREE_CODE (arg0) == NEGATE_EXPR) 9223 return fold_build2_loc (loc, swap_tree_comparison (code), type, 9224 TREE_OPERAND (arg0, 0), 9225 build_real (TREE_TYPE (arg1), 9226 real_value_negate (&cst))); 9227 9228 /* IEEE doesn't distinguish +0 and -0 in comparisons. */ 9229 /* a CMP (-0) -> a CMP 0 */ 9230 if (REAL_VALUE_MINUS_ZERO (cst)) 9231 return fold_build2_loc (loc, code, type, arg0, 9232 build_real (TREE_TYPE (arg1), dconst0)); 9233 9234 /* x != NaN is always true, other ops are always false. */ 9235 if (REAL_VALUE_ISNAN (cst) 9236 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))) 9237 { 9238 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node; 9239 return omit_one_operand_loc (loc, type, tem, arg0); 9240 } 9241 9242 /* Fold comparisons against infinity. */ 9243 if (REAL_VALUE_ISINF (cst) 9244 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))) 9245 { 9246 tem = fold_inf_compare (loc, code, type, arg0, arg1); 9247 if (tem != NULL_TREE) 9248 return tem; 9249 } 9250 } 9251 9252 /* If this is a comparison of a real constant with a PLUS_EXPR 9253 or a MINUS_EXPR of a real constant, we can convert it into a 9254 comparison with a revised real constant as long as no overflow 9255 occurs when unsafe_math_optimizations are enabled. */ 9256 if (flag_unsafe_math_optimizations 9257 && TREE_CODE (arg1) == REAL_CST 9258 && (TREE_CODE (arg0) == PLUS_EXPR 9259 || TREE_CODE (arg0) == MINUS_EXPR) 9260 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST 9261 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR 9262 ? MINUS_EXPR : PLUS_EXPR, 9263 arg1, TREE_OPERAND (arg0, 1))) 9264 && !TREE_OVERFLOW (tem)) 9265 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem); 9266 9267 /* Likewise, we can simplify a comparison of a real constant with 9268 a MINUS_EXPR whose first operand is also a real constant, i.e. 9269 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on 9270 floating-point types only if -fassociative-math is set. */ 9271 if (flag_associative_math 9272 && TREE_CODE (arg1) == REAL_CST 9273 && TREE_CODE (arg0) == MINUS_EXPR 9274 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST 9275 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0), 9276 arg1)) 9277 && !TREE_OVERFLOW (tem)) 9278 return fold_build2_loc (loc, swap_tree_comparison (code), type, 9279 TREE_OPERAND (arg0, 1), tem); 9280 9281 /* Fold comparisons against built-in math functions. */ 9282 if (TREE_CODE (arg1) == REAL_CST 9283 && flag_unsafe_math_optimizations 9284 && ! flag_errno_math) 9285 { 9286 enum built_in_function fcode = builtin_mathfn_code (arg0); 9287 9288 if (fcode != END_BUILTINS) 9289 { 9290 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1); 9291 if (tem != NULL_TREE) 9292 return tem; 9293 } 9294 } 9295 } 9296 9297 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE 9298 && CONVERT_EXPR_P (arg0)) 9299 { 9300 /* If we are widening one operand of an integer comparison, 9301 see if the other operand is similarly being widened. Perhaps we 9302 can do the comparison in the narrower type. */ 9303 tem = fold_widened_comparison (loc, code, type, arg0, arg1); 9304 if (tem) 9305 return tem; 9306 9307 /* Or if we are changing signedness. */ 9308 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1); 9309 if (tem) 9310 return tem; 9311 } 9312 9313 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a 9314 constant, we can simplify it. */ 9315 if (TREE_CODE (arg1) == INTEGER_CST 9316 && (TREE_CODE (arg0) == MIN_EXPR 9317 || TREE_CODE (arg0) == MAX_EXPR) 9318 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 9319 { 9320 tem = optimize_minmax_comparison (loc, code, type, op0, op1); 9321 if (tem) 9322 return tem; 9323 } 9324 9325 /* Simplify comparison of something with itself. (For IEEE 9326 floating-point, we can only do some of these simplifications.) */ 9327 if (operand_equal_p (arg0, arg1, 0)) 9328 { 9329 switch (code) 9330 { 9331 case EQ_EXPR: 9332 if (! FLOAT_TYPE_P (TREE_TYPE (arg0)) 9333 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))) 9334 return constant_boolean_node (1, type); 9335 break; 9336 9337 case GE_EXPR: 9338 case LE_EXPR: 9339 if (! FLOAT_TYPE_P (TREE_TYPE (arg0)) 9340 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))) 9341 return constant_boolean_node (1, type); 9342 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1); 9343 9344 case NE_EXPR: 9345 /* For NE, we can only do this simplification if integer 9346 or we don't honor IEEE floating point NaNs. */ 9347 if (FLOAT_TYPE_P (TREE_TYPE (arg0)) 9348 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))) 9349 break; 9350 /* ... fall through ... */ 9351 case GT_EXPR: 9352 case LT_EXPR: 9353 return constant_boolean_node (0, type); 9354 default: 9355 gcc_unreachable (); 9356 } 9357 } 9358 9359 /* If we are comparing an expression that just has comparisons 9360 of two integer values, arithmetic expressions of those comparisons, 9361 and constants, we can simplify it. There are only three cases 9362 to check: the two values can either be equal, the first can be 9363 greater, or the second can be greater. Fold the expression for 9364 those three values. Since each value must be 0 or 1, we have 9365 eight possibilities, each of which corresponds to the constant 0 9366 or 1 or one of the six possible comparisons. 9367 9368 This handles common cases like (a > b) == 0 but also handles 9369 expressions like ((x > y) - (y > x)) > 0, which supposedly 9370 occur in macroized code. */ 9371 9372 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST) 9373 { 9374 tree cval1 = 0, cval2 = 0; 9375 int save_p = 0; 9376 9377 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p) 9378 /* Don't handle degenerate cases here; they should already 9379 have been handled anyway. */ 9380 && cval1 != 0 && cval2 != 0 9381 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2)) 9382 && TREE_TYPE (cval1) == TREE_TYPE (cval2) 9383 && INTEGRAL_TYPE_P (TREE_TYPE (cval1)) 9384 && TYPE_MAX_VALUE (TREE_TYPE (cval1)) 9385 && TYPE_MAX_VALUE (TREE_TYPE (cval2)) 9386 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)), 9387 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0)) 9388 { 9389 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1)); 9390 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1)); 9391 9392 /* We can't just pass T to eval_subst in case cval1 or cval2 9393 was the same as ARG1. */ 9394 9395 tree high_result 9396 = fold_build2_loc (loc, code, type, 9397 eval_subst (loc, arg0, cval1, maxval, 9398 cval2, minval), 9399 arg1); 9400 tree equal_result 9401 = fold_build2_loc (loc, code, type, 9402 eval_subst (loc, arg0, cval1, maxval, 9403 cval2, maxval), 9404 arg1); 9405 tree low_result 9406 = fold_build2_loc (loc, code, type, 9407 eval_subst (loc, arg0, cval1, minval, 9408 cval2, maxval), 9409 arg1); 9410 9411 /* All three of these results should be 0 or 1. Confirm they are. 9412 Then use those values to select the proper code to use. */ 9413 9414 if (TREE_CODE (high_result) == INTEGER_CST 9415 && TREE_CODE (equal_result) == INTEGER_CST 9416 && TREE_CODE (low_result) == INTEGER_CST) 9417 { 9418 /* Make a 3-bit mask with the high-order bit being the 9419 value for `>', the next for '=', and the low for '<'. */ 9420 switch ((integer_onep (high_result) * 4) 9421 + (integer_onep (equal_result) * 2) 9422 + integer_onep (low_result)) 9423 { 9424 case 0: 9425 /* Always false. */ 9426 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 9427 case 1: 9428 code = LT_EXPR; 9429 break; 9430 case 2: 9431 code = EQ_EXPR; 9432 break; 9433 case 3: 9434 code = LE_EXPR; 9435 break; 9436 case 4: 9437 code = GT_EXPR; 9438 break; 9439 case 5: 9440 code = NE_EXPR; 9441 break; 9442 case 6: 9443 code = GE_EXPR; 9444 break; 9445 case 7: 9446 /* Always true. */ 9447 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 9448 } 9449 9450 if (save_p) 9451 { 9452 tem = save_expr (build2 (code, type, cval1, cval2)); 9453 SET_EXPR_LOCATION (tem, loc); 9454 return tem; 9455 } 9456 return fold_build2_loc (loc, code, type, cval1, cval2); 9457 } 9458 } 9459 } 9460 9461 /* We can fold X/C1 op C2 where C1 and C2 are integer constants 9462 into a single range test. */ 9463 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR 9464 || TREE_CODE (arg0) == EXACT_DIV_EXPR) 9465 && TREE_CODE (arg1) == INTEGER_CST 9466 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 9467 && !integer_zerop (TREE_OPERAND (arg0, 1)) 9468 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)) 9469 && !TREE_OVERFLOW (arg1)) 9470 { 9471 tem = fold_div_compare (loc, code, type, arg0, arg1); 9472 if (tem != NULL_TREE) 9473 return tem; 9474 } 9475 9476 /* Fold ~X op ~Y as Y op X. */ 9477 if (TREE_CODE (arg0) == BIT_NOT_EXPR 9478 && TREE_CODE (arg1) == BIT_NOT_EXPR) 9479 { 9480 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0)); 9481 return fold_build2_loc (loc, code, type, 9482 fold_convert_loc (loc, cmp_type, 9483 TREE_OPERAND (arg1, 0)), 9484 TREE_OPERAND (arg0, 0)); 9485 } 9486 9487 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */ 9488 if (TREE_CODE (arg0) == BIT_NOT_EXPR 9489 && TREE_CODE (arg1) == INTEGER_CST) 9490 { 9491 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0)); 9492 return fold_build2_loc (loc, swap_tree_comparison (code), type, 9493 TREE_OPERAND (arg0, 0), 9494 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type, 9495 fold_convert_loc (loc, cmp_type, arg1))); 9496 } 9497 9498 return NULL_TREE; 9499 } 9500 9501 9502 /* Subroutine of fold_binary. Optimize complex multiplications of the 9503 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The 9504 argument EXPR represents the expression "z" of type TYPE. */ 9505 9506 static tree 9507 fold_mult_zconjz (location_t loc, tree type, tree expr) 9508 { 9509 tree itype = TREE_TYPE (type); 9510 tree rpart, ipart, tem; 9511 9512 if (TREE_CODE (expr) == COMPLEX_EXPR) 9513 { 9514 rpart = TREE_OPERAND (expr, 0); 9515 ipart = TREE_OPERAND (expr, 1); 9516 } 9517 else if (TREE_CODE (expr) == COMPLEX_CST) 9518 { 9519 rpart = TREE_REALPART (expr); 9520 ipart = TREE_IMAGPART (expr); 9521 } 9522 else 9523 { 9524 expr = save_expr (expr); 9525 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr); 9526 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr); 9527 } 9528 9529 rpart = save_expr (rpart); 9530 ipart = save_expr (ipart); 9531 tem = fold_build2_loc (loc, PLUS_EXPR, itype, 9532 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart), 9533 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart)); 9534 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem, 9535 build_zero_cst (itype)); 9536 } 9537 9538 9539 /* Subroutine of fold_binary. If P is the value of EXPR, computes 9540 power-of-two M and (arbitrary) N such that M divides (P-N). This condition 9541 guarantees that P and N have the same least significant log2(M) bits. 9542 N is not otherwise constrained. In particular, N is not normalized to 9543 0 <= N < M as is common. In general, the precise value of P is unknown. 9544 M is chosen as large as possible such that constant N can be determined. 9545 9546 Returns M and sets *RESIDUE to N. 9547 9548 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into 9549 account. This is not always possible due to PR 35705. 9550 */ 9551 9552 static unsigned HOST_WIDE_INT 9553 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue, 9554 bool allow_func_align) 9555 { 9556 enum tree_code code; 9557 9558 *residue = 0; 9559 9560 code = TREE_CODE (expr); 9561 if (code == ADDR_EXPR) 9562 { 9563 unsigned int bitalign; 9564 bitalign = get_object_alignment_1 (TREE_OPERAND (expr, 0), residue); 9565 *residue /= BITS_PER_UNIT; 9566 return bitalign / BITS_PER_UNIT; 9567 } 9568 else if (code == POINTER_PLUS_EXPR) 9569 { 9570 tree op0, op1; 9571 unsigned HOST_WIDE_INT modulus; 9572 enum tree_code inner_code; 9573 9574 op0 = TREE_OPERAND (expr, 0); 9575 STRIP_NOPS (op0); 9576 modulus = get_pointer_modulus_and_residue (op0, residue, 9577 allow_func_align); 9578 9579 op1 = TREE_OPERAND (expr, 1); 9580 STRIP_NOPS (op1); 9581 inner_code = TREE_CODE (op1); 9582 if (inner_code == INTEGER_CST) 9583 { 9584 *residue += TREE_INT_CST_LOW (op1); 9585 return modulus; 9586 } 9587 else if (inner_code == MULT_EXPR) 9588 { 9589 op1 = TREE_OPERAND (op1, 1); 9590 if (TREE_CODE (op1) == INTEGER_CST) 9591 { 9592 unsigned HOST_WIDE_INT align; 9593 9594 /* Compute the greatest power-of-2 divisor of op1. */ 9595 align = TREE_INT_CST_LOW (op1); 9596 align &= -align; 9597 9598 /* If align is non-zero and less than *modulus, replace 9599 *modulus with align., If align is 0, then either op1 is 0 9600 or the greatest power-of-2 divisor of op1 doesn't fit in an 9601 unsigned HOST_WIDE_INT. In either case, no additional 9602 constraint is imposed. */ 9603 if (align) 9604 modulus = MIN (modulus, align); 9605 9606 return modulus; 9607 } 9608 } 9609 } 9610 9611 /* If we get here, we were unable to determine anything useful about the 9612 expression. */ 9613 return 1; 9614 } 9615 9616 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or 9617 CONSTRUCTOR ARG into array ELTS and return true if successful. */ 9618 9619 static bool 9620 vec_cst_ctor_to_array (tree arg, tree *elts) 9621 { 9622 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i; 9623 9624 if (TREE_CODE (arg) == VECTOR_CST) 9625 { 9626 tree t; 9627 9628 for (i = 0, t = TREE_VECTOR_CST_ELTS (arg); 9629 i < nelts && t; i++, t = TREE_CHAIN (t)) 9630 elts[i] = TREE_VALUE (t); 9631 if (t) 9632 return false; 9633 } 9634 else if (TREE_CODE (arg) == CONSTRUCTOR) 9635 { 9636 constructor_elt *elt; 9637 9638 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (arg), i, elt) 9639 if (i >= nelts) 9640 return false; 9641 else 9642 elts[i] = elt->value; 9643 } 9644 else 9645 return false; 9646 for (; i < nelts; i++) 9647 elts[i] 9648 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node); 9649 return true; 9650 } 9651 9652 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL 9653 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful, 9654 NULL_TREE otherwise. */ 9655 9656 static tree 9657 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel) 9658 { 9659 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i; 9660 tree *elts; 9661 bool need_ctor = false; 9662 9663 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts 9664 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts); 9665 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type) 9666 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type)) 9667 return NULL_TREE; 9668 9669 elts = XALLOCAVEC (tree, nelts * 3); 9670 if (!vec_cst_ctor_to_array (arg0, elts) 9671 || !vec_cst_ctor_to_array (arg1, elts + nelts)) 9672 return NULL_TREE; 9673 9674 for (i = 0; i < nelts; i++) 9675 { 9676 if (!CONSTANT_CLASS_P (elts[sel[i]])) 9677 need_ctor = true; 9678 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]); 9679 } 9680 9681 if (need_ctor) 9682 { 9683 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, nelts); 9684 for (i = 0; i < nelts; i++) 9685 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]); 9686 return build_constructor (type, v); 9687 } 9688 else 9689 { 9690 tree vals = NULL_TREE; 9691 for (i = 0; i < nelts; i++) 9692 vals = tree_cons (NULL_TREE, elts[3 * nelts - i - 1], vals); 9693 return build_vector (type, vals); 9694 } 9695 } 9696 9697 /* Try to fold a pointer difference of type TYPE two address expressions of 9698 array references AREF0 and AREF1 using location LOC. Return a 9699 simplified expression for the difference or NULL_TREE. */ 9700 9701 static tree 9702 fold_addr_of_array_ref_difference (location_t loc, tree type, 9703 tree aref0, tree aref1) 9704 { 9705 tree base0 = TREE_OPERAND (aref0, 0); 9706 tree base1 = TREE_OPERAND (aref1, 0); 9707 tree base_offset = build_int_cst (type, 0); 9708 9709 /* If the bases are array references as well, recurse. If the bases 9710 are pointer indirections compute the difference of the pointers. 9711 If the bases are equal, we are set. */ 9712 if ((TREE_CODE (base0) == ARRAY_REF 9713 && TREE_CODE (base1) == ARRAY_REF 9714 && (base_offset 9715 = fold_addr_of_array_ref_difference (loc, type, base0, base1))) 9716 || (INDIRECT_REF_P (base0) 9717 && INDIRECT_REF_P (base1) 9718 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type, 9719 TREE_OPERAND (base0, 0), 9720 TREE_OPERAND (base1, 0)))) 9721 || operand_equal_p (base0, base1, 0)) 9722 { 9723 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1)); 9724 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1)); 9725 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0)); 9726 tree diff = build2 (MINUS_EXPR, type, op0, op1); 9727 return fold_build2_loc (loc, PLUS_EXPR, type, 9728 base_offset, 9729 fold_build2_loc (loc, MULT_EXPR, type, 9730 diff, esz)); 9731 } 9732 return NULL_TREE; 9733 } 9734 9735 /* Fold a binary expression of code CODE and type TYPE with operands 9736 OP0 and OP1. LOC is the location of the resulting expression. 9737 Return the folded expression if folding is successful. Otherwise, 9738 return NULL_TREE. */ 9739 9740 tree 9741 fold_binary_loc (location_t loc, 9742 enum tree_code code, tree type, tree op0, tree op1) 9743 { 9744 enum tree_code_class kind = TREE_CODE_CLASS (code); 9745 tree arg0, arg1, tem; 9746 tree t1 = NULL_TREE; 9747 bool strict_overflow_p; 9748 9749 gcc_assert (IS_EXPR_CODE_CLASS (kind) 9750 && TREE_CODE_LENGTH (code) == 2 9751 && op0 != NULL_TREE 9752 && op1 != NULL_TREE); 9753 9754 arg0 = op0; 9755 arg1 = op1; 9756 9757 /* Strip any conversions that don't change the mode. This is 9758 safe for every expression, except for a comparison expression 9759 because its signedness is derived from its operands. So, in 9760 the latter case, only strip conversions that don't change the 9761 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments 9762 preserved. 9763 9764 Note that this is done as an internal manipulation within the 9765 constant folder, in order to find the simplest representation 9766 of the arguments so that their form can be studied. In any 9767 cases, the appropriate type conversions should be put back in 9768 the tree that will get out of the constant folder. */ 9769 9770 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR) 9771 { 9772 STRIP_SIGN_NOPS (arg0); 9773 STRIP_SIGN_NOPS (arg1); 9774 } 9775 else 9776 { 9777 STRIP_NOPS (arg0); 9778 STRIP_NOPS (arg1); 9779 } 9780 9781 /* Note that TREE_CONSTANT isn't enough: static var addresses are 9782 constant but we can't do arithmetic on them. */ 9783 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST) 9784 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST) 9785 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST) 9786 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST) 9787 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST) 9788 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)) 9789 { 9790 if (kind == tcc_binary) 9791 { 9792 /* Make sure type and arg0 have the same saturating flag. */ 9793 gcc_assert (TYPE_SATURATING (type) 9794 == TYPE_SATURATING (TREE_TYPE (arg0))); 9795 tem = const_binop (code, arg0, arg1); 9796 } 9797 else if (kind == tcc_comparison) 9798 tem = fold_relational_const (code, type, arg0, arg1); 9799 else 9800 tem = NULL_TREE; 9801 9802 if (tem != NULL_TREE) 9803 { 9804 if (TREE_TYPE (tem) != type) 9805 tem = fold_convert_loc (loc, type, tem); 9806 return tem; 9807 } 9808 } 9809 9810 /* If this is a commutative operation, and ARG0 is a constant, move it 9811 to ARG1 to reduce the number of tests below. */ 9812 if (commutative_tree_code (code) 9813 && tree_swap_operands_p (arg0, arg1, true)) 9814 return fold_build2_loc (loc, code, type, op1, op0); 9815 9816 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand. 9817 9818 First check for cases where an arithmetic operation is applied to a 9819 compound, conditional, or comparison operation. Push the arithmetic 9820 operation inside the compound or conditional to see if any folding 9821 can then be done. Convert comparison to conditional for this purpose. 9822 The also optimizes non-constant cases that used to be done in 9823 expand_expr. 9824 9825 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR, 9826 one of the operands is a comparison and the other is a comparison, a 9827 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the 9828 code below would make the expression more complex. Change it to a 9829 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to 9830 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */ 9831 9832 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR 9833 || code == EQ_EXPR || code == NE_EXPR) 9834 && ((truth_value_p (TREE_CODE (arg0)) 9835 && (truth_value_p (TREE_CODE (arg1)) 9836 || (TREE_CODE (arg1) == BIT_AND_EXPR 9837 && integer_onep (TREE_OPERAND (arg1, 1))))) 9838 || (truth_value_p (TREE_CODE (arg1)) 9839 && (truth_value_p (TREE_CODE (arg0)) 9840 || (TREE_CODE (arg0) == BIT_AND_EXPR 9841 && integer_onep (TREE_OPERAND (arg0, 1))))))) 9842 { 9843 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR 9844 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR 9845 : TRUTH_XOR_EXPR, 9846 boolean_type_node, 9847 fold_convert_loc (loc, boolean_type_node, arg0), 9848 fold_convert_loc (loc, boolean_type_node, arg1)); 9849 9850 if (code == EQ_EXPR) 9851 tem = invert_truthvalue_loc (loc, tem); 9852 9853 return fold_convert_loc (loc, type, tem); 9854 } 9855 9856 if (TREE_CODE_CLASS (code) == tcc_binary 9857 || TREE_CODE_CLASS (code) == tcc_comparison) 9858 { 9859 if (TREE_CODE (arg0) == COMPOUND_EXPR) 9860 { 9861 tem = fold_build2_loc (loc, code, type, 9862 fold_convert_loc (loc, TREE_TYPE (op0), 9863 TREE_OPERAND (arg0, 1)), op1); 9864 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), 9865 tem); 9866 } 9867 if (TREE_CODE (arg1) == COMPOUND_EXPR 9868 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) 9869 { 9870 tem = fold_build2_loc (loc, code, type, op0, 9871 fold_convert_loc (loc, TREE_TYPE (op1), 9872 TREE_OPERAND (arg1, 1))); 9873 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), 9874 tem); 9875 } 9876 9877 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0)) 9878 { 9879 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1, 9880 arg0, arg1, 9881 /*cond_first_p=*/1); 9882 if (tem != NULL_TREE) 9883 return tem; 9884 } 9885 9886 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1)) 9887 { 9888 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1, 9889 arg1, arg0, 9890 /*cond_first_p=*/0); 9891 if (tem != NULL_TREE) 9892 return tem; 9893 } 9894 } 9895 9896 switch (code) 9897 { 9898 case MEM_REF: 9899 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */ 9900 if (TREE_CODE (arg0) == ADDR_EXPR 9901 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF) 9902 { 9903 tree iref = TREE_OPERAND (arg0, 0); 9904 return fold_build2 (MEM_REF, type, 9905 TREE_OPERAND (iref, 0), 9906 int_const_binop (PLUS_EXPR, arg1, 9907 TREE_OPERAND (iref, 1))); 9908 } 9909 9910 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */ 9911 if (TREE_CODE (arg0) == ADDR_EXPR 9912 && handled_component_p (TREE_OPERAND (arg0, 0))) 9913 { 9914 tree base; 9915 HOST_WIDE_INT coffset; 9916 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0), 9917 &coffset); 9918 if (!base) 9919 return NULL_TREE; 9920 return fold_build2 (MEM_REF, type, 9921 build_fold_addr_expr (base), 9922 int_const_binop (PLUS_EXPR, arg1, 9923 size_int (coffset))); 9924 } 9925 9926 return NULL_TREE; 9927 9928 case POINTER_PLUS_EXPR: 9929 /* 0 +p index -> (type)index */ 9930 if (integer_zerop (arg0)) 9931 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); 9932 9933 /* PTR +p 0 -> PTR */ 9934 if (integer_zerop (arg1)) 9935 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 9936 9937 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */ 9938 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1)) 9939 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))) 9940 return fold_convert_loc (loc, type, 9941 fold_build2_loc (loc, PLUS_EXPR, sizetype, 9942 fold_convert_loc (loc, sizetype, 9943 arg1), 9944 fold_convert_loc (loc, sizetype, 9945 arg0))); 9946 9947 /* (PTR +p B) +p A -> PTR +p (B + A) */ 9948 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR) 9949 { 9950 tree inner; 9951 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1)); 9952 tree arg00 = TREE_OPERAND (arg0, 0); 9953 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype, 9954 arg01, fold_convert_loc (loc, sizetype, arg1)); 9955 return fold_convert_loc (loc, type, 9956 fold_build_pointer_plus_loc (loc, 9957 arg00, inner)); 9958 } 9959 9960 /* PTR_CST +p CST -> CST1 */ 9961 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST) 9962 return fold_build2_loc (loc, PLUS_EXPR, type, arg0, 9963 fold_convert_loc (loc, type, arg1)); 9964 9965 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step 9966 of the array. Loop optimizer sometimes produce this type of 9967 expressions. */ 9968 if (TREE_CODE (arg0) == ADDR_EXPR) 9969 { 9970 tem = try_move_mult_to_index (loc, arg0, 9971 fold_convert_loc (loc, sizetype, arg1)); 9972 if (tem) 9973 return fold_convert_loc (loc, type, tem); 9974 } 9975 9976 return NULL_TREE; 9977 9978 case PLUS_EXPR: 9979 /* A + (-B) -> A - B */ 9980 if (TREE_CODE (arg1) == NEGATE_EXPR) 9981 return fold_build2_loc (loc, MINUS_EXPR, type, 9982 fold_convert_loc (loc, type, arg0), 9983 fold_convert_loc (loc, type, 9984 TREE_OPERAND (arg1, 0))); 9985 /* (-A) + B -> B - A */ 9986 if (TREE_CODE (arg0) == NEGATE_EXPR 9987 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)) 9988 return fold_build2_loc (loc, MINUS_EXPR, type, 9989 fold_convert_loc (loc, type, arg1), 9990 fold_convert_loc (loc, type, 9991 TREE_OPERAND (arg0, 0))); 9992 9993 if (INTEGRAL_TYPE_P (type)) 9994 { 9995 /* Convert ~A + 1 to -A. */ 9996 if (TREE_CODE (arg0) == BIT_NOT_EXPR 9997 && integer_onep (arg1)) 9998 return fold_build1_loc (loc, NEGATE_EXPR, type, 9999 fold_convert_loc (loc, type, 10000 TREE_OPERAND (arg0, 0))); 10001 10002 /* ~X + X is -1. */ 10003 if (TREE_CODE (arg0) == BIT_NOT_EXPR 10004 && !TYPE_OVERFLOW_TRAPS (type)) 10005 { 10006 tree tem = TREE_OPERAND (arg0, 0); 10007 10008 STRIP_NOPS (tem); 10009 if (operand_equal_p (tem, arg1, 0)) 10010 { 10011 t1 = build_int_cst_type (type, -1); 10012 return omit_one_operand_loc (loc, type, t1, arg1); 10013 } 10014 } 10015 10016 /* X + ~X is -1. */ 10017 if (TREE_CODE (arg1) == BIT_NOT_EXPR 10018 && !TYPE_OVERFLOW_TRAPS (type)) 10019 { 10020 tree tem = TREE_OPERAND (arg1, 0); 10021 10022 STRIP_NOPS (tem); 10023 if (operand_equal_p (arg0, tem, 0)) 10024 { 10025 t1 = build_int_cst_type (type, -1); 10026 return omit_one_operand_loc (loc, type, t1, arg0); 10027 } 10028 } 10029 10030 /* X + (X / CST) * -CST is X % CST. */ 10031 if (TREE_CODE (arg1) == MULT_EXPR 10032 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR 10033 && operand_equal_p (arg0, 10034 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)) 10035 { 10036 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1); 10037 tree cst1 = TREE_OPERAND (arg1, 1); 10038 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1), 10039 cst1, cst0); 10040 if (sum && integer_zerop (sum)) 10041 return fold_convert_loc (loc, type, 10042 fold_build2_loc (loc, TRUNC_MOD_EXPR, 10043 TREE_TYPE (arg0), arg0, 10044 cst0)); 10045 } 10046 } 10047 10048 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or 10049 one. Make sure the type is not saturating and has the signedness of 10050 the stripped operands, as fold_plusminus_mult_expr will re-associate. 10051 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */ 10052 if ((TREE_CODE (arg0) == MULT_EXPR 10053 || TREE_CODE (arg1) == MULT_EXPR) 10054 && !TYPE_SATURATING (type) 10055 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0)) 10056 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1)) 10057 && (!FLOAT_TYPE_P (type) || flag_associative_math)) 10058 { 10059 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1); 10060 if (tem) 10061 return tem; 10062 } 10063 10064 if (! FLOAT_TYPE_P (type)) 10065 { 10066 if (integer_zerop (arg1)) 10067 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 10068 10069 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing 10070 with a constant, and the two constants have no bits in common, 10071 we should treat this as a BIT_IOR_EXPR since this may produce more 10072 simplifications. */ 10073 if (TREE_CODE (arg0) == BIT_AND_EXPR 10074 && TREE_CODE (arg1) == BIT_AND_EXPR 10075 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 10076 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST 10077 && integer_zerop (const_binop (BIT_AND_EXPR, 10078 TREE_OPERAND (arg0, 1), 10079 TREE_OPERAND (arg1, 1)))) 10080 { 10081 code = BIT_IOR_EXPR; 10082 goto bit_ior; 10083 } 10084 10085 /* Reassociate (plus (plus (mult) (foo)) (mult)) as 10086 (plus (plus (mult) (mult)) (foo)) so that we can 10087 take advantage of the factoring cases below. */ 10088 if (TYPE_OVERFLOW_WRAPS (type) 10089 && (((TREE_CODE (arg0) == PLUS_EXPR 10090 || TREE_CODE (arg0) == MINUS_EXPR) 10091 && TREE_CODE (arg1) == MULT_EXPR) 10092 || ((TREE_CODE (arg1) == PLUS_EXPR 10093 || TREE_CODE (arg1) == MINUS_EXPR) 10094 && TREE_CODE (arg0) == MULT_EXPR))) 10095 { 10096 tree parg0, parg1, parg, marg; 10097 enum tree_code pcode; 10098 10099 if (TREE_CODE (arg1) == MULT_EXPR) 10100 parg = arg0, marg = arg1; 10101 else 10102 parg = arg1, marg = arg0; 10103 pcode = TREE_CODE (parg); 10104 parg0 = TREE_OPERAND (parg, 0); 10105 parg1 = TREE_OPERAND (parg, 1); 10106 STRIP_NOPS (parg0); 10107 STRIP_NOPS (parg1); 10108 10109 if (TREE_CODE (parg0) == MULT_EXPR 10110 && TREE_CODE (parg1) != MULT_EXPR) 10111 return fold_build2_loc (loc, pcode, type, 10112 fold_build2_loc (loc, PLUS_EXPR, type, 10113 fold_convert_loc (loc, type, 10114 parg0), 10115 fold_convert_loc (loc, type, 10116 marg)), 10117 fold_convert_loc (loc, type, parg1)); 10118 if (TREE_CODE (parg0) != MULT_EXPR 10119 && TREE_CODE (parg1) == MULT_EXPR) 10120 return 10121 fold_build2_loc (loc, PLUS_EXPR, type, 10122 fold_convert_loc (loc, type, parg0), 10123 fold_build2_loc (loc, pcode, type, 10124 fold_convert_loc (loc, type, marg), 10125 fold_convert_loc (loc, type, 10126 parg1))); 10127 } 10128 } 10129 else 10130 { 10131 /* See if ARG1 is zero and X + ARG1 reduces to X. */ 10132 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0)) 10133 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 10134 10135 /* Likewise if the operands are reversed. */ 10136 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0)) 10137 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); 10138 10139 /* Convert X + -C into X - C. */ 10140 if (TREE_CODE (arg1) == REAL_CST 10141 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))) 10142 { 10143 tem = fold_negate_const (arg1, type); 10144 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math) 10145 return fold_build2_loc (loc, MINUS_EXPR, type, 10146 fold_convert_loc (loc, type, arg0), 10147 fold_convert_loc (loc, type, tem)); 10148 } 10149 10150 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y ) 10151 to __complex__ ( x, y ). This is not the same for SNaNs or 10152 if signed zeros are involved. */ 10153 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) 10154 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))) 10155 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))) 10156 { 10157 tree rtype = TREE_TYPE (TREE_TYPE (arg0)); 10158 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0); 10159 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0); 10160 bool arg0rz = false, arg0iz = false; 10161 if ((arg0r && (arg0rz = real_zerop (arg0r))) 10162 || (arg0i && (arg0iz = real_zerop (arg0i)))) 10163 { 10164 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1); 10165 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1); 10166 if (arg0rz && arg1i && real_zerop (arg1i)) 10167 { 10168 tree rp = arg1r ? arg1r 10169 : build1 (REALPART_EXPR, rtype, arg1); 10170 tree ip = arg0i ? arg0i 10171 : build1 (IMAGPART_EXPR, rtype, arg0); 10172 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip); 10173 } 10174 else if (arg0iz && arg1r && real_zerop (arg1r)) 10175 { 10176 tree rp = arg0r ? arg0r 10177 : build1 (REALPART_EXPR, rtype, arg0); 10178 tree ip = arg1i ? arg1i 10179 : build1 (IMAGPART_EXPR, rtype, arg1); 10180 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip); 10181 } 10182 } 10183 } 10184 10185 if (flag_unsafe_math_optimizations 10186 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR) 10187 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR) 10188 && (tem = distribute_real_division (loc, code, type, arg0, arg1))) 10189 return tem; 10190 10191 /* Convert x+x into x*2.0. */ 10192 if (operand_equal_p (arg0, arg1, 0) 10193 && SCALAR_FLOAT_TYPE_P (type)) 10194 return fold_build2_loc (loc, MULT_EXPR, type, arg0, 10195 build_real (type, dconst2)); 10196 10197 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. 10198 We associate floats only if the user has specified 10199 -fassociative-math. */ 10200 if (flag_associative_math 10201 && TREE_CODE (arg1) == PLUS_EXPR 10202 && TREE_CODE (arg0) != MULT_EXPR) 10203 { 10204 tree tree10 = TREE_OPERAND (arg1, 0); 10205 tree tree11 = TREE_OPERAND (arg1, 1); 10206 if (TREE_CODE (tree11) == MULT_EXPR 10207 && TREE_CODE (tree10) == MULT_EXPR) 10208 { 10209 tree tree0; 10210 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10); 10211 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11); 10212 } 10213 } 10214 /* Convert (b*c + d*e) + a into b*c + (d*e +a). 10215 We associate floats only if the user has specified 10216 -fassociative-math. */ 10217 if (flag_associative_math 10218 && TREE_CODE (arg0) == PLUS_EXPR 10219 && TREE_CODE (arg1) != MULT_EXPR) 10220 { 10221 tree tree00 = TREE_OPERAND (arg0, 0); 10222 tree tree01 = TREE_OPERAND (arg0, 1); 10223 if (TREE_CODE (tree01) == MULT_EXPR 10224 && TREE_CODE (tree00) == MULT_EXPR) 10225 { 10226 tree tree0; 10227 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1); 10228 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0); 10229 } 10230 } 10231 } 10232 10233 bit_rotate: 10234 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A 10235 is a rotate of A by C1 bits. */ 10236 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A 10237 is a rotate of A by B bits. */ 10238 { 10239 enum tree_code code0, code1; 10240 tree rtype; 10241 code0 = TREE_CODE (arg0); 10242 code1 = TREE_CODE (arg1); 10243 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR) 10244 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR)) 10245 && operand_equal_p (TREE_OPERAND (arg0, 0), 10246 TREE_OPERAND (arg1, 0), 0) 10247 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)), 10248 TYPE_UNSIGNED (rtype)) 10249 /* Only create rotates in complete modes. Other cases are not 10250 expanded properly. */ 10251 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype))) 10252 { 10253 tree tree01, tree11; 10254 enum tree_code code01, code11; 10255 10256 tree01 = TREE_OPERAND (arg0, 1); 10257 tree11 = TREE_OPERAND (arg1, 1); 10258 STRIP_NOPS (tree01); 10259 STRIP_NOPS (tree11); 10260 code01 = TREE_CODE (tree01); 10261 code11 = TREE_CODE (tree11); 10262 if (code01 == INTEGER_CST 10263 && code11 == INTEGER_CST 10264 && TREE_INT_CST_HIGH (tree01) == 0 10265 && TREE_INT_CST_HIGH (tree11) == 0 10266 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11)) 10267 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0))))) 10268 { 10269 tem = build2_loc (loc, LROTATE_EXPR, 10270 TREE_TYPE (TREE_OPERAND (arg0, 0)), 10271 TREE_OPERAND (arg0, 0), 10272 code0 == LSHIFT_EXPR ? tree01 : tree11); 10273 return fold_convert_loc (loc, type, tem); 10274 } 10275 else if (code11 == MINUS_EXPR) 10276 { 10277 tree tree110, tree111; 10278 tree110 = TREE_OPERAND (tree11, 0); 10279 tree111 = TREE_OPERAND (tree11, 1); 10280 STRIP_NOPS (tree110); 10281 STRIP_NOPS (tree111); 10282 if (TREE_CODE (tree110) == INTEGER_CST 10283 && 0 == compare_tree_int (tree110, 10284 TYPE_PRECISION 10285 (TREE_TYPE (TREE_OPERAND 10286 (arg0, 0)))) 10287 && operand_equal_p (tree01, tree111, 0)) 10288 return 10289 fold_convert_loc (loc, type, 10290 build2 ((code0 == LSHIFT_EXPR 10291 ? LROTATE_EXPR 10292 : RROTATE_EXPR), 10293 TREE_TYPE (TREE_OPERAND (arg0, 0)), 10294 TREE_OPERAND (arg0, 0), tree01)); 10295 } 10296 else if (code01 == MINUS_EXPR) 10297 { 10298 tree tree010, tree011; 10299 tree010 = TREE_OPERAND (tree01, 0); 10300 tree011 = TREE_OPERAND (tree01, 1); 10301 STRIP_NOPS (tree010); 10302 STRIP_NOPS (tree011); 10303 if (TREE_CODE (tree010) == INTEGER_CST 10304 && 0 == compare_tree_int (tree010, 10305 TYPE_PRECISION 10306 (TREE_TYPE (TREE_OPERAND 10307 (arg0, 0)))) 10308 && operand_equal_p (tree11, tree011, 0)) 10309 return fold_convert_loc 10310 (loc, type, 10311 build2 ((code0 != LSHIFT_EXPR 10312 ? LROTATE_EXPR 10313 : RROTATE_EXPR), 10314 TREE_TYPE (TREE_OPERAND (arg0, 0)), 10315 TREE_OPERAND (arg0, 0), tree11)); 10316 } 10317 } 10318 } 10319 10320 associate: 10321 /* In most languages, can't associate operations on floats through 10322 parentheses. Rather than remember where the parentheses were, we 10323 don't associate floats at all, unless the user has specified 10324 -fassociative-math. 10325 And, we need to make sure type is not saturating. */ 10326 10327 if ((! FLOAT_TYPE_P (type) || flag_associative_math) 10328 && !TYPE_SATURATING (type)) 10329 { 10330 tree var0, con0, lit0, minus_lit0; 10331 tree var1, con1, lit1, minus_lit1; 10332 bool ok = true; 10333 10334 /* Split both trees into variables, constants, and literals. Then 10335 associate each group together, the constants with literals, 10336 then the result with variables. This increases the chances of 10337 literals being recombined later and of generating relocatable 10338 expressions for the sum of a constant and literal. */ 10339 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0); 10340 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1, 10341 code == MINUS_EXPR); 10342 10343 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */ 10344 if (code == MINUS_EXPR) 10345 code = PLUS_EXPR; 10346 10347 /* With undefined overflow we can only associate constants with one 10348 variable, and constants whose association doesn't overflow. */ 10349 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED) 10350 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type))) 10351 { 10352 if (var0 && var1) 10353 { 10354 tree tmp0 = var0; 10355 tree tmp1 = var1; 10356 10357 if (TREE_CODE (tmp0) == NEGATE_EXPR) 10358 tmp0 = TREE_OPERAND (tmp0, 0); 10359 if (TREE_CODE (tmp1) == NEGATE_EXPR) 10360 tmp1 = TREE_OPERAND (tmp1, 0); 10361 /* The only case we can still associate with two variables 10362 is if they are the same, modulo negation. */ 10363 if (!operand_equal_p (tmp0, tmp1, 0)) 10364 ok = false; 10365 } 10366 10367 if (ok && lit0 && lit1) 10368 { 10369 tree tmp0 = fold_convert (type, lit0); 10370 tree tmp1 = fold_convert (type, lit1); 10371 10372 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1) 10373 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1))) 10374 ok = false; 10375 } 10376 } 10377 10378 /* Only do something if we found more than two objects. Otherwise, 10379 nothing has changed and we risk infinite recursion. */ 10380 if (ok 10381 && (2 < ((var0 != 0) + (var1 != 0) 10382 + (con0 != 0) + (con1 != 0) 10383 + (lit0 != 0) + (lit1 != 0) 10384 + (minus_lit0 != 0) + (minus_lit1 != 0)))) 10385 { 10386 var0 = associate_trees (loc, var0, var1, code, type); 10387 con0 = associate_trees (loc, con0, con1, code, type); 10388 lit0 = associate_trees (loc, lit0, lit1, code, type); 10389 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type); 10390 10391 /* Preserve the MINUS_EXPR if the negative part of the literal is 10392 greater than the positive part. Otherwise, the multiplicative 10393 folding code (i.e extract_muldiv) may be fooled in case 10394 unsigned constants are subtracted, like in the following 10395 example: ((X*2 + 4) - 8U)/2. */ 10396 if (minus_lit0 && lit0) 10397 { 10398 if (TREE_CODE (lit0) == INTEGER_CST 10399 && TREE_CODE (minus_lit0) == INTEGER_CST 10400 && tree_int_cst_lt (lit0, minus_lit0)) 10401 { 10402 minus_lit0 = associate_trees (loc, minus_lit0, lit0, 10403 MINUS_EXPR, type); 10404 lit0 = 0; 10405 } 10406 else 10407 { 10408 lit0 = associate_trees (loc, lit0, minus_lit0, 10409 MINUS_EXPR, type); 10410 minus_lit0 = 0; 10411 } 10412 } 10413 if (minus_lit0) 10414 { 10415 if (con0 == 0) 10416 return 10417 fold_convert_loc (loc, type, 10418 associate_trees (loc, var0, minus_lit0, 10419 MINUS_EXPR, type)); 10420 else 10421 { 10422 con0 = associate_trees (loc, con0, minus_lit0, 10423 MINUS_EXPR, type); 10424 return 10425 fold_convert_loc (loc, type, 10426 associate_trees (loc, var0, con0, 10427 PLUS_EXPR, type)); 10428 } 10429 } 10430 10431 con0 = associate_trees (loc, con0, lit0, code, type); 10432 return 10433 fold_convert_loc (loc, type, associate_trees (loc, var0, con0, 10434 code, type)); 10435 } 10436 } 10437 10438 return NULL_TREE; 10439 10440 case MINUS_EXPR: 10441 /* Pointer simplifications for subtraction, simple reassociations. */ 10442 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0))) 10443 { 10444 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */ 10445 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR 10446 && TREE_CODE (arg1) == POINTER_PLUS_EXPR) 10447 { 10448 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 10449 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); 10450 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0)); 10451 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1)); 10452 return fold_build2_loc (loc, PLUS_EXPR, type, 10453 fold_build2_loc (loc, MINUS_EXPR, type, 10454 arg00, arg10), 10455 fold_build2_loc (loc, MINUS_EXPR, type, 10456 arg01, arg11)); 10457 } 10458 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */ 10459 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR) 10460 { 10461 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 10462 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); 10463 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00, 10464 fold_convert_loc (loc, type, arg1)); 10465 if (tmp) 10466 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01); 10467 } 10468 } 10469 /* A - (-B) -> A + B */ 10470 if (TREE_CODE (arg1) == NEGATE_EXPR) 10471 return fold_build2_loc (loc, PLUS_EXPR, type, op0, 10472 fold_convert_loc (loc, type, 10473 TREE_OPERAND (arg1, 0))); 10474 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */ 10475 if (TREE_CODE (arg0) == NEGATE_EXPR 10476 && (FLOAT_TYPE_P (type) 10477 || INTEGRAL_TYPE_P (type)) 10478 && negate_expr_p (arg1) 10479 && reorder_operands_p (arg0, arg1)) 10480 return fold_build2_loc (loc, MINUS_EXPR, type, 10481 fold_convert_loc (loc, type, 10482 negate_expr (arg1)), 10483 fold_convert_loc (loc, type, 10484 TREE_OPERAND (arg0, 0))); 10485 /* Convert -A - 1 to ~A. */ 10486 if (INTEGRAL_TYPE_P (type) 10487 && TREE_CODE (arg0) == NEGATE_EXPR 10488 && integer_onep (arg1) 10489 && !TYPE_OVERFLOW_TRAPS (type)) 10490 return fold_build1_loc (loc, BIT_NOT_EXPR, type, 10491 fold_convert_loc (loc, type, 10492 TREE_OPERAND (arg0, 0))); 10493 10494 /* Convert -1 - A to ~A. */ 10495 if (INTEGRAL_TYPE_P (type) 10496 && integer_all_onesp (arg0)) 10497 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1); 10498 10499 10500 /* X - (X / CST) * CST is X % CST. */ 10501 if (INTEGRAL_TYPE_P (type) 10502 && TREE_CODE (arg1) == MULT_EXPR 10503 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR 10504 && operand_equal_p (arg0, 10505 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0) 10506 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1), 10507 TREE_OPERAND (arg1, 1), 0)) 10508 return 10509 fold_convert_loc (loc, type, 10510 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0), 10511 arg0, TREE_OPERAND (arg1, 1))); 10512 10513 if (! FLOAT_TYPE_P (type)) 10514 { 10515 if (integer_zerop (arg0)) 10516 return negate_expr (fold_convert_loc (loc, type, arg1)); 10517 if (integer_zerop (arg1)) 10518 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 10519 10520 /* Fold A - (A & B) into ~B & A. */ 10521 if (!TREE_SIDE_EFFECTS (arg0) 10522 && TREE_CODE (arg1) == BIT_AND_EXPR) 10523 { 10524 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)) 10525 { 10526 tree arg10 = fold_convert_loc (loc, type, 10527 TREE_OPERAND (arg1, 0)); 10528 return fold_build2_loc (loc, BIT_AND_EXPR, type, 10529 fold_build1_loc (loc, BIT_NOT_EXPR, 10530 type, arg10), 10531 fold_convert_loc (loc, type, arg0)); 10532 } 10533 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 10534 { 10535 tree arg11 = fold_convert_loc (loc, 10536 type, TREE_OPERAND (arg1, 1)); 10537 return fold_build2_loc (loc, BIT_AND_EXPR, type, 10538 fold_build1_loc (loc, BIT_NOT_EXPR, 10539 type, arg11), 10540 fold_convert_loc (loc, type, arg0)); 10541 } 10542 } 10543 10544 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is 10545 any power of 2 minus 1. */ 10546 if (TREE_CODE (arg0) == BIT_AND_EXPR 10547 && TREE_CODE (arg1) == BIT_AND_EXPR 10548 && operand_equal_p (TREE_OPERAND (arg0, 0), 10549 TREE_OPERAND (arg1, 0), 0)) 10550 { 10551 tree mask0 = TREE_OPERAND (arg0, 1); 10552 tree mask1 = TREE_OPERAND (arg1, 1); 10553 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0); 10554 10555 if (operand_equal_p (tem, mask1, 0)) 10556 { 10557 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type, 10558 TREE_OPERAND (arg0, 0), mask1); 10559 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1); 10560 } 10561 } 10562 } 10563 10564 /* See if ARG1 is zero and X - ARG1 reduces to X. */ 10565 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1)) 10566 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 10567 10568 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether 10569 ARG0 is zero and X + ARG0 reduces to X, since that would mean 10570 (-ARG1 + ARG0) reduces to -ARG1. */ 10571 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0)) 10572 return negate_expr (fold_convert_loc (loc, type, arg1)); 10573 10574 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to 10575 __complex__ ( x, -y ). This is not the same for SNaNs or if 10576 signed zeros are involved. */ 10577 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) 10578 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))) 10579 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))) 10580 { 10581 tree rtype = TREE_TYPE (TREE_TYPE (arg0)); 10582 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0); 10583 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0); 10584 bool arg0rz = false, arg0iz = false; 10585 if ((arg0r && (arg0rz = real_zerop (arg0r))) 10586 || (arg0i && (arg0iz = real_zerop (arg0i)))) 10587 { 10588 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1); 10589 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1); 10590 if (arg0rz && arg1i && real_zerop (arg1i)) 10591 { 10592 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype, 10593 arg1r ? arg1r 10594 : build1 (REALPART_EXPR, rtype, arg1)); 10595 tree ip = arg0i ? arg0i 10596 : build1 (IMAGPART_EXPR, rtype, arg0); 10597 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip); 10598 } 10599 else if (arg0iz && arg1r && real_zerop (arg1r)) 10600 { 10601 tree rp = arg0r ? arg0r 10602 : build1 (REALPART_EXPR, rtype, arg0); 10603 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype, 10604 arg1i ? arg1i 10605 : build1 (IMAGPART_EXPR, rtype, arg1)); 10606 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip); 10607 } 10608 } 10609 } 10610 10611 /* Fold &x - &x. This can happen from &x.foo - &x. 10612 This is unsafe for certain floats even in non-IEEE formats. 10613 In IEEE, it is unsafe because it does wrong for NaNs. 10614 Also note that operand_equal_p is always false if an operand 10615 is volatile. */ 10616 10617 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type))) 10618 && operand_equal_p (arg0, arg1, 0)) 10619 return build_zero_cst (type); 10620 10621 /* A - B -> A + (-B) if B is easily negatable. */ 10622 if (negate_expr_p (arg1) 10623 && ((FLOAT_TYPE_P (type) 10624 /* Avoid this transformation if B is a positive REAL_CST. */ 10625 && (TREE_CODE (arg1) != REAL_CST 10626 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))) 10627 || INTEGRAL_TYPE_P (type))) 10628 return fold_build2_loc (loc, PLUS_EXPR, type, 10629 fold_convert_loc (loc, type, arg0), 10630 fold_convert_loc (loc, type, 10631 negate_expr (arg1))); 10632 10633 /* Try folding difference of addresses. */ 10634 { 10635 HOST_WIDE_INT diff; 10636 10637 if ((TREE_CODE (arg0) == ADDR_EXPR 10638 || TREE_CODE (arg1) == ADDR_EXPR) 10639 && ptr_difference_const (arg0, arg1, &diff)) 10640 return build_int_cst_type (type, diff); 10641 } 10642 10643 /* Fold &a[i] - &a[j] to i-j. */ 10644 if (TREE_CODE (arg0) == ADDR_EXPR 10645 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF 10646 && TREE_CODE (arg1) == ADDR_EXPR 10647 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF) 10648 { 10649 tree tem = fold_addr_of_array_ref_difference (loc, type, 10650 TREE_OPERAND (arg0, 0), 10651 TREE_OPERAND (arg1, 0)); 10652 if (tem) 10653 return tem; 10654 } 10655 10656 if (FLOAT_TYPE_P (type) 10657 && flag_unsafe_math_optimizations 10658 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR) 10659 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR) 10660 && (tem = distribute_real_division (loc, code, type, arg0, arg1))) 10661 return tem; 10662 10663 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or 10664 one. Make sure the type is not saturating and has the signedness of 10665 the stripped operands, as fold_plusminus_mult_expr will re-associate. 10666 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */ 10667 if ((TREE_CODE (arg0) == MULT_EXPR 10668 || TREE_CODE (arg1) == MULT_EXPR) 10669 && !TYPE_SATURATING (type) 10670 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0)) 10671 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1)) 10672 && (!FLOAT_TYPE_P (type) || flag_associative_math)) 10673 { 10674 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1); 10675 if (tem) 10676 return tem; 10677 } 10678 10679 goto associate; 10680 10681 case MULT_EXPR: 10682 /* (-A) * (-B) -> A * B */ 10683 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1)) 10684 return fold_build2_loc (loc, MULT_EXPR, type, 10685 fold_convert_loc (loc, type, 10686 TREE_OPERAND (arg0, 0)), 10687 fold_convert_loc (loc, type, 10688 negate_expr (arg1))); 10689 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0)) 10690 return fold_build2_loc (loc, MULT_EXPR, type, 10691 fold_convert_loc (loc, type, 10692 negate_expr (arg0)), 10693 fold_convert_loc (loc, type, 10694 TREE_OPERAND (arg1, 0))); 10695 10696 if (! FLOAT_TYPE_P (type)) 10697 { 10698 if (integer_zerop (arg1)) 10699 return omit_one_operand_loc (loc, type, arg1, arg0); 10700 if (integer_onep (arg1)) 10701 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 10702 /* Transform x * -1 into -x. Make sure to do the negation 10703 on the original operand with conversions not stripped 10704 because we can only strip non-sign-changing conversions. */ 10705 if (integer_all_onesp (arg1)) 10706 return fold_convert_loc (loc, type, negate_expr (op0)); 10707 /* Transform x * -C into -x * C if x is easily negatable. */ 10708 if (TREE_CODE (arg1) == INTEGER_CST 10709 && tree_int_cst_sgn (arg1) == -1 10710 && negate_expr_p (arg0) 10711 && (tem = negate_expr (arg1)) != arg1 10712 && !TREE_OVERFLOW (tem)) 10713 return fold_build2_loc (loc, MULT_EXPR, type, 10714 fold_convert_loc (loc, type, 10715 negate_expr (arg0)), 10716 tem); 10717 10718 /* (a * (1 << b)) is (a << b) */ 10719 if (TREE_CODE (arg1) == LSHIFT_EXPR 10720 && integer_onep (TREE_OPERAND (arg1, 0))) 10721 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0, 10722 TREE_OPERAND (arg1, 1)); 10723 if (TREE_CODE (arg0) == LSHIFT_EXPR 10724 && integer_onep (TREE_OPERAND (arg0, 0))) 10725 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1, 10726 TREE_OPERAND (arg0, 1)); 10727 10728 /* (A + A) * C -> A * 2 * C */ 10729 if (TREE_CODE (arg0) == PLUS_EXPR 10730 && TREE_CODE (arg1) == INTEGER_CST 10731 && operand_equal_p (TREE_OPERAND (arg0, 0), 10732 TREE_OPERAND (arg0, 1), 0)) 10733 return fold_build2_loc (loc, MULT_EXPR, type, 10734 omit_one_operand_loc (loc, type, 10735 TREE_OPERAND (arg0, 0), 10736 TREE_OPERAND (arg0, 1)), 10737 fold_build2_loc (loc, MULT_EXPR, type, 10738 build_int_cst (type, 2) , arg1)); 10739 10740 strict_overflow_p = false; 10741 if (TREE_CODE (arg1) == INTEGER_CST 10742 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE, 10743 &strict_overflow_p))) 10744 { 10745 if (strict_overflow_p) 10746 fold_overflow_warning (("assuming signed overflow does not " 10747 "occur when simplifying " 10748 "multiplication"), 10749 WARN_STRICT_OVERFLOW_MISC); 10750 return fold_convert_loc (loc, type, tem); 10751 } 10752 10753 /* Optimize z * conj(z) for integer complex numbers. */ 10754 if (TREE_CODE (arg0) == CONJ_EXPR 10755 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 10756 return fold_mult_zconjz (loc, type, arg1); 10757 if (TREE_CODE (arg1) == CONJ_EXPR 10758 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 10759 return fold_mult_zconjz (loc, type, arg0); 10760 } 10761 else 10762 { 10763 /* Maybe fold x * 0 to 0. The expressions aren't the same 10764 when x is NaN, since x * 0 is also NaN. Nor are they the 10765 same in modes with signed zeros, since multiplying a 10766 negative value by 0 gives -0, not +0. */ 10767 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) 10768 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))) 10769 && real_zerop (arg1)) 10770 return omit_one_operand_loc (loc, type, arg1, arg0); 10771 /* In IEEE floating point, x*1 is not equivalent to x for snans. 10772 Likewise for complex arithmetic with signed zeros. */ 10773 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) 10774 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))) 10775 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))) 10776 && real_onep (arg1)) 10777 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 10778 10779 /* Transform x * -1.0 into -x. */ 10780 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) 10781 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))) 10782 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))) 10783 && real_minus_onep (arg1)) 10784 return fold_convert_loc (loc, type, negate_expr (arg0)); 10785 10786 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change 10787 the result for floating point types due to rounding so it is applied 10788 only if -fassociative-math was specify. */ 10789 if (flag_associative_math 10790 && TREE_CODE (arg0) == RDIV_EXPR 10791 && TREE_CODE (arg1) == REAL_CST 10792 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST) 10793 { 10794 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0), 10795 arg1); 10796 if (tem) 10797 return fold_build2_loc (loc, RDIV_EXPR, type, tem, 10798 TREE_OPERAND (arg0, 1)); 10799 } 10800 10801 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */ 10802 if (operand_equal_p (arg0, arg1, 0)) 10803 { 10804 tree tem = fold_strip_sign_ops (arg0); 10805 if (tem != NULL_TREE) 10806 { 10807 tem = fold_convert_loc (loc, type, tem); 10808 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem); 10809 } 10810 } 10811 10812 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z). 10813 This is not the same for NaNs or if signed zeros are 10814 involved. */ 10815 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) 10816 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))) 10817 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)) 10818 && TREE_CODE (arg1) == COMPLEX_CST 10819 && real_zerop (TREE_REALPART (arg1))) 10820 { 10821 tree rtype = TREE_TYPE (TREE_TYPE (arg0)); 10822 if (real_onep (TREE_IMAGPART (arg1))) 10823 return 10824 fold_build2_loc (loc, COMPLEX_EXPR, type, 10825 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR, 10826 rtype, arg0)), 10827 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0)); 10828 else if (real_minus_onep (TREE_IMAGPART (arg1))) 10829 return 10830 fold_build2_loc (loc, COMPLEX_EXPR, type, 10831 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0), 10832 negate_expr (fold_build1_loc (loc, REALPART_EXPR, 10833 rtype, arg0))); 10834 } 10835 10836 /* Optimize z * conj(z) for floating point complex numbers. 10837 Guarded by flag_unsafe_math_optimizations as non-finite 10838 imaginary components don't produce scalar results. */ 10839 if (flag_unsafe_math_optimizations 10840 && TREE_CODE (arg0) == CONJ_EXPR 10841 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 10842 return fold_mult_zconjz (loc, type, arg1); 10843 if (flag_unsafe_math_optimizations 10844 && TREE_CODE (arg1) == CONJ_EXPR 10845 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 10846 return fold_mult_zconjz (loc, type, arg0); 10847 10848 if (flag_unsafe_math_optimizations) 10849 { 10850 enum built_in_function fcode0 = builtin_mathfn_code (arg0); 10851 enum built_in_function fcode1 = builtin_mathfn_code (arg1); 10852 10853 /* Optimizations of root(...)*root(...). */ 10854 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0)) 10855 { 10856 tree rootfn, arg; 10857 tree arg00 = CALL_EXPR_ARG (arg0, 0); 10858 tree arg10 = CALL_EXPR_ARG (arg1, 0); 10859 10860 /* Optimize sqrt(x)*sqrt(x) as x. */ 10861 if (BUILTIN_SQRT_P (fcode0) 10862 && operand_equal_p (arg00, arg10, 0) 10863 && ! HONOR_SNANS (TYPE_MODE (type))) 10864 return arg00; 10865 10866 /* Optimize root(x)*root(y) as root(x*y). */ 10867 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); 10868 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10); 10869 return build_call_expr_loc (loc, rootfn, 1, arg); 10870 } 10871 10872 /* Optimize expN(x)*expN(y) as expN(x+y). */ 10873 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0)) 10874 { 10875 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); 10876 tree arg = fold_build2_loc (loc, PLUS_EXPR, type, 10877 CALL_EXPR_ARG (arg0, 0), 10878 CALL_EXPR_ARG (arg1, 0)); 10879 return build_call_expr_loc (loc, expfn, 1, arg); 10880 } 10881 10882 /* Optimizations of pow(...)*pow(...). */ 10883 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW) 10884 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF) 10885 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL)) 10886 { 10887 tree arg00 = CALL_EXPR_ARG (arg0, 0); 10888 tree arg01 = CALL_EXPR_ARG (arg0, 1); 10889 tree arg10 = CALL_EXPR_ARG (arg1, 0); 10890 tree arg11 = CALL_EXPR_ARG (arg1, 1); 10891 10892 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */ 10893 if (operand_equal_p (arg01, arg11, 0)) 10894 { 10895 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); 10896 tree arg = fold_build2_loc (loc, MULT_EXPR, type, 10897 arg00, arg10); 10898 return build_call_expr_loc (loc, powfn, 2, arg, arg01); 10899 } 10900 10901 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */ 10902 if (operand_equal_p (arg00, arg10, 0)) 10903 { 10904 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); 10905 tree arg = fold_build2_loc (loc, PLUS_EXPR, type, 10906 arg01, arg11); 10907 return build_call_expr_loc (loc, powfn, 2, arg00, arg); 10908 } 10909 } 10910 10911 /* Optimize tan(x)*cos(x) as sin(x). */ 10912 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS) 10913 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF) 10914 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL) 10915 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN) 10916 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF) 10917 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL)) 10918 && operand_equal_p (CALL_EXPR_ARG (arg0, 0), 10919 CALL_EXPR_ARG (arg1, 0), 0)) 10920 { 10921 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN); 10922 10923 if (sinfn != NULL_TREE) 10924 return build_call_expr_loc (loc, sinfn, 1, 10925 CALL_EXPR_ARG (arg0, 0)); 10926 } 10927 10928 /* Optimize x*pow(x,c) as pow(x,c+1). */ 10929 if (fcode1 == BUILT_IN_POW 10930 || fcode1 == BUILT_IN_POWF 10931 || fcode1 == BUILT_IN_POWL) 10932 { 10933 tree arg10 = CALL_EXPR_ARG (arg1, 0); 10934 tree arg11 = CALL_EXPR_ARG (arg1, 1); 10935 if (TREE_CODE (arg11) == REAL_CST 10936 && !TREE_OVERFLOW (arg11) 10937 && operand_equal_p (arg0, arg10, 0)) 10938 { 10939 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0); 10940 REAL_VALUE_TYPE c; 10941 tree arg; 10942 10943 c = TREE_REAL_CST (arg11); 10944 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1); 10945 arg = build_real (type, c); 10946 return build_call_expr_loc (loc, powfn, 2, arg0, arg); 10947 } 10948 } 10949 10950 /* Optimize pow(x,c)*x as pow(x,c+1). */ 10951 if (fcode0 == BUILT_IN_POW 10952 || fcode0 == BUILT_IN_POWF 10953 || fcode0 == BUILT_IN_POWL) 10954 { 10955 tree arg00 = CALL_EXPR_ARG (arg0, 0); 10956 tree arg01 = CALL_EXPR_ARG (arg0, 1); 10957 if (TREE_CODE (arg01) == REAL_CST 10958 && !TREE_OVERFLOW (arg01) 10959 && operand_equal_p (arg1, arg00, 0)) 10960 { 10961 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); 10962 REAL_VALUE_TYPE c; 10963 tree arg; 10964 10965 c = TREE_REAL_CST (arg01); 10966 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1); 10967 arg = build_real (type, c); 10968 return build_call_expr_loc (loc, powfn, 2, arg1, arg); 10969 } 10970 } 10971 10972 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */ 10973 if (!in_gimple_form 10974 && optimize 10975 && operand_equal_p (arg0, arg1, 0)) 10976 { 10977 tree powfn = mathfn_built_in (type, BUILT_IN_POW); 10978 10979 if (powfn) 10980 { 10981 tree arg = build_real (type, dconst2); 10982 return build_call_expr_loc (loc, powfn, 2, arg0, arg); 10983 } 10984 } 10985 } 10986 } 10987 goto associate; 10988 10989 case BIT_IOR_EXPR: 10990 bit_ior: 10991 if (integer_all_onesp (arg1)) 10992 return omit_one_operand_loc (loc, type, arg1, arg0); 10993 if (integer_zerop (arg1)) 10994 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 10995 if (operand_equal_p (arg0, arg1, 0)) 10996 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 10997 10998 /* ~X | X is -1. */ 10999 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11000 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 11001 { 11002 t1 = build_zero_cst (type); 11003 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1); 11004 return omit_one_operand_loc (loc, type, t1, arg1); 11005 } 11006 11007 /* X | ~X is -1. */ 11008 if (TREE_CODE (arg1) == BIT_NOT_EXPR 11009 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 11010 { 11011 t1 = build_zero_cst (type); 11012 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1); 11013 return omit_one_operand_loc (loc, type, t1, arg0); 11014 } 11015 11016 /* Canonicalize (X & C1) | C2. */ 11017 if (TREE_CODE (arg0) == BIT_AND_EXPR 11018 && TREE_CODE (arg1) == INTEGER_CST 11019 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 11020 { 11021 double_int c1, c2, c3, msk; 11022 int width = TYPE_PRECISION (type), w; 11023 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1)); 11024 c2 = tree_to_double_int (arg1); 11025 11026 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */ 11027 if (double_int_equal_p (double_int_and (c1, c2), c1)) 11028 return omit_one_operand_loc (loc, type, arg1, 11029 TREE_OPERAND (arg0, 0)); 11030 11031 msk = double_int_mask (width); 11032 11033 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */ 11034 if (double_int_zero_p (double_int_and_not (msk, 11035 double_int_ior (c1, c2)))) 11036 return fold_build2_loc (loc, BIT_IOR_EXPR, type, 11037 TREE_OPERAND (arg0, 0), arg1); 11038 11039 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2, 11040 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some 11041 mode which allows further optimizations. */ 11042 c1 = double_int_and (c1, msk); 11043 c2 = double_int_and (c2, msk); 11044 c3 = double_int_and_not (c1, c2); 11045 for (w = BITS_PER_UNIT; 11046 w <= width && w <= HOST_BITS_PER_WIDE_INT; 11047 w <<= 1) 11048 { 11049 unsigned HOST_WIDE_INT mask 11050 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w); 11051 if (((c1.low | c2.low) & mask) == mask 11052 && (c1.low & ~mask) == 0 && c1.high == 0) 11053 { 11054 c3 = uhwi_to_double_int (mask); 11055 break; 11056 } 11057 } 11058 if (!double_int_equal_p (c3, c1)) 11059 return fold_build2_loc (loc, BIT_IOR_EXPR, type, 11060 fold_build2_loc (loc, BIT_AND_EXPR, type, 11061 TREE_OPERAND (arg0, 0), 11062 double_int_to_tree (type, 11063 c3)), 11064 arg1); 11065 } 11066 11067 /* (X & Y) | Y is (X, Y). */ 11068 if (TREE_CODE (arg0) == BIT_AND_EXPR 11069 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 11070 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0)); 11071 /* (X & Y) | X is (Y, X). */ 11072 if (TREE_CODE (arg0) == BIT_AND_EXPR 11073 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 11074 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) 11075 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1)); 11076 /* X | (X & Y) is (Y, X). */ 11077 if (TREE_CODE (arg1) == BIT_AND_EXPR 11078 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0) 11079 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1))) 11080 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1)); 11081 /* X | (Y & X) is (Y, X). */ 11082 if (TREE_CODE (arg1) == BIT_AND_EXPR 11083 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0) 11084 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) 11085 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0)); 11086 11087 /* (X & ~Y) | (~X & Y) is X ^ Y */ 11088 if (TREE_CODE (arg0) == BIT_AND_EXPR 11089 && TREE_CODE (arg1) == BIT_AND_EXPR) 11090 { 11091 tree a0, a1, l0, l1, n0, n1; 11092 11093 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0)); 11094 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1)); 11095 11096 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 11097 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); 11098 11099 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0); 11100 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1); 11101 11102 if ((operand_equal_p (n0, a0, 0) 11103 && operand_equal_p (n1, a1, 0)) 11104 || (operand_equal_p (n0, a1, 0) 11105 && operand_equal_p (n1, a0, 0))) 11106 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1); 11107 } 11108 11109 t1 = distribute_bit_expr (loc, code, type, arg0, arg1); 11110 if (t1 != NULL_TREE) 11111 return t1; 11112 11113 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))). 11114 11115 This results in more efficient code for machines without a NAND 11116 instruction. Combine will canonicalize to the first form 11117 which will allow use of NAND instructions provided by the 11118 backend if they exist. */ 11119 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11120 && TREE_CODE (arg1) == BIT_NOT_EXPR) 11121 { 11122 return 11123 fold_build1_loc (loc, BIT_NOT_EXPR, type, 11124 build2 (BIT_AND_EXPR, type, 11125 fold_convert_loc (loc, type, 11126 TREE_OPERAND (arg0, 0)), 11127 fold_convert_loc (loc, type, 11128 TREE_OPERAND (arg1, 0)))); 11129 } 11130 11131 /* See if this can be simplified into a rotate first. If that 11132 is unsuccessful continue in the association code. */ 11133 goto bit_rotate; 11134 11135 case BIT_XOR_EXPR: 11136 if (integer_zerop (arg1)) 11137 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 11138 if (integer_all_onesp (arg1)) 11139 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0); 11140 if (operand_equal_p (arg0, arg1, 0)) 11141 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 11142 11143 /* ~X ^ X is -1. */ 11144 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11145 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 11146 { 11147 t1 = build_zero_cst (type); 11148 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1); 11149 return omit_one_operand_loc (loc, type, t1, arg1); 11150 } 11151 11152 /* X ^ ~X is -1. */ 11153 if (TREE_CODE (arg1) == BIT_NOT_EXPR 11154 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 11155 { 11156 t1 = build_zero_cst (type); 11157 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1); 11158 return omit_one_operand_loc (loc, type, t1, arg0); 11159 } 11160 11161 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing 11162 with a constant, and the two constants have no bits in common, 11163 we should treat this as a BIT_IOR_EXPR since this may produce more 11164 simplifications. */ 11165 if (TREE_CODE (arg0) == BIT_AND_EXPR 11166 && TREE_CODE (arg1) == BIT_AND_EXPR 11167 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 11168 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST 11169 && integer_zerop (const_binop (BIT_AND_EXPR, 11170 TREE_OPERAND (arg0, 1), 11171 TREE_OPERAND (arg1, 1)))) 11172 { 11173 code = BIT_IOR_EXPR; 11174 goto bit_ior; 11175 } 11176 11177 /* (X | Y) ^ X -> Y & ~ X*/ 11178 if (TREE_CODE (arg0) == BIT_IOR_EXPR 11179 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 11180 { 11181 tree t2 = TREE_OPERAND (arg0, 1); 11182 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), 11183 arg1); 11184 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type, 11185 fold_convert_loc (loc, type, t2), 11186 fold_convert_loc (loc, type, t1)); 11187 return t1; 11188 } 11189 11190 /* (Y | X) ^ X -> Y & ~ X*/ 11191 if (TREE_CODE (arg0) == BIT_IOR_EXPR 11192 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 11193 { 11194 tree t2 = TREE_OPERAND (arg0, 0); 11195 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), 11196 arg1); 11197 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type, 11198 fold_convert_loc (loc, type, t2), 11199 fold_convert_loc (loc, type, t1)); 11200 return t1; 11201 } 11202 11203 /* X ^ (X | Y) -> Y & ~ X*/ 11204 if (TREE_CODE (arg1) == BIT_IOR_EXPR 11205 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0)) 11206 { 11207 tree t2 = TREE_OPERAND (arg1, 1); 11208 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0), 11209 arg0); 11210 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type, 11211 fold_convert_loc (loc, type, t2), 11212 fold_convert_loc (loc, type, t1)); 11213 return t1; 11214 } 11215 11216 /* X ^ (Y | X) -> Y & ~ X*/ 11217 if (TREE_CODE (arg1) == BIT_IOR_EXPR 11218 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0)) 11219 { 11220 tree t2 = TREE_OPERAND (arg1, 0); 11221 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0), 11222 arg0); 11223 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type, 11224 fold_convert_loc (loc, type, t2), 11225 fold_convert_loc (loc, type, t1)); 11226 return t1; 11227 } 11228 11229 /* Convert ~X ^ ~Y to X ^ Y. */ 11230 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11231 && TREE_CODE (arg1) == BIT_NOT_EXPR) 11232 return fold_build2_loc (loc, code, type, 11233 fold_convert_loc (loc, type, 11234 TREE_OPERAND (arg0, 0)), 11235 fold_convert_loc (loc, type, 11236 TREE_OPERAND (arg1, 0))); 11237 11238 /* Convert ~X ^ C to X ^ ~C. */ 11239 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11240 && TREE_CODE (arg1) == INTEGER_CST) 11241 return fold_build2_loc (loc, code, type, 11242 fold_convert_loc (loc, type, 11243 TREE_OPERAND (arg0, 0)), 11244 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1)); 11245 11246 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */ 11247 if (TREE_CODE (arg0) == BIT_AND_EXPR 11248 && integer_onep (TREE_OPERAND (arg0, 1)) 11249 && integer_onep (arg1)) 11250 return fold_build2_loc (loc, EQ_EXPR, type, arg0, 11251 build_int_cst (TREE_TYPE (arg0), 0)); 11252 11253 /* Fold (X & Y) ^ Y as ~X & Y. */ 11254 if (TREE_CODE (arg0) == BIT_AND_EXPR 11255 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 11256 { 11257 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 11258 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11259 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), 11260 fold_convert_loc (loc, type, arg1)); 11261 } 11262 /* Fold (X & Y) ^ X as ~Y & X. */ 11263 if (TREE_CODE (arg0) == BIT_AND_EXPR 11264 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 11265 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) 11266 { 11267 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); 11268 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11269 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), 11270 fold_convert_loc (loc, type, arg1)); 11271 } 11272 /* Fold X ^ (X & Y) as X & ~Y. */ 11273 if (TREE_CODE (arg1) == BIT_AND_EXPR 11274 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 11275 { 11276 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1)); 11277 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11278 fold_convert_loc (loc, type, arg0), 11279 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem)); 11280 } 11281 /* Fold X ^ (Y & X) as ~Y & X. */ 11282 if (TREE_CODE (arg1) == BIT_AND_EXPR 11283 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0) 11284 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) 11285 { 11286 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0)); 11287 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11288 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), 11289 fold_convert_loc (loc, type, arg0)); 11290 } 11291 11292 /* See if this can be simplified into a rotate first. If that 11293 is unsuccessful continue in the association code. */ 11294 goto bit_rotate; 11295 11296 case BIT_AND_EXPR: 11297 if (integer_all_onesp (arg1)) 11298 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 11299 if (integer_zerop (arg1)) 11300 return omit_one_operand_loc (loc, type, arg1, arg0); 11301 if (operand_equal_p (arg0, arg1, 0)) 11302 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 11303 11304 /* ~X & X, (X == 0) & X, and !X & X are always zero. */ 11305 if ((TREE_CODE (arg0) == BIT_NOT_EXPR 11306 || TREE_CODE (arg0) == TRUTH_NOT_EXPR 11307 || (TREE_CODE (arg0) == EQ_EXPR 11308 && integer_zerop (TREE_OPERAND (arg0, 1)))) 11309 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 11310 return omit_one_operand_loc (loc, type, integer_zero_node, arg1); 11311 11312 /* X & ~X , X & (X == 0), and X & !X are always zero. */ 11313 if ((TREE_CODE (arg1) == BIT_NOT_EXPR 11314 || TREE_CODE (arg1) == TRUTH_NOT_EXPR 11315 || (TREE_CODE (arg1) == EQ_EXPR 11316 && integer_zerop (TREE_OPERAND (arg1, 1)))) 11317 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 11318 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 11319 11320 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */ 11321 if (TREE_CODE (arg0) == BIT_IOR_EXPR 11322 && TREE_CODE (arg1) == INTEGER_CST 11323 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 11324 { 11325 tree tmp1 = fold_convert_loc (loc, type, arg1); 11326 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 11327 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); 11328 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1); 11329 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1); 11330 return 11331 fold_convert_loc (loc, type, 11332 fold_build2_loc (loc, BIT_IOR_EXPR, 11333 type, tmp2, tmp3)); 11334 } 11335 11336 /* (X | Y) & Y is (X, Y). */ 11337 if (TREE_CODE (arg0) == BIT_IOR_EXPR 11338 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 11339 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0)); 11340 /* (X | Y) & X is (Y, X). */ 11341 if (TREE_CODE (arg0) == BIT_IOR_EXPR 11342 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 11343 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) 11344 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1)); 11345 /* X & (X | Y) is (Y, X). */ 11346 if (TREE_CODE (arg1) == BIT_IOR_EXPR 11347 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0) 11348 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1))) 11349 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1)); 11350 /* X & (Y | X) is (Y, X). */ 11351 if (TREE_CODE (arg1) == BIT_IOR_EXPR 11352 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0) 11353 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) 11354 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0)); 11355 11356 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */ 11357 if (TREE_CODE (arg0) == BIT_XOR_EXPR 11358 && integer_onep (TREE_OPERAND (arg0, 1)) 11359 && integer_onep (arg1)) 11360 { 11361 tem = TREE_OPERAND (arg0, 0); 11362 return fold_build2_loc (loc, EQ_EXPR, type, 11363 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem, 11364 build_int_cst (TREE_TYPE (tem), 1)), 11365 build_int_cst (TREE_TYPE (tem), 0)); 11366 } 11367 /* Fold ~X & 1 as (X & 1) == 0. */ 11368 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11369 && integer_onep (arg1)) 11370 { 11371 tem = TREE_OPERAND (arg0, 0); 11372 return fold_build2_loc (loc, EQ_EXPR, type, 11373 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem, 11374 build_int_cst (TREE_TYPE (tem), 1)), 11375 build_int_cst (TREE_TYPE (tem), 0)); 11376 } 11377 /* Fold !X & 1 as X == 0. */ 11378 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR 11379 && integer_onep (arg1)) 11380 { 11381 tem = TREE_OPERAND (arg0, 0); 11382 return fold_build2_loc (loc, EQ_EXPR, type, tem, 11383 build_int_cst (TREE_TYPE (tem), 0)); 11384 } 11385 11386 /* Fold (X ^ Y) & Y as ~X & Y. */ 11387 if (TREE_CODE (arg0) == BIT_XOR_EXPR 11388 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 11389 { 11390 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 11391 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11392 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), 11393 fold_convert_loc (loc, type, arg1)); 11394 } 11395 /* Fold (X ^ Y) & X as ~Y & X. */ 11396 if (TREE_CODE (arg0) == BIT_XOR_EXPR 11397 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 11398 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) 11399 { 11400 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); 11401 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11402 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), 11403 fold_convert_loc (loc, type, arg1)); 11404 } 11405 /* Fold X & (X ^ Y) as X & ~Y. */ 11406 if (TREE_CODE (arg1) == BIT_XOR_EXPR 11407 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 11408 { 11409 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1)); 11410 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11411 fold_convert_loc (loc, type, arg0), 11412 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem)); 11413 } 11414 /* Fold X & (Y ^ X) as ~Y & X. */ 11415 if (TREE_CODE (arg1) == BIT_XOR_EXPR 11416 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0) 11417 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) 11418 { 11419 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0)); 11420 return fold_build2_loc (loc, BIT_AND_EXPR, type, 11421 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), 11422 fold_convert_loc (loc, type, arg0)); 11423 } 11424 11425 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M, 11426 ((A & N) + B) & M -> (A + B) & M 11427 Similarly if (N & M) == 0, 11428 ((A | N) + B) & M -> (A + B) & M 11429 and for - instead of + (or unary - instead of +) 11430 and/or ^ instead of |. 11431 If B is constant and (B & M) == 0, fold into A & M. */ 11432 if (host_integerp (arg1, 1)) 11433 { 11434 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1); 11435 if (~cst1 && (cst1 & (cst1 + 1)) == 0 11436 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)) 11437 && (TREE_CODE (arg0) == PLUS_EXPR 11438 || TREE_CODE (arg0) == MINUS_EXPR 11439 || TREE_CODE (arg0) == NEGATE_EXPR) 11440 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)) 11441 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE)) 11442 { 11443 tree pmop[2]; 11444 int which = 0; 11445 unsigned HOST_WIDE_INT cst0; 11446 11447 /* Now we know that arg0 is (C + D) or (C - D) or 11448 -C and arg1 (M) is == (1LL << cst) - 1. 11449 Store C into PMOP[0] and D into PMOP[1]. */ 11450 pmop[0] = TREE_OPERAND (arg0, 0); 11451 pmop[1] = NULL; 11452 if (TREE_CODE (arg0) != NEGATE_EXPR) 11453 { 11454 pmop[1] = TREE_OPERAND (arg0, 1); 11455 which = 1; 11456 } 11457 11458 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1) 11459 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1) 11460 & cst1) != cst1) 11461 which = -1; 11462 11463 for (; which >= 0; which--) 11464 switch (TREE_CODE (pmop[which])) 11465 { 11466 case BIT_AND_EXPR: 11467 case BIT_IOR_EXPR: 11468 case BIT_XOR_EXPR: 11469 if (TREE_CODE (TREE_OPERAND (pmop[which], 1)) 11470 != INTEGER_CST) 11471 break; 11472 /* tree_low_cst not used, because we don't care about 11473 the upper bits. */ 11474 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1)); 11475 cst0 &= cst1; 11476 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR) 11477 { 11478 if (cst0 != cst1) 11479 break; 11480 } 11481 else if (cst0 != 0) 11482 break; 11483 /* If C or D is of the form (A & N) where 11484 (N & M) == M, or of the form (A | N) or 11485 (A ^ N) where (N & M) == 0, replace it with A. */ 11486 pmop[which] = TREE_OPERAND (pmop[which], 0); 11487 break; 11488 case INTEGER_CST: 11489 /* If C or D is a N where (N & M) == 0, it can be 11490 omitted (assumed 0). */ 11491 if ((TREE_CODE (arg0) == PLUS_EXPR 11492 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0)) 11493 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0) 11494 pmop[which] = NULL; 11495 break; 11496 default: 11497 break; 11498 } 11499 11500 /* Only build anything new if we optimized one or both arguments 11501 above. */ 11502 if (pmop[0] != TREE_OPERAND (arg0, 0) 11503 || (TREE_CODE (arg0) != NEGATE_EXPR 11504 && pmop[1] != TREE_OPERAND (arg0, 1))) 11505 { 11506 tree utype = TREE_TYPE (arg0); 11507 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))) 11508 { 11509 /* Perform the operations in a type that has defined 11510 overflow behavior. */ 11511 utype = unsigned_type_for (TREE_TYPE (arg0)); 11512 if (pmop[0] != NULL) 11513 pmop[0] = fold_convert_loc (loc, utype, pmop[0]); 11514 if (pmop[1] != NULL) 11515 pmop[1] = fold_convert_loc (loc, utype, pmop[1]); 11516 } 11517 11518 if (TREE_CODE (arg0) == NEGATE_EXPR) 11519 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]); 11520 else if (TREE_CODE (arg0) == PLUS_EXPR) 11521 { 11522 if (pmop[0] != NULL && pmop[1] != NULL) 11523 tem = fold_build2_loc (loc, PLUS_EXPR, utype, 11524 pmop[0], pmop[1]); 11525 else if (pmop[0] != NULL) 11526 tem = pmop[0]; 11527 else if (pmop[1] != NULL) 11528 tem = pmop[1]; 11529 else 11530 return build_int_cst (type, 0); 11531 } 11532 else if (pmop[0] == NULL) 11533 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]); 11534 else 11535 tem = fold_build2_loc (loc, MINUS_EXPR, utype, 11536 pmop[0], pmop[1]); 11537 /* TEM is now the new binary +, - or unary - replacement. */ 11538 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem, 11539 fold_convert_loc (loc, utype, arg1)); 11540 return fold_convert_loc (loc, type, tem); 11541 } 11542 } 11543 } 11544 11545 t1 = distribute_bit_expr (loc, code, type, arg0, arg1); 11546 if (t1 != NULL_TREE) 11547 return t1; 11548 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */ 11549 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR 11550 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0)))) 11551 { 11552 unsigned int prec 11553 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0))); 11554 11555 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT 11556 && (~TREE_INT_CST_LOW (arg1) 11557 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0) 11558 return 11559 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 11560 } 11561 11562 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))). 11563 11564 This results in more efficient code for machines without a NOR 11565 instruction. Combine will canonicalize to the first form 11566 which will allow use of NOR instructions provided by the 11567 backend if they exist. */ 11568 if (TREE_CODE (arg0) == BIT_NOT_EXPR 11569 && TREE_CODE (arg1) == BIT_NOT_EXPR) 11570 { 11571 return fold_build1_loc (loc, BIT_NOT_EXPR, type, 11572 build2 (BIT_IOR_EXPR, type, 11573 fold_convert_loc (loc, type, 11574 TREE_OPERAND (arg0, 0)), 11575 fold_convert_loc (loc, type, 11576 TREE_OPERAND (arg1, 0)))); 11577 } 11578 11579 /* If arg0 is derived from the address of an object or function, we may 11580 be able to fold this expression using the object or function's 11581 alignment. */ 11582 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1)) 11583 { 11584 unsigned HOST_WIDE_INT modulus, residue; 11585 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1); 11586 11587 modulus = get_pointer_modulus_and_residue (arg0, &residue, 11588 integer_onep (arg1)); 11589 11590 /* This works because modulus is a power of 2. If this weren't the 11591 case, we'd have to replace it by its greatest power-of-2 11592 divisor: modulus & -modulus. */ 11593 if (low < modulus) 11594 return build_int_cst (type, residue & low); 11595 } 11596 11597 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1)) 11598 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1)) 11599 if the new mask might be further optimized. */ 11600 if ((TREE_CODE (arg0) == LSHIFT_EXPR 11601 || TREE_CODE (arg0) == RSHIFT_EXPR) 11602 && host_integerp (TREE_OPERAND (arg0, 1), 1) 11603 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1))) 11604 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) 11605 < TYPE_PRECISION (TREE_TYPE (arg0)) 11606 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT 11607 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0) 11608 { 11609 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1); 11610 unsigned HOST_WIDE_INT mask 11611 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1))); 11612 unsigned HOST_WIDE_INT newmask, zerobits = 0; 11613 tree shift_type = TREE_TYPE (arg0); 11614 11615 if (TREE_CODE (arg0) == LSHIFT_EXPR) 11616 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1); 11617 else if (TREE_CODE (arg0) == RSHIFT_EXPR 11618 && TYPE_PRECISION (TREE_TYPE (arg0)) 11619 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0)))) 11620 { 11621 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0)); 11622 tree arg00 = TREE_OPERAND (arg0, 0); 11623 /* See if more bits can be proven as zero because of 11624 zero extension. */ 11625 if (TREE_CODE (arg00) == NOP_EXPR 11626 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0)))) 11627 { 11628 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0)); 11629 if (TYPE_PRECISION (inner_type) 11630 == GET_MODE_BITSIZE (TYPE_MODE (inner_type)) 11631 && TYPE_PRECISION (inner_type) < prec) 11632 { 11633 prec = TYPE_PRECISION (inner_type); 11634 /* See if we can shorten the right shift. */ 11635 if (shiftc < prec) 11636 shift_type = inner_type; 11637 } 11638 } 11639 zerobits = ~(unsigned HOST_WIDE_INT) 0; 11640 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc; 11641 zerobits <<= prec - shiftc; 11642 /* For arithmetic shift if sign bit could be set, zerobits 11643 can contain actually sign bits, so no transformation is 11644 possible, unless MASK masks them all away. In that 11645 case the shift needs to be converted into logical shift. */ 11646 if (!TYPE_UNSIGNED (TREE_TYPE (arg0)) 11647 && prec == TYPE_PRECISION (TREE_TYPE (arg0))) 11648 { 11649 if ((mask & zerobits) == 0) 11650 shift_type = unsigned_type_for (TREE_TYPE (arg0)); 11651 else 11652 zerobits = 0; 11653 } 11654 } 11655 11656 /* ((X << 16) & 0xff00) is (X, 0). */ 11657 if ((mask & zerobits) == mask) 11658 return omit_one_operand_loc (loc, type, 11659 build_int_cst (type, 0), arg0); 11660 11661 newmask = mask | zerobits; 11662 if (newmask != mask && (newmask & (newmask + 1)) == 0) 11663 { 11664 unsigned int prec; 11665 11666 /* Only do the transformation if NEWMASK is some integer 11667 mode's mask. */ 11668 for (prec = BITS_PER_UNIT; 11669 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1) 11670 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1) 11671 break; 11672 if (prec < HOST_BITS_PER_WIDE_INT 11673 || newmask == ~(unsigned HOST_WIDE_INT) 0) 11674 { 11675 tree newmaskt; 11676 11677 if (shift_type != TREE_TYPE (arg0)) 11678 { 11679 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type, 11680 fold_convert_loc (loc, shift_type, 11681 TREE_OPERAND (arg0, 0)), 11682 TREE_OPERAND (arg0, 1)); 11683 tem = fold_convert_loc (loc, type, tem); 11684 } 11685 else 11686 tem = op0; 11687 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask); 11688 if (!tree_int_cst_equal (newmaskt, arg1)) 11689 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt); 11690 } 11691 } 11692 } 11693 11694 goto associate; 11695 11696 case RDIV_EXPR: 11697 /* Don't touch a floating-point divide by zero unless the mode 11698 of the constant can represent infinity. */ 11699 if (TREE_CODE (arg1) == REAL_CST 11700 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))) 11701 && real_zerop (arg1)) 11702 return NULL_TREE; 11703 11704 /* Optimize A / A to 1.0 if we don't care about 11705 NaNs or Infinities. Skip the transformation 11706 for non-real operands. */ 11707 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0)) 11708 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) 11709 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0))) 11710 && operand_equal_p (arg0, arg1, 0)) 11711 { 11712 tree r = build_real (TREE_TYPE (arg0), dconst1); 11713 11714 return omit_two_operands_loc (loc, type, r, arg0, arg1); 11715 } 11716 11717 /* The complex version of the above A / A optimization. */ 11718 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)) 11719 && operand_equal_p (arg0, arg1, 0)) 11720 { 11721 tree elem_type = TREE_TYPE (TREE_TYPE (arg0)); 11722 if (! HONOR_NANS (TYPE_MODE (elem_type)) 11723 && ! HONOR_INFINITIES (TYPE_MODE (elem_type))) 11724 { 11725 tree r = build_real (elem_type, dconst1); 11726 /* omit_two_operands will call fold_convert for us. */ 11727 return omit_two_operands_loc (loc, type, r, arg0, arg1); 11728 } 11729 } 11730 11731 /* (-A) / (-B) -> A / B */ 11732 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1)) 11733 return fold_build2_loc (loc, RDIV_EXPR, type, 11734 TREE_OPERAND (arg0, 0), 11735 negate_expr (arg1)); 11736 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0)) 11737 return fold_build2_loc (loc, RDIV_EXPR, type, 11738 negate_expr (arg0), 11739 TREE_OPERAND (arg1, 0)); 11740 11741 /* In IEEE floating point, x/1 is not equivalent to x for snans. */ 11742 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) 11743 && real_onep (arg1)) 11744 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 11745 11746 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */ 11747 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) 11748 && real_minus_onep (arg1)) 11749 return non_lvalue_loc (loc, fold_convert_loc (loc, type, 11750 negate_expr (arg0))); 11751 11752 /* If ARG1 is a constant, we can convert this to a multiply by the 11753 reciprocal. This does not have the same rounding properties, 11754 so only do this if -freciprocal-math. We can actually 11755 always safely do it if ARG1 is a power of two, but it's hard to 11756 tell if it is or not in a portable manner. */ 11757 if (TREE_CODE (arg1) == REAL_CST) 11758 { 11759 if (flag_reciprocal_math 11760 && 0 != (tem = const_binop (code, build_real (type, dconst1), 11761 arg1))) 11762 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem); 11763 /* Find the reciprocal if optimizing and the result is exact. */ 11764 if (optimize) 11765 { 11766 REAL_VALUE_TYPE r; 11767 r = TREE_REAL_CST (arg1); 11768 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r)) 11769 { 11770 tem = build_real (type, r); 11771 return fold_build2_loc (loc, MULT_EXPR, type, 11772 fold_convert_loc (loc, type, arg0), tem); 11773 } 11774 } 11775 } 11776 /* Convert A/B/C to A/(B*C). */ 11777 if (flag_reciprocal_math 11778 && TREE_CODE (arg0) == RDIV_EXPR) 11779 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0), 11780 fold_build2_loc (loc, MULT_EXPR, type, 11781 TREE_OPERAND (arg0, 1), arg1)); 11782 11783 /* Convert A/(B/C) to (A/B)*C. */ 11784 if (flag_reciprocal_math 11785 && TREE_CODE (arg1) == RDIV_EXPR) 11786 return fold_build2_loc (loc, MULT_EXPR, type, 11787 fold_build2_loc (loc, RDIV_EXPR, type, arg0, 11788 TREE_OPERAND (arg1, 0)), 11789 TREE_OPERAND (arg1, 1)); 11790 11791 /* Convert C1/(X*C2) into (C1/C2)/X. */ 11792 if (flag_reciprocal_math 11793 && TREE_CODE (arg1) == MULT_EXPR 11794 && TREE_CODE (arg0) == REAL_CST 11795 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST) 11796 { 11797 tree tem = const_binop (RDIV_EXPR, arg0, 11798 TREE_OPERAND (arg1, 1)); 11799 if (tem) 11800 return fold_build2_loc (loc, RDIV_EXPR, type, tem, 11801 TREE_OPERAND (arg1, 0)); 11802 } 11803 11804 if (flag_unsafe_math_optimizations) 11805 { 11806 enum built_in_function fcode0 = builtin_mathfn_code (arg0); 11807 enum built_in_function fcode1 = builtin_mathfn_code (arg1); 11808 11809 /* Optimize sin(x)/cos(x) as tan(x). */ 11810 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS) 11811 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF) 11812 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL)) 11813 && operand_equal_p (CALL_EXPR_ARG (arg0, 0), 11814 CALL_EXPR_ARG (arg1, 0), 0)) 11815 { 11816 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN); 11817 11818 if (tanfn != NULL_TREE) 11819 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0)); 11820 } 11821 11822 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */ 11823 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN) 11824 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF) 11825 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL)) 11826 && operand_equal_p (CALL_EXPR_ARG (arg0, 0), 11827 CALL_EXPR_ARG (arg1, 0), 0)) 11828 { 11829 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN); 11830 11831 if (tanfn != NULL_TREE) 11832 { 11833 tree tmp = build_call_expr_loc (loc, tanfn, 1, 11834 CALL_EXPR_ARG (arg0, 0)); 11835 return fold_build2_loc (loc, RDIV_EXPR, type, 11836 build_real (type, dconst1), tmp); 11837 } 11838 } 11839 11840 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about 11841 NaNs or Infinities. */ 11842 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN) 11843 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF) 11844 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL))) 11845 { 11846 tree arg00 = CALL_EXPR_ARG (arg0, 0); 11847 tree arg01 = CALL_EXPR_ARG (arg1, 0); 11848 11849 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00))) 11850 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00))) 11851 && operand_equal_p (arg00, arg01, 0)) 11852 { 11853 tree cosfn = mathfn_built_in (type, BUILT_IN_COS); 11854 11855 if (cosfn != NULL_TREE) 11856 return build_call_expr_loc (loc, cosfn, 1, arg00); 11857 } 11858 } 11859 11860 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about 11861 NaNs or Infinities. */ 11862 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN) 11863 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF) 11864 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL))) 11865 { 11866 tree arg00 = CALL_EXPR_ARG (arg0, 0); 11867 tree arg01 = CALL_EXPR_ARG (arg1, 0); 11868 11869 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00))) 11870 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00))) 11871 && operand_equal_p (arg00, arg01, 0)) 11872 { 11873 tree cosfn = mathfn_built_in (type, BUILT_IN_COS); 11874 11875 if (cosfn != NULL_TREE) 11876 { 11877 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00); 11878 return fold_build2_loc (loc, RDIV_EXPR, type, 11879 build_real (type, dconst1), 11880 tmp); 11881 } 11882 } 11883 } 11884 11885 /* Optimize pow(x,c)/x as pow(x,c-1). */ 11886 if (fcode0 == BUILT_IN_POW 11887 || fcode0 == BUILT_IN_POWF 11888 || fcode0 == BUILT_IN_POWL) 11889 { 11890 tree arg00 = CALL_EXPR_ARG (arg0, 0); 11891 tree arg01 = CALL_EXPR_ARG (arg0, 1); 11892 if (TREE_CODE (arg01) == REAL_CST 11893 && !TREE_OVERFLOW (arg01) 11894 && operand_equal_p (arg1, arg00, 0)) 11895 { 11896 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); 11897 REAL_VALUE_TYPE c; 11898 tree arg; 11899 11900 c = TREE_REAL_CST (arg01); 11901 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1); 11902 arg = build_real (type, c); 11903 return build_call_expr_loc (loc, powfn, 2, arg1, arg); 11904 } 11905 } 11906 11907 /* Optimize a/root(b/c) into a*root(c/b). */ 11908 if (BUILTIN_ROOT_P (fcode1)) 11909 { 11910 tree rootarg = CALL_EXPR_ARG (arg1, 0); 11911 11912 if (TREE_CODE (rootarg) == RDIV_EXPR) 11913 { 11914 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0); 11915 tree b = TREE_OPERAND (rootarg, 0); 11916 tree c = TREE_OPERAND (rootarg, 1); 11917 11918 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b); 11919 11920 tmp = build_call_expr_loc (loc, rootfn, 1, tmp); 11921 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp); 11922 } 11923 } 11924 11925 /* Optimize x/expN(y) into x*expN(-y). */ 11926 if (BUILTIN_EXPONENT_P (fcode1)) 11927 { 11928 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0); 11929 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0)); 11930 arg1 = build_call_expr_loc (loc, 11931 expfn, 1, 11932 fold_convert_loc (loc, type, arg)); 11933 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1); 11934 } 11935 11936 /* Optimize x/pow(y,z) into x*pow(y,-z). */ 11937 if (fcode1 == BUILT_IN_POW 11938 || fcode1 == BUILT_IN_POWF 11939 || fcode1 == BUILT_IN_POWL) 11940 { 11941 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0); 11942 tree arg10 = CALL_EXPR_ARG (arg1, 0); 11943 tree arg11 = CALL_EXPR_ARG (arg1, 1); 11944 tree neg11 = fold_convert_loc (loc, type, 11945 negate_expr (arg11)); 11946 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11); 11947 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1); 11948 } 11949 } 11950 return NULL_TREE; 11951 11952 case TRUNC_DIV_EXPR: 11953 /* Optimize (X & (-A)) / A where A is a power of 2, 11954 to X >> log2(A) */ 11955 if (TREE_CODE (arg0) == BIT_AND_EXPR 11956 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST 11957 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0) 11958 { 11959 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1), 11960 arg1, TREE_OPERAND (arg0, 1)); 11961 if (sum && integer_zerop (sum)) { 11962 unsigned long pow2; 11963 11964 if (TREE_INT_CST_LOW (arg1)) 11965 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1)); 11966 else 11967 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1)) 11968 + HOST_BITS_PER_WIDE_INT; 11969 11970 return fold_build2_loc (loc, RSHIFT_EXPR, type, 11971 TREE_OPERAND (arg0, 0), 11972 build_int_cst (integer_type_node, pow2)); 11973 } 11974 } 11975 11976 /* Fall thru */ 11977 11978 case FLOOR_DIV_EXPR: 11979 /* Simplify A / (B << N) where A and B are positive and B is 11980 a power of 2, to A >> (N + log2(B)). */ 11981 strict_overflow_p = false; 11982 if (TREE_CODE (arg1) == LSHIFT_EXPR 11983 && (TYPE_UNSIGNED (type) 11984 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p))) 11985 { 11986 tree sval = TREE_OPERAND (arg1, 0); 11987 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0) 11988 { 11989 tree sh_cnt = TREE_OPERAND (arg1, 1); 11990 unsigned long pow2; 11991 11992 if (TREE_INT_CST_LOW (sval)) 11993 pow2 = exact_log2 (TREE_INT_CST_LOW (sval)); 11994 else 11995 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval)) 11996 + HOST_BITS_PER_WIDE_INT; 11997 11998 if (strict_overflow_p) 11999 fold_overflow_warning (("assuming signed overflow does not " 12000 "occur when simplifying A / (B << N)"), 12001 WARN_STRICT_OVERFLOW_MISC); 12002 12003 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt), 12004 sh_cnt, 12005 build_int_cst (TREE_TYPE (sh_cnt), 12006 pow2)); 12007 return fold_build2_loc (loc, RSHIFT_EXPR, type, 12008 fold_convert_loc (loc, type, arg0), sh_cnt); 12009 } 12010 } 12011 12012 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as 12013 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */ 12014 if (INTEGRAL_TYPE_P (type) 12015 && TYPE_UNSIGNED (type) 12016 && code == FLOOR_DIV_EXPR) 12017 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1); 12018 12019 /* Fall thru */ 12020 12021 case ROUND_DIV_EXPR: 12022 case CEIL_DIV_EXPR: 12023 case EXACT_DIV_EXPR: 12024 if (integer_onep (arg1)) 12025 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12026 if (integer_zerop (arg1)) 12027 return NULL_TREE; 12028 /* X / -1 is -X. */ 12029 if (!TYPE_UNSIGNED (type) 12030 && TREE_CODE (arg1) == INTEGER_CST 12031 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1 12032 && TREE_INT_CST_HIGH (arg1) == -1) 12033 return fold_convert_loc (loc, type, negate_expr (arg0)); 12034 12035 /* Convert -A / -B to A / B when the type is signed and overflow is 12036 undefined. */ 12037 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type)) 12038 && TREE_CODE (arg0) == NEGATE_EXPR 12039 && negate_expr_p (arg1)) 12040 { 12041 if (INTEGRAL_TYPE_P (type)) 12042 fold_overflow_warning (("assuming signed overflow does not occur " 12043 "when distributing negation across " 12044 "division"), 12045 WARN_STRICT_OVERFLOW_MISC); 12046 return fold_build2_loc (loc, code, type, 12047 fold_convert_loc (loc, type, 12048 TREE_OPERAND (arg0, 0)), 12049 fold_convert_loc (loc, type, 12050 negate_expr (arg1))); 12051 } 12052 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type)) 12053 && TREE_CODE (arg1) == NEGATE_EXPR 12054 && negate_expr_p (arg0)) 12055 { 12056 if (INTEGRAL_TYPE_P (type)) 12057 fold_overflow_warning (("assuming signed overflow does not occur " 12058 "when distributing negation across " 12059 "division"), 12060 WARN_STRICT_OVERFLOW_MISC); 12061 return fold_build2_loc (loc, code, type, 12062 fold_convert_loc (loc, type, 12063 negate_expr (arg0)), 12064 fold_convert_loc (loc, type, 12065 TREE_OPERAND (arg1, 0))); 12066 } 12067 12068 /* If arg0 is a multiple of arg1, then rewrite to the fastest div 12069 operation, EXACT_DIV_EXPR. 12070 12071 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now. 12072 At one time others generated faster code, it's not clear if they do 12073 after the last round to changes to the DIV code in expmed.c. */ 12074 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR) 12075 && multiple_of_p (type, arg0, arg1)) 12076 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1); 12077 12078 strict_overflow_p = false; 12079 if (TREE_CODE (arg1) == INTEGER_CST 12080 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE, 12081 &strict_overflow_p))) 12082 { 12083 if (strict_overflow_p) 12084 fold_overflow_warning (("assuming signed overflow does not occur " 12085 "when simplifying division"), 12086 WARN_STRICT_OVERFLOW_MISC); 12087 return fold_convert_loc (loc, type, tem); 12088 } 12089 12090 return NULL_TREE; 12091 12092 case CEIL_MOD_EXPR: 12093 case FLOOR_MOD_EXPR: 12094 case ROUND_MOD_EXPR: 12095 case TRUNC_MOD_EXPR: 12096 /* X % 1 is always zero, but be sure to preserve any side 12097 effects in X. */ 12098 if (integer_onep (arg1)) 12099 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 12100 12101 /* X % 0, return X % 0 unchanged so that we can get the 12102 proper warnings and errors. */ 12103 if (integer_zerop (arg1)) 12104 return NULL_TREE; 12105 12106 /* 0 % X is always zero, but be sure to preserve any side 12107 effects in X. Place this after checking for X == 0. */ 12108 if (integer_zerop (arg0)) 12109 return omit_one_operand_loc (loc, type, integer_zero_node, arg1); 12110 12111 /* X % -1 is zero. */ 12112 if (!TYPE_UNSIGNED (type) 12113 && TREE_CODE (arg1) == INTEGER_CST 12114 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1 12115 && TREE_INT_CST_HIGH (arg1) == -1) 12116 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 12117 12118 /* X % -C is the same as X % C. */ 12119 if (code == TRUNC_MOD_EXPR 12120 && !TYPE_UNSIGNED (type) 12121 && TREE_CODE (arg1) == INTEGER_CST 12122 && !TREE_OVERFLOW (arg1) 12123 && TREE_INT_CST_HIGH (arg1) < 0 12124 && !TYPE_OVERFLOW_TRAPS (type) 12125 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */ 12126 && !sign_bit_p (arg1, arg1)) 12127 return fold_build2_loc (loc, code, type, 12128 fold_convert_loc (loc, type, arg0), 12129 fold_convert_loc (loc, type, 12130 negate_expr (arg1))); 12131 12132 /* X % -Y is the same as X % Y. */ 12133 if (code == TRUNC_MOD_EXPR 12134 && !TYPE_UNSIGNED (type) 12135 && TREE_CODE (arg1) == NEGATE_EXPR 12136 && !TYPE_OVERFLOW_TRAPS (type)) 12137 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0), 12138 fold_convert_loc (loc, type, 12139 TREE_OPERAND (arg1, 0))); 12140 12141 strict_overflow_p = false; 12142 if (TREE_CODE (arg1) == INTEGER_CST 12143 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE, 12144 &strict_overflow_p))) 12145 { 12146 if (strict_overflow_p) 12147 fold_overflow_warning (("assuming signed overflow does not occur " 12148 "when simplifying modulus"), 12149 WARN_STRICT_OVERFLOW_MISC); 12150 return fold_convert_loc (loc, type, tem); 12151 } 12152 12153 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR, 12154 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */ 12155 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR) 12156 && (TYPE_UNSIGNED (type) 12157 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p))) 12158 { 12159 tree c = arg1; 12160 /* Also optimize A % (C << N) where C is a power of 2, 12161 to A & ((C << N) - 1). */ 12162 if (TREE_CODE (arg1) == LSHIFT_EXPR) 12163 c = TREE_OPERAND (arg1, 0); 12164 12165 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0) 12166 { 12167 tree mask 12168 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1, 12169 build_int_cst (TREE_TYPE (arg1), 1)); 12170 if (strict_overflow_p) 12171 fold_overflow_warning (("assuming signed overflow does not " 12172 "occur when simplifying " 12173 "X % (power of two)"), 12174 WARN_STRICT_OVERFLOW_MISC); 12175 return fold_build2_loc (loc, BIT_AND_EXPR, type, 12176 fold_convert_loc (loc, type, arg0), 12177 fold_convert_loc (loc, type, mask)); 12178 } 12179 } 12180 12181 return NULL_TREE; 12182 12183 case LROTATE_EXPR: 12184 case RROTATE_EXPR: 12185 if (integer_all_onesp (arg0)) 12186 return omit_one_operand_loc (loc, type, arg0, arg1); 12187 goto shift; 12188 12189 case RSHIFT_EXPR: 12190 /* Optimize -1 >> x for arithmetic right shifts. */ 12191 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type) 12192 && tree_expr_nonnegative_p (arg1)) 12193 return omit_one_operand_loc (loc, type, arg0, arg1); 12194 /* ... fall through ... */ 12195 12196 case LSHIFT_EXPR: 12197 shift: 12198 if (integer_zerop (arg1)) 12199 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12200 if (integer_zerop (arg0)) 12201 return omit_one_operand_loc (loc, type, arg0, arg1); 12202 12203 /* Since negative shift count is not well-defined, 12204 don't try to compute it in the compiler. */ 12205 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0) 12206 return NULL_TREE; 12207 12208 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */ 12209 if (TREE_CODE (op0) == code && host_integerp (arg1, false) 12210 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type) 12211 && host_integerp (TREE_OPERAND (arg0, 1), false) 12212 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type)) 12213 { 12214 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) 12215 + TREE_INT_CST_LOW (arg1)); 12216 12217 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2 12218 being well defined. */ 12219 if (low >= TYPE_PRECISION (type)) 12220 { 12221 if (code == LROTATE_EXPR || code == RROTATE_EXPR) 12222 low = low % TYPE_PRECISION (type); 12223 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR) 12224 return omit_one_operand_loc (loc, type, build_int_cst (type, 0), 12225 TREE_OPERAND (arg0, 0)); 12226 else 12227 low = TYPE_PRECISION (type) - 1; 12228 } 12229 12230 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), 12231 build_int_cst (type, low)); 12232 } 12233 12234 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c 12235 into x & ((unsigned)-1 >> c) for unsigned types. */ 12236 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR) 12237 || (TYPE_UNSIGNED (type) 12238 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR)) 12239 && host_integerp (arg1, false) 12240 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type) 12241 && host_integerp (TREE_OPERAND (arg0, 1), false) 12242 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type)) 12243 { 12244 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)); 12245 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1); 12246 tree lshift; 12247 tree arg00; 12248 12249 if (low0 == low1) 12250 { 12251 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 12252 12253 lshift = build_int_cst (type, -1); 12254 lshift = int_const_binop (code, lshift, arg1); 12255 12256 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift); 12257 } 12258 } 12259 12260 /* Rewrite an LROTATE_EXPR by a constant into an 12261 RROTATE_EXPR by a new constant. */ 12262 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST) 12263 { 12264 tree tem = build_int_cst (TREE_TYPE (arg1), 12265 TYPE_PRECISION (type)); 12266 tem = const_binop (MINUS_EXPR, tem, arg1); 12267 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem); 12268 } 12269 12270 /* If we have a rotate of a bit operation with the rotate count and 12271 the second operand of the bit operation both constant, 12272 permute the two operations. */ 12273 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST 12274 && (TREE_CODE (arg0) == BIT_AND_EXPR 12275 || TREE_CODE (arg0) == BIT_IOR_EXPR 12276 || TREE_CODE (arg0) == BIT_XOR_EXPR) 12277 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 12278 return fold_build2_loc (loc, TREE_CODE (arg0), type, 12279 fold_build2_loc (loc, code, type, 12280 TREE_OPERAND (arg0, 0), arg1), 12281 fold_build2_loc (loc, code, type, 12282 TREE_OPERAND (arg0, 1), arg1)); 12283 12284 /* Two consecutive rotates adding up to the precision of the 12285 type can be ignored. */ 12286 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST 12287 && TREE_CODE (arg0) == RROTATE_EXPR 12288 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 12289 && TREE_INT_CST_HIGH (arg1) == 0 12290 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0 12291 && ((TREE_INT_CST_LOW (arg1) 12292 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))) 12293 == (unsigned int) TYPE_PRECISION (type))) 12294 return TREE_OPERAND (arg0, 0); 12295 12296 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1) 12297 (X & C2) >> C1 into (X >> C1) & (C2 >> C1) 12298 if the latter can be further optimized. */ 12299 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR) 12300 && TREE_CODE (arg0) == BIT_AND_EXPR 12301 && TREE_CODE (arg1) == INTEGER_CST 12302 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 12303 { 12304 tree mask = fold_build2_loc (loc, code, type, 12305 fold_convert_loc (loc, type, 12306 TREE_OPERAND (arg0, 1)), 12307 arg1); 12308 tree shift = fold_build2_loc (loc, code, type, 12309 fold_convert_loc (loc, type, 12310 TREE_OPERAND (arg0, 0)), 12311 arg1); 12312 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask); 12313 if (tem) 12314 return tem; 12315 } 12316 12317 return NULL_TREE; 12318 12319 case MIN_EXPR: 12320 if (operand_equal_p (arg0, arg1, 0)) 12321 return omit_one_operand_loc (loc, type, arg0, arg1); 12322 if (INTEGRAL_TYPE_P (type) 12323 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST)) 12324 return omit_one_operand_loc (loc, type, arg1, arg0); 12325 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1); 12326 if (tem) 12327 return tem; 12328 goto associate; 12329 12330 case MAX_EXPR: 12331 if (operand_equal_p (arg0, arg1, 0)) 12332 return omit_one_operand_loc (loc, type, arg0, arg1); 12333 if (INTEGRAL_TYPE_P (type) 12334 && TYPE_MAX_VALUE (type) 12335 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST)) 12336 return omit_one_operand_loc (loc, type, arg1, arg0); 12337 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1); 12338 if (tem) 12339 return tem; 12340 goto associate; 12341 12342 case TRUTH_ANDIF_EXPR: 12343 /* Note that the operands of this must be ints 12344 and their values must be 0 or 1. 12345 ("true" is a fixed value perhaps depending on the language.) */ 12346 /* If first arg is constant zero, return it. */ 12347 if (integer_zerop (arg0)) 12348 return fold_convert_loc (loc, type, arg0); 12349 case TRUTH_AND_EXPR: 12350 /* If either arg is constant true, drop it. */ 12351 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0)) 12352 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); 12353 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1) 12354 /* Preserve sequence points. */ 12355 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0))) 12356 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12357 /* If second arg is constant zero, result is zero, but first arg 12358 must be evaluated. */ 12359 if (integer_zerop (arg1)) 12360 return omit_one_operand_loc (loc, type, arg1, arg0); 12361 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR 12362 case will be handled here. */ 12363 if (integer_zerop (arg0)) 12364 return omit_one_operand_loc (loc, type, arg0, arg1); 12365 12366 /* !X && X is always false. */ 12367 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR 12368 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 12369 return omit_one_operand_loc (loc, type, integer_zero_node, arg1); 12370 /* X && !X is always false. */ 12371 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR 12372 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 12373 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 12374 12375 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y 12376 means A >= Y && A != MAX, but in this case we know that 12377 A < X <= MAX. */ 12378 12379 if (!TREE_SIDE_EFFECTS (arg0) 12380 && !TREE_SIDE_EFFECTS (arg1)) 12381 { 12382 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1); 12383 if (tem && !operand_equal_p (tem, arg0, 0)) 12384 return fold_build2_loc (loc, code, type, tem, arg1); 12385 12386 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0); 12387 if (tem && !operand_equal_p (tem, arg1, 0)) 12388 return fold_build2_loc (loc, code, type, arg0, tem); 12389 } 12390 12391 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1)) 12392 != NULL_TREE) 12393 return tem; 12394 12395 return NULL_TREE; 12396 12397 case TRUTH_ORIF_EXPR: 12398 /* Note that the operands of this must be ints 12399 and their values must be 0 or true. 12400 ("true" is a fixed value perhaps depending on the language.) */ 12401 /* If first arg is constant true, return it. */ 12402 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0)) 12403 return fold_convert_loc (loc, type, arg0); 12404 case TRUTH_OR_EXPR: 12405 /* If either arg is constant zero, drop it. */ 12406 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0)) 12407 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); 12408 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1) 12409 /* Preserve sequence points. */ 12410 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0))) 12411 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12412 /* If second arg is constant true, result is true, but we must 12413 evaluate first arg. */ 12414 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)) 12415 return omit_one_operand_loc (loc, type, arg1, arg0); 12416 /* Likewise for first arg, but note this only occurs here for 12417 TRUTH_OR_EXPR. */ 12418 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0)) 12419 return omit_one_operand_loc (loc, type, arg0, arg1); 12420 12421 /* !X || X is always true. */ 12422 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR 12423 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 12424 return omit_one_operand_loc (loc, type, integer_one_node, arg1); 12425 /* X || !X is always true. */ 12426 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR 12427 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 12428 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 12429 12430 /* (X && !Y) || (!X && Y) is X ^ Y */ 12431 if (TREE_CODE (arg0) == TRUTH_AND_EXPR 12432 && TREE_CODE (arg1) == TRUTH_AND_EXPR) 12433 { 12434 tree a0, a1, l0, l1, n0, n1; 12435 12436 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0)); 12437 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1)); 12438 12439 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); 12440 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); 12441 12442 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0); 12443 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1); 12444 12445 if ((operand_equal_p (n0, a0, 0) 12446 && operand_equal_p (n1, a1, 0)) 12447 || (operand_equal_p (n0, a1, 0) 12448 && operand_equal_p (n1, a0, 0))) 12449 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1); 12450 } 12451 12452 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1)) 12453 != NULL_TREE) 12454 return tem; 12455 12456 return NULL_TREE; 12457 12458 case TRUTH_XOR_EXPR: 12459 /* If the second arg is constant zero, drop it. */ 12460 if (integer_zerop (arg1)) 12461 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12462 /* If the second arg is constant true, this is a logical inversion. */ 12463 if (integer_onep (arg1)) 12464 { 12465 /* Only call invert_truthvalue if operand is a truth value. */ 12466 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE) 12467 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0); 12468 else 12469 tem = invert_truthvalue_loc (loc, arg0); 12470 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); 12471 } 12472 /* Identical arguments cancel to zero. */ 12473 if (operand_equal_p (arg0, arg1, 0)) 12474 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 12475 12476 /* !X ^ X is always true. */ 12477 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR 12478 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) 12479 return omit_one_operand_loc (loc, type, integer_one_node, arg1); 12480 12481 /* X ^ !X is always true. */ 12482 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR 12483 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) 12484 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 12485 12486 return NULL_TREE; 12487 12488 case EQ_EXPR: 12489 case NE_EXPR: 12490 STRIP_NOPS (arg0); 12491 STRIP_NOPS (arg1); 12492 12493 tem = fold_comparison (loc, code, type, op0, op1); 12494 if (tem != NULL_TREE) 12495 return tem; 12496 12497 /* bool_var != 0 becomes bool_var. */ 12498 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1) 12499 && code == NE_EXPR) 12500 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12501 12502 /* bool_var == 1 becomes bool_var. */ 12503 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1) 12504 && code == EQ_EXPR) 12505 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12506 12507 /* bool_var != 1 becomes !bool_var. */ 12508 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1) 12509 && code == NE_EXPR) 12510 return fold_convert_loc (loc, type, 12511 fold_build1_loc (loc, TRUTH_NOT_EXPR, 12512 TREE_TYPE (arg0), arg0)); 12513 12514 /* bool_var == 0 becomes !bool_var. */ 12515 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1) 12516 && code == EQ_EXPR) 12517 return fold_convert_loc (loc, type, 12518 fold_build1_loc (loc, TRUTH_NOT_EXPR, 12519 TREE_TYPE (arg0), arg0)); 12520 12521 /* !exp != 0 becomes !exp */ 12522 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1) 12523 && code == NE_EXPR) 12524 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); 12525 12526 /* If this is an equality comparison of the address of two non-weak, 12527 unaliased symbols neither of which are extern (since we do not 12528 have access to attributes for externs), then we know the result. */ 12529 if (TREE_CODE (arg0) == ADDR_EXPR 12530 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0)) 12531 && ! DECL_WEAK (TREE_OPERAND (arg0, 0)) 12532 && ! lookup_attribute ("alias", 12533 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0))) 12534 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0)) 12535 && TREE_CODE (arg1) == ADDR_EXPR 12536 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0)) 12537 && ! DECL_WEAK (TREE_OPERAND (arg1, 0)) 12538 && ! lookup_attribute ("alias", 12539 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0))) 12540 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0))) 12541 { 12542 /* We know that we're looking at the address of two 12543 non-weak, unaliased, static _DECL nodes. 12544 12545 It is both wasteful and incorrect to call operand_equal_p 12546 to compare the two ADDR_EXPR nodes. It is wasteful in that 12547 all we need to do is test pointer equality for the arguments 12548 to the two ADDR_EXPR nodes. It is incorrect to use 12549 operand_equal_p as that function is NOT equivalent to a 12550 C equality test. It can in fact return false for two 12551 objects which would test as equal using the C equality 12552 operator. */ 12553 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0); 12554 return constant_boolean_node (equal 12555 ? code == EQ_EXPR : code != EQ_EXPR, 12556 type); 12557 } 12558 12559 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or 12560 a MINUS_EXPR of a constant, we can convert it into a comparison with 12561 a revised constant as long as no overflow occurs. */ 12562 if (TREE_CODE (arg1) == INTEGER_CST 12563 && (TREE_CODE (arg0) == PLUS_EXPR 12564 || TREE_CODE (arg0) == MINUS_EXPR) 12565 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 12566 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR 12567 ? MINUS_EXPR : PLUS_EXPR, 12568 fold_convert_loc (loc, TREE_TYPE (arg0), 12569 arg1), 12570 TREE_OPERAND (arg0, 1))) 12571 && !TREE_OVERFLOW (tem)) 12572 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem); 12573 12574 /* Similarly for a NEGATE_EXPR. */ 12575 if (TREE_CODE (arg0) == NEGATE_EXPR 12576 && TREE_CODE (arg1) == INTEGER_CST 12577 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0), 12578 arg1))) 12579 && TREE_CODE (tem) == INTEGER_CST 12580 && !TREE_OVERFLOW (tem)) 12581 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem); 12582 12583 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */ 12584 if (TREE_CODE (arg0) == BIT_XOR_EXPR 12585 && TREE_CODE (arg1) == INTEGER_CST 12586 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 12587 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), 12588 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0), 12589 fold_convert_loc (loc, 12590 TREE_TYPE (arg0), 12591 arg1), 12592 TREE_OPERAND (arg0, 1))); 12593 12594 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */ 12595 if ((TREE_CODE (arg0) == PLUS_EXPR 12596 || TREE_CODE (arg0) == POINTER_PLUS_EXPR 12597 || TREE_CODE (arg0) == MINUS_EXPR) 12598 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0, 12599 0)), 12600 arg1, 0) 12601 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0)) 12602 || POINTER_TYPE_P (TREE_TYPE (arg0)))) 12603 { 12604 tree val = TREE_OPERAND (arg0, 1); 12605 return omit_two_operands_loc (loc, type, 12606 fold_build2_loc (loc, code, type, 12607 val, 12608 build_int_cst (TREE_TYPE (val), 12609 0)), 12610 TREE_OPERAND (arg0, 0), arg1); 12611 } 12612 12613 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */ 12614 if (TREE_CODE (arg0) == MINUS_EXPR 12615 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST 12616 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0, 12617 1)), 12618 arg1, 0) 12619 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1) 12620 { 12621 return omit_two_operands_loc (loc, type, 12622 code == NE_EXPR 12623 ? boolean_true_node : boolean_false_node, 12624 TREE_OPERAND (arg0, 1), arg1); 12625 } 12626 12627 /* If we have X - Y == 0, we can convert that to X == Y and similarly 12628 for !=. Don't do this for ordered comparisons due to overflow. */ 12629 if (TREE_CODE (arg0) == MINUS_EXPR 12630 && integer_zerop (arg1)) 12631 return fold_build2_loc (loc, code, type, 12632 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)); 12633 12634 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */ 12635 if (TREE_CODE (arg0) == ABS_EXPR 12636 && (integer_zerop (arg1) || real_zerop (arg1))) 12637 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1); 12638 12639 /* If this is an EQ or NE comparison with zero and ARG0 is 12640 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require 12641 two operations, but the latter can be done in one less insn 12642 on machines that have only two-operand insns or on which a 12643 constant cannot be the first operand. */ 12644 if (TREE_CODE (arg0) == BIT_AND_EXPR 12645 && integer_zerop (arg1)) 12646 { 12647 tree arg00 = TREE_OPERAND (arg0, 0); 12648 tree arg01 = TREE_OPERAND (arg0, 1); 12649 if (TREE_CODE (arg00) == LSHIFT_EXPR 12650 && integer_onep (TREE_OPERAND (arg00, 0))) 12651 { 12652 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00), 12653 arg01, TREE_OPERAND (arg00, 1)); 12654 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem, 12655 build_int_cst (TREE_TYPE (arg0), 1)); 12656 return fold_build2_loc (loc, code, type, 12657 fold_convert_loc (loc, TREE_TYPE (arg1), tem), 12658 arg1); 12659 } 12660 else if (TREE_CODE (arg01) == LSHIFT_EXPR 12661 && integer_onep (TREE_OPERAND (arg01, 0))) 12662 { 12663 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01), 12664 arg00, TREE_OPERAND (arg01, 1)); 12665 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem, 12666 build_int_cst (TREE_TYPE (arg0), 1)); 12667 return fold_build2_loc (loc, code, type, 12668 fold_convert_loc (loc, TREE_TYPE (arg1), tem), 12669 arg1); 12670 } 12671 } 12672 12673 /* If this is an NE or EQ comparison of zero against the result of a 12674 signed MOD operation whose second operand is a power of 2, make 12675 the MOD operation unsigned since it is simpler and equivalent. */ 12676 if (integer_zerop (arg1) 12677 && !TYPE_UNSIGNED (TREE_TYPE (arg0)) 12678 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR 12679 || TREE_CODE (arg0) == CEIL_MOD_EXPR 12680 || TREE_CODE (arg0) == FLOOR_MOD_EXPR 12681 || TREE_CODE (arg0) == ROUND_MOD_EXPR) 12682 && integer_pow2p (TREE_OPERAND (arg0, 1))) 12683 { 12684 tree newtype = unsigned_type_for (TREE_TYPE (arg0)); 12685 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype, 12686 fold_convert_loc (loc, newtype, 12687 TREE_OPERAND (arg0, 0)), 12688 fold_convert_loc (loc, newtype, 12689 TREE_OPERAND (arg0, 1))); 12690 12691 return fold_build2_loc (loc, code, type, newmod, 12692 fold_convert_loc (loc, newtype, arg1)); 12693 } 12694 12695 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where 12696 C1 is a valid shift constant, and C2 is a power of two, i.e. 12697 a single bit. */ 12698 if (TREE_CODE (arg0) == BIT_AND_EXPR 12699 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR 12700 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)) 12701 == INTEGER_CST 12702 && integer_pow2p (TREE_OPERAND (arg0, 1)) 12703 && integer_zerop (arg1)) 12704 { 12705 tree itype = TREE_TYPE (arg0); 12706 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype); 12707 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1); 12708 12709 /* Check for a valid shift count. */ 12710 if (TREE_INT_CST_HIGH (arg001) == 0 12711 && TREE_INT_CST_LOW (arg001) < prec) 12712 { 12713 tree arg01 = TREE_OPERAND (arg0, 1); 12714 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0); 12715 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01); 12716 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0 12717 can be rewritten as (X & (C2 << C1)) != 0. */ 12718 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec) 12719 { 12720 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001); 12721 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem); 12722 return fold_build2_loc (loc, code, type, tem, 12723 fold_convert_loc (loc, itype, arg1)); 12724 } 12725 /* Otherwise, for signed (arithmetic) shifts, 12726 ((X >> C1) & C2) != 0 is rewritten as X < 0, and 12727 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */ 12728 else if (!TYPE_UNSIGNED (itype)) 12729 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type, 12730 arg000, build_int_cst (itype, 0)); 12731 /* Otherwise, of unsigned (logical) shifts, 12732 ((X >> C1) & C2) != 0 is rewritten as (X,false), and 12733 ((X >> C1) & C2) == 0 is rewritten as (X,true). */ 12734 else 12735 return omit_one_operand_loc (loc, type, 12736 code == EQ_EXPR ? integer_one_node 12737 : integer_zero_node, 12738 arg000); 12739 } 12740 } 12741 12742 /* If we have (A & C) == C where C is a power of 2, convert this into 12743 (A & C) != 0. Similarly for NE_EXPR. */ 12744 if (TREE_CODE (arg0) == BIT_AND_EXPR 12745 && integer_pow2p (TREE_OPERAND (arg0, 1)) 12746 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 12747 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type, 12748 arg0, fold_convert_loc (loc, TREE_TYPE (arg0), 12749 integer_zero_node)); 12750 12751 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign 12752 bit, then fold the expression into A < 0 or A >= 0. */ 12753 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type); 12754 if (tem) 12755 return tem; 12756 12757 /* If we have (A & C) == D where D & ~C != 0, convert this into 0. 12758 Similarly for NE_EXPR. */ 12759 if (TREE_CODE (arg0) == BIT_AND_EXPR 12760 && TREE_CODE (arg1) == INTEGER_CST 12761 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 12762 { 12763 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR, 12764 TREE_TYPE (TREE_OPERAND (arg0, 1)), 12765 TREE_OPERAND (arg0, 1)); 12766 tree dandnotc 12767 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), 12768 fold_convert_loc (loc, TREE_TYPE (arg0), arg1), 12769 notc); 12770 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node; 12771 if (integer_nonzerop (dandnotc)) 12772 return omit_one_operand_loc (loc, type, rslt, arg0); 12773 } 12774 12775 /* If we have (A | C) == D where C & ~D != 0, convert this into 0. 12776 Similarly for NE_EXPR. */ 12777 if (TREE_CODE (arg0) == BIT_IOR_EXPR 12778 && TREE_CODE (arg1) == INTEGER_CST 12779 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 12780 { 12781 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1); 12782 tree candnotd 12783 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), 12784 TREE_OPERAND (arg0, 1), 12785 fold_convert_loc (loc, TREE_TYPE (arg0), notd)); 12786 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node; 12787 if (integer_nonzerop (candnotd)) 12788 return omit_one_operand_loc (loc, type, rslt, arg0); 12789 } 12790 12791 /* If this is a comparison of a field, we may be able to simplify it. */ 12792 if ((TREE_CODE (arg0) == COMPONENT_REF 12793 || TREE_CODE (arg0) == BIT_FIELD_REF) 12794 /* Handle the constant case even without -O 12795 to make sure the warnings are given. */ 12796 && (optimize || TREE_CODE (arg1) == INTEGER_CST)) 12797 { 12798 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1); 12799 if (t1) 12800 return t1; 12801 } 12802 12803 /* Optimize comparisons of strlen vs zero to a compare of the 12804 first character of the string vs zero. To wit, 12805 strlen(ptr) == 0 => *ptr == 0 12806 strlen(ptr) != 0 => *ptr != 0 12807 Other cases should reduce to one of these two (or a constant) 12808 due to the return value of strlen being unsigned. */ 12809 if (TREE_CODE (arg0) == CALL_EXPR 12810 && integer_zerop (arg1)) 12811 { 12812 tree fndecl = get_callee_fndecl (arg0); 12813 12814 if (fndecl 12815 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL 12816 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN 12817 && call_expr_nargs (arg0) == 1 12818 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE) 12819 { 12820 tree iref = build_fold_indirect_ref_loc (loc, 12821 CALL_EXPR_ARG (arg0, 0)); 12822 return fold_build2_loc (loc, code, type, iref, 12823 build_int_cst (TREE_TYPE (iref), 0)); 12824 } 12825 } 12826 12827 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width 12828 of X. Similarly fold (X >> C) == 0 into X >= 0. */ 12829 if (TREE_CODE (arg0) == RSHIFT_EXPR 12830 && integer_zerop (arg1) 12831 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 12832 { 12833 tree arg00 = TREE_OPERAND (arg0, 0); 12834 tree arg01 = TREE_OPERAND (arg0, 1); 12835 tree itype = TREE_TYPE (arg00); 12836 if (TREE_INT_CST_HIGH (arg01) == 0 12837 && TREE_INT_CST_LOW (arg01) 12838 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1)) 12839 { 12840 if (TYPE_UNSIGNED (itype)) 12841 { 12842 itype = signed_type_for (itype); 12843 arg00 = fold_convert_loc (loc, itype, arg00); 12844 } 12845 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, 12846 type, arg00, build_int_cst (itype, 0)); 12847 } 12848 } 12849 12850 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */ 12851 if (integer_zerop (arg1) 12852 && TREE_CODE (arg0) == BIT_XOR_EXPR) 12853 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), 12854 TREE_OPERAND (arg0, 1)); 12855 12856 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */ 12857 if (TREE_CODE (arg0) == BIT_XOR_EXPR 12858 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) 12859 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), 12860 build_zero_cst (TREE_TYPE (arg0))); 12861 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */ 12862 if (TREE_CODE (arg0) == BIT_XOR_EXPR 12863 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 12864 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) 12865 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1), 12866 build_zero_cst (TREE_TYPE (arg0))); 12867 12868 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */ 12869 if (TREE_CODE (arg0) == BIT_XOR_EXPR 12870 && TREE_CODE (arg1) == INTEGER_CST 12871 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) 12872 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), 12873 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1), 12874 TREE_OPERAND (arg0, 1), arg1)); 12875 12876 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into 12877 (X & C) == 0 when C is a single bit. */ 12878 if (TREE_CODE (arg0) == BIT_AND_EXPR 12879 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR 12880 && integer_zerop (arg1) 12881 && integer_pow2p (TREE_OPERAND (arg0, 1))) 12882 { 12883 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), 12884 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0), 12885 TREE_OPERAND (arg0, 1)); 12886 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, 12887 type, tem, 12888 fold_convert_loc (loc, TREE_TYPE (arg0), 12889 arg1)); 12890 } 12891 12892 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the 12893 constant C is a power of two, i.e. a single bit. */ 12894 if (TREE_CODE (arg0) == BIT_XOR_EXPR 12895 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR 12896 && integer_zerop (arg1) 12897 && integer_pow2p (TREE_OPERAND (arg0, 1)) 12898 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1), 12899 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST)) 12900 { 12901 tree arg00 = TREE_OPERAND (arg0, 0); 12902 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type, 12903 arg00, build_int_cst (TREE_TYPE (arg00), 0)); 12904 } 12905 12906 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0, 12907 when is C is a power of two, i.e. a single bit. */ 12908 if (TREE_CODE (arg0) == BIT_AND_EXPR 12909 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR 12910 && integer_zerop (arg1) 12911 && integer_pow2p (TREE_OPERAND (arg0, 1)) 12912 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1), 12913 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST)) 12914 { 12915 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0); 12916 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000), 12917 arg000, TREE_OPERAND (arg0, 1)); 12918 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type, 12919 tem, build_int_cst (TREE_TYPE (tem), 0)); 12920 } 12921 12922 if (integer_zerop (arg1) 12923 && tree_expr_nonzero_p (arg0)) 12924 { 12925 tree res = constant_boolean_node (code==NE_EXPR, type); 12926 return omit_one_operand_loc (loc, type, res, arg0); 12927 } 12928 12929 /* Fold -X op -Y as X op Y, where op is eq/ne. */ 12930 if (TREE_CODE (arg0) == NEGATE_EXPR 12931 && TREE_CODE (arg1) == NEGATE_EXPR) 12932 return fold_build2_loc (loc, code, type, 12933 TREE_OPERAND (arg0, 0), 12934 fold_convert_loc (loc, TREE_TYPE (arg0), 12935 TREE_OPERAND (arg1, 0))); 12936 12937 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */ 12938 if (TREE_CODE (arg0) == BIT_AND_EXPR 12939 && TREE_CODE (arg1) == BIT_AND_EXPR) 12940 { 12941 tree arg00 = TREE_OPERAND (arg0, 0); 12942 tree arg01 = TREE_OPERAND (arg0, 1); 12943 tree arg10 = TREE_OPERAND (arg1, 0); 12944 tree arg11 = TREE_OPERAND (arg1, 1); 12945 tree itype = TREE_TYPE (arg0); 12946 12947 if (operand_equal_p (arg01, arg11, 0)) 12948 return fold_build2_loc (loc, code, type, 12949 fold_build2_loc (loc, BIT_AND_EXPR, itype, 12950 fold_build2_loc (loc, 12951 BIT_XOR_EXPR, itype, 12952 arg00, arg10), 12953 arg01), 12954 build_zero_cst (itype)); 12955 12956 if (operand_equal_p (arg01, arg10, 0)) 12957 return fold_build2_loc (loc, code, type, 12958 fold_build2_loc (loc, BIT_AND_EXPR, itype, 12959 fold_build2_loc (loc, 12960 BIT_XOR_EXPR, itype, 12961 arg00, arg11), 12962 arg01), 12963 build_zero_cst (itype)); 12964 12965 if (operand_equal_p (arg00, arg11, 0)) 12966 return fold_build2_loc (loc, code, type, 12967 fold_build2_loc (loc, BIT_AND_EXPR, itype, 12968 fold_build2_loc (loc, 12969 BIT_XOR_EXPR, itype, 12970 arg01, arg10), 12971 arg00), 12972 build_zero_cst (itype)); 12973 12974 if (operand_equal_p (arg00, arg10, 0)) 12975 return fold_build2_loc (loc, code, type, 12976 fold_build2_loc (loc, BIT_AND_EXPR, itype, 12977 fold_build2_loc (loc, 12978 BIT_XOR_EXPR, itype, 12979 arg01, arg11), 12980 arg00), 12981 build_zero_cst (itype)); 12982 } 12983 12984 if (TREE_CODE (arg0) == BIT_XOR_EXPR 12985 && TREE_CODE (arg1) == BIT_XOR_EXPR) 12986 { 12987 tree arg00 = TREE_OPERAND (arg0, 0); 12988 tree arg01 = TREE_OPERAND (arg0, 1); 12989 tree arg10 = TREE_OPERAND (arg1, 0); 12990 tree arg11 = TREE_OPERAND (arg1, 1); 12991 tree itype = TREE_TYPE (arg0); 12992 12993 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries. 12994 operand_equal_p guarantees no side-effects so we don't need 12995 to use omit_one_operand on Z. */ 12996 if (operand_equal_p (arg01, arg11, 0)) 12997 return fold_build2_loc (loc, code, type, arg00, 12998 fold_convert_loc (loc, TREE_TYPE (arg00), 12999 arg10)); 13000 if (operand_equal_p (arg01, arg10, 0)) 13001 return fold_build2_loc (loc, code, type, arg00, 13002 fold_convert_loc (loc, TREE_TYPE (arg00), 13003 arg11)); 13004 if (operand_equal_p (arg00, arg11, 0)) 13005 return fold_build2_loc (loc, code, type, arg01, 13006 fold_convert_loc (loc, TREE_TYPE (arg01), 13007 arg10)); 13008 if (operand_equal_p (arg00, arg10, 0)) 13009 return fold_build2_loc (loc, code, type, arg01, 13010 fold_convert_loc (loc, TREE_TYPE (arg01), 13011 arg11)); 13012 13013 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */ 13014 if (TREE_CODE (arg01) == INTEGER_CST 13015 && TREE_CODE (arg11) == INTEGER_CST) 13016 { 13017 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, 13018 fold_convert_loc (loc, itype, arg11)); 13019 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem); 13020 return fold_build2_loc (loc, code, type, tem, 13021 fold_convert_loc (loc, itype, arg10)); 13022 } 13023 } 13024 13025 /* Attempt to simplify equality/inequality comparisons of complex 13026 values. Only lower the comparison if the result is known or 13027 can be simplified to a single scalar comparison. */ 13028 if ((TREE_CODE (arg0) == COMPLEX_EXPR 13029 || TREE_CODE (arg0) == COMPLEX_CST) 13030 && (TREE_CODE (arg1) == COMPLEX_EXPR 13031 || TREE_CODE (arg1) == COMPLEX_CST)) 13032 { 13033 tree real0, imag0, real1, imag1; 13034 tree rcond, icond; 13035 13036 if (TREE_CODE (arg0) == COMPLEX_EXPR) 13037 { 13038 real0 = TREE_OPERAND (arg0, 0); 13039 imag0 = TREE_OPERAND (arg0, 1); 13040 } 13041 else 13042 { 13043 real0 = TREE_REALPART (arg0); 13044 imag0 = TREE_IMAGPART (arg0); 13045 } 13046 13047 if (TREE_CODE (arg1) == COMPLEX_EXPR) 13048 { 13049 real1 = TREE_OPERAND (arg1, 0); 13050 imag1 = TREE_OPERAND (arg1, 1); 13051 } 13052 else 13053 { 13054 real1 = TREE_REALPART (arg1); 13055 imag1 = TREE_IMAGPART (arg1); 13056 } 13057 13058 rcond = fold_binary_loc (loc, code, type, real0, real1); 13059 if (rcond && TREE_CODE (rcond) == INTEGER_CST) 13060 { 13061 if (integer_zerop (rcond)) 13062 { 13063 if (code == EQ_EXPR) 13064 return omit_two_operands_loc (loc, type, boolean_false_node, 13065 imag0, imag1); 13066 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1); 13067 } 13068 else 13069 { 13070 if (code == NE_EXPR) 13071 return omit_two_operands_loc (loc, type, boolean_true_node, 13072 imag0, imag1); 13073 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1); 13074 } 13075 } 13076 13077 icond = fold_binary_loc (loc, code, type, imag0, imag1); 13078 if (icond && TREE_CODE (icond) == INTEGER_CST) 13079 { 13080 if (integer_zerop (icond)) 13081 { 13082 if (code == EQ_EXPR) 13083 return omit_two_operands_loc (loc, type, boolean_false_node, 13084 real0, real1); 13085 return fold_build2_loc (loc, NE_EXPR, type, real0, real1); 13086 } 13087 else 13088 { 13089 if (code == NE_EXPR) 13090 return omit_two_operands_loc (loc, type, boolean_true_node, 13091 real0, real1); 13092 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1); 13093 } 13094 } 13095 } 13096 13097 return NULL_TREE; 13098 13099 case LT_EXPR: 13100 case GT_EXPR: 13101 case LE_EXPR: 13102 case GE_EXPR: 13103 tem = fold_comparison (loc, code, type, op0, op1); 13104 if (tem != NULL_TREE) 13105 return tem; 13106 13107 /* Transform comparisons of the form X +- C CMP X. */ 13108 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) 13109 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) 13110 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST 13111 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))) 13112 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST 13113 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))))) 13114 { 13115 tree arg01 = TREE_OPERAND (arg0, 1); 13116 enum tree_code code0 = TREE_CODE (arg0); 13117 int is_positive; 13118 13119 if (TREE_CODE (arg01) == REAL_CST) 13120 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1; 13121 else 13122 is_positive = tree_int_cst_sgn (arg01); 13123 13124 /* (X - c) > X becomes false. */ 13125 if (code == GT_EXPR 13126 && ((code0 == MINUS_EXPR && is_positive >= 0) 13127 || (code0 == PLUS_EXPR && is_positive <= 0))) 13128 { 13129 if (TREE_CODE (arg01) == INTEGER_CST 13130 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 13131 fold_overflow_warning (("assuming signed overflow does not " 13132 "occur when assuming that (X - c) > X " 13133 "is always false"), 13134 WARN_STRICT_OVERFLOW_ALL); 13135 return constant_boolean_node (0, type); 13136 } 13137 13138 /* Likewise (X + c) < X becomes false. */ 13139 if (code == LT_EXPR 13140 && ((code0 == PLUS_EXPR && is_positive >= 0) 13141 || (code0 == MINUS_EXPR && is_positive <= 0))) 13142 { 13143 if (TREE_CODE (arg01) == INTEGER_CST 13144 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 13145 fold_overflow_warning (("assuming signed overflow does not " 13146 "occur when assuming that " 13147 "(X + c) < X is always false"), 13148 WARN_STRICT_OVERFLOW_ALL); 13149 return constant_boolean_node (0, type); 13150 } 13151 13152 /* Convert (X - c) <= X to true. */ 13153 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))) 13154 && code == LE_EXPR 13155 && ((code0 == MINUS_EXPR && is_positive >= 0) 13156 || (code0 == PLUS_EXPR && is_positive <= 0))) 13157 { 13158 if (TREE_CODE (arg01) == INTEGER_CST 13159 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 13160 fold_overflow_warning (("assuming signed overflow does not " 13161 "occur when assuming that " 13162 "(X - c) <= X is always true"), 13163 WARN_STRICT_OVERFLOW_ALL); 13164 return constant_boolean_node (1, type); 13165 } 13166 13167 /* Convert (X + c) >= X to true. */ 13168 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))) 13169 && code == GE_EXPR 13170 && ((code0 == PLUS_EXPR && is_positive >= 0) 13171 || (code0 == MINUS_EXPR && is_positive <= 0))) 13172 { 13173 if (TREE_CODE (arg01) == INTEGER_CST 13174 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 13175 fold_overflow_warning (("assuming signed overflow does not " 13176 "occur when assuming that " 13177 "(X + c) >= X is always true"), 13178 WARN_STRICT_OVERFLOW_ALL); 13179 return constant_boolean_node (1, type); 13180 } 13181 13182 if (TREE_CODE (arg01) == INTEGER_CST) 13183 { 13184 /* Convert X + c > X and X - c < X to true for integers. */ 13185 if (code == GT_EXPR 13186 && ((code0 == PLUS_EXPR && is_positive > 0) 13187 || (code0 == MINUS_EXPR && is_positive < 0))) 13188 { 13189 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 13190 fold_overflow_warning (("assuming signed overflow does " 13191 "not occur when assuming that " 13192 "(X + c) > X is always true"), 13193 WARN_STRICT_OVERFLOW_ALL); 13194 return constant_boolean_node (1, type); 13195 } 13196 13197 if (code == LT_EXPR 13198 && ((code0 == MINUS_EXPR && is_positive > 0) 13199 || (code0 == PLUS_EXPR && is_positive < 0))) 13200 { 13201 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 13202 fold_overflow_warning (("assuming signed overflow does " 13203 "not occur when assuming that " 13204 "(X - c) < X is always true"), 13205 WARN_STRICT_OVERFLOW_ALL); 13206 return constant_boolean_node (1, type); 13207 } 13208 13209 /* Convert X + c <= X and X - c >= X to false for integers. */ 13210 if (code == LE_EXPR 13211 && ((code0 == PLUS_EXPR && is_positive > 0) 13212 || (code0 == MINUS_EXPR && is_positive < 0))) 13213 { 13214 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 13215 fold_overflow_warning (("assuming signed overflow does " 13216 "not occur when assuming that " 13217 "(X + c) <= X is always false"), 13218 WARN_STRICT_OVERFLOW_ALL); 13219 return constant_boolean_node (0, type); 13220 } 13221 13222 if (code == GE_EXPR 13223 && ((code0 == MINUS_EXPR && is_positive > 0) 13224 || (code0 == PLUS_EXPR && is_positive < 0))) 13225 { 13226 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) 13227 fold_overflow_warning (("assuming signed overflow does " 13228 "not occur when assuming that " 13229 "(X - c) >= X is always false"), 13230 WARN_STRICT_OVERFLOW_ALL); 13231 return constant_boolean_node (0, type); 13232 } 13233 } 13234 } 13235 13236 /* Comparisons with the highest or lowest possible integer of 13237 the specified precision will have known values. */ 13238 { 13239 tree arg1_type = TREE_TYPE (arg1); 13240 unsigned int width = TYPE_PRECISION (arg1_type); 13241 13242 if (TREE_CODE (arg1) == INTEGER_CST 13243 && width <= 2 * HOST_BITS_PER_WIDE_INT 13244 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type))) 13245 { 13246 HOST_WIDE_INT signed_max_hi; 13247 unsigned HOST_WIDE_INT signed_max_lo; 13248 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo; 13249 13250 if (width <= HOST_BITS_PER_WIDE_INT) 13251 { 13252 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) 13253 - 1; 13254 signed_max_hi = 0; 13255 max_hi = 0; 13256 13257 if (TYPE_UNSIGNED (arg1_type)) 13258 { 13259 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1; 13260 min_lo = 0; 13261 min_hi = 0; 13262 } 13263 else 13264 { 13265 max_lo = signed_max_lo; 13266 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1)); 13267 min_hi = -1; 13268 } 13269 } 13270 else 13271 { 13272 width -= HOST_BITS_PER_WIDE_INT; 13273 signed_max_lo = -1; 13274 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) 13275 - 1; 13276 max_lo = -1; 13277 min_lo = 0; 13278 13279 if (TYPE_UNSIGNED (arg1_type)) 13280 { 13281 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1; 13282 min_hi = 0; 13283 } 13284 else 13285 { 13286 max_hi = signed_max_hi; 13287 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1)); 13288 } 13289 } 13290 13291 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi 13292 && TREE_INT_CST_LOW (arg1) == max_lo) 13293 switch (code) 13294 { 13295 case GT_EXPR: 13296 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 13297 13298 case GE_EXPR: 13299 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1); 13300 13301 case LE_EXPR: 13302 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 13303 13304 case LT_EXPR: 13305 return fold_build2_loc (loc, NE_EXPR, type, op0, op1); 13306 13307 /* The GE_EXPR and LT_EXPR cases above are not normally 13308 reached because of previous transformations. */ 13309 13310 default: 13311 break; 13312 } 13313 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) 13314 == max_hi 13315 && TREE_INT_CST_LOW (arg1) == max_lo - 1) 13316 switch (code) 13317 { 13318 case GT_EXPR: 13319 arg1 = const_binop (PLUS_EXPR, arg1, 13320 build_int_cst (TREE_TYPE (arg1), 1)); 13321 return fold_build2_loc (loc, EQ_EXPR, type, 13322 fold_convert_loc (loc, 13323 TREE_TYPE (arg1), arg0), 13324 arg1); 13325 case LE_EXPR: 13326 arg1 = const_binop (PLUS_EXPR, arg1, 13327 build_int_cst (TREE_TYPE (arg1), 1)); 13328 return fold_build2_loc (loc, NE_EXPR, type, 13329 fold_convert_loc (loc, TREE_TYPE (arg1), 13330 arg0), 13331 arg1); 13332 default: 13333 break; 13334 } 13335 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) 13336 == min_hi 13337 && TREE_INT_CST_LOW (arg1) == min_lo) 13338 switch (code) 13339 { 13340 case LT_EXPR: 13341 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 13342 13343 case LE_EXPR: 13344 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1); 13345 13346 case GE_EXPR: 13347 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 13348 13349 case GT_EXPR: 13350 return fold_build2_loc (loc, NE_EXPR, type, op0, op1); 13351 13352 default: 13353 break; 13354 } 13355 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) 13356 == min_hi 13357 && TREE_INT_CST_LOW (arg1) == min_lo + 1) 13358 switch (code) 13359 { 13360 case GE_EXPR: 13361 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node); 13362 return fold_build2_loc (loc, NE_EXPR, type, 13363 fold_convert_loc (loc, 13364 TREE_TYPE (arg1), arg0), 13365 arg1); 13366 case LT_EXPR: 13367 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node); 13368 return fold_build2_loc (loc, EQ_EXPR, type, 13369 fold_convert_loc (loc, TREE_TYPE (arg1), 13370 arg0), 13371 arg1); 13372 default: 13373 break; 13374 } 13375 13376 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi 13377 && TREE_INT_CST_LOW (arg1) == signed_max_lo 13378 && TYPE_UNSIGNED (arg1_type) 13379 /* We will flip the signedness of the comparison operator 13380 associated with the mode of arg1, so the sign bit is 13381 specified by this mode. Check that arg1 is the signed 13382 max associated with this sign bit. */ 13383 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type)) 13384 /* signed_type does not work on pointer types. */ 13385 && INTEGRAL_TYPE_P (arg1_type)) 13386 { 13387 /* The following case also applies to X < signed_max+1 13388 and X >= signed_max+1 because previous transformations. */ 13389 if (code == LE_EXPR || code == GT_EXPR) 13390 { 13391 tree st; 13392 st = signed_type_for (TREE_TYPE (arg1)); 13393 return fold_build2_loc (loc, 13394 code == LE_EXPR ? GE_EXPR : LT_EXPR, 13395 type, fold_convert_loc (loc, st, arg0), 13396 build_int_cst (st, 0)); 13397 } 13398 } 13399 } 13400 } 13401 13402 /* If we are comparing an ABS_EXPR with a constant, we can 13403 convert all the cases into explicit comparisons, but they may 13404 well not be faster than doing the ABS and one comparison. 13405 But ABS (X) <= C is a range comparison, which becomes a subtraction 13406 and a comparison, and is probably faster. */ 13407 if (code == LE_EXPR 13408 && TREE_CODE (arg1) == INTEGER_CST 13409 && TREE_CODE (arg0) == ABS_EXPR 13410 && ! TREE_SIDE_EFFECTS (arg0) 13411 && (0 != (tem = negate_expr (arg1))) 13412 && TREE_CODE (tem) == INTEGER_CST 13413 && !TREE_OVERFLOW (tem)) 13414 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type, 13415 build2 (GE_EXPR, type, 13416 TREE_OPERAND (arg0, 0), tem), 13417 build2 (LE_EXPR, type, 13418 TREE_OPERAND (arg0, 0), arg1)); 13419 13420 /* Convert ABS_EXPR<x> >= 0 to true. */ 13421 strict_overflow_p = false; 13422 if (code == GE_EXPR 13423 && (integer_zerop (arg1) 13424 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) 13425 && real_zerop (arg1))) 13426 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)) 13427 { 13428 if (strict_overflow_p) 13429 fold_overflow_warning (("assuming signed overflow does not occur " 13430 "when simplifying comparison of " 13431 "absolute value and zero"), 13432 WARN_STRICT_OVERFLOW_CONDITIONAL); 13433 return omit_one_operand_loc (loc, type, integer_one_node, arg0); 13434 } 13435 13436 /* Convert ABS_EXPR<x> < 0 to false. */ 13437 strict_overflow_p = false; 13438 if (code == LT_EXPR 13439 && (integer_zerop (arg1) || real_zerop (arg1)) 13440 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)) 13441 { 13442 if (strict_overflow_p) 13443 fold_overflow_warning (("assuming signed overflow does not occur " 13444 "when simplifying comparison of " 13445 "absolute value and zero"), 13446 WARN_STRICT_OVERFLOW_CONDITIONAL); 13447 return omit_one_operand_loc (loc, type, integer_zero_node, arg0); 13448 } 13449 13450 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0 13451 and similarly for >= into !=. */ 13452 if ((code == LT_EXPR || code == GE_EXPR) 13453 && TYPE_UNSIGNED (TREE_TYPE (arg0)) 13454 && TREE_CODE (arg1) == LSHIFT_EXPR 13455 && integer_onep (TREE_OPERAND (arg1, 0))) 13456 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type, 13457 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0, 13458 TREE_OPERAND (arg1, 1)), 13459 build_int_cst (TREE_TYPE (arg0), 0)); 13460 13461 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing, 13462 otherwise Y might be >= # of bits in X's type and thus e.g. 13463 (unsigned char) (1 << Y) for Y 15 might be 0. 13464 If the cast is widening, then 1 << Y should have unsigned type, 13465 otherwise if Y is number of bits in the signed shift type minus 1, 13466 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y 13467 31 might be 0xffffffff80000000. */ 13468 if ((code == LT_EXPR || code == GE_EXPR) 13469 && TYPE_UNSIGNED (TREE_TYPE (arg0)) 13470 && CONVERT_EXPR_P (arg1) 13471 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR 13472 && (TYPE_PRECISION (TREE_TYPE (arg1)) 13473 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))) 13474 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))) 13475 || (TYPE_PRECISION (TREE_TYPE (arg1)) 13476 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))) 13477 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0))) 13478 { 13479 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0, 13480 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1)); 13481 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type, 13482 fold_convert_loc (loc, TREE_TYPE (arg0), tem), 13483 build_int_cst (TREE_TYPE (arg0), 0)); 13484 } 13485 13486 return NULL_TREE; 13487 13488 case UNORDERED_EXPR: 13489 case ORDERED_EXPR: 13490 case UNLT_EXPR: 13491 case UNLE_EXPR: 13492 case UNGT_EXPR: 13493 case UNGE_EXPR: 13494 case UNEQ_EXPR: 13495 case LTGT_EXPR: 13496 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST) 13497 { 13498 t1 = fold_relational_const (code, type, arg0, arg1); 13499 if (t1 != NULL_TREE) 13500 return t1; 13501 } 13502 13503 /* If the first operand is NaN, the result is constant. */ 13504 if (TREE_CODE (arg0) == REAL_CST 13505 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0)) 13506 && (code != LTGT_EXPR || ! flag_trapping_math)) 13507 { 13508 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR) 13509 ? integer_zero_node 13510 : integer_one_node; 13511 return omit_one_operand_loc (loc, type, t1, arg1); 13512 } 13513 13514 /* If the second operand is NaN, the result is constant. */ 13515 if (TREE_CODE (arg1) == REAL_CST 13516 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)) 13517 && (code != LTGT_EXPR || ! flag_trapping_math)) 13518 { 13519 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR) 13520 ? integer_zero_node 13521 : integer_one_node; 13522 return omit_one_operand_loc (loc, type, t1, arg0); 13523 } 13524 13525 /* Simplify unordered comparison of something with itself. */ 13526 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR) 13527 && operand_equal_p (arg0, arg1, 0)) 13528 return constant_boolean_node (1, type); 13529 13530 if (code == LTGT_EXPR 13531 && !flag_trapping_math 13532 && operand_equal_p (arg0, arg1, 0)) 13533 return constant_boolean_node (0, type); 13534 13535 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */ 13536 { 13537 tree targ0 = strip_float_extensions (arg0); 13538 tree targ1 = strip_float_extensions (arg1); 13539 tree newtype = TREE_TYPE (targ0); 13540 13541 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype)) 13542 newtype = TREE_TYPE (targ1); 13543 13544 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0))) 13545 return fold_build2_loc (loc, code, type, 13546 fold_convert_loc (loc, newtype, targ0), 13547 fold_convert_loc (loc, newtype, targ1)); 13548 } 13549 13550 return NULL_TREE; 13551 13552 case COMPOUND_EXPR: 13553 /* When pedantic, a compound expression can be neither an lvalue 13554 nor an integer constant expression. */ 13555 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1)) 13556 return NULL_TREE; 13557 /* Don't let (0, 0) be null pointer constant. */ 13558 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1) 13559 : fold_convert_loc (loc, type, arg1); 13560 return pedantic_non_lvalue_loc (loc, tem); 13561 13562 case COMPLEX_EXPR: 13563 if ((TREE_CODE (arg0) == REAL_CST 13564 && TREE_CODE (arg1) == REAL_CST) 13565 || (TREE_CODE (arg0) == INTEGER_CST 13566 && TREE_CODE (arg1) == INTEGER_CST)) 13567 return build_complex (type, arg0, arg1); 13568 if (TREE_CODE (arg0) == REALPART_EXPR 13569 && TREE_CODE (arg1) == IMAGPART_EXPR 13570 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type 13571 && operand_equal_p (TREE_OPERAND (arg0, 0), 13572 TREE_OPERAND (arg1, 0), 0)) 13573 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0), 13574 TREE_OPERAND (arg1, 0)); 13575 return NULL_TREE; 13576 13577 case ASSERT_EXPR: 13578 /* An ASSERT_EXPR should never be passed to fold_binary. */ 13579 gcc_unreachable (); 13580 13581 case VEC_PACK_TRUNC_EXPR: 13582 case VEC_PACK_FIX_TRUNC_EXPR: 13583 { 13584 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i; 13585 tree *elts, vals = NULL_TREE; 13586 13587 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2 13588 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2); 13589 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST) 13590 return NULL_TREE; 13591 13592 elts = XALLOCAVEC (tree, nelts); 13593 if (!vec_cst_ctor_to_array (arg0, elts) 13594 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2)) 13595 return NULL_TREE; 13596 13597 for (i = 0; i < nelts; i++) 13598 { 13599 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR 13600 ? NOP_EXPR : FIX_TRUNC_EXPR, 13601 TREE_TYPE (type), elts[i]); 13602 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i])) 13603 return NULL_TREE; 13604 } 13605 13606 for (i = 0; i < nelts; i++) 13607 vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals); 13608 return build_vector (type, vals); 13609 } 13610 13611 case VEC_WIDEN_MULT_LO_EXPR: 13612 case VEC_WIDEN_MULT_HI_EXPR: 13613 { 13614 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i; 13615 tree *elts, vals = NULL_TREE; 13616 13617 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2 13618 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2); 13619 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST) 13620 return NULL_TREE; 13621 13622 elts = XALLOCAVEC (tree, nelts * 4); 13623 if (!vec_cst_ctor_to_array (arg0, elts) 13624 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2)) 13625 return NULL_TREE; 13626 13627 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_WIDEN_MULT_LO_EXPR)) 13628 elts += nelts; 13629 13630 for (i = 0; i < nelts; i++) 13631 { 13632 elts[i] = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[i]); 13633 elts[i + nelts * 2] 13634 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), 13635 elts[i + nelts * 2]); 13636 if (elts[i] == NULL_TREE || elts[i + nelts * 2] == NULL_TREE) 13637 return NULL_TREE; 13638 elts[i] = const_binop (MULT_EXPR, elts[i], elts[i + nelts * 2]); 13639 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i])) 13640 return NULL_TREE; 13641 } 13642 13643 for (i = 0; i < nelts; i++) 13644 vals = tree_cons (NULL_TREE, elts[nelts - i - 1], vals); 13645 return build_vector (type, vals); 13646 } 13647 13648 default: 13649 return NULL_TREE; 13650 } /* switch (code) */ 13651 } 13652 13653 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is 13654 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees 13655 of GOTO_EXPR. */ 13656 13657 static tree 13658 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) 13659 { 13660 switch (TREE_CODE (*tp)) 13661 { 13662 case LABEL_EXPR: 13663 return *tp; 13664 13665 case GOTO_EXPR: 13666 *walk_subtrees = 0; 13667 13668 /* ... fall through ... */ 13669 13670 default: 13671 return NULL_TREE; 13672 } 13673 } 13674 13675 /* Return whether the sub-tree ST contains a label which is accessible from 13676 outside the sub-tree. */ 13677 13678 static bool 13679 contains_label_p (tree st) 13680 { 13681 return 13682 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE); 13683 } 13684 13685 /* Fold a ternary expression of code CODE and type TYPE with operands 13686 OP0, OP1, and OP2. Return the folded expression if folding is 13687 successful. Otherwise, return NULL_TREE. */ 13688 13689 tree 13690 fold_ternary_loc (location_t loc, enum tree_code code, tree type, 13691 tree op0, tree op1, tree op2) 13692 { 13693 tree tem; 13694 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE; 13695 enum tree_code_class kind = TREE_CODE_CLASS (code); 13696 13697 gcc_assert (IS_EXPR_CODE_CLASS (kind) 13698 && TREE_CODE_LENGTH (code) == 3); 13699 13700 /* Strip any conversions that don't change the mode. This is safe 13701 for every expression, except for a comparison expression because 13702 its signedness is derived from its operands. So, in the latter 13703 case, only strip conversions that don't change the signedness. 13704 13705 Note that this is done as an internal manipulation within the 13706 constant folder, in order to find the simplest representation of 13707 the arguments so that their form can be studied. In any cases, 13708 the appropriate type conversions should be put back in the tree 13709 that will get out of the constant folder. */ 13710 if (op0) 13711 { 13712 arg0 = op0; 13713 STRIP_NOPS (arg0); 13714 } 13715 13716 if (op1) 13717 { 13718 arg1 = op1; 13719 STRIP_NOPS (arg1); 13720 } 13721 13722 if (op2) 13723 { 13724 arg2 = op2; 13725 STRIP_NOPS (arg2); 13726 } 13727 13728 switch (code) 13729 { 13730 case COMPONENT_REF: 13731 if (TREE_CODE (arg0) == CONSTRUCTOR 13732 && ! type_contains_placeholder_p (TREE_TYPE (arg0))) 13733 { 13734 unsigned HOST_WIDE_INT idx; 13735 tree field, value; 13736 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value) 13737 if (field == arg1) 13738 return value; 13739 } 13740 return NULL_TREE; 13741 13742 case COND_EXPR: 13743 /* Pedantic ANSI C says that a conditional expression is never an lvalue, 13744 so all simple results must be passed through pedantic_non_lvalue. */ 13745 if (TREE_CODE (arg0) == INTEGER_CST) 13746 { 13747 tree unused_op = integer_zerop (arg0) ? op1 : op2; 13748 tem = integer_zerop (arg0) ? op2 : op1; 13749 /* Only optimize constant conditions when the selected branch 13750 has the same type as the COND_EXPR. This avoids optimizing 13751 away "c ? x : throw", where the throw has a void type. 13752 Avoid throwing away that operand which contains label. */ 13753 if ((!TREE_SIDE_EFFECTS (unused_op) 13754 || !contains_label_p (unused_op)) 13755 && (! VOID_TYPE_P (TREE_TYPE (tem)) 13756 || VOID_TYPE_P (type))) 13757 return pedantic_non_lvalue_loc (loc, tem); 13758 return NULL_TREE; 13759 } 13760 if (operand_equal_p (arg1, op2, 0)) 13761 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0); 13762 13763 /* If we have A op B ? A : C, we may be able to convert this to a 13764 simpler expression, depending on the operation and the values 13765 of B and C. Signed zeros prevent all of these transformations, 13766 for reasons given above each one. 13767 13768 Also try swapping the arguments and inverting the conditional. */ 13769 if (COMPARISON_CLASS_P (arg0) 13770 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), 13771 arg1, TREE_OPERAND (arg0, 1)) 13772 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1)))) 13773 { 13774 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2); 13775 if (tem) 13776 return tem; 13777 } 13778 13779 if (COMPARISON_CLASS_P (arg0) 13780 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), 13781 op2, 13782 TREE_OPERAND (arg0, 1)) 13783 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2)))) 13784 { 13785 location_t loc0 = expr_location_or (arg0, loc); 13786 tem = fold_truth_not_expr (loc0, arg0); 13787 if (tem && COMPARISON_CLASS_P (tem)) 13788 { 13789 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1); 13790 if (tem) 13791 return tem; 13792 } 13793 } 13794 13795 /* If the second operand is simpler than the third, swap them 13796 since that produces better jump optimization results. */ 13797 if (truth_value_p (TREE_CODE (arg0)) 13798 && tree_swap_operands_p (op1, op2, false)) 13799 { 13800 location_t loc0 = expr_location_or (arg0, loc); 13801 /* See if this can be inverted. If it can't, possibly because 13802 it was a floating-point inequality comparison, don't do 13803 anything. */ 13804 tem = fold_truth_not_expr (loc0, arg0); 13805 if (tem) 13806 return fold_build3_loc (loc, code, type, tem, op2, op1); 13807 } 13808 13809 /* Convert A ? 1 : 0 to simply A. */ 13810 if (integer_onep (op1) 13811 && integer_zerop (op2) 13812 /* If we try to convert OP0 to our type, the 13813 call to fold will try to move the conversion inside 13814 a COND, which will recurse. In that case, the COND_EXPR 13815 is probably the best choice, so leave it alone. */ 13816 && type == TREE_TYPE (arg0)) 13817 return pedantic_non_lvalue_loc (loc, arg0); 13818 13819 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR 13820 over COND_EXPR in cases such as floating point comparisons. */ 13821 if (integer_zerop (op1) 13822 && integer_onep (op2) 13823 && truth_value_p (TREE_CODE (arg0))) 13824 return pedantic_non_lvalue_loc (loc, 13825 fold_convert_loc (loc, type, 13826 invert_truthvalue_loc (loc, 13827 arg0))); 13828 13829 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */ 13830 if (TREE_CODE (arg0) == LT_EXPR 13831 && integer_zerop (TREE_OPERAND (arg0, 1)) 13832 && integer_zerop (op2) 13833 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1))) 13834 { 13835 /* sign_bit_p only checks ARG1 bits within A's precision. 13836 If <sign bit of A> has wider type than A, bits outside 13837 of A's precision in <sign bit of A> need to be checked. 13838 If they are all 0, this optimization needs to be done 13839 in unsigned A's type, if they are all 1 in signed A's type, 13840 otherwise this can't be done. */ 13841 if (TYPE_PRECISION (TREE_TYPE (tem)) 13842 < TYPE_PRECISION (TREE_TYPE (arg1)) 13843 && TYPE_PRECISION (TREE_TYPE (tem)) 13844 < TYPE_PRECISION (type)) 13845 { 13846 unsigned HOST_WIDE_INT mask_lo; 13847 HOST_WIDE_INT mask_hi; 13848 int inner_width, outer_width; 13849 tree tem_type; 13850 13851 inner_width = TYPE_PRECISION (TREE_TYPE (tem)); 13852 outer_width = TYPE_PRECISION (TREE_TYPE (arg1)); 13853 if (outer_width > TYPE_PRECISION (type)) 13854 outer_width = TYPE_PRECISION (type); 13855 13856 if (outer_width > HOST_BITS_PER_WIDE_INT) 13857 { 13858 mask_hi = ((unsigned HOST_WIDE_INT) -1 13859 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width)); 13860 mask_lo = -1; 13861 } 13862 else 13863 { 13864 mask_hi = 0; 13865 mask_lo = ((unsigned HOST_WIDE_INT) -1 13866 >> (HOST_BITS_PER_WIDE_INT - outer_width)); 13867 } 13868 if (inner_width > HOST_BITS_PER_WIDE_INT) 13869 { 13870 mask_hi &= ~((unsigned HOST_WIDE_INT) -1 13871 >> (HOST_BITS_PER_WIDE_INT - inner_width)); 13872 mask_lo = 0; 13873 } 13874 else 13875 mask_lo &= ~((unsigned HOST_WIDE_INT) -1 13876 >> (HOST_BITS_PER_WIDE_INT - inner_width)); 13877 13878 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi 13879 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo) 13880 { 13881 tem_type = signed_type_for (TREE_TYPE (tem)); 13882 tem = fold_convert_loc (loc, tem_type, tem); 13883 } 13884 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0 13885 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0) 13886 { 13887 tem_type = unsigned_type_for (TREE_TYPE (tem)); 13888 tem = fold_convert_loc (loc, tem_type, tem); 13889 } 13890 else 13891 tem = NULL; 13892 } 13893 13894 if (tem) 13895 return 13896 fold_convert_loc (loc, type, 13897 fold_build2_loc (loc, BIT_AND_EXPR, 13898 TREE_TYPE (tem), tem, 13899 fold_convert_loc (loc, 13900 TREE_TYPE (tem), 13901 arg1))); 13902 } 13903 13904 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was 13905 already handled above. */ 13906 if (TREE_CODE (arg0) == BIT_AND_EXPR 13907 && integer_onep (TREE_OPERAND (arg0, 1)) 13908 && integer_zerop (op2) 13909 && integer_pow2p (arg1)) 13910 { 13911 tree tem = TREE_OPERAND (arg0, 0); 13912 STRIP_NOPS (tem); 13913 if (TREE_CODE (tem) == RSHIFT_EXPR 13914 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST 13915 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) == 13916 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))) 13917 return fold_build2_loc (loc, BIT_AND_EXPR, type, 13918 TREE_OPERAND (tem, 0), arg1); 13919 } 13920 13921 /* A & N ? N : 0 is simply A & N if N is a power of two. This 13922 is probably obsolete because the first operand should be a 13923 truth value (that's why we have the two cases above), but let's 13924 leave it in until we can confirm this for all front-ends. */ 13925 if (integer_zerop (op2) 13926 && TREE_CODE (arg0) == NE_EXPR 13927 && integer_zerop (TREE_OPERAND (arg0, 1)) 13928 && integer_pow2p (arg1) 13929 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR 13930 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1), 13931 arg1, OEP_ONLY_CONST)) 13932 return pedantic_non_lvalue_loc (loc, 13933 fold_convert_loc (loc, type, 13934 TREE_OPERAND (arg0, 0))); 13935 13936 /* Convert A ? B : 0 into A && B if A and B are truth values. */ 13937 if (integer_zerop (op2) 13938 && truth_value_p (TREE_CODE (arg0)) 13939 && truth_value_p (TREE_CODE (arg1))) 13940 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type, 13941 fold_convert_loc (loc, type, arg0), 13942 arg1); 13943 13944 /* Convert A ? B : 1 into !A || B if A and B are truth values. */ 13945 if (integer_onep (op2) 13946 && truth_value_p (TREE_CODE (arg0)) 13947 && truth_value_p (TREE_CODE (arg1))) 13948 { 13949 location_t loc0 = expr_location_or (arg0, loc); 13950 /* Only perform transformation if ARG0 is easily inverted. */ 13951 tem = fold_truth_not_expr (loc0, arg0); 13952 if (tem) 13953 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type, 13954 fold_convert_loc (loc, type, tem), 13955 arg1); 13956 } 13957 13958 /* Convert A ? 0 : B into !A && B if A and B are truth values. */ 13959 if (integer_zerop (arg1) 13960 && truth_value_p (TREE_CODE (arg0)) 13961 && truth_value_p (TREE_CODE (op2))) 13962 { 13963 location_t loc0 = expr_location_or (arg0, loc); 13964 /* Only perform transformation if ARG0 is easily inverted. */ 13965 tem = fold_truth_not_expr (loc0, arg0); 13966 if (tem) 13967 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type, 13968 fold_convert_loc (loc, type, tem), 13969 op2); 13970 } 13971 13972 /* Convert A ? 1 : B into A || B if A and B are truth values. */ 13973 if (integer_onep (arg1) 13974 && truth_value_p (TREE_CODE (arg0)) 13975 && truth_value_p (TREE_CODE (op2))) 13976 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type, 13977 fold_convert_loc (loc, type, arg0), 13978 op2); 13979 13980 return NULL_TREE; 13981 13982 case CALL_EXPR: 13983 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses 13984 of fold_ternary on them. */ 13985 gcc_unreachable (); 13986 13987 case BIT_FIELD_REF: 13988 if ((TREE_CODE (arg0) == VECTOR_CST 13989 || TREE_CODE (arg0) == CONSTRUCTOR) 13990 && type == TREE_TYPE (TREE_TYPE (arg0))) 13991 { 13992 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1); 13993 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1); 13994 13995 if (width != 0 13996 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1 13997 && (idx % width) == 0 13998 && (idx = idx / width) 13999 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))) 14000 { 14001 if (TREE_CODE (arg0) == VECTOR_CST) 14002 { 14003 tree elements = TREE_VECTOR_CST_ELTS (arg0); 14004 while (idx-- > 0 && elements) 14005 elements = TREE_CHAIN (elements); 14006 if (elements) 14007 return TREE_VALUE (elements); 14008 } 14009 else if (idx < CONSTRUCTOR_NELTS (arg0)) 14010 return CONSTRUCTOR_ELT (arg0, idx)->value; 14011 return build_zero_cst (type); 14012 } 14013 } 14014 14015 /* A bit-field-ref that referenced the full argument can be stripped. */ 14016 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)) 14017 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1) 14018 && integer_zerop (op2)) 14019 return fold_convert_loc (loc, type, arg0); 14020 14021 return NULL_TREE; 14022 14023 case FMA_EXPR: 14024 /* For integers we can decompose the FMA if possible. */ 14025 if (TREE_CODE (arg0) == INTEGER_CST 14026 && TREE_CODE (arg1) == INTEGER_CST) 14027 return fold_build2_loc (loc, PLUS_EXPR, type, 14028 const_binop (MULT_EXPR, arg0, arg1), arg2); 14029 if (integer_zerop (arg2)) 14030 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1); 14031 14032 return fold_fma (loc, type, arg0, arg1, arg2); 14033 14034 case VEC_PERM_EXPR: 14035 if (TREE_CODE (arg2) == VECTOR_CST) 14036 { 14037 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i; 14038 unsigned char *sel = XALLOCAVEC (unsigned char, nelts); 14039 tree t; 14040 bool need_mask_canon = false; 14041 14042 gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))); 14043 for (i = 0, t = TREE_VECTOR_CST_ELTS (arg2); 14044 i < nelts && t; i++, t = TREE_CHAIN (t)) 14045 { 14046 if (TREE_CODE (TREE_VALUE (t)) != INTEGER_CST) 14047 return NULL_TREE; 14048 14049 sel[i] = TREE_INT_CST_LOW (TREE_VALUE (t)) & (2 * nelts - 1); 14050 if (TREE_INT_CST_HIGH (TREE_VALUE (t)) 14051 || ((unsigned HOST_WIDE_INT) 14052 TREE_INT_CST_LOW (TREE_VALUE (t)) != sel[i])) 14053 need_mask_canon = true; 14054 } 14055 if (t) 14056 return NULL_TREE; 14057 for (; i < nelts; i++) 14058 sel[i] = 0; 14059 14060 if ((TREE_CODE (arg0) == VECTOR_CST 14061 || TREE_CODE (arg0) == CONSTRUCTOR) 14062 && (TREE_CODE (arg1) == VECTOR_CST 14063 || TREE_CODE (arg1) == CONSTRUCTOR)) 14064 { 14065 t = fold_vec_perm (type, arg0, arg1, sel); 14066 if (t != NULL_TREE) 14067 return t; 14068 } 14069 14070 if (need_mask_canon && arg2 == op2) 14071 { 14072 tree list = NULL_TREE, eltype = TREE_TYPE (TREE_TYPE (arg2)); 14073 for (i = 0; i < nelts; i++) 14074 list = tree_cons (NULL_TREE, 14075 build_int_cst (eltype, sel[nelts - i - 1]), 14076 list); 14077 t = build_vector (TREE_TYPE (arg2), list); 14078 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, t); 14079 } 14080 } 14081 return NULL_TREE; 14082 14083 default: 14084 return NULL_TREE; 14085 } /* switch (code) */ 14086 } 14087 14088 /* Perform constant folding and related simplification of EXPR. 14089 The related simplifications include x*1 => x, x*0 => 0, etc., 14090 and application of the associative law. 14091 NOP_EXPR conversions may be removed freely (as long as we 14092 are careful not to change the type of the overall expression). 14093 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR, 14094 but we can constant-fold them if they have constant operands. */ 14095 14096 #ifdef ENABLE_FOLD_CHECKING 14097 # define fold(x) fold_1 (x) 14098 static tree fold_1 (tree); 14099 static 14100 #endif 14101 tree 14102 fold (tree expr) 14103 { 14104 const tree t = expr; 14105 enum tree_code code = TREE_CODE (t); 14106 enum tree_code_class kind = TREE_CODE_CLASS (code); 14107 tree tem; 14108 location_t loc = EXPR_LOCATION (expr); 14109 14110 /* Return right away if a constant. */ 14111 if (kind == tcc_constant) 14112 return t; 14113 14114 /* CALL_EXPR-like objects with variable numbers of operands are 14115 treated specially. */ 14116 if (kind == tcc_vl_exp) 14117 { 14118 if (code == CALL_EXPR) 14119 { 14120 tem = fold_call_expr (loc, expr, false); 14121 return tem ? tem : expr; 14122 } 14123 return expr; 14124 } 14125 14126 if (IS_EXPR_CODE_CLASS (kind)) 14127 { 14128 tree type = TREE_TYPE (t); 14129 tree op0, op1, op2; 14130 14131 switch (TREE_CODE_LENGTH (code)) 14132 { 14133 case 1: 14134 op0 = TREE_OPERAND (t, 0); 14135 tem = fold_unary_loc (loc, code, type, op0); 14136 return tem ? tem : expr; 14137 case 2: 14138 op0 = TREE_OPERAND (t, 0); 14139 op1 = TREE_OPERAND (t, 1); 14140 tem = fold_binary_loc (loc, code, type, op0, op1); 14141 return tem ? tem : expr; 14142 case 3: 14143 op0 = TREE_OPERAND (t, 0); 14144 op1 = TREE_OPERAND (t, 1); 14145 op2 = TREE_OPERAND (t, 2); 14146 tem = fold_ternary_loc (loc, code, type, op0, op1, op2); 14147 return tem ? tem : expr; 14148 default: 14149 break; 14150 } 14151 } 14152 14153 switch (code) 14154 { 14155 case ARRAY_REF: 14156 { 14157 tree op0 = TREE_OPERAND (t, 0); 14158 tree op1 = TREE_OPERAND (t, 1); 14159 14160 if (TREE_CODE (op1) == INTEGER_CST 14161 && TREE_CODE (op0) == CONSTRUCTOR 14162 && ! type_contains_placeholder_p (TREE_TYPE (op0))) 14163 { 14164 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0); 14165 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts); 14166 unsigned HOST_WIDE_INT begin = 0; 14167 14168 /* Find a matching index by means of a binary search. */ 14169 while (begin != end) 14170 { 14171 unsigned HOST_WIDE_INT middle = (begin + end) / 2; 14172 tree index = VEC_index (constructor_elt, elts, middle)->index; 14173 14174 if (TREE_CODE (index) == INTEGER_CST 14175 && tree_int_cst_lt (index, op1)) 14176 begin = middle + 1; 14177 else if (TREE_CODE (index) == INTEGER_CST 14178 && tree_int_cst_lt (op1, index)) 14179 end = middle; 14180 else if (TREE_CODE (index) == RANGE_EXPR 14181 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1)) 14182 begin = middle + 1; 14183 else if (TREE_CODE (index) == RANGE_EXPR 14184 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0))) 14185 end = middle; 14186 else 14187 return VEC_index (constructor_elt, elts, middle)->value; 14188 } 14189 } 14190 14191 return t; 14192 } 14193 14194 case CONST_DECL: 14195 return fold (DECL_INITIAL (t)); 14196 14197 default: 14198 return t; 14199 } /* switch (code) */ 14200 } 14201 14202 #ifdef ENABLE_FOLD_CHECKING 14203 #undef fold 14204 14205 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t); 14206 static void fold_check_failed (const_tree, const_tree); 14207 void print_fold_checksum (const_tree); 14208 14209 /* When --enable-checking=fold, compute a digest of expr before 14210 and after actual fold call to see if fold did not accidentally 14211 change original expr. */ 14212 14213 tree 14214 fold (tree expr) 14215 { 14216 tree ret; 14217 struct md5_ctx ctx; 14218 unsigned char checksum_before[16], checksum_after[16]; 14219 htab_t ht; 14220 14221 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); 14222 md5_init_ctx (&ctx); 14223 fold_checksum_tree (expr, &ctx, ht); 14224 md5_finish_ctx (&ctx, checksum_before); 14225 htab_empty (ht); 14226 14227 ret = fold_1 (expr); 14228 14229 md5_init_ctx (&ctx); 14230 fold_checksum_tree (expr, &ctx, ht); 14231 md5_finish_ctx (&ctx, checksum_after); 14232 htab_delete (ht); 14233 14234 if (memcmp (checksum_before, checksum_after, 16)) 14235 fold_check_failed (expr, ret); 14236 14237 return ret; 14238 } 14239 14240 void 14241 print_fold_checksum (const_tree expr) 14242 { 14243 struct md5_ctx ctx; 14244 unsigned char checksum[16], cnt; 14245 htab_t ht; 14246 14247 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); 14248 md5_init_ctx (&ctx); 14249 fold_checksum_tree (expr, &ctx, ht); 14250 md5_finish_ctx (&ctx, checksum); 14251 htab_delete (ht); 14252 for (cnt = 0; cnt < 16; ++cnt) 14253 fprintf (stderr, "%02x", checksum[cnt]); 14254 putc ('\n', stderr); 14255 } 14256 14257 static void 14258 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED) 14259 { 14260 internal_error ("fold check: original tree changed by fold"); 14261 } 14262 14263 static void 14264 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht) 14265 { 14266 void **slot; 14267 enum tree_code code; 14268 union tree_node buf; 14269 int i, len; 14270 14271 recursive_label: 14272 if (expr == NULL) 14273 return; 14274 slot = (void **) htab_find_slot (ht, expr, INSERT); 14275 if (*slot != NULL) 14276 return; 14277 *slot = CONST_CAST_TREE (expr); 14278 code = TREE_CODE (expr); 14279 if (TREE_CODE_CLASS (code) == tcc_declaration 14280 && DECL_ASSEMBLER_NAME_SET_P (expr)) 14281 { 14282 /* Allow DECL_ASSEMBLER_NAME to be modified. */ 14283 memcpy ((char *) &buf, expr, tree_size (expr)); 14284 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL); 14285 expr = (tree) &buf; 14286 } 14287 else if (TREE_CODE_CLASS (code) == tcc_type 14288 && (TYPE_POINTER_TO (expr) 14289 || TYPE_REFERENCE_TO (expr) 14290 || TYPE_CACHED_VALUES_P (expr) 14291 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) 14292 || TYPE_NEXT_VARIANT (expr))) 14293 { 14294 /* Allow these fields to be modified. */ 14295 tree tmp; 14296 memcpy ((char *) &buf, expr, tree_size (expr)); 14297 expr = tmp = (tree) &buf; 14298 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0; 14299 TYPE_POINTER_TO (tmp) = NULL; 14300 TYPE_REFERENCE_TO (tmp) = NULL; 14301 TYPE_NEXT_VARIANT (tmp) = NULL; 14302 if (TYPE_CACHED_VALUES_P (tmp)) 14303 { 14304 TYPE_CACHED_VALUES_P (tmp) = 0; 14305 TYPE_CACHED_VALUES (tmp) = NULL; 14306 } 14307 } 14308 md5_process_bytes (expr, tree_size (expr), ctx); 14309 if (CODE_CONTAINS_STRUCT (code, TS_TYPED)) 14310 fold_checksum_tree (TREE_TYPE (expr), ctx, ht); 14311 if (TREE_CODE_CLASS (code) != tcc_type 14312 && TREE_CODE_CLASS (code) != tcc_declaration 14313 && code != TREE_LIST 14314 && code != SSA_NAME 14315 && CODE_CONTAINS_STRUCT (code, TS_COMMON)) 14316 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht); 14317 switch (TREE_CODE_CLASS (code)) 14318 { 14319 case tcc_constant: 14320 switch (code) 14321 { 14322 case STRING_CST: 14323 md5_process_bytes (TREE_STRING_POINTER (expr), 14324 TREE_STRING_LENGTH (expr), ctx); 14325 break; 14326 case COMPLEX_CST: 14327 fold_checksum_tree (TREE_REALPART (expr), ctx, ht); 14328 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht); 14329 break; 14330 case VECTOR_CST: 14331 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht); 14332 break; 14333 default: 14334 break; 14335 } 14336 break; 14337 case tcc_exceptional: 14338 switch (code) 14339 { 14340 case TREE_LIST: 14341 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht); 14342 fold_checksum_tree (TREE_VALUE (expr), ctx, ht); 14343 expr = TREE_CHAIN (expr); 14344 goto recursive_label; 14345 break; 14346 case TREE_VEC: 14347 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i) 14348 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht); 14349 break; 14350 default: 14351 break; 14352 } 14353 break; 14354 case tcc_expression: 14355 case tcc_reference: 14356 case tcc_comparison: 14357 case tcc_unary: 14358 case tcc_binary: 14359 case tcc_statement: 14360 case tcc_vl_exp: 14361 len = TREE_OPERAND_LENGTH (expr); 14362 for (i = 0; i < len; ++i) 14363 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht); 14364 break; 14365 case tcc_declaration: 14366 fold_checksum_tree (DECL_NAME (expr), ctx, ht); 14367 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht); 14368 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON)) 14369 { 14370 fold_checksum_tree (DECL_SIZE (expr), ctx, ht); 14371 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht); 14372 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht); 14373 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht); 14374 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht); 14375 } 14376 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS)) 14377 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht); 14378 14379 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON)) 14380 { 14381 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht); 14382 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht); 14383 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht); 14384 } 14385 break; 14386 case tcc_type: 14387 if (TREE_CODE (expr) == ENUMERAL_TYPE) 14388 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht); 14389 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht); 14390 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht); 14391 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht); 14392 fold_checksum_tree (TYPE_NAME (expr), ctx, ht); 14393 if (INTEGRAL_TYPE_P (expr) 14394 || SCALAR_FLOAT_TYPE_P (expr)) 14395 { 14396 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht); 14397 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht); 14398 } 14399 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht); 14400 if (TREE_CODE (expr) == RECORD_TYPE 14401 || TREE_CODE (expr) == UNION_TYPE 14402 || TREE_CODE (expr) == QUAL_UNION_TYPE) 14403 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht); 14404 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht); 14405 break; 14406 default: 14407 break; 14408 } 14409 } 14410 14411 /* Helper function for outputting the checksum of a tree T. When 14412 debugging with gdb, you can "define mynext" to be "next" followed 14413 by "call debug_fold_checksum (op0)", then just trace down till the 14414 outputs differ. */ 14415 14416 DEBUG_FUNCTION void 14417 debug_fold_checksum (const_tree t) 14418 { 14419 int i; 14420 unsigned char checksum[16]; 14421 struct md5_ctx ctx; 14422 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); 14423 14424 md5_init_ctx (&ctx); 14425 fold_checksum_tree (t, &ctx, ht); 14426 md5_finish_ctx (&ctx, checksum); 14427 htab_empty (ht); 14428 14429 for (i = 0; i < 16; i++) 14430 fprintf (stderr, "%d ", checksum[i]); 14431 14432 fprintf (stderr, "\n"); 14433 } 14434 14435 #endif 14436 14437 /* Fold a unary tree expression with code CODE of type TYPE with an 14438 operand OP0. LOC is the location of the resulting expression. 14439 Return a folded expression if successful. Otherwise, return a tree 14440 expression with code CODE of type TYPE with an operand OP0. */ 14441 14442 tree 14443 fold_build1_stat_loc (location_t loc, 14444 enum tree_code code, tree type, tree op0 MEM_STAT_DECL) 14445 { 14446 tree tem; 14447 #ifdef ENABLE_FOLD_CHECKING 14448 unsigned char checksum_before[16], checksum_after[16]; 14449 struct md5_ctx ctx; 14450 htab_t ht; 14451 14452 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); 14453 md5_init_ctx (&ctx); 14454 fold_checksum_tree (op0, &ctx, ht); 14455 md5_finish_ctx (&ctx, checksum_before); 14456 htab_empty (ht); 14457 #endif 14458 14459 tem = fold_unary_loc (loc, code, type, op0); 14460 if (!tem) 14461 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT); 14462 14463 #ifdef ENABLE_FOLD_CHECKING 14464 md5_init_ctx (&ctx); 14465 fold_checksum_tree (op0, &ctx, ht); 14466 md5_finish_ctx (&ctx, checksum_after); 14467 htab_delete (ht); 14468 14469 if (memcmp (checksum_before, checksum_after, 16)) 14470 fold_check_failed (op0, tem); 14471 #endif 14472 return tem; 14473 } 14474 14475 /* Fold a binary tree expression with code CODE of type TYPE with 14476 operands OP0 and OP1. LOC is the location of the resulting 14477 expression. Return a folded expression if successful. Otherwise, 14478 return a tree expression with code CODE of type TYPE with operands 14479 OP0 and OP1. */ 14480 14481 tree 14482 fold_build2_stat_loc (location_t loc, 14483 enum tree_code code, tree type, tree op0, tree op1 14484 MEM_STAT_DECL) 14485 { 14486 tree tem; 14487 #ifdef ENABLE_FOLD_CHECKING 14488 unsigned char checksum_before_op0[16], 14489 checksum_before_op1[16], 14490 checksum_after_op0[16], 14491 checksum_after_op1[16]; 14492 struct md5_ctx ctx; 14493 htab_t ht; 14494 14495 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); 14496 md5_init_ctx (&ctx); 14497 fold_checksum_tree (op0, &ctx, ht); 14498 md5_finish_ctx (&ctx, checksum_before_op0); 14499 htab_empty (ht); 14500 14501 md5_init_ctx (&ctx); 14502 fold_checksum_tree (op1, &ctx, ht); 14503 md5_finish_ctx (&ctx, checksum_before_op1); 14504 htab_empty (ht); 14505 #endif 14506 14507 tem = fold_binary_loc (loc, code, type, op0, op1); 14508 if (!tem) 14509 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT); 14510 14511 #ifdef ENABLE_FOLD_CHECKING 14512 md5_init_ctx (&ctx); 14513 fold_checksum_tree (op0, &ctx, ht); 14514 md5_finish_ctx (&ctx, checksum_after_op0); 14515 htab_empty (ht); 14516 14517 if (memcmp (checksum_before_op0, checksum_after_op0, 16)) 14518 fold_check_failed (op0, tem); 14519 14520 md5_init_ctx (&ctx); 14521 fold_checksum_tree (op1, &ctx, ht); 14522 md5_finish_ctx (&ctx, checksum_after_op1); 14523 htab_delete (ht); 14524 14525 if (memcmp (checksum_before_op1, checksum_after_op1, 16)) 14526 fold_check_failed (op1, tem); 14527 #endif 14528 return tem; 14529 } 14530 14531 /* Fold a ternary tree expression with code CODE of type TYPE with 14532 operands OP0, OP1, and OP2. Return a folded expression if 14533 successful. Otherwise, return a tree expression with code CODE of 14534 type TYPE with operands OP0, OP1, and OP2. */ 14535 14536 tree 14537 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type, 14538 tree op0, tree op1, tree op2 MEM_STAT_DECL) 14539 { 14540 tree tem; 14541 #ifdef ENABLE_FOLD_CHECKING 14542 unsigned char checksum_before_op0[16], 14543 checksum_before_op1[16], 14544 checksum_before_op2[16], 14545 checksum_after_op0[16], 14546 checksum_after_op1[16], 14547 checksum_after_op2[16]; 14548 struct md5_ctx ctx; 14549 htab_t ht; 14550 14551 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); 14552 md5_init_ctx (&ctx); 14553 fold_checksum_tree (op0, &ctx, ht); 14554 md5_finish_ctx (&ctx, checksum_before_op0); 14555 htab_empty (ht); 14556 14557 md5_init_ctx (&ctx); 14558 fold_checksum_tree (op1, &ctx, ht); 14559 md5_finish_ctx (&ctx, checksum_before_op1); 14560 htab_empty (ht); 14561 14562 md5_init_ctx (&ctx); 14563 fold_checksum_tree (op2, &ctx, ht); 14564 md5_finish_ctx (&ctx, checksum_before_op2); 14565 htab_empty (ht); 14566 #endif 14567 14568 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp); 14569 tem = fold_ternary_loc (loc, code, type, op0, op1, op2); 14570 if (!tem) 14571 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT); 14572 14573 #ifdef ENABLE_FOLD_CHECKING 14574 md5_init_ctx (&ctx); 14575 fold_checksum_tree (op0, &ctx, ht); 14576 md5_finish_ctx (&ctx, checksum_after_op0); 14577 htab_empty (ht); 14578 14579 if (memcmp (checksum_before_op0, checksum_after_op0, 16)) 14580 fold_check_failed (op0, tem); 14581 14582 md5_init_ctx (&ctx); 14583 fold_checksum_tree (op1, &ctx, ht); 14584 md5_finish_ctx (&ctx, checksum_after_op1); 14585 htab_empty (ht); 14586 14587 if (memcmp (checksum_before_op1, checksum_after_op1, 16)) 14588 fold_check_failed (op1, tem); 14589 14590 md5_init_ctx (&ctx); 14591 fold_checksum_tree (op2, &ctx, ht); 14592 md5_finish_ctx (&ctx, checksum_after_op2); 14593 htab_delete (ht); 14594 14595 if (memcmp (checksum_before_op2, checksum_after_op2, 16)) 14596 fold_check_failed (op2, tem); 14597 #endif 14598 return tem; 14599 } 14600 14601 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS 14602 arguments in ARGARRAY, and a null static chain. 14603 Return a folded expression if successful. Otherwise, return a CALL_EXPR 14604 of type TYPE from the given operands as constructed by build_call_array. */ 14605 14606 tree 14607 fold_build_call_array_loc (location_t loc, tree type, tree fn, 14608 int nargs, tree *argarray) 14609 { 14610 tree tem; 14611 #ifdef ENABLE_FOLD_CHECKING 14612 unsigned char checksum_before_fn[16], 14613 checksum_before_arglist[16], 14614 checksum_after_fn[16], 14615 checksum_after_arglist[16]; 14616 struct md5_ctx ctx; 14617 htab_t ht; 14618 int i; 14619 14620 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); 14621 md5_init_ctx (&ctx); 14622 fold_checksum_tree (fn, &ctx, ht); 14623 md5_finish_ctx (&ctx, checksum_before_fn); 14624 htab_empty (ht); 14625 14626 md5_init_ctx (&ctx); 14627 for (i = 0; i < nargs; i++) 14628 fold_checksum_tree (argarray[i], &ctx, ht); 14629 md5_finish_ctx (&ctx, checksum_before_arglist); 14630 htab_empty (ht); 14631 #endif 14632 14633 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray); 14634 14635 #ifdef ENABLE_FOLD_CHECKING 14636 md5_init_ctx (&ctx); 14637 fold_checksum_tree (fn, &ctx, ht); 14638 md5_finish_ctx (&ctx, checksum_after_fn); 14639 htab_empty (ht); 14640 14641 if (memcmp (checksum_before_fn, checksum_after_fn, 16)) 14642 fold_check_failed (fn, tem); 14643 14644 md5_init_ctx (&ctx); 14645 for (i = 0; i < nargs; i++) 14646 fold_checksum_tree (argarray[i], &ctx, ht); 14647 md5_finish_ctx (&ctx, checksum_after_arglist); 14648 htab_delete (ht); 14649 14650 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16)) 14651 fold_check_failed (NULL_TREE, tem); 14652 #endif 14653 return tem; 14654 } 14655 14656 /* Perform constant folding and related simplification of initializer 14657 expression EXPR. These behave identically to "fold_buildN" but ignore 14658 potential run-time traps and exceptions that fold must preserve. */ 14659 14660 #define START_FOLD_INIT \ 14661 int saved_signaling_nans = flag_signaling_nans;\ 14662 int saved_trapping_math = flag_trapping_math;\ 14663 int saved_rounding_math = flag_rounding_math;\ 14664 int saved_trapv = flag_trapv;\ 14665 int saved_folding_initializer = folding_initializer;\ 14666 flag_signaling_nans = 0;\ 14667 flag_trapping_math = 0;\ 14668 flag_rounding_math = 0;\ 14669 flag_trapv = 0;\ 14670 folding_initializer = 1; 14671 14672 #define END_FOLD_INIT \ 14673 flag_signaling_nans = saved_signaling_nans;\ 14674 flag_trapping_math = saved_trapping_math;\ 14675 flag_rounding_math = saved_rounding_math;\ 14676 flag_trapv = saved_trapv;\ 14677 folding_initializer = saved_folding_initializer; 14678 14679 tree 14680 fold_build1_initializer_loc (location_t loc, enum tree_code code, 14681 tree type, tree op) 14682 { 14683 tree result; 14684 START_FOLD_INIT; 14685 14686 result = fold_build1_loc (loc, code, type, op); 14687 14688 END_FOLD_INIT; 14689 return result; 14690 } 14691 14692 tree 14693 fold_build2_initializer_loc (location_t loc, enum tree_code code, 14694 tree type, tree op0, tree op1) 14695 { 14696 tree result; 14697 START_FOLD_INIT; 14698 14699 result = fold_build2_loc (loc, code, type, op0, op1); 14700 14701 END_FOLD_INIT; 14702 return result; 14703 } 14704 14705 tree 14706 fold_build3_initializer_loc (location_t loc, enum tree_code code, 14707 tree type, tree op0, tree op1, tree op2) 14708 { 14709 tree result; 14710 START_FOLD_INIT; 14711 14712 result = fold_build3_loc (loc, code, type, op0, op1, op2); 14713 14714 END_FOLD_INIT; 14715 return result; 14716 } 14717 14718 tree 14719 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn, 14720 int nargs, tree *argarray) 14721 { 14722 tree result; 14723 START_FOLD_INIT; 14724 14725 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray); 14726 14727 END_FOLD_INIT; 14728 return result; 14729 } 14730 14731 #undef START_FOLD_INIT 14732 #undef END_FOLD_INIT 14733 14734 /* Determine if first argument is a multiple of second argument. Return 0 if 14735 it is not, or we cannot easily determined it to be. 14736 14737 An example of the sort of thing we care about (at this point; this routine 14738 could surely be made more general, and expanded to do what the *_DIV_EXPR's 14739 fold cases do now) is discovering that 14740 14741 SAVE_EXPR (I) * SAVE_EXPR (J * 8) 14742 14743 is a multiple of 14744 14745 SAVE_EXPR (J * 8) 14746 14747 when we know that the two SAVE_EXPR (J * 8) nodes are the same node. 14748 14749 This code also handles discovering that 14750 14751 SAVE_EXPR (I) * SAVE_EXPR (J * 8) 14752 14753 is a multiple of 8 so we don't have to worry about dealing with a 14754 possible remainder. 14755 14756 Note that we *look* inside a SAVE_EXPR only to determine how it was 14757 calculated; it is not safe for fold to do much of anything else with the 14758 internals of a SAVE_EXPR, since it cannot know when it will be evaluated 14759 at run time. For example, the latter example above *cannot* be implemented 14760 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at 14761 evaluation time of the original SAVE_EXPR is not necessarily the same at 14762 the time the new expression is evaluated. The only optimization of this 14763 sort that would be valid is changing 14764 14765 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8) 14766 14767 divided by 8 to 14768 14769 SAVE_EXPR (I) * SAVE_EXPR (J) 14770 14771 (where the same SAVE_EXPR (J) is used in the original and the 14772 transformed version). */ 14773 14774 int 14775 multiple_of_p (tree type, const_tree top, const_tree bottom) 14776 { 14777 if (operand_equal_p (top, bottom, 0)) 14778 return 1; 14779 14780 if (TREE_CODE (type) != INTEGER_TYPE) 14781 return 0; 14782 14783 switch (TREE_CODE (top)) 14784 { 14785 case BIT_AND_EXPR: 14786 /* Bitwise and provides a power of two multiple. If the mask is 14787 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */ 14788 if (!integer_pow2p (bottom)) 14789 return 0; 14790 /* FALLTHRU */ 14791 14792 case MULT_EXPR: 14793 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom) 14794 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom)); 14795 14796 case PLUS_EXPR: 14797 case MINUS_EXPR: 14798 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom) 14799 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom)); 14800 14801 case LSHIFT_EXPR: 14802 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST) 14803 { 14804 tree op1, t1; 14805 14806 op1 = TREE_OPERAND (top, 1); 14807 /* const_binop may not detect overflow correctly, 14808 so check for it explicitly here. */ 14809 if (TYPE_PRECISION (TREE_TYPE (size_one_node)) 14810 > TREE_INT_CST_LOW (op1) 14811 && TREE_INT_CST_HIGH (op1) == 0 14812 && 0 != (t1 = fold_convert (type, 14813 const_binop (LSHIFT_EXPR, 14814 size_one_node, 14815 op1))) 14816 && !TREE_OVERFLOW (t1)) 14817 return multiple_of_p (type, t1, bottom); 14818 } 14819 return 0; 14820 14821 case NOP_EXPR: 14822 /* Can't handle conversions from non-integral or wider integral type. */ 14823 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE) 14824 || (TYPE_PRECISION (type) 14825 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0))))) 14826 return 0; 14827 14828 /* .. fall through ... */ 14829 14830 case SAVE_EXPR: 14831 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom); 14832 14833 case COND_EXPR: 14834 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom) 14835 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom)); 14836 14837 case INTEGER_CST: 14838 if (TREE_CODE (bottom) != INTEGER_CST 14839 || integer_zerop (bottom) 14840 || (TYPE_UNSIGNED (type) 14841 && (tree_int_cst_sgn (top) < 0 14842 || tree_int_cst_sgn (bottom) < 0))) 14843 return 0; 14844 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR, 14845 top, bottom)); 14846 14847 default: 14848 return 0; 14849 } 14850 } 14851 14852 /* Return true if CODE or TYPE is known to be non-negative. */ 14853 14854 static bool 14855 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type) 14856 { 14857 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type)) 14858 && truth_value_p (code)) 14859 /* Truth values evaluate to 0 or 1, which is nonnegative unless we 14860 have a signed:1 type (where the value is -1 and 0). */ 14861 return true; 14862 return false; 14863 } 14864 14865 /* Return true if (CODE OP0) is known to be non-negative. If the return 14866 value is based on the assumption that signed overflow is undefined, 14867 set *STRICT_OVERFLOW_P to true; otherwise, don't change 14868 *STRICT_OVERFLOW_P. */ 14869 14870 bool 14871 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0, 14872 bool *strict_overflow_p) 14873 { 14874 if (TYPE_UNSIGNED (type)) 14875 return true; 14876 14877 switch (code) 14878 { 14879 case ABS_EXPR: 14880 /* We can't return 1 if flag_wrapv is set because 14881 ABS_EXPR<INT_MIN> = INT_MIN. */ 14882 if (!INTEGRAL_TYPE_P (type)) 14883 return true; 14884 if (TYPE_OVERFLOW_UNDEFINED (type)) 14885 { 14886 *strict_overflow_p = true; 14887 return true; 14888 } 14889 break; 14890 14891 case NON_LVALUE_EXPR: 14892 case FLOAT_EXPR: 14893 case FIX_TRUNC_EXPR: 14894 return tree_expr_nonnegative_warnv_p (op0, 14895 strict_overflow_p); 14896 14897 case NOP_EXPR: 14898 { 14899 tree inner_type = TREE_TYPE (op0); 14900 tree outer_type = type; 14901 14902 if (TREE_CODE (outer_type) == REAL_TYPE) 14903 { 14904 if (TREE_CODE (inner_type) == REAL_TYPE) 14905 return tree_expr_nonnegative_warnv_p (op0, 14906 strict_overflow_p); 14907 if (TREE_CODE (inner_type) == INTEGER_TYPE) 14908 { 14909 if (TYPE_UNSIGNED (inner_type)) 14910 return true; 14911 return tree_expr_nonnegative_warnv_p (op0, 14912 strict_overflow_p); 14913 } 14914 } 14915 else if (TREE_CODE (outer_type) == INTEGER_TYPE) 14916 { 14917 if (TREE_CODE (inner_type) == REAL_TYPE) 14918 return tree_expr_nonnegative_warnv_p (op0, 14919 strict_overflow_p); 14920 if (TREE_CODE (inner_type) == INTEGER_TYPE) 14921 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type) 14922 && TYPE_UNSIGNED (inner_type); 14923 } 14924 } 14925 break; 14926 14927 default: 14928 return tree_simple_nonnegative_warnv_p (code, type); 14929 } 14930 14931 /* We don't know sign of `t', so be conservative and return false. */ 14932 return false; 14933 } 14934 14935 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return 14936 value is based on the assumption that signed overflow is undefined, 14937 set *STRICT_OVERFLOW_P to true; otherwise, don't change 14938 *STRICT_OVERFLOW_P. */ 14939 14940 bool 14941 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0, 14942 tree op1, bool *strict_overflow_p) 14943 { 14944 if (TYPE_UNSIGNED (type)) 14945 return true; 14946 14947 switch (code) 14948 { 14949 case POINTER_PLUS_EXPR: 14950 case PLUS_EXPR: 14951 if (FLOAT_TYPE_P (type)) 14952 return (tree_expr_nonnegative_warnv_p (op0, 14953 strict_overflow_p) 14954 && tree_expr_nonnegative_warnv_p (op1, 14955 strict_overflow_p)); 14956 14957 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are 14958 both unsigned and at least 2 bits shorter than the result. */ 14959 if (TREE_CODE (type) == INTEGER_TYPE 14960 && TREE_CODE (op0) == NOP_EXPR 14961 && TREE_CODE (op1) == NOP_EXPR) 14962 { 14963 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0)); 14964 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0)); 14965 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1) 14966 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2)) 14967 { 14968 unsigned int prec = MAX (TYPE_PRECISION (inner1), 14969 TYPE_PRECISION (inner2)) + 1; 14970 return prec < TYPE_PRECISION (type); 14971 } 14972 } 14973 break; 14974 14975 case MULT_EXPR: 14976 if (FLOAT_TYPE_P (type)) 14977 { 14978 /* x * x for floating point x is always non-negative. */ 14979 if (operand_equal_p (op0, op1, 0)) 14980 return true; 14981 return (tree_expr_nonnegative_warnv_p (op0, 14982 strict_overflow_p) 14983 && tree_expr_nonnegative_warnv_p (op1, 14984 strict_overflow_p)); 14985 } 14986 14987 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are 14988 both unsigned and their total bits is shorter than the result. */ 14989 if (TREE_CODE (type) == INTEGER_TYPE 14990 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST) 14991 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST)) 14992 { 14993 tree inner0 = (TREE_CODE (op0) == NOP_EXPR) 14994 ? TREE_TYPE (TREE_OPERAND (op0, 0)) 14995 : TREE_TYPE (op0); 14996 tree inner1 = (TREE_CODE (op1) == NOP_EXPR) 14997 ? TREE_TYPE (TREE_OPERAND (op1, 0)) 14998 : TREE_TYPE (op1); 14999 15000 bool unsigned0 = TYPE_UNSIGNED (inner0); 15001 bool unsigned1 = TYPE_UNSIGNED (inner1); 15002 15003 if (TREE_CODE (op0) == INTEGER_CST) 15004 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0; 15005 15006 if (TREE_CODE (op1) == INTEGER_CST) 15007 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0; 15008 15009 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0 15010 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1) 15011 { 15012 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST) 15013 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true) 15014 : TYPE_PRECISION (inner0); 15015 15016 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST) 15017 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true) 15018 : TYPE_PRECISION (inner1); 15019 15020 return precision0 + precision1 < TYPE_PRECISION (type); 15021 } 15022 } 15023 return false; 15024 15025 case BIT_AND_EXPR: 15026 case MAX_EXPR: 15027 return (tree_expr_nonnegative_warnv_p (op0, 15028 strict_overflow_p) 15029 || tree_expr_nonnegative_warnv_p (op1, 15030 strict_overflow_p)); 15031 15032 case BIT_IOR_EXPR: 15033 case BIT_XOR_EXPR: 15034 case MIN_EXPR: 15035 case RDIV_EXPR: 15036 case TRUNC_DIV_EXPR: 15037 case CEIL_DIV_EXPR: 15038 case FLOOR_DIV_EXPR: 15039 case ROUND_DIV_EXPR: 15040 return (tree_expr_nonnegative_warnv_p (op0, 15041 strict_overflow_p) 15042 && tree_expr_nonnegative_warnv_p (op1, 15043 strict_overflow_p)); 15044 15045 case TRUNC_MOD_EXPR: 15046 case CEIL_MOD_EXPR: 15047 case FLOOR_MOD_EXPR: 15048 case ROUND_MOD_EXPR: 15049 return tree_expr_nonnegative_warnv_p (op0, 15050 strict_overflow_p); 15051 default: 15052 return tree_simple_nonnegative_warnv_p (code, type); 15053 } 15054 15055 /* We don't know sign of `t', so be conservative and return false. */ 15056 return false; 15057 } 15058 15059 /* Return true if T is known to be non-negative. If the return 15060 value is based on the assumption that signed overflow is undefined, 15061 set *STRICT_OVERFLOW_P to true; otherwise, don't change 15062 *STRICT_OVERFLOW_P. */ 15063 15064 bool 15065 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p) 15066 { 15067 if (TYPE_UNSIGNED (TREE_TYPE (t))) 15068 return true; 15069 15070 switch (TREE_CODE (t)) 15071 { 15072 case INTEGER_CST: 15073 return tree_int_cst_sgn (t) >= 0; 15074 15075 case REAL_CST: 15076 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t)); 15077 15078 case FIXED_CST: 15079 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t)); 15080 15081 case COND_EXPR: 15082 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), 15083 strict_overflow_p) 15084 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2), 15085 strict_overflow_p)); 15086 default: 15087 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), 15088 TREE_TYPE (t)); 15089 } 15090 /* We don't know sign of `t', so be conservative and return false. */ 15091 return false; 15092 } 15093 15094 /* Return true if T is known to be non-negative. If the return 15095 value is based on the assumption that signed overflow is undefined, 15096 set *STRICT_OVERFLOW_P to true; otherwise, don't change 15097 *STRICT_OVERFLOW_P. */ 15098 15099 bool 15100 tree_call_nonnegative_warnv_p (tree type, tree fndecl, 15101 tree arg0, tree arg1, bool *strict_overflow_p) 15102 { 15103 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) 15104 switch (DECL_FUNCTION_CODE (fndecl)) 15105 { 15106 CASE_FLT_FN (BUILT_IN_ACOS): 15107 CASE_FLT_FN (BUILT_IN_ACOSH): 15108 CASE_FLT_FN (BUILT_IN_CABS): 15109 CASE_FLT_FN (BUILT_IN_COSH): 15110 CASE_FLT_FN (BUILT_IN_ERFC): 15111 CASE_FLT_FN (BUILT_IN_EXP): 15112 CASE_FLT_FN (BUILT_IN_EXP10): 15113 CASE_FLT_FN (BUILT_IN_EXP2): 15114 CASE_FLT_FN (BUILT_IN_FABS): 15115 CASE_FLT_FN (BUILT_IN_FDIM): 15116 CASE_FLT_FN (BUILT_IN_HYPOT): 15117 CASE_FLT_FN (BUILT_IN_POW10): 15118 CASE_INT_FN (BUILT_IN_FFS): 15119 CASE_INT_FN (BUILT_IN_PARITY): 15120 CASE_INT_FN (BUILT_IN_POPCOUNT): 15121 case BUILT_IN_BSWAP32: 15122 case BUILT_IN_BSWAP64: 15123 /* Always true. */ 15124 return true; 15125 15126 CASE_FLT_FN (BUILT_IN_SQRT): 15127 /* sqrt(-0.0) is -0.0. */ 15128 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))) 15129 return true; 15130 return tree_expr_nonnegative_warnv_p (arg0, 15131 strict_overflow_p); 15132 15133 CASE_FLT_FN (BUILT_IN_ASINH): 15134 CASE_FLT_FN (BUILT_IN_ATAN): 15135 CASE_FLT_FN (BUILT_IN_ATANH): 15136 CASE_FLT_FN (BUILT_IN_CBRT): 15137 CASE_FLT_FN (BUILT_IN_CEIL): 15138 CASE_FLT_FN (BUILT_IN_ERF): 15139 CASE_FLT_FN (BUILT_IN_EXPM1): 15140 CASE_FLT_FN (BUILT_IN_FLOOR): 15141 CASE_FLT_FN (BUILT_IN_FMOD): 15142 CASE_FLT_FN (BUILT_IN_FREXP): 15143 CASE_FLT_FN (BUILT_IN_ICEIL): 15144 CASE_FLT_FN (BUILT_IN_IFLOOR): 15145 CASE_FLT_FN (BUILT_IN_IRINT): 15146 CASE_FLT_FN (BUILT_IN_IROUND): 15147 CASE_FLT_FN (BUILT_IN_LCEIL): 15148 CASE_FLT_FN (BUILT_IN_LDEXP): 15149 CASE_FLT_FN (BUILT_IN_LFLOOR): 15150 CASE_FLT_FN (BUILT_IN_LLCEIL): 15151 CASE_FLT_FN (BUILT_IN_LLFLOOR): 15152 CASE_FLT_FN (BUILT_IN_LLRINT): 15153 CASE_FLT_FN (BUILT_IN_LLROUND): 15154 CASE_FLT_FN (BUILT_IN_LRINT): 15155 CASE_FLT_FN (BUILT_IN_LROUND): 15156 CASE_FLT_FN (BUILT_IN_MODF): 15157 CASE_FLT_FN (BUILT_IN_NEARBYINT): 15158 CASE_FLT_FN (BUILT_IN_RINT): 15159 CASE_FLT_FN (BUILT_IN_ROUND): 15160 CASE_FLT_FN (BUILT_IN_SCALB): 15161 CASE_FLT_FN (BUILT_IN_SCALBLN): 15162 CASE_FLT_FN (BUILT_IN_SCALBN): 15163 CASE_FLT_FN (BUILT_IN_SIGNBIT): 15164 CASE_FLT_FN (BUILT_IN_SIGNIFICAND): 15165 CASE_FLT_FN (BUILT_IN_SINH): 15166 CASE_FLT_FN (BUILT_IN_TANH): 15167 CASE_FLT_FN (BUILT_IN_TRUNC): 15168 /* True if the 1st argument is nonnegative. */ 15169 return tree_expr_nonnegative_warnv_p (arg0, 15170 strict_overflow_p); 15171 15172 CASE_FLT_FN (BUILT_IN_FMAX): 15173 /* True if the 1st OR 2nd arguments are nonnegative. */ 15174 return (tree_expr_nonnegative_warnv_p (arg0, 15175 strict_overflow_p) 15176 || (tree_expr_nonnegative_warnv_p (arg1, 15177 strict_overflow_p))); 15178 15179 CASE_FLT_FN (BUILT_IN_FMIN): 15180 /* True if the 1st AND 2nd arguments are nonnegative. */ 15181 return (tree_expr_nonnegative_warnv_p (arg0, 15182 strict_overflow_p) 15183 && (tree_expr_nonnegative_warnv_p (arg1, 15184 strict_overflow_p))); 15185 15186 CASE_FLT_FN (BUILT_IN_COPYSIGN): 15187 /* True if the 2nd argument is nonnegative. */ 15188 return tree_expr_nonnegative_warnv_p (arg1, 15189 strict_overflow_p); 15190 15191 CASE_FLT_FN (BUILT_IN_POWI): 15192 /* True if the 1st argument is nonnegative or the second 15193 argument is an even integer. */ 15194 if (TREE_CODE (arg1) == INTEGER_CST 15195 && (TREE_INT_CST_LOW (arg1) & 1) == 0) 15196 return true; 15197 return tree_expr_nonnegative_warnv_p (arg0, 15198 strict_overflow_p); 15199 15200 CASE_FLT_FN (BUILT_IN_POW): 15201 /* True if the 1st argument is nonnegative or the second 15202 argument is an even integer valued real. */ 15203 if (TREE_CODE (arg1) == REAL_CST) 15204 { 15205 REAL_VALUE_TYPE c; 15206 HOST_WIDE_INT n; 15207 15208 c = TREE_REAL_CST (arg1); 15209 n = real_to_integer (&c); 15210 if ((n & 1) == 0) 15211 { 15212 REAL_VALUE_TYPE cint; 15213 real_from_integer (&cint, VOIDmode, n, 15214 n < 0 ? -1 : 0, 0); 15215 if (real_identical (&c, &cint)) 15216 return true; 15217 } 15218 } 15219 return tree_expr_nonnegative_warnv_p (arg0, 15220 strict_overflow_p); 15221 15222 default: 15223 break; 15224 } 15225 return tree_simple_nonnegative_warnv_p (CALL_EXPR, 15226 type); 15227 } 15228 15229 /* Return true if T is known to be non-negative. If the return 15230 value is based on the assumption that signed overflow is undefined, 15231 set *STRICT_OVERFLOW_P to true; otherwise, don't change 15232 *STRICT_OVERFLOW_P. */ 15233 15234 bool 15235 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p) 15236 { 15237 enum tree_code code = TREE_CODE (t); 15238 if (TYPE_UNSIGNED (TREE_TYPE (t))) 15239 return true; 15240 15241 switch (code) 15242 { 15243 case TARGET_EXPR: 15244 { 15245 tree temp = TARGET_EXPR_SLOT (t); 15246 t = TARGET_EXPR_INITIAL (t); 15247 15248 /* If the initializer is non-void, then it's a normal expression 15249 that will be assigned to the slot. */ 15250 if (!VOID_TYPE_P (t)) 15251 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p); 15252 15253 /* Otherwise, the initializer sets the slot in some way. One common 15254 way is an assignment statement at the end of the initializer. */ 15255 while (1) 15256 { 15257 if (TREE_CODE (t) == BIND_EXPR) 15258 t = expr_last (BIND_EXPR_BODY (t)); 15259 else if (TREE_CODE (t) == TRY_FINALLY_EXPR 15260 || TREE_CODE (t) == TRY_CATCH_EXPR) 15261 t = expr_last (TREE_OPERAND (t, 0)); 15262 else if (TREE_CODE (t) == STATEMENT_LIST) 15263 t = expr_last (t); 15264 else 15265 break; 15266 } 15267 if (TREE_CODE (t) == MODIFY_EXPR 15268 && TREE_OPERAND (t, 0) == temp) 15269 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), 15270 strict_overflow_p); 15271 15272 return false; 15273 } 15274 15275 case CALL_EXPR: 15276 { 15277 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE; 15278 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE; 15279 15280 return tree_call_nonnegative_warnv_p (TREE_TYPE (t), 15281 get_callee_fndecl (t), 15282 arg0, 15283 arg1, 15284 strict_overflow_p); 15285 } 15286 case COMPOUND_EXPR: 15287 case MODIFY_EXPR: 15288 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), 15289 strict_overflow_p); 15290 case BIND_EXPR: 15291 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)), 15292 strict_overflow_p); 15293 case SAVE_EXPR: 15294 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), 15295 strict_overflow_p); 15296 15297 default: 15298 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), 15299 TREE_TYPE (t)); 15300 } 15301 15302 /* We don't know sign of `t', so be conservative and return false. */ 15303 return false; 15304 } 15305 15306 /* Return true if T is known to be non-negative. If the return 15307 value is based on the assumption that signed overflow is undefined, 15308 set *STRICT_OVERFLOW_P to true; otherwise, don't change 15309 *STRICT_OVERFLOW_P. */ 15310 15311 bool 15312 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p) 15313 { 15314 enum tree_code code; 15315 if (t == error_mark_node) 15316 return false; 15317 15318 code = TREE_CODE (t); 15319 switch (TREE_CODE_CLASS (code)) 15320 { 15321 case tcc_binary: 15322 case tcc_comparison: 15323 return tree_binary_nonnegative_warnv_p (TREE_CODE (t), 15324 TREE_TYPE (t), 15325 TREE_OPERAND (t, 0), 15326 TREE_OPERAND (t, 1), 15327 strict_overflow_p); 15328 15329 case tcc_unary: 15330 return tree_unary_nonnegative_warnv_p (TREE_CODE (t), 15331 TREE_TYPE (t), 15332 TREE_OPERAND (t, 0), 15333 strict_overflow_p); 15334 15335 case tcc_constant: 15336 case tcc_declaration: 15337 case tcc_reference: 15338 return tree_single_nonnegative_warnv_p (t, strict_overflow_p); 15339 15340 default: 15341 break; 15342 } 15343 15344 switch (code) 15345 { 15346 case TRUTH_AND_EXPR: 15347 case TRUTH_OR_EXPR: 15348 case TRUTH_XOR_EXPR: 15349 return tree_binary_nonnegative_warnv_p (TREE_CODE (t), 15350 TREE_TYPE (t), 15351 TREE_OPERAND (t, 0), 15352 TREE_OPERAND (t, 1), 15353 strict_overflow_p); 15354 case TRUTH_NOT_EXPR: 15355 return tree_unary_nonnegative_warnv_p (TREE_CODE (t), 15356 TREE_TYPE (t), 15357 TREE_OPERAND (t, 0), 15358 strict_overflow_p); 15359 15360 case COND_EXPR: 15361 case CONSTRUCTOR: 15362 case OBJ_TYPE_REF: 15363 case ASSERT_EXPR: 15364 case ADDR_EXPR: 15365 case WITH_SIZE_EXPR: 15366 case SSA_NAME: 15367 return tree_single_nonnegative_warnv_p (t, strict_overflow_p); 15368 15369 default: 15370 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p); 15371 } 15372 } 15373 15374 /* Return true if `t' is known to be non-negative. Handle warnings 15375 about undefined signed overflow. */ 15376 15377 bool 15378 tree_expr_nonnegative_p (tree t) 15379 { 15380 bool ret, strict_overflow_p; 15381 15382 strict_overflow_p = false; 15383 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p); 15384 if (strict_overflow_p) 15385 fold_overflow_warning (("assuming signed overflow does not occur when " 15386 "determining that expression is always " 15387 "non-negative"), 15388 WARN_STRICT_OVERFLOW_MISC); 15389 return ret; 15390 } 15391 15392 15393 /* Return true when (CODE OP0) is an address and is known to be nonzero. 15394 For floating point we further ensure that T is not denormal. 15395 Similar logic is present in nonzero_address in rtlanal.h. 15396 15397 If the return value is based on the assumption that signed overflow 15398 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't 15399 change *STRICT_OVERFLOW_P. */ 15400 15401 bool 15402 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0, 15403 bool *strict_overflow_p) 15404 { 15405 switch (code) 15406 { 15407 case ABS_EXPR: 15408 return tree_expr_nonzero_warnv_p (op0, 15409 strict_overflow_p); 15410 15411 case NOP_EXPR: 15412 { 15413 tree inner_type = TREE_TYPE (op0); 15414 tree outer_type = type; 15415 15416 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type) 15417 && tree_expr_nonzero_warnv_p (op0, 15418 strict_overflow_p)); 15419 } 15420 break; 15421 15422 case NON_LVALUE_EXPR: 15423 return tree_expr_nonzero_warnv_p (op0, 15424 strict_overflow_p); 15425 15426 default: 15427 break; 15428 } 15429 15430 return false; 15431 } 15432 15433 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero. 15434 For floating point we further ensure that T is not denormal. 15435 Similar logic is present in nonzero_address in rtlanal.h. 15436 15437 If the return value is based on the assumption that signed overflow 15438 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't 15439 change *STRICT_OVERFLOW_P. */ 15440 15441 bool 15442 tree_binary_nonzero_warnv_p (enum tree_code code, 15443 tree type, 15444 tree op0, 15445 tree op1, bool *strict_overflow_p) 15446 { 15447 bool sub_strict_overflow_p; 15448 switch (code) 15449 { 15450 case POINTER_PLUS_EXPR: 15451 case PLUS_EXPR: 15452 if (TYPE_OVERFLOW_UNDEFINED (type)) 15453 { 15454 /* With the presence of negative values it is hard 15455 to say something. */ 15456 sub_strict_overflow_p = false; 15457 if (!tree_expr_nonnegative_warnv_p (op0, 15458 &sub_strict_overflow_p) 15459 || !tree_expr_nonnegative_warnv_p (op1, 15460 &sub_strict_overflow_p)) 15461 return false; 15462 /* One of operands must be positive and the other non-negative. */ 15463 /* We don't set *STRICT_OVERFLOW_P here: even if this value 15464 overflows, on a twos-complement machine the sum of two 15465 nonnegative numbers can never be zero. */ 15466 return (tree_expr_nonzero_warnv_p (op0, 15467 strict_overflow_p) 15468 || tree_expr_nonzero_warnv_p (op1, 15469 strict_overflow_p)); 15470 } 15471 break; 15472 15473 case MULT_EXPR: 15474 if (TYPE_OVERFLOW_UNDEFINED (type)) 15475 { 15476 if (tree_expr_nonzero_warnv_p (op0, 15477 strict_overflow_p) 15478 && tree_expr_nonzero_warnv_p (op1, 15479 strict_overflow_p)) 15480 { 15481 *strict_overflow_p = true; 15482 return true; 15483 } 15484 } 15485 break; 15486 15487 case MIN_EXPR: 15488 sub_strict_overflow_p = false; 15489 if (tree_expr_nonzero_warnv_p (op0, 15490 &sub_strict_overflow_p) 15491 && tree_expr_nonzero_warnv_p (op1, 15492 &sub_strict_overflow_p)) 15493 { 15494 if (sub_strict_overflow_p) 15495 *strict_overflow_p = true; 15496 } 15497 break; 15498 15499 case MAX_EXPR: 15500 sub_strict_overflow_p = false; 15501 if (tree_expr_nonzero_warnv_p (op0, 15502 &sub_strict_overflow_p)) 15503 { 15504 if (sub_strict_overflow_p) 15505 *strict_overflow_p = true; 15506 15507 /* When both operands are nonzero, then MAX must be too. */ 15508 if (tree_expr_nonzero_warnv_p (op1, 15509 strict_overflow_p)) 15510 return true; 15511 15512 /* MAX where operand 0 is positive is positive. */ 15513 return tree_expr_nonnegative_warnv_p (op0, 15514 strict_overflow_p); 15515 } 15516 /* MAX where operand 1 is positive is positive. */ 15517 else if (tree_expr_nonzero_warnv_p (op1, 15518 &sub_strict_overflow_p) 15519 && tree_expr_nonnegative_warnv_p (op1, 15520 &sub_strict_overflow_p)) 15521 { 15522 if (sub_strict_overflow_p) 15523 *strict_overflow_p = true; 15524 return true; 15525 } 15526 break; 15527 15528 case BIT_IOR_EXPR: 15529 return (tree_expr_nonzero_warnv_p (op1, 15530 strict_overflow_p) 15531 || tree_expr_nonzero_warnv_p (op0, 15532 strict_overflow_p)); 15533 15534 default: 15535 break; 15536 } 15537 15538 return false; 15539 } 15540 15541 /* Return true when T is an address and is known to be nonzero. 15542 For floating point we further ensure that T is not denormal. 15543 Similar logic is present in nonzero_address in rtlanal.h. 15544 15545 If the return value is based on the assumption that signed overflow 15546 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't 15547 change *STRICT_OVERFLOW_P. */ 15548 15549 bool 15550 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p) 15551 { 15552 bool sub_strict_overflow_p; 15553 switch (TREE_CODE (t)) 15554 { 15555 case INTEGER_CST: 15556 return !integer_zerop (t); 15557 15558 case ADDR_EXPR: 15559 { 15560 tree base = TREE_OPERAND (t, 0); 15561 if (!DECL_P (base)) 15562 base = get_base_address (base); 15563 15564 if (!base) 15565 return false; 15566 15567 /* Weak declarations may link to NULL. Other things may also be NULL 15568 so protect with -fdelete-null-pointer-checks; but not variables 15569 allocated on the stack. */ 15570 if (DECL_P (base) 15571 && (flag_delete_null_pointer_checks 15572 || (DECL_CONTEXT (base) 15573 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL 15574 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))) 15575 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base); 15576 15577 /* Constants are never weak. */ 15578 if (CONSTANT_CLASS_P (base)) 15579 return true; 15580 15581 return false; 15582 } 15583 15584 case COND_EXPR: 15585 sub_strict_overflow_p = false; 15586 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1), 15587 &sub_strict_overflow_p) 15588 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2), 15589 &sub_strict_overflow_p)) 15590 { 15591 if (sub_strict_overflow_p) 15592 *strict_overflow_p = true; 15593 return true; 15594 } 15595 break; 15596 15597 default: 15598 break; 15599 } 15600 return false; 15601 } 15602 15603 /* Return true when T is an address and is known to be nonzero. 15604 For floating point we further ensure that T is not denormal. 15605 Similar logic is present in nonzero_address in rtlanal.h. 15606 15607 If the return value is based on the assumption that signed overflow 15608 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't 15609 change *STRICT_OVERFLOW_P. */ 15610 15611 bool 15612 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p) 15613 { 15614 tree type = TREE_TYPE (t); 15615 enum tree_code code; 15616 15617 /* Doing something useful for floating point would need more work. */ 15618 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type)) 15619 return false; 15620 15621 code = TREE_CODE (t); 15622 switch (TREE_CODE_CLASS (code)) 15623 { 15624 case tcc_unary: 15625 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0), 15626 strict_overflow_p); 15627 case tcc_binary: 15628 case tcc_comparison: 15629 return tree_binary_nonzero_warnv_p (code, type, 15630 TREE_OPERAND (t, 0), 15631 TREE_OPERAND (t, 1), 15632 strict_overflow_p); 15633 case tcc_constant: 15634 case tcc_declaration: 15635 case tcc_reference: 15636 return tree_single_nonzero_warnv_p (t, strict_overflow_p); 15637 15638 default: 15639 break; 15640 } 15641 15642 switch (code) 15643 { 15644 case TRUTH_NOT_EXPR: 15645 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0), 15646 strict_overflow_p); 15647 15648 case TRUTH_AND_EXPR: 15649 case TRUTH_OR_EXPR: 15650 case TRUTH_XOR_EXPR: 15651 return tree_binary_nonzero_warnv_p (code, type, 15652 TREE_OPERAND (t, 0), 15653 TREE_OPERAND (t, 1), 15654 strict_overflow_p); 15655 15656 case COND_EXPR: 15657 case CONSTRUCTOR: 15658 case OBJ_TYPE_REF: 15659 case ASSERT_EXPR: 15660 case ADDR_EXPR: 15661 case WITH_SIZE_EXPR: 15662 case SSA_NAME: 15663 return tree_single_nonzero_warnv_p (t, strict_overflow_p); 15664 15665 case COMPOUND_EXPR: 15666 case MODIFY_EXPR: 15667 case BIND_EXPR: 15668 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1), 15669 strict_overflow_p); 15670 15671 case SAVE_EXPR: 15672 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0), 15673 strict_overflow_p); 15674 15675 case CALL_EXPR: 15676 return alloca_call_p (t); 15677 15678 default: 15679 break; 15680 } 15681 return false; 15682 } 15683 15684 /* Return true when T is an address and is known to be nonzero. 15685 Handle warnings about undefined signed overflow. */ 15686 15687 bool 15688 tree_expr_nonzero_p (tree t) 15689 { 15690 bool ret, strict_overflow_p; 15691 15692 strict_overflow_p = false; 15693 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p); 15694 if (strict_overflow_p) 15695 fold_overflow_warning (("assuming signed overflow does not occur when " 15696 "determining that expression is always " 15697 "non-zero"), 15698 WARN_STRICT_OVERFLOW_MISC); 15699 return ret; 15700 } 15701 15702 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1, 15703 attempt to fold the expression to a constant without modifying TYPE, 15704 OP0 or OP1. 15705 15706 If the expression could be simplified to a constant, then return 15707 the constant. If the expression would not be simplified to a 15708 constant, then return NULL_TREE. */ 15709 15710 tree 15711 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1) 15712 { 15713 tree tem = fold_binary (code, type, op0, op1); 15714 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE; 15715 } 15716 15717 /* Given the components of a unary expression CODE, TYPE and OP0, 15718 attempt to fold the expression to a constant without modifying 15719 TYPE or OP0. 15720 15721 If the expression could be simplified to a constant, then return 15722 the constant. If the expression would not be simplified to a 15723 constant, then return NULL_TREE. */ 15724 15725 tree 15726 fold_unary_to_constant (enum tree_code code, tree type, tree op0) 15727 { 15728 tree tem = fold_unary (code, type, op0); 15729 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE; 15730 } 15731 15732 /* If EXP represents referencing an element in a constant string 15733 (either via pointer arithmetic or array indexing), return the 15734 tree representing the value accessed, otherwise return NULL. */ 15735 15736 tree 15737 fold_read_from_constant_string (tree exp) 15738 { 15739 if ((TREE_CODE (exp) == INDIRECT_REF 15740 || TREE_CODE (exp) == ARRAY_REF) 15741 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE) 15742 { 15743 tree exp1 = TREE_OPERAND (exp, 0); 15744 tree index; 15745 tree string; 15746 location_t loc = EXPR_LOCATION (exp); 15747 15748 if (TREE_CODE (exp) == INDIRECT_REF) 15749 string = string_constant (exp1, &index); 15750 else 15751 { 15752 tree low_bound = array_ref_low_bound (exp); 15753 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1)); 15754 15755 /* Optimize the special-case of a zero lower bound. 15756 15757 We convert the low_bound to sizetype to avoid some problems 15758 with constant folding. (E.g. suppose the lower bound is 1, 15759 and its mode is QI. Without the conversion,l (ARRAY 15760 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1)) 15761 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */ 15762 if (! integer_zerop (low_bound)) 15763 index = size_diffop_loc (loc, index, 15764 fold_convert_loc (loc, sizetype, low_bound)); 15765 15766 string = exp1; 15767 } 15768 15769 if (string 15770 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string))) 15771 && TREE_CODE (string) == STRING_CST 15772 && TREE_CODE (index) == INTEGER_CST 15773 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0 15774 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) 15775 == MODE_INT) 15776 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1)) 15777 return build_int_cst_type (TREE_TYPE (exp), 15778 (TREE_STRING_POINTER (string) 15779 [TREE_INT_CST_LOW (index)])); 15780 } 15781 return NULL; 15782 } 15783 15784 /* Return the tree for neg (ARG0) when ARG0 is known to be either 15785 an integer constant, real, or fixed-point constant. 15786 15787 TYPE is the type of the result. */ 15788 15789 static tree 15790 fold_negate_const (tree arg0, tree type) 15791 { 15792 tree t = NULL_TREE; 15793 15794 switch (TREE_CODE (arg0)) 15795 { 15796 case INTEGER_CST: 15797 { 15798 double_int val = tree_to_double_int (arg0); 15799 int overflow = neg_double (val.low, val.high, &val.low, &val.high); 15800 15801 t = force_fit_type_double (type, val, 1, 15802 (overflow | TREE_OVERFLOW (arg0)) 15803 && !TYPE_UNSIGNED (type)); 15804 break; 15805 } 15806 15807 case REAL_CST: 15808 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0))); 15809 break; 15810 15811 case FIXED_CST: 15812 { 15813 FIXED_VALUE_TYPE f; 15814 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR, 15815 &(TREE_FIXED_CST (arg0)), NULL, 15816 TYPE_SATURATING (type)); 15817 t = build_fixed (type, f); 15818 /* Propagate overflow flags. */ 15819 if (overflow_p | TREE_OVERFLOW (arg0)) 15820 TREE_OVERFLOW (t) = 1; 15821 break; 15822 } 15823 15824 default: 15825 gcc_unreachable (); 15826 } 15827 15828 return t; 15829 } 15830 15831 /* Return the tree for abs (ARG0) when ARG0 is known to be either 15832 an integer constant or real constant. 15833 15834 TYPE is the type of the result. */ 15835 15836 tree 15837 fold_abs_const (tree arg0, tree type) 15838 { 15839 tree t = NULL_TREE; 15840 15841 switch (TREE_CODE (arg0)) 15842 { 15843 case INTEGER_CST: 15844 { 15845 double_int val = tree_to_double_int (arg0); 15846 15847 /* If the value is unsigned or non-negative, then the absolute value 15848 is the same as the ordinary value. */ 15849 if (TYPE_UNSIGNED (type) 15850 || !double_int_negative_p (val)) 15851 t = arg0; 15852 15853 /* If the value is negative, then the absolute value is 15854 its negation. */ 15855 else 15856 { 15857 int overflow; 15858 15859 overflow = neg_double (val.low, val.high, &val.low, &val.high); 15860 t = force_fit_type_double (type, val, -1, 15861 overflow | TREE_OVERFLOW (arg0)); 15862 } 15863 } 15864 break; 15865 15866 case REAL_CST: 15867 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0))) 15868 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0))); 15869 else 15870 t = arg0; 15871 break; 15872 15873 default: 15874 gcc_unreachable (); 15875 } 15876 15877 return t; 15878 } 15879 15880 /* Return the tree for not (ARG0) when ARG0 is known to be an integer 15881 constant. TYPE is the type of the result. */ 15882 15883 static tree 15884 fold_not_const (const_tree arg0, tree type) 15885 { 15886 double_int val; 15887 15888 gcc_assert (TREE_CODE (arg0) == INTEGER_CST); 15889 15890 val = double_int_not (tree_to_double_int (arg0)); 15891 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0)); 15892 } 15893 15894 /* Given CODE, a relational operator, the target type, TYPE and two 15895 constant operands OP0 and OP1, return the result of the 15896 relational operation. If the result is not a compile time 15897 constant, then return NULL_TREE. */ 15898 15899 static tree 15900 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1) 15901 { 15902 int result, invert; 15903 15904 /* From here on, the only cases we handle are when the result is 15905 known to be a constant. */ 15906 15907 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST) 15908 { 15909 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0); 15910 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1); 15911 15912 /* Handle the cases where either operand is a NaN. */ 15913 if (real_isnan (c0) || real_isnan (c1)) 15914 { 15915 switch (code) 15916 { 15917 case EQ_EXPR: 15918 case ORDERED_EXPR: 15919 result = 0; 15920 break; 15921 15922 case NE_EXPR: 15923 case UNORDERED_EXPR: 15924 case UNLT_EXPR: 15925 case UNLE_EXPR: 15926 case UNGT_EXPR: 15927 case UNGE_EXPR: 15928 case UNEQ_EXPR: 15929 result = 1; 15930 break; 15931 15932 case LT_EXPR: 15933 case LE_EXPR: 15934 case GT_EXPR: 15935 case GE_EXPR: 15936 case LTGT_EXPR: 15937 if (flag_trapping_math) 15938 return NULL_TREE; 15939 result = 0; 15940 break; 15941 15942 default: 15943 gcc_unreachable (); 15944 } 15945 15946 return constant_boolean_node (result, type); 15947 } 15948 15949 return constant_boolean_node (real_compare (code, c0, c1), type); 15950 } 15951 15952 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST) 15953 { 15954 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0); 15955 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1); 15956 return constant_boolean_node (fixed_compare (code, c0, c1), type); 15957 } 15958 15959 /* Handle equality/inequality of complex constants. */ 15960 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST) 15961 { 15962 tree rcond = fold_relational_const (code, type, 15963 TREE_REALPART (op0), 15964 TREE_REALPART (op1)); 15965 tree icond = fold_relational_const (code, type, 15966 TREE_IMAGPART (op0), 15967 TREE_IMAGPART (op1)); 15968 if (code == EQ_EXPR) 15969 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond); 15970 else if (code == NE_EXPR) 15971 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond); 15972 else 15973 return NULL_TREE; 15974 } 15975 15976 /* From here on we only handle LT, LE, GT, GE, EQ and NE. 15977 15978 To compute GT, swap the arguments and do LT. 15979 To compute GE, do LT and invert the result. 15980 To compute LE, swap the arguments, do LT and invert the result. 15981 To compute NE, do EQ and invert the result. 15982 15983 Therefore, the code below must handle only EQ and LT. */ 15984 15985 if (code == LE_EXPR || code == GT_EXPR) 15986 { 15987 tree tem = op0; 15988 op0 = op1; 15989 op1 = tem; 15990 code = swap_tree_comparison (code); 15991 } 15992 15993 /* Note that it is safe to invert for real values here because we 15994 have already handled the one case that it matters. */ 15995 15996 invert = 0; 15997 if (code == NE_EXPR || code == GE_EXPR) 15998 { 15999 invert = 1; 16000 code = invert_tree_comparison (code, false); 16001 } 16002 16003 /* Compute a result for LT or EQ if args permit; 16004 Otherwise return T. */ 16005 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST) 16006 { 16007 if (code == EQ_EXPR) 16008 result = tree_int_cst_equal (op0, op1); 16009 else if (TYPE_UNSIGNED (TREE_TYPE (op0))) 16010 result = INT_CST_LT_UNSIGNED (op0, op1); 16011 else 16012 result = INT_CST_LT (op0, op1); 16013 } 16014 else 16015 return NULL_TREE; 16016 16017 if (invert) 16018 result ^= 1; 16019 return constant_boolean_node (result, type); 16020 } 16021 16022 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the 16023 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR 16024 itself. */ 16025 16026 tree 16027 fold_build_cleanup_point_expr (tree type, tree expr) 16028 { 16029 /* If the expression does not have side effects then we don't have to wrap 16030 it with a cleanup point expression. */ 16031 if (!TREE_SIDE_EFFECTS (expr)) 16032 return expr; 16033 16034 /* If the expression is a return, check to see if the expression inside the 16035 return has no side effects or the right hand side of the modify expression 16036 inside the return. If either don't have side effects set we don't need to 16037 wrap the expression in a cleanup point expression. Note we don't check the 16038 left hand side of the modify because it should always be a return decl. */ 16039 if (TREE_CODE (expr) == RETURN_EXPR) 16040 { 16041 tree op = TREE_OPERAND (expr, 0); 16042 if (!op || !TREE_SIDE_EFFECTS (op)) 16043 return expr; 16044 op = TREE_OPERAND (op, 1); 16045 if (!TREE_SIDE_EFFECTS (op)) 16046 return expr; 16047 } 16048 16049 return build1 (CLEANUP_POINT_EXPR, type, expr); 16050 } 16051 16052 /* Given a pointer value OP0 and a type TYPE, return a simplified version 16053 of an indirection through OP0, or NULL_TREE if no simplification is 16054 possible. */ 16055 16056 tree 16057 fold_indirect_ref_1 (location_t loc, tree type, tree op0) 16058 { 16059 tree sub = op0; 16060 tree subtype; 16061 16062 STRIP_NOPS (sub); 16063 subtype = TREE_TYPE (sub); 16064 if (!POINTER_TYPE_P (subtype)) 16065 return NULL_TREE; 16066 16067 if (TREE_CODE (sub) == ADDR_EXPR) 16068 { 16069 tree op = TREE_OPERAND (sub, 0); 16070 tree optype = TREE_TYPE (op); 16071 /* *&CONST_DECL -> to the value of the const decl. */ 16072 if (TREE_CODE (op) == CONST_DECL) 16073 return DECL_INITIAL (op); 16074 /* *&p => p; make sure to handle *&"str"[cst] here. */ 16075 if (type == optype) 16076 { 16077 tree fop = fold_read_from_constant_string (op); 16078 if (fop) 16079 return fop; 16080 else 16081 return op; 16082 } 16083 /* *(foo *)&fooarray => fooarray[0] */ 16084 else if (TREE_CODE (optype) == ARRAY_TYPE 16085 && type == TREE_TYPE (optype) 16086 && (!in_gimple_form 16087 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)) 16088 { 16089 tree type_domain = TYPE_DOMAIN (optype); 16090 tree min_val = size_zero_node; 16091 if (type_domain && TYPE_MIN_VALUE (type_domain)) 16092 min_val = TYPE_MIN_VALUE (type_domain); 16093 if (in_gimple_form 16094 && TREE_CODE (min_val) != INTEGER_CST) 16095 return NULL_TREE; 16096 return build4_loc (loc, ARRAY_REF, type, op, min_val, 16097 NULL_TREE, NULL_TREE); 16098 } 16099 /* *(foo *)&complexfoo => __real__ complexfoo */ 16100 else if (TREE_CODE (optype) == COMPLEX_TYPE 16101 && type == TREE_TYPE (optype)) 16102 return fold_build1_loc (loc, REALPART_EXPR, type, op); 16103 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */ 16104 else if (TREE_CODE (optype) == VECTOR_TYPE 16105 && type == TREE_TYPE (optype)) 16106 { 16107 tree part_width = TYPE_SIZE (type); 16108 tree index = bitsize_int (0); 16109 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index); 16110 } 16111 } 16112 16113 if (TREE_CODE (sub) == POINTER_PLUS_EXPR 16114 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST) 16115 { 16116 tree op00 = TREE_OPERAND (sub, 0); 16117 tree op01 = TREE_OPERAND (sub, 1); 16118 16119 STRIP_NOPS (op00); 16120 if (TREE_CODE (op00) == ADDR_EXPR) 16121 { 16122 tree op00type; 16123 op00 = TREE_OPERAND (op00, 0); 16124 op00type = TREE_TYPE (op00); 16125 16126 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */ 16127 if (TREE_CODE (op00type) == VECTOR_TYPE 16128 && type == TREE_TYPE (op00type)) 16129 { 16130 HOST_WIDE_INT offset = tree_low_cst (op01, 0); 16131 tree part_width = TYPE_SIZE (type); 16132 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT; 16133 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT; 16134 tree index = bitsize_int (indexi); 16135 16136 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type)) 16137 return fold_build3_loc (loc, 16138 BIT_FIELD_REF, type, op00, 16139 part_width, index); 16140 16141 } 16142 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */ 16143 else if (TREE_CODE (op00type) == COMPLEX_TYPE 16144 && type == TREE_TYPE (op00type)) 16145 { 16146 tree size = TYPE_SIZE_UNIT (type); 16147 if (tree_int_cst_equal (size, op01)) 16148 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00); 16149 } 16150 /* ((foo *)&fooarray)[1] => fooarray[1] */ 16151 else if (TREE_CODE (op00type) == ARRAY_TYPE 16152 && type == TREE_TYPE (op00type)) 16153 { 16154 tree type_domain = TYPE_DOMAIN (op00type); 16155 tree min_val = size_zero_node; 16156 if (type_domain && TYPE_MIN_VALUE (type_domain)) 16157 min_val = TYPE_MIN_VALUE (type_domain); 16158 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01, 16159 TYPE_SIZE_UNIT (type)); 16160 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val); 16161 return build4_loc (loc, ARRAY_REF, type, op00, op01, 16162 NULL_TREE, NULL_TREE); 16163 } 16164 } 16165 } 16166 16167 /* *(foo *)fooarrptr => (*fooarrptr)[0] */ 16168 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE 16169 && type == TREE_TYPE (TREE_TYPE (subtype)) 16170 && (!in_gimple_form 16171 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)) 16172 { 16173 tree type_domain; 16174 tree min_val = size_zero_node; 16175 sub = build_fold_indirect_ref_loc (loc, sub); 16176 type_domain = TYPE_DOMAIN (TREE_TYPE (sub)); 16177 if (type_domain && TYPE_MIN_VALUE (type_domain)) 16178 min_val = TYPE_MIN_VALUE (type_domain); 16179 if (in_gimple_form 16180 && TREE_CODE (min_val) != INTEGER_CST) 16181 return NULL_TREE; 16182 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE, 16183 NULL_TREE); 16184 } 16185 16186 return NULL_TREE; 16187 } 16188 16189 /* Builds an expression for an indirection through T, simplifying some 16190 cases. */ 16191 16192 tree 16193 build_fold_indirect_ref_loc (location_t loc, tree t) 16194 { 16195 tree type = TREE_TYPE (TREE_TYPE (t)); 16196 tree sub = fold_indirect_ref_1 (loc, type, t); 16197 16198 if (sub) 16199 return sub; 16200 16201 return build1_loc (loc, INDIRECT_REF, type, t); 16202 } 16203 16204 /* Given an INDIRECT_REF T, return either T or a simplified version. */ 16205 16206 tree 16207 fold_indirect_ref_loc (location_t loc, tree t) 16208 { 16209 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0)); 16210 16211 if (sub) 16212 return sub; 16213 else 16214 return t; 16215 } 16216 16217 /* Strip non-trapping, non-side-effecting tree nodes from an expression 16218 whose result is ignored. The type of the returned tree need not be 16219 the same as the original expression. */ 16220 16221 tree 16222 fold_ignored_result (tree t) 16223 { 16224 if (!TREE_SIDE_EFFECTS (t)) 16225 return integer_zero_node; 16226 16227 for (;;) 16228 switch (TREE_CODE_CLASS (TREE_CODE (t))) 16229 { 16230 case tcc_unary: 16231 t = TREE_OPERAND (t, 0); 16232 break; 16233 16234 case tcc_binary: 16235 case tcc_comparison: 16236 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))) 16237 t = TREE_OPERAND (t, 0); 16238 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))) 16239 t = TREE_OPERAND (t, 1); 16240 else 16241 return t; 16242 break; 16243 16244 case tcc_expression: 16245 switch (TREE_CODE (t)) 16246 { 16247 case COMPOUND_EXPR: 16248 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))) 16249 return t; 16250 t = TREE_OPERAND (t, 0); 16251 break; 16252 16253 case COND_EXPR: 16254 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)) 16255 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2))) 16256 return t; 16257 t = TREE_OPERAND (t, 0); 16258 break; 16259 16260 default: 16261 return t; 16262 } 16263 break; 16264 16265 default: 16266 return t; 16267 } 16268 } 16269 16270 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. 16271 This can only be applied to objects of a sizetype. */ 16272 16273 tree 16274 round_up_loc (location_t loc, tree value, int divisor) 16275 { 16276 tree div = NULL_TREE; 16277 16278 gcc_assert (divisor > 0); 16279 if (divisor == 1) 16280 return value; 16281 16282 /* See if VALUE is already a multiple of DIVISOR. If so, we don't 16283 have to do anything. Only do this when we are not given a const, 16284 because in that case, this check is more expensive than just 16285 doing it. */ 16286 if (TREE_CODE (value) != INTEGER_CST) 16287 { 16288 div = build_int_cst (TREE_TYPE (value), divisor); 16289 16290 if (multiple_of_p (TREE_TYPE (value), value, div)) 16291 return value; 16292 } 16293 16294 /* If divisor is a power of two, simplify this to bit manipulation. */ 16295 if (divisor == (divisor & -divisor)) 16296 { 16297 if (TREE_CODE (value) == INTEGER_CST) 16298 { 16299 double_int val = tree_to_double_int (value); 16300 bool overflow_p; 16301 16302 if ((val.low & (divisor - 1)) == 0) 16303 return value; 16304 16305 overflow_p = TREE_OVERFLOW (value); 16306 val.low &= ~(divisor - 1); 16307 val.low += divisor; 16308 if (val.low == 0) 16309 { 16310 val.high++; 16311 if (val.high == 0) 16312 overflow_p = true; 16313 } 16314 16315 return force_fit_type_double (TREE_TYPE (value), val, 16316 -1, overflow_p); 16317 } 16318 else 16319 { 16320 tree t; 16321 16322 t = build_int_cst (TREE_TYPE (value), divisor - 1); 16323 value = size_binop_loc (loc, PLUS_EXPR, value, t); 16324 t = build_int_cst (TREE_TYPE (value), -divisor); 16325 value = size_binop_loc (loc, BIT_AND_EXPR, value, t); 16326 } 16327 } 16328 else 16329 { 16330 if (!div) 16331 div = build_int_cst (TREE_TYPE (value), divisor); 16332 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div); 16333 value = size_binop_loc (loc, MULT_EXPR, value, div); 16334 } 16335 16336 return value; 16337 } 16338 16339 /* Likewise, but round down. */ 16340 16341 tree 16342 round_down_loc (location_t loc, tree value, int divisor) 16343 { 16344 tree div = NULL_TREE; 16345 16346 gcc_assert (divisor > 0); 16347 if (divisor == 1) 16348 return value; 16349 16350 /* See if VALUE is already a multiple of DIVISOR. If so, we don't 16351 have to do anything. Only do this when we are not given a const, 16352 because in that case, this check is more expensive than just 16353 doing it. */ 16354 if (TREE_CODE (value) != INTEGER_CST) 16355 { 16356 div = build_int_cst (TREE_TYPE (value), divisor); 16357 16358 if (multiple_of_p (TREE_TYPE (value), value, div)) 16359 return value; 16360 } 16361 16362 /* If divisor is a power of two, simplify this to bit manipulation. */ 16363 if (divisor == (divisor & -divisor)) 16364 { 16365 tree t; 16366 16367 t = build_int_cst (TREE_TYPE (value), -divisor); 16368 value = size_binop_loc (loc, BIT_AND_EXPR, value, t); 16369 } 16370 else 16371 { 16372 if (!div) 16373 div = build_int_cst (TREE_TYPE (value), divisor); 16374 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div); 16375 value = size_binop_loc (loc, MULT_EXPR, value, div); 16376 } 16377 16378 return value; 16379 } 16380 16381 /* Returns the pointer to the base of the object addressed by EXP and 16382 extracts the information about the offset of the access, storing it 16383 to PBITPOS and POFFSET. */ 16384 16385 static tree 16386 split_address_to_core_and_offset (tree exp, 16387 HOST_WIDE_INT *pbitpos, tree *poffset) 16388 { 16389 tree core; 16390 enum machine_mode mode; 16391 int unsignedp, volatilep; 16392 HOST_WIDE_INT bitsize; 16393 location_t loc = EXPR_LOCATION (exp); 16394 16395 if (TREE_CODE (exp) == ADDR_EXPR) 16396 { 16397 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos, 16398 poffset, &mode, &unsignedp, &volatilep, 16399 false); 16400 core = build_fold_addr_expr_loc (loc, core); 16401 } 16402 else 16403 { 16404 core = exp; 16405 *pbitpos = 0; 16406 *poffset = NULL_TREE; 16407 } 16408 16409 return core; 16410 } 16411 16412 /* Returns true if addresses of E1 and E2 differ by a constant, false 16413 otherwise. If they do, E1 - E2 is stored in *DIFF. */ 16414 16415 bool 16416 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff) 16417 { 16418 tree core1, core2; 16419 HOST_WIDE_INT bitpos1, bitpos2; 16420 tree toffset1, toffset2, tdiff, type; 16421 16422 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1); 16423 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2); 16424 16425 if (bitpos1 % BITS_PER_UNIT != 0 16426 || bitpos2 % BITS_PER_UNIT != 0 16427 || !operand_equal_p (core1, core2, 0)) 16428 return false; 16429 16430 if (toffset1 && toffset2) 16431 { 16432 type = TREE_TYPE (toffset1); 16433 if (type != TREE_TYPE (toffset2)) 16434 toffset2 = fold_convert (type, toffset2); 16435 16436 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2); 16437 if (!cst_and_fits_in_hwi (tdiff)) 16438 return false; 16439 16440 *diff = int_cst_value (tdiff); 16441 } 16442 else if (toffset1 || toffset2) 16443 { 16444 /* If only one of the offsets is non-constant, the difference cannot 16445 be a constant. */ 16446 return false; 16447 } 16448 else 16449 *diff = 0; 16450 16451 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT; 16452 return true; 16453 } 16454 16455 /* Simplify the floating point expression EXP when the sign of the 16456 result is not significant. Return NULL_TREE if no simplification 16457 is possible. */ 16458 16459 tree 16460 fold_strip_sign_ops (tree exp) 16461 { 16462 tree arg0, arg1; 16463 location_t loc = EXPR_LOCATION (exp); 16464 16465 switch (TREE_CODE (exp)) 16466 { 16467 case ABS_EXPR: 16468 case NEGATE_EXPR: 16469 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0)); 16470 return arg0 ? arg0 : TREE_OPERAND (exp, 0); 16471 16472 case MULT_EXPR: 16473 case RDIV_EXPR: 16474 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp)))) 16475 return NULL_TREE; 16476 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0)); 16477 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1)); 16478 if (arg0 != NULL_TREE || arg1 != NULL_TREE) 16479 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp), 16480 arg0 ? arg0 : TREE_OPERAND (exp, 0), 16481 arg1 ? arg1 : TREE_OPERAND (exp, 1)); 16482 break; 16483 16484 case COMPOUND_EXPR: 16485 arg0 = TREE_OPERAND (exp, 0); 16486 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1)); 16487 if (arg1) 16488 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1); 16489 break; 16490 16491 case COND_EXPR: 16492 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1)); 16493 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2)); 16494 if (arg0 || arg1) 16495 return fold_build3_loc (loc, 16496 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0), 16497 arg0 ? arg0 : TREE_OPERAND (exp, 1), 16498 arg1 ? arg1 : TREE_OPERAND (exp, 2)); 16499 break; 16500 16501 case CALL_EXPR: 16502 { 16503 const enum built_in_function fcode = builtin_mathfn_code (exp); 16504 switch (fcode) 16505 { 16506 CASE_FLT_FN (BUILT_IN_COPYSIGN): 16507 /* Strip copysign function call, return the 1st argument. */ 16508 arg0 = CALL_EXPR_ARG (exp, 0); 16509 arg1 = CALL_EXPR_ARG (exp, 1); 16510 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1); 16511 16512 default: 16513 /* Strip sign ops from the argument of "odd" math functions. */ 16514 if (negate_mathfn_p (fcode)) 16515 { 16516 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0)); 16517 if (arg0) 16518 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0); 16519 } 16520 break; 16521 } 16522 } 16523 break; 16524 16525 default: 16526 break; 16527 } 16528 return NULL_TREE; 16529 } 16530