/* Combining of if-expressions on trees.
   Copyright (C) 2007-2018 Free Software Foundation, Inc.
   Contributed by Richard Guenther <rguenther@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "ssa.h"
#include "tree-pretty-print.h"
/* rtl is needed only because arm back-end requires it for
   BRANCH_COST.  */
#include "fold-const.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-ssa.h"

#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
                false) >= 2)
#endif

/* This pass combines COND_EXPRs to simplify control flow.  It
   currently recognizes bit tests and comparisons in chains that
   represent logical and or logical or of two COND_EXPRs.

   It does so by walking basic blocks in an approximate reverse
   post-dominator order and trying to match CFG patterns that
   represent logical and or logical or of two COND_EXPRs.
57 Transformations are done if the COND_EXPR conditions match 58 either 59 60 1. two single bit tests X & (1 << Yn) (for logical and) 61 62 2. two bit tests X & Yn (for logical or) 63 64 3. two comparisons X OPn Y (for logical or) 65 66 To simplify this pass, removing basic blocks and dead code 67 is left to CFG cleanup and DCE. */ 68 69 70 /* Recognize a if-then-else CFG pattern starting to match with the 71 COND_BB basic-block containing the COND_EXPR. The recognized 72 then end else blocks are stored to *THEN_BB and *ELSE_BB. If 73 *THEN_BB and/or *ELSE_BB are already set, they are required to 74 match the then and else basic-blocks to make the pattern match. 75 Returns true if the pattern matched, false otherwise. */ 76 77 static bool 78 recognize_if_then_else (basic_block cond_bb, 79 basic_block *then_bb, basic_block *else_bb) 80 { 81 edge t, e; 82 83 if (EDGE_COUNT (cond_bb->succs) != 2) 84 return false; 85 86 /* Find the then/else edges. */ 87 t = EDGE_SUCC (cond_bb, 0); 88 e = EDGE_SUCC (cond_bb, 1); 89 if (!(t->flags & EDGE_TRUE_VALUE)) 90 std::swap (t, e); 91 if (!(t->flags & EDGE_TRUE_VALUE) 92 || !(e->flags & EDGE_FALSE_VALUE)) 93 return false; 94 95 /* Check if the edge destinations point to the required block. */ 96 if (*then_bb 97 && t->dest != *then_bb) 98 return false; 99 if (*else_bb 100 && e->dest != *else_bb) 101 return false; 102 103 if (!*then_bb) 104 *then_bb = t->dest; 105 if (!*else_bb) 106 *else_bb = e->dest; 107 108 return true; 109 } 110 111 /* Verify if the basic block BB does not have side-effects. Return 112 true in this case, else false. 
*/ 113 114 static bool 115 bb_no_side_effects_p (basic_block bb) 116 { 117 gimple_stmt_iterator gsi; 118 119 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) 120 { 121 gimple *stmt = gsi_stmt (gsi); 122 123 if (is_gimple_debug (stmt)) 124 continue; 125 126 if (gimple_has_side_effects (stmt) 127 || gimple_uses_undefined_value_p (stmt) 128 || gimple_could_trap_p (stmt) 129 || gimple_vuse (stmt) 130 /* const calls don't match any of the above, yet they could 131 still have some side-effects - they could contain 132 gimple_could_trap_p statements, like floating point 133 exceptions or integer division by zero. See PR70586. 134 FIXME: perhaps gimple_has_side_effects or gimple_could_trap_p 135 should handle this. */ 136 || is_gimple_call (stmt)) 137 return false; 138 } 139 140 return true; 141 } 142 143 /* Return true if BB is an empty forwarder block to TO_BB. */ 144 145 static bool 146 forwarder_block_to (basic_block bb, basic_block to_bb) 147 { 148 return empty_block_p (bb) 149 && single_succ_p (bb) 150 && single_succ (bb) == to_bb; 151 } 152 153 /* Verify if all PHI node arguments in DEST for edges from BB1 or 154 BB2 to DEST are the same. This makes the CFG merge point 155 free from side-effects. Return true in this case, else false. */ 156 157 static bool 158 same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest) 159 { 160 edge e1 = find_edge (bb1, dest); 161 edge e2 = find_edge (bb2, dest); 162 gphi_iterator gsi; 163 gphi *phi; 164 165 for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi)) 166 { 167 phi = gsi.phi (); 168 if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1), 169 PHI_ARG_DEF_FROM_EDGE (phi, e2), 0)) 170 return false; 171 } 172 173 return true; 174 } 175 176 /* Return the best representative SSA name for CANDIDATE which is used 177 in a bit test. 
*/

static tree
get_name_for_bit_test (tree candidate)
{
  /* Skip single-use names in favor of using the name from a
     non-widening conversion definition.  */
  if (TREE_CODE (candidate) == SSA_NAME
      && has_single_use (candidate))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (candidate);
      if (is_gimple_assign (def_stmt)
          && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
        {
          /* Only look through the conversion if its result is not
             wider than its operand.  */
          if (TYPE_PRECISION (TREE_TYPE (candidate))
              <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
            return gimple_assign_rhs1 (def_stmt);
        }
    }

  return candidate;
}

/* Recognize a single bit test pattern in GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bit
   in *BIT.  The GIMPLE_COND computes *NAME & (1 << *BIT).
   Returns true if the pattern matched, false otherwise.
   With INV false the condition has to be NAME & ... != 0, with INV
   true it has to be NAME & ... == 0.  */

static bool
recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
{
  gimple *stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt))
    return false;

  /* Look at which bit is tested.  One form to recognize is
     D.1985_5 = state_3(D) >> control1_4(D);
     D.1986_6 = (int) D.1985_5;
     D.1987_7 = op0 & 1;
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && integer_onep (gimple_assign_rhs2 (stmt))
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    {
      tree orig_name = gimple_assign_rhs1 (stmt);

      /* Look through copies and conversions to eventually
         find the stmt that computes the shift.  */
      stmt = SSA_NAME_DEF_STMT (orig_name);

      /* Step over SSA-name copies and conversions whose result is
         not wider than the operand.  */
      while (is_gimple_assign (stmt)
             && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                  && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt)))
                      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt))))
                  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
                 || gimple_assign_ssa_name_copy_p (stmt)))
        stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

      /* If we found such, decompose it.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
        {
          /* op0 & (1 << op1) */
          *bit = gimple_assign_rhs2 (stmt);
          *name = gimple_assign_rhs1 (stmt);
        }
      else
        {
          /* t & 1 */
          *bit = integer_zero_node;
          *name = get_name_for_bit_test (orig_name);
        }

      return true;
    }

  /* Another form is
     D.1987_7 = op0 & (1 << CST)
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && integer_pow2p (gimple_assign_rhs2 (stmt)))
    {
      *name = gimple_assign_rhs1 (stmt);
      /* The bit position is the base-2 logarithm of the power-of-two
         mask.  */
      *bit = build_int_cst (integer_type_node,
                            tree_log2 (gimple_assign_rhs2 (stmt)));
      return true;
    }

  /* Another form is
     D.1986_6 = 1 << control1_4(D)
     D.1987_7 = op0 & D.1986_6
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
    {
      gimple *tmp;

      /* Both arguments of the BIT_AND_EXPR can be the single-bit
         specifying expression.  */
      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
      if (is_gimple_assign (tmp)
          && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
          && integer_onep (gimple_assign_rhs1 (tmp)))
        {
          *name = gimple_assign_rhs2 (stmt);
          *bit = gimple_assign_rhs2 (tmp);
          return true;
        }

      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
      if (is_gimple_assign (tmp)
          && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
          && integer_onep (gimple_assign_rhs1 (tmp)))
        {
          *name = gimple_assign_rhs1 (stmt);
          *bit = gimple_assign_rhs2 (tmp);
          return true;
        }
    }

  return false;
}

/* Recognize a bit test pattern in a GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bits
   in *BITS.  The COND_EXPR computes *NAME & *BITS.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_bits_test (gcond *cond, tree *name, tree *bits, bool inv)
{
  gimple *stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt)
      || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
    return false;

  *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
  *bits = gimple_assign_rhs2 (stmt);

  return true;
}


/* Update profile after code in outer_cond_bb was adjusted so
   outer_cond_bb has no condition.  */

static void
update_profile_after_ifcombine (basic_block inner_cond_bb,
                                basic_block outer_cond_bb)
{
  edge outer_to_inner = find_edge (outer_cond_bb, inner_cond_bb);
  /* The other outgoing edge of the outer condition.  */
  edge outer2 = (EDGE_SUCC (outer_cond_bb, 0) == outer_to_inner
                 ? EDGE_SUCC (outer_cond_bb, 1)
                 : EDGE_SUCC (outer_cond_bb, 0));
  edge inner_taken = EDGE_SUCC (inner_cond_bb, 0);
  edge inner_not_taken = EDGE_SUCC (inner_cond_bb, 1);

  /* Make inner_taken the inner edge that joins the destination of
     outer2.  */
  if (inner_taken->dest != outer2->dest)
    std::swap (inner_taken, inner_not_taken);
  gcc_assert (inner_taken->dest == outer2->dest);

  /* In the following we assume that inner_cond_bb has single predecessor.  */
  gcc_assert (single_pred_p (inner_cond_bb));

  /* Path outer_cond_bb->(outer2) needs to be merged into path
     outer_cond_bb->(outer_to_inner)->inner_cond_bb->(inner_taken)
     and probability of inner_not_taken updated.  */

  inner_cond_bb->count = outer_cond_bb->count;

  inner_taken->probability = outer2->probability + outer_to_inner->probability
                             * inner_taken->probability;
  inner_not_taken->probability = profile_probability::always ()
                                 - inner_taken->probability;

  outer_to_inner->probability = profile_probability::always ();
  outer2->probability = profile_probability::never ();
}

/* If-convert on a and pattern with a common else block.  The inner
   if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
   inner_inv, outer_inv and result_inv indicate whether the conditions
   are inverted.
   Returns true if the edges to the common else basic-block were merged.
*/

static bool
ifcombine_ifandif (basic_block inner_cond_bb, bool inner_inv,
                   basic_block outer_cond_bb, bool outer_inv, bool result_inv)
{
  gimple_stmt_iterator gsi;
  gimple *inner_stmt, *outer_stmt;
  gcond *inner_cond, *outer_cond;
  tree name1, name2, bit1, bit2, bits1, bits2;

  /* Both blocks have to end in a GIMPLE_COND.  */
  inner_stmt = last_stmt (inner_cond_bb);
  if (!inner_stmt
      || gimple_code (inner_stmt) != GIMPLE_COND)
    return false;
  inner_cond = as_a <gcond *> (inner_stmt);

  outer_stmt = last_stmt (outer_cond_bb);
  if (!outer_stmt
      || gimple_code (outer_stmt) != GIMPLE_COND)
    return false;
  outer_cond = as_a <gcond *> (outer_stmt);

  /* See if we test a single bit of the same name in both tests.  In
     that case remove the outer test, merging both else edges,
     and change the inner one to test for
     name & (bit1 | bit2) == (bit1 | bit2).  */
  if (recognize_single_bit_test (inner_cond, &name1, &bit1, inner_inv)
      && recognize_single_bit_test (outer_cond, &name2, &bit2, outer_inv)
      && name1 == name2)
    {
      tree t, t2;

      /* Do it.  Build the mask (1 << bit1) | (1 << bit2) and the
         new inner condition name & mask == mask, gimplifying the
         mask and the masked name before the inner condition.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
                       build_int_cst (TREE_TYPE (name1), 1), bit1);
      t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
                        build_int_cst (TREE_TYPE (name1), 1), bit2);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
      t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
                                     true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR,
                       boolean_type_node, t2, t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
        return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  The outer condition
         is made trivially true (respectively false if inverted).  */
      gimple_cond_set_condition_from_tree (outer_cond,
        outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);

      if (dump_file)
        {
          fprintf (dump_file, "optimizing double bit test to ");
          print_generic_expr (dump_file, name1);
          fprintf (dump_file, " & T == T\nwith temporary T = (1 << ");
          print_generic_expr (dump_file, bit1);
          fprintf (dump_file, ") | (1 << ");
          print_generic_expr (dump_file, bit2);
          fprintf (dump_file, ")\n");
        }

      return true;
    }

  /* See if we have two bit tests of the same name in both tests.
     In that case remove the outer test and change the inner one to
     test for name & (bits1 | bits2) != 0.  */
  else if (recognize_bits_test (inner_cond, &name1, &bits1, !inner_inv)
           && recognize_bits_test (outer_cond, &name2, &bits2, !outer_inv))
    {
      gimple_stmt_iterator gsi;
      tree t;

      /* Find the common name which is bit-tested.  The recognized
         name may also sit in the bits operand, so try all four
         combinations.  */
      if (name1 == name2)
        ;
      else if (bits1 == bits2)
        {
          std::swap (name2, bits2);
          std::swap (name1, bits1);
        }
      else if (name1 == bits2)
        std::swap (name2, bits2);
      else if (bits1 == name2)
        std::swap (name1, bits1);
      else
        return false;

      /* As we strip non-widening conversions in finding a common
         name that is tested make sure to end up with an integral
         type for building the bit operations.  */
      if (TYPE_PRECISION (TREE_TYPE (bits1))
          >= TYPE_PRECISION (TREE_TYPE (bits2)))
        {
          bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
          name1 = fold_convert (TREE_TYPE (bits1), name1);
          bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
          bits2 = fold_convert (TREE_TYPE (bits1), bits2);
        }
      else
        {
          bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
          name1 = fold_convert (TREE_TYPE (bits2), name1);
          bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
          bits1 = fold_convert (TREE_TYPE (bits2), bits1);
        }

      /* Do it.  Build name & (bits1 | bits2) != 0 as the new inner
         condition.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR, boolean_type_node, t,
                       build_int_cst (TREE_TYPE (t), 0));
      t = canonicalize_cond_expr_cond (t);
      if (!t)
        return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
        outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);
      update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);

      if (dump_file)
        {
          fprintf (dump_file, "optimizing bits or bits test to ");
          print_generic_expr (dump_file, name1);
          fprintf (dump_file, " & T != 0\nwith temporary T = ");
          print_generic_expr (dump_file, bits1);
          fprintf (dump_file, " | ");
          print_generic_expr (dump_file, bits2);
          fprintf (dump_file, "\n");
        }

      return true;
    }

  /* See if we have two comparisons that we can merge into one.  */
  else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
           && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison)
    {
      tree t;
      enum tree_code inner_cond_code = gimple_cond_code (inner_cond);
      enum tree_code outer_cond_code = gimple_cond_code (outer_cond);

      /* Invert comparisons if necessary (and possible).  Inverting
         can fail (ERROR_MARK) e.g. for floating-point comparisons
         when NaNs are honored.  */
      if (inner_inv)
        inner_cond_code = invert_tree_comparison (inner_cond_code,
          HONOR_NANS (gimple_cond_lhs (inner_cond)));
      if (inner_cond_code == ERROR_MARK)
        return false;
      if (outer_inv)
        outer_cond_code = invert_tree_comparison (outer_cond_code,
          HONOR_NANS (gimple_cond_lhs (outer_cond)));
      if (outer_cond_code == ERROR_MARK)
        return false;
      /* Don't return false so fast, try maybe_fold_or_comparisons?  */

      if (!(t = maybe_fold_and_comparisons (inner_cond_code,
                                            gimple_cond_lhs (inner_cond),
                                            gimple_cond_rhs (inner_cond),
                                            outer_cond_code,
                                            gimple_cond_lhs (outer_cond),
                                            gimple_cond_rhs (outer_cond))))
        {
          /* The comparisons did not fold to a single one; emit an
             explicit inner AND outer conjunction instead, if short
             circuiting is not required.  */
          tree t1, t2;
          gimple_stmt_iterator gsi;
          if (!LOGICAL_OP_NON_SHORT_CIRCUIT || flag_sanitize_coverage)
            return false;
          /* Only do this optimization if the inner bb contains only the
             conditional.  */
          if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb
                                       (inner_cond_bb)))
            return false;
          t1 = fold_build2_loc (gimple_location (inner_cond),
                                inner_cond_code,
                                boolean_type_node,
                                gimple_cond_lhs (inner_cond),
                                gimple_cond_rhs (inner_cond));
          t2 = fold_build2_loc (gimple_location (outer_cond),
                                outer_cond_code,
                                boolean_type_node,
                                gimple_cond_lhs (outer_cond),
                                gimple_cond_rhs (outer_cond));
          t = fold_build2_loc (gimple_location (inner_cond),
                               TRUTH_AND_EXPR, boolean_type_node, t1, t2);
          if (result_inv)
            {
              t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
              result_inv = false;
            }
          gsi = gsi_for_stmt (inner_cond);
          t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr, NULL, true,
                                          GSI_SAME_STMT);
        }
      if (result_inv)
        t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
        return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
        outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);
      update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);

      if (dump_file)
        {
          fprintf (dump_file, "optimizing two comparisons to ");
          print_generic_expr (dump_file, t);
          fprintf (dump_file, "\n");
        }

      return true;
    }

  return false;
}

/* Helper function for tree_ssa_ifcombine_bb.  Recognize a CFG pattern and
   dispatch to the appropriate if-conversion helper for a particular
   set of INNER_COND_BB, OUTER_COND_BB, THEN_BB and ELSE_BB.
   PHI_PRED_BB should be one of INNER_COND_BB, THEN_BB or ELSE_BB.
*/

static bool
tree_ssa_ifcombine_bb_1 (basic_block inner_cond_bb, basic_block outer_cond_bb,
                         basic_block then_bb, basic_block else_bb,
                         basic_block phi_pred_bb)
{
  /* The && form is characterized by a common else_bb with
     the two edges leading to it mergable.  The latter is
     guaranteed by matching PHI arguments in the else_bb and
     the inner cond_bb having no side-effects.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &else_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto inner_cond_bb; else goto else_bb;
           <inner_cond_bb>
             if (p) goto ...; else goto else_bb;
             ...
           <else_bb>
             ...  */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, false,
                                false);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &else_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto else_bb; else goto inner_cond_bb;
           <inner_cond_bb>
             if (p) goto ...; else goto else_bb;
             ...
           <else_bb>
             ...  */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, true,
                                false);
    }

  /* The || form is characterized by a common then_bb with the
     two edges leading to it mergable.  The latter is guaranteed
     by matching PHI arguments in the then_bb and the inner cond_bb
     having no side-effects.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &then_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto then_bb; else goto inner_cond_bb;
           <inner_cond_bb>
             if (p) goto then_bb; else goto ...;
           <then_bb>
             ...  */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, true,
                                true);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &then_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto inner_cond_bb; else goto then_bb;
           <inner_cond_bb>
             if (p) goto then_bb; else goto ...;
           <then_bb>
             ...  */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, false,
                                true);
    }

  return false;
}

/* Recognize a CFG pattern and dispatch to the appropriate
   if-conversion helper.  We start with BB as the innermost
   worker basic-block.  Returns true if a transformation was done.  */

static bool
tree_ssa_ifcombine_bb (basic_block inner_cond_bb)
{
  basic_block then_bb = NULL, else_bb = NULL;

  if (!recognize_if_then_else (inner_cond_bb, &then_bb, &else_bb))
    return false;

  /* Recognize && and || of two conditions with a common
     then/else block which entry edges we can merge.  That is:
       if (a || b)
         ;
     and
       if (a && b)
         ;
     This requires a single predecessor of the inner cond_bb.  */
  if (single_pred_p (inner_cond_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      basic_block outer_cond_bb = single_pred (inner_cond_bb);

      if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb,
                                   then_bb, else_bb, inner_cond_bb))
        return true;

      if (forwarder_block_to (else_bb, then_bb))
        {
          /* Other possibilities for the && form, if else_bb is
             empty forwarder block to then_bb.  Compared to the above simpler
             forms this can be treated as if then_bb and else_bb were swapped,
             and the corresponding inner_cond_bb not inverted because of that.
             For same_phi_args_p we look at equality of arguments between
             edge from outer_cond_bb and the forwarder block.  */
          if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
                                       then_bb, else_bb))
            return true;
        }
      else if (forwarder_block_to (then_bb, else_bb))
        {
          /* Other possibilities for the || form, if then_bb is
             empty forwarder block to else_bb.  Compared to the above simpler
             forms this can be treated as if then_bb and else_bb were swapped,
             and the corresponding inner_cond_bb not inverted because of that.
             For same_phi_args_p we look at equality of arguments between
             edge from outer_cond_bb and the forwarder block.  */
          if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
                                       then_bb, then_bb))
            return true;
        }
    }

  return false;
}

/* Main entry for the tree if-conversion pass.  */

namespace {

const pass_data pass_data_tree_ifcombine =
{
  GIMPLE_PASS, /* type */
  "ifcombine", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_IFCOMBINE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tree_ifcombine : public gimple_opt_pass
{
public:
  pass_tree_ifcombine (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tree_ifcombine, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_tree_ifcombine

unsigned int
pass_tree_ifcombine::execute (function *fun)
{
  basic_block *bbs;
  bool cfg_changed = false;
  int i;

  bbs = single_pred_before_succ_order ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* Search every basic block for COND_EXPR we may be able to optimize.

     We walk the blocks in order that guarantees that a block with
     a single predecessor is processed after the predecessor.
     This ensures that we collapse outer ifs before visiting the
     inner ones, and also that we do not try to visit a removed
     block.  This is opposite of PHI-OPT, because we cascade the
     combining rather than cascading PHIs.  */
  for (i = n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
    {
      basic_block bb = bbs[i];
      gimple *stmt = last_stmt (bb);

      if (stmt
          && gimple_code (stmt) == GIMPLE_COND)
        if (tree_ssa_ifcombine_bb (bb))
          {
            /* Clear range info from all stmts in BB which is now executed
               conditional on an always true/false condition.  */
            reset_flow_sensitive_info_in_bb (bb);
            cfg_changed |= true;
          }
    }

  free (bbs);

  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_tree_ifcombine (gcc::context *ctxt)
{
  return new pass_tree_ifcombine (ctxt);
}