1 /* Post-reload compare elimination. 2 Copyright (C) 2010-2018 Free Software Foundation, Inc. 3 4 This file is part of GCC. 5 6 GCC is free software; you can redistribute it and/or modify it under 7 the terms of the GNU General Public License as published by the Free 8 Software Foundation; either version 3, or (at your option) any later 9 version. 10 11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY 12 WARRANTY; without even the implied warranty of MERCHANTABILITY or 13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 14 for more details. 15 16 You should have received a copy of the GNU General Public License 17 along with GCC; see the file COPYING3. If not see 18 <http://www.gnu.org/licenses/>. */ 19 20 /* There is a set of targets whose general-purpose move or addition 21 instructions clobber the flags. These targets cannot split their 22 CBRANCH/CSTORE etc patterns before reload is complete, lest reload 23 itself insert these instructions in between the flags setter and user. 24 Because these targets cannot split the compare from the use, they 25 cannot make use of the comparison elimination offered by the combine pass. 26 27 This is a small pass intended to provide comparison elimination similar to 28 what is available via NOTICE_UPDATE_CC for cc0 targets. This should help 29 encourage cc0 targets to convert to an explicit post-reload representation 30 of the flags. 31 32 This pass assumes: 33 34 (0) CBRANCH/CSTORE etc have been split in pass_split_after_reload. 35 36 (1) All comparison patterns are represented as 37 38 [(set (reg:CC) (compare:CC (reg) (reg_or_immediate)))] 39 40 (2) All insn patterns that modify the flags are represented as 41 42 [(set (reg) (operation) 43 (clobber (reg:CC))] 44 45 (3) If an insn of form (2) can usefully set the flags, there is 46 another pattern of the form 47 48 [(set (reg:CCM) (compare:CCM (operation) (immediate))) 49 (set (reg) (operation)] 50 51 The mode CCM will be chosen as if by SELECT_CC_MODE. 52 53 Note that unlike NOTICE_UPDATE_CC, we do not handle memory operands. 54 This could be handled as a future enhancement. 55 */ 56 57 #include "config.h" 58 #include "system.h" 59 #include "coretypes.h" 60 #include "backend.h" 61 #include "target.h" 62 #include "rtl.h" 63 #include "df.h" 64 #include "memmodel.h" 65 #include "tm_p.h" 66 #include "insn-config.h" 67 #include "recog.h" 68 #include "emit-rtl.h" 69 #include "cfgrtl.h" 70 #include "tree-pass.h" 71 #include "domwalk.h" 72 73 74 /* These structures describe a comparison and how it is used. */ 75 76 /* The choice of maximum 3 uses comes from wanting to eliminate the two 77 duplicate compares from a three-way branch on the sign of a value. 78 This is also sufficient to eliminate the duplicate compare against the 79 high-part of a double-word comparison. */ 80 #define MAX_CMP_USE 3 81 82 struct comparison_use 83 { 84 /* The instruction in which the result of the compare is used. */ 85 rtx_insn *insn; 86 /* The location of the flags register within the use. */ 87 rtx *loc; 88 /* The comparison code applied against the flags register. */ 89 enum rtx_code code; 90 }; 91 92 struct comparison 93 { 94 /* The comparison instruction. */ 95 rtx_insn *insn; 96 97 /* The insn prior to the comparison insn that clobbers the flags. */ 98 rtx_insn *prev_clobber; 99 100 /* The insn prior to the comparison insn that sets in_a REG. */ 101 rtx_insn *in_a_setter; 102 103 /* The two values being compared. These will be either REGs or 104 constants. */ 105 rtx in_a, in_b; 106 107 /* The REG_EH_REGION of the comparison. */ 108 rtx eh_note; 109 110 /* Information about how this comparison is used. */ 111 struct comparison_use uses[MAX_CMP_USE]; 112 113 /* The original CC_MODE for this comparison. */ 114 machine_mode orig_mode; 115 116 /* The number of uses identified for this comparison. */ 117 unsigned short n_uses; 118 119 /* True if not all uses of this comparison have been identified. 120 This can happen either for overflowing the array above, or if 121 the flags register is used in some unusual context. */ 122 bool missing_uses; 123 124 /* True if its inputs are still valid at the end of the block. */ 125 bool inputs_valid; 126 }; 127 128 static vec<comparison *> all_compares; 129 130 /* Look for a "conforming" comparison, as defined above. If valid, return 131 the rtx for the COMPARE itself. */ 132 133 static rtx 134 conforming_compare (rtx_insn *insn) 135 { 136 rtx set, src, dest; 137 138 set = single_set (insn); 139 if (set == NULL) 140 return NULL; 141 142 src = SET_SRC (set); 143 if (GET_CODE (src) != COMPARE) 144 return NULL; 145 146 dest = SET_DEST (set); 147 if (!REG_P (dest) || REGNO (dest) != targetm.flags_regnum) 148 return NULL; 149 150 if (!REG_P (XEXP (src, 0))) 151 return NULL; 152 153 if (CONSTANT_P (XEXP (src, 1)) || REG_P (XEXP (src, 1))) 154 return src; 155 156 if (GET_CODE (XEXP (src, 1)) == UNSPEC) 157 { 158 for (int i = 0; i < XVECLEN (XEXP (src, 1), 0); i++) 159 if (!REG_P (XVECEXP (XEXP (src, 1), 0, i))) 160 return NULL; 161 return src; 162 } 163 164 return NULL; 165 } 166 167 /* Look for a pattern of the "correct" form for an insn with a flags clobber 168 for which we may be able to eliminate a compare later. We're not looking 169 to validate any inputs at this time, merely see that the basic shape is 170 correct. The term "arithmetic" may be somewhat misleading... */ 171 172 static bool 173 arithmetic_flags_clobber_p (rtx_insn *insn) 174 { 175 rtx pat, x; 176 177 if (!NONJUMP_INSN_P (insn)) 178 return false; 179 pat = PATTERN (insn); 180 if (asm_noperands (pat) >= 0) 181 return false; 182 183 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) == 2) 184 { 185 x = XVECEXP (pat, 0, 0); 186 if (GET_CODE (x) != SET) 187 return false; 188 x = SET_DEST (x); 189 if (!REG_P (x)) 190 return false; 191 192 x = XVECEXP (pat, 0, 1); 193 if (GET_CODE (x) == CLOBBER) 194 { 195 x = XEXP (x, 0); 196 if (REG_P (x) && REGNO (x) == targetm.flags_regnum) 197 return true; 198 } 199 } 200 201 return false; 202 } 203 204 /* Look for uses of FLAGS in INSN. If we find one we can analyze, record 205 it in CMP; otherwise indicate that we've missed a use. */ 206 207 static void 208 find_flags_uses_in_insn (struct comparison *cmp, rtx_insn *insn) 209 { 210 df_ref use; 211 212 /* If we've already lost track of uses, don't bother collecting more. */ 213 if (cmp->missing_uses) 214 return; 215 216 /* Find a USE of the flags register. */ 217 FOR_EACH_INSN_USE (use, insn) 218 if (DF_REF_REGNO (use) == targetm.flags_regnum) 219 { 220 rtx x, *loc; 221 222 /* If this is an unusual use, quit. */ 223 if (DF_REF_TYPE (use) != DF_REF_REG_USE) 224 goto fail; 225 226 /* If we've run out of slots to record uses, quit. */ 227 if (cmp->n_uses == MAX_CMP_USE) 228 goto fail; 229 230 /* Unfortunately the location of the flags register, while present 231 in the reference structure, doesn't help. We need to find the 232 comparison code that is outer to the actual flags use. */ 233 loc = DF_REF_LOC (use); 234 x = PATTERN (insn); 235 if (GET_CODE (x) == PARALLEL) 236 x = XVECEXP (x, 0, 0); 237 x = SET_SRC (x); 238 if (GET_CODE (x) == IF_THEN_ELSE) 239 x = XEXP (x, 0); 240 if (COMPARISON_P (x) 241 && loc == &XEXP (x, 0) 242 && XEXP (x, 1) == const0_rtx) 243 { 244 /* We've found a use of the flags that we understand. */ 245 struct comparison_use *cuse = &cmp->uses[cmp->n_uses++]; 246 cuse->insn = insn; 247 cuse->loc = loc; 248 cuse->code = GET_CODE (x); 249 } 250 else 251 goto fail; 252 } 253 return; 254 255 fail: 256 /* We failed to recognize this use of the flags register. */ 257 cmp->missing_uses = true; 258 } 259 260 class find_comparison_dom_walker : public dom_walker 261 { 262 public: 263 find_comparison_dom_walker (cdi_direction direction) 264 : dom_walker (direction) {} 265 266 virtual edge before_dom_children (basic_block); 267 }; 268 269 /* Return true if conforming COMPARE with EH_NOTE is redundant with comparison 270 CMP and can thus be eliminated. */ 271 272 static bool 273 can_eliminate_compare (rtx compare, rtx eh_note, struct comparison *cmp) 274 { 275 /* Take care that it's in the same EH region. */ 276 if (cfun->can_throw_non_call_exceptions 277 && !rtx_equal_p (eh_note, cmp->eh_note)) 278 return false; 279 280 /* Make sure the compare is redundant with the previous. */ 281 if (!rtx_equal_p (XEXP (compare, 0), cmp->in_a) 282 || !rtx_equal_p (XEXP (compare, 1), cmp->in_b)) 283 return false; 284 285 /* New mode must be compatible with the previous compare mode. */ 286 machine_mode new_mode 287 = targetm.cc_modes_compatible (GET_MODE (compare), cmp->orig_mode); 288 289 if (new_mode == VOIDmode) 290 return false; 291 292 if (cmp->orig_mode != new_mode) 293 { 294 /* Generate new comparison for substitution. */ 295 rtx flags = gen_rtx_REG (new_mode, targetm.flags_regnum); 296 rtx x = gen_rtx_COMPARE (new_mode, cmp->in_a, cmp->in_b); 297 x = gen_rtx_SET (flags, x); 298 299 if (!validate_change (cmp->insn, &PATTERN (cmp->insn), x, false)) 300 return false; 301 302 cmp->orig_mode = new_mode; 303 } 304 305 return true; 306 } 307 308 /* Identify comparison instructions within BB. If the flags from the last 309 compare in the BB is live at the end of the block, install the compare 310 in BB->AUX. Called via dom_walker.walk (). */ 311 312 edge 313 find_comparison_dom_walker::before_dom_children (basic_block bb) 314 { 315 rtx_insn *insn, *next; 316 bool need_purge = false; 317 rtx_insn *last_setter[FIRST_PSEUDO_REGISTER]; 318 319 /* The last comparison that was made. Will be reset to NULL 320 once the flags are clobbered. */ 321 struct comparison *last_cmp = NULL; 322 323 /* True iff the last comparison has not been clobbered, nor 324 have its inputs. Used to eliminate duplicate compares. */ 325 bool last_cmp_valid = false; 326 327 /* The last insn that clobbered the flags, if that insn is of 328 a form that may be valid for eliminating a following compare. 329 To be reset to NULL once the flags are set otherwise. */ 330 rtx_insn *last_clobber = NULL; 331 332 /* Propagate the last live comparison throughout the extended basic block. */ 333 if (single_pred_p (bb)) 334 { 335 last_cmp = (struct comparison *) single_pred (bb)->aux; 336 if (last_cmp) 337 last_cmp_valid = last_cmp->inputs_valid; 338 } 339 340 memset (last_setter, 0, sizeof (last_setter)); 341 for (insn = BB_HEAD (bb); insn; insn = next) 342 { 343 rtx src; 344 345 next = (insn == BB_END (bb) ? NULL : NEXT_INSN (insn)); 346 if (!NONDEBUG_INSN_P (insn)) 347 continue; 348 349 src = conforming_compare (insn); 350 if (src) 351 { 352 rtx eh_note = NULL; 353 354 if (cfun->can_throw_non_call_exceptions) 355 eh_note = find_reg_note (insn, REG_EH_REGION, NULL); 356 357 if (last_cmp_valid && can_eliminate_compare (src, eh_note, last_cmp)) 358 { 359 if (eh_note) 360 need_purge = true; 361 delete_insn (insn); 362 continue; 363 } 364 365 last_cmp = XCNEW (struct comparison); 366 last_cmp->insn = insn; 367 last_cmp->prev_clobber = last_clobber; 368 last_cmp->in_a = XEXP (src, 0); 369 last_cmp->in_b = XEXP (src, 1); 370 last_cmp->eh_note = eh_note; 371 last_cmp->orig_mode = GET_MODE (src); 372 if (last_cmp->in_b == const0_rtx 373 && last_setter[REGNO (last_cmp->in_a)]) 374 { 375 rtx set = single_set (last_setter[REGNO (last_cmp->in_a)]); 376 if (set && rtx_equal_p (SET_DEST (set), last_cmp->in_a)) 377 last_cmp->in_a_setter = last_setter[REGNO (last_cmp->in_a)]; 378 } 379 all_compares.safe_push (last_cmp); 380 381 /* It's unusual, but be prepared for comparison patterns that 382 also clobber an input, or perhaps a scratch. */ 383 last_clobber = NULL; 384 last_cmp_valid = true; 385 } 386 387 else 388 { 389 /* Notice if this instruction uses the flags register. */ 390 if (last_cmp) 391 find_flags_uses_in_insn (last_cmp, insn); 392 393 /* Notice if this instruction kills the flags register. */ 394 df_ref def; 395 FOR_EACH_INSN_DEF (def, insn) 396 if (DF_REF_REGNO (def) == targetm.flags_regnum) 397 { 398 /* See if this insn could be the "clobber" that eliminates 399 a future comparison. */ 400 last_clobber = (arithmetic_flags_clobber_p (insn) 401 ? insn : NULL); 402 403 /* In either case, the previous compare is no longer valid. */ 404 last_cmp = NULL; 405 last_cmp_valid = false; 406 break; 407 } 408 } 409 410 /* Notice if any of the inputs to the comparison have changed 411 and remember last insn that sets each register. */ 412 df_ref def; 413 FOR_EACH_INSN_DEF (def, insn) 414 { 415 if (last_cmp_valid 416 && (DF_REF_REGNO (def) == REGNO (last_cmp->in_a) 417 || (REG_P (last_cmp->in_b) 418 && DF_REF_REGNO (def) == REGNO (last_cmp->in_b)))) 419 last_cmp_valid = false; 420 last_setter[DF_REF_REGNO (def)] = insn; 421 } 422 } 423 424 /* Remember the live comparison for subsequent members of 425 the extended basic block. */ 426 if (last_cmp) 427 { 428 bb->aux = last_cmp; 429 last_cmp->inputs_valid = last_cmp_valid; 430 431 /* Look to see if the flags register is live outgoing here, and 432 incoming to any successor not part of the extended basic block. */ 433 if (bitmap_bit_p (df_get_live_out (bb), targetm.flags_regnum)) 434 { 435 edge e; 436 edge_iterator ei; 437 438 FOR_EACH_EDGE (e, ei, bb->succs) 439 { 440 basic_block dest = e->dest; 441 if (bitmap_bit_p (df_get_live_in (bb), targetm.flags_regnum) 442 && !single_pred_p (dest)) 443 { 444 last_cmp->missing_uses = true; 445 break; 446 } 447 } 448 } 449 } 450 451 /* If we deleted a compare with a REG_EH_REGION note, we may need to 452 remove EH edges. */ 453 if (need_purge) 454 purge_dead_edges (bb); 455 456 return NULL; 457 } 458 459 /* Find all comparisons in the function. */ 460 461 static void 462 find_comparisons (void) 463 { 464 calculate_dominance_info (CDI_DOMINATORS); 465 466 find_comparison_dom_walker (CDI_DOMINATORS) 467 .walk (cfun->cfg->x_entry_block_ptr); 468 469 clear_aux_for_blocks (); 470 free_dominance_info (CDI_DOMINATORS); 471 } 472 473 /* Select an alternate CC_MODE for a comparison insn comparing A and B. 474 Note that inputs are almost certainly different than the IN_A and IN_B 475 stored in CMP -- we're called while attempting to eliminate the compare 476 after all. Return the new FLAGS rtx if successful, else return NULL. 477 Note that this function may start a change group. */ 478 479 static rtx 480 maybe_select_cc_mode (struct comparison *cmp, rtx a ATTRIBUTE_UNUSED, 481 rtx b ATTRIBUTE_UNUSED) 482 { 483 machine_mode sel_mode; 484 const int n = cmp->n_uses; 485 rtx flags = NULL; 486 487 #ifndef SELECT_CC_MODE 488 /* Minimize code differences when this target macro is undefined. */ 489 return NULL; 490 #define SELECT_CC_MODE(A,B,C) (gcc_unreachable (), VOIDmode) 491 #endif 492 493 /* If we don't have access to all of the uses, we can't validate. */ 494 if (cmp->missing_uses || n == 0) 495 return NULL; 496 497 /* Find a new mode that works for all of the uses. Special case the 498 common case of exactly one use. */ 499 if (n == 1) 500 { 501 sel_mode = SELECT_CC_MODE (cmp->uses[0].code, a, b); 502 if (sel_mode != cmp->orig_mode) 503 { 504 flags = gen_rtx_REG (sel_mode, targetm.flags_regnum); 505 validate_change (cmp->uses[0].insn, cmp->uses[0].loc, flags, true); 506 } 507 } 508 else 509 { 510 int i; 511 512 sel_mode = SELECT_CC_MODE (cmp->uses[0].code, a, b); 513 for (i = 1; i < n; ++i) 514 { 515 machine_mode new_mode = SELECT_CC_MODE (cmp->uses[i].code, a, b); 516 if (new_mode != sel_mode) 517 { 518 sel_mode = targetm.cc_modes_compatible (sel_mode, new_mode); 519 if (sel_mode == VOIDmode) 520 return NULL; 521 } 522 } 523 524 if (sel_mode != cmp->orig_mode) 525 { 526 flags = gen_rtx_REG (sel_mode, targetm.flags_regnum); 527 for (i = 0; i < n; ++i) 528 validate_change (cmp->uses[i].insn, cmp->uses[i].loc, flags, true); 529 } 530 } 531 532 return flags; 533 } 534 535 /* Return a register RTX holding the same value at START as REG at END, or 536 NULL_RTX if there is none. */ 537 538 static rtx 539 equivalent_reg_at_start (rtx reg, rtx_insn *end, rtx_insn *start) 540 { 541 machine_mode orig_mode = GET_MODE (reg); 542 rtx_insn *bb_head = BB_HEAD (BLOCK_FOR_INSN (end)); 543 544 for (rtx_insn *insn = PREV_INSN (end); 545 insn != start; 546 insn = PREV_INSN (insn)) 547 { 548 const int abnormal_flags 549 = (DF_REF_CONDITIONAL | DF_REF_PARTIAL | DF_REF_MAY_CLOBBER 550 | DF_REF_MUST_CLOBBER | DF_REF_SIGN_EXTRACT 551 | DF_REF_ZERO_EXTRACT | DF_REF_STRICT_LOW_PART 552 | DF_REF_PRE_POST_MODIFY); 553 df_ref def; 554 555 /* Note that the BB_HEAD is always either a note or a label, but in 556 any case it means that REG is defined outside the block. */ 557 if (insn == bb_head) 558 return NULL_RTX; 559 if (NOTE_P (insn) || DEBUG_INSN_P (insn)) 560 continue; 561 562 /* Find a possible def of REG in INSN. */ 563 FOR_EACH_INSN_DEF (def, insn) 564 if (DF_REF_REGNO (def) == REGNO (reg)) 565 break; 566 567 /* No definitions of REG; continue searching. */ 568 if (def == NULL) 569 continue; 570 571 /* Bail if this is not a totally normal set of REG. */ 572 if (DF_REF_IS_ARTIFICIAL (def)) 573 return NULL_RTX; 574 if (DF_REF_FLAGS (def) & abnormal_flags) 575 return NULL_RTX; 576 577 /* We've found an insn between the compare and the clobber that sets 578 REG. Given that pass_cprop_hardreg has not yet run, we still find 579 situations in which we can usefully look through a copy insn. */ 580 rtx x = single_set (insn); 581 if (x == NULL_RTX) 582 return NULL_RTX; 583 reg = SET_SRC (x); 584 if (!REG_P (reg)) 585 return NULL_RTX; 586 } 587 588 if (GET_MODE (reg) != orig_mode) 589 return NULL_RTX; 590 591 return reg; 592 } 593 594 /* Return true if it is okay to merge the comparison CMP_INSN with 595 the instruction ARITH_INSN. Both instructions are assumed to be in the 596 same basic block with ARITH_INSN appearing before CMP_INSN. This checks 597 that there are no uses or defs of the condition flags or control flow 598 changes between the two instructions. */ 599 600 static bool 601 can_merge_compare_into_arith (rtx_insn *cmp_insn, rtx_insn *arith_insn) 602 { 603 for (rtx_insn *insn = PREV_INSN (cmp_insn); 604 insn && insn != arith_insn; 605 insn = PREV_INSN (insn)) 606 { 607 if (!NONDEBUG_INSN_P (insn)) 608 continue; 609 /* Bail if there are jumps or calls in between. */ 610 if (!NONJUMP_INSN_P (insn)) 611 return false; 612 613 /* Bail on old-style asm statements because they lack 614 data flow information. */ 615 if (GET_CODE (PATTERN (insn)) == ASM_INPUT) 616 return false; 617 618 df_ref ref; 619 /* Find a USE of the flags register. */ 620 FOR_EACH_INSN_USE (ref, insn) 621 if (DF_REF_REGNO (ref) == targetm.flags_regnum) 622 return false; 623 624 /* Find a DEF of the flags register. */ 625 FOR_EACH_INSN_DEF (ref, insn) 626 if (DF_REF_REGNO (ref) == targetm.flags_regnum) 627 return false; 628 } 629 return true; 630 } 631 632 /* Given two SET expressions, SET_A and SET_B determine whether they form 633 a recognizable pattern when emitted in parallel. Return that parallel 634 if so. Otherwise return NULL. */ 635 636 static rtx 637 try_validate_parallel (rtx set_a, rtx set_b) 638 { 639 rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set_a, set_b)); 640 rtx_insn *insn = make_insn_raw (par); 641 642 if (insn_invalid_p (insn, false)) 643 { 644 crtl->emit.x_cur_insn_uid--; 645 return NULL_RTX; 646 } 647 648 SET_PREV_INSN (insn) = NULL_RTX; 649 SET_NEXT_INSN (insn) = NULL_RTX; 650 INSN_LOCATION (insn) = 0; 651 return insn; 652 } 653 654 /* For a comparison instruction described by CMP check if it compares a 655 register with zero i.e. it is of the form CC := CMP R1, 0. 656 If it is, find the instruction defining R1 (say I1) and try to create a 657 PARALLEL consisting of I1 and the comparison, representing a flag-setting 658 arithmetic instruction. Example: 659 I1: R1 := R2 + R3 660 <instructions that don't read the condition register> 661 I2: CC := CMP R1 0 662 I2 can be merged with I1 into: 663 I1: { CC := CMP (R2 + R3) 0 ; R1 := R2 + R3 } 664 This catches cases where R1 is used between I1 and I2 and therefore 665 combine and other RTL optimisations will not try to propagate it into 666 I2. Return true if we succeeded in merging CMP. */ 667 668 static bool 669 try_merge_compare (struct comparison *cmp) 670 { 671 rtx_insn *cmp_insn = cmp->insn; 672 673 if (cmp->in_b != const0_rtx || cmp->in_a_setter == NULL) 674 return false; 675 rtx in_a = cmp->in_a; 676 df_ref use; 677 678 FOR_EACH_INSN_USE (use, cmp_insn) 679 if (DF_REF_REGNO (use) == REGNO (in_a)) 680 break; 681 if (!use) 682 return false; 683 684 rtx_insn *def_insn = cmp->in_a_setter; 685 rtx set = single_set (def_insn); 686 if (!set) 687 return false; 688 689 if (!can_merge_compare_into_arith (cmp_insn, def_insn)) 690 return false; 691 692 rtx src = SET_SRC (set); 693 rtx flags = maybe_select_cc_mode (cmp, src, CONST0_RTX (GET_MODE (src))); 694 if (!flags) 695 { 696 /* We may already have a change group going through maybe_select_cc_mode. 697 Discard it properly. */ 698 cancel_changes (0); 699 return false; 700 } 701 702 rtx flag_set 703 = gen_rtx_SET (flags, gen_rtx_COMPARE (GET_MODE (flags), 704 copy_rtx (src), 705 CONST0_RTX (GET_MODE (src)))); 706 rtx arith_set = copy_rtx (PATTERN (def_insn)); 707 rtx par = try_validate_parallel (flag_set, arith_set); 708 if (!par) 709 { 710 /* We may already have a change group going through maybe_select_cc_mode. 711 Discard it properly. */ 712 cancel_changes (0); 713 return false; 714 } 715 if (!apply_change_group ()) 716 return false; 717 emit_insn_after (par, def_insn); 718 delete_insn (def_insn); 719 delete_insn (cmp->insn); 720 return true; 721 } 722 723 /* Attempt to replace a comparison with a prior arithmetic insn that can 724 compute the same flags value as the comparison itself. Return true if 725 successful, having made all rtl modifications necessary. */ 726 727 static bool 728 try_eliminate_compare (struct comparison *cmp) 729 { 730 rtx flags, in_a, in_b, cmp_src; 731 732 if (try_merge_compare (cmp)) 733 return true; 734 735 /* We must have found an interesting "clobber" preceding the compare. */ 736 if (cmp->prev_clobber == NULL) 737 return false; 738 739 /* Verify that IN_A is not clobbered in between CMP and PREV_CLOBBER. 740 Given that this target requires this pass, we can assume that most 741 insns do clobber the flags, and so the distance between the compare 742 and the clobber is likely to be small. */ 743 /* ??? This is one point at which one could argue that DF_REF_CHAIN would 744 be useful, but it is thought to be too heavy-weight a solution here. */ 745 in_a = equivalent_reg_at_start (cmp->in_a, cmp->insn, cmp->prev_clobber); 746 if (!in_a) 747 return false; 748 749 /* Likewise for IN_B if need be. */ 750 if (CONSTANT_P (cmp->in_b)) 751 in_b = cmp->in_b; 752 else if (REG_P (cmp->in_b)) 753 { 754 in_b = equivalent_reg_at_start (cmp->in_b, cmp->insn, cmp->prev_clobber); 755 if (!in_b) 756 return false; 757 } 758 else if (GET_CODE (cmp->in_b) == UNSPEC) 759 { 760 const int len = XVECLEN (cmp->in_b, 0); 761 rtvec v = rtvec_alloc (len); 762 for (int i = 0; i < len; i++) 763 { 764 rtx r = equivalent_reg_at_start (XVECEXP (cmp->in_b, 0, i), 765 cmp->insn, cmp->prev_clobber); 766 if (!r) 767 return false; 768 RTVEC_ELT (v, i) = r; 769 } 770 in_b = gen_rtx_UNSPEC (GET_MODE (cmp->in_b), v, XINT (cmp->in_b, 1)); 771 } 772 else 773 gcc_unreachable (); 774 775 /* We've reached PREV_CLOBBER without finding a modification of IN_A. 776 Validate that PREV_CLOBBER itself does in fact refer to IN_A. Do 777 recall that we've already validated the shape of PREV_CLOBBER. */ 778 rtx_insn *insn = cmp->prev_clobber; 779 780 rtx x = XVECEXP (PATTERN (insn), 0, 0); 781 if (rtx_equal_p (SET_DEST (x), in_a)) 782 cmp_src = SET_SRC (x); 783 784 /* Also check operations with implicit extensions, e.g.: 785 [(set (reg:DI) 786 (zero_extend:DI (plus:SI (reg:SI) (reg:SI)))) 787 (set (reg:CCZ flags) 788 (compare:CCZ (plus:SI (reg:SI) (reg:SI)) 789 (const_int 0)))] */ 790 else if (REG_P (SET_DEST (x)) 791 && REG_P (in_a) 792 && REGNO (SET_DEST (x)) == REGNO (in_a) 793 && (GET_CODE (SET_SRC (x)) == ZERO_EXTEND 794 || GET_CODE (SET_SRC (x)) == SIGN_EXTEND) 795 && GET_MODE (XEXP (SET_SRC (x), 0)) == GET_MODE (in_a)) 796 cmp_src = XEXP (SET_SRC (x), 0); 797 798 /* Also check fully redundant comparisons, e.g.: 799 [(set (reg:SI) 800 (minus:SI (reg:SI) (reg:SI)))) 801 (set (reg:CC flags) 802 (compare:CC (reg:SI) (reg:SI)))] */ 803 else if (REG_P (in_b) 804 && GET_CODE (SET_SRC (x)) == MINUS 805 && rtx_equal_p (XEXP (SET_SRC (x), 0), in_a) 806 && rtx_equal_p (XEXP (SET_SRC (x), 1), in_b)) 807 cmp_src = in_a; 808 809 else 810 return false; 811 812 /* Determine if we ought to use a different CC_MODE here. */ 813 flags = maybe_select_cc_mode (cmp, cmp_src, in_b); 814 if (flags == NULL) 815 flags = gen_rtx_REG (cmp->orig_mode, targetm.flags_regnum); 816 817 /* Generate a new comparison for installation in the setter. */ 818 rtx y = copy_rtx (cmp_src); 819 y = gen_rtx_COMPARE (GET_MODE (flags), y, in_b); 820 y = gen_rtx_SET (flags, y); 821 822 /* Canonicalize instruction to: 823 [(set (reg:CCM) (compare:CCM (operation) (immediate))) 824 (set (reg) (operation)] */ 825 826 rtvec v = rtvec_alloc (2); 827 RTVEC_ELT (v, 0) = y; 828 RTVEC_ELT (v, 1) = x; 829 830 rtx pat = gen_rtx_PARALLEL (VOIDmode, v); 831 832 /* Succeed if the new instruction is valid. Note that we may have started 833 a change group within maybe_select_cc_mode, therefore we must continue. */ 834 validate_change (insn, &PATTERN (insn), pat, true); 835 836 if (!apply_change_group ()) 837 return false; 838 839 /* Success. Delete the compare insn... */ 840 delete_insn (cmp->insn); 841 842 /* ... and any notes that are now invalid due to multiple sets. */ 843 x = find_regno_note (insn, REG_UNUSED, targetm.flags_regnum); 844 if (x) 845 remove_note (insn, x); 846 x = find_reg_note (insn, REG_EQUAL, NULL); 847 if (x) 848 remove_note (insn, x); 849 x = find_reg_note (insn, REG_EQUIV, NULL); 850 if (x) 851 remove_note (insn, x); 852 853 return true; 854 } 855 856 /* Main entry point to the pass. */ 857 858 static unsigned int 859 execute_compare_elim_after_reload (void) 860 { 861 df_analyze (); 862 863 gcc_checking_assert (!all_compares.exists ()); 864 865 /* Locate all comparisons and their uses, and eliminate duplicates. */ 866 find_comparisons (); 867 if (all_compares.exists ()) 868 { 869 struct comparison *cmp; 870 size_t i; 871 872 /* Eliminate comparisons that are redundant with flags computation. */ 873 FOR_EACH_VEC_ELT (all_compares, i, cmp) 874 { 875 try_eliminate_compare (cmp); 876 XDELETE (cmp); 877 } 878 879 all_compares.release (); 880 } 881 882 return 0; 883 } 884 885 namespace { 886 887 const pass_data pass_data_compare_elim_after_reload = 888 { 889 RTL_PASS, /* type */ 890 "cmpelim", /* name */ 891 OPTGROUP_NONE, /* optinfo_flags */ 892 TV_NONE, /* tv_id */ 893 0, /* properties_required */ 894 0, /* properties_provided */ 895 0, /* properties_destroyed */ 896 0, /* todo_flags_start */ 897 ( TODO_df_finish | TODO_df_verify ), /* todo_flags_finish */ 898 }; 899 900 class pass_compare_elim_after_reload : public rtl_opt_pass 901 { 902 public: 903 pass_compare_elim_after_reload (gcc::context *ctxt) 904 : rtl_opt_pass (pass_data_compare_elim_after_reload, ctxt) 905 {} 906 907 /* opt_pass methods: */ 908 virtual bool gate (function *) 909 { 910 /* Setting this target hook value is how a backend indicates the need. */ 911 if (targetm.flags_regnum == INVALID_REGNUM) 912 return false; 913 return flag_compare_elim_after_reload; 914 } 915 916 virtual unsigned int execute (function *) 917 { 918 return execute_compare_elim_after_reload (); 919 } 920 921 }; // class pass_compare_elim_after_reload 922 923 } // anon namespace 924 925 rtl_opt_pass * 926 make_pass_compare_elim_after_reload (gcc::context *ctxt) 927 { 928 return new pass_compare_elim_after_reload (ctxt); 929 } 930