/* Perform simple optimizations to clean up the result of reload.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "machmode.h"
#include "hard-reg-set.h"
#include "rtl.h"
#include "tm_p.h"
#include "obstack.h"
#include "insn-config.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "regs.h"
#include "basic-block.h"
#include "reload.h"
#include "recog.h"
#include "output.h"
#include "cselib.h"
#include "diagnostic-core.h"
#include "except.h"
#include "tree.h"
#include "target.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"
#include "dbgcnt.h"

static int reload_cse_noop_set_p (rtx);
static void reload_cse_simplify (rtx, rtx);
static void reload_cse_regs_1 (rtx);
static int reload_cse_simplify_set (rtx, rtx);
static int reload_cse_simplify_operands (rtx, rtx);

static void reload_combine (void);
static void reload_combine_note_use (rtx *, rtx, int, rtx);
static void reload_combine_note_store (rtx, const_rtx, void *);

static bool reload_cse_move2add (rtx);
static void move2add_note_store (rtx, const_rtx, void *);

/* Call cse / combine like post-reload optimization phases.
   FIRST is the first instruction.  */
void
reload_cse_regs (rtx first ATTRIBUTE_UNUSED)
{
  bool moves_converted;
  reload_cse_regs_1 (first);
  reload_combine ();
  moves_converted = reload_cse_move2add (first);
  if (flag_expensive_optimizations)
    {
      if (moves_converted)
        reload_combine ();
      reload_cse_regs_1 (first);
    }
}

/* See whether a single set SET is a noop.  */
static int
reload_cse_noop_set_p (rtx set)
{
  if (cselib_reg_set_mode (SET_DEST (set)) != GET_MODE (SET_DEST (set)))
    return 0;

  return rtx_equal_for_cselib_p (SET_DEST (set), SET_SRC (set));
}

/* Try to simplify INSN.  */
static void
reload_cse_simplify (rtx insn, rtx testreg)
{
  rtx body = PATTERN (insn);

  if (GET_CODE (body) == SET)
    {
      int count = 0;

      /* Simplify even if we may think it is a no-op.
         We may think a memory load of a value smaller than WORD_SIZE
         is redundant because we haven't taken into account possible
         implicit extension.  reload_cse_simplify_set() will bring
         this out, so it's safer to simplify before we delete.  */
      count += reload_cse_simplify_set (body, insn);

      if (!count && reload_cse_noop_set_p (body))
        {
          rtx value = SET_DEST (body);
          if (REG_P (value)
              && ! REG_FUNCTION_VALUE_P (value))
            value = 0;
          if (check_for_inc_dec (insn))
            delete_insn_and_edges (insn);
          return;
        }

      if (count > 0)
        apply_change_group ();
      else
        reload_cse_simplify_operands (insn, testreg);
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      int count = 0;
      rtx value = NULL_RTX;

      /* Registers mentioned in the clobber list for an asm cannot be reused
         within the body of the asm.  Invalidate those registers now so that
         we don't try to substitute values for them.  */
      if (asm_noperands (body) >= 0)
        {
          for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
            {
              rtx part = XVECEXP (body, 0, i);
              if (GET_CODE (part) == CLOBBER && REG_P (XEXP (part, 0)))
                cselib_invalidate_rtx (XEXP (part, 0));
            }
        }

      /* If every action in a PARALLEL is a noop, we can delete
         the entire PARALLEL.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
        {
          rtx part = XVECEXP (body, 0, i);
          if (GET_CODE (part) == SET)
            {
              if (! reload_cse_noop_set_p (part))
                break;
              if (REG_P (SET_DEST (part))
                  && REG_FUNCTION_VALUE_P (SET_DEST (part)))
                {
                  if (value)
                    break;
                  value = SET_DEST (part);
                }
            }
          else if (GET_CODE (part) != CLOBBER)
            break;
        }

      if (i < 0)
        {
          if (check_for_inc_dec (insn))
            delete_insn_and_edges (insn);
          /* We're done with this insn.  */
          return;
        }

      /* It's not a no-op, but we can try to simplify it.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
        if (GET_CODE (XVECEXP (body, 0, i)) == SET)
          count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);

      if (count > 0)
        apply_change_group ();
      else
        reload_cse_simplify_operands (insn, testreg);
    }
}

/* Do a very simple CSE pass over the hard registers.

   This function detects no-op moves where we happened to assign two
   different pseudo-registers to the same hard register, and then
   copied one to the other.  Reload will generate a useless
   instruction copying a register to itself.

   This function also detects cases where we load a value from memory
   into two different registers, and (if memory is more expensive than
   registers) changes it to simply copy the first register into the
   second register.

   Another optimization is performed that scans the operands of each
   instruction to see whether the value is already available in a
   hard register.  It then replaces the operand with the hard register
   if possible, much like an optional reload would.  */

static void
reload_cse_regs_1 (rtx first)
{
  rtx insn;
  rtx testreg = gen_rtx_REG (VOIDmode, -1);

  cselib_init (CSELIB_RECORD_MEMORY);
  init_alias_analysis ();

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
        reload_cse_simplify (insn, testreg);

      cselib_process_insn (insn);
    }

  /* Clean up.  */
  end_alias_analysis ();
  cselib_finish ();
}

/* Try to simplify a single SET instruction.  SET is the set pattern.
   INSN is the instruction it came from.
   This function only handles one case: if we set a register to a value
   which is not a register, we try to find that value in some other register
   and change the set into a register copy.  */

static int
reload_cse_simplify_set (rtx set, rtx insn)
{
  int did_change = 0;
  int dreg;
  rtx src;
  reg_class_t dclass;
  int old_cost;
  cselib_val *val;
  struct elt_loc_list *l;
#ifdef LOAD_EXTEND_OP
  enum rtx_code extend_op = UNKNOWN;
#endif
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

  dreg = true_regnum (SET_DEST (set));
  if (dreg < 0)
    return 0;

  src = SET_SRC (set);
  if (side_effects_p (src) || true_regnum (src) >= 0)
    return 0;

  dclass = REGNO_REG_CLASS (dreg);

#ifdef LOAD_EXTEND_OP
  /* When replacing a memory with a register, we need to honor assumptions
     that combine made wrt the contents of sign bits.  We'll do this by
     generating an extend instruction instead of a reg->reg copy.  Thus
     the destination must be a register that we can widen.  */
  if (MEM_P (src)
      && GET_MODE_BITSIZE (GET_MODE (src)) < BITS_PER_WORD
      && (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != UNKNOWN
      && !REG_P (SET_DEST (set)))
    return 0;
#endif

  val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0, VOIDmode);
  if (! val)
    return 0;

  /* If memory loads are cheaper than register copies, don't change them.  */
  if (MEM_P (src))
    old_cost = memory_move_cost (GET_MODE (src), dclass, true);
  else if (REG_P (src))
    old_cost = register_move_cost (GET_MODE (src),
                                   REGNO_REG_CLASS (REGNO (src)), dclass);
  else
    old_cost = set_src_cost (src, speed);

  for (l = val->locs; l; l = l->next)
    {
      rtx this_rtx = l->loc;
      int this_cost;

      if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
        {
#ifdef LOAD_EXTEND_OP
          if (extend_op != UNKNOWN)
            {
              HOST_WIDE_INT this_val;

              /* ??? I'm lazy and don't wish to handle CONST_DOUBLE.  Other
                 constants, such as SYMBOL_REF, cannot be extended.  */
              if (!CONST_INT_P (this_rtx))
                continue;

              this_val = INTVAL (this_rtx);
              switch (extend_op)
                {
                case ZERO_EXTEND:
                  this_val &= GET_MODE_MASK (GET_MODE (src));
                  break;
                case SIGN_EXTEND:
                  /* ??? In theory we're already extended.  */
                  if (this_val == trunc_int_for_mode (this_val, GET_MODE (src)))
                    break;
                default:
                  gcc_unreachable ();
                }
              this_rtx = GEN_INT (this_val);
            }
#endif
          this_cost = set_src_cost (this_rtx, speed);
        }
      else if (REG_P (this_rtx))
        {
#ifdef LOAD_EXTEND_OP
          if (extend_op != UNKNOWN)
            {
              this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
              this_cost = set_src_cost (this_rtx, speed);
            }
          else
#endif
            this_cost = register_move_cost (GET_MODE (this_rtx),
                                            REGNO_REG_CLASS (REGNO (this_rtx)),
                                            dclass);
        }
      else
        continue;

      /* If equal costs, prefer registers over anything else.  That
         tends to lead to smaller instructions on some machines.  */
      if (this_cost < old_cost
          || (this_cost == old_cost
              && REG_P (this_rtx)
              && !REG_P (SET_SRC (set))))
        {
#ifdef LOAD_EXTEND_OP
          if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) < BITS_PER_WORD
              && extend_op != UNKNOWN
#ifdef CANNOT_CHANGE_MODE_CLASS
              && !CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
                                            word_mode,
                                            REGNO_REG_CLASS (REGNO (SET_DEST (set))))
#endif
              )
            {
              rtx wide_dest = gen_rtx_REG (word_mode, REGNO (SET_DEST (set)));
              ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
              validate_change (insn, &SET_DEST (set), wide_dest, 1);
            }
#endif

          validate_unshare_change (insn, &SET_SRC (set), this_rtx, 1);
          old_cost = this_cost, did_change = 1;
        }
    }

  return did_change;
}

/* Try to replace operands in INSN with equivalent values that are already
   in registers.  This can be viewed as optional reloading.

   For each non-register operand in the insn, see if any hard regs are
   known to be equivalent to that operand.  Record the alternatives which
   can accept these hard registers.  Among all alternatives, select the
   ones which are better or equal to the one currently matching, where
   "better" is in terms of '?' and '!' constraints.  Among the remaining
   alternatives, select the one which replaces most operands with
   hard registers.  */

static int
reload_cse_simplify_operands (rtx insn, rtx testreg)
{
  int i, j;

  /* For each operand, all registers that are equivalent to it.  */
  HARD_REG_SET equiv_regs[MAX_RECOG_OPERANDS];

  const char *constraints[MAX_RECOG_OPERANDS];

  /* Vector recording how bad an alternative is.  */
  int *alternative_reject;
  /* Vector recording how many registers can be introduced by choosing
     this alternative.  */
  int *alternative_nregs;
  /* Array of vectors recording, for each operand and each alternative,
     which hard register to substitute, or -1 if the operand should be
     left as it is.  */
  int *op_alt_regno[MAX_RECOG_OPERANDS];
  /* Array of alternatives, sorted in order of decreasing desirability.  */
  int *alternative_order;

  extract_insn (insn);

  if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
    return 0;

  /* Figure out which alternative currently matches.  */
  if (! constrain_operands (1))
    fatal_insn_not_found (insn);

  alternative_reject = XALLOCAVEC (int, recog_data.n_alternatives);
  alternative_nregs = XALLOCAVEC (int, recog_data.n_alternatives);
  alternative_order = XALLOCAVEC (int, recog_data.n_alternatives);
  memset (alternative_reject, 0, recog_data.n_alternatives * sizeof (int));
  memset (alternative_nregs, 0, recog_data.n_alternatives * sizeof (int));

  /* For each operand, find out which regs are equivalent.  */
  for (i = 0; i < recog_data.n_operands; i++)
    {
      cselib_val *v;
      struct elt_loc_list *l;
      rtx op;

      CLEAR_HARD_REG_SET (equiv_regs[i]);

      /* cselib blows up on CODE_LABELs.  Trying to fix that doesn't seem
         right, so avoid the problem here.  Likewise if we have a constant
         and the insn pattern doesn't tell us the mode we need.  */
      if (LABEL_P (recog_data.operand[i])
          || (CONSTANT_P (recog_data.operand[i])
              && recog_data.operand_mode[i] == VOIDmode))
        continue;

      op = recog_data.operand[i];
#ifdef LOAD_EXTEND_OP
      if (MEM_P (op)
          && GET_MODE_BITSIZE (GET_MODE (op)) < BITS_PER_WORD
          && LOAD_EXTEND_OP (GET_MODE (op)) != UNKNOWN)
        {
          rtx set = single_set (insn);

          /* We might have multiple sets, some of which do implicit
             extension.  Punt on this for now.  */
          if (! set)
            continue;
          /* If the destination is also a MEM or a STRICT_LOW_PART, no
             extension applies.
             Also, if there is an explicit extension, we don't have to
             worry about an implicit one.  */
          else if (MEM_P (SET_DEST (set))
                   || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART
                   || GET_CODE (SET_SRC (set)) == ZERO_EXTEND
                   || GET_CODE (SET_SRC (set)) == SIGN_EXTEND)
            ; /* Continue ordinary processing.  */
#ifdef CANNOT_CHANGE_MODE_CLASS
          /* If the register cannot change mode to word_mode, it follows that
             it cannot have been used in word_mode.  */
          else if (REG_P (SET_DEST (set))
                   && CANNOT_CHANGE_MODE_CLASS (GET_MODE (SET_DEST (set)),
                                                word_mode,
                                                REGNO_REG_CLASS (REGNO (SET_DEST (set)))))
            ; /* Continue ordinary processing.  */
#endif
          /* If this is a straight load, make the extension explicit.  */
          else if (REG_P (SET_DEST (set))
                   && recog_data.n_operands == 2
                   && SET_SRC (set) == op
                   && SET_DEST (set) == recog_data.operand[1-i])
            {
              validate_change (insn, recog_data.operand_loc[i],
                               gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (op)),
                                              word_mode, op),
                               1);
              validate_change (insn, recog_data.operand_loc[1-i],
                               gen_rtx_REG (word_mode, REGNO (SET_DEST (set))),
                               1);
              if (! apply_change_group ())
                return 0;
              return reload_cse_simplify_operands (insn, testreg);
            }
          else
            /* ??? There might be arithmetic operations with memory that are
               safe to optimize, but is it worth the trouble?  */
            continue;
        }
#endif /* LOAD_EXTEND_OP */
      if (side_effects_p (op))
        continue;
      v = cselib_lookup (op, recog_data.operand_mode[i], 0, VOIDmode);
      if (! v)
        continue;

      for (l = v->locs; l; l = l->next)
        if (REG_P (l->loc))
          SET_HARD_REG_BIT (equiv_regs[i], REGNO (l->loc));
    }

  for (i = 0; i < recog_data.n_operands; i++)
    {
      enum machine_mode mode;
      int regno;
      const char *p;

      op_alt_regno[i] = XALLOCAVEC (int, recog_data.n_alternatives);
      for (j = 0; j < recog_data.n_alternatives; j++)
        op_alt_regno[i][j] = -1;

      p = constraints[i] = recog_data.constraints[i];
      mode = recog_data.operand_mode[i];

      /* Add the reject values for each alternative given by the constraints
         for this operand.  */
      j = 0;
      while (*p != '\0')
        {
          char c = *p++;
          if (c == ',')
            j++;
          else if (c == '?')
            alternative_reject[j] += 3;
          else if (c == '!')
            alternative_reject[j] += 300;
        }

      /* We won't change operands which are already registers.  We
         also don't want to modify output operands.  */
      regno = true_regnum (recog_data.operand[i]);
      if (regno >= 0
          || constraints[i][0] == '='
          || constraints[i][0] == '+')
        continue;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        {
          enum reg_class rclass = NO_REGS;

          if (! TEST_HARD_REG_BIT (equiv_regs[i], regno))
            continue;

          SET_REGNO_RAW (testreg, regno);
          PUT_MODE (testreg, mode);

          /* We found a register equal to this operand.  Now look for all
             alternatives that can accept this register and have not been
             assigned a register they can use yet.  */
          j = 0;
          p = constraints[i];
          for (;;)
            {
              char c = *p;

              switch (c)
                {
                case '=':  case '+':  case '?':
                case '#':  case '&':  case '!':
                case '*':  case '%':
                case '0':  case '1':  case '2':  case '3':  case '4':
                case '5':  case '6':  case '7':  case '8':  case '9':
                case '<':  case '>':  case 'V':  case 'o':
                case 'E':  case 'F':  case 'G':  case 'H':
                case 's':  case 'i':  case 'n':
                case 'I':  case 'J':  case 'K':  case 'L':
                case 'M':  case 'N':  case 'O':  case 'P':
                case 'p':  case 'X':  case TARGET_MEM_CONSTRAINT:
                  /* These don't say anything we care about.  */
                  break;

                case 'g': case 'r':
                  rclass = reg_class_subunion[(int) rclass][(int) GENERAL_REGS];
                  break;

                default:
                  rclass
                    = (reg_class_subunion
                       [(int) rclass]
                       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
                  break;

                case ',': case '\0':
                  /* See if REGNO fits this alternative, and set it up as the
                     replacement register if we don't have one for this
                     alternative yet and the operand being replaced is not
                     a cheap CONST_INT.  */
                  if (op_alt_regno[i][j] == -1
                      && recog_data.alternative_enabled_p[j]
                      && reg_fits_class_p (testreg, rclass, 0, mode)
                      && (!CONST_INT_P (recog_data.operand[i])
                          || (set_src_cost (recog_data.operand[i],
                                            optimize_bb_for_speed_p
                                             (BLOCK_FOR_INSN (insn)))
                              > set_src_cost (testreg,
                                              optimize_bb_for_speed_p
                                               (BLOCK_FOR_INSN (insn))))))
                    {
                      alternative_nregs[j]++;
                      op_alt_regno[i][j] = regno;
                    }
                  j++;
                  rclass = NO_REGS;
                  break;
                }
              p += CONSTRAINT_LEN (c, p);

              if (c == '\0')
                break;
            }
        }
    }

  /* Record all alternatives which are better or equal to the currently
     matching one in the alternative_order array.  */
  for (i = j = 0; i < recog_data.n_alternatives; i++)
    if (alternative_reject[i] <= alternative_reject[which_alternative])
      alternative_order[j++] = i;
  recog_data.n_alternatives = j;

  /* Sort it.  Given a small number of alternatives, a dumb algorithm
     won't hurt too much.  */
  for (i = 0; i < recog_data.n_alternatives - 1; i++)
    {
      int best = i;
      int best_reject = alternative_reject[alternative_order[i]];
      int best_nregs = alternative_nregs[alternative_order[i]];
      int tmp;

      for (j = i + 1; j < recog_data.n_alternatives; j++)
        {
          int this_reject = alternative_reject[alternative_order[j]];
          int this_nregs = alternative_nregs[alternative_order[j]];

          if (this_reject < best_reject
              || (this_reject == best_reject && this_nregs > best_nregs))
            {
              best = j;
              best_reject = this_reject;
              best_nregs = this_nregs;
            }
        }

      tmp = alternative_order[best];
      alternative_order[best] = alternative_order[i];
      alternative_order[i] = tmp;
    }

  /* Substitute the operands as determined by op_alt_regno for the best
     alternative.  */
  j = alternative_order[0];

  for (i = 0; i < recog_data.n_operands; i++)
    {
      enum machine_mode mode = recog_data.operand_mode[i];
      if (op_alt_regno[i][j] == -1)
        continue;

      validate_change (insn, recog_data.operand_loc[i],
                       gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
    }

  for (i = recog_data.n_dups - 1; i >= 0; i--)
    {
      int op = recog_data.dup_num[i];
      enum machine_mode mode = recog_data.operand_mode[op];

      if (op_alt_regno[op][j] == -1)
        continue;

      validate_change (insn, recog_data.dup_loc[i],
                       gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
    }

  return apply_change_group ();
}

/* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
   addressing now.
   This code might also be useful when reload gave up on reg+reg addressing
   because of clashes between the return register and INDEX_REG_CLASS.  */

/* The maximum number of uses of a register we can keep track of to
   replace them with reg+reg addressing.  */
#define RELOAD_COMBINE_MAX_USES 16

/* Describes a recorded use of a register.  */
struct reg_use
{
  /* The insn where a register has been used.  */
  rtx insn;
  /* Points to the memory reference enclosing the use, if any, NULL_RTX
     otherwise.  */
  rtx containing_mem;
  /* Location of the register within INSN.  */
  rtx *usep;
  /* The reverse uid of the insn.  */
  int ruid;
};

/* If the register is used in some unknown fashion, USE_INDEX is negative.
   If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
   indicates where it is first set or clobbered.
   Otherwise, USE_INDEX is the index of the last encountered use of the
   register (which is first among these we have seen since we scan backwards).
   USE_RUID indicates the first encountered, i.e. last, of these uses.
   If ALL_OFFSETS_MATCH is true, all encountered uses were inside a PLUS
   with a constant offset; OFFSET contains this constant in that case.
   STORE_RUID is always meaningful if we only want to use a value in a
   register in a different place: it denotes the next insn in the insn
   stream (i.e. the last encountered) that sets or clobbers the register.
   REAL_STORE_RUID is similar, but clobbers are ignored when updating it.  */
static struct
  {
    struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
    rtx offset;
    int use_index;
    int store_ruid;
    int real_store_ruid;
    int use_ruid;
    bool all_offsets_match;
  } reg_state[FIRST_PSEUDO_REGISTER];

/* Reverse linear uid.  This is increased in reload_combine while scanning
   the instructions from last to first.  It is used to set last_label_ruid
   and the store_ruid / use_ruid fields in reg_state.  */
static int reload_combine_ruid;

/* The RUID of the last label we encountered in reload_combine.  */
static int last_label_ruid;

/* The RUID of the last jump we encountered in reload_combine.  */
static int last_jump_ruid;

/* The register numbers of the first and last index register.  A value of
   -1 in LAST_INDEX_REG indicates that we've previously computed these
   values and found no suitable index registers.  */
static int first_index_reg = -1;
static int last_index_reg;

#define LABEL_LIVE(LABEL) \
  (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
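
/* Note on ordering: reload_combine scans the insn stream backwards and
   assigns ruids in increasing order as it goes, so an insn that appears
   earlier in the insn stream has a larger ruid than one that appears
   later.  E.g. for three consecutive nondebug insns A, B and C in program
   order, C is visited first and could receive ruid 1, then B ruid 2, and
   A ruid 3.  */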

/* Subroutine of reload_combine_split_ruids, called to fix up a single
   ruid pointed to by *PRUID if it is higher than SPLIT_RUID.  */

static inline void
reload_combine_split_one_ruid (int *pruid, int split_ruid)
{
  if (*pruid > split_ruid)
    (*pruid)++;
}

/* Called when we insert a new insn in a position we've already passed in
   the scan.  Examine all our state, increasing all ruids that are higher
   than SPLIT_RUID by one in order to make room for a new insn.  */

static void
reload_combine_split_ruids (int split_ruid)
{
  unsigned i;

  reload_combine_split_one_ruid (&reload_combine_ruid, split_ruid);
  reload_combine_split_one_ruid (&last_label_ruid, split_ruid);
  reload_combine_split_one_ruid (&last_jump_ruid, split_ruid);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j, idx = reg_state[i].use_index;
      reload_combine_split_one_ruid (&reg_state[i].use_ruid, split_ruid);
      reload_combine_split_one_ruid (&reg_state[i].store_ruid, split_ruid);
      reload_combine_split_one_ruid (&reg_state[i].real_store_ruid,
                                     split_ruid);
      if (idx < 0)
        continue;
      for (j = idx; j < RELOAD_COMBINE_MAX_USES; j++)
        {
          reload_combine_split_one_ruid (&reg_state[i].reg_use[j].ruid,
                                         split_ruid);
        }
    }
}

/* Called when we are about to rescan a previously encountered insn with
   reload_combine_note_use after modifying some part of it.  This clears all
   information about uses in that particular insn.  */

static void
reload_combine_purge_insn_uses (rtx insn)
{
  unsigned i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j, k, idx = reg_state[i].use_index;
      if (idx < 0)
        continue;
      j = k = RELOAD_COMBINE_MAX_USES;
      while (j-- > idx)
        {
          if (reg_state[i].reg_use[j].insn != insn)
            {
              k--;
              if (k != j)
                reg_state[i].reg_use[k] = reg_state[i].reg_use[j];
            }
        }
      reg_state[i].use_index = k;
    }
}

/* Called when we need to forget about all uses of REGNO after an insn
   which is identified by RUID.  */

static void
reload_combine_purge_reg_uses_after_ruid (unsigned regno, int ruid)
{
  int j, k, idx = reg_state[regno].use_index;
  if (idx < 0)
    return;
  j = k = RELOAD_COMBINE_MAX_USES;
  while (j-- > idx)
    {
      if (reg_state[regno].reg_use[j].ruid >= ruid)
        {
          k--;
          if (k != j)
            reg_state[regno].reg_use[k] = reg_state[regno].reg_use[j];
        }
    }
  reg_state[regno].use_index = k;
}

/* Find the use of REGNO with the ruid that is highest among those
   lower than RUID_LIMIT, and return it if it is the only use of this
   reg in the insn.  Return NULL otherwise.  */

static struct reg_use *
reload_combine_closest_single_use (unsigned regno, int ruid_limit)
{
  int i, best_ruid = 0;
  int use_idx = reg_state[regno].use_index;
  struct reg_use *retval;

  if (use_idx < 0)
    return NULL;
  retval = NULL;
  for (i = use_idx; i < RELOAD_COMBINE_MAX_USES; i++)
    {
      struct reg_use *use = reg_state[regno].reg_use + i;
      int this_ruid = use->ruid;
      if (this_ruid >= ruid_limit)
        continue;
      if (this_ruid > best_ruid)
        {
          best_ruid = this_ruid;
          retval = use;
        }
      else if (this_ruid == best_ruid)
        retval = NULL;
    }
  if (last_label_ruid >= best_ruid)
    return NULL;
  return retval;
}

/* After we've moved an add insn, fix up any debug insns that occur
   between the old location of the add and the new location.  REG is
   the destination register of the add insn; REPLACEMENT is the
   SET_SRC of the add.
   FROM and TO specify the range in which we should make this change on
   debug insns.  */

static void
fixup_debug_insns (rtx reg, rtx replacement, rtx from, rtx to)
{
  rtx insn;
  for (insn = from; insn != to; insn = NEXT_INSN (insn))
    {
      rtx t;

      if (!DEBUG_INSN_P (insn))
        continue;

      t = INSN_VAR_LOCATION_LOC (insn);
      t = simplify_replace_rtx (t, reg, replacement);
      validate_change (insn, &INSN_VAR_LOCATION_LOC (insn), t, 0);
    }
}

/* Subroutine of reload_combine_recognize_const_pattern.  Try to replace REG
   with SRC in the insn described by USE, taking costs into account.  Return
   true if we made the replacement.  */

static bool
try_replace_in_use (struct reg_use *use, rtx reg, rtx src)
{
  rtx use_insn = use->insn;
  rtx mem = use->containing_mem;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (use_insn));

  if (mem != NULL_RTX)
    {
      addr_space_t as = MEM_ADDR_SPACE (mem);
      rtx oldaddr = XEXP (mem, 0);
      rtx newaddr = NULL_RTX;
      int old_cost = address_cost (oldaddr, GET_MODE (mem), as, speed);
      int new_cost;

      newaddr = simplify_replace_rtx (oldaddr, reg, src);
      if (memory_address_addr_space_p (GET_MODE (mem), newaddr, as))
        {
          XEXP (mem, 0) = newaddr;
          new_cost = address_cost (newaddr, GET_MODE (mem), as, speed);
          XEXP (mem, 0) = oldaddr;
          if (new_cost <= old_cost
              && validate_change (use_insn,
                                  &XEXP (mem, 0), newaddr, 0))
            return true;
        }
    }
  else
    {
      rtx new_set = single_set (use_insn);
      if (new_set
          && REG_P (SET_DEST (new_set))
          && GET_CODE (SET_SRC (new_set)) == PLUS
          && REG_P (XEXP (SET_SRC (new_set), 0))
          && CONSTANT_P (XEXP (SET_SRC (new_set), 1)))
        {
          rtx new_src;
          int old_cost = set_src_cost (SET_SRC (new_set), speed);

          gcc_assert (rtx_equal_p (XEXP (SET_SRC (new_set), 0), reg));
          new_src = simplify_replace_rtx (SET_SRC (new_set), reg, src);

          if (set_src_cost (new_src, speed) <= old_cost
              && validate_change (use_insn, &SET_SRC (new_set),
                                  new_src, 0))
            return true;
        }
    }
  return false;
}

/* Called by reload_combine when scanning INSN.  This function tries to detect
   patterns where a constant is added to a register, and the result is used
   in an address.
   Return true if no further processing is needed on INSN; false if it wasn't
   recognized and should be handled normally.  */

static bool
reload_combine_recognize_const_pattern (rtx insn)
{
  int from_ruid = reload_combine_ruid;
  rtx set, pat, reg, src, addreg;
  unsigned int regno;
  struct reg_use *use;
  bool must_move_add;
  rtx add_moved_after_insn = NULL_RTX;
  int add_moved_after_ruid = 0;
  int clobbered_regno = -1;

  set = single_set (insn);
  if (set == NULL_RTX)
    return false;

  reg = SET_DEST (set);
  src = SET_SRC (set);
  if (!REG_P (reg)
      || hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] != 1
      || GET_MODE (reg) != Pmode
      || reg == stack_pointer_rtx)
    return false;

  regno = REGNO (reg);

  /* We look for a REG1 = REG2 + CONSTANT insn, followed by either
     uses of REG1 inside an address, or inside another add insn.  If
     possible and profitable, merge the addition into subsequent
     uses.  */
  if (GET_CODE (src) != PLUS
      || !REG_P (XEXP (src, 0))
      || !CONSTANT_P (XEXP (src, 1)))
    return false;

  addreg = XEXP (src, 0);
  must_move_add = rtx_equal_p (reg, addreg);

  pat = PATTERN (insn);
  if (must_move_add && set != pat)
    {
      /* We have to be careful when moving the add; apart from the
         single_set there may also be clobbers.  Recognize one special
         case, that of one clobber alongside the set (likely a clobber
         of the CC register).  */
      gcc_assert (GET_CODE (PATTERN (insn)) == PARALLEL);
      if (XVECLEN (pat, 0) != 2 || XVECEXP (pat, 0, 0) != set
          || GET_CODE (XVECEXP (pat, 0, 1)) != CLOBBER
          || !REG_P (XEXP (XVECEXP (pat, 0, 1), 0)))
        return false;
      clobbered_regno = REGNO (XEXP (XVECEXP (pat, 0, 1), 0));
    }

  do
    {
      use = reload_combine_closest_single_use (regno, from_ruid);

      if (use)
        /* Start the search for the next use from here.  */
        from_ruid = use->ruid;

      if (use && GET_MODE (*use->usep) == Pmode)
        {
          bool delete_add = false;
          rtx use_insn = use->insn;
          int use_ruid = use->ruid;

          /* Avoid moving the add insn past a jump.  */
          if (must_move_add && use_ruid <= last_jump_ruid)
            break;

          /* If the add clobbers another hard reg in parallel, don't move
             it past a real set of this hard reg.  */
          if (must_move_add && clobbered_regno >= 0
              && reg_state[clobbered_regno].real_store_ruid >= use_ruid)
            break;

#ifdef HAVE_cc0
          /* Do not separate cc0 setter and cc0 user on HAVE_cc0 targets.  */
          if (must_move_add && sets_cc0_p (PATTERN (use_insn)))
            break;
#endif

          gcc_assert (reg_state[regno].store_ruid <= use_ruid);
          /* Avoid moving a use of ADDREG past a point where it is stored.  */
          if (reg_state[REGNO (addreg)].store_ruid > use_ruid)
            break;

          /* We also must not move the addition past an insn that sets
             the same register, unless we can combine two add insns.  */
          if (must_move_add && reg_state[regno].store_ruid == use_ruid)
            {
              if (use->containing_mem == NULL_RTX)
                delete_add = true;
              else
                break;
            }

          if (try_replace_in_use (use, reg, src))
            {
              reload_combine_purge_insn_uses (use_insn);
              reload_combine_note_use (&PATTERN (use_insn), use_insn,
                                       use_ruid, NULL_RTX);

              if (delete_add)
                {
                  fixup_debug_insns (reg, src, insn, use_insn);
                  delete_insn (insn);
                  return true;
                }
              if (must_move_add)
                {
                  add_moved_after_insn = use_insn;
                  add_moved_after_ruid = use_ruid;
                }
              continue;
            }
        }
      /* If we get here, we couldn't handle this use.  */
      if (must_move_add)
        break;
    }
  while (use);

  if (!must_move_add || add_moved_after_insn == NULL_RTX)
    /* Process the add normally.  */
    return false;

  fixup_debug_insns (reg, src, insn, add_moved_after_insn);

  reorder_insns (insn, insn, add_moved_after_insn);
  reload_combine_purge_reg_uses_after_ruid (regno, add_moved_after_ruid);
  reload_combine_split_ruids (add_moved_after_ruid - 1);
  reload_combine_note_use (&PATTERN (insn), insn,
                           add_moved_after_ruid, NULL_RTX);
  reg_state[regno].store_ruid = add_moved_after_ruid;

  return true;
}

/* Called by reload_combine when scanning INSN.  Try to detect a pattern we
   can handle and improve.
   Return true if no further processing is needed on INSN; false if it
   wasn't recognized and should be handled normally.  */

static bool
reload_combine_recognize_pattern (rtx insn)
{
  rtx set, reg, src;
  unsigned int regno;

  set = single_set (insn);
  if (set == NULL_RTX)
    return false;

  reg = SET_DEST (set);
  src = SET_SRC (set);
  if (!REG_P (reg)
      || hard_regno_nregs[REGNO (reg)][GET_MODE (reg)] != 1)
    return false;

  regno = REGNO (reg);

  /* Look for (set (REGX) (CONST_INT))
              (set (REGX) (PLUS (REGX) (REGY)))
              ...
              ... (MEM (REGX)) ...
     and convert it to
              (set (REGZ) (CONST_INT))
              ...
              ... (MEM (PLUS (REGZ) (REGY)))... .

     First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
     and that we know all uses of REGX before it dies.
     Also, explicitly check that REGX != REGY; our life information
     does not yet show whether REGY changes in this insn.  */

  if (GET_CODE (src) == PLUS
      && reg_state[regno].all_offsets_match
      && last_index_reg != -1
      && REG_P (XEXP (src, 1))
      && rtx_equal_p (XEXP (src, 0), reg)
      && !rtx_equal_p (XEXP (src, 1), reg)
      && reg_state[regno].use_index >= 0
      && reg_state[regno].use_index < RELOAD_COMBINE_MAX_USES
      && last_label_ruid < reg_state[regno].use_ruid)
    {
      rtx base = XEXP (src, 1);
      rtx prev = prev_nonnote_nondebug_insn (insn);
      rtx prev_set = prev ? single_set (prev) : NULL_RTX;
      rtx index_reg = NULL_RTX;
      rtx reg_sum = NULL_RTX;
      int i;

      /* Now we need to set INDEX_REG to an index register (denoted as
         REGZ in the illustration above) and REG_SUM to the expression
         register+register that we want to use to substitute uses of REG
         (typically in MEMs) with.  First check REG and BASE for being
         index registers; we can use them even if they are not dead.  */
      if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
          || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
                                REGNO (base)))
        {
          index_reg = reg;
          reg_sum = src;
        }
      else
        {
          /* Otherwise, look for a free index register.  Since we have
             checked above that neither REG nor BASE are index registers,
             if we find anything at all, it will be different from these
             two registers.  */
          for (i = first_index_reg; i <= last_index_reg; i++)
            {
              if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
                  && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
                  && reg_state[i].store_ruid <= reg_state[regno].use_ruid
                  && (call_used_regs[i] || df_regs_ever_live_p (i))
                  && (!frame_pointer_needed || i != HARD_FRAME_POINTER_REGNUM)
                  && !fixed_regs[i] && !global_regs[i]
                  && hard_regno_nregs[i][GET_MODE (reg)] == 1
                  && targetm.hard_regno_scratch_ok (i))
                {
                  index_reg = gen_rtx_REG (GET_MODE (reg), i);
                  reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
                  break;
                }
            }
        }

      /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
         (REGY), i.e. BASE, is not clobbered before the last use we'll
         create.  */
      if (reg_sum
          && prev_set
          && CONST_INT_P (SET_SRC (prev_set))
          && rtx_equal_p (SET_DEST (prev_set), reg)
          && (reg_state[REGNO (base)].store_ruid
              <= reg_state[regno].use_ruid))
        {
          /* Change destination register and, if necessary, the constant
             value in PREV, the constant loading instruction.  */
          validate_change (prev, &SET_DEST (prev_set), index_reg, 1);
          if (reg_state[regno].offset != const0_rtx)
            validate_change (prev,
                             &SET_SRC (prev_set),
                             GEN_INT (INTVAL (SET_SRC (prev_set))
                                      + INTVAL (reg_state[regno].offset)),
                             1);

          /* Now for every use of REG that we have recorded, replace REG
             with REG_SUM.  */
          for (i = reg_state[regno].use_index;
               i < RELOAD_COMBINE_MAX_USES; i++)
            validate_unshare_change (reg_state[regno].reg_use[i].insn,
                                     reg_state[regno].reg_use[i].usep,
                                     /* Each change must have its own
                                        replacement.  */
                                     reg_sum, 1);

          if (apply_change_group ())
            {
              struct reg_use *lowest_ruid = NULL;

              /* For every new use of REG_SUM, we have to record the use
                 of BASE therein, i.e. operand 1.  */
              for (i = reg_state[regno].use_index;
                   i < RELOAD_COMBINE_MAX_USES; i++)
                {
                  struct reg_use *use = reg_state[regno].reg_use + i;
                  reload_combine_note_use (&XEXP (*use->usep, 1), use->insn,
                                           use->ruid, use->containing_mem);
                  if (lowest_ruid == NULL || use->ruid < lowest_ruid->ruid)
                    lowest_ruid = use;
                }

              fixup_debug_insns (reg, reg_sum, insn, lowest_ruid->insn);

              /* Delete the reg-reg addition.  */
              delete_insn (insn);

              if (reg_state[regno].offset != const0_rtx)
                /* Previous REG_EQUIV / REG_EQUAL notes for PREV
                   are now invalid.  */
                remove_reg_equal_equiv_notes (prev);

              reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
              return true;
            }
        }
    }
  return false;
}

static void
reload_combine (void)
{
  rtx insn, prev;
  basic_block bb;
  unsigned int r;
  int min_labelno, n_labels;
  HARD_REG_SET ever_live_at_start, *label_live;

  /* To avoid wasting too much time later searching for an index register,
     determine the minimum and maximum index register numbers.  */
  if (INDEX_REG_CLASS == NO_REGS)
    last_index_reg = -1;
  else if (first_index_reg == -1 && last_index_reg == 0)
    {
      for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
        if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
          {
            if (first_index_reg == -1)
              first_index_reg = r;

            last_index_reg = r;
          }

      /* If no index register is available, we can quit now.  Set LAST_INDEX_REG
         to -1 so we'll know to quit early the next time we get here.  */
      if (first_index_reg == -1)
        {
          last_index_reg = -1;
          return;
        }
    }

  /* Set up LABEL_LIVE and EVER_LIVE_AT_START.  The register lifetime
     information is a bit fuzzy immediately after reload, but it's
     still good enough to determine which registers are live at a jump
     destination.  */
  min_labelno = get_first_label_num ();
  n_labels = max_label_num () - min_labelno;
  label_live = XNEWVEC (HARD_REG_SET, n_labels);
  CLEAR_HARD_REG_SET (ever_live_at_start);

  FOR_EACH_BB_REVERSE (bb)
    {
      insn = BB_HEAD (bb);
      if (LABEL_P (insn))
        {
          HARD_REG_SET live;
          bitmap live_in = df_get_live_in (bb);

          REG_SET_TO_HARD_REG_SET (live, live_in);
          compute_use_by_pseudos (&live, live_in);
          COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
          IOR_HARD_REG_SET (ever_live_at_start, live);
        }
    }

  /* Initialize last_label_ruid, reload_combine_ruid and reg_state.  */
  last_label_ruid = last_jump_ruid = reload_combine_ruid = 0;
  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
    {
      reg_state[r].store_ruid = 0;
      reg_state[r].real_store_ruid = 0;
      if (fixed_regs[r])
        reg_state[r].use_index = -1;
      else
        reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
    }

  for (insn = get_last_insn (); insn; insn = prev)
    {
      bool control_flow_insn;
      rtx note;

      prev = PREV_INSN (insn);

      /* We cannot do our optimization across labels.  Invalidating all the use
         information we have would be costly, so we just note where the label
         is and then later disable any optimization that would cross it.  */
      if (LABEL_P (insn))
        last_label_ruid = reload_combine_ruid;
      else if (BARRIER_P (insn))
        {
          /* Crossing a barrier resets all the use information.  */
          for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
            if (! fixed_regs[r])
              reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
        }
      else if (INSN_P (insn) && volatile_insn_p (PATTERN (insn)))
        /* Optimizations across insns being marked as volatile must be
           prevented.  All the usage information is invalidated
           here.  */
        for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
          if (! fixed_regs[r]
              && reg_state[r].use_index != RELOAD_COMBINE_MAX_USES)
            reg_state[r].use_index = -1;

      if (! NONDEBUG_INSN_P (insn))
        continue;

      reload_combine_ruid++;

      control_flow_insn = control_flow_insn_p (insn);
      if (control_flow_insn)
        last_jump_ruid = reload_combine_ruid;

      if (reload_combine_recognize_const_pattern (insn)
          || reload_combine_recognize_pattern (insn))
        continue;

      note_stores (PATTERN (insn), reload_combine_note_store, NULL);

      if (CALL_P (insn))
        {
          rtx link;

          for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
            if (call_used_regs[r])
              {
                reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
                reg_state[r].store_ruid = reload_combine_ruid;
              }

          for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
               link = XEXP (link, 1))
            {
              rtx usage_rtx = XEXP (XEXP (link, 0), 0);
              if (REG_P (usage_rtx))
                {
                  unsigned int i;
                  unsigned int start_reg = REGNO (usage_rtx);
                  unsigned int num_regs
                    = hard_regno_nregs[start_reg][GET_MODE (usage_rtx)];
                  unsigned int end_reg = start_reg + num_regs - 1;
                  for (i = start_reg; i <= end_reg; i++)
                    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
                      {
                        reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
                        reg_state[i].store_ruid = reload_combine_ruid;
                      }
                    else
                      reg_state[i].use_index = -1;
                }
            }
        }

      if (control_flow_insn && GET_CODE (PATTERN (insn)) != RETURN)
        {
          /* Non-spill registers might be used at the call destination in
             some unknown fashion, so we have to mark the unknown use.  */
          HARD_REG_SET *live;

          if ((condjump_p (insn) || condjump_in_parallel_p (insn))
              && JUMP_LABEL (insn))
            live = &LABEL_LIVE (JUMP_LABEL (insn));
          else
            live = &ever_live_at_start;

          for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
            if (TEST_HARD_REG_BIT (*live, r))
              reg_state[r].use_index = -1;
        }

      reload_combine_note_use (&PATTERN (insn), insn, reload_combine_ruid,
                               NULL_RTX);

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
        {
          if (REG_NOTE_KIND (note) == REG_INC && REG_P (XEXP (note, 0)))
            {
              int regno = REGNO (XEXP (note, 0));
              reg_state[regno].store_ruid = reload_combine_ruid;
              reg_state[regno].real_store_ruid = reload_combine_ruid;
              reg_state[regno].use_index = -1;
            }
        }
    }

  free (label_live);
}

/* Check if DST is a register or a subreg of a register; if it is,
   update store_ruid, real_store_ruid and use_index in the reg_state
   structure accordingly.  Called via note_stores from reload_combine.  */

static void
reload_combine_note_store (rtx dst, const_rtx set, void *data ATTRIBUTE_UNUSED)
{
  int regno = 0;
  int i;
  enum machine_mode mode = GET_MODE (dst);

  if (GET_CODE (dst) == SUBREG)
    {
      regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
                                   GET_MODE (SUBREG_REG (dst)),
                                   SUBREG_BYTE (dst),
                                   GET_MODE (dst));
      dst = SUBREG_REG (dst);
    }

  /* Some targets do argument pushes without adding REG_INC notes.  */

  if (MEM_P (dst))
    {
      dst = XEXP (dst, 0);
      if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
          || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC
          || GET_CODE (dst) == PRE_MODIFY || GET_CODE (dst) == POST_MODIFY)
        {
          regno = REGNO (XEXP (dst, 0));
          mode = GET_MODE (XEXP (dst, 0));
          for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
            {
              /* We could probably do better, but for now mark the register
                 as used in an unknown fashion and set/clobbered at this
                 insn.  */
              reg_state[i].use_index = -1;
              reg_state[i].store_ruid = reload_combine_ruid;
              reg_state[i].real_store_ruid = reload_combine_ruid;
            }
        }
      else
        return;
    }

  if (!REG_P (dst))
    return;
  regno += REGNO (dst);

  /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
     careful with registers / register parts that are not full words.
     Similarly for ZERO_EXTRACT.  */
  if (GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
      || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
    {
      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
        {
          reg_state[i].use_index = -1;
          reg_state[i].store_ruid = reload_combine_ruid;
          reg_state[i].real_store_ruid = reload_combine_ruid;
        }
    }
  else
    {
      for (i = hard_regno_nregs[regno][mode] - 1 + regno; i >= regno; i--)
        {
          reg_state[i].store_ruid = reload_combine_ruid;
          if (GET_CODE (set) == SET)
            reg_state[i].real_store_ruid = reload_combine_ruid;
          reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
        }
    }
}

/* XP points to a piece of rtl that has to be checked for any uses of
   registers.
   *XP is the pattern of INSN, or a part of it.
   Called from reload_combine, and recursively by itself.  */
static void
reload_combine_note_use (rtx *xp, rtx insn, int ruid, rtx containing_mem)
{
  rtx x = *xp;
  enum rtx_code code = x->code;
  const char *fmt;
  int i, j;
  rtx offset = const0_rtx; /* For the REG case below.  */

  switch (code)
    {
    case SET:
      if (REG_P (SET_DEST (x)))
        {
          reload_combine_note_use (&SET_SRC (x), insn, ruid, NULL_RTX);
          return;
        }
      break;

    case USE:
      /* If this is the USE of a return value, we can't change it.  */
      if (REG_P (XEXP (x, 0)) && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
        {
          /* Mark the return register as used in an unknown fashion.  */
          rtx reg = XEXP (x, 0);
          int regno = REGNO (reg);
          int nregs = hard_regno_nregs[regno][GET_MODE (reg)];

          while (--nregs >= 0)
            reg_state[regno + nregs].use_index = -1;
          return;
        }
      break;

    case CLOBBER:
      if (REG_P (SET_DEST (x)))
        {
          /* No spurious CLOBBERs of pseudo registers may remain.  */
          gcc_assert (REGNO (SET_DEST (x)) < FIRST_PSEUDO_REGISTER);
          return;
        }
      break;

    case PLUS:
      /* We are interested in (plus (reg) (const_int)) .  */
      if (!REG_P (XEXP (x, 0))
          || !CONST_INT_P (XEXP (x, 1)))
        break;
      offset = XEXP (x, 1);
      x = XEXP (x, 0);
      /* Fall through.  */
    case REG:
      {
        int regno = REGNO (x);
        int use_index;
        int nregs;

        /* No spurious USEs of pseudo registers may remain.  */
        gcc_assert (regno < FIRST_PSEUDO_REGISTER);

        nregs = hard_regno_nregs[regno][GET_MODE (x)];

        /* We can't substitute into multi-hard-reg uses.  */
        if (nregs > 1)
          {
            while (--nregs >= 0)
              reg_state[regno + nregs].use_index = -1;
            return;
          }

        /* We may be called to update uses in previously seen insns.
           Don't add uses beyond the last store we saw.  */
        if (ruid < reg_state[regno].store_ruid)
          return;

        /* If this register is already used in some unknown fashion, we
           can't do anything.
           If we decrement the index from zero to -1, we can't store more
           uses, so this register becomes used in an unknown fashion.  */
        use_index = --reg_state[regno].use_index;
        if (use_index < 0)
          return;

        if (use_index == RELOAD_COMBINE_MAX_USES - 1)
          {
            /* This is the first use of this register we have seen since we
               marked it as dead.  */
            reg_state[regno].offset = offset;
            reg_state[regno].all_offsets_match = true;
            reg_state[regno].use_ruid = ruid;
          }
        else
          {
            if (reg_state[regno].use_ruid > ruid)
              reg_state[regno].use_ruid = ruid;

            if (! rtx_equal_p (offset, reg_state[regno].offset))
              reg_state[regno].all_offsets_match = false;
          }

        reg_state[regno].reg_use[use_index].insn = insn;
        reg_state[regno].reg_use[use_index].ruid = ruid;
        reg_state[regno].reg_use[use_index].containing_mem = containing_mem;
        reg_state[regno].reg_use[use_index].usep = xp;
        return;
      }

    case MEM:
      containing_mem = x;
      break;

    default:
      break;
    }

  /* Recursively process the components of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        reload_combine_note_use (&XEXP (x, i), insn, ruid, containing_mem);
      else if (fmt[i] == 'E')
        {
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            reload_combine_note_use (&XVECEXP (x, i, j), insn, ruid,
                                     containing_mem);
        }
    }
}

/* See if we can reduce the cost of a constant by replacing a move
   with an add.  We track situations in which a register is set to a
   constant or to a register plus a constant.  */
/* We cannot do our optimization across labels.  Invalidating all the
   information about register contents we have would be costly, so we
   use move2add_last_label_luid to note where the label is and then
   later disable any optimization that would cross it.
   reg_offset[n] / reg_base_reg[n] / reg_symbol_ref[n] / reg_mode[n]
   are only valid if reg_set_luid[n] is greater than
   move2add_last_label_luid.  */
static int reg_set_luid[FIRST_PSEUDO_REGISTER];

/* If reg_base_reg[n] is negative, register n has been set to
   reg_offset[n] or reg_symbol_ref[n] + reg_offset[n] in mode reg_mode[n].
   If reg_base_reg[n] is non-negative, register n has been set to the
   sum of reg_offset[n] and the value of register reg_base_reg[n]
   before reg_set_luid[n], calculated in mode reg_mode[n] .  */
static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
static int reg_base_reg[FIRST_PSEUDO_REGISTER];
static rtx reg_symbol_ref[FIRST_PSEUDO_REGISTER];
static enum machine_mode reg_mode[FIRST_PSEUDO_REGISTER];

/* move2add_luid is linearly increased while scanning the instructions
   from first to last.  It is used to set reg_set_luid in
   reload_cse_move2add and move2add_note_store.  */
static int move2add_luid;

/* move2add_last_label_luid is set whenever a label is found.  Labels
   invalidate all previously collected reg_offset data.  */
static int move2add_last_label_luid;

/* ??? We don't know how zero / sign extension is handled, hence we
   can't go from a narrower to a wider mode.  */
#define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
  (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
   || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
       && TRULY_NOOP_TRUNCATION_MODES_P (OUTMODE, INMODE)))

/* This function is called with INSN that sets REG to (SYM + OFF),
   while REG is known to already have value (SYM + offset).
   This function tries to change INSN into an add instruction
   (set (REG) (plus (REG) (OFF - offset))) using the known value.
   It also updates the information about REG's known value.
   Return true if we made a change.  */

static bool
move2add_use_add2_insn (rtx reg, rtx sym, rtx off, rtx insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  rtx new_src = gen_int_mode (INTVAL (off) - reg_offset[regno],
                              GET_MODE (reg));
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  bool changed = false;

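  /* For example, if REG is already known to hold (SYM + 8) and INSN sets
     it to (SYM + 20), NEW_SRC is 12 and the insn can become
     (set (reg) (plus (reg) (const_int 12))), provided the add is cheaper
     than the original move and an add2 pattern exists.  */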
  /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
     use (set (reg) (reg)) instead.
     We don't delete this insn, nor do we convert it into a
     note, to avoid losing register notes or the return
     value flag.  jump2 already knows how to get rid of
     no-op moves.  */
  if (new_src == const0_rtx)
    {
      /* If the constants are different, this is a
         truncation, that, if turned into (set (reg)
         (reg)), would be discarded.  Maybe we should
         try a truncMN pattern?  */
      if (INTVAL (off) == reg_offset [regno])
        changed = validate_change (insn, &SET_SRC (pat), reg, 0);
    }
  else
    {
      struct full_rtx_costs oldcst, newcst;
      rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);

      get_full_set_rtx_cost (pat, &oldcst);
      SET_SRC (pat) = tem;
      get_full_set_rtx_cost (pat, &newcst);
      SET_SRC (pat) = src;

      if (costs_lt_p (&newcst, &oldcst, speed)
          && have_add2_insn (reg, new_src))
        changed = validate_change (insn, &SET_SRC (pat), tem, 0);
      else if (sym == NULL_RTX && GET_MODE (reg) != BImode)
        {
          enum machine_mode narrow_mode;
          for (narrow_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
               narrow_mode != VOIDmode
                 && narrow_mode != GET_MODE (reg);
               narrow_mode = GET_MODE_WIDER_MODE (narrow_mode))
            {
              if (have_insn_for (STRICT_LOW_PART, narrow_mode)
                  && ((reg_offset[regno] & ~GET_MODE_MASK (narrow_mode))
                      == (INTVAL (off) & ~GET_MODE_MASK (narrow_mode))))
                {
                  rtx narrow_reg = gen_rtx_REG (narrow_mode,
                                                REGNO (reg));
                  rtx narrow_src = gen_int_mode (INTVAL (off),
                                                 narrow_mode);
                  rtx new_set
                    = gen_rtx_SET (VOIDmode,
                                   gen_rtx_STRICT_LOW_PART (VOIDmode,
                                                            narrow_reg),
                                   narrow_src);
                  changed = validate_change (insn, &PATTERN (insn),
                                             new_set, 0);
                  if (changed)
                    break;
                }
            }
        }
    }
  reg_set_luid[regno] = move2add_luid;
  reg_base_reg[regno] = -1;
  reg_mode[regno] = GET_MODE (reg);
  reg_symbol_ref[regno] = sym;
  reg_offset[regno] = INTVAL (off);
  return changed;
}


/* This function is called with INSN that sets REG to (SYM + OFF),
   but REG doesn't have known value (SYM + offset).  This function
   tries to find another register which is known to already have
   value (SYM + offset) and change INSN into an add instruction
   (set (REG) (plus (the found register) (OFF - offset))) if such
   a register is found.  It also updates the information about
   REG's known value.
   Return true iff we made a change.  */

static bool
move2add_use_add3_insn (rtx reg, rtx sym, rtx off, rtx insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  int regno = REGNO (reg);
  int min_regno = 0;
  bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
  int i;
  bool changed = false;
  struct full_rtx_costs oldcst, newcst, mincst;
  rtx plus_expr;

  init_costs_to_max (&mincst);
  get_full_set_rtx_cost (pat, &oldcst);

  plus_expr = gen_rtx_PLUS (GET_MODE (reg), reg, const0_rtx);
  SET_SRC (pat) = plus_expr;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (reg_set_luid[i] > move2add_last_label_luid
        && reg_mode[i] == GET_MODE (reg)
        && reg_base_reg[i] < 0
        && reg_symbol_ref[i] != NULL_RTX
        && rtx_equal_p (sym, reg_symbol_ref[i]))
      {
        rtx new_src = gen_int_mode (INTVAL (off) - reg_offset[i],
                                    GET_MODE (reg));
        /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
           use (set (reg) (reg)) instead.
           We don't delete this insn, nor do we convert it into a
           note, to avoid losing register notes or the return
           value flag.  jump2 already knows how to get rid of
           no-op moves.  */
        if (new_src == const0_rtx)
          {
            init_costs_to_zero (&mincst);
            min_regno = i;
            break;
          }
        else
          {
            XEXP (plus_expr, 1) = new_src;
            get_full_set_rtx_cost (pat, &newcst);

            if (costs_lt_p (&newcst, &mincst, speed))
              {
                mincst = newcst;
                min_regno = i;
              }
          }
      }
  SET_SRC (pat) = src;

  if (costs_lt_p (&mincst, &oldcst, speed))
    {
      rtx tem;

      tem = gen_rtx_REG (GET_MODE (reg), min_regno);
      if (i != min_regno)
        {
          rtx new_src = gen_int_mode (INTVAL (off) - reg_offset[min_regno],
                                      GET_MODE (reg));
          tem = gen_rtx_PLUS (GET_MODE (reg), tem, new_src);
        }
      if (validate_change (insn, &SET_SRC (pat), tem, 0))
        changed = true;
    }
  reg_set_luid[regno] = move2add_luid;
  reg_base_reg[regno] = -1;
  reg_mode[regno] = GET_MODE (reg);
  reg_symbol_ref[regno] = sym;
  reg_offset[regno] = INTVAL (off);
  return changed;
}

/* Convert move insns with constant inputs to additions if they are cheaper.
   Return true if any changes were made.  */
static bool
reload_cse_move2add (rtx first)
{
  int i;
  rtx insn;
  bool changed = false;

  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
    {
      reg_set_luid[i] = 0;
      reg_offset[i] = 0;
      reg_base_reg[i] = 0;
      reg_symbol_ref[i] = NULL_RTX;
      reg_mode[i] = VOIDmode;
    }

  move2add_last_label_luid = 0;
  move2add_luid = 2;
  for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
    {
      rtx pat, note;

      if (LABEL_P (insn))
        {
          move2add_last_label_luid = move2add_luid;
          /* We're going to increment move2add_luid twice after a
             label, so that we can use move2add_last_label_luid + 1 as
             the luid for constants.  */
          move2add_luid++;
          continue;
        }
      if (! INSN_P (insn))
        continue;
      pat = PATTERN (insn);
      /* For simplicity, we only perform this optimization on
         straightforward SETs.  */
      if (GET_CODE (pat) == SET
          && REG_P (SET_DEST (pat)))
        {
          rtx reg = SET_DEST (pat);
          int regno = REGNO (reg);
          rtx src = SET_SRC (pat);

          /* Check if we have valid information on the contents of this
             register in the mode of REG.  */
          if (reg_set_luid[regno] > move2add_last_label_luid
              && MODES_OK_FOR_MOVE2ADD (GET_MODE (reg), reg_mode[regno])
              && dbg_cnt (cse2_move2add))
            {
              /* Try to transform (set (REGX) (CONST_INT A))
                                  ...
                                  (set (REGX) (CONST_INT B))
                 to
                                  (set (REGX) (CONST_INT A))
                                  ...
                                  (set (REGX) (plus (REGX) (CONST_INT B-A)))
                 or
                                  (set (REGX) (CONST_INT A))
                                  ...
                                  (set (STRICT_LOW_PART (REGX)) (CONST_INT B))
              */

              if (CONST_INT_P (src)
                  && reg_base_reg[regno] < 0
                  && reg_symbol_ref[regno] == NULL_RTX)
                {
                  changed |= move2add_use_add2_insn (reg, NULL_RTX, src, insn);
                  continue;
                }

              /* Try to transform (set (REGX) (REGY))
                                  (set (REGX) (PLUS (REGX) (CONST_INT A)))
                                  ...
                                  (set (REGX) (REGY))
                                  (set (REGX) (PLUS (REGX) (CONST_INT B)))
                 to
                                  (set (REGX) (REGY))
                                  (set (REGX) (PLUS (REGX) (CONST_INT A)))
                                  ...
                                  (set (REGX) (plus (REGX) (CONST_INT B-A)))  */
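              /* For example: if r1 is known to hold r2 + 4 and we now see
                 "r1 = r2" followed by "r1 = r1 + 12", the copy can be
                 deleted and the addition rewritten as "r1 = r1 + 8",
                 assuming reg_offset for r2 is 0 and the single add is
                 cheaper than the original copy-plus-add pair.  */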
              else if (REG_P (src)
                       && reg_set_luid[regno] == reg_set_luid[REGNO (src)]
                       && reg_base_reg[regno] == reg_base_reg[REGNO (src)]
                       && MODES_OK_FOR_MOVE2ADD (GET_MODE (reg),
                                                 reg_mode[REGNO (src)]))
                {
                  rtx next = next_nonnote_nondebug_insn (insn);
                  rtx set = NULL_RTX;
                  if (next)
                    set = single_set (next);
                  if (set
                      && SET_DEST (set) == reg
                      && GET_CODE (SET_SRC (set)) == PLUS
                      && XEXP (SET_SRC (set), 0) == reg
                      && CONST_INT_P (XEXP (SET_SRC (set), 1)))
                    {
                      rtx src3 = XEXP (SET_SRC (set), 1);
                      HOST_WIDE_INT added_offset = INTVAL (src3);
                      HOST_WIDE_INT base_offset = reg_offset[REGNO (src)];
                      HOST_WIDE_INT regno_offset = reg_offset[regno];
                      rtx new_src =
                        gen_int_mode (added_offset
                                      + base_offset
                                      - regno_offset,
                                      GET_MODE (reg));
                      bool success = false;
                      bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));

                      if (new_src == const0_rtx)
                        /* See above why we create (set (reg) (reg)) here.  */
                        success
                          = validate_change (next, &SET_SRC (set), reg, 0);
                      else
                        {
                          rtx old_src = SET_SRC (set);
                          struct full_rtx_costs oldcst, newcst;
                          rtx tem = gen_rtx_PLUS (GET_MODE (reg), reg, new_src);

                          get_full_set_rtx_cost (set, &oldcst);
                          SET_SRC (set) = tem;
                          get_full_set_src_cost (tem, &newcst);
                          SET_SRC (set) = old_src;
                          costs_add_n_insns (&oldcst, 1);

                          if (costs_lt_p (&newcst, &oldcst, speed)
                              && have_add2_insn (reg, new_src))
                            {
                              rtx newpat = gen_rtx_SET (VOIDmode, reg, tem);
                              success
                                = validate_change (next, &PATTERN (next),
                                                   newpat, 0);
                            }
                        }
                      if (success)
                        delete_insn (insn);
                      changed |= success;
                      insn = next;
                      reg_mode[regno] = GET_MODE (reg);
                      reg_offset[regno] =
                        trunc_int_for_mode (added_offset + base_offset,
                                            GET_MODE (reg));
                      continue;
                    }
                }
            }

          /* Try to transform
             (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
             ...
             (set (REGY) (CONST (PLUS (SYMBOL_REF) (CONST_INT B))))
             to
             (set (REGX) (CONST (PLUS (SYMBOL_REF) (CONST_INT A))))
             ...
             (set (REGY) (CONST (PLUS (REGX) (CONST_INT B-A))))  */
          if ((GET_CODE (src) == SYMBOL_REF
               || (GET_CODE (src) == CONST
                   && GET_CODE (XEXP (src, 0)) == PLUS
                   && GET_CODE (XEXP (XEXP (src, 0), 0)) == SYMBOL_REF
                   && CONST_INT_P (XEXP (XEXP (src, 0), 1))))
              && dbg_cnt (cse2_move2add))
            {
              rtx sym, off;

              if (GET_CODE (src) == SYMBOL_REF)
                {
                  sym = src;
                  off = const0_rtx;
                }
              else
                {
                  sym = XEXP (XEXP (src, 0), 0);
                  off = XEXP (XEXP (src, 0), 1);
                }

              /* If the reg already contains the value which is sum of
                 sym and some constant value, we can use an add2 insn.  */
              if (reg_set_luid[regno] > move2add_last_label_luid
                  && MODES_OK_FOR_MOVE2ADD (GET_MODE (reg), reg_mode[regno])
                  && reg_base_reg[regno] < 0
                  && reg_symbol_ref[regno] != NULL_RTX
                  && rtx_equal_p (sym, reg_symbol_ref[regno]))
                changed |= move2add_use_add2_insn (reg, sym, off, insn);

              /* Otherwise, we have to find a register whose value is sum
                 of sym and some constant value.  */
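              /* move2add_use_add3_insn scans every hard register for one
                 already known to hold SYM plus some offset and, if the
                 rewrite is cheaper by full_rtx_costs, turns INSN into an
                 add (or a plain copy) based on that register.  */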
              else
                changed |= move2add_use_add3_insn (reg, sym, off, insn);

              continue;
            }
        }

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
        {
          if (REG_NOTE_KIND (note) == REG_INC
              && REG_P (XEXP (note, 0)))
            {
              /* Reset the information about this register.  */
              int regno = REGNO (XEXP (note, 0));
              if (regno < FIRST_PSEUDO_REGISTER)
                reg_set_luid[regno] = 0;
            }
        }
      note_stores (PATTERN (insn), move2add_note_store, insn);

      /* If INSN is a conditional branch, we try to extract an
         implicit set out of it.  */
      if (any_condjump_p (insn))
        {
          rtx cnd = fis_get_condition (insn);

          if (cnd != NULL_RTX
              && GET_CODE (cnd) == NE
              && REG_P (XEXP (cnd, 0))
              && !reg_set_p (XEXP (cnd, 0), insn)
              /* The following two checks, which are also in
                 move2add_note_store, are intended to reduce the
                 number of calls to gen_rtx_SET to avoid memory
                 allocation if possible.  */
              && SCALAR_INT_MODE_P (GET_MODE (XEXP (cnd, 0)))
              && hard_regno_nregs[REGNO (XEXP (cnd, 0))][GET_MODE (XEXP (cnd, 0))] == 1
              && CONST_INT_P (XEXP (cnd, 1)))
            {
              rtx implicit_set =
                gen_rtx_SET (VOIDmode, XEXP (cnd, 0), XEXP (cnd, 1));
              move2add_note_store (SET_DEST (implicit_set), implicit_set, insn);
            }
        }

      /* If this is a CALL_INSN, all call-used registers are stored with
         unknown values.  */
      if (CALL_P (insn))
        {
          for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
            {
              if (call_used_regs[i])
                /* Reset the information about this register.  */
                reg_set_luid[i] = 0;
            }
        }
    }
  return changed;
}

/* SET is a SET or CLOBBER that sets DST.  DATA is the insn which
   contains SET.
   Update reg_set_luid, reg_offset and reg_base_reg accordingly.
   Called from reload_cse_move2add via note_stores.  */

static void
move2add_note_store (rtx dst, const_rtx set, void *data)
{
  rtx insn = (rtx) data;
  unsigned int regno = 0;
  unsigned int nregs = 0;
  unsigned int i;
  enum machine_mode mode = GET_MODE (dst);

  if (GET_CODE (dst) == SUBREG)
    {
      regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
                                   GET_MODE (SUBREG_REG (dst)),
                                   SUBREG_BYTE (dst),
                                   GET_MODE (dst));
      nregs = subreg_nregs (dst);
      dst = SUBREG_REG (dst);
    }

  /* Some targets do argument pushes without adding REG_INC notes.  */
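  /* A pre/post-increment or -decrement address in such a push modifies
     the address register even though the SET destination is a MEM, so
     any tracked value for that register must be discarded here.  */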
  if (MEM_P (dst))
    {
      dst = XEXP (dst, 0);
      if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
          || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC)
        reg_set_luid[REGNO (XEXP (dst, 0))] = 0;
      return;
    }
  if (!REG_P (dst))
    return;

  regno += REGNO (dst);
  if (!nregs)
    nregs = hard_regno_nregs[regno][mode];

  if (SCALAR_INT_MODE_P (GET_MODE (dst))
      && nregs == 1 && GET_CODE (set) == SET)
    {
      rtx note, sym = NULL_RTX;
      HOST_WIDE_INT off;

      note = find_reg_equal_equiv_note (insn);
      if (note && GET_CODE (XEXP (note, 0)) == SYMBOL_REF)
        {
          sym = XEXP (note, 0);
          off = 0;
        }
      else if (note && GET_CODE (XEXP (note, 0)) == CONST
               && GET_CODE (XEXP (XEXP (note, 0), 0)) == PLUS
               && GET_CODE (XEXP (XEXP (XEXP (note, 0), 0), 0)) == SYMBOL_REF
               && CONST_INT_P (XEXP (XEXP (XEXP (note, 0), 0), 1)))
        {
          sym = XEXP (XEXP (XEXP (note, 0), 0), 0);
          off = INTVAL (XEXP (XEXP (XEXP (note, 0), 0), 1));
        }

      if (sym != NULL_RTX)
        {
          reg_base_reg[regno] = -1;
          reg_symbol_ref[regno] = sym;
          reg_offset[regno] = off;
          reg_mode[regno] = mode;
          reg_set_luid[regno] = move2add_luid;
          return;
        }
    }

  if (SCALAR_INT_MODE_P (GET_MODE (dst))
      && nregs == 1 && GET_CODE (set) == SET
      && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
    {
      rtx src = SET_SRC (set);
      rtx base_reg;
      HOST_WIDE_INT offset;
      int base_regno;
      /* This may be different from mode, if SET_DEST (set) is a
         SUBREG.  */
      enum machine_mode dst_mode = GET_MODE (dst);

      switch (GET_CODE (src))
        {
        case PLUS:
          if (REG_P (XEXP (src, 0)))
            {
              base_reg = XEXP (src, 0);

              if (CONST_INT_P (XEXP (src, 1)))
                offset = INTVAL (XEXP (src, 1));
              else if (REG_P (XEXP (src, 1))
                       && (reg_set_luid[REGNO (XEXP (src, 1))]
                           > move2add_last_label_luid)
                       && (MODES_OK_FOR_MOVE2ADD
                           (dst_mode, reg_mode[REGNO (XEXP (src, 1))])))
                {
                  if (reg_base_reg[REGNO (XEXP (src, 1))] < 0
                      && reg_symbol_ref[REGNO (XEXP (src, 1))] == NULL_RTX)
                    offset = reg_offset[REGNO (XEXP (src, 1))];
                  /* Maybe the first register is known to be a
                     constant.  */
                  else if (reg_set_luid[REGNO (base_reg)]
                           > move2add_last_label_luid
                           && (MODES_OK_FOR_MOVE2ADD
                               (dst_mode, reg_mode[REGNO (base_reg)]))
                           && reg_base_reg[REGNO (base_reg)] < 0
                           && reg_symbol_ref[REGNO (base_reg)] == NULL_RTX)
                    {
                      offset = reg_offset[REGNO (base_reg)];
                      base_reg = XEXP (src, 1);
                    }
                  else
                    goto invalidate;
                }
              else
                goto invalidate;

              break;
            }

          goto invalidate;

        case REG:
          base_reg = src;
          offset = 0;
          break;

        case CONST_INT:
          /* Start tracking the register as a constant.  */
          reg_base_reg[regno] = -1;
          reg_symbol_ref[regno] = NULL_RTX;
          reg_offset[regno] = INTVAL (SET_SRC (set));
          /* We assign the same luid to all registers set to constants.  */
          reg_set_luid[regno] = move2add_last_label_luid + 1;
          reg_mode[regno] = mode;
          return;

        default:
        invalidate:
          /* Invalidate the contents of the register.  */
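          /* A luid of zero can never exceed move2add_last_label_luid, so
             the table entry for this register is treated as stale from
             here on.  */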
          reg_set_luid[regno] = 0;
          return;
        }

      base_regno = REGNO (base_reg);
      /* If information about the base register is not valid, set it
         up as a new base register, pretending its value is known
         starting from the current insn.  */
      if (reg_set_luid[base_regno] <= move2add_last_label_luid)
        {
          reg_base_reg[base_regno] = base_regno;
          reg_symbol_ref[base_regno] = NULL_RTX;
          reg_offset[base_regno] = 0;
          reg_set_luid[base_regno] = move2add_luid;
          reg_mode[base_regno] = mode;
        }
      else if (! MODES_OK_FOR_MOVE2ADD (dst_mode,
                                        reg_mode[base_regno]))
        goto invalidate;

      reg_mode[regno] = mode;

      /* Copy base information from our base register.  */
      reg_set_luid[regno] = reg_set_luid[base_regno];
      reg_base_reg[regno] = reg_base_reg[base_regno];
      reg_symbol_ref[regno] = reg_symbol_ref[base_regno];

      /* Compute the sum of the offsets or constants.  */
      reg_offset[regno] = trunc_int_for_mode (offset
                                              + reg_offset[base_regno],
                                              dst_mode);
    }
  else
    {
      unsigned int endregno = regno + nregs;

      for (i = regno; i < endregno; i++)
        /* Reset the information about this register.  */
        reg_set_luid[i] = 0;
    }
}

static bool
gate_handle_postreload (void)
{
  return (optimize > 0 && reload_completed);
}


static unsigned int
rest_of_handle_postreload (void)
{
  if (!dbg_cnt (postreload_cse))
    return 0;

  /* Do a very simple CSE pass over just the hard registers.  */
  reload_cse_regs (get_insns ());
  /* Reload_cse_regs can eliminate potentially-trapping MEMs.
     Remove any EH edges associated with them.  */
  if (cfun->can_throw_non_call_exceptions)
    purge_all_dead_edges ();

  return 0;
}

struct rtl_opt_pass pass_postreload_cse =
{
 {
  RTL_PASS,
  "postreload",                         /* name */
  gate_handle_postreload,               /* gate */
  rest_of_handle_postreload,            /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_RELOAD_CSE_REGS,                   /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  0                                     /* todo_flags_finish */
 }
};