/* Common subexpression elimination library for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "emit-rtl.h"
#include "diagnostic-core.h"
#include "output.h"
#include "ggc.h"
#include "hashtab.h"
#include "tree-pass.h"
#include "cselib.h"
#include "params.h"
#include "alloc-pool.h"
#include "target.h"
#include "bitmap.h"

/* A list of cselib_val structures.  */
struct elt_list {
  struct elt_list *next;
  cselib_val *elt;
};

static bool cselib_record_memory;
static bool cselib_preserve_constants;
static bool cselib_any_perm_equivs;
static int entry_and_rtx_equal_p (const void *, const void *);
static hashval_t get_value_hash (const void *);
static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
static void new_elt_loc_list (cselib_val *, rtx);
static void unchain_one_value (cselib_val *);
static void unchain_one_elt_list (struct elt_list **);
static void unchain_one_elt_loc_list (struct elt_loc_list **);
static int discard_useless_locs (void **, void *);
static int discard_useless_values (void **, void *);
static void remove_useless_values (void);
static int rtx_equal_for_cselib_1 (rtx, rtx, enum machine_mode);
static unsigned int cselib_hash_rtx (rtx, int, enum machine_mode);
static cselib_val *new_cselib_val (unsigned int, enum machine_mode, rtx);
static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
static cselib_val *cselib_lookup_mem (rtx, int);
static void cselib_invalidate_regno (unsigned int, enum machine_mode);
static void cselib_invalidate_mem (rtx);
static void cselib_record_set (rtx, cselib_val *, cselib_val *);
static void cselib_record_sets (rtx);

struct expand_value_data
{
  bitmap regs_active;
  cselib_expand_callback callback;
  void *callback_arg;
  bool dummy;
};

static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);

/* There are three ways in which cselib can look up an rtx:
   - for a REG, the reg_values table (which is indexed by regno) is used
   - for a MEM, we recursively look up its address and then follow the
     addr_list of that value
   - for everything else, we compute a hash value and go through the hash
     table.  Since different rtx's can still have the same hash value,
     this involves walking the table entries for a given value and comparing
     the locations of the entries with the rtx we are looking up.  */
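/* For example, to look up (mem:SI (plus:SI (reg:SI 1) (const_int 4))),
   we first resolve (reg:SI 1) via REG_VALUES, then hash the resulting
   (plus:SI (value ...) (const_int 4)) address and look it up in the
   hash table, and finally walk the addr_list of the address's value
   for an SImode MEM.  (Illustrative sketch of the three paths above.)  */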
/* A table that enables us to look up elts by their value.  */
static htab_t cselib_hash_table;

/* This is a global so we don't have to pass this through every function.
   It is used in new_elt_loc_list to set SETTING_INSN.  */
static rtx cselib_current_insn;

/* The unique id that the next created value will take.  */
static unsigned int next_uid;

/* The number of registers we had when the varrays were last resized.  */
static unsigned int cselib_nregs;

/* Count values without known locations, or with only locations that
   wouldn't have been known except for debug insns.  Whenever this
   grows too big, we remove these useless values from the table.

   Counting values with only debug locations is a bit tricky.  We don't
   want to increment n_useless_values when we create a value for a
   debug insn, for this would get n_useless_values out of sync, but we
   want to increment it if all locs in the list that were ever referenced
   in nondebug insns are removed from the list.

   In the general case, once we do that, we'd have to stop accepting
   nondebug expressions in the loc list, to avoid having two values
   equivalent that, without debug insns, would have been made into
   separate values.  However, because debug insns never introduce
   equivalences themselves (no assignments), the only means for
   growing loc lists is through nondebug assignments.  If the locs
   also happen to be referenced in debug insns, it will work just fine.

   A consequence of this is that there's at most one debug-only loc in
   each loc list.  If we keep it in the first entry, testing whether
   we have a debug-only loc list takes O(1).

   Furthermore, since any additional entry in a loc list containing a
   debug loc would have to come from an assignment (nondebug) that
   references both the initial debug loc and the newly-equivalent loc,
   the initial debug loc would be promoted to a nondebug loc, and the
   loc list would not contain debug locs any more.

   So the only case we have to be careful with in order to keep
   n_useless_values in sync between debug and nondebug compilations is
   to avoid incrementing n_useless_values when removing the single loc
   from a value that turns out to not appear outside debug values.  We
   increment n_useless_debug_values instead, and leave such values
   alone until, for other reasons, we garbage-collect useless
   values.  */
static int n_useless_values;
static int n_useless_debug_values;

/* Count values whose locs have been taken exclusively from debug
   insns for the entire life of the value.  */
static int n_debug_values;

/* Number of useless values before we remove them from the hash table.  */
#define MAX_USELESS_VALUES 32

/* This table maps from register number to values.  It does not
   contain pointers to cselib_val structures, but rather elt_lists.
   The purpose is to be able to refer to the same register in
   different modes.  The first element of the list defines the mode in
   which the register was set; if the mode is unknown or the value is
   no longer valid in that mode, ELT will be NULL for the first
   element.  */
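/* For instance, after (set (reg:DI 5) (reg:DI 6)) the first elt_list in
   REG_VALUES (5) holds the DImode value of the set; a later lookup of
   (reg:SI 5) would append a second, SImode entry to the same list.
   (Illustrative sketch of the layout described above.)  */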
static struct elt_list **reg_values;
static unsigned int reg_values_size;
#define REG_VALUES(i) reg_values[i]

/* The largest number of hard regs used by any entry added to the
   REG_VALUES table.  Cleared on each cselib_clear_table() invocation.  */
static unsigned int max_value_regs;

/* Here the set of indices I with REG_VALUES(I) != 0 is saved.  This is used
   in cselib_clear_table() for fast emptying.  */
static unsigned int *used_regs;
static unsigned int n_used_regs;

/* We pass this to cselib_invalidate_mem to invalidate all of
   memory for a non-const call instruction.  */
static GTY(()) rtx callmem;

/* Set by discard_useless_locs if it deleted the last location of any
   value.  */
static int values_became_useless;

/* Used as stop element of the containing_mem list so we can check
   presence in the list by checking the next pointer.  */
static cselib_val dummy_val;

/* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
   that is constant through the whole function and should never be
   eliminated.  */
static cselib_val *cfa_base_preserved_val;
static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;

/* Used to list all values that contain a memory reference.
   May or may not contain the useless values - the list is compacted
   each time memory is invalidated.  */
static cselib_val *first_containing_mem = &dummy_val;
static alloc_pool elt_loc_list_pool, elt_list_pool, cselib_val_pool, value_pool;

/* If nonnull, cselib will call this function before freeing useless
   VALUEs.  A VALUE is deemed useless if its "locs" field is null.  */
void (*cselib_discard_hook) (cselib_val *);

/* If nonnull, cselib will call this function before recording sets or
   even clobbering outputs of INSN.  All the recorded sets will be
   represented in the array sets[n_sets].  new_val_min can be used to
   tell whether values present in sets are introduced by this
   instruction.  */
void (*cselib_record_sets_hook) (rtx insn, struct cselib_set *sets,
                                 int n_sets);

#define PRESERVED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)



/* Allocate a struct elt_list and fill in its two elements with the
   arguments.  */

static inline struct elt_list *
new_elt_list (struct elt_list *next, cselib_val *elt)
{
  struct elt_list *el;
  el = (struct elt_list *) pool_alloc (elt_list_pool);
  el->next = next;
  el->elt = elt;
  return el;
}

/* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
   list.  */

static inline void
new_elt_loc_list (cselib_val *val, rtx loc)
{
  struct elt_loc_list *el, *next = val->locs;

  gcc_checking_assert (!next || !next->setting_insn
                       || !DEBUG_INSN_P (next->setting_insn)
                       || cselib_current_insn == next->setting_insn);

  /* If we're creating the first loc in a debug insn context, we've
     just created a debug value.  Count it.  */
  if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
    n_debug_values++;

  val = canonical_cselib_val (val);
  next = val->locs;

  if (GET_CODE (loc) == VALUE)
    {
      loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;

      gcc_checking_assert (PRESERVED_VALUE_P (loc)
                           == PRESERVED_VALUE_P (val->val_rtx));

      if (val->val_rtx == loc)
        return;
      else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
        {
          /* Reverse the insertion.  */
          new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
          return;
        }

      gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);

      if (CSELIB_VAL_PTR (loc)->locs)
        {
          /* Bring all locs from LOC to VAL.  */
          for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
            {
              /* Adjust values that have LOC as canonical so that VAL
                 becomes their canonical.  */
              if (el->loc && GET_CODE (el->loc) == VALUE)
                {
                  gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
                                       == loc);
                  CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
                }
            }
          el->next = val->locs;
          next = val->locs = CSELIB_VAL_PTR (loc)->locs;
        }

      if (CSELIB_VAL_PTR (loc)->addr_list)
        {
          /* Bring LOC's addr_list into the canonical node.  */
          struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
          while (last->next)
            last = last->next;
          last->next = val->addr_list;
          val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
          CSELIB_VAL_PTR (loc)->addr_list = NULL;
        }

      if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
          && val->next_containing_mem == NULL)
        {
          /* Add VAL to the containing_mem list after LOC.  LOC will
             be removed when we notice it doesn't contain any
             MEMs.  */
          val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
          CSELIB_VAL_PTR (loc)->next_containing_mem = val;
        }

      /* Chain LOC back to VAL.  */
      el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
      el->loc = val->val_rtx;
      el->setting_insn = cselib_current_insn;
      el->next = NULL;
      CSELIB_VAL_PTR (loc)->locs = el;
    }

  el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
  el->loc = loc;
  el->setting_insn = cselib_current_insn;
  el->next = next;
  val->locs = el;
}
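/* An illustrative sketch of the merge above: if V1 (uid 3) gains the
   equivalence V2 (uid 7), V1 stays canonical because its uid is lower;
   V2's locs and addr_list migrate to V1, and V2 is left with the single
   loc (value V1) chaining it back to the canonical node.  */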
/* Promote loc L to a nondebug cselib_current_insn if L is marked as
   originating from a debug insn, maintaining the debug values
   count.  */

static inline void
promote_debug_loc (struct elt_loc_list *l)
{
  if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      n_debug_values--;
      l->setting_insn = cselib_current_insn;
      if (cselib_preserve_constants && l->next)
        {
          gcc_assert (l->next->setting_insn
                      && DEBUG_INSN_P (l->next->setting_insn)
                      && !l->next->next);
          l->next->setting_insn = cselib_current_insn;
        }
      else
        gcc_assert (!l->next);
    }
}

/* The elt_list at *PL is no longer needed.  Unchain it and free its
   storage.  */

static inline void
unchain_one_elt_list (struct elt_list **pl)
{
  struct elt_list *l = *pl;

  *pl = l->next;
  pool_free (elt_list_pool, l);
}

/* Likewise for elt_loc_lists.  */

static void
unchain_one_elt_loc_list (struct elt_loc_list **pl)
{
  struct elt_loc_list *l = *pl;

  *pl = l->next;
  pool_free (elt_loc_list_pool, l);
}

/* Likewise for cselib_vals.  This also frees the addr_list associated with
   V.  */

static void
unchain_one_value (cselib_val *v)
{
  while (v->addr_list)
    unchain_one_elt_list (&v->addr_list);

  pool_free (cselib_val_pool, v);
}

/* Remove all entries from the hash table.  Also used during
   initialization.  */

void
cselib_clear_table (void)
{
  cselib_reset_table (1);
}

/* Return TRUE if V is a constant, a function invariant or a VALUE
   equivalence; FALSE otherwise.  */

static bool
invariant_or_equiv_p (cselib_val *v)
{
  struct elt_loc_list *l;

  if (v == cfa_base_preserved_val)
    return true;

  /* Keep VALUE equivalences around.  */
  for (l = v->locs; l; l = l->next)
    if (GET_CODE (l->loc) == VALUE)
      return true;

  if (v->locs != NULL
      && v->locs->next == NULL)
    {
      if (CONSTANT_P (v->locs->loc)
          && (GET_CODE (v->locs->loc) != CONST
              || !references_value_p (v->locs->loc, 0)))
        return true;
      /* Although a debug expr may be bound to different expressions,
         we can preserve it as if it were constant, to get unification
         and proper merging within var-tracking.  */
      if (GET_CODE (v->locs->loc) == DEBUG_EXPR
          || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
          || GET_CODE (v->locs->loc) == ENTRY_VALUE
          || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
        return true;

      /* (plus (value V) (const_int C)) is invariant iff V is invariant.  */
      if (GET_CODE (v->locs->loc) == PLUS
          && CONST_INT_P (XEXP (v->locs->loc, 1))
          && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
          && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
        return true;
    }

  return false;
}
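/* For example, if V2's only loc is (symbol_ref "x") and V1's only loc
   is (plus (value V2) (const_int 8)), the recursive PLUS rule above
   marks both V2 and V1 invariant.  (Illustrative of the checks above.)  */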
/* Remove from hash table all VALUEs except constants, function
   invariants and VALUE equivalences.  */

static int
preserve_constants_and_equivs (void **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = (cselib_val *)*x;

  if (!invariant_or_equiv_p (v))
    htab_clear_slot (cselib_hash_table, x);
  return 1;
}

/* Remove all entries from the hash table, arranging for the next
   value to be numbered NUM.  */

void
cselib_reset_table (unsigned int num)
{
  unsigned int i;

  max_value_regs = 0;

  if (cfa_base_preserved_val)
    {
      unsigned int regno = cfa_base_preserved_regno;
      unsigned int new_used_regs = 0;
      for (i = 0; i < n_used_regs; i++)
        if (used_regs[i] == regno)
          {
            new_used_regs = 1;
            continue;
          }
        else
          REG_VALUES (used_regs[i]) = 0;
      gcc_assert (new_used_regs == 1);
      n_used_regs = new_used_regs;
      used_regs[0] = regno;
      max_value_regs
        = hard_regno_nregs[regno][GET_MODE (cfa_base_preserved_val->locs->loc)];
    }
  else
    {
      for (i = 0; i < n_used_regs; i++)
        REG_VALUES (used_regs[i]) = 0;
      n_used_regs = 0;
    }

  if (cselib_preserve_constants)
    htab_traverse (cselib_hash_table, preserve_constants_and_equivs, NULL);
  else
    {
      htab_empty (cselib_hash_table);
      gcc_checking_assert (!cselib_any_perm_equivs);
    }

  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;

  next_uid = num;

  first_containing_mem = &dummy_val;
}

/* Return the number of the next value that will be generated.  */

unsigned int
cselib_get_next_uid (void)
{
  return next_uid;
}

/* See the documentation of cselib_find_slot below.  */
static enum machine_mode find_slot_memmode;

/* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
   INSERTing if requested.  When X is part of the address of a MEM,
   MEMMODE should specify the mode of the MEM.  While searching the
   table, MEMMODE is held in FIND_SLOT_MEMMODE, so that autoinc RTXs
   in X can be resolved.  */

static void **
cselib_find_slot (rtx x, hashval_t hash, enum insert_option insert,
                  enum machine_mode memmode)
{
  void **slot;
  find_slot_memmode = memmode;
  slot = htab_find_slot_with_hash (cselib_hash_table, x, hash, insert);
  find_slot_memmode = VOIDmode;
  return slot;
}

/* The equality test for our hash table.  The first argument ENTRY is a table
   element (i.e. a cselib_val), while the second arg X is an rtx.  We know
   that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
   CONST of an appropriate mode.  */

static int
entry_and_rtx_equal_p (const void *entry, const void *x_arg)
{
  struct elt_loc_list *l;
  const cselib_val *const v = (const cselib_val *) entry;
  rtx x = CONST_CAST_RTX ((const_rtx)x_arg);
  enum machine_mode mode = GET_MODE (x);

  gcc_assert (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
              && (mode != VOIDmode || GET_CODE (x) != CONST_DOUBLE));

  if (mode != GET_MODE (v->val_rtx))
    return 0;

  /* Unwrap X if necessary.  */
  if (GET_CODE (x) == CONST
      && (CONST_INT_P (XEXP (x, 0))
          || GET_CODE (XEXP (x, 0)) == CONST_FIXED
          || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
    x = XEXP (x, 0);

  /* We don't guarantee that distinct rtx's have different hash values,
     so we need to do a comparison.  */
  for (l = v->locs; l; l = l->next)
    if (rtx_equal_for_cselib_1 (l->loc, x, find_slot_memmode))
      {
        promote_debug_loc (l);
        return 1;
      }

  return 0;
}

/* The hash function for our hash table.  The value is always computed with
   cselib_hash_rtx when adding an element; this function just extracts the
   hash value from a cselib_val structure.  */

static hashval_t
get_value_hash (const void *entry)
{
  const cselib_val *const v = (const cselib_val *) entry;
  return v->hash;
}

/* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
   only return true for values which point to a cselib_val whose value
   element has been set to zero, which implies the cselib_val will be
   removed.  */

int
references_value_p (const_rtx x, int only_useless)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  if (GET_CODE (x) == VALUE
      && (! only_useless ||
          (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
    return 1;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
        return 1;
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (references_value_p (XVECEXP (x, i, j), only_useless))
            return 1;
    }

  return 0;
}
/* For all locations found in X, delete locations that reference useless
   values (i.e. values without any location).  Called through
   htab_traverse.  */

static int
discard_useless_locs (void **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = (cselib_val *)*x;
  struct elt_loc_list **p = &v->locs;
  bool had_locs = v->locs != NULL;
  rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;

  while (*p)
    {
      if (references_value_p ((*p)->loc, 1))
        unchain_one_elt_loc_list (p);
      else
        p = &(*p)->next;
    }

  if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
    {
      if (setting_insn && DEBUG_INSN_P (setting_insn))
        n_useless_debug_values++;
      else
        n_useless_values++;
      values_became_useless = 1;
    }
  return 1;
}

/* If X is a value with no locations, remove it from the hashtable.  */

static int
discard_useless_values (void **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = (cselib_val *)*x;

  if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
    {
      if (cselib_discard_hook)
        cselib_discard_hook (v);

      CSELIB_VAL_PTR (v->val_rtx) = NULL;
      htab_clear_slot (cselib_hash_table, x);
      unchain_one_value (v);
      n_useless_values--;
    }

  return 1;
}

/* Clean out useless values (i.e. those which no longer have locations
   associated with them) from the hash table.  */

static void
remove_useless_values (void)
{
  cselib_val **p, *v;

  /* First pass: eliminate locations that reference the value.  That in
     turn can make more values useless.  */
  do
    {
      values_became_useless = 0;
      htab_traverse (cselib_hash_table, discard_useless_locs, 0);
    }
  while (values_became_useless);

  /* Second pass: actually remove the values.  */

  p = &first_containing_mem;
  for (v = *p; v != &dummy_val; v = v->next_containing_mem)
    if (v->locs && v == canonical_cselib_val (v))
      {
        *p = v;
        p = &(*p)->next_containing_mem;
      }
  *p = &dummy_val;

  n_useless_values += n_useless_debug_values;
  n_debug_values -= n_useless_debug_values;
  n_useless_debug_values = 0;

  htab_traverse (cselib_hash_table, discard_useless_values, 0);

  gcc_assert (!n_useless_values);
}

/* Arrange for a value to not be removed from the hash table even if
   it becomes useless.  */

void
cselib_preserve_value (cselib_val *v)
{
  PRESERVED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is preserved.  */

bool
cselib_preserved_value_p (cselib_val *v)
{
  return PRESERVED_VALUE_P (v->val_rtx);
}

/* Arrange for a REG value to be assumed constant through the whole function,
   never invalidated and preserved across cselib_reset_table calls.  */

void
cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
{
  if (cselib_preserve_constants
      && v->locs
      && REG_P (v->locs->loc))
    {
      cfa_base_preserved_val = v;
      cfa_base_preserved_regno = regno;
    }
}

/* Clean all non-constant expressions in the hash table, but retain
   their values.  */

void
cselib_preserve_only_values (void)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    cselib_invalidate_regno (i, reg_raw_mode[i]);

  cselib_invalidate_mem (callmem);

  remove_useless_values ();

  gcc_assert (first_containing_mem == &dummy_val);
}

/* Return the mode in which a register was last set.  If X is not a
   register, return its mode.  If the mode in which the register was
   set is not known, or the value was already clobbered, return
   VOIDmode.  */

enum machine_mode
cselib_reg_set_mode (const_rtx x)
{
  if (!REG_P (x))
    return GET_MODE (x);

  if (REG_VALUES (REGNO (x)) == NULL
      || REG_VALUES (REGNO (x))->elt == NULL)
    return VOIDmode;

  return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
}
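/* E.g. after (set (reg:DI 5) ...), cselib_reg_set_mode returns DImode
   even when asked about (reg:SI 5), since the first REG_VALUES entry
   records the mode in which the register was set.  (Illustrative
   example.)  */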
/* Return nonzero if we can prove that X and Y contain the same value, taking
   our gathered information into account.  */

int
rtx_equal_for_cselib_p (rtx x, rtx y)
{
  return rtx_equal_for_cselib_1 (x, y, VOIDmode);
}

/* If X is a PLUS or an autoinc operation, expand the operation,
   storing the offset, if any, in *OFF.  */

static rtx
autoinc_split (rtx x, rtx *off, enum machine_mode memmode)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      *off = XEXP (x, 1);
      return XEXP (x, 0);

    case PRE_DEC:
      if (memmode == VOIDmode)
        return x;

      *off = GEN_INT (-GET_MODE_SIZE (memmode));
      return XEXP (x, 0);

    case PRE_INC:
      if (memmode == VOIDmode)
        return x;

      *off = GEN_INT (GET_MODE_SIZE (memmode));
      return XEXP (x, 0);

    case PRE_MODIFY:
      return XEXP (x, 1);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return XEXP (x, 0);

    default:
      return x;
    }
}
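/* For instance, with MEMMODE == SImode on a target where
   GET_MODE_SIZE (SImode) == 4, (pre_dec (reg R)) splits into base
   (reg R) with *OFF = (const_int -4), matching how cselib_hash_rtx
   canonicalizes autoincrements below.  (Illustrative example.)  */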
/* Return nonzero if we can prove that X and Y contain the same value,
   taking our gathered information into account.  MEMMODE holds the
   mode of the enclosing MEM, if any, as required to deal with autoinc
   addressing modes.  If X and Y are not (known to be) part of
   addresses, MEMMODE should be VOIDmode.  */

static int
rtx_equal_for_cselib_1 (rtx x, rtx y, enum machine_mode memmode)
{
  enum rtx_code code;
  const char *fmt;
  int i;

  if (REG_P (x) || MEM_P (x))
    {
      cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);

      if (e)
        x = e->val_rtx;
    }

  if (REG_P (y) || MEM_P (y))
    {
      cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);

      if (e)
        y = e->val_rtx;
    }

  if (x == y)
    return 1;

  if (GET_CODE (x) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
      struct elt_loc_list *l;

      if (GET_CODE (y) == VALUE)
        return e == canonical_cselib_val (CSELIB_VAL_PTR (y));

      for (l = e->locs; l; l = l->next)
        {
          rtx t = l->loc;

          /* Avoid infinite recursion.  We know we have the canonical
             value, so we can just skip any values in the equivalence
             list.  */
          if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
            continue;
          else if (rtx_equal_for_cselib_1 (t, y, memmode))
            return 1;
        }

      return 0;
    }
  else if (GET_CODE (y) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
      struct elt_loc_list *l;

      for (l = e->locs; l; l = l->next)
        {
          rtx t = l->loc;

          if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
            continue;
          else if (rtx_equal_for_cselib_1 (x, t, memmode))
            return 1;
        }

      return 0;
    }

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  if (GET_CODE (x) != GET_CODE (y))
    {
      rtx xorig = x, yorig = y;
      rtx xoff = NULL, yoff = NULL;

      x = autoinc_split (x, &xoff, memmode);
      y = autoinc_split (y, &yoff, memmode);

      if (!xoff != !yoff)
        return 0;

      if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode))
        return 0;

      /* Don't recurse if nothing changed.  */
      if (x != xorig || y != yorig)
        return rtx_equal_for_cselib_1 (x, y, memmode);

      return 0;
    }

  /* These won't be handled correctly by the code below.  */
  switch (GET_CODE (x))
    {
    case CONST_DOUBLE:
    case CONST_FIXED:
    case DEBUG_EXPR:
      return 0;

    case DEBUG_IMPLICIT_PTR:
      return DEBUG_IMPLICIT_PTR_DECL (x)
             == DEBUG_IMPLICIT_PTR_DECL (y);

    case DEBUG_PARAMETER_REF:
      return DEBUG_PARAMETER_REF_DECL (x)
             == DEBUG_PARAMETER_REF_DECL (y);

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, it is thus undesirable to
         use rtx_equal_for_cselib_1 to compare the operands.  */
      return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case MEM:
      /* We have to compare any autoinc operations in the addresses
         using this MEM's mode.  */
      return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x));

    default:
      break;
    }

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'n':
        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'V':
        case 'E':
          /* Two vectors must have the same length.  */
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;

          /* And the corresponding elements must match.  */
          for (j = 0; j < XVECLEN (x, i); j++)
            if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
                                          XVECEXP (y, i, j), memmode))
              return 0;
          break;

        case 'e':
          if (i == 1
              && targetm.commutative_p (x, UNKNOWN)
              && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode)
              && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode))
            return 1;
          if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode))
            return 0;
          break;

        case 'S':
        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'u':
          /* These are just backpointers, so they don't matter.  */
          break;

        case '0':
        case 't':
          break;

          /* It is believed that rtx's at this level will never
             contain anything but integers and other rtx's,
             except for within LABEL_REFs and SYMBOL_REFs.  */
        default:
          gcc_unreachable ();
        }
    }
  return 1;
}

/* We need to pass down the mode of constants through the hash table
   functions.  For that purpose, wrap them in a CONST of the appropriate
   mode.  */
static rtx
wrap_constant (enum machine_mode mode, rtx x)
{
  if (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
      && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
    return x;
  gcc_assert (mode != VOIDmode);
  return gen_rtx_CONST (mode, x);
}
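/* For example, looking up (const_int 4) in SImode goes through the
   table as (const:SI (const_int 4)); entry_and_rtx_equal_p unwraps it
   again before comparing locations.  (Illustrative of wrap_constant's
   use.)  */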
/* Hash an rtx.  Return 0 if we couldn't hash the rtx.
   For registers and memory locations, we look up their cselib_val structure
   and return its VALUE element.
   Possible reasons for returning 0 are: the object is volatile, or we
   couldn't find a register or memory location in the table and CREATE is
   zero.  If CREATE is nonzero, table elts are created for regs and mem.
   N.B. this hash function returns the same hash value for RTXes that
   differ only in the order of operands, thus it is suitable for comparisons
   that take commutativity into account.
   If we wanted to also support associative rules, we'd have to use a different
   strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) .
   MEMMODE indicates the mode of an enclosing MEM, and it's only
   used to compute autoinc values.
   We used to have a MODE argument for hashing for CONST_INTs, but that
   didn't make sense, since it caused spurious hash differences between
    (set (reg:SI 1) (const_int))
    (plus:SI (reg:SI 2) (reg:SI 1))
   and
    (plus:SI (reg:SI 2) (const_int))
   If the mode is important in any context, it must be checked specifically
   in a comparison anyway, since relying on hash differences is unsafe.  */

static unsigned int
cselib_hash_rtx (rtx x, int create, enum machine_mode memmode)
{
  cselib_val *e;
  int i, j;
  enum rtx_code code;
  const char *fmt;
  unsigned int hash = 0;

  code = GET_CODE (x);
  hash += (unsigned) code + (unsigned) GET_MODE (x);

  switch (code)
    {
    case VALUE:
      e = CSELIB_VAL_PTR (x);
      return e->hash;

    case MEM:
    case REG:
      e = cselib_lookup (x, GET_MODE (x), create, memmode);
      if (! e)
        return 0;

      return e->hash;

    case DEBUG_EXPR:
      hash += ((unsigned) DEBUG_EXPR << 7)
              + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_EXPR;

    case DEBUG_IMPLICIT_PTR:
      hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
              + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;

    case DEBUG_PARAMETER_REF:
      hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
              + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, thus try to avoid
         recursing on argument if ENTRY_VALUE is one of the
         forms emitted by expand_debug_expr, otherwise
         ENTRY_VALUE hash would depend on the current value
         in some register or memory.  */
      if (REG_P (ENTRY_VALUE_EXP (x)))
        hash += (unsigned int) REG
                + (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
                + (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
      else if (MEM_P (ENTRY_VALUE_EXP (x))
               && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
        hash += (unsigned int) MEM
                + (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
                + (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
      else
        hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
      return hash ? hash : (unsigned int) ENTRY_VALUE;

    case CONST_INT:
      hash += ((unsigned) CONST_INT << 7) + INTVAL (x);
      return hash ? hash : (unsigned int) CONST_INT;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
        hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      else
        hash += ((unsigned) CONST_DOUBLE_LOW (x)
                 + (unsigned) CONST_DOUBLE_HIGH (x));
      return hash ? hash : (unsigned int) CONST_DOUBLE;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash ? hash : (unsigned int) CONST_FIXED;
    case CONST_VECTOR:
      {
        int units;
        rtx elt;

        units = CONST_VECTOR_NUNITS (x);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (x, i);
            hash += cselib_hash_rtx (elt, 0, memmode);
          }

        return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
         differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
               + CODE_LABEL_NUMBER (XEXP (x, 0)));
      return hash ? hash : (unsigned int) LABEL_REF;

    case SYMBOL_REF:
      {
        /* Don't hash on the symbol's address to avoid bootstrap differences.
           Different hash values may cause expressions to be recorded in
           different orders and thus different registers to be used in the
           final assembler.  This also avoids differences in the dump files
           between various stages.  */
        unsigned int h = 0;
        const unsigned char *p = (const unsigned char *) XSTR (x, 0);

        while (*p)
          h += (h << 7) + *p++; /* ??? revisit */

        hash += ((unsigned int) SYMBOL_REF << 7) + h;
        return hash ? hash : (unsigned int) SYMBOL_REF;
      }

    case PRE_DEC:
    case PRE_INC:
      /* We can't compute these without knowing the MEM mode.  */
      gcc_assert (memmode != VOIDmode);
      i = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
        i = -i;
      /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
         like (mem:MEMMODE (plus (reg) (const_int I))).  */
      hash += (unsigned) PLUS - (unsigned)code
        + cselib_hash_rtx (XEXP (x, 0), create, memmode)
        + cselib_hash_rtx (GEN_INT (i), create, memmode);
      return hash ? hash : 1 + (unsigned) PLUS;

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 1), create, memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 0), create, memmode);

    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 0;

      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          {
            rtx tem = XEXP (x, i);
            unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);

            if (tem_hash == 0)
              return 0;

            hash += tem_hash;
          }
          break;
        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            {
              unsigned int tem_hash
                = cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);

              if (tem_hash == 0)
                return 0;

              hash += tem_hash;
            }
          break;

        case 's':
          {
            const unsigned char *p = (const unsigned char *) XSTR (x, i);

            if (p)
              while (*p)
                hash += *p++;
            break;
          }

        case 'i':
          hash += XINT (x, i);
          break;

        case '0':
        case 't':
          /* unused */
          break;

        default:
          gcc_unreachable ();
        }
    }

  return hash ? hash : 1 + (unsigned int) GET_CODE (x);
}
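/* Because operand hashes are simply summed in the loop above,
   (plus:SI (reg:SI 1) (reg:SI 2)) and (plus:SI (reg:SI 2) (reg:SI 1))
   hash identically, which is what makes the commutativity check in
   rtx_equal_for_cselib_1 safe.  (Illustrative note.)  */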
/* Create a new value structure for VALUE and initialize it.  The mode of the
   value is MODE.  */

static inline cselib_val *
new_cselib_val (unsigned int hash, enum machine_mode mode, rtx x)
{
  cselib_val *e = (cselib_val *) pool_alloc (cselib_val_pool);

  gcc_assert (hash);
  gcc_assert (next_uid);

  e->hash = hash;
  e->uid = next_uid++;
  /* We use an alloc pool to allocate this RTL construct because it
     accounts for about 8% of the overall memory usage.  We know
     precisely when we can have VALUE RTXen (when cselib is active)
     so we don't need to put them in garbage collected memory.
     ??? Why should a VALUE be an RTX in the first place?  */
  e->val_rtx = (rtx) pool_alloc (value_pool);
  memset (e->val_rtx, 0, RTX_HDR_SIZE);
  PUT_CODE (e->val_rtx, VALUE);
  PUT_MODE (e->val_rtx, mode);
  CSELIB_VAL_PTR (e->val_rtx) = e;
  e->addr_list = 0;
  e->locs = 0;
  e->next_containing_mem = 0;

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
      if (flag_dump_noaddr || flag_dump_unnumbered)
        fputs ("# ", dump_file);
      else
        fprintf (dump_file, "%p ", (void*)e);
      print_rtl_single (dump_file, x);
      fputc ('\n', dump_file);
    }

  return e;
}

/* ADDR_ELT is a value that is used as address.  MEM_ELT is the value that
   contains the data at this address.  X is a MEM that represents the
   value.  Update the two value structures to represent this situation.  */

static void
add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
{
  struct elt_loc_list *l;

  addr_elt = canonical_cselib_val (addr_elt);
  mem_elt = canonical_cselib_val (mem_elt);

  /* Avoid duplicates.  */
  for (l = mem_elt->locs; l; l = l->next)
    if (MEM_P (l->loc)
        && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
      {
        promote_debug_loc (l);
        return;
      }

  addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
  new_elt_loc_list (mem_elt,
                    replace_equiv_address_nv (x, addr_elt->val_rtx));
  if (mem_elt->next_containing_mem == NULL)
    {
      mem_elt->next_containing_mem = first_containing_mem;
      first_containing_mem = mem_elt;
    }
}
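/* After add_mem_for_addr, MEM_ELT carries a loc such as
   (mem:SI (value ADDR)) and ADDR_ELT's addr_list points back at
   MEM_ELT, which is how cselib_lookup_mem below finds an existing
   value for a MEM from its address value.  (Illustrative note.)  */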
/* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
   If CREATE, make a new one if we haven't seen it before.  */

static cselib_val *
cselib_lookup_mem (rtx x, int create)
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode addr_mode;
  void **slot;
  cselib_val *addr;
  cselib_val *mem_elt;
  struct elt_list *l;

  if (MEM_VOLATILE_P (x) || mode == BLKmode
      || !cselib_record_memory
      || (FLOAT_MODE_P (mode) && flag_float_store))
    return 0;

  addr_mode = GET_MODE (XEXP (x, 0));
  if (addr_mode == VOIDmode)
    addr_mode = Pmode;

  /* Look up the value for the address.  */
  addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
  if (! addr)
    return 0;

  addr = canonical_cselib_val (addr);
  /* Find a value that describes a value of our mode at that address.  */
  for (l = addr->addr_list; l; l = l->next)
    if (GET_MODE (l->elt->val_rtx) == mode)
      {
        promote_debug_loc (l->elt->locs);
        return l->elt;
      }

  if (! create)
    return 0;

  mem_elt = new_cselib_val (next_uid, mode, x);
  add_mem_for_addr (addr, mem_elt, x);
  slot = cselib_find_slot (wrap_constant (mode, x), mem_elt->hash,
                           INSERT, mode);
  *slot = mem_elt;
  return mem_elt;
}

/* Search through the possible substitutions in P.  We prefer a non-reg
   substitution because this allows us to expand the tree further.  If
   we find just a reg, take the lowest regno.  There may be several
   non-reg results; we just take the first one because they will all
   expand to the same place.  */

static rtx
expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
            int max_depth)
{
  rtx reg_result = NULL;
  unsigned int regno = UINT_MAX;
  struct elt_loc_list *p_in = p;

  for (; p; p = p->next)
    {
      /* Return these right away to avoid returning stack pointer based
         expressions for frame pointer and vice versa, which is something
         that would confuse DSE.  See the comment in cselib_expand_value_rtx_1
         for more details.  */
      if (REG_P (p->loc)
          && (REGNO (p->loc) == STACK_POINTER_REGNUM
              || REGNO (p->loc) == FRAME_POINTER_REGNUM
              || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
              || REGNO (p->loc) == cfa_base_preserved_regno))
        return p->loc;
      /* Avoid infinite recursion trying to expand a reg into
         the same reg.  */
      if ((REG_P (p->loc))
          && (REGNO (p->loc) < regno)
          && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
        {
          reg_result = p->loc;
          regno = REGNO (p->loc);
        }
      /* Avoid infinite recursion and do not try to expand the
         value.  */
      else if (GET_CODE (p->loc) == VALUE
               && CSELIB_VAL_PTR (p->loc)->locs == p_in)
        continue;
      else if (!REG_P (p->loc))
        {
          rtx result, note;
          if (dump_file && (dump_flags & TDF_CSELIB))
            {
              print_inline_rtx (dump_file, p->loc, 0);
              fprintf (dump_file, "\n");
            }
          if (GET_CODE (p->loc) == LO_SUM
              && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
              && p->setting_insn
              && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
              && XEXP (note, 0) == XEXP (p->loc, 1))
            return XEXP (p->loc, 1);
          result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
          if (result)
            return result;
        }

    }

  if (regno != UINT_MAX)
    {
      rtx result;
      if (dump_file && (dump_flags & TDF_CSELIB))
        fprintf (dump_file, "r%d\n", regno);

      result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
      if (result)
        return result;
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      if (reg_result)
        {
          print_inline_rtx (dump_file, reg_result, 0);
          fprintf (dump_file, "\n");
        }
      else
        fprintf (dump_file, "NULL\n");
    }
  return reg_result;
}
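/* E.g. among the locs {(reg 3), (plus (reg 6) (const_int 8))},
   expand_loc picks the PLUS, since a non-reg loc can be expanded
   further toward the expression's roots.  (Illustrative example.)  */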
/* Forward substitute and expand an expression out to its roots.
   This is the opposite of common subexpression elimination.  Because
   local value numbering is such a weak optimization, the expanded
   expression is pretty much unique (not from a pointer-equality point
   of view but from a tree-shape point of view).

   This function returns NULL if the expansion fails.  The expansion
   will fail if there is no value number for one of the operands or if
   one of the operands has been overwritten between the current insn
   and the beginning of the basic block.  For instance x has no
   expansion in:

   r1 <- r1 + 3
   x <- r1 + 8

   REGS_ACTIVE is a scratch bitmap that should be clear when passing in.
   It is clear on return.  */

rtx
cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = NULL;
  evd.callback_arg = NULL;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}

/* Same as cselib_expand_value_rtx, but using a callback to try to
   resolve some expressions.  The CB function should return ORIG if it
   can't or does not want to deal with a certain RTX.  Any other
   return value, including NULL, will be used as the expansion for
   VALUE, without any further changes.  */

rtx
cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
                            cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}

/* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
   or simplified.  Useful to find out whether cselib_expand_value_rtx_cb
   would return NULL or non-NULL, without allocating new rtx.  */

bool
cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
                                  cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = true;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
}
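/* All three entry points share cselib_expand_value_rtx_1; the evd.dummy
   flag turns the walk into a pure feasibility check that allocates no
   rtl, which is why cselib_dummy_expand_value_rtx_cb only reports
   NULL/non-NULL.  (Summary note of the wrappers above.)  */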
/* Internal implementation of cselib_expand_value_rtx and
   cselib_expand_value_rtx_cb.  */

static rtx
cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
                           int max_depth)
{
  rtx copy, scopy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;
  enum machine_mode mode;

  code = GET_CODE (orig);

  /* For the context of dse, if we end up expanding into a huge tree, we
     will not have a useful address, so we might as well just give up
     quickly.  */
  if (max_depth <= 0)
    return NULL;

  switch (code)
    {
    case REG:
      {
        struct elt_list *l = REG_VALUES (REGNO (orig));

        if (l && l->elt == NULL)
          l = l->next;
        for (; l; l = l->next)
          if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
            {
              rtx result;
              unsigned regno = REGNO (orig);

              /* The only thing that we are not willing to do (this
                 is a requirement of dse; if other potential uses
                 need this function, we should add a parm to control
                 it) is that we will not substitute the
                 STACK_POINTER_REGNUM, FRAME_POINTER or the
                 HARD_FRAME_POINTER.

                 These expansions confuse the code that notices that
                 stores into the frame go dead at the end of the
                 function and that the frame is not affected by calls
                 to subroutines.  If you allow the
                 STACK_POINTER_REGNUM substitution, then dse will
                 think that parameter pushing also goes dead which is
                 wrong.  If you allow the FRAME_POINTER or the
                 HARD_FRAME_POINTER then you lose the opportunity to
                 make the frame assumptions.  */
              if (regno == STACK_POINTER_REGNUM
                  || regno == FRAME_POINTER_REGNUM
                  || regno == HARD_FRAME_POINTER_REGNUM
                  || regno == cfa_base_preserved_regno)
                return orig;

              bitmap_set_bit (evd->regs_active, regno);

              if (dump_file && (dump_flags & TDF_CSELIB))
                fprintf (dump_file, "expanding: r%d into: ", regno);

              result = expand_loc (l->elt->locs, evd, max_depth);
              bitmap_clear_bit (evd->regs_active, regno);

              if (result)
                return result;
              else
                return orig;
            }
      }

    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
        return orig;
      break;

    case CONST:
      if (shared_const_p (orig))
        return orig;
      break;

    case SUBREG:
      {
        rtx subreg;

        if (evd->callback)
          {
            subreg = evd->callback (orig, evd->regs_active, max_depth,
                                    evd->callback_arg);
            if (subreg != orig)
              return subreg;
          }

        subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
                                            max_depth - 1);
        if (!subreg)
          return NULL;
        scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
                                     GET_MODE (SUBREG_REG (orig)),
                                     SUBREG_BYTE (orig));
        if (scopy == NULL
            || (GET_CODE (scopy) == SUBREG
                && !REG_P (SUBREG_REG (scopy))
                && !MEM_P (SUBREG_REG (scopy))))
          return NULL;

        return scopy;
      }

    case VALUE:
      {
        rtx result;

        if (dump_file && (dump_flags & TDF_CSELIB))
          {
            fputs ("\nexpanding ", dump_file);
            print_rtl_single (dump_file, orig);
            fputs (" into...", dump_file);
          }

        if (evd->callback)
          {
            result = evd->callback (orig, evd->regs_active, max_depth,
                                    evd->callback_arg);

            if (result != orig)
              return result;
          }

        result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
        return result;
      }

    case DEBUG_EXPR:
      if (evd->callback)
        return evd->callback (orig, evd->regs_active, max_depth,
                              evd->callback_arg);
      return orig;

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  if (evd->dummy)
    copy = NULL;
  else
    copy = shallow_copy_rtx (orig);

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          {
            rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
                                                    max_depth - 1);
            if (!result)
              return NULL;
            if (copy)
              XEXP (copy, i) = result;
          }
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) != NULL)
          {
            if (copy)
              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (orig, i); j++)
              {
                rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
                                                        evd, max_depth - 1);
                if (!result)
                  return NULL;
                if (copy)
                  XVECEXP (copy, i, j) = result;
              }
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'T':
      case 'u':
      case 'B':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }

  if (evd->dummy)
    return orig;

  mode = GET_MODE (copy);
  /* If an operand has been simplified into CONST_INT, which doesn't
     have a mode and the mode isn't derivable from the whole rtx's mode,
     try simplify_*_operation first with mode from original's operand
     and as a fallback wrap CONST_INT into gen_rtx_CONST.  */
  scopy = copy;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONST_INT_P (XEXP (copy, 0))
          && GET_MODE (XEXP (orig, 0)) != VOIDmode)
        {
          scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
                                            GET_MODE (XEXP (orig, 0)));
          if (scopy)
            return scopy;
        }
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      /* These expressions can derive operand modes from the whole rtx's
         mode.  */
      break;
    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      if (CONST_INT_P (XEXP (copy, 0))
          && GET_MODE (XEXP (orig, 0)) != VOIDmode)
        {
          scopy = simplify_ternary_operation (code, mode,
                                              GET_MODE (XEXP (orig, 0)),
                                              XEXP (copy, 0), XEXP (copy, 1),
                                              XEXP (copy, 2));
          if (scopy)
            return scopy;
        }
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONST_INT_P (XEXP (copy, 0))
          && GET_MODE (XEXP (copy, 1)) == VOIDmode
          && (GET_MODE (XEXP (orig, 0)) != VOIDmode
              || GET_MODE (XEXP (orig, 1)) != VOIDmode))
        {
          scopy = simplify_relational_operation (code, mode,
                                                 (GET_MODE (XEXP (orig, 0))
                                                  != VOIDmode)
                                                 ? GET_MODE (XEXP (orig, 0))
                                                 : GET_MODE (XEXP (orig, 1)),
                                                 XEXP (copy, 0),
                                                 XEXP (copy, 1));
          if (scopy)
            return scopy;
        }
      break;
    default:
      break;
    }
  scopy = simplify_rtx (copy);
  if (scopy)
    return scopy;
  return copy;
}
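/* For example, if (zero_extend:DI (reg:SI 1)) expands so that the
   operand becomes a bare (const_int 42), the RTX_UNARY branch above
   re-simplifies with the original operand's SImode, since the
   CONST_INT itself carries no mode.  (Illustrative of the fallback
   described above.)  */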
/* Walk rtx X and replace all occurrences of REG and MEM subexpressions
   with VALUE expressions.  This way, it becomes independent of changes
   to registers and memory.
   X isn't actually modified; if modifications are needed, new rtl is
   allocated.  However, the return value can share rtl with X.
   If X is within a MEM, MEMMODE must be the mode of the MEM.  */

rtx
cselib_subst_to_values (rtx x, enum machine_mode memmode)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  cselib_val *e;
  struct elt_list *l;
  rtx copy = x;
  int i;

  switch (code)
    {
    case REG:
      l = REG_VALUES (REGNO (x));
      if (l && l->elt == NULL)
        l = l->next;
      for (; l; l = l->next)
        if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
          return l->elt->val_rtx;

      gcc_unreachable ();

    case MEM:
      e = cselib_lookup_mem (x, 0);
      /* This used to happen for autoincrements, but we deal with them
         properly now.  Remove the if stmt for the next release.  */
      if (! e)
        {
          /* Assign a value that doesn't match any other.  */
          e = new_cselib_val (next_uid, GET_MODE (x), x);
        }
      return e->val_rtx;

    case ENTRY_VALUE:
      e = cselib_lookup (x, GET_MODE (x), 0, memmode);
      if (! e)
        break;
      return e->val_rtx;

    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST_INT:
    case CONST_FIXED:
      return x;

    case PRE_DEC:
    case PRE_INC:
      gcc_assert (memmode != VOIDmode);
      i = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
        i = -i;
      return cselib_subst_to_values (plus_constant (XEXP (x, 0), i),
                                     memmode);

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 1), memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 0), memmode);

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          rtx t = cselib_subst_to_values (XEXP (x, i), memmode);

          if (t != XEXP (x, i))
            {
              if (x == copy)
                copy = shallow_copy_rtx (x);
              XEXP (copy, i) = t;
            }
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = 0; j < XVECLEN (x, i); j++)
            {
              rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);

              if (t != XVECEXP (x, i, j))
                {
                  if (XVEC (x, i) == XVEC (copy, i))
                    {
                      if (x == copy)
                        copy = shallow_copy_rtx (x);
                      XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
                    }
                  XVECEXP (copy, i, j) = t;
                }
            }
        }
    }

  return copy;
}

/* Wrapper for cselib_subst_to_values, that indicates X is in INSN.  */

rtx
cselib_subst_to_values_from_insn (rtx x, enum machine_mode memmode, rtx insn)
{
  rtx ret;
  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;
  ret = cselib_subst_to_values (x, memmode);
  cselib_current_insn = NULL;
  return ret;
}
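/* E.g. (plus:SI (reg:SI 1) (mem:SI (reg:SI 2))) becomes
   (plus:SI (value V1) (value V2)), so the result stays meaningful even
   after reg 1 or the memory location is overwritten.  (Illustrative
   example of the substitution above.)  */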
   */

static cselib_val *
cselib_lookup_1 (rtx x, enum machine_mode mode,
		 int create, enum machine_mode memmode)
{
  void **slot;
  cselib_val *e;
  unsigned int hashval;

  if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  if (GET_CODE (x) == VALUE)
    return CSELIB_VAL_PTR (x);

  if (REG_P (x))
    {
      struct elt_list *l;
      unsigned int i = REGNO (x);

      l = REG_VALUES (i);
      if (l && l->elt == NULL)
	l = l->next;
      for (; l; l = l->next)
	if (mode == GET_MODE (l->elt->val_rtx))
	  {
	    promote_debug_loc (l->elt->locs);
	    return l->elt;
	  }

      if (! create)
	return 0;

      if (i < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = hard_regno_nregs[i][mode];

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      e = new_cselib_val (next_uid, GET_MODE (x), x);
      new_elt_loc_list (e, x);
      if (REG_VALUES (i) == 0)
	{
	  /* Maintain the invariant that the first entry of
	     REG_VALUES, if present, must be the value used to set the
	     register, or NULL.  */
	  used_regs[n_used_regs++] = i;
	  REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
	}
      else if (cselib_preserve_constants
	       && GET_MODE_CLASS (mode) == MODE_INT)
	{
	  /* During var-tracking, try harder to find equivalences
	     for SUBREGs.  If a setter sets, say, a DImode register
	     and the user uses that register only in SImode, add a
	     lowpart subreg location.  */
	  struct elt_list *lwider = NULL;
	  l = REG_VALUES (i);
	  if (l && l->elt == NULL)
	    l = l->next;
	  for (; l; l = l->next)
	    if (GET_MODE_CLASS (GET_MODE (l->elt->val_rtx)) == MODE_INT
		&& GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
		   > GET_MODE_SIZE (mode)
		&& (lwider == NULL
		    || GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
		       < GET_MODE_SIZE (GET_MODE (lwider->elt->val_rtx))))
	      {
		struct elt_loc_list *el;
		if (i < FIRST_PSEUDO_REGISTER
		    && hard_regno_nregs[i][GET_MODE (l->elt->val_rtx)] != 1)
		  continue;
		for (el = l->elt->locs; el; el = el->next)
		  if (!REG_P (el->loc))
		    break;
		if (el)
		  lwider = l;
	      }
	  if (lwider)
	    {
	      rtx sub = lowpart_subreg (mode, lwider->elt->val_rtx,
					GET_MODE (lwider->elt->val_rtx));
	      if (sub)
		new_elt_loc_list (e, sub);
	    }
	}
      REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
      slot = cselib_find_slot (x, e->hash, INSERT, memmode);
      *slot = e;
      return e;
    }

  if (MEM_P (x))
    return cselib_lookup_mem (x, create);

  hashval = cselib_hash_rtx (x, create, memmode);
  /* Can't even create if hashing is not possible.  */
  if (! hashval)
    return 0;

  slot = cselib_find_slot (wrap_constant (mode, x), hashval,
			   create ? INSERT : NO_INSERT, memmode);
  if (slot == 0)
    return 0;

  e = (cselib_val *) *slot;
  if (e)
    return e;

  e = new_cselib_val (hashval, mode, x);

  /* We have to fill the slot before calling cselib_subst_to_values:
     the hash table is inconsistent until we do so, and
     cselib_subst_to_values will need to do lookups.  */
  *slot = (void *) e;
  new_elt_loc_list (e, cselib_subst_to_values (x, memmode));
  return e;
}

/* Wrapper for cselib_lookup, that indicates X is in INSN.
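
   The insn affects only bookkeeping: it becomes the SETTING_INSN of
   any loc list entries created during the lookup; the lookup result
   itself is the same as for a plain cselib_lookup call.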
   */

cselib_val *
cselib_lookup_from_insn (rtx x, enum machine_mode mode,
			 int create, enum machine_mode memmode, rtx insn)
{
  cselib_val *ret;

  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;

  ret = cselib_lookup (x, mode, create, memmode);

  cselib_current_insn = NULL;

  return ret;
}

/* Wrapper for cselib_lookup_1, that logs the lookup result and
   maintains invariants related to debug insns.  */

cselib_val *
cselib_lookup (rtx x, enum machine_mode mode,
	       int create, enum machine_mode memmode)
{
  cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);

  /* ??? Should we return NULL if we're not supposed to create an
     entry, the found loc is a debug loc and cselib_current_insn is
     not DEBUG?  If so, we should also avoid converting val to
     non-DEBUG; probably easiest by setting cselib_current_insn to
     NULL before the call above.  */

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fputs ("cselib lookup ", dump_file);
      print_inline_rtx (dump_file, x, 2);
      fprintf (dump_file, " => %u:%u\n",
	       ret ? ret->uid : 0,
	       ret ? ret->hash : 0);
    }

  return ret;
}

/* Invalidate any entries in reg_values that overlap REGNO.  This is called
   if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
   is used to determine how many hard registers are being changed.  If MODE
   is VOIDmode, then only REGNO is being changed; this is used when
   invalidating call clobbered registers across a call.  */

static void
cselib_invalidate_regno (unsigned int regno, enum machine_mode mode)
{
  unsigned int endregno;
  unsigned int i;

  /* If we see pseudos after reload, something is _wrong_.  */
  gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
	      || reg_renumber[regno] < 0);

  /* Determine the range of registers that must be invalidated.  For
     pseudos, only REGNO is affected.  For hard regs, we must take MODE
     into account, and we must also invalidate lower register numbers
     if they contain values that overlap REGNO.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      gcc_assert (mode != VOIDmode);

      if (regno < max_value_regs)
	i = 0;
      else
	i = regno - max_value_regs;

      endregno = end_hard_regno (mode, regno);
    }
  else
    {
      i = regno;
      endregno = regno + 1;
    }

  for (; i < endregno; i++)
    {
      struct elt_list **l = &REG_VALUES (i);

      /* Go through all known values for this reg; if it overlaps the range
	 we're invalidating, remove the value.  */
      while (*l)
	{
	  cselib_val *v = (*l)->elt;
	  bool had_locs;
	  rtx setting_insn;
	  struct elt_loc_list **p;
	  unsigned int this_last = i;

	  if (i < FIRST_PSEUDO_REGISTER && v != NULL)
	    this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;

	  if (this_last < regno || v == NULL
	      || (v == cfa_base_preserved_val
		  && i == cfa_base_preserved_regno))
	    {
	      l = &(*l)->next;
	      continue;
	    }

	  /* We have an overlap.  */
	  if (*l == REG_VALUES (i))
	    {
	      /* Maintain the invariant that the first entry of
		 REG_VALUES, if present, must be the value used to set
		 the register, or NULL.  This is also nice because
		 then we won't push the same regno onto used_regs
		 multiple times.
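		 Rather than unchain the head node here, we only clear
		 its ELT field below; the node itself is kept so that a
		 subsequent set of this register can reuse it without
		 pushing the regno onto used_regs again.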
*/ 2179 (*l)->elt = NULL; 2180 l = &(*l)->next; 2181 } 2182 else 2183 unchain_one_elt_list (l); 2184 2185 v = canonical_cselib_val (v); 2186 2187 had_locs = v->locs != NULL; 2188 setting_insn = v->locs ? v->locs->setting_insn : NULL; 2189 2190 /* Now, we clear the mapping from value to reg. It must exist, so 2191 this code will crash intentionally if it doesn't. */ 2192 for (p = &v->locs; ; p = &(*p)->next) 2193 { 2194 rtx x = (*p)->loc; 2195 2196 if (REG_P (x) && REGNO (x) == i) 2197 { 2198 unchain_one_elt_loc_list (p); 2199 break; 2200 } 2201 } 2202 2203 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx)) 2204 { 2205 if (setting_insn && DEBUG_INSN_P (setting_insn)) 2206 n_useless_debug_values++; 2207 else 2208 n_useless_values++; 2209 } 2210 } 2211 } 2212 } 2213 2214 /* Invalidate any locations in the table which are changed because of a 2215 store to MEM_RTX. If this is called because of a non-const call 2216 instruction, MEM_RTX is (mem:BLK const0_rtx). */ 2217 2218 static void 2219 cselib_invalidate_mem (rtx mem_rtx) 2220 { 2221 cselib_val **vp, *v, *next; 2222 int num_mems = 0; 2223 rtx mem_addr; 2224 2225 mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0))); 2226 mem_rtx = canon_rtx (mem_rtx); 2227 2228 vp = &first_containing_mem; 2229 for (v = *vp; v != &dummy_val; v = next) 2230 { 2231 bool has_mem = false; 2232 struct elt_loc_list **p = &v->locs; 2233 bool had_locs = v->locs != NULL; 2234 rtx setting_insn = v->locs ? v->locs->setting_insn : NULL; 2235 2236 while (*p) 2237 { 2238 rtx x = (*p)->loc; 2239 cselib_val *addr; 2240 struct elt_list **mem_chain; 2241 2242 /* MEMs may occur in locations only at the top level; below 2243 that every MEM or REG is substituted by its VALUE. */ 2244 if (!MEM_P (x)) 2245 { 2246 p = &(*p)->next; 2247 continue; 2248 } 2249 if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS) 2250 && ! canon_true_dependence (mem_rtx, GET_MODE (mem_rtx), 2251 mem_addr, x, NULL_RTX)) 2252 { 2253 has_mem = true; 2254 num_mems++; 2255 p = &(*p)->next; 2256 continue; 2257 } 2258 2259 /* This one overlaps. */ 2260 /* We must have a mapping from this MEM's address to the 2261 value (E). Remove that, too. */ 2262 addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x)); 2263 addr = canonical_cselib_val (addr); 2264 gcc_checking_assert (v == canonical_cselib_val (v)); 2265 mem_chain = &addr->addr_list; 2266 for (;;) 2267 { 2268 cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt); 2269 2270 if (canon == v) 2271 { 2272 unchain_one_elt_list (mem_chain); 2273 break; 2274 } 2275 2276 /* Record canonicalized elt. */ 2277 (*mem_chain)->elt = canon; 2278 2279 mem_chain = &(*mem_chain)->next; 2280 } 2281 2282 unchain_one_elt_loc_list (p); 2283 } 2284 2285 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx)) 2286 { 2287 if (setting_insn && DEBUG_INSN_P (setting_insn)) 2288 n_useless_debug_values++; 2289 else 2290 n_useless_values++; 2291 } 2292 2293 next = v->next_containing_mem; 2294 if (has_mem) 2295 { 2296 *vp = v; 2297 vp = &(*vp)->next_containing_mem; 2298 } 2299 else 2300 v->next_containing_mem = NULL; 2301 } 2302 *vp = &dummy_val; 2303 } 2304 2305 /* Invalidate DEST, which is being assigned to or clobbered. 
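   DEST may be wrapped in a SUBREG, ZERO_EXTRACT or STRICT_LOW_PART;
   those wrappers are peeled off first, so e.g. a store to
   (subreg:SI (reg:DI 70) 0) conservatively invalidates everything
   known about (reg:DI 70).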
*/ 2306 2307 void 2308 cselib_invalidate_rtx (rtx dest) 2309 { 2310 while (GET_CODE (dest) == SUBREG 2311 || GET_CODE (dest) == ZERO_EXTRACT 2312 || GET_CODE (dest) == STRICT_LOW_PART) 2313 dest = XEXP (dest, 0); 2314 2315 if (REG_P (dest)) 2316 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest)); 2317 else if (MEM_P (dest)) 2318 cselib_invalidate_mem (dest); 2319 } 2320 2321 /* A wrapper for cselib_invalidate_rtx to be called via note_stores. */ 2322 2323 static void 2324 cselib_invalidate_rtx_note_stores (rtx dest, const_rtx ignore ATTRIBUTE_UNUSED, 2325 void *data ATTRIBUTE_UNUSED) 2326 { 2327 cselib_invalidate_rtx (dest); 2328 } 2329 2330 /* Record the result of a SET instruction. DEST is being set; the source 2331 contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT 2332 describes its address. */ 2333 2334 static void 2335 cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt) 2336 { 2337 int dreg = REG_P (dest) ? (int) REGNO (dest) : -1; 2338 2339 if (src_elt == 0 || side_effects_p (dest)) 2340 return; 2341 2342 if (dreg >= 0) 2343 { 2344 if (dreg < FIRST_PSEUDO_REGISTER) 2345 { 2346 unsigned int n = hard_regno_nregs[dreg][GET_MODE (dest)]; 2347 2348 if (n > max_value_regs) 2349 max_value_regs = n; 2350 } 2351 2352 if (REG_VALUES (dreg) == 0) 2353 { 2354 used_regs[n_used_regs++] = dreg; 2355 REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt); 2356 } 2357 else 2358 { 2359 /* The register should have been invalidated. */ 2360 gcc_assert (REG_VALUES (dreg)->elt == 0); 2361 REG_VALUES (dreg)->elt = src_elt; 2362 } 2363 2364 if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx)) 2365 n_useless_values--; 2366 new_elt_loc_list (src_elt, dest); 2367 } 2368 else if (MEM_P (dest) && dest_addr_elt != 0 2369 && cselib_record_memory) 2370 { 2371 if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx)) 2372 n_useless_values--; 2373 add_mem_for_addr (dest_addr_elt, src_elt, dest); 2374 } 2375 } 2376 2377 /* Make ELT and X's VALUE equivalent to each other at INSN. */ 2378 2379 void 2380 cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx insn) 2381 { 2382 cselib_val *nelt; 2383 rtx save_cselib_current_insn = cselib_current_insn; 2384 2385 gcc_checking_assert (elt); 2386 gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx)); 2387 gcc_checking_assert (!side_effects_p (x)); 2388 2389 cselib_current_insn = insn; 2390 2391 nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode); 2392 2393 if (nelt != elt) 2394 { 2395 cselib_any_perm_equivs = true; 2396 2397 if (!PRESERVED_VALUE_P (nelt->val_rtx)) 2398 cselib_preserve_value (nelt); 2399 2400 new_elt_loc_list (nelt, elt->val_rtx); 2401 } 2402 2403 cselib_current_insn = save_cselib_current_insn; 2404 } 2405 2406 /* Return TRUE if any permanent equivalences have been recorded since 2407 the table was last initialized. */ 2408 bool 2409 cselib_have_permanent_equivalences (void) 2410 { 2411 return cselib_any_perm_equivs; 2412 } 2413 2414 /* There is no good way to determine how many elements there can be 2415 in a PARALLEL. Since it's fairly cheap, use a really large number. */ 2416 #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2) 2417 2418 struct cselib_record_autoinc_data 2419 { 2420 struct cselib_set *sets; 2421 int n_sets; 2422 }; 2423 2424 /* Callback for for_each_inc_dec. Records in ARG the SETs implied by 2425 autoinc RTXs: SRC plus SRCOFF if non-NULL is stored in DEST. 
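   That is, each autoinc is recorded as if it were an explicit set.
   E.g. for a 4-byte access (illustrative),
   (mem:SI (post_inc:SI (reg:SI 1))) implies the recorded set
   (set (reg:SI 1) (plus:SI (reg:SI 1) (const_int 4))), which is
   appended to the SETS array behind the insn's ordinary sets.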
   */

static int
cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
			  rtx dest, rtx src, rtx srcoff, void *arg)
{
  struct cselib_record_autoinc_data *data;
  data = (struct cselib_record_autoinc_data *)arg;

  data->sets[data->n_sets].dest = dest;

  if (srcoff)
    data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
  else
    data->sets[data->n_sets].src = src;

  data->n_sets++;

  return -1;
}

/* Record the effects of any sets and autoincs in INSN.  */
static void
cselib_record_sets (rtx insn)
{
  int n_sets = 0;
  int i;
  struct cselib_set sets[MAX_SETS];
  rtx body = PATTERN (insn);
  rtx cond = 0;
  int n_sets_before_autoinc;
  struct cselib_record_autoinc_data data;

  if (GET_CODE (body) == COND_EXEC)
    {
      cond = COND_EXEC_TEST (body);
      body = COND_EXEC_CODE (body);
    }

  /* Find all sets.  */
  if (GET_CODE (body) == SET)
    {
      sets[0].src = SET_SRC (body);
      sets[0].dest = SET_DEST (body);
      n_sets = 1;
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      /* Look through the PARALLEL and record the values being
	 set, if possible.  CLOBBERs are invalidated later, via
	 note_stores.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	{
	  rtx x = XVECEXP (body, 0, i);

	  if (GET_CODE (x) == SET)
	    {
	      sets[n_sets].src = SET_SRC (x);
	      sets[n_sets].dest = SET_DEST (x);
	      n_sets++;
	    }
	}
    }

  if (n_sets == 1
      && MEM_P (sets[0].src)
      && !cselib_record_memory
      && MEM_READONLY_P (sets[0].src))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	sets[0].src = XEXP (note, 0);
    }

  data.sets = sets;
  data.n_sets = n_sets_before_autoinc = n_sets;
  for_each_inc_dec (&insn, cselib_record_autoinc_cb, &data);
  n_sets = data.n_sets;

  /* Look up the values that are read.  Do this before invalidating the
     locations that are written.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;

      /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
	 the low part after invalidating any knowledge about larger modes.  */
      if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
	sets[i].dest = dest = XEXP (dest, 0);

      /* We don't know how to record anything but REG or MEM.  */
      if (REG_P (dest)
	  || (MEM_P (dest) && cselib_record_memory))
	{
	  rtx src = sets[i].src;
	  if (cond)
	    src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
	  sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
	  if (MEM_P (dest))
	    {
	      enum machine_mode address_mode
		= targetm.addr_space.address_mode (MEM_ADDR_SPACE (dest));

	      sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
						     address_mode, 1,
						     GET_MODE (dest));
	    }
	  else
	    sets[i].dest_addr_elt = 0;
	}
    }

  if (cselib_record_sets_hook)
    cselib_record_sets_hook (insn, sets, n_sets);

  /* Invalidate all locations written by this insn.  Note that the elts we
     looked up in the previous loop aren't affected, just some of their
     locations may go away.
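     E.g. for (set (reg:SI 1) (plus:SI (reg:SI 1) (const_int 1))) the
     VALUE computed for the source survives the invalidation of
     (reg:SI 1); cselib_record_set below then records (reg:SI 1) as a
     location of that VALUE.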
*/ 2544 note_stores (body, cselib_invalidate_rtx_note_stores, NULL); 2545 2546 for (i = n_sets_before_autoinc; i < n_sets; i++) 2547 cselib_invalidate_rtx (sets[i].dest); 2548 2549 /* If this is an asm, look for duplicate sets. This can happen when the 2550 user uses the same value as an output multiple times. This is valid 2551 if the outputs are not actually used thereafter. Treat this case as 2552 if the value isn't actually set. We do this by smashing the destination 2553 to pc_rtx, so that we won't record the value later. */ 2554 if (n_sets >= 2 && asm_noperands (body) >= 0) 2555 { 2556 for (i = 0; i < n_sets; i++) 2557 { 2558 rtx dest = sets[i].dest; 2559 if (REG_P (dest) || MEM_P (dest)) 2560 { 2561 int j; 2562 for (j = i + 1; j < n_sets; j++) 2563 if (rtx_equal_p (dest, sets[j].dest)) 2564 { 2565 sets[i].dest = pc_rtx; 2566 sets[j].dest = pc_rtx; 2567 } 2568 } 2569 } 2570 } 2571 2572 /* Now enter the equivalences in our tables. */ 2573 for (i = 0; i < n_sets; i++) 2574 { 2575 rtx dest = sets[i].dest; 2576 if (REG_P (dest) 2577 || (MEM_P (dest) && cselib_record_memory)) 2578 cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt); 2579 } 2580 } 2581 2582 /* Record the effects of INSN. */ 2583 2584 void 2585 cselib_process_insn (rtx insn) 2586 { 2587 int i; 2588 rtx x; 2589 2590 cselib_current_insn = insn; 2591 2592 /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp. */ 2593 if (LABEL_P (insn) 2594 || (CALL_P (insn) 2595 && find_reg_note (insn, REG_SETJMP, NULL)) 2596 || (NONJUMP_INSN_P (insn) 2597 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS 2598 && MEM_VOLATILE_P (PATTERN (insn)))) 2599 { 2600 cselib_reset_table (next_uid); 2601 cselib_current_insn = NULL_RTX; 2602 return; 2603 } 2604 2605 if (! INSN_P (insn)) 2606 { 2607 cselib_current_insn = NULL_RTX; 2608 return; 2609 } 2610 2611 /* If this is a call instruction, forget anything stored in a 2612 call clobbered register, or, if this is not a const call, in 2613 memory. */ 2614 if (CALL_P (insn)) 2615 { 2616 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 2617 if (call_used_regs[i] 2618 || (REG_VALUES (i) && REG_VALUES (i)->elt 2619 && HARD_REGNO_CALL_PART_CLOBBERED (i, 2620 GET_MODE (REG_VALUES (i)->elt->val_rtx)))) 2621 cselib_invalidate_regno (i, reg_raw_mode[i]); 2622 2623 /* Since it is not clear how cselib is going to be used, be 2624 conservative here and treat looping pure or const functions 2625 as if they were regular functions. */ 2626 if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn) 2627 || !(RTL_CONST_OR_PURE_CALL_P (insn))) 2628 cselib_invalidate_mem (callmem); 2629 } 2630 2631 cselib_record_sets (insn); 2632 2633 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only 2634 after we have processed the insn. */ 2635 if (CALL_P (insn)) 2636 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1)) 2637 if (GET_CODE (XEXP (x, 0)) == CLOBBER) 2638 cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0)); 2639 2640 cselib_current_insn = NULL_RTX; 2641 2642 if (n_useless_values > MAX_USELESS_VALUES 2643 /* remove_useless_values is linear in the hash table size. Avoid 2644 quadratic behavior for very large hashtables with very few 2645 useless elements. */ 2646 && ((unsigned int)n_useless_values 2647 > (cselib_hash_table->n_elements 2648 - cselib_hash_table->n_deleted 2649 - n_debug_values) / 4)) 2650 remove_useless_values (); 2651 } 2652 2653 /* Initialize cselib for one pass. The caller must also call 2654 init_alias_analysis. 
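
   A typical driver is sketched below (illustrative only; the flags
   and the basic-block walk depend on the pass):

	cselib_init (CSELIB_RECORD_MEMORY);
	FOR_EACH_BB (bb)
	  FOR_BB_INSNS (bb, insn)
	    if (INSN_P (insn))
	      cselib_process_insn (insn);
	cselib_finish ();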
   */

void
cselib_init (int record_what)
{
  elt_list_pool = create_alloc_pool ("elt_list",
				     sizeof (struct elt_list), 10);
  elt_loc_list_pool = create_alloc_pool ("elt_loc_list",
					 sizeof (struct elt_loc_list), 10);
  cselib_val_pool = create_alloc_pool ("cselib_val_list",
				       sizeof (cselib_val), 10);
  value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100);
  cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
  cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
  cselib_any_perm_equivs = false;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
     see canon_true_dependence.  This is only created once.  */
  if (! callmem)
    callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));

  cselib_nregs = max_reg_num ();

  /* We preserve reg_values between passes to avoid having to clear
     and reallocate the whole array every time.  Reallocate it,
     however, if it happens to be too large.  */
  if (!reg_values || reg_values_size < cselib_nregs
      || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
    {
      free (reg_values);
      /* Leave some space for newly emitted instructions so we don't
	 end up reallocating in between passes.  */
      reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
      reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
    }
  used_regs = XNEWVEC (unsigned int, cselib_nregs);
  n_used_regs = 0;
  cselib_hash_table = htab_create (31, get_value_hash,
				   entry_and_rtx_equal_p, NULL);
  next_uid = 1;
}

/* Called when the current user is done with cselib.  */

void
cselib_finish (void)
{
  cselib_discard_hook = NULL;
  cselib_preserve_constants = false;
  cselib_any_perm_equivs = false;
  cfa_base_preserved_val = NULL;
  cfa_base_preserved_regno = INVALID_REGNUM;
  free_alloc_pool (elt_list_pool);
  free_alloc_pool (elt_loc_list_pool);
  free_alloc_pool (cselib_val_pool);
  free_alloc_pool (value_pool);
  cselib_clear_table ();
  htab_delete (cselib_hash_table);
  free (used_regs);
  used_regs = 0;
  cselib_hash_table = 0;
  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;
  next_uid = 0;
}

/* Dump the cselib_val *X to FILE *INFO.
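   This is an htab_traverse callback; it always returns 1 so that the
   traversal visits every element in the hash table.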
   */

static int
dump_cselib_val (void **x, void *info)
{
  cselib_val *v = (cselib_val *)*x;
  FILE *out = (FILE *)info;
  bool need_lf = true;

  print_inline_rtx (out, v->val_rtx, 0);

  if (v->locs)
    {
      struct elt_loc_list *l = v->locs;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" locs:", out);
      do
	{
	  if (l->setting_insn)
	    fprintf (out, "\n  from insn %i ",
		     INSN_UID (l->setting_insn));
	  else
	    fprintf (out, "\n   ");
	  print_inline_rtx (out, l->loc, 4);
	}
      while ((l = l->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no locs", out);
      need_lf = true;
    }

  if (v->addr_list)
    {
      struct elt_list *e = v->addr_list;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" addr list:", out);
      do
	{
	  fputs ("\n  ", out);
	  print_inline_rtx (out, e->elt->val_rtx, 2);
	}
      while ((e = e->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no addrs", out);
      need_lf = true;
    }

  if (v->next_containing_mem == &dummy_val)
    fputs (" last mem\n", out);
  else if (v->next_containing_mem)
    {
      fputs (" next mem ", out);
      print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  else if (need_lf)
    fputc ('\n', out);

  return 1;
}

/* Dump to OUT everything in the CSELIB table.  */

void
dump_cselib_table (FILE *out)
{
  fprintf (out, "cselib hash table:\n");
  htab_traverse (cselib_hash_table, dump_cselib_val, out);
  if (first_containing_mem != &dummy_val)
    {
      fputs ("first mem ", out);
      print_inline_rtx (out, first_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  fprintf (out, "next uid %u\n", next_uid);
}

#include "gt-cselib.h"