/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Utility functions for manipulating TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "memmodel.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "recog.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-ssa-loop-ivopts.h"
#include "expr.h"
#include "tree-dfa.h"
#include "dumpfile.h"
#include "tree-affine.h"
#include "gimplify.h"

/* FIXME: We compute address costs using RTL.  */
#include "tree-ssa-address.h"

/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, e.g. local statics
     (3) !binds_local_p, e.g. global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results out of the
   back end, because the expander in expr.c validizes the address.  However
   it would be nice to improve the handling here in order to produce more
   precise results.  */

/* A "template" for a memory address, used to determine whether the address
   is valid for mode.  */

struct GTY (()) mem_addr_template {
  rtx ref;                      /* The template.  */
  rtx * GTY ((skip)) step_p;    /* The point in template where the step should
                                   be filled in.  */
  rtx * GTY ((skip)) off_p;     /* The point in template where the offset
                                   should be filled in.  */
};


/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;

#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
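/* Illustrative example (a derived computation, not part of the original
   sources): a reference in the generic address space with a base, an index
   and an offset, but no symbol and no step, selects template slot

     TEMPL_IDX (0, NULL, base, index, NULL, offset)
       == (0 << 5) | (0 << 4) | (1 << 3) | (1 << 2) | (0 << 1) | 1
       == 13.  */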
/* Stores the address for a memory reference with parameters SYMBOL, BASE,
   INDEX, STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores
   a pointer to where the step is placed in *STEP_P and to where the offset
   is placed in *OFFSET_P.  */

static void
gen_addr_rtx (machine_mode address_mode,
              rtx symbol, rtx base, rtx index, rtx step, rtx offset,
              rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index && index != const0_rtx)
    {
      act_elem = index;
      if (step)
        {
          act_elem = gen_rtx_MULT (address_mode, act_elem, step);

          if (step_p)
            *step_p = &XEXP (act_elem, 1);
        }

      *addr = act_elem;
    }

  if (base && base != const0_rtx)
    {
      if (*addr)
        *addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
        *addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
        {
          act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

          if (offset_p)
            *offset_p = &XEXP (act_elem, 1);

          if (GET_CODE (symbol) == SYMBOL_REF
              || GET_CODE (symbol) == LABEL_REF
              || GET_CODE (symbol) == CONST)
            act_elem = gen_rtx_CONST (address_mode, act_elem);
        }

      if (*addr)
        *addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
        *addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
        {
          *addr = gen_rtx_PLUS (address_mode, *addr, offset);
          if (offset_p)
            *offset_p = &XEXP (*addr, 1);
        }
      else
        {
          *addr = offset;
          if (offset_p)
            *offset_p = addr;
        }
    }

  if (!*addr)
    *addr = const0_rtx;
}
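/* A sketch of the result (assuming all five components are present and
   simplify_gen_binary does not reassociate the PLUS chain): gen_addr_rtx
   builds an address of roughly the shape

     (plus (plus (mult index step) base)
           (const (plus symbol offset)))

   with *STEP_P pointing at the STEP operand of the MULT and *OFFSET_P at
   the OFFSET operand inside the CONST.  */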
/* Returns the address for a TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
                  bool really_expand)
{
  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
  scalar_int_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (wi::to_wide (addr->step), pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    {
      poly_offset_int dc
        = poly_offset_int::from (wi::to_poly_wide (addr->offset), SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
        = TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
        vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
        {
          sym = (addr->symbol
                 ? gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
                 : NULL_RTX);
          bse = (addr->base
                 ? gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
                 : NULL_RTX);
          idx = (addr->index
                 ? gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
                 : NULL_RTX);

          gen_addr_rtx (pointer_mode, sym, bse, idx,
                        st ? const0_rtx : NULL_RTX,
                        off ? const0_rtx : NULL_RTX,
                        &templ->ref,
                        &templ->step_p,
                        &templ->off_p);
        }

      if (st)
        *templ->step_p = st;
      if (off)
        *templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
         ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);
  bse = (addr->base
         ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);
  idx = (addr->index
         ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}

/* Implement addr_for_mem_ref () directly from a tree, which avoids exporting
   the mem_address structure.  */

rtx
addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
{
  struct mem_address addr;
  get_address_description (exp, &addr);
  return addr_for_mem_ref (&addr, as, really_expand);
}

/* Returns the address of the memory reference MEM_REF, as a value of
   type TYPE.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
        act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
                                act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
        addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
                                addr_off, act_elem);
      else
        addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
        addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
                                fold_convert (TREE_TYPE (addr_off), offset));
      else
        addr_off = offset;
    }

  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}

/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target.  */

bool
valid_mem_ref_p (machine_mode mode, addr_space_t as,
                 struct mem_address *addr)
{
  rtx address;

  address = addr_for_mem_ref (addr, as, false);
  if (!address)
    return false;

  return memory_address_addr_space_p (mode, address, as);
}
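/* Hypothetical usage sketch: to ask whether [reg1 + reg2 * 4 + 8] is a
   legitimate SImode address in the generic address space, a caller would
   fill a mem_address with a base, an index, step 4 and offset 8 and call
   valid_mem_ref_p; since that passes REALLY_EXPAND == false, the check
   reuses the shared template above instead of generating fresh RTL on
   every query.  */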
/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false, omit the verification step.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
                    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
           && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (build_pointer_type (type), 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ??? As IVOPTs does not follow restrictions to where the base
     pointer may point to create a MEM_REF only if we know that
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
                 base, addr->offset, addr->index, addr->step, index2);
}

/* Returns true if OBJ is an object whose address is a link time constant.  */

static bool
fixed_address_object_p (tree obj)
{
  return (VAR_P (obj)
          && (TREE_STATIC (obj) || DECL_EXTERNAL (obj))
          && ! DECL_DLLIMPORT_P (obj));
}

/* If ADDR contains an address of an object that is a link time constant,
   move it to PARTS->symbol.  */

void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
        continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
          && fixed_address_object_p (TREE_OPERAND (val, 0)))
        break;
    }

  if (i == addr->n)
    return;

  parts->symbol = val;
  aff_combination_remove_elt (addr, i);
}

/* If ADDR contains an instance of BASE_HINT, move it to PARTS->base and
   return true.  */

static bool
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
                   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
        continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
        break;
    }

  if (i == addr->n)
    return false;

  /* Cast value to appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back-end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
  return true;
}
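/* Illustration with made-up operands: for the affine form
   ADDR = &g + p_3 + 4 * i_5, with BASE_HINT == p_3 and g a static variable,
   move_fixed_address_to_symbol extracts &g into PARTS->symbol, and
   move_hint_to_base then extracts p_3 into PARTS->base (as a void pointer
   in the type's address space), leaving ADDR = 4 * i_5 behind for the
   index-selection helpers below.  */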
/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
        continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
        break;
    }

  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}

/* Moves the loop variant part V in linear address ADDR to be the index
   of PARTS.  */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
        break;
    }

  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}

/* Adds ELT to PARTS.  */

static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build_pointer_plus (parts->base, elt);
  else
    parts->base = fold_build2 (PLUS_EXPR, type, parts->base, elt);
}

/* Returns true if multiplying by RATIO is allowed in an address.  Test the
   validity for a memory reference accessing memory of mode MODE in address
   space AS.  */

static bool
multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, machine_mode mode,
                                 addr_space_t as)
{
#define MAX_RATIO 128
  unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mode;
  static vec<sbitmap> valid_mult_list;
  sbitmap valid_mult;

  if (data_index >= valid_mult_list.length ())
    valid_mult_list.safe_grow_cleared (data_index + 1);

  valid_mult = valid_mult_list[data_index];
  if (!valid_mult)
    {
      machine_mode address_mode = targetm.addr_space.address_mode (as);
      rtx reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
      rtx reg2 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2);
      rtx addr, scaled;
      HOST_WIDE_INT i;

      valid_mult = sbitmap_alloc (2 * MAX_RATIO + 1);
      bitmap_clear (valid_mult);
      scaled = gen_rtx_fmt_ee (MULT, address_mode, reg1, NULL_RTX);
      addr = gen_rtx_fmt_ee (PLUS, address_mode, scaled, reg2);
      for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
        {
          XEXP (scaled, 1) = gen_int_mode (i, address_mode);
          if (memory_address_addr_space_p (mode, addr, as)
              || memory_address_addr_space_p (mode, scaled, as))
            bitmap_set_bit (valid_mult, i + MAX_RATIO);
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "  allowed multipliers:");
          for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
            if (bitmap_bit_p (valid_mult, i + MAX_RATIO))
              fprintf (dump_file, " %d", (int) i);
          fprintf (dump_file, "\n");
          fprintf (dump_file, "\n");
        }

      valid_mult_list[data_index] = valid_mult;
    }

  if (ratio > MAX_RATIO || ratio < -MAX_RATIO)
    return false;

  return bitmap_bit_p (valid_mult, ratio + MAX_RATIO);
}
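/* For instance, on x86 the scaled-index addressing modes accept scale
   factors 1, 2, 4 and 8, so there one would expect exactly those
   multipliers to end up set in the bitmap computed above (an illustrative
   expectation, not something this file asserts).  */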
/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and moves the corresponding
   element(s) to PARTS.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
                              aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  offset_int best_mult = 0;
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
        continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
          || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
        continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
        {
          best_mult_cost = acost;
          best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
        }
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      if (amult == best_mult)
        op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
        op_code = MINUS_EXPR;
      else
        {
          addr->elts[j] = addr->elts[i];
          j++;
          continue;
        }

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
        mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
        mult_elt = elt;
      else
        mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}
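/* A hypothetical run: for ADDR = p_1 + 4 * i_2 + 4 * j_3 on a target where
   multiplying by 4 is allowed in addresses, both scaled elements share the
   best coefficient, so PARTS->index becomes (sizetype) i_2 + (sizetype) j_3
   with PARTS->step == 4, and only p_1 remains in ADDR.  */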
/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as the base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.  Stores true in *VAR_IN_BASE if the variant
   part of the address is split into PARTS.base.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but a single
   register in an address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand, tree base_hint,
               struct mem_address *parts, bool *var_in_base, bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (maybe_ne (addr->offset, 0))
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* Since at the moment there is no reliable way to distinguish between
     a pointer and its offset, we guess whether the variant part is the
     pointer based on BASE_HINT.  */
  *var_in_base = (base_hint != NULL && parts->symbol == NULL);
  if (*var_in_base)
    *var_in_base = move_hint_to_base (type, parts, base_hint, addr);
  else
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Move pointer into base.  */
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
        part = fold_build2 (MULT_EXPR, sizetype, part,
                            wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}

/* Force the components of PARTS into registers as needed.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
                                              is_gimple_mem_ref_addr, NULL_TREE,
                                              true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
                                             true, NULL_TREE,
                                             true, GSI_SAME_STMT);
}

/* Return true if the OFFSET in PARTS is the only thing that is making
   it an invalid address for type TYPE.  */

static bool
mem_ref_valid_without_offset_p (tree type, mem_address parts)
{
  if (!parts.base)
    parts.base = parts.offset;
  parts.offset = NULL_TREE;
  return valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), &parts);
}

/* Fold PARTS->offset into PARTS->base, so that there is no longer
   a separate offset.  Emit any new instructions before GSI.  */

static void
add_offset_to_base (gimple_stmt_iterator *gsi, mem_address *parts)
{
  tree tmp = parts->offset;
  if (parts->base)
    {
      tmp = fold_build_pointer_plus (parts->base, tmp);
      tmp = force_gimple_operand_gsi_1 (gsi, tmp, is_gimple_mem_ref_addr,
                                        NULL_TREE, true, GSI_SAME_STMT);
    }
  parts->base = tmp;
  parts->offset = NULL_TREE;
}
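/* The two helpers above support a fallback used by create_mem_ref below
   (hypothetical target behavior for illustration): if
   [base + index * step + offset] is rejected only because of the offset,
   the offset is folded into the base with a separate addition and the
   reference is retried in the [base' + index * step] form.  */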
/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
   computations are emitted in front of GSI.  TYPE is the type of the created
   memory reference.  IV_CAND is the selected iv candidate in ADDR, and
   BASE_HINT is non-NULL if IV_CAND comes from a base address object.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
                tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  bool var_in_base;
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, &var_in_base, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  /* Merge symbol into other parts.  */
  if (parts.symbol)
    {
      tmp = parts.symbol;
      parts.symbol = NULL_TREE;
      gcc_assert (is_gimple_val (tmp));

      if (parts.base)
        {
          gcc_assert (useless_type_conversion_p (sizetype,
                                                 TREE_TYPE (parts.base)));

          if (parts.index)
            {
              /* Add the symbol to base, forcing it into a register
                 if necessary.  */
              tmp = fold_build_pointer_plus (tmp, parts.base);
              tmp = force_gimple_operand_gsi_1 (gsi, tmp,
                                                is_gimple_mem_ref_addr,
                                                NULL_TREE, true,
                                                GSI_SAME_STMT);
            }
          else
            {
              /* Move base to index, then move the symbol to base.  */
              parts.index = parts.base;
            }
          parts.base = tmp;
        }
      else
        parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  /* Move multiplication to index by transforming address expression:
       [... + index << step + ...]
     into:
       index' = index << step;
       [... + index' + ...].  */
  if (parts.step && !integer_onep (parts.step))
    {
      gcc_assert (parts.index);
      if (parts.offset && mem_ref_valid_without_offset_p (type, parts))
        {
          add_offset_to_base (gsi, &parts);
          mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
          gcc_assert (mem_ref);
          return mem_ref;
        }

      parts.index = force_gimple_operand_gsi (gsi,
                                              fold_build2 (MULT_EXPR, sizetype,
                                                           parts.index,
                                                           parts.step),
                                              true, NULL_TREE, true,
                                              GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  /* Add offset to invariant part by transforming address expression:
       [base + index + offset]
     into:
       base' = base + offset;
       [base' + index]
     or:
       index' = index + offset;
       [base + index']
     depending on which one is invariant.  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      tree old_base = unshare_expr (parts.base);
      tree old_index = unshare_expr (parts.index);
      tree old_offset = unshare_expr (parts.offset);

      tmp = parts.offset;
      parts.offset = NULL_TREE;
      /* Add offset to invariant part.  */
      if (!var_in_base)
        {
          if (parts.base)
            {
              tmp = fold_build_pointer_plus (parts.base, tmp);
              tmp = force_gimple_operand_gsi_1 (gsi, tmp,
                                                is_gimple_mem_ref_addr,
                                                NULL_TREE, true,
                                                GSI_SAME_STMT);
            }
          parts.base = tmp;
        }
      else
        {
          if (parts.index)
            {
              tmp = fold_build_pointer_plus (parts.index, tmp);
              tmp = force_gimple_operand_gsi_1 (gsi, tmp,
                                                is_gimple_mem_ref_addr,
                                                NULL_TREE, true,
                                                GSI_SAME_STMT);
            }
          parts.index = tmp;
        }

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;

      /* Restore parts.base, index and offset so that we can check if
         [base + offset] addressing mode is supported in the next step.
         This is necessary for targets that only support [base + offset],
         but not [base + index] addressing mode.  */
      parts.base = old_base;
      parts.index = old_index;
      parts.offset = old_offset;
    }

  /* Transform [base + index + ...] into:
       base' = base + index;
       [base' + ...].  */
  if (parts.index)
    {
      tmp = parts.index;
      parts.index = NULL_TREE;
      /* Add index to base.  */
      if (parts.base)
        {
          tmp = fold_build_pointer_plus (parts.base, tmp);
          tmp = force_gimple_operand_gsi_1 (gsi, tmp,
                                            is_gimple_mem_ref_addr,
                                            NULL_TREE, true, GSI_SAME_STMT);
        }
      parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  /* Transform [base + offset] into:
       base' = base + offset;
       [base'].  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      add_offset_to_base (gsi, &parts);
      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
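/* To summarize the fallback cascade above on a hypothetical address
   sym + base + index * step + offset that the target rejects wholesale:
   the symbol is first merged into the base, then the multiplication is
   emitted as a separate statement, then the offset is folded into the
   invariant part, and finally the index is added to the base, ending in
   the trivially valid [base'] form.  */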
/* Copies components of the address from OP to ADDR.  */

void
get_address_description (tree op, struct mem_address *addr)
{
  if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
    {
      addr->symbol = TMR_BASE (op);
      addr->base = TMR_INDEX2 (op);
    }
  else
    {
      addr->symbol = NULL_TREE;
      if (TMR_INDEX2 (op))
        {
          gcc_assert (integer_zerop (TMR_BASE (op)));
          addr->base = TMR_INDEX2 (op);
        }
      else
        addr->base = TMR_BASE (op);
    }
  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
}

/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
              || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
        ;
      else if ((TREE_CODE (base) == MEM_REF
                || TREE_CODE (base) == TARGET_MEM_REF)
               && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
               && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
        {
          struct ptr_info_def *new_pi;
          unsigned int align, misalign;

          duplicate_ssa_name_ptr_info
            (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
          new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
          /* We have to be careful about transferring alignment
             information.  */
          if (get_ptr_info_alignment (new_pi, &align, &misalign)
              && TREE_CODE (old_ref) == MEM_REF
              && !(TREE_CODE (new_ref) == TARGET_MEM_REF
                   && (TMR_INDEX2 (new_ref)
                       /* TODO: Below conditions can be relaxed if TMR_INDEX
                          is an induction variable and its initial value and
                          step are aligned.  */
                       || (TMR_INDEX (new_ref) && !TMR_STEP (new_ref))
                       || (TMR_STEP (new_ref)
                           && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
                               < align)))))
            {
              poly_uint64 inc = (mem_ref_offset (old_ref)
                                 - mem_ref_offset (new_ref)).force_uhwi ();
              adjust_ptr_info_misalignment (new_pi, inc);
            }
          else
            mark_ptr_info_alignment_unknown (new_pi);
        }
      else if (VAR_P (base)
               || TREE_CODE (base) == PARM_DECL
               || TREE_CODE (base) == RESULT_DECL)
        {
          struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
          pt_solution_set_var (&pi->pt, base);
        }
    }
}

/* Moves constants in TARGET_MEM_REF REF to its offset.  Returns the new
   target mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
                                             TREE_TYPE (addr.offset),
                                             addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
                        (PLUS_EXPR, TREE_TYPE (addr.offset),
                         addr.offset,
                         TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
           && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      poly_int64 offset;
      addr.symbol = build_fold_addr_expr
                      (get_addr_base_and_unit_offset
                         (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
                                     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
        {
          off = fold_binary_to_constant (MULT_EXPR, sizetype,
                                         off, addr.step);
          addr.step = NULL_TREE;
        }

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
                                             TREE_TYPE (addr.offset),
                                             addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target - the propagation
     result would not be valid anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
                                TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
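/* Illustration with made-up operands: a TARGET_MEM_REF with base &a[7],
   constant index 2 and step 4 would be folded by maybe_fold_tmr into a
   reference with symbol &a, no index, and an offset increased by
   7 * sizeof (a[0]) + 2 * 4.  */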
/* Dump PARTS to FILE.  */

extern void dump_mem_address (FILE *, struct mem_address *);
void
dump_mem_address (FILE *file, struct mem_address *parts)
{
  if (parts->symbol)
    {
      fprintf (file, "symbol: ");
      print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->base)
    {
      fprintf (file, "base: ");
      print_generic_expr (file, parts->base, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->index)
    {
      fprintf (file, "index: ");
      print_generic_expr (file, parts->index, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->step)
    {
      fprintf (file, "step: ");
      print_generic_expr (file, parts->step, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->offset)
    {
      fprintf (file, "offset: ");
      print_generic_expr (file, parts->offset, TDF_SLIM);
      fprintf (file, "\n");
    }
}

#include "gt-tree-ssa-address.h"