/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target.  */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "output.h"
34 #include "diagnostic.h"
35 #include "tree-flow.h"
36 #include "tree-dump.h"
37 #include "tree-pass.h"
38 #include "timevar.h"
39 #include "flags.h"
40 #include "tree-inline.h"
41 #include "insn-config.h"
42 #include "recog.h"
43 #include "expr.h"
44 #include "ggc.h"
45
/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, eg local statics
     (3) !binds_local_p, eg global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results out the
   back end, because the expander in expr.c validizes the address.  However
   it would be nice to improve the handling here in order to produce more
   precise results.  */
69
/* A "template" for a memory address: an RTL skeleton used to check whether
   an address of a given shape is valid for a mode.  The step and the offset
   are patched into the shared RTL in place, through the pointers stored
   below, instead of rebuilding the whole address each time.  */

struct mem_addr_template GTY (())
{
  rtx ref;			/* The template.  */
  rtx * GTY ((skip)) step_p;	/* The point in template where the step should be
				   filled in.  */
  rtx * GTY ((skip)) off_p;	/* The point in template where the offset should
				   be filled in.  */
};

/* The templates.  Each of the five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, see TEMPL_IDX.  */

static GTY (()) struct mem_addr_template templates[32];
86
/* Computes the index into TEMPLATES for an address with the given components
   present (non-NULL/nonzero).  Each of the five bits of the result
   corresponds to one component.  The arguments are fully parenthesized so
   that expression arguments (e.g. "addr->symbol") expand safely.  */

#define TEMPL_IDX(SYMBOL, BASE, INDEX, STEP, OFFSET) \
  ((((SYMBOL) != 0) << 4) \
   | (((BASE) != 0) << 3) \
   | (((INDEX) != 0) << 2) \
   | (((STEP) != 0) << 1) \
   | ((OFFSET) != 0))
93
/* Constructs the RTL address for a memory reference with parameters SYMBOL,
   BASE, INDEX, STEP and OFFSET (any of which may be NULL) and stores it to
   *ADDR.  The resulting address has the shape

     symbol + base + index * step + offset

   with the symbol + offset part wrapped in a CONST, since it is a link-time
   constant.  If STEP_P (resp. OFFSET_P) is non-NULL, a pointer to the
   location inside the built RTL where the step (resp. offset) resides is
   stored to *STEP_P (resp. *OFFSET_P), so that the caller can later patch
   different values into a shared "template" copy of the address.  */

static void
gen_addr_rtx (rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (Pmode, act_elem, step);

	  /* The step is the second operand of the MULT.  */
	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  if (base)
    {
      if (*addr)
	*addr = gen_rtx_PLUS (Pmode, *addr, base);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  /* Keep symbol + offset together inside a CONST; the pair is a
	     link-time constant.  */
	  act_elem = gen_rtx_CONST (Pmode,
				    gen_rtx_PLUS (Pmode, act_elem, offset));
	  if (offset_p)
	    *offset_p = &XEXP (XEXP (act_elem, 0), 1);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (Pmode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (Pmode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  /* The offset is the entire address; it can be replaced directly
	     through the caller's ADDR slot.  */
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  /* No components at all -- the address is constant zero.  */
  if (!*addr)
    *addr = const0_rtx;
}
167
/* Returns address for TARGET_MEM_REF with parameters given by ADDR.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the shared "templates" (so that validity of an address
   can be tested without allocating fresh RTL each time).  */

rtx
addr_for_mem_ref (struct mem_address *addr, bool really_expand)
{
  rtx address, sym, bse, idx, st, off;
  static bool templates_initialized = false;
  struct mem_addr_template *templ;

  /* A step of one and an offset of zero carry no information -- treat them
     as absent.  */
  if (addr->step && !integer_onep (addr->step))
    st = immed_double_const (TREE_INT_CST_LOW (addr->step),
			     TREE_INT_CST_HIGH (addr->step), Pmode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    off = immed_double_const (TREE_INT_CST_LOW (addr->offset),
			      TREE_INT_CST_HIGH (addr->offset), Pmode);
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      /* Reuse the templates for addresses, so that we do not waste memory.  */
      if (!templates_initialized)
	{
	  unsigned i;

	  templates_initialized = true;
	  /* Placeholder operands: an arbitrary symbol, and two raw registers
	     past the last virtual register so they cannot clash with real
	     operands.  */
	  sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup ("test_symbol"));
	  bse = gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1);
	  idx = gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 2);

	  /* Build one template per combination of present components; the
	     bits of I match TEMPL_IDX.  */
	  for (i = 0; i < 32; i++)
	    gen_addr_rtx ((i & 16 ? sym : NULL_RTX),
			  (i & 8 ? bse : NULL_RTX),
			  (i & 4 ? idx : NULL_RTX),
			  (i & 2 ? const0_rtx : NULL_RTX),
			  (i & 1 ? const0_rtx : NULL_RTX),
			  &templates[i].ref,
			  &templates[i].step_p,
			  &templates[i].off_p);
	}

      /* Patch the actual step and offset into the shared template.  */
      templ = templates + TEMPL_IDX (addr->symbol, addr->base, addr->index,
				     st, off);
      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (build_addr (addr->symbol, current_function_decl),
			NULL_RTX, Pmode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, Pmode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, Pmode, EXPAND_NORMAL)
	 : NULL_RTX);

  gen_addr_rtx (sym, bse, idx, st, off, &address, NULL, NULL);
  return address;
}
240
/* Returns address of MEM_REF in TYPE, reconstructed as an ordinary tree
   expression from the TARGET_MEM_REF components:

     &symbol + base + index * step + offset

   The non-pointer parts are accumulated in sizetype in ADDR_OFF, the
   pointer-valued part (symbol address, or pointer-typed base) in ADDR_BASE,
   and finally the two are combined.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree sym = TMR_SYMBOL (mem_ref), base = TMR_BASE (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  /* The pointer-valued part: either the symbol's address, or the base if
     it has pointer type.  */
  if (sym)
    addr_base = fold_convert (type, build_addr (sym, current_function_decl));
  else if (base && POINTER_TYPE_P (TREE_TYPE (base)))
    {
      addr_base = fold_convert (type, base);
      base = NULL_TREE;
    }

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
	act_elem = fold_build2 (MULT_EXPR, sizetype, act_elem, step);
      addr_off = act_elem;
    }

  /* A base that was not pointer-typed is just another offset element.  */
  act_elem = base;
  if (act_elem)
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, sizetype, addr_off, act_elem);
      else
	addr_off = act_elem;
    }

  if (!zero_p (offset))
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, sizetype, addr_off, offset);
      else
	addr_off = offset;
    }

  /* Combine the offset part with the pointer part, if any.  */
  if (addr_off)
    {
      addr = fold_convert (type, addr_off);
      if (addr_base)
	addr = fold_build2 (PLUS_EXPR, type, addr_base, addr);
    }
  else if (addr_base)
    addr = addr_base;
  else
    addr = build_int_cst (type, 0);

  return addr;
}
298
299 /* Returns true if a memory reference in MODE and with parameters given by
300 ADDR is valid on the current target. */
301
302 static bool
valid_mem_ref_p(enum machine_mode mode,struct mem_address * addr)303 valid_mem_ref_p (enum machine_mode mode, struct mem_address *addr)
304 {
305 rtx address;
306
307 address = addr_for_mem_ref (addr, false);
308 if (!address)
309 return false;
310
311 return memory_address_p (mode, address);
312 }
313
314 /* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
315 is valid on the current target and if so, creates and returns the
316 TARGET_MEM_REF. */
317
318 static tree
create_mem_ref_raw(tree type,struct mem_address * addr)319 create_mem_ref_raw (tree type, struct mem_address *addr)
320 {
321 if (!valid_mem_ref_p (TYPE_MODE (type), addr))
322 return NULL_TREE;
323
324 if (addr->step && integer_onep (addr->step))
325 addr->step = NULL_TREE;
326
327 if (addr->offset && zero_p (addr->offset))
328 addr->offset = NULL_TREE;
329
330 return build7 (TARGET_MEM_REF, type,
331 addr->symbol, addr->base, addr->index,
332 addr->step, addr->offset, NULL, NULL);
333 }
334
335 /* Returns true if OBJ is an object whose address is a link time constant. */
336
337 static bool
fixed_address_object_p(tree obj)338 fixed_address_object_p (tree obj)
339 {
340 return (TREE_CODE (obj) == VAR_DECL
341 && (TREE_STATIC (obj)
342 || DECL_EXTERNAL (obj)));
343 }
344
345 /* Remove M-th element from COMB. */
346
347 static void
aff_combination_remove_elt(struct affine_tree_combination * comb,unsigned m)348 aff_combination_remove_elt (struct affine_tree_combination *comb, unsigned m)
349 {
350 comb->n--;
351 if (m <= comb->n)
352 {
353 comb->coefs[m] = comb->coefs[comb->n];
354 comb->elts[m] = comb->elts[comb->n];
355 }
356 if (comb->rest)
357 {
358 comb->coefs[comb->n] = 1;
359 comb->elts[comb->n] = comb->rest;
360 comb->rest = NULL_TREE;
361 comb->n++;
362 }
363 }
364
365 /* If ADDR contains an address of object that is a link time constant,
366 move it to PARTS->symbol. */
367
368 static void
move_fixed_address_to_symbol(struct mem_address * parts,struct affine_tree_combination * addr)369 move_fixed_address_to_symbol (struct mem_address *parts,
370 struct affine_tree_combination *addr)
371 {
372 unsigned i;
373 tree val = NULL_TREE;
374
375 for (i = 0; i < addr->n; i++)
376 {
377 if (addr->coefs[i] != 1)
378 continue;
379
380 val = addr->elts[i];
381 if (TREE_CODE (val) == ADDR_EXPR
382 && fixed_address_object_p (TREE_OPERAND (val, 0)))
383 break;
384 }
385
386 if (i == addr->n)
387 return;
388
389 parts->symbol = TREE_OPERAND (val, 0);
390 aff_combination_remove_elt (addr, i);
391 }
392
393 /* If ADDR contains an address of a dereferenced pointer, move it to
394 PARTS->base. */
395
396 static void
move_pointer_to_base(struct mem_address * parts,struct affine_tree_combination * addr)397 move_pointer_to_base (struct mem_address *parts,
398 struct affine_tree_combination *addr)
399 {
400 unsigned i;
401 tree val = NULL_TREE;
402
403 for (i = 0; i < addr->n; i++)
404 {
405 if (addr->coefs[i] != 1)
406 continue;
407
408 val = addr->elts[i];
409 if (POINTER_TYPE_P (TREE_TYPE (val)))
410 break;
411 }
412
413 if (i == addr->n)
414 return;
415
416 parts->base = val;
417 aff_combination_remove_elt (addr, i);
418 }
419
420 /* Adds ELT to PARTS. */
421
422 static void
add_to_parts(struct mem_address * parts,tree elt)423 add_to_parts (struct mem_address *parts, tree elt)
424 {
425 tree type;
426
427 if (!parts->index)
428 {
429 parts->index = elt;
430 return;
431 }
432
433 if (!parts->base)
434 {
435 parts->base = elt;
436 return;
437 }
438
439 /* Add ELT to base. */
440 type = TREE_TYPE (parts->base);
441 parts->base = fold_build2 (PLUS_EXPR, type,
442 parts->base,
443 fold_convert (type, elt));
444 }
445
/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and move the corresponding
   element(s) to PARTS, as PARTS->index * PARTS->step.  */

static void
most_expensive_mult_to_index (struct mem_address *parts,
			      struct affine_tree_combination *addr)
{
  unsigned HOST_WIDE_INT best_mult = 0;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;

  /* First pass: among the coefficients the target can express directly in
     an address, find the one that would be most expensive to compute
     explicitly.  */
  for (i = 0; i < addr->n; i++)
    {
      if (addr->coefs[i] == 1
	  || !multiplier_allowed_in_address_p (addr->coefs[i]))
	continue;

      acost = multiply_by_cost (addr->coefs[i], Pmode);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = addr->coefs[i];
	}
    }

  if (!best_mult)
    return;

  /* Second pass: sum the elements multiplied by BEST_MULT into MULT_ELT,
     compacting the remaining elements of ADDR in place (J trails I).  */
  for (i = j = 0; i < addr->n; i++)
    {
      if (addr->coefs[i] != best_mult)
	{
	  addr->coefs[j] = addr->coefs[i];
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i]);
      if (!mult_elt)
	mult_elt = elt;
      else
	mult_elt = fold_build2 (PLUS_EXPR, sizetype, mult_elt, elt);
    }
  addr->n = j;

  /* MULT_ELT * BEST_MULT becomes the index * step part of the address.  */
  parts->index = mult_elt;
  parts->step = build_int_cst_type (sizetype, best_mult);
}
498
/* Splits address ADDR into PARTS (symbol, base, index, step and offset).
   The heuristics are applied in a deliberate order: symbol first, then the
   most expensive multiplication, then the base pointer, and whatever
   remains is distributed by add_to_parts.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but single
   register in address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be a small bit more efficient to know that asking
   for complicated addressing modes is useless.  */

static void
addr_to_parts (struct affine_tree_combination *addr, struct mem_address *parts)
{
  unsigned i;
  tree part;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  /* The constant part of the combination becomes the offset.  */
  if (addr->offset)
    parts->offset = build_int_cst_type (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* First move the most expensive feasible multiplication
     to index.  */
  most_expensive_mult_to_index (parts, addr);

  /* Try to find a base of the reference.  Since at the moment
     there is no reliable way how to distinguish between pointer and its
     offset, this is just a guess.  */
  if (!parts->symbol)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i]);
      if (addr->coefs[i] != 1)
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    build_int_cst_type (sizetype, addr->coefs[i]));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
549
550 /* Force the PARTS to register. */
551
552 static void
gimplify_mem_ref_parts(block_stmt_iterator * bsi,struct mem_address * parts)553 gimplify_mem_ref_parts (block_stmt_iterator *bsi, struct mem_address *parts)
554 {
555 if (parts->base)
556 parts->base = force_gimple_operand_bsi (bsi, parts->base,
557 true, NULL_TREE);
558 if (parts->index)
559 parts->index = force_gimple_operand_bsi (bsi, parts->index,
560 true, NULL_TREE);
561 }
562
/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
   computations are emitted in front of BSI.  TYPE is the mode
   of created memory reference.  If the full address is not directly valid
   on the target, components are folded together one at a time (step into
   index, symbol into base, index into base, offset into base) until a
   valid shape is found; a bare register is always valid, so this cannot
   fail.  */

tree
create_mem_ref (block_stmt_iterator *bsi, tree type,
		struct affine_tree_combination *addr)
{
  tree mem_ref, tmp;
  tree addr_type = build_pointer_type (type), atype;
  struct mem_address parts;

  addr_to_parts (addr, &parts);
  gimplify_mem_ref_parts (bsi, &parts);
  mem_ref = create_mem_ref_raw (type, &parts);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_bsi (bsi,
				fold_build2 (MULT_EXPR, sizetype,
					     parts.index, parts.step),
				true, NULL_TREE);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, &parts);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = fold_convert (addr_type,
			  build_addr (parts.symbol, current_function_decl));

      /* Add the symbol to base, eventually forcing it to register.  */
      if (parts.base)
	{
	  if (parts.index)
	    /* Both slots are taken: fold symbol + base into a single
	       gimplified base.  */
	    parts.base = force_gimple_operand_bsi (bsi,
			fold_build2 (PLUS_EXPR, addr_type,
				     fold_convert (addr_type, parts.base),
				     tmp),
			true, NULL_TREE);
	  else
	    {
	      /* The index slot is free; the old base can serve as index.  */
	      parts.index = parts.base;
	      parts.base = tmp;
	    }
	}
      else
	parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, &parts);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base.  */
      if (parts.base)
	{
	  atype = TREE_TYPE (parts.base);
	  parts.base = force_gimple_operand_bsi (bsi,
			fold_build2 (PLUS_EXPR, atype,
				     parts.base,
				     fold_convert (atype, parts.index)),
			true, NULL_TREE);
	}
      else
	parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, &parts);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */
      if (parts.base)
	{
	  atype = TREE_TYPE (parts.base);
	  parts.base = force_gimple_operand_bsi (bsi,
			fold_build2 (PLUS_EXPR, atype,
				     parts.base,
				     fold_convert (atype, parts.offset)),
			true, NULL_TREE);
	}
      else
	parts.base = parts.offset;

      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, &parts);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
679
680 /* Copies components of the address from OP to ADDR. */
681
682 void
get_address_description(tree op,struct mem_address * addr)683 get_address_description (tree op, struct mem_address *addr)
684 {
685 addr->symbol = TMR_SYMBOL (op);
686 addr->base = TMR_BASE (op);
687 addr->index = TMR_INDEX (op);
688 addr->step = TMR_STEP (op);
689 addr->offset = TMR_OFFSET (op);
690 }
691
/* Copies the additional information attached to target_mem_ref FROM to TO:
   the alias tag and the record of the original reference.  The five
   address operands themselves are not touched.  */

void
copy_mem_ref_info (tree to, tree from)
{
  /* Copy the annotation, to preserve the aliasing information.  */
  TMR_TAG (to) = TMR_TAG (from);

  /* And the info about the original reference.  */
  TMR_ORIGINAL (to) = TMR_ORIGINAL (from);
}
703
/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  The new reference is
   only built if the simplified address is still valid on the target.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree ret, off;

  get_address_description (ref, &addr);

  /* A constant base contributes directly to the offset.  */
  if (addr.base && TREE_CODE (addr.base) == INTEGER_CST)
    {
      if (addr.offset)
	addr.offset = fold_binary_to_constant (PLUS_EXPR, sizetype,
				addr.offset,
				fold_convert (sizetype, addr.base));
      else
	addr.offset = addr.base;

      addr.base = NULL_TREE;
      changed = true;
    }

  /* A constant index contributes index * step to the offset.  */
  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      if (addr.offset)
	{
	  addr.offset = fold_binary_to_constant (PLUS_EXPR, sizetype,
						 addr.offset, off);
	}
      else
	addr.offset = off;

      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* The folded address may no longer be valid for the target; in that
     case keep the original reference.  */
  ret = create_mem_ref_raw (TREE_TYPE (ref), &addr);
  if (!ret)
    return NULL_TREE;

  copy_mem_ref_info (ret, ref);
  return ret;
}
761
762 /* Dump PARTS to FILE. */
763
764 extern void dump_mem_address (FILE *, struct mem_address *);
765 void
dump_mem_address(FILE * file,struct mem_address * parts)766 dump_mem_address (FILE *file, struct mem_address *parts)
767 {
768 if (parts->symbol)
769 {
770 fprintf (file, "symbol: ");
771 print_generic_expr (file, parts->symbol, TDF_SLIM);
772 fprintf (file, "\n");
773 }
774 if (parts->base)
775 {
776 fprintf (file, "base: ");
777 print_generic_expr (file, parts->base, TDF_SLIM);
778 fprintf (file, "\n");
779 }
780 if (parts->index)
781 {
782 fprintf (file, "index: ");
783 print_generic_expr (file, parts->index, TDF_SLIM);
784 fprintf (file, "\n");
785 }
786 if (parts->step)
787 {
788 fprintf (file, "step: ");
789 print_generic_expr (file, parts->step, TDF_SLIM);
790 fprintf (file, "\n");
791 }
792 if (parts->offset)
793 {
794 fprintf (file, "offset: ");
795 print_generic_expr (file, parts->offset, TDF_SLIM);
796 fprintf (file, "\n");
797 }
798 }
799
800 #include "gt-tree-ssa-address.h"
801