1 /* Memory address lowering and addressing mode selection.
2    Copyright (C) 2004, 2006, 2007, 2008, 2009, 2010, 2011
3    Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
/* Utility functions for manipulating TARGET_MEM_REFs -- tree expressions
   that map directly to the addressing modes of the target.  */
23 
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "tm_p.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "tree-pretty-print.h"
33 #include "tree-flow.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
36 #include "timevar.h"
37 #include "flags.h"
38 #include "tree-inline.h"
39 #include "tree-affine.h"
40 
41 /* FIXME: We compute address costs using RTL.  */
42 #include "insn-config.h"
43 #include "rtl.h"
44 #include "recog.h"
45 #include "expr.h"
46 #include "ggc.h"
47 #include "target.h"
48 
/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
51 
52    There are at least 5 different kinds of symbols that we can run up against:
53 
54      (1) binds_local_p, small data area.
     (2) binds_local_p, e.g. local statics
     (3) !binds_local_p, e.g. global variables
57      (4) thread local, local_exec
58      (5) thread local, !local_exec
59 
60    Now, (1) won't appear often in an array context, but it certainly can.
61    All you have to do is set -GN high enough, or explicitly mark any
62    random object __attribute__((section (".sdata"))).
63 
64    All of these affect whether or not a symbol is in fact a valid address.
65    The only one tested here is (3).  And that result may very well
66    be incorrect for (4) or (5).
67 
   An incorrect result here does not lead to incorrect code out of the
   back end, because the expander in expr.c validates the address.  However,
   it would be nice to improve the handling here in order to produce more
   precise results.  */
72 
/* A "template" for a memory address, used to determine whether the address
   is valid for a given mode.  */
75 
76 typedef struct GTY (()) mem_addr_template {
77   rtx ref;			/* The template.  */
  rtx * GTY ((skip)) step_p;	/* The point in the template where the step
				   should be filled in.  */
  rtx * GTY ((skip)) off_p;	/* The point in the template where the offset
				   should be filled in.  */
82 } mem_addr_template;
83 
84 DEF_VEC_O (mem_addr_template);
85 DEF_VEC_ALLOC_O (mem_addr_template, gc);
86 
/* The templates.  Each of the low five bits of the index corresponds to one
   component of a TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */
90 
91 static GTY(()) VEC (mem_addr_template, gc) *mem_addr_template_list;
92 
93 #define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
94   (((int) (AS) << 5) \
95    | ((SYMBOL != 0) << 4) \
96    | ((BASE != 0) << 3) \
97    | ((INDEX != 0) << 2) \
98    | ((STEP != 0) << 1) \
99    | (OFFSET != 0))
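
/* For illustration (a sketch, not checked by the compiler): in address
   space 0, an address with a base, index, step and offset but no symbol
   gets index (0 << 5) | (0 << 4) | (1 << 3) | (1 << 2) | (1 << 1) | 1 == 15,
   so its template lives in slot 15 of mem_addr_template_list.  */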
100 
/* Stores the address for a memory reference with parameters SYMBOL, BASE,
   INDEX, STEP and OFFSET to *ADDR, using address mode ADDRESS_MODE.  If
   STEP_P and OFFSET_P are non-NULL, stores pointers to the locations where
   the step and the offset were placed to *STEP_P and *OFFSET_P.  */
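
/* As a rough sketch, when SYMBOL, BASE, INDEX, STEP and OFFSET are all
   present (and SYMBOL is a SYMBOL_REF), the resulting address has the shape

     (plus (plus BASE (mult INDEX STEP)) (const (plus SYMBOL OFFSET)))

   with the step and offset locations returned through STEP_P and OFFSET_P.  */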
104 
105 static void
106 gen_addr_rtx (enum machine_mode address_mode,
107 	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
108 	      rtx *addr, rtx **step_p, rtx **offset_p)
109 {
110   rtx act_elem;
111 
112   *addr = NULL_RTX;
113   if (step_p)
114     *step_p = NULL;
115   if (offset_p)
116     *offset_p = NULL;
117 
118   if (index)
119     {
120       act_elem = index;
121       if (step)
122 	{
123 	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);
124 
125 	  if (step_p)
126 	    *step_p = &XEXP (act_elem, 1);
127 	}
128 
129       *addr = act_elem;
130     }
131 
132   if (base && base != const0_rtx)
133     {
134       if (*addr)
135 	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
136       else
137 	*addr = base;
138     }
139 
140   if (symbol)
141     {
142       act_elem = symbol;
143       if (offset)
144 	{
145 	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);
146 
147 	  if (offset_p)
148 	    *offset_p = &XEXP (act_elem, 1);
149 
150 	  if (GET_CODE (symbol) == SYMBOL_REF
151 	      || GET_CODE (symbol) == LABEL_REF
152 	      || GET_CODE (symbol) == CONST)
153 	    act_elem = gen_rtx_CONST (address_mode, act_elem);
154 	}
155 
156       if (*addr)
157 	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
158       else
159 	*addr = act_elem;
160     }
161   else if (offset)
162     {
163       if (*addr)
164 	{
165 	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
166 	  if (offset_p)
167 	    *offset_p = &XEXP (*addr, 1);
168 	}
169       else
170 	{
171 	  *addr = offset;
172 	  if (offset_p)
173 	    *offset_p = addr;
174 	}
175     }
176 
177   if (!*addr)
178     *addr = const0_rtx;
179 }
180 
/* Returns the address for a TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in place
   using one of the "templates".  */
186 
187 rtx
188 addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
189 		  bool really_expand)
190 {
191   enum machine_mode address_mode = targetm.addr_space.address_mode (as);
192   enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
193   rtx address, sym, bse, idx, st, off;
194   struct mem_addr_template *templ;
195 
196   if (addr->step && !integer_onep (addr->step))
197     st = immed_double_int_const (tree_to_double_int (addr->step), pointer_mode);
198   else
199     st = NULL_RTX;
200 
201   if (addr->offset && !integer_zerop (addr->offset))
202     off = immed_double_int_const
203 	    (double_int_sext (tree_to_double_int (addr->offset),
204 			      TYPE_PRECISION (TREE_TYPE (addr->offset))),
205 	     pointer_mode);
206   else
207     off = NULL_RTX;
208 
209   if (!really_expand)
210     {
211       unsigned int templ_index
212 	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);
213 
214       if (templ_index
215 	  >= VEC_length (mem_addr_template, mem_addr_template_list))
216 	VEC_safe_grow_cleared (mem_addr_template, gc, mem_addr_template_list,
217 			       templ_index + 1);
218 
219       /* Reuse the templates for addresses, so that we do not waste memory.  */
220       templ = VEC_index (mem_addr_template, mem_addr_template_list, templ_index);
221       if (!templ->ref)
222 	{
223 	  sym = (addr->symbol ?
224 		 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
225 		 : NULL_RTX);
226 	  bse = (addr->base ?
227 		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
228 		 : NULL_RTX);
229 	  idx = (addr->index ?
230 		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
231 		 : NULL_RTX);
232 
233 	  gen_addr_rtx (pointer_mode, sym, bse, idx,
234 			st? const0_rtx : NULL_RTX,
235 			off? const0_rtx : NULL_RTX,
236 			&templ->ref,
237 			&templ->step_p,
238 			&templ->off_p);
239 	}
240 
241       if (st)
242 	*templ->step_p = st;
243       if (off)
244 	*templ->off_p = off;
245 
246       return templ->ref;
247     }
248 
249   /* Otherwise really expand the expressions.  */
250   sym = (addr->symbol
251 	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
252 	 : NULL_RTX);
253   bse = (addr->base
254 	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
255 	 : NULL_RTX);
256   idx = (addr->index
257 	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
258 	 : NULL_RTX);
259 
260   gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
261   if (pointer_mode != address_mode)
262     address = convert_memory_address (address_mode, address);
263   return address;
264 }
265 
/* Returns the address of MEM_REF (a TARGET_MEM_REF) as a tree expression
   of type TYPE.  */
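
/* For example (a sketch), for a TARGET_MEM_REF with base P, index I,
   step 4 and offset 16 this builds P + (I * 4 + 16), with the outer
   addition expressed as a POINTER_PLUS_EXPR.  */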
267 
268 tree
269 tree_mem_ref_addr (tree type, tree mem_ref)
270 {
271   tree addr;
272   tree act_elem;
273   tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
274   tree addr_base = NULL_TREE, addr_off = NULL_TREE;
275 
276   addr_base = fold_convert (type, TMR_BASE (mem_ref));
277 
278   act_elem = TMR_INDEX (mem_ref);
279   if (act_elem)
280     {
281       if (step)
282 	act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
283 				act_elem, step);
284       addr_off = act_elem;
285     }
286 
287   act_elem = TMR_INDEX2 (mem_ref);
288   if (act_elem)
289     {
290       if (addr_off)
291 	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
292 				addr_off, act_elem);
293       else
294 	addr_off = act_elem;
295     }
296 
297   if (offset && !integer_zerop (offset))
298     {
299       if (addr_off)
300 	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
301 				fold_convert (TREE_TYPE (addr_off), offset));
302       else
303 	addr_off = offset;
304     }
305 
306   if (addr_off)
307     addr = fold_build_pointer_plus (addr_base, addr_off);
308   else
309     addr = addr_base;
310 
311   return addr;
312 }
313 
314 /* Returns true if a memory reference in MODE and with parameters given by
315    ADDR is valid on the current target.  */
316 
317 static bool
318 valid_mem_ref_p (enum machine_mode mode, addr_space_t as,
319 		 struct mem_address *addr)
320 {
321   rtx address;
322 
323   address = addr_for_mem_ref (addr, as, false);
324   if (!address)
325     return false;
326 
327   return memory_address_addr_space_p (mode, address, as);
328 }
329 
/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false, omit the verification step.  */
333 
334 static tree
335 create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
336 		    bool verify)
337 {
338   tree base, index2;
339 
340   if (verify
341       && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
342     return NULL_TREE;
343 
344   if (addr->step && integer_onep (addr->step))
345     addr->step = NULL_TREE;
346 
347   if (addr->offset)
348     addr->offset = fold_convert (alias_ptr_type, addr->offset);
349   else
350     addr->offset = build_int_cst (alias_ptr_type, 0);
351 
352   if (addr->symbol)
353     {
354       base = addr->symbol;
355       index2 = addr->base;
356     }
357   else if (addr->base
358 	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
359     {
360       base = addr->base;
361       index2 = NULL_TREE;
362     }
363   else
364     {
365       base = build_int_cst (ptr_type_node, 0);
366       index2 = addr->base;
367     }
368 
  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ???  As IVOPTs does not follow the restrictions on where the base
     pointer may point to, create a MEM_REF only if we know that the
     base is valid.  */
373   if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
374       && (!index2 || integer_zerop (index2))
375       && (!addr->index || integer_zerop (addr->index)))
376     return fold_build2 (MEM_REF, type, base, addr->offset);
377 
378   return build5 (TARGET_MEM_REF, type,
379 		 base, addr->offset, addr->index, addr->step, index2);
380 }
381 
/* Returns true if OBJ is an object whose address is a link-time constant.  */
383 
384 static bool
385 fixed_address_object_p (tree obj)
386 {
387   return (TREE_CODE (obj) == VAR_DECL
388 	  && (TREE_STATIC (obj)
389 	      || DECL_EXTERNAL (obj))
390 	  && ! DECL_DLLIMPORT_P (obj));
391 }
392 
/* If ADDR contains the address of an object that is a link-time constant,
   move it to PARTS->symbol.  */
395 
396 static void
397 move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
398 {
399   unsigned i;
400   tree val = NULL_TREE;
401 
402   for (i = 0; i < addr->n; i++)
403     {
404       if (!double_int_one_p (addr->elts[i].coef))
405 	continue;
406 
407       val = addr->elts[i].val;
408       if (TREE_CODE (val) == ADDR_EXPR
409 	  && fixed_address_object_p (TREE_OPERAND (val, 0)))
410 	break;
411     }
412 
413   if (i == addr->n)
414     return;
415 
416   parts->symbol = val;
417   aff_combination_remove_elt (addr, i);
418 }
419 
420 /* If ADDR contains an instance of BASE_HINT, move it to PARTS->base.  */
421 
422 static void
423 move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
424 		   aff_tree *addr)
425 {
426   unsigned i;
427   tree val = NULL_TREE;
428   int qual;
429 
430   for (i = 0; i < addr->n; i++)
431     {
432       if (!double_int_one_p (addr->elts[i].coef))
433 	continue;
434 
435       val = addr->elts[i].val;
436       if (operand_equal_p (val, base_hint, 0))
437 	break;
438     }
439 
440   if (i == addr->n)
441     return;
442 
  /* Cast the value to an appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back end will assume registers of pointer
     type are aligned, while the base alone may not actually be.
     We use a void pointer to the type's address space instead.  */
447   qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
448   type = build_qualified_type (void_type_node, qual);
449   parts->base = fold_convert (build_pointer_type (type), val);
450   aff_combination_remove_elt (addr, i);
451 }
452 
453 /* If ADDR contains an address of a dereferenced pointer, move it to
454    PARTS->base.  */
455 
456 static void
457 move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
458 {
459   unsigned i;
460   tree val = NULL_TREE;
461 
462   for (i = 0; i < addr->n; i++)
463     {
464       if (!double_int_one_p (addr->elts[i].coef))
465 	continue;
466 
467       val = addr->elts[i].val;
468       if (POINTER_TYPE_P (TREE_TYPE (val)))
469 	break;
470     }
471 
472   if (i == addr->n)
473     return;
474 
475   parts->base = val;
476   aff_combination_remove_elt (addr, i);
477 }
478 
/* Moves the loop-variant part V of the linear address ADDR to be the index
   of PARTS.  */
481 
482 static void
483 move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
484 {
485   unsigned i;
486   tree val = NULL_TREE;
487 
488   gcc_assert (!parts->index);
489   for (i = 0; i < addr->n; i++)
490     {
491       val = addr->elts[i].val;
492       if (operand_equal_p (val, v, 0))
493 	break;
494     }
495 
496   if (i == addr->n)
497     return;
498 
499   parts->index = fold_convert (sizetype, val);
500   parts->step = double_int_to_tree (sizetype, addr->elts[i].coef);
501   aff_combination_remove_elt (addr, i);
502 }
503 
504 /* Adds ELT to PARTS.  */
505 
506 static void
507 add_to_parts (struct mem_address *parts, tree elt)
508 {
509   tree type;
510 
511   if (!parts->index)
512     {
513       parts->index = fold_convert (sizetype, elt);
514       return;
515     }
516 
517   if (!parts->base)
518     {
519       parts->base = elt;
520       return;
521     }
522 
523   /* Add ELT to base.  */
524   type = TREE_TYPE (parts->base);
525   if (POINTER_TYPE_P (type))
526     parts->base = fold_build_pointer_plus (parts->base, elt);
527   else
528     parts->base = fold_build2 (PLUS_EXPR, type,
529 			       parts->base, elt);
530 }
531 
/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and moves the corresponding
   element(s) to PARTS.  */
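
/* For instance (assuming the target accepts a multiplier of 4 in its
   addressing modes and multiplying by 4 is the most expensive candidate),
   splitting P + 4 * I + J + 7 sets PARTS->index to I and PARTS->step to 4,
   leaving P + J + 7 in ADDR.  */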
535 
536 static void
537 most_expensive_mult_to_index (tree type, struct mem_address *parts,
538 			      aff_tree *addr, bool speed)
539 {
540   addr_space_t as = TYPE_ADDR_SPACE (type);
541   enum machine_mode address_mode = targetm.addr_space.address_mode (as);
542   HOST_WIDE_INT coef;
543   double_int best_mult, amult, amult_neg;
544   unsigned best_mult_cost = 0, acost;
545   tree mult_elt = NULL_TREE, elt;
546   unsigned i, j;
547   enum tree_code op_code;
548 
549   best_mult = double_int_zero;
550   for (i = 0; i < addr->n; i++)
551     {
552       if (!double_int_fits_in_shwi_p (addr->elts[i].coef))
553 	continue;
554 
555       coef = double_int_to_shwi (addr->elts[i].coef);
556       if (coef == 1
557 	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
558 	continue;
559 
560       acost = multiply_by_cost (coef, address_mode, speed);
561 
562       if (acost > best_mult_cost)
563 	{
564 	  best_mult_cost = acost;
565 	  best_mult = addr->elts[i].coef;
566 	}
567     }
568 
569   if (!best_mult_cost)
570     return;
571 
572   /* Collect elements multiplied by best_mult.  */
573   for (i = j = 0; i < addr->n; i++)
574     {
575       amult = addr->elts[i].coef;
576       amult_neg = double_int_ext_for_comb (double_int_neg (amult), addr);
577 
578       if (double_int_equal_p (amult, best_mult))
579 	op_code = PLUS_EXPR;
580       else if (double_int_equal_p (amult_neg, best_mult))
581 	op_code = MINUS_EXPR;
582       else
583 	{
584 	  addr->elts[j] = addr->elts[i];
585 	  j++;
586 	  continue;
587 	}
588 
589       elt = fold_convert (sizetype, addr->elts[i].val);
590       if (mult_elt)
591 	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
592       else if (op_code == PLUS_EXPR)
593 	mult_elt = elt;
594       else
595 	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
596     }
597   addr->n = j;
598 
599   parts->index = mult_elt;
600   parts->step = double_int_to_tree (sizetype, best_mult);
601 }
602 
/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as the base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but a single
   register in an address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know up front that asking for
   complicated addressing modes is useless.  */
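
/* As a hypothetical example (no BASE_HINT, A a static variable, and a
   target that accepts a step of 4), the combination &A + 4 * I + 8 is
   split into PARTS->symbol == &A, PARTS->index == I, PARTS->step == 4
   and PARTS->offset == 8, with PARTS->base left empty.  */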
614 
615 static void
616 addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
617 	       tree base_hint, struct mem_address *parts,
618                bool speed)
619 {
620   tree part;
621   unsigned i;
622 
623   parts->symbol = NULL_TREE;
624   parts->base = NULL_TREE;
625   parts->index = NULL_TREE;
626   parts->step = NULL_TREE;
627 
628   if (!double_int_zero_p (addr->offset))
629     parts->offset = double_int_to_tree (sizetype, addr->offset);
630   else
631     parts->offset = NULL_TREE;
632 
633   /* Try to find a symbol.  */
634   move_fixed_address_to_symbol (parts, addr);
635 
  /* No need to do address parts reassociation if the number of parts
     is <= 2 -- in that case, no loop-invariant code motion can be
     exposed.  */
639 
640   if (!base_hint && (addr->n > 2))
641     move_variant_to_index (parts, addr, iv_cand);
642 
643   /* First move the most expensive feasible multiplication
644      to index.  */
645   if (!parts->index)
646     most_expensive_mult_to_index (type, parts, addr, speed);
647 
  /* Try to find a base of the reference.  Since at the moment
     there is no reliable way to distinguish between a pointer and its
     offset, this is just a guess.  */
651   if (!parts->symbol && base_hint)
652     move_hint_to_base (type, parts, base_hint, addr);
653   if (!parts->symbol && !parts->base)
654     move_pointer_to_base (parts, addr);
655 
656   /* Then try to process the remaining elements.  */
657   for (i = 0; i < addr->n; i++)
658     {
659       part = fold_convert (sizetype, addr->elts[i].val);
660       if (!double_int_one_p (addr->elts[i].coef))
661 	part = fold_build2 (MULT_EXPR, sizetype, part,
662 			    double_int_to_tree (sizetype, addr->elts[i].coef));
663       add_to_parts (parts, part);
664     }
665   if (addr->rest)
666     add_to_parts (parts, fold_convert (sizetype, addr->rest));
667 }
668 
/* Force the components of PARTS into suitable gimple operands, emitting any
   needed statements in front of GSI.  */
670 
671 static void
672 gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
673 {
674   if (parts->base)
675     parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
676 					    is_gimple_mem_ref_addr, NULL_TREE,
677 					    true, GSI_SAME_STMT);
678   if (parts->index)
679     parts->index = force_gimple_operand_gsi (gsi, parts->index,
680 					     true, NULL_TREE,
681 					     true, GSI_SAME_STMT);
682 }
683 
/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary,
   computations are emitted in front of GSI.  TYPE is the type of the
   created memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non-NULL if IV_CAND comes from a base address
   object.  */
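
/* In outline, if the PARTS produced by addr_to_parts do not form a valid
   address for the target, the code below retries with progressively simpler
   shapes: the INDEX * STEP product is computed separately, then the symbol,
   the index and finally the offset are folded into the base, until only a
   single base register remains.  */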
689 
690 tree
691 create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
692 		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
693 {
694   tree mem_ref, tmp;
695   struct mem_address parts;
696 
697   addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
698   gimplify_mem_ref_parts (gsi, &parts);
699   mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
700   if (mem_ref)
701     return mem_ref;
702 
703   /* The expression is too complicated.  Try making it simpler.  */
704 
705   if (parts.step && !integer_onep (parts.step))
706     {
707       /* Move the multiplication to index.  */
708       gcc_assert (parts.index);
709       parts.index = force_gimple_operand_gsi (gsi,
710 				fold_build2 (MULT_EXPR, sizetype,
711 					     parts.index, parts.step),
712 				true, NULL_TREE, true, GSI_SAME_STMT);
713       parts.step = NULL_TREE;
714 
715       mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
716       if (mem_ref)
717 	return mem_ref;
718     }
719 
720   if (parts.symbol)
721     {
722       tmp = parts.symbol;
723       gcc_assert (is_gimple_val (tmp));
724 
      /* Add the symbol to the base, forcing the result into a register
	 if necessary.  */
726       if (parts.base)
727 	{
728 	  gcc_assert (useless_type_conversion_p
729 				(sizetype, TREE_TYPE (parts.base)));
730 
731 	  if (parts.index)
732 	    {
733 	      parts.base = force_gimple_operand_gsi_1 (gsi,
734 			fold_build_pointer_plus (tmp, parts.base),
735 			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
736 	    }
737 	  else
738 	    {
739 	      parts.index = parts.base;
740 	      parts.base = tmp;
741 	    }
742 	}
743       else
744 	parts.base = tmp;
745       parts.symbol = NULL_TREE;
746 
747       mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
748       if (mem_ref)
749 	return mem_ref;
750     }
751 
752   if (parts.index)
753     {
754       /* Add index to base.  */
755       if (parts.base)
756 	{
757 	  parts.base = force_gimple_operand_gsi_1 (gsi,
758 			fold_build_pointer_plus (parts.base, parts.index),
759 			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
760 	}
761       else
762 	parts.base = parts.index;
763       parts.index = NULL_TREE;
764 
765       mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
766       if (mem_ref)
767 	return mem_ref;
768     }
769 
770   if (parts.offset && !integer_zerop (parts.offset))
771     {
772       /* Try adding offset to base.  */
773       if (parts.base)
774 	{
775 	  parts.base = force_gimple_operand_gsi_1 (gsi,
776 			fold_build_pointer_plus (parts.base, parts.offset),
777 			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
778 	}
779       else
780 	parts.base = parts.offset;
781 
782       parts.offset = NULL_TREE;
783 
784       mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
785       if (mem_ref)
786 	return mem_ref;
787     }
788 
789   /* Verify that the address is in the simplest possible shape
790      (only a register).  If we cannot create such a memory reference,
791      something is really wrong.  */
792   gcc_assert (parts.symbol == NULL_TREE);
793   gcc_assert (parts.index == NULL_TREE);
794   gcc_assert (!parts.step || integer_onep (parts.step));
795   gcc_assert (!parts.offset || integer_zerop (parts.offset));
796   gcc_unreachable ();
797 }
798 
799 /* Copies components of the address from OP to ADDR.  */
800 
801 void
802 get_address_description (tree op, struct mem_address *addr)
803 {
804   if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
805     {
806       addr->symbol = TMR_BASE (op);
807       addr->base = TMR_INDEX2 (op);
808     }
809   else
810     {
811       addr->symbol = NULL_TREE;
812       if (TMR_INDEX2 (op))
813 	{
814 	  gcc_assert (integer_zerop (TMR_BASE (op)));
815 	  addr->base = TMR_INDEX2 (op);
816 	}
817       else
818 	addr->base = TMR_BASE (op);
819     }
820   addr->index = TMR_INDEX (op);
821   addr->step = TMR_STEP (op);
822   addr->offset = TMR_OFFSET (op);
823 }
824 
825 /* Copies the additional information attached to target_mem_ref FROM to TO.  */
826 
827 void
828 copy_mem_ref_info (tree to, tree from)
829 {
  /* Copy the info about the original reference.  */
831   TREE_SIDE_EFFECTS (to) = TREE_SIDE_EFFECTS (from);
832   TREE_THIS_VOLATILE (to) = TREE_THIS_VOLATILE (from);
833 }
834 
835 /* Copies the reference information from OLD_REF to NEW_REF, where
836    NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */
837 
838 void
839 copy_ref_info (tree new_ref, tree old_ref)
840 {
841   tree new_ptr_base = NULL_TREE;
842 
843   gcc_assert (TREE_CODE (new_ref) == MEM_REF
844 	      || TREE_CODE (new_ref) == TARGET_MEM_REF);
845 
846   TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
847   TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);
848 
849   new_ptr_base = TREE_OPERAND (new_ref, 0);
850 
851   /* We can transfer points-to information from an old pointer
852      or decl base to the new one.  */
853   if (new_ptr_base
854       && TREE_CODE (new_ptr_base) == SSA_NAME
855       && !SSA_NAME_PTR_INFO (new_ptr_base))
856     {
857       tree base = get_base_address (old_ref);
858       if (!base)
859 	;
860       else if ((TREE_CODE (base) == MEM_REF
861 		|| TREE_CODE (base) == TARGET_MEM_REF)
862 	       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
863 	       && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
864 	{
865 	  struct ptr_info_def *new_pi;
866 	  duplicate_ssa_name_ptr_info
867 	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
868 	  new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
	  /* We have to be careful about transferring alignment information.  */
870 	  if (TREE_CODE (old_ref) == MEM_REF
871 	      && !(TREE_CODE (new_ref) == TARGET_MEM_REF
872 		   && (TMR_INDEX2 (new_ref)
873 		       || (TMR_STEP (new_ref)
874 			   && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
875 			       < new_pi->align)))))
876 	    {
877 	      new_pi->misalign += double_int_sub (mem_ref_offset (old_ref),
878 						  mem_ref_offset (new_ref)).low;
879 	      new_pi->misalign &= (new_pi->align - 1);
880 	    }
881 	  else
882 	    {
883 	      new_pi->align = 1;
884 	      new_pi->misalign = 0;
885 	    }
886 	}
887       else if (TREE_CODE (base) == VAR_DECL
888 	       || TREE_CODE (base) == PARM_DECL
889 	       || TREE_CODE (base) == RESULT_DECL)
890 	{
891 	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
892 	  pt_solution_set_var (&pi->pt, base);
893 	}
894     }
895 }
896 
/* Move constants in TARGET_MEM_REF REF to the offset.  Returns the new
   TARGET_MEM_REF if anything changed, NULL_TREE otherwise.  */
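
/* For example, a TARGET_MEM_REF with constant index 3, step 4 and offset 8
   is rewritten to have offset 20 (3 * 4 folded in) and no index or step.  */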
899 
900 tree
901 maybe_fold_tmr (tree ref)
902 {
903   struct mem_address addr;
904   bool changed = false;
905   tree ret, off;
906 
907   get_address_description (ref, &addr);
908 
909   if (addr.base
910       && TREE_CODE (addr.base) == INTEGER_CST
911       && !integer_zerop (addr.base))
912     {
913       addr.offset = fold_binary_to_constant (PLUS_EXPR,
914 					     TREE_TYPE (addr.offset),
915 					     addr.offset, addr.base);
916       addr.base = NULL_TREE;
917       changed = true;
918     }
919 
920   if (addr.symbol
921       && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
922     {
923       addr.offset = fold_binary_to_constant
924 			(PLUS_EXPR, TREE_TYPE (addr.offset),
925 			 addr.offset,
926 			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
927       addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
928       changed = true;
929     }
930   else if (addr.symbol
931 	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
932     {
933       HOST_WIDE_INT offset;
934       addr.symbol = build_fold_addr_expr
935 		      (get_addr_base_and_unit_offset
936 		         (TREE_OPERAND (addr.symbol, 0), &offset));
937       addr.offset = int_const_binop (PLUS_EXPR,
938 				     addr.offset, size_int (offset));
939       changed = true;
940     }
941 
942   if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
943     {
944       off = addr.index;
945       if (addr.step)
946 	{
947 	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
948 					 off, addr.step);
949 	  addr.step = NULL_TREE;
950 	}
951 
952       addr.offset = fold_binary_to_constant (PLUS_EXPR,
953 					     TREE_TYPE (addr.offset),
954 					     addr.offset, off);
955       addr.index = NULL_TREE;
956       changed = true;
957     }
958 
959   if (!changed)
960     return NULL_TREE;
961 
  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target -- the propagation
     result would not be valid anyway.  */
966   ret = create_mem_ref_raw (TREE_TYPE (ref),
967 			    TREE_TYPE (addr.offset), &addr, false);
968   copy_mem_ref_info (ret, ref);
969   return ret;
970 }
971 
972 /* Dump PARTS to FILE.  */
973 
974 extern void dump_mem_address (FILE *, struct mem_address *);
975 void
976 dump_mem_address (FILE *file, struct mem_address *parts)
977 {
978   if (parts->symbol)
979     {
980       fprintf (file, "symbol: ");
981       print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
982       fprintf (file, "\n");
983     }
984   if (parts->base)
985     {
986       fprintf (file, "base: ");
987       print_generic_expr (file, parts->base, TDF_SLIM);
988       fprintf (file, "\n");
989     }
990   if (parts->index)
991     {
992       fprintf (file, "index: ");
993       print_generic_expr (file, parts->index, TDF_SLIM);
994       fprintf (file, "\n");
995     }
996   if (parts->step)
997     {
998       fprintf (file, "step: ");
999       print_generic_expr (file, parts->step, TDF_SLIM);
1000       fprintf (file, "\n");
1001     }
1002   if (parts->offset)
1003     {
1004       fprintf (file, "offset: ");
1005       print_generic_expr (file, parts->offset, TDF_SLIM);
1006       fprintf (file, "\n");
1007     }
1008 }
1009 
1010 #include "gt-tree-ssa-address.h"
1011