1 /* Memory address lowering and addressing mode selection.
2    Copyright (C) 2004-2013 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
9 later version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
21    that directly map to addressing modes of the target.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "tree.h"
28 #include "tm_p.h"
29 #include "basic-block.h"
30 #include "tree-pretty-print.h"
31 #include "tree-flow.h"
32 #include "dumpfile.h"
33 #include "flags.h"
34 #include "tree-inline.h"
35 #include "tree-affine.h"
36 
37 /* FIXME: We compute address costs using RTL.  */
38 #include "insn-config.h"
39 #include "rtl.h"
40 #include "recog.h"
41 #include "expr.h"
42 #include "ggc.h"
43 #include "target.h"
44 #include "expmed.h"
45 
46 /* TODO -- handling of symbols (according to Richard Hendersons
47    comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
48 
49    There are at least 5 different kinds of symbols that we can run up against:
50 
51      (1) binds_local_p, small data area.
52      (2) binds_local_p, eg local statics
53      (3) !binds_local_p, eg global variables
54      (4) thread local, local_exec
55      (5) thread local, !local_exec
56 
57    Now, (1) won't appear often in an array context, but it certainly can.
58    All you have to do is set -GN high enough, or explicitly mark any
59    random object __attribute__((section (".sdata"))).
60 
61    All of these affect whether or not a symbol is in fact a valid address.
62    The only one tested here is (3).  And that result may very well
63    be incorrect for (4) or (5).
64 
65    An incorrect result here does not cause incorrect results out the
66    back end, because the expander in expr.c validizes the address.  However
67    it would be nice to improve the handling here in order to produce more
68    precise results.  */
69 
/* A "template" for memory address, used to determine whether the address is
   valid for mode.  */

typedef struct GTY (()) mem_addr_template {
  rtx ref;			/* The template.  */
  /* STEP_P and OFF_P point into the RTL of REF itself, hence the
     GTY ((skip)) -- they must not be traced as independent GC roots.  */
  rtx * GTY ((skip)) step_p;	/* The point in template where the step should be
				   filled in.  */
  rtx * GTY ((skip)) off_p;	/* The point in template where the offset should
				   be filled in.  */
} mem_addr_template;
80 
81 
/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;

/* Computes the index into MEM_ADDR_TEMPLATE_LIST from address space AS and
   the presence (non-nullness) of each of the five address components.  */
#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
95 
/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
   STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores pointers
   to where step is placed to *STEP_P and offset to *OFFSET_P.  */

static void
gen_addr_rtx (enum machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  /* The address is accumulated into *ADDR in the shape
     symbol + base + index * step + offset, built from the index part
     outwards.  */
  if (index)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);

	  /* Record where STEP sits so a template user can patch in a
	     different step later.  */
	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  /* const0_rtx as base is a placeholder for "no base"; skip it.  */
  if (base && base != const0_rtx)
    {
      if (*addr)
	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

	  if (offset_p)
	    *offset_p = &XEXP (act_elem, 1);

	  /* symbol + offset is a link-time constant; wrap it in CONST so
	     the rest of the compiler treats it as one.  */
	  if (GET_CODE (symbol) == SYMBOL_REF
	      || GET_CODE (symbol) == LABEL_REF
	      || GET_CODE (symbol) == CONST)
	    act_elem = gen_rtx_CONST (address_mode, act_elem);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  /* The offset is the entire address; *OFFSET_P then points at
	     the *ADDR slot itself.  */
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  /* An address with no components at all is the constant zero.  */
  if (!*addr)
    *addr = const0_rtx;
}
175 
/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  /* A step of one is implicit and needs no multiplication.  */
  if (addr->step && !integer_onep (addr->step))
    st = immed_double_int_const (tree_to_double_int (addr->step), pointer_mode);
  else
    st = NULL_RTX;

  /* Sign-extend the offset from the precision of its type so the RTL
     constant is canonical for POINTER_MODE.  */
  if (addr->offset && !integer_zerop (addr->offset))
    off = immed_double_int_const
	    (tree_to_double_int (addr->offset)
	     .sext (TYPE_PRECISION (TREE_TYPE (addr->offset))),
	     pointer_mode);
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
	vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
	{
	  /* Build the template with placeholder operands: a dummy symbol
	     and fake registers just past the virtual ones, with const0_rtx
	     standing in for step and offset.  The step/offset slots are
	     remembered in the template so they can be patched below.  */
	  sym = (addr->symbol ?
		 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st? const0_rtx : NULL_RTX,
			off? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      /* Patch the actual step and offset into the cached template.  */
      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  /* Pointers and addresses may have different modes on some targets;
     convert the pointer-mode value to a valid address if needed.  */
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}
258 
259 /* Returns address of MEM_REF in TYPE.  */
260 
261 tree
tree_mem_ref_addr(tree type,tree mem_ref)262 tree_mem_ref_addr (tree type, tree mem_ref)
263 {
264   tree addr;
265   tree act_elem;
266   tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
267   tree addr_base = NULL_TREE, addr_off = NULL_TREE;
268 
269   addr_base = fold_convert (type, TMR_BASE (mem_ref));
270 
271   act_elem = TMR_INDEX (mem_ref);
272   if (act_elem)
273     {
274       if (step)
275 	act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
276 				act_elem, step);
277       addr_off = act_elem;
278     }
279 
280   act_elem = TMR_INDEX2 (mem_ref);
281   if (act_elem)
282     {
283       if (addr_off)
284 	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
285 				addr_off, act_elem);
286       else
287 	addr_off = act_elem;
288     }
289 
290   if (offset && !integer_zerop (offset))
291     {
292       if (addr_off)
293 	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
294 				fold_convert (TREE_TYPE (addr_off), offset));
295       else
296 	addr_off = offset;
297     }
298 
299   if (addr_off)
300     addr = fold_build_pointer_plus (addr_base, addr_off);
301   else
302     addr = addr_base;
303 
304   return addr;
305 }
306 
307 /* Returns true if a memory reference in MODE and with parameters given by
308    ADDR is valid on the current target.  */
309 
310 static bool
valid_mem_ref_p(enum machine_mode mode,addr_space_t as,struct mem_address * addr)311 valid_mem_ref_p (enum machine_mode mode, addr_space_t as,
312 		 struct mem_address *addr)
313 {
314   rtx address;
315 
316   address = addr_for_mem_ref (addr, as, false);
317   if (!address)
318     return false;
319 
320   return memory_address_addr_space_p (mode, address, as);
321 }
322 
/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false omit the verification step.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  /* A step of one is implicit; canonicalize it away.  */
  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  /* TMR_OFFSET is always present and carries the alias pointer type.  */
  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  /* Choose the TMR_BASE/TMR_INDEX2 slots: a symbol occupies base and
     pushes any pointer base into index2; a pointer-typed base keeps the
     base slot; otherwise base becomes a zero-pointer placeholder and the
     original base moves to index2.  */
  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (ptr_type_node, 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ???  As IVOPTs does not follow restrictions to where the base
     pointer may point to create a MEM_REF only if we know that
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
374 
375 /* Returns true if OBJ is an object whose address is a link time constant.  */
376 
377 static bool
fixed_address_object_p(tree obj)378 fixed_address_object_p (tree obj)
379 {
380   return (TREE_CODE (obj) == VAR_DECL
381 	  && (TREE_STATIC (obj)
382 	      || DECL_EXTERNAL (obj))
383 	  && ! DECL_DLLIMPORT_P (obj));
384 }
385 
386 /* If ADDR contains an address of object that is a link time constant,
387    move it to PARTS->symbol.  */
388 
389 static void
move_fixed_address_to_symbol(struct mem_address * parts,aff_tree * addr)390 move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
391 {
392   unsigned i;
393   tree val = NULL_TREE;
394 
395   for (i = 0; i < addr->n; i++)
396     {
397       if (!addr->elts[i].coef.is_one ())
398 	continue;
399 
400       val = addr->elts[i].val;
401       if (TREE_CODE (val) == ADDR_EXPR
402 	  && fixed_address_object_p (TREE_OPERAND (val, 0)))
403 	break;
404     }
405 
406   if (i == addr->n)
407     return;
408 
409   parts->symbol = val;
410   aff_combination_remove_elt (addr, i);
411 }
412 
413 /* If ADDR contains an instance of BASE_HINT, move it to PARTS->base.  */
414 
415 static void
move_hint_to_base(tree type,struct mem_address * parts,tree base_hint,aff_tree * addr)416 move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
417 		   aff_tree *addr)
418 {
419   unsigned i;
420   tree val = NULL_TREE;
421   int qual;
422 
423   for (i = 0; i < addr->n; i++)
424     {
425       if (!addr->elts[i].coef.is_one ())
426 	continue;
427 
428       val = addr->elts[i].val;
429       if (operand_equal_p (val, base_hint, 0))
430 	break;
431     }
432 
433   if (i == addr->n)
434     return;
435 
436   /* Cast value to appropriate pointer type.  We cannot use a pointer
437      to TYPE directly, as the back-end will assume registers of pointer
438      type are aligned, and just the base itself may not actually be.
439      We use void pointer to the type's address space instead.  */
440   qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
441   type = build_qualified_type (void_type_node, qual);
442   parts->base = fold_convert (build_pointer_type (type), val);
443   aff_combination_remove_elt (addr, i);
444 }
445 
446 /* If ADDR contains an address of a dereferenced pointer, move it to
447    PARTS->base.  */
448 
449 static void
move_pointer_to_base(struct mem_address * parts,aff_tree * addr)450 move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
451 {
452   unsigned i;
453   tree val = NULL_TREE;
454 
455   for (i = 0; i < addr->n; i++)
456     {
457       if (!addr->elts[i].coef.is_one ())
458 	continue;
459 
460       val = addr->elts[i].val;
461       if (POINTER_TYPE_P (TREE_TYPE (val)))
462 	break;
463     }
464 
465   if (i == addr->n)
466     return;
467 
468   parts->base = val;
469   aff_combination_remove_elt (addr, i);
470 }
471 
472 /* Moves the loop variant part V in linear address ADDR to be the index
473    of PARTS.  */
474 
475 static void
move_variant_to_index(struct mem_address * parts,aff_tree * addr,tree v)476 move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
477 {
478   unsigned i;
479   tree val = NULL_TREE;
480 
481   gcc_assert (!parts->index);
482   for (i = 0; i < addr->n; i++)
483     {
484       val = addr->elts[i].val;
485       if (operand_equal_p (val, v, 0))
486 	break;
487     }
488 
489   if (i == addr->n)
490     return;
491 
492   parts->index = fold_convert (sizetype, val);
493   parts->step = double_int_to_tree (sizetype, addr->elts[i].coef);
494   aff_combination_remove_elt (addr, i);
495 }
496 
497 /* Adds ELT to PARTS.  */
498 
499 static void
add_to_parts(struct mem_address * parts,tree elt)500 add_to_parts (struct mem_address *parts, tree elt)
501 {
502   tree type;
503 
504   if (!parts->index)
505     {
506       parts->index = fold_convert (sizetype, elt);
507       return;
508     }
509 
510   if (!parts->base)
511     {
512       parts->base = elt;
513       return;
514     }
515 
516   /* Add ELT to base.  */
517   type = TREE_TYPE (parts->base);
518   if (POINTER_TYPE_P (type))
519     parts->base = fold_build_pointer_plus (parts->base, elt);
520   else
521     parts->base = fold_build2 (PLUS_EXPR, type,
522 			       parts->base, elt);
523 }
524 
/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and move the corresponding
   element(s) to PARTS.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  double_int best_mult, amult, amult_neg;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  /* Pass 1: among coefficients the addressing mode can express, pick the
     one whose explicit multiplication would cost the most -- folding that
     one into the address saves the most.  */
  best_mult = double_int_zero;
  for (i = 0; i < addr->n; i++)
    {
      if (!addr->elts[i].coef.fits_shwi ())
	continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = addr->elts[i].coef;
	}
    }

  /* Nothing worth moving.  */
  if (!best_mult_cost)
    return;

  /* Pass 2: collect elements multiplied by best_mult (elements with the
     negated coefficient are subtracted); all other elements are compacted
     in place, with J trailing I.  */
  for (i = j = 0; i < addr->n; i++)
    {
      amult = addr->elts[i].coef;
      amult_neg = double_int_ext_for_comb (-amult, addr);

      if (amult == best_mult)
	op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
	op_code = MINUS_EXPR;
      else
	{
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  /* The collected sum becomes the index, scaled by the common
     coefficient as the step.  */
  parts->index = mult_elt;
  parts->step = double_int_to_tree (sizetype, best_mult);
}
595 
/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but single
   register in address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
	       tree base_hint, struct mem_address *parts,
               bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  /* The constant term of the affine combination becomes the offset.  */
  if (!addr->offset.is_zero ())
    parts->offset = double_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* No need to do address parts reassociation if the number of parts
     is <= 2 -- in that case, no loop invariant code motion can be
     exposed.  */

  if (!base_hint && (addr->n > 2))
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication
     to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Try to find a base of the reference.  Since at the moment
     there is no reliable way how to distinguish between pointer and its
     offset, this is just a guess.  */
  if (!parts->symbol && base_hint)
    move_hint_to_base (type, parts, base_hint, addr);
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (!addr->elts[i].coef.is_one ())
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    double_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  /* ADDR->rest holds any remainder of the combination that did not fit
     into the tracked elements.  */
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
661 
/* Force the PARTS to register.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  /* The base must satisfy is_gimple_mem_ref_addr; any statements needed
     to compute it are inserted before GSI.  */
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
					    is_gimple_mem_ref_addr, NULL_TREE,
					    true, GSI_SAME_STMT);
  /* The index only needs to be a simple gimple value.  */
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
					     true, NULL_TREE,
					     true, GSI_SAME_STMT);
}
676 
/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
   computations are emitted in front of GSI.  TYPE is the mode
   of created memory reference. IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non NULL if IV_CAND comes from a base address
   object.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  Each of
     the steps below folds one component into another, then retries
     creating a (validated) memory reference.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
				fold_build2 (MULT_EXPR, sizetype,
					     parts.index, parts.step),
				true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = parts.symbol;
      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to base, eventually forcing it to register.  */
      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p
				(sizetype, TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (tmp, parts.base),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	    }
	  else
	    {
	      /* Free index slot: demote the old base to index and make
		 the symbol the base.  */
	      parts.index = parts.base;
	      parts.base = tmp;
	    }
	}
      else
	parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.index),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.offset),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.offset;

      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
791 
792 /* Copies components of the address from OP to ADDR.  */
793 
794 void
get_address_description(tree op,struct mem_address * addr)795 get_address_description (tree op, struct mem_address *addr)
796 {
797   if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
798     {
799       addr->symbol = TMR_BASE (op);
800       addr->base = TMR_INDEX2 (op);
801     }
802   else
803     {
804       addr->symbol = NULL_TREE;
805       if (TMR_INDEX2 (op))
806 	{
807 	  gcc_assert (integer_zerop (TMR_BASE (op)));
808 	  addr->base = TMR_INDEX2 (op);
809 	}
810       else
811 	addr->base = TMR_BASE (op);
812     }
813   addr->index = TMR_INDEX (op);
814   addr->step = TMR_STEP (op);
815   addr->offset = TMR_OFFSET (op);
816 }
817 
/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  Only do so if the new base does not
     already carry pointer info.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
	;
      else if ((TREE_CODE (base) == MEM_REF
		|| TREE_CODE (base) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	       && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  struct ptr_info_def *new_pi;
	  unsigned int align, misalign;

	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
	  /* We have to be careful about transferring alignment information.
	     The known misalignment only stays meaningful if the offset
	     shift between the refs is constant and any TMR step is a
	     multiple of the alignment.  */
	  if (get_ptr_info_alignment (new_pi, &align, &misalign)
	      && TREE_CODE (old_ref) == MEM_REF
	      && !(TREE_CODE (new_ref) == TARGET_MEM_REF
		   && (TMR_INDEX2 (new_ref)
		       || (TMR_STEP (new_ref)
			   && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
			       < align)))))
	    {
	      /* Adjust the recorded misalignment by the difference of the
		 constant offsets (low word suffices modulo alignment).  */
	      unsigned int inc = (mem_ref_offset (old_ref)
				  - mem_ref_offset (new_ref)).low;
	      adjust_ptr_info_misalignment (new_pi, inc);
	    }
	  else
	    mark_ptr_info_alignment_unknown (new_pi);
	}
      else if (TREE_CODE (base) == VAR_DECL
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  /* A decl base gives an exact points-to solution.  */
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }
}
879 
/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  /* A constant non-zero base folds into the offset.  */
  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  /* A symbol of the form &MEM[ptr + cst] is split back into the pointer
     and its constant offset.  */
  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      /* &a.b.c -> &a plus the component's constant byte offset.
	 NOTE(review): get_addr_base_and_unit_offset returns NULL_TREE for
	 a variable offset; this assumes symbols here only ever have
	 constant-offset components -- confirm against how addr.symbol is
	 produced (fixed_address_object_p decls).  */
      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
		         (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  /* A constant index (times the step, if any) folds into the offset.  */
  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     if it is valid in this for on the target - the propagation result
     wouldn't be anyway.  With verify == false create_mem_ref_raw never
     returns NULL_TREE.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
			        TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
955 
956 /* Dump PARTS to FILE.  */
957 
958 extern void dump_mem_address (FILE *, struct mem_address *);
959 void
dump_mem_address(FILE * file,struct mem_address * parts)960 dump_mem_address (FILE *file, struct mem_address *parts)
961 {
962   if (parts->symbol)
963     {
964       fprintf (file, "symbol: ");
965       print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
966       fprintf (file, "\n");
967     }
968   if (parts->base)
969     {
970       fprintf (file, "base: ");
971       print_generic_expr (file, parts->base, TDF_SLIM);
972       fprintf (file, "\n");
973     }
974   if (parts->index)
975     {
976       fprintf (file, "index: ");
977       print_generic_expr (file, parts->index, TDF_SLIM);
978       fprintf (file, "\n");
979     }
980   if (parts->step)
981     {
982       fprintf (file, "step: ");
983       print_generic_expr (file, parts->step, TDF_SLIM);
984       fprintf (file, "\n");
985     }
986   if (parts->offset)
987     {
988       fprintf (file, "offset: ");
989       print_generic_expr (file, parts->offset, TDF_SLIM);
990       fprintf (file, "\n");
991     }
992 }
993 
994 #include "gt-tree-ssa-address.h"
995