/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Utility functions for manipulating TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "insn-config.h"
#include "recog.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-ssa-loop-ivopts.h"
#include "expr.h"
#include "tree-dfa.h"
#include "dumpfile.h"
#include "tree-affine.h"

/* FIXME: We compute address costs using RTL.  */
#include "tree-ssa-address.h"

/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, e.g. local statics
     (3) !binds_local_p, e.g. global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results out of the
   back end, because the expander in expr.c validates the address.  However
   it would be nice to improve the handling here in order to produce more
   precise results.  */

/* A "template" for a memory address, used to determine whether the address
   is valid for a mode.  */

struct GTY (()) mem_addr_template {
  rtx ref;			/* The template.  */
  rtx * GTY ((skip)) step_p;	/* The point in template where the step should be
				   filled in.  */
  rtx * GTY ((skip)) off_p;	/* The point in template where the offset should
				   be filled in.  */
};


/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;

#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
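
/* As a small worked example (illustrative only): for the generic address
   space 0 with just BASE and OFFSET present,

     TEMPL_IDX (0, NULL, base, NULL, NULL, off)
       == (0 << 5) | (0 << 4) | (1 << 3) | (0 << 2) | (0 << 1) | 1
       == 9,

   so all such addresses share the template stored at index 9 of
   mem_addr_template_list.  */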

/* Stores the address for a memory reference with parameters SYMBOL, BASE,
   INDEX, STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores
   a pointer to where the step is placed to *STEP_P and to where the offset
   is placed to *OFFSET_P.  */

static void
gen_addr_rtx (machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);

	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  if (base && base != const0_rtx)
    {
      if (*addr)
	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

	  if (offset_p)
	    *offset_p = &XEXP (act_elem, 1);

	  if (GET_CODE (symbol) == SYMBOL_REF
	      || GET_CODE (symbol) == LABEL_REF
	      || GET_CODE (symbol) == CONST)
	    act_elem = gen_rtx_CONST (address_mode, act_elem);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  if (!*addr)
    *addr = const0_rtx;
}
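
/* For illustration (a sketch, not used by the code): when all of SYMBOL,
   BASE, INDEX, STEP and OFFSET are present and SYMBOL is a SYMBOL_REF,
   the function above builds an address of roughly the shape

     (plus (plus base (mult index step))
	   (const (plus symbol offset)))

   (the operand order of the commutative operations may be canonicalized),
   with *STEP_P pointing at the step operand of the MULT and *OFFSET_P at
   the offset inside the CONST.  This is what allows addr_for_mem_ref below
   to patch the cached templates in place.  */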

/* Description of a memory address.  */

struct mem_address
{
  tree symbol, base, index, step, offset;
};

/* Returns the address for a TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (addr->step, pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    {
      offset_int dc = offset_int::from (addr->offset, SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
	vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
	{
	  sym = (addr->symbol ?
		 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st ? const0_rtx : NULL_RTX,
			off ? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}
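
/* A sketch of the !REALLY_EXPAND path above (illustrative values): for an
   address with only a base and an offset in address space 0, the cached
   entry at TEMPL_IDX == 9 holds an RTX of the form

     (plus (reg) (const_int 0))

   built from a fake register, and each query merely stores the actual
   offset through templ->off_p before returning templ->ref.  */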

/* Implement addr_for_mem_ref() directly from a tree, which avoids exporting
   the mem_address structure.  */

rtx
addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
{
  struct mem_address addr;
  get_address_description (exp, &addr);
  return addr_for_mem_ref (&addr, as, really_expand);
}

/* Returns the address of MEM_REF in TYPE.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
	act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
				act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
				addr_off, act_elem);
      else
	addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
				fold_convert (TREE_TYPE (addr_off), offset));
      else
	addr_off = offset;
    }

  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}
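
/* A purely illustrative example: for a TARGET_MEM_REF with base B, index I,
   step 4, offset 16 and no second index, the address computed above is
   equivalent to

     (type) B p+ (I * 4 + 16)

   i.e. a POINTER_PLUS_EXPR of the converted base and the folded
   index/offset part.  */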

/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target.  */

static bool
valid_mem_ref_p (machine_mode mode, addr_space_t as,
		 struct mem_address *addr)
{
  rtx address;

  address = addr_for_mem_ref (addr, as, false);
  if (!address)
    return false;

  return memory_address_addr_space_p (mode, address, as);
}

/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false, omit the verification step.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (build_pointer_type (type), 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ???  As IVOPTs does not follow the restrictions on where the base
     pointer may point to, create a MEM_REF only if we know that the
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
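
/* For instance (an illustrative case): if ADDR carries only the symbol &a
   and a constant offset 8, the result above simplifies to the equivalent of
   MEM_REF <&a, 8>; as soon as an index or a second base component is
   present, the five-operand TARGET_MEM_REF form is used instead.  */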

/* Returns true if OBJ is an object whose address is a link-time constant.  */

static bool
fixed_address_object_p (tree obj)
{
  return (TREE_CODE (obj) == VAR_DECL
	  && (TREE_STATIC (obj)
	      || DECL_EXTERNAL (obj))
	  && ! DECL_DLLIMPORT_P (obj));
}

/* If ADDR contains an address of an object that is a link-time constant,
   move it to PARTS->symbol.  */

static void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
	  && fixed_address_object_p (TREE_OPERAND (val, 0)))
	break;
    }

  if (i == addr->n)
    return;

  parts->symbol = val;
  aff_combination_remove_elt (addr, i);
}

/* If ADDR contains an instance of BASE_HINT, move it to PARTS->base.  */

static void
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
		   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
	break;
    }

  if (i == addr->n)
    return;

  /* Cast the value to an appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use a void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
}

/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
	break;
    }

  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}

/* Moves the loop-variant part V in the linear address ADDR to be the index
   of PARTS.  */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
	break;
    }

  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}

/* Adds ELT to PARTS.  */

static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build_pointer_plus (parts->base, elt);
  else
    parts->base = fold_build2 (PLUS_EXPR, type,
			       parts->base, elt);
}

/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and moves the corresponding
   element(s) to PARTS.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  offset_int best_mult = 0;
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
	continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
	}
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      if (amult == best_mult)
	op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
	op_code = MINUS_EXPR;
      else
	{
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}
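
/* A worked example (assuming the target allows a multiplier of 4 in
   addresses and that it is the most expensive one found in ADDR): for an
   affine combination p + 4*i + 4*j, the two elements with coefficient 4
   are collected so that PARTS->index becomes i + j (in sizetype),
   PARTS->step becomes 4, and only the element p remains in ADDR.  */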

/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as the base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but a single
   register in an address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
	       tree base_hint, struct mem_address *parts,
               bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (addr->offset != 0)
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* No need to do address parts reassociation if the number of parts
     is <= 2 -- in that case, no loop invariant code motion can be
     exposed.  */

  if (!base_hint && (addr->n > 2))
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication
     to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Try to find a base of the reference.  Since at the moment
     there is no reliable way to distinguish between a pointer and its
     offset, this is just a guess.  */
  if (!parts->symbol && base_hint)
    move_hint_to_base (type, parts, base_hint, addr);
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
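
/* As an illustration with made-up values: for an access to *(p + 4*i + 32),
   the decomposition above typically yields PARTS->base = p,
   PARTS->index = (sizetype) i, PARTS->step = 4 and PARTS->offset = 32,
   provided the multiplication by 4 is representable in the target's
   addressing modes.  */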

/* Force the PARTS to registers.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
					    is_gimple_mem_ref_addr, NULL_TREE,
					    true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
					     true, NULL_TREE,
					     true, GSI_SAME_STMT);
}

/* Creates and returns a TARGET_MEM_REF for address ADDR.  Any necessary
   computations are emitted in front of GSI.  TYPE is the type of the
   created memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non-NULL if IV_CAND comes from a base address
   object.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
				fold_build2 (MULT_EXPR, sizetype,
					     parts.index, parts.step),
				true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = parts.symbol;
      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to base, possibly forcing it to a register.  */
      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p
				(sizetype, TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (tmp, parts.base),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	    }
	  else
	    {
	      parts.index = parts.base;
	      parts.base = tmp;
	    }
	}
      else
	parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.index),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.offset),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.offset;

      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}

/* Copies components of the address from OP to ADDR.  */

void
get_address_description (tree op, struct mem_address *addr)
{
  if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
    {
      addr->symbol = TMR_BASE (op);
      addr->base = TMR_INDEX2 (op);
    }
  else
    {
      addr->symbol = NULL_TREE;
      if (TMR_INDEX2 (op))
	{
	  gcc_assert (integer_zerop (TMR_BASE (op)));
	  addr->base = TMR_INDEX2 (op);
	}
      else
	addr->base = TMR_BASE (op);
    }
  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
}

/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
	;
      else if ((TREE_CODE (base) == MEM_REF
		|| TREE_CODE (base) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	       && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  struct ptr_info_def *new_pi;
	  unsigned int align, misalign;

	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
	  /* We have to be careful about transferring alignment information.  */
	  if (get_ptr_info_alignment (new_pi, &align, &misalign)
	      && TREE_CODE (old_ref) == MEM_REF
	      && !(TREE_CODE (new_ref) == TARGET_MEM_REF
		   && (TMR_INDEX2 (new_ref)
		       || (TMR_STEP (new_ref)
			   && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
			       < align)))))
	    {
	      unsigned int inc = (mem_ref_offset (old_ref).to_short_addr ()
				  - mem_ref_offset (new_ref).to_short_addr ());
	      adjust_ptr_info_misalignment (new_pi, inc);
	    }
	  else
	    mark_ptr_info_alignment_unknown (new_pi);
	}
      else if (TREE_CODE (base) == VAR_DECL
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }
}

/* Moves constants in TARGET_MEM_REF REF to the offset.  Returns the new
   target mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
		         (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target - the propagation
     result would not be valid anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
			        TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
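
/* Illustrative example: a TARGET_MEM_REF whose index has been propagated to
   the constant 3 with step 8 gets 24 folded into its constant offset, and
   the index and step are dropped, so later passes see the simpler
   constant-offset form.  */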

/* Dump PARTS to FILE.  */

extern void dump_mem_address (FILE *, struct mem_address *);
void
dump_mem_address (FILE *file, struct mem_address *parts)
{
  if (parts->symbol)
    {
      fprintf (file, "symbol: ");
      print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->base)
    {
      fprintf (file, "base: ");
      print_generic_expr (file, parts->base, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->index)
    {
      fprintf (file, "index: ");
      print_generic_expr (file, parts->index, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->step)
    {
      fprintf (file, "step: ");
      print_generic_expr (file, parts->step, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->offset)
    {
      fprintf (file, "offset: ");
      print_generic_expr (file, parts->offset, TDF_SLIM);
      fprintf (file, "\n");
    }
}

#include "gt-tree-ssa-address.h"