/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Utility functions for manipulating TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target.  */
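
/* As an illustration, a TARGET_MEM_REF represents the address

     TMR_BASE + TMR_INDEX * TMR_STEP + TMR_INDEX2 + TMR_OFFSET

   with absent components treated as zero (or as one, for the step); see
   tree_mem_ref_addr below.  For example, an access a[i] into "int a[100]"
   may be represented with base &a, index i and step 4, provided the
   target has such an addressing mode.  */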

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "memmodel.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "recog.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-ssa-loop-ivopts.h"
#include "expr.h"
#include "tree-dfa.h"
#include "dumpfile.h"
#include "tree-affine.h"
#include "gimplify.h"

/* FIXME: We compute address costs using RTL.  */
#include "tree-ssa-address.h"

/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, e.g. local statics
     (3) !binds_local_p, e.g. global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results in the
   back end, because the expander in expr.c validates the address.  However,
   it would be nice to improve the handling here in order to produce more
   precise results.  */

/* A "template" for a memory address, used to determine whether the address
   is valid for a given mode.  */

struct GTY (()) mem_addr_template {
  rtx ref;			/* The template.  */
  rtx * GTY ((skip)) step_p;	/* The point in the template where the step
				   should be filled in.  */
  rtx * GTY ((skip)) off_p;	/* The point in the template where the offset
				   should be filled in.  */
};


/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;

#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
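
/* For instance, in the generic address space, an address with just a base,
   an index and an offset (no symbol, no step) gets the index

     TEMPL_IDX (0, NULL, base, index, NULL, off)
       == (1 << 3) | (1 << 2) | 1 == 13.  */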

/* Stores the address for a memory reference with parameters SYMBOL, BASE,
   INDEX, STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores
   pointers to the locations of the step and the offset within the template
   to *STEP_P and *OFFSET_P, respectively.  */

static void
gen_addr_rtx (machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index && index != const0_rtx)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);

	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  if (base && base != const0_rtx)
    {
      if (*addr)
	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

	  if (offset_p)
	    *offset_p = &XEXP (act_elem, 1);

	  if (GET_CODE (symbol) == SYMBOL_REF
	      || GET_CODE (symbol) == LABEL_REF
	      || GET_CODE (symbol) == CONST)
	    act_elem = gen_rtx_CONST (address_mode, act_elem);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  if (!*addr)
    *addr = const0_rtx;
}
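
/* With every component present and SYMBOL a SYMBOL_REF, the RTX built
   above has (modulo canonicalization of the PLUS operands by
   simplify_gen_binary) the shape

     (plus (plus base (mult index step))
	   (const (plus symbol offset)))

   which is the most general form the addressing-mode templates need to
   describe.  */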

/* Returns the address for a TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
  scalar_int_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (wi::to_wide (addr->step), pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    {
      poly_offset_int dc
	= poly_offset_int::from (wi::to_poly_wide (addr->offset), SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
	vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
	{
	  sym = (addr->symbol
		 ? gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base
		 ? gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index
		 ? gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st ? const0_rtx : NULL_RTX,
			off ? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  /* addr->base could be an SSA_NAME that was set to a constant value.  The
     call to expand_expr may expose that constant.  If so, fold the value
     into OFF and clear BSE.  Otherwise we may later try to pull a mode from
     BSE to generate a REG, which won't work with constants because they
     are modeless.  */
  if (bse && GET_CODE (bse) == CONST_INT)
    {
      if (off)
	off = simplify_gen_binary (PLUS, pointer_mode, bse, off);
      else
	off = bse;
      gcc_assert (GET_CODE (off) == CONST_INT);
      bse = NULL_RTX;
    }
  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}

/* Implements addr_for_mem_ref () directly from a tree, which avoids exporting
   the mem_address structure.  */

rtx
addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
{
  struct mem_address addr;
  get_address_description (exp, &addr);
  return addr_for_mem_ref (&addr, as, really_expand);
}

/* Returns the address of MEM_REF in TYPE.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
	act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
				act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
				addr_off, act_elem);
      else
	addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
				fold_convert (TREE_TYPE (addr_off), offset));
      else
	addr_off = offset;
    }

  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}
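
/* For example, for a TARGET_MEM_REF with base B, index I, step 4 and
   offset 16, the above builds (type) B p+ (I * 4 + 16): the variable and
   constant parts of the address are accumulated first and added to the
   base with a single POINTER_PLUS_EXPR at the end.  */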

/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target.  */

bool
valid_mem_ref_p (machine_mode mode, addr_space_t as,
		 struct mem_address *addr)
{
  rtx address;

  address = addr_for_mem_ref (addr, as, false);
  if (!address)
    return false;

  return memory_address_addr_space_p (mode, address, as);
}

/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false omit the verification step.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (build_pointer_type (type), 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ???  As IVOPTs does not follow restrictions on where the base
     pointer may point, create a MEM_REF only if we know that the
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
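
/* For instance, when the only parts are the symbol &a and a constant
   offset, the fast path above degrades gracefully to the plain MEM_REF
   [&a + offset] rather than building a TARGET_MEM_REF with empty index
   operands.  */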

/* Returns true if OBJ is an object whose address is a link time constant.  */

static bool
fixed_address_object_p (tree obj)
{
  return (VAR_P (obj)
	  && (TREE_STATIC (obj) || DECL_EXTERNAL (obj))
	  && ! DECL_DLLIMPORT_P (obj));
}

/* If ADDR contains an address of an object that is a link time constant,
   move it to PARTS->symbol.  */

void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
	  && fixed_address_object_p (TREE_OPERAND (val, 0)))
	break;
    }

  if (i == addr->n)
    return;

  parts->symbol = val;
  aff_combination_remove_elt (addr, i);
}

/* Returns true if ADDR contains an instance of BASE_HINT, in which case
   the instance is moved to PARTS->base.  */

static bool
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
		   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
	break;
    }

  if (i == addr->n)
    return false;

  /* Cast the value to an appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back-end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use a void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
  return true;
}

/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
	break;
    }

  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}

/* Moves the loop-variant part V in the linear address ADDR to be the index
   of PARTS.  */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
	break;
    }

  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}

/* Adds ELT to PARTS.  */

static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build_pointer_plus (parts->base, elt);
  else
    parts->base = fold_build2 (PLUS_EXPR, type, parts->base, elt);
}

/* Returns true if multiplying by RATIO is allowed in an address.  Test the
   validity for a memory reference accessing memory of mode MODE in address
   space AS.  */

static bool
multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, machine_mode mode,
				 addr_space_t as)
{
#define MAX_RATIO 128
  unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mode;
  static vec<sbitmap> valid_mult_list;
  sbitmap valid_mult;

  if (data_index >= valid_mult_list.length ())
    valid_mult_list.safe_grow_cleared (data_index + 1);

  valid_mult = valid_mult_list[data_index];
  if (!valid_mult)
    {
      machine_mode address_mode = targetm.addr_space.address_mode (as);
      rtx reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
      rtx reg2 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2);
      rtx addr, scaled;
      HOST_WIDE_INT i;

      valid_mult = sbitmap_alloc (2 * MAX_RATIO + 1);
      bitmap_clear (valid_mult);
      scaled = gen_rtx_fmt_ee (MULT, address_mode, reg1, NULL_RTX);
      addr = gen_rtx_fmt_ee (PLUS, address_mode, scaled, reg2);
      for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
	{
	  XEXP (scaled, 1) = gen_int_mode (i, address_mode);
	  if (memory_address_addr_space_p (mode, addr, as)
	      || memory_address_addr_space_p (mode, scaled, as))
	    bitmap_set_bit (valid_mult, i + MAX_RATIO);
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "  allowed multipliers:");
	  for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
	    if (bitmap_bit_p (valid_mult, i + MAX_RATIO))
	      fprintf (dump_file, " %d", (int) i);
	  fprintf (dump_file, "\n");
	  fprintf (dump_file, "\n");
	}

      valid_mult_list[data_index] = valid_mult;
    }

  if (ratio > MAX_RATIO || ratio < -MAX_RATIO)
    return false;

  return bitmap_bit_p (valid_mult, ratio + MAX_RATIO);
}
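
/* For illustration: on a target such as x86, where scaled addressing
   accepts the factors 1, 2, 4 and 8, the probe over [-MAX_RATIO, MAX_RATIO]
   above would typically leave just those bits set in the cached bitmap.  */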

/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and moves the corresponding
   element(s) to PARTS.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  offset_int best_mult = 0;
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
	continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
	}
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      if (amult == best_mult)
	op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
	op_code = MINUS_EXPR;
      else
	{
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}
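
/* For example, if ADDR is 4*i + 4*j - 4*k + m and multiplying by 4 is the
   most expensive multiplication allowed in an address, the elements with
   coefficients +-4 are collected into PARTS->index = i + j - k with
   PARTS->step = 4, leaving only m in ADDR.  */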

/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as the base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.  Stores true in *VAR_IN_BASE if the variant
   part of the address is split into PARTS->base.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but a single
   register in an address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand, tree base_hint,
	       struct mem_address *parts, bool *var_in_base, bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (maybe_ne (addr->offset, 0))
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* Since at the moment there is no reliable way to distinguish between
     a pointer and its offset, we guess whether the variant part is the
     pointer.  */
  *var_in_base = (base_hint != NULL && parts->symbol == NULL);
  if (*var_in_base)
    *var_in_base = move_hint_to_base (type, parts, base_hint, addr);
  else
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Move the pointer into base.  */
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
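
/* As an illustration, for a static array "int g[100]" and the affine
   address &g + 4*i + 16 with IV_CAND = i and no BASE_HINT, the split
   produces PARTS->symbol = &g, PARTS->index = i, PARTS->step = 4 and
   PARTS->offset = 16.  */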

/* Forces the components of PARTS into a shape valid as TARGET_MEM_REF
   operands, creating registers where necessary.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
					    is_gimple_mem_ref_addr, NULL_TREE,
					    true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
					     true, NULL_TREE,
					     true, GSI_SAME_STMT);
}

/* Returns true if the OFFSET in PARTS is the only thing that is making
   it an invalid address for type TYPE.  */

static bool
mem_ref_valid_without_offset_p (tree type, mem_address parts)
{
  if (!parts.base)
    parts.base = parts.offset;
  parts.offset = NULL_TREE;
  return valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), &parts);
}

/* Fold PARTS->offset into PARTS->base, so that there is no longer
   a separate offset.  Emit any new instructions before GSI.  */

static void
add_offset_to_base (gimple_stmt_iterator *gsi, mem_address *parts)
{
  tree tmp = parts->offset;
  if (parts->base)
    {
      tmp = fold_build_pointer_plus (parts->base, tmp);
      tmp = force_gimple_operand_gsi_1 (gsi, tmp, is_gimple_mem_ref_addr,
					NULL_TREE, true, GSI_SAME_STMT);
    }
  parts->base = tmp;
  parts->offset = NULL_TREE;
}

/* Creates and returns a TARGET_MEM_REF for address ADDR.  Any necessary
   computations are emitted in front of GSI.  TYPE is the type of the
   created memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non-NULL if IV_CAND comes from a base address
   object.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  bool var_in_base;
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, &var_in_base, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  /* Merge the symbol into the other parts.  */
  if (parts.symbol)
    {
      tmp = parts.symbol;
      parts.symbol = NULL_TREE;
      gcc_assert (is_gimple_val (tmp));

      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p (sizetype,
						 TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      /* Add the symbol to base, forcing it into a register
		 if necessary.  */
	      tmp = fold_build_pointer_plus (tmp, parts.base);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  else
	    {
	      /* Move base to index, then move the symbol to base.  */
	      parts.index = parts.base;
	    }
	  parts.base = tmp;
	}
      else
	parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Move the multiplication to index by transforming the address expression:
       [... + index << step + ...]
     into:
       index' = index << step;
       [... + index' + ...].  */
  if (parts.step && !integer_onep (parts.step))
    {
      gcc_assert (parts.index);
      if (parts.offset && mem_ref_valid_without_offset_p (type, parts))
	{
	  add_offset_to_base (gsi, &parts);
	  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
	  gcc_assert (mem_ref);
	  return mem_ref;
	}

      parts.index = force_gimple_operand_gsi (gsi,
				fold_build2 (MULT_EXPR, sizetype,
					     parts.index, parts.step),
				true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Add the offset to the invariant part by transforming the address
     expression:
       [base + index + offset]
     into:
       base' = base + offset;
       [base' + index]
     or:
       index' = index + offset;
       [base + index']
     depending on which one is invariant.  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      tree old_base = unshare_expr (parts.base);
      tree old_index = unshare_expr (parts.index);
      tree old_offset = unshare_expr (parts.offset);

      tmp = parts.offset;
      parts.offset = NULL_TREE;
      /* Add the offset to the invariant part.  */
      if (!var_in_base)
	{
	  if (parts.base)
	    {
	      tmp = fold_build_pointer_plus (parts.base, tmp);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  parts.base = tmp;
	}
      else
	{
	  if (parts.index)
	    {
	      tmp = fold_build_pointer_plus (parts.index, tmp);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  parts.index = tmp;
	}

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;

      /* Restore parts.base, index and offset so that we can check if
	 the [base + offset] addressing mode is supported in the next step.
	 This is necessary for targets that only support [base + offset],
	 but not [base + index], addressing modes.  */
      parts.base = old_base;
      parts.index = old_index;
      parts.offset = old_offset;
    }

  /* Transform [base + index + ...] into:
       base' = base + index;
       [base' + ...].  */
  if (parts.index)
    {
      tmp = parts.index;
      parts.index = NULL_TREE;
      /* Add index to base.  */
      if (parts.base)
	{
	  tmp = fold_build_pointer_plus (parts.base, tmp);
	  tmp = force_gimple_operand_gsi_1 (gsi, tmp,
					    is_gimple_mem_ref_addr,
					    NULL_TREE, true, GSI_SAME_STMT);
	}
      parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Transform [base + offset] into:
       base' = base + offset;
       [base'].  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      add_offset_to_base (gsi, &parts);
      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}

/* Copies the components of the address from OP to ADDR.  */

void
get_address_description (tree op, struct mem_address *addr)
{
  if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
    {
      addr->symbol = TMR_BASE (op);
      addr->base = TMR_INDEX2 (op);
    }
  else
    {
      addr->symbol = NULL_TREE;
      if (TMR_INDEX2 (op))
	{
	  gcc_assert (integer_zerop (TMR_BASE (op)));
	  addr->base = TMR_INDEX2 (op);
	}
      else
	addr->base = TMR_BASE (op);
    }
  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
}

/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
	;
      else if ((TREE_CODE (base) == MEM_REF
		|| TREE_CODE (base) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	       && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  struct ptr_info_def *new_pi;
	  unsigned int align, misalign;

	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
	  /* We have to be careful about transferring alignment information.  */
	  if (get_ptr_info_alignment (new_pi, &align, &misalign)
	      && TREE_CODE (old_ref) == MEM_REF
	      && !(TREE_CODE (new_ref) == TARGET_MEM_REF
		   && (TMR_INDEX2 (new_ref)
		       /* TODO: Below conditions can be relaxed if TMR_INDEX
			  is an induction variable and its initial value and
			  step are aligned.  */
		       || (TMR_INDEX (new_ref) && !TMR_STEP (new_ref))
		       || (TMR_STEP (new_ref)
			   && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
			       < align)))))
	    {
	      poly_uint64 inc = (mem_ref_offset (old_ref)
				 - mem_ref_offset (new_ref)).force_uhwi ();
	      adjust_ptr_info_misalignment (new_pi, inc);
	    }
	  else
	    mark_ptr_info_alignment_unknown (new_pi);
	}
      else if (VAR_P (base)
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }
}

/* Moves constants in the TARGET_MEM_REF REF to the offset.  Returns the
   new target mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      poly_int64 offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
		         (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target - the propagation
     result wouldn't be valid anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
			        TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
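
/* For example, a TARGET_MEM_REF whose index has been propagated to the
   constant 3 with a step of 4 is folded by adding 12 to the offset and
   clearing the index and step.  */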

/* Return the preferred index scale factor for accessing memory of mode
   MEM_MODE in the address space of pointer BASE.  Assume that we're
   optimizing for speed if SPEED is true and for size otherwise.  */

unsigned int
preferred_mem_scale_factor (tree base, machine_mode mem_mode,
			    bool speed)
{
  /* For BLKmode, we can't do anything so return 1.  */
  if (mem_mode == BLKmode)
    return 1;

  struct mem_address parts = {};
  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
  unsigned int fact = GET_MODE_UNIT_SIZE (mem_mode);

  /* Addressing mode "base + index".  */
  parts.index = integer_one_node;
  parts.base = integer_one_node;
  rtx addr = addr_for_mem_ref (&parts, as, false);
  unsigned cost = address_cost (addr, mem_mode, as, speed);

  /* Addressing mode "base + index << scale".  */
  parts.step = wide_int_to_tree (sizetype, fact);
  addr = addr_for_mem_ref (&parts, as, false);
  unsigned new_cost = address_cost (addr, mem_mode, as, speed);

  /* Compare the cost of an address with an unscaled index to the cost
     of one with a scaled index, and return the scale factor if scaling
     is cheaper.  */
  if (new_cost < cost)
    return GET_MODE_UNIT_SIZE (mem_mode);
  return 1;
}
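
/* E.g. for 4-byte accesses on a target where [base + index*4] is supported
   and costs less than [base + index], this returns 4; callers can then
   prefer induction variables that keep the index in unscaled form.  */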

/* Dump PARTS to FILE.  */

extern void dump_mem_address (FILE *, struct mem_address *);
void
dump_mem_address (FILE *file, struct mem_address *parts)
{
  if (parts->symbol)
    {
      fprintf (file, "symbol: ");
      print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->base)
    {
      fprintf (file, "base: ");
      print_generic_expr (file, parts->base, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->index)
    {
      fprintf (file, "index: ");
      print_generic_expr (file, parts->index, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->step)
    {
      fprintf (file, "step: ");
      print_generic_expr (file, parts->step, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->offset)
    {
      fprintf (file, "offset: ");
      print_generic_expr (file, parts->offset, TDF_SLIM);
      fprintf (file, "\n");
    }
}

#include "gt-tree-ssa-address.h"