/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Utility functions for manipulating TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "memmodel.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "recog.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-ssa-loop-ivopts.h"
#include "expr.h"
#include "tree-dfa.h"
#include "dumpfile.h"
#include "tree-affine.h"
#include "gimplify.h"
#include "builtins.h"

/* FIXME: We compute address costs using RTL.  */
#include "tree-ssa-address.h"

/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, e.g. local statics
     (3) !binds_local_p, e.g. global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results from the
   back end, because the expander in expr.c validizes the address.  However,
   it would be nice to improve the handling here in order to produce more
   precise results.  */

79 /* A "template" for memory address, used to determine whether the address is
80    valid for mode.  */
81 
struct GTY (()) mem_addr_template {
  rtx ref;			/* The template.  */
  rtx * GTY ((skip)) step_p;	/* The point in template where the step should be
				   filled in.  */
  rtx * GTY ((skip)) off_p;	/* The point in template where the offset should
				   be filled in.  */
};


/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;

#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
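
/* For instance, for an address in the generic address space (AS == 0) with
   a symbol, a base and an offset present but no index or step, the index is
     TEMPL_IDX (0, sym, base, NULL, NULL, off)
       == (0 << 5) | (1 << 4) | (1 << 3) | (0 << 2) | (0 << 1) | 1
       == 25.  */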

/* Stores the address for a memory reference with parameters SYMBOL, BASE,
   INDEX, STEP and OFFSET to *ADDR, using address mode ADDRESS_MODE.  Stores
   pointers to the locations of the step and offset within *ADDR to *STEP_P
   and *OFFSET_P.  */

static void
gen_addr_rtx (machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index && index != const0_rtx)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);

	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  if (base && base != const0_rtx)
    {
      if (*addr)
	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

	  if (offset_p)
	    *offset_p = &XEXP (act_elem, 1);

	  if (GET_CODE (symbol) == SYMBOL_REF
	      || GET_CODE (symbol) == LABEL_REF
	      || GET_CODE (symbol) == CONST)
	    act_elem = gen_rtx_CONST (address_mode, act_elem);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  if (!*addr)
    *addr = const0_rtx;
}
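
/* As an illustration of the shape gen_addr_rtx builds: with all five
   components present, the result is roughly
     (plus (plus (mult index step) base)
	   (const (plus symbol offset)))
   with *STEP_P and *OFFSET_P pointing at the STEP and OFFSET operands, so
   that the template machinery below can overwrite just those slots in
   place.  */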

/* Returns the address for a TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead of really
   expanding the operands, and perform the expansion in-place by using
   one of the "templates".  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
  scalar_int_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (wi::to_wide (addr->step), pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    {
      poly_offset_int dc
	= poly_offset_int::from (wi::to_poly_wide (addr->offset), SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
	vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
	{
	  sym = (addr->symbol
		 ? gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base
		 ? gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index
		 ? gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st ? const0_rtx : NULL_RTX,
			off ? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  /* addr->base could be an SSA_NAME that was set to a constant value.  The
     call to expand_expr may expose that constant.  If so, fold the value
     into OFF and clear BSE.  Otherwise we may later try to pull a mode from
     BSE to generate a REG, which won't work with constants because they
     are modeless.  */
  if (bse && GET_CODE (bse) == CONST_INT)
    {
      if (off)
	off = simplify_gen_binary (PLUS, pointer_mode, bse, off);
      else
	off = bse;
      gcc_assert (GET_CODE (off) == CONST_INT);
      bse = NULL_RTX;
    }
  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}

/* Implement addr_for_mem_ref () directly from a tree, which avoids exporting
   the mem_address structure.  */

rtx
addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
{
  struct mem_address addr;
  get_address_description (exp, &addr);
  return addr_for_mem_ref (&addr, as, really_expand);
}

/* Returns the address of MEM_REF in TYPE.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
	act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
				act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
				addr_off, act_elem);
      else
	addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
				fold_convert (TREE_TYPE (addr_off), offset));
      else
	addr_off = offset;
    }

  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}
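
/* E.g. for a TARGET_MEM_REF with base B, index I, step 4, index2 I2 and
   offset 16, the function above yields (modulo folding)
     (type) B p+ (I * 4 + I2 + 16).  */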

/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target.  */

bool
valid_mem_ref_p (machine_mode mode, addr_space_t as,
		 struct mem_address *addr)
{
  rtx address;

  address = addr_for_mem_ref (addr, as, false);
  if (!address)
    return false;

  return memory_address_addr_space_p (mode, address, as);
}

/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by
   ADDR is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false, omit the verification step.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (build_pointer_type (type), 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ???  As IVOPTs does not follow restrictions on where the base
     pointer may point, create a MEM_REF only if we know that the
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
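
/* E.g. an address consisting only of the symbol &a and offset 4 takes the
   MEM_REF shortcut above and folds to a plain MEM_REF (&a, 4) instead of a
   five-operand TARGET_MEM_REF.  */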

/* Returns true if OBJ is an object whose address is a link-time constant.  */

static bool
fixed_address_object_p (tree obj)
{
  return (VAR_P (obj)
	  && (TREE_STATIC (obj) || DECL_EXTERNAL (obj))
	  && ! DECL_DLLIMPORT_P (obj));
}

/* If ADDR contains an address of an object that is a link-time constant,
   move it to PARTS->symbol.  */

void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
	  && fixed_address_object_p (TREE_OPERAND (val, 0)))
	break;
    }

  if (i == addr->n)
    return;

  parts->symbol = val;
  aff_combination_remove_elt (addr, i);
}

/* If ADDR contains an instance of BASE_HINT, move it to PARTS->base and
   return true.  */

static bool
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
		   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
	break;
    }

  if (i == addr->n)
    return false;

  /* Cast value to appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back-end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
  return true;
}

/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
	break;
    }

  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}

/* Moves the loop variant part V in linear address ADDR to be the index
   of PARTS.  */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
	break;
    }

  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}

/* Adds ELT to PARTS.  */

static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build_pointer_plus (parts->base, elt);
  else
    parts->base = fold_build2 (PLUS_EXPR, type, parts->base, elt);
}

/* Returns true if multiplying by RATIO is allowed in an address.  Test the
   validity for a memory reference accessing memory of mode MODE in address
   space AS.  */

static bool
multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, machine_mode mode,
				 addr_space_t as)
{
#define MAX_RATIO 128
  unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mode;
  static vec<sbitmap> valid_mult_list;
  sbitmap valid_mult;

  if (data_index >= valid_mult_list.length ())
    valid_mult_list.safe_grow_cleared (data_index + 1);

  valid_mult = valid_mult_list[data_index];
  if (!valid_mult)
    {
      machine_mode address_mode = targetm.addr_space.address_mode (as);
      rtx reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
      rtx reg2 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2);
      rtx addr, scaled;
      HOST_WIDE_INT i;

      valid_mult = sbitmap_alloc (2 * MAX_RATIO + 1);
      bitmap_clear (valid_mult);
      scaled = gen_rtx_fmt_ee (MULT, address_mode, reg1, NULL_RTX);
      addr = gen_rtx_fmt_ee (PLUS, address_mode, scaled, reg2);
      for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
	{
	  XEXP (scaled, 1) = gen_int_mode (i, address_mode);
	  if (memory_address_addr_space_p (mode, addr, as)
	      || memory_address_addr_space_p (mode, scaled, as))
	    bitmap_set_bit (valid_mult, i + MAX_RATIO);
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "  allowed multipliers:");
	  for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
	    if (bitmap_bit_p (valid_mult, i + MAX_RATIO))
	      fprintf (dump_file, " %d", (int) i);
	  fprintf (dump_file, "\n");
	  fprintf (dump_file, "\n");
	}

      valid_mult_list[data_index] = valid_mult;
    }

  if (ratio > MAX_RATIO || ratio < -MAX_RATIO)
    return false;

  return bitmap_bit_p (valid_mult, ratio + MAX_RATIO);
}
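
/* On a target with scaled indexing such as x86, the probing loop above
   typically leaves bits set for ratios like 1, 2, 4 and 8 (each stored at
   position ratio + MAX_RATIO in the bitmap), so a query like
     multiplier_allowed_in_address_p (4, SImode, ADDR_SPACE_GENERIC)
   would return true there, while a ratio of 3 would not.  */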

/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and moves the corresponding
   element(s) to PARTS.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  offset_int best_mult = 0;
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
	continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
	}
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      if (amult == best_mult)
	op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
	op_code = MINUS_EXPR;
      else
	{
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}
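
/* For example, given ADDR = 4*i + 4*j - 4*k + p with best_mult 4, the
   collection loop above produces PARTS->index = i + j - k with
   PARTS->step = 4, leaving only p in ADDR.  */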

/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as the base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.  Store true in *VAR_IN_BASE if the variant
   part of the address is split to PARTS.base.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but a single
   register in an address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand, tree base_hint,
	       struct mem_address *parts, bool *var_in_base, bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (maybe_ne (addr->offset, 0))
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* Since at the moment there is no reliable way to distinguish a pointer
     from its offset, we guess whether the variant part is the pointer.  */
  *var_in_base = (base_hint != NULL && parts->symbol == NULL);
  if (*var_in_base)
    *var_in_base = move_hint_to_base (type, parts, base_hint, addr);
  else
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Move pointer into base.  */
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
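
/* As a sketch of the resulting decomposition: for the affine address
   &arr + 4*i + 16 with IV_CAND i and no BASE_HINT, addr_to_parts would
   typically produce symbol = &arr, index = (sizetype) i, step = 4,
   offset = 16 and no base.  */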

/* Force the PARTS into registers (or other operands valid in a memory
   reference).  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
					      is_gimple_mem_ref_addr, NULL_TREE,
					      true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
					     true, NULL_TREE,
					     true, GSI_SAME_STMT);
}

/* Return true if the OFFSET in PARTS is the only thing that is making
   it an invalid address for type TYPE.  */

static bool
mem_ref_valid_without_offset_p (tree type, mem_address parts)
{
  if (!parts.base)
    parts.base = parts.offset;
  parts.offset = NULL_TREE;
  return valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), &parts);
}
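
/* E.g. if PARTS describes [base + index * step + 16] and that form is
   invalid for TYPE, the check above retries without the separate offset;
   if it then succeeds, the caller knows that folding the 16 into the
   base register would yield a valid address.  */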

/* Fold PARTS->offset into PARTS->base, so that there is no longer
   a separate offset.  Emit any new instructions before GSI.  */

static void
add_offset_to_base (gimple_stmt_iterator *gsi, mem_address *parts)
{
  tree tmp = parts->offset;
  if (parts->base)
    {
      tmp = fold_build_pointer_plus (parts->base, tmp);
      tmp = force_gimple_operand_gsi_1 (gsi, tmp, is_gimple_mem_ref_addr,
					NULL_TREE, true, GSI_SAME_STMT);
    }
  parts->base = tmp;
  parts->offset = NULL_TREE;
}

/* Creates and returns a TARGET_MEM_REF for address ADDR.  Any necessary
   computations are emitted in front of GSI.  TYPE is the type of the
   created memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non-NULL if IV_CAND comes from a base address
   object.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  bool var_in_base;
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, &var_in_base, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  /* Merge symbol into other parts.  */
  if (parts.symbol)
    {
      tmp = parts.symbol;
      parts.symbol = NULL_TREE;
      gcc_assert (is_gimple_val (tmp));

      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p (sizetype,
						 TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      /* Add the symbol to base, forcing it into a register
		 if necessary.  */
	      tmp = fold_build_pointer_plus (tmp, parts.base);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  else
	    {
	      /* Move base to index, then move the symbol to base.  */
	      parts.index = parts.base;
	    }
	  parts.base = tmp;
	}
      else
	parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Move multiplication to index by transforming address expression:
       [... + index << step + ...]
     into:
       index' = index << step;
       [... + index' + ...].  */
  if (parts.step && !integer_onep (parts.step))
    {
      gcc_assert (parts.index);
      if (parts.offset && mem_ref_valid_without_offset_p (type, parts))
	{
	  add_offset_to_base (gsi, &parts);
	  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
	  gcc_assert (mem_ref);
	  return mem_ref;
	}

      parts.index = force_gimple_operand_gsi (gsi,
				fold_build2 (MULT_EXPR, sizetype,
					     parts.index, parts.step),
				true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Add offset to invariant part by transforming address expression:
       [base + index + offset]
     into:
       base' = base + offset;
       [base' + index]
     or:
       index' = index + offset;
       [base + index']
     depending on which one is invariant.  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      tree old_base = unshare_expr (parts.base);
      tree old_index = unshare_expr (parts.index);
      tree old_offset = unshare_expr (parts.offset);

      tmp = parts.offset;
      parts.offset = NULL_TREE;
      /* Add offset to invariant part.  */
      if (!var_in_base)
	{
	  if (parts.base)
	    {
	      tmp = fold_build_pointer_plus (parts.base, tmp);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  parts.base = tmp;
	}
      else
	{
	  if (parts.index)
	    {
	      tmp = fold_build_pointer_plus (parts.index, tmp);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  parts.index = tmp;
	}

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;

      /* Restore parts.base, index and offset so that we can check if
	 [base + offset] addressing mode is supported in the next step.
	 This is necessary for targets that support [base + offset] but
	 not [base + index] addressing modes.  */
      parts.base = old_base;
      parts.index = old_index;
      parts.offset = old_offset;
    }

  /* Transform [base + index + ...] into:
       base' = base + index;
       [base' + ...].  */
  if (parts.index)
    {
      tmp = parts.index;
      parts.index = NULL_TREE;
      /* Add index to base.  */
      if (parts.base)
	{
	  tmp = fold_build_pointer_plus (parts.base, tmp);
	  tmp = force_gimple_operand_gsi_1 (gsi, tmp,
					    is_gimple_mem_ref_addr,
					    NULL_TREE, true, GSI_SAME_STMT);
	}
      parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Transform [base + offset] into:
       base' = base + offset;
       [base'].  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      add_offset_to_base (gsi, &parts);
      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}

/* Copies components of the address from OP to ADDR.  */

void
get_address_description (tree op, struct mem_address *addr)
{
  if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
    {
      addr->symbol = TMR_BASE (op);
      addr->base = TMR_INDEX2 (op);
    }
  else
    {
      addr->symbol = NULL_TREE;
      if (TMR_INDEX2 (op))
	{
	  gcc_assert (integer_zerop (TMR_BASE (op)));
	  addr->base = TMR_INDEX2 (op);
	}
      else
	addr->base = TMR_BASE (op);
    }
  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
}

/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  tree base = get_base_address (old_ref);
  if (!base)
    return;

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      if ((TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	  && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  reset_flow_sensitive_info (new_ptr_base);
	}
      else if (VAR_P (base)
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }

  /* And alignment info.  Note we cannot transfer misalignment info
     since that sits on the SSA name but this is flow-sensitive info
     which we cannot transfer in this generic routine.  */
  unsigned old_align = get_object_alignment (old_ref);
  unsigned new_align = get_object_alignment (new_ref);
  if (new_align < old_align)
    TREE_TYPE (new_ref) = build_aligned_type (TREE_TYPE (new_ref), old_align);
}

/* Move constants in TARGET_MEM_REF REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      poly_int64 offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
		         (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target - the propagation
     result wouldn't be valid anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
			        TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
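
/* E.g. a TARGET_MEM_REF with offset 4, constant index 8 and step 2 folds
   above to one with the index and step cleared and offset
   4 + 8 * 2 == 20.  */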

/* Return the preferred index scale factor for accessing memory of mode
   MEM_MODE in the address space of pointer BASE.  Assume that we're
   optimizing for speed if SPEED is true and for size otherwise.  */
unsigned int
preferred_mem_scale_factor (tree base, machine_mode mem_mode,
			    bool speed)
{
  /* For BLKmode, we can't do anything so return 1.  */
  if (mem_mode == BLKmode)
    return 1;

  struct mem_address parts = {};
  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
  unsigned int fact = GET_MODE_UNIT_SIZE (mem_mode);

  /* Addressing mode "base + index".  */
  parts.index = integer_one_node;
  parts.base = integer_one_node;
  rtx addr = addr_for_mem_ref (&parts, as, false);
  unsigned cost = address_cost (addr, mem_mode, as, speed);

  /* Addressing mode "base + index << scale".  */
  parts.step = wide_int_to_tree (sizetype, fact);
  addr = addr_for_mem_ref (&parts, as, false);
  unsigned new_cost = address_cost (addr, mem_mode, as, speed);

  /* Compare the cost of an address with an unscaled index to that of an
     address with a scaled index, and return the scale factor if scaling
     is strictly cheaper.  */
  if (new_cost < cost)
    return GET_MODE_UNIT_SIZE (mem_mode);
  return 1;
}
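
/* E.g. for a V4SImode access (unit size 4) through BASE, the two cost
   queries above compare [base + index] against [base + index * 4]; on a
   target where the scaled form is strictly cheaper this returns 4, so a
   caller such as IVOPTs can prefer induction variables that count in
   elements rather than bytes.  */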

/* Dump PARTS to FILE.  */

extern void dump_mem_address (FILE *, struct mem_address *);
void
dump_mem_address (FILE *file, struct mem_address *parts)
{
  if (parts->symbol)
    {
      fprintf (file, "symbol: ");
      print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->base)
    {
      fprintf (file, "base: ");
      print_generic_expr (file, parts->base, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->index)
    {
      fprintf (file, "index: ");
      print_generic_expr (file, parts->index, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->step)
    {
      fprintf (file, "step: ");
      print_generic_expr (file, parts->step, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->offset)
    {
      fprintf (file, "offset: ");
      print_generic_expr (file, parts->offset, TDF_SLIM);
      fprintf (file, "\n");
    }
}

#include "gt-tree-ssa-address.h"