xref: /dragonfly/contrib/gcc-8.0/gcc/emit-rtl.c (revision 38fd1498)
/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
19*38fd1498Szrj 
20*38fd1498Szrj 
/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
33*38fd1498Szrj 
34*38fd1498Szrj #include "config.h"
35*38fd1498Szrj #include "system.h"
36*38fd1498Szrj #include "coretypes.h"
37*38fd1498Szrj #include "memmodel.h"
38*38fd1498Szrj #include "backend.h"
39*38fd1498Szrj #include "target.h"
40*38fd1498Szrj #include "rtl.h"
41*38fd1498Szrj #include "tree.h"
42*38fd1498Szrj #include "df.h"
43*38fd1498Szrj #include "tm_p.h"
44*38fd1498Szrj #include "stringpool.h"
45*38fd1498Szrj #include "insn-config.h"
46*38fd1498Szrj #include "regs.h"
47*38fd1498Szrj #include "emit-rtl.h"
48*38fd1498Szrj #include "recog.h"
49*38fd1498Szrj #include "diagnostic-core.h"
50*38fd1498Szrj #include "alias.h"
51*38fd1498Szrj #include "fold-const.h"
52*38fd1498Szrj #include "varasm.h"
53*38fd1498Szrj #include "cfgrtl.h"
54*38fd1498Szrj #include "tree-eh.h"
55*38fd1498Szrj #include "explow.h"
56*38fd1498Szrj #include "expr.h"
57*38fd1498Szrj #include "params.h"
58*38fd1498Szrj #include "builtins.h"
59*38fd1498Szrj #include "rtl-iter.h"
60*38fd1498Szrj #include "stor-layout.h"
61*38fd1498Szrj #include "opts.h"
62*38fd1498Szrj #include "predict.h"
63*38fd1498Szrj #include "rtx-vector-builder.h"
64*38fd1498Szrj 
65*38fd1498Szrj struct target_rtl default_target_rtl;
66*38fd1498Szrj #if SWITCHABLE_TARGET
67*38fd1498Szrj struct target_rtl *this_target_rtl = &default_target_rtl;
68*38fd1498Szrj #endif
69*38fd1498Szrj 
70*38fd1498Szrj #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
71*38fd1498Szrj 
72*38fd1498Szrj /* Commonly used modes.  */
73*38fd1498Szrj 
74*38fd1498Szrj scalar_int_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
75*38fd1498Szrj scalar_int_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
76*38fd1498Szrj scalar_int_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */
77*38fd1498Szrj 
78*38fd1498Szrj /* Datastructures maintained for currently processed function in RTL form.  */
79*38fd1498Szrj 
80*38fd1498Szrj struct rtl_data x_rtl;
81*38fd1498Szrj 
82*38fd1498Szrj /* Indexed by pseudo register number, gives the rtx for that pseudo.
83*38fd1498Szrj    Allocated in parallel with regno_pointer_align.
84*38fd1498Szrj    FIXME: We could put it into emit_status struct, but gengtype is not able to deal
85*38fd1498Szrj    with length attribute nested in top level structures.  */
86*38fd1498Szrj 
87*38fd1498Szrj rtx * regno_reg_rtx;
88*38fd1498Szrj 
89*38fd1498Szrj /* This is *not* reset after each function.  It gives each CODE_LABEL
90*38fd1498Szrj    in the entire compilation a unique label number.  */
91*38fd1498Szrj 
92*38fd1498Szrj static GTY(()) int label_num = 1;
93*38fd1498Szrj 
94*38fd1498Szrj /* We record floating-point CONST_DOUBLEs in each floating-point mode for
95*38fd1498Szrj    the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
96*38fd1498Szrj    record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
97*38fd1498Szrj    is set only for MODE_INT and MODE_VECTOR_INT modes.  */
98*38fd1498Szrj 
99*38fd1498Szrj rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
100*38fd1498Szrj 
101*38fd1498Szrj rtx const_true_rtx;
102*38fd1498Szrj 
103*38fd1498Szrj REAL_VALUE_TYPE dconst0;
104*38fd1498Szrj REAL_VALUE_TYPE dconst1;
105*38fd1498Szrj REAL_VALUE_TYPE dconst2;
106*38fd1498Szrj REAL_VALUE_TYPE dconstm1;
107*38fd1498Szrj REAL_VALUE_TYPE dconsthalf;
108*38fd1498Szrj 
109*38fd1498Szrj /* Record fixed-point constant 0 and 1.  */
110*38fd1498Szrj FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
111*38fd1498Szrj FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
112*38fd1498Szrj 
113*38fd1498Szrj /* We make one copy of (const_int C) where C is in
114*38fd1498Szrj    [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
115*38fd1498Szrj    to save space during the compilation and simplify comparisons of
116*38fd1498Szrj    integers.  */
117*38fd1498Szrj 
118*38fd1498Szrj rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
119*38fd1498Szrj 
120*38fd1498Szrj /* Standard pieces of rtx, to be substituted directly into things.  */
121*38fd1498Szrj rtx pc_rtx;
122*38fd1498Szrj rtx ret_rtx;
123*38fd1498Szrj rtx simple_return_rtx;
124*38fd1498Szrj rtx cc0_rtx;
125*38fd1498Szrj 
126*38fd1498Szrj /* Marker used for denoting an INSN, which should never be accessed (i.e.,
127*38fd1498Szrj    this pointer should normally never be dereferenced), but is required to be
128*38fd1498Szrj    distinct from NULL_RTX.  Currently used by peephole2 pass.  */
129*38fd1498Szrj rtx_insn *invalid_insn_rtx;
130*38fd1498Szrj 
131*38fd1498Szrj /* A hash table storing CONST_INTs whose absolute value is greater
132*38fd1498Szrj    than MAX_SAVED_CONST_INT.  */
133*38fd1498Szrj 
134*38fd1498Szrj struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
135*38fd1498Szrj {
136*38fd1498Szrj   typedef HOST_WIDE_INT compare_type;
137*38fd1498Szrj 
138*38fd1498Szrj   static hashval_t hash (rtx i);
139*38fd1498Szrj   static bool equal (rtx i, HOST_WIDE_INT h);
140*38fd1498Szrj };
141*38fd1498Szrj 
142*38fd1498Szrj static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
143*38fd1498Szrj 
144*38fd1498Szrj struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
145*38fd1498Szrj {
146*38fd1498Szrj   static hashval_t hash (rtx x);
147*38fd1498Szrj   static bool equal (rtx x, rtx y);
148*38fd1498Szrj };
149*38fd1498Szrj 
150*38fd1498Szrj static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
151*38fd1498Szrj 
152*38fd1498Szrj struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
153*38fd1498Szrj {
154*38fd1498Szrj   typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;
155*38fd1498Szrj 
156*38fd1498Szrj   static hashval_t hash (rtx x);
157*38fd1498Szrj   static bool equal (rtx x, const compare_type &y);
158*38fd1498Szrj };
159*38fd1498Szrj 
160*38fd1498Szrj static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;
161*38fd1498Szrj 
162*38fd1498Szrj /* A hash table storing register attribute structures.  */
163*38fd1498Szrj struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
164*38fd1498Szrj {
165*38fd1498Szrj   static hashval_t hash (reg_attrs *x);
166*38fd1498Szrj   static bool equal (reg_attrs *a, reg_attrs *b);
167*38fd1498Szrj };
168*38fd1498Szrj 
169*38fd1498Szrj static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
170*38fd1498Szrj 
171*38fd1498Szrj /* A hash table storing all CONST_DOUBLEs.  */
172*38fd1498Szrj struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
173*38fd1498Szrj {
174*38fd1498Szrj   static hashval_t hash (rtx x);
175*38fd1498Szrj   static bool equal (rtx x, rtx y);
176*38fd1498Szrj };
177*38fd1498Szrj 
178*38fd1498Szrj static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
179*38fd1498Szrj 
180*38fd1498Szrj /* A hash table storing all CONST_FIXEDs.  */
181*38fd1498Szrj struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
182*38fd1498Szrj {
183*38fd1498Szrj   static hashval_t hash (rtx x);
184*38fd1498Szrj   static bool equal (rtx x, rtx y);
185*38fd1498Szrj };
186*38fd1498Szrj 
187*38fd1498Szrj static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
188*38fd1498Szrj 
189*38fd1498Szrj #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
190*38fd1498Szrj #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
191*38fd1498Szrj #define first_label_num (crtl->emit.x_first_label_num)
192*38fd1498Szrj 
193*38fd1498Szrj static void set_used_decls (tree);
194*38fd1498Szrj static void mark_label_nuses (rtx);
195*38fd1498Szrj #if TARGET_SUPPORTS_WIDE_INT
196*38fd1498Szrj static rtx lookup_const_wide_int (rtx);
197*38fd1498Szrj #endif
198*38fd1498Szrj static rtx lookup_const_double (rtx);
199*38fd1498Szrj static rtx lookup_const_fixed (rtx);
200*38fd1498Szrj static rtx gen_const_vector (machine_mode, int);
201*38fd1498Szrj static void copy_rtx_if_shared_1 (rtx *orig);
202*38fd1498Szrj 
203*38fd1498Szrj /* Probability of the conditional branch currently proceeded by try_split.  */
204*38fd1498Szrj profile_probability split_branch_probability;
205*38fd1498Szrj 
206*38fd1498Szrj /* Returns a hash code for X (which is a really a CONST_INT).  */
207*38fd1498Szrj 
208*38fd1498Szrj hashval_t
hash(rtx x)209*38fd1498Szrj const_int_hasher::hash (rtx x)
210*38fd1498Szrj {
211*38fd1498Szrj   return (hashval_t) INTVAL (x);
212*38fd1498Szrj }
213*38fd1498Szrj 
214*38fd1498Szrj /* Returns nonzero if the value represented by X (which is really a
215*38fd1498Szrj    CONST_INT) is the same as that given by Y (which is really a
216*38fd1498Szrj    HOST_WIDE_INT *).  */
217*38fd1498Szrj 
218*38fd1498Szrj bool
equal(rtx x,HOST_WIDE_INT y)219*38fd1498Szrj const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
220*38fd1498Szrj {
221*38fd1498Szrj   return (INTVAL (x) == y);
222*38fd1498Szrj }
223*38fd1498Szrj 
224*38fd1498Szrj #if TARGET_SUPPORTS_WIDE_INT
225*38fd1498Szrj /* Returns a hash code for X (which is a really a CONST_WIDE_INT).  */
226*38fd1498Szrj 
227*38fd1498Szrj hashval_t
hash(rtx x)228*38fd1498Szrj const_wide_int_hasher::hash (rtx x)
229*38fd1498Szrj {
230*38fd1498Szrj   int i;
231*38fd1498Szrj   unsigned HOST_WIDE_INT hash = 0;
232*38fd1498Szrj   const_rtx xr = x;
233*38fd1498Szrj 
234*38fd1498Szrj   for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
235*38fd1498Szrj     hash += CONST_WIDE_INT_ELT (xr, i);
236*38fd1498Szrj 
237*38fd1498Szrj   return (hashval_t) hash;
238*38fd1498Szrj }
239*38fd1498Szrj 
240*38fd1498Szrj /* Returns nonzero if the value represented by X (which is really a
241*38fd1498Szrj    CONST_WIDE_INT) is the same as that given by Y (which is really a
242*38fd1498Szrj    CONST_WIDE_INT).  */
243*38fd1498Szrj 
244*38fd1498Szrj bool
equal(rtx x,rtx y)245*38fd1498Szrj const_wide_int_hasher::equal (rtx x, rtx y)
246*38fd1498Szrj {
247*38fd1498Szrj   int i;
248*38fd1498Szrj   const_rtx xr = x;
249*38fd1498Szrj   const_rtx yr = y;
250*38fd1498Szrj   if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
251*38fd1498Szrj     return false;
252*38fd1498Szrj 
253*38fd1498Szrj   for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
254*38fd1498Szrj     if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
255*38fd1498Szrj       return false;
256*38fd1498Szrj 
257*38fd1498Szrj   return true;
258*38fd1498Szrj }
259*38fd1498Szrj #endif
260*38fd1498Szrj 
261*38fd1498Szrj /* Returns a hash code for CONST_POLY_INT X.  */
262*38fd1498Szrj 
263*38fd1498Szrj hashval_t
hash(rtx x)264*38fd1498Szrj const_poly_int_hasher::hash (rtx x)
265*38fd1498Szrj {
266*38fd1498Szrj   inchash::hash h;
267*38fd1498Szrj   h.add_int (GET_MODE (x));
268*38fd1498Szrj   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
269*38fd1498Szrj     h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
270*38fd1498Szrj   return h.end ();
271*38fd1498Szrj }
272*38fd1498Szrj 
273*38fd1498Szrj /* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y.  */
274*38fd1498Szrj 
275*38fd1498Szrj bool
equal(rtx x,const compare_type & y)276*38fd1498Szrj const_poly_int_hasher::equal (rtx x, const compare_type &y)
277*38fd1498Szrj {
278*38fd1498Szrj   if (GET_MODE (x) != y.first)
279*38fd1498Szrj     return false;
280*38fd1498Szrj   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
281*38fd1498Szrj     if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
282*38fd1498Szrj       return false;
283*38fd1498Szrj   return true;
284*38fd1498Szrj }
285*38fd1498Szrj 
286*38fd1498Szrj /* Returns a hash code for X (which is really a CONST_DOUBLE).  */
287*38fd1498Szrj hashval_t
hash(rtx x)288*38fd1498Szrj const_double_hasher::hash (rtx x)
289*38fd1498Szrj {
290*38fd1498Szrj   const_rtx const value = x;
291*38fd1498Szrj   hashval_t h;
292*38fd1498Szrj 
293*38fd1498Szrj   if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
294*38fd1498Szrj     h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
295*38fd1498Szrj   else
296*38fd1498Szrj     {
297*38fd1498Szrj       h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
298*38fd1498Szrj       /* MODE is used in the comparison, so it should be in the hash.  */
299*38fd1498Szrj       h ^= GET_MODE (value);
300*38fd1498Szrj     }
301*38fd1498Szrj   return h;
302*38fd1498Szrj }
303*38fd1498Szrj 
304*38fd1498Szrj /* Returns nonzero if the value represented by X (really a ...)
305*38fd1498Szrj    is the same as that represented by Y (really a ...) */
306*38fd1498Szrj bool
equal(rtx x,rtx y)307*38fd1498Szrj const_double_hasher::equal (rtx x, rtx y)
308*38fd1498Szrj {
309*38fd1498Szrj   const_rtx const a = x, b = y;
310*38fd1498Szrj 
311*38fd1498Szrj   if (GET_MODE (a) != GET_MODE (b))
312*38fd1498Szrj     return 0;
313*38fd1498Szrj   if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
314*38fd1498Szrj     return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
315*38fd1498Szrj 	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
316*38fd1498Szrj   else
317*38fd1498Szrj     return real_identical (CONST_DOUBLE_REAL_VALUE (a),
318*38fd1498Szrj 			   CONST_DOUBLE_REAL_VALUE (b));
319*38fd1498Szrj }
320*38fd1498Szrj 
321*38fd1498Szrj /* Returns a hash code for X (which is really a CONST_FIXED).  */
322*38fd1498Szrj 
323*38fd1498Szrj hashval_t
hash(rtx x)324*38fd1498Szrj const_fixed_hasher::hash (rtx x)
325*38fd1498Szrj {
326*38fd1498Szrj   const_rtx const value = x;
327*38fd1498Szrj   hashval_t h;
328*38fd1498Szrj 
329*38fd1498Szrj   h = fixed_hash (CONST_FIXED_VALUE (value));
330*38fd1498Szrj   /* MODE is used in the comparison, so it should be in the hash.  */
331*38fd1498Szrj   h ^= GET_MODE (value);
332*38fd1498Szrj   return h;
333*38fd1498Szrj }
334*38fd1498Szrj 
335*38fd1498Szrj /* Returns nonzero if the value represented by X is the same as that
336*38fd1498Szrj    represented by Y.  */
337*38fd1498Szrj 
338*38fd1498Szrj bool
equal(rtx x,rtx y)339*38fd1498Szrj const_fixed_hasher::equal (rtx x, rtx y)
340*38fd1498Szrj {
341*38fd1498Szrj   const_rtx const a = x, b = y;
342*38fd1498Szrj 
343*38fd1498Szrj   if (GET_MODE (a) != GET_MODE (b))
344*38fd1498Szrj     return 0;
345*38fd1498Szrj   return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
346*38fd1498Szrj }
347*38fd1498Szrj 
348*38fd1498Szrj /* Return true if the given memory attributes are equal.  */
349*38fd1498Szrj 
350*38fd1498Szrj bool
mem_attrs_eq_p(const struct mem_attrs * p,const struct mem_attrs * q)351*38fd1498Szrj mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
352*38fd1498Szrj {
353*38fd1498Szrj   if (p == q)
354*38fd1498Szrj     return true;
355*38fd1498Szrj   if (!p || !q)
356*38fd1498Szrj     return false;
357*38fd1498Szrj   return (p->alias == q->alias
358*38fd1498Szrj 	  && p->offset_known_p == q->offset_known_p
359*38fd1498Szrj 	  && (!p->offset_known_p || known_eq (p->offset, q->offset))
360*38fd1498Szrj 	  && p->size_known_p == q->size_known_p
361*38fd1498Szrj 	  && (!p->size_known_p || known_eq (p->size, q->size))
362*38fd1498Szrj 	  && p->align == q->align
363*38fd1498Szrj 	  && p->addrspace == q->addrspace
364*38fd1498Szrj 	  && (p->expr == q->expr
365*38fd1498Szrj 	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
366*38fd1498Szrj 		  && operand_equal_p (p->expr, q->expr, 0))));
367*38fd1498Szrj }
368*38fd1498Szrj 
369*38fd1498Szrj /* Set MEM's memory attributes so that they are the same as ATTRS.  */
370*38fd1498Szrj 
371*38fd1498Szrj static void
set_mem_attrs(rtx mem,mem_attrs * attrs)372*38fd1498Szrj set_mem_attrs (rtx mem, mem_attrs *attrs)
373*38fd1498Szrj {
374*38fd1498Szrj   /* If everything is the default, we can just clear the attributes.  */
375*38fd1498Szrj   if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
376*38fd1498Szrj     {
377*38fd1498Szrj       MEM_ATTRS (mem) = 0;
378*38fd1498Szrj       return;
379*38fd1498Szrj     }
380*38fd1498Szrj 
381*38fd1498Szrj   if (!MEM_ATTRS (mem)
382*38fd1498Szrj       || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
383*38fd1498Szrj     {
384*38fd1498Szrj       MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
385*38fd1498Szrj       memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
386*38fd1498Szrj     }
387*38fd1498Szrj }
388*38fd1498Szrj 
389*38fd1498Szrj /* Returns a hash code for X (which is a really a reg_attrs *).  */
390*38fd1498Szrj 
391*38fd1498Szrj hashval_t
hash(reg_attrs * x)392*38fd1498Szrj reg_attr_hasher::hash (reg_attrs *x)
393*38fd1498Szrj {
394*38fd1498Szrj   const reg_attrs *const p = x;
395*38fd1498Szrj 
396*38fd1498Szrj   inchash::hash h;
397*38fd1498Szrj   h.add_ptr (p->decl);
398*38fd1498Szrj   h.add_poly_hwi (p->offset);
399*38fd1498Szrj   return h.end ();
400*38fd1498Szrj }
401*38fd1498Szrj 
402*38fd1498Szrj /* Returns nonzero if the value represented by X  is the same as that given by
403*38fd1498Szrj    Y.  */
404*38fd1498Szrj 
405*38fd1498Szrj bool
equal(reg_attrs * x,reg_attrs * y)406*38fd1498Szrj reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
407*38fd1498Szrj {
408*38fd1498Szrj   const reg_attrs *const p = x;
409*38fd1498Szrj   const reg_attrs *const q = y;
410*38fd1498Szrj 
411*38fd1498Szrj   return (p->decl == q->decl && known_eq (p->offset, q->offset));
412*38fd1498Szrj }
413*38fd1498Szrj /* Allocate a new reg_attrs structure and insert it into the hash table if
414*38fd1498Szrj    one identical to it is not already in the table.  We are doing this for
415*38fd1498Szrj    MEM of mode MODE.  */
416*38fd1498Szrj 
417*38fd1498Szrj static reg_attrs *
get_reg_attrs(tree decl,poly_int64 offset)418*38fd1498Szrj get_reg_attrs (tree decl, poly_int64 offset)
419*38fd1498Szrj {
420*38fd1498Szrj   reg_attrs attrs;
421*38fd1498Szrj 
422*38fd1498Szrj   /* If everything is the default, we can just return zero.  */
423*38fd1498Szrj   if (decl == 0 && known_eq (offset, 0))
424*38fd1498Szrj     return 0;
425*38fd1498Szrj 
426*38fd1498Szrj   attrs.decl = decl;
427*38fd1498Szrj   attrs.offset = offset;
428*38fd1498Szrj 
429*38fd1498Szrj   reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
430*38fd1498Szrj   if (*slot == 0)
431*38fd1498Szrj     {
432*38fd1498Szrj       *slot = ggc_alloc<reg_attrs> ();
433*38fd1498Szrj       memcpy (*slot, &attrs, sizeof (reg_attrs));
434*38fd1498Szrj     }
435*38fd1498Szrj 
436*38fd1498Szrj   return *slot;
437*38fd1498Szrj }
438*38fd1498Szrj 
439*38fd1498Szrj 
440*38fd1498Szrj #if !HAVE_blockage
441*38fd1498Szrj /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
442*38fd1498Szrj    and to block register equivalences to be seen across this insn.  */
443*38fd1498Szrj 
444*38fd1498Szrj rtx
gen_blockage(void)445*38fd1498Szrj gen_blockage (void)
446*38fd1498Szrj {
447*38fd1498Szrj   rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
448*38fd1498Szrj   MEM_VOLATILE_P (x) = true;
449*38fd1498Szrj   return x;
450*38fd1498Szrj }
451*38fd1498Szrj #endif
452*38fd1498Szrj 
453*38fd1498Szrj 
454*38fd1498Szrj /* Set the mode and register number of X to MODE and REGNO.  */
455*38fd1498Szrj 
456*38fd1498Szrj void
set_mode_and_regno(rtx x,machine_mode mode,unsigned int regno)457*38fd1498Szrj set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
458*38fd1498Szrj {
459*38fd1498Szrj   unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
460*38fd1498Szrj 			? hard_regno_nregs (regno, mode)
461*38fd1498Szrj 			: 1);
462*38fd1498Szrj   PUT_MODE_RAW (x, mode);
463*38fd1498Szrj   set_regno_raw (x, regno, nregs);
464*38fd1498Szrj }
465*38fd1498Szrj 
466*38fd1498Szrj /* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
467*38fd1498Szrj    don't attempt to share with the various global pieces of rtl (such as
468*38fd1498Szrj    frame_pointer_rtx).  */
469*38fd1498Szrj 
470*38fd1498Szrj rtx
gen_raw_REG(machine_mode mode,unsigned int regno)471*38fd1498Szrj gen_raw_REG (machine_mode mode, unsigned int regno)
472*38fd1498Szrj {
473*38fd1498Szrj   rtx x = rtx_alloc (REG MEM_STAT_INFO);
474*38fd1498Szrj   set_mode_and_regno (x, mode, regno);
475*38fd1498Szrj   REG_ATTRS (x) = NULL;
476*38fd1498Szrj   ORIGINAL_REGNO (x) = regno;
477*38fd1498Szrj   return x;
478*38fd1498Szrj }
479*38fd1498Szrj 
480*38fd1498Szrj /* There are some RTL codes that require special attention; the generation
481*38fd1498Szrj    functions do the raw handling.  If you add to this list, modify
482*38fd1498Szrj    special_rtx in gengenrtl.c as well.  */
483*38fd1498Szrj 
484*38fd1498Szrj rtx_expr_list *
gen_rtx_EXPR_LIST(machine_mode mode,rtx expr,rtx expr_list)485*38fd1498Szrj gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
486*38fd1498Szrj {
487*38fd1498Szrj   return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
488*38fd1498Szrj 						 expr_list));
489*38fd1498Szrj }
490*38fd1498Szrj 
491*38fd1498Szrj rtx_insn_list *
gen_rtx_INSN_LIST(machine_mode mode,rtx insn,rtx insn_list)492*38fd1498Szrj gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
493*38fd1498Szrj {
494*38fd1498Szrj   return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
495*38fd1498Szrj 						 insn_list));
496*38fd1498Szrj }
497*38fd1498Szrj 
498*38fd1498Szrj rtx_insn *
gen_rtx_INSN(machine_mode mode,rtx_insn * prev_insn,rtx_insn * next_insn,basic_block bb,rtx pattern,int location,int code,rtx reg_notes)499*38fd1498Szrj gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
500*38fd1498Szrj 	      basic_block bb, rtx pattern, int location, int code,
501*38fd1498Szrj 	      rtx reg_notes)
502*38fd1498Szrj {
503*38fd1498Szrj   return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
504*38fd1498Szrj 						 prev_insn, next_insn,
505*38fd1498Szrj 						 bb, pattern, location, code,
506*38fd1498Szrj 						 reg_notes));
507*38fd1498Szrj }
508*38fd1498Szrj 
509*38fd1498Szrj rtx
gen_rtx_CONST_INT(machine_mode mode ATTRIBUTE_UNUSED,HOST_WIDE_INT arg)510*38fd1498Szrj gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
511*38fd1498Szrj {
512*38fd1498Szrj   if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
513*38fd1498Szrj     return const_int_rtx[arg + MAX_SAVED_CONST_INT];
514*38fd1498Szrj 
515*38fd1498Szrj #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
516*38fd1498Szrj   if (const_true_rtx && arg == STORE_FLAG_VALUE)
517*38fd1498Szrj     return const_true_rtx;
518*38fd1498Szrj #endif
519*38fd1498Szrj 
520*38fd1498Szrj   /* Look up the CONST_INT in the hash table.  */
521*38fd1498Szrj   rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
522*38fd1498Szrj 						   INSERT);
523*38fd1498Szrj   if (*slot == 0)
524*38fd1498Szrj     *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
525*38fd1498Szrj 
526*38fd1498Szrj   return *slot;
527*38fd1498Szrj }
528*38fd1498Szrj 
529*38fd1498Szrj rtx
gen_int_mode(poly_int64 c,machine_mode mode)530*38fd1498Szrj gen_int_mode (poly_int64 c, machine_mode mode)
531*38fd1498Szrj {
532*38fd1498Szrj   c = trunc_int_for_mode (c, mode);
533*38fd1498Szrj   if (c.is_constant ())
534*38fd1498Szrj     return GEN_INT (c.coeffs[0]);
535*38fd1498Szrj   unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
536*38fd1498Szrj   return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
537*38fd1498Szrj }
538*38fd1498Szrj 
539*38fd1498Szrj /* CONST_DOUBLEs might be created from pairs of integers, or from
540*38fd1498Szrj    REAL_VALUE_TYPEs.  Also, their length is known only at run time,
541*38fd1498Szrj    so we cannot use gen_rtx_raw_CONST_DOUBLE.  */
542*38fd1498Szrj 
543*38fd1498Szrj /* Determine whether REAL, a CONST_DOUBLE, already exists in the
544*38fd1498Szrj    hash table.  If so, return its counterpart; otherwise add it
545*38fd1498Szrj    to the hash table and return it.  */
546*38fd1498Szrj static rtx
lookup_const_double(rtx real)547*38fd1498Szrj lookup_const_double (rtx real)
548*38fd1498Szrj {
549*38fd1498Szrj   rtx *slot = const_double_htab->find_slot (real, INSERT);
550*38fd1498Szrj   if (*slot == 0)
551*38fd1498Szrj     *slot = real;
552*38fd1498Szrj 
553*38fd1498Szrj   return *slot;
554*38fd1498Szrj }
555*38fd1498Szrj 
556*38fd1498Szrj /* Return a CONST_DOUBLE rtx for a floating-point value specified by
557*38fd1498Szrj    VALUE in mode MODE.  */
558*38fd1498Szrj rtx
const_double_from_real_value(REAL_VALUE_TYPE value,machine_mode mode)559*38fd1498Szrj const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
560*38fd1498Szrj {
561*38fd1498Szrj   rtx real = rtx_alloc (CONST_DOUBLE);
562*38fd1498Szrj   PUT_MODE (real, mode);
563*38fd1498Szrj 
564*38fd1498Szrj   real->u.rv = value;
565*38fd1498Szrj 
566*38fd1498Szrj   return lookup_const_double (real);
567*38fd1498Szrj }
568*38fd1498Szrj 
569*38fd1498Szrj /* Determine whether FIXED, a CONST_FIXED, already exists in the
570*38fd1498Szrj    hash table.  If so, return its counterpart; otherwise add it
571*38fd1498Szrj    to the hash table and return it.  */
572*38fd1498Szrj 
573*38fd1498Szrj static rtx
lookup_const_fixed(rtx fixed)574*38fd1498Szrj lookup_const_fixed (rtx fixed)
575*38fd1498Szrj {
576*38fd1498Szrj   rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
577*38fd1498Szrj   if (*slot == 0)
578*38fd1498Szrj     *slot = fixed;
579*38fd1498Szrj 
580*38fd1498Szrj   return *slot;
581*38fd1498Szrj }
582*38fd1498Szrj 
583*38fd1498Szrj /* Return a CONST_FIXED rtx for a fixed-point value specified by
584*38fd1498Szrj    VALUE in mode MODE.  */
585*38fd1498Szrj 
586*38fd1498Szrj rtx
const_fixed_from_fixed_value(FIXED_VALUE_TYPE value,machine_mode mode)587*38fd1498Szrj const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
588*38fd1498Szrj {
589*38fd1498Szrj   rtx fixed = rtx_alloc (CONST_FIXED);
590*38fd1498Szrj   PUT_MODE (fixed, mode);
591*38fd1498Szrj 
592*38fd1498Szrj   fixed->u.fv = value;
593*38fd1498Szrj 
594*38fd1498Szrj   return lookup_const_fixed (fixed);
595*38fd1498Szrj }
596*38fd1498Szrj 
597*38fd1498Szrj #if TARGET_SUPPORTS_WIDE_INT == 0
598*38fd1498Szrj /* Constructs double_int from rtx CST.  */
599*38fd1498Szrj 
600*38fd1498Szrj double_int
rtx_to_double_int(const_rtx cst)601*38fd1498Szrj rtx_to_double_int (const_rtx cst)
602*38fd1498Szrj {
603*38fd1498Szrj   double_int r;
604*38fd1498Szrj 
605*38fd1498Szrj   if (CONST_INT_P (cst))
606*38fd1498Szrj       r = double_int::from_shwi (INTVAL (cst));
607*38fd1498Szrj   else if (CONST_DOUBLE_AS_INT_P (cst))
608*38fd1498Szrj     {
609*38fd1498Szrj       r.low = CONST_DOUBLE_LOW (cst);
610*38fd1498Szrj       r.high = CONST_DOUBLE_HIGH (cst);
611*38fd1498Szrj     }
612*38fd1498Szrj   else
613*38fd1498Szrj     gcc_unreachable ();
614*38fd1498Szrj 
615*38fd1498Szrj   return r;
616*38fd1498Szrj }
617*38fd1498Szrj #endif
618*38fd1498Szrj 
619*38fd1498Szrj #if TARGET_SUPPORTS_WIDE_INT
620*38fd1498Szrj /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
621*38fd1498Szrj    If so, return its counterpart; otherwise add it to the hash table and
622*38fd1498Szrj    return it.  */
623*38fd1498Szrj 
624*38fd1498Szrj static rtx
lookup_const_wide_int(rtx wint)625*38fd1498Szrj lookup_const_wide_int (rtx wint)
626*38fd1498Szrj {
627*38fd1498Szrj   rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
628*38fd1498Szrj   if (*slot == 0)
629*38fd1498Szrj     *slot = wint;
630*38fd1498Szrj 
631*38fd1498Szrj   return *slot;
632*38fd1498Szrj }
633*38fd1498Szrj #endif
634*38fd1498Szrj 
635*38fd1498Szrj /* Return an rtx constant for V, given that the constant has mode MODE.
636*38fd1498Szrj    The returned rtx will be a CONST_INT if V fits, otherwise it will be
637*38fd1498Szrj    a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
638*38fd1498Szrj    (if TARGET_SUPPORTS_WIDE_INT).  */
639*38fd1498Szrj 
640*38fd1498Szrj static rtx
immed_wide_int_const_1(const wide_int_ref & v,machine_mode mode)641*38fd1498Szrj immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
642*38fd1498Szrj {
643*38fd1498Szrj   unsigned int len = v.get_len ();
644*38fd1498Szrj   /* Not scalar_int_mode because we also allow pointer bound modes.  */
645*38fd1498Szrj   unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
646*38fd1498Szrj 
647*38fd1498Szrj   /* Allow truncation but not extension since we do not know if the
648*38fd1498Szrj      number is signed or unsigned.  */
649*38fd1498Szrj   gcc_assert (prec <= v.get_precision ());
650*38fd1498Szrj 
651*38fd1498Szrj   if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
652*38fd1498Szrj     return gen_int_mode (v.elt (0), mode);
653*38fd1498Szrj 
654*38fd1498Szrj #if TARGET_SUPPORTS_WIDE_INT
655*38fd1498Szrj   {
656*38fd1498Szrj     unsigned int i;
657*38fd1498Szrj     rtx value;
658*38fd1498Szrj     unsigned int blocks_needed
659*38fd1498Szrj       = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
660*38fd1498Szrj 
661*38fd1498Szrj     if (len > blocks_needed)
662*38fd1498Szrj       len = blocks_needed;
663*38fd1498Szrj 
664*38fd1498Szrj     value = const_wide_int_alloc (len);
665*38fd1498Szrj 
666*38fd1498Szrj     /* It is so tempting to just put the mode in here.  Must control
667*38fd1498Szrj        myself ... */
668*38fd1498Szrj     PUT_MODE (value, VOIDmode);
669*38fd1498Szrj     CWI_PUT_NUM_ELEM (value, len);
670*38fd1498Szrj 
671*38fd1498Szrj     for (i = 0; i < len; i++)
672*38fd1498Szrj       CONST_WIDE_INT_ELT (value, i) = v.elt (i);
673*38fd1498Szrj 
674*38fd1498Szrj     return lookup_const_wide_int (value);
675*38fd1498Szrj   }
676*38fd1498Szrj #else
677*38fd1498Szrj   return immed_double_const (v.elt (0), v.elt (1), mode);
678*38fd1498Szrj #endif
679*38fd1498Szrj }
680*38fd1498Szrj 
681*38fd1498Szrj #if TARGET_SUPPORTS_WIDE_INT == 0
682*38fd1498Szrj /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
683*38fd1498Szrj    of ints: I0 is the low-order word and I1 is the high-order word.
684*38fd1498Szrj    For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
685*38fd1498Szrj    implied upper bits are copies of the high bit of i1.  The value
686*38fd1498Szrj    itself is neither signed nor unsigned.  Do not use this routine for
687*38fd1498Szrj    non-integer modes; convert to REAL_VALUE_TYPE and use
688*38fd1498Szrj    const_double_from_real_value.  */
689*38fd1498Szrj 
rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
        (i.e., i1 consists only from copies of the sign bit, and sign
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  scalar_mode smode;
  if (is_a <scalar_mode> (mode, &smode)
      && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (i0, mode);

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  /* Zero any words beyond the low/high pair so that all CONST_DOUBLEs
     with the same value have identical bits (the value lives entirely
     in the first two words).  */
  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  /* Reuse an existing identical CONST_DOUBLE if one has already been
     created, so integer constants stay shared.  */
  return lookup_const_double (value);
}
726*38fd1498Szrj #endif
727*38fd1498Szrj 
728*38fd1498Szrj /* Return an rtx representation of C in mode MODE.  */
729*38fd1498Szrj 
rtx
immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
{
  /* A compile-time-constant polynomial reduces to its single
     coefficient; let the scalar routine handle it.  */
  if (c.is_constant ())
    return immed_wide_int_const_1 (c.coeffs[0], mode);

  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= c.coeffs[0].get_precision ());
  poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);

  /* See whether we already have an rtx for this constant.  */
  inchash::hash h;
  h.add_int (mode);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (newc.coeffs[i]);
  const_poly_int_hasher::compare_type typed_value (mode, newc);
  rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
							h.end (), INSERT);
  rtx x = *slot;
  if (x)
    return x;

  /* Create a new rtx.  There's a choice to be made here between installing
     the actual mode of the rtx or leaving it as VOIDmode (for consistency
     with CONST_INT).  In practice the handling of the codes is different
     enough that we get no benefit from using VOIDmode, and various places
     assume that VOIDmode implies CONST_INT.  Using the real mode seems like
     the right long-term direction anyway.  */
  typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
  /* The coefficients are stored as trailing data after the fixed-size
     header, sized by the precision computed above.  */
  size_t extra_size = twi::extra_size (prec);
  x = rtx_alloc_v (CONST_POLY_INT,
		   sizeof (struct const_poly_int_def) + extra_size);
  PUT_MODE (x, mode);
  CONST_POLY_INT_COEFFS (x).set_precision (prec);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];

  /* Record the new rtx in the hash table so it is shared from now on.  */
  *slot = x;
  return x;
}
774*38fd1498Szrj 
/* Return a REG rtx for register number REGNO in mode MODE, reusing the
   pre-allocated global rtxs (frame_pointer_rtx etc.) where safe.  */

rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;

      /* Only distinct hard frame pointers get their own shared rtx.  */
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      /* Share the PIC register only while it is a fixed register;
	 otherwise it behaves like an ordinary allocatable register.  */
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.   Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  /* No shared rtx applies; allocate a fresh REG.  */
  return gen_raw_REG (mode, regno);
}
841*38fd1498Szrj 
842*38fd1498Szrj rtx
gen_rtx_MEM(machine_mode mode,rtx addr)843*38fd1498Szrj gen_rtx_MEM (machine_mode mode, rtx addr)
844*38fd1498Szrj {
845*38fd1498Szrj   rtx rt = gen_rtx_raw_MEM (mode, addr);
846*38fd1498Szrj 
847*38fd1498Szrj   /* This field is not cleared by the mere allocation of the rtx, so
848*38fd1498Szrj      we clear it here.  */
849*38fd1498Szrj   MEM_ATTRS (rt) = 0;
850*38fd1498Szrj 
851*38fd1498Szrj   return rt;
852*38fd1498Szrj }
853*38fd1498Szrj 
854*38fd1498Szrj /* Generate a memory referring to non-trapping constant memory.  */
855*38fd1498Szrj 
856*38fd1498Szrj rtx
gen_const_mem(machine_mode mode,rtx addr)857*38fd1498Szrj gen_const_mem (machine_mode mode, rtx addr)
858*38fd1498Szrj {
859*38fd1498Szrj   rtx mem = gen_rtx_MEM (mode, addr);
860*38fd1498Szrj   MEM_READONLY_P (mem) = 1;
861*38fd1498Szrj   MEM_NOTRAP_P (mem) = 1;
862*38fd1498Szrj   return mem;
863*38fd1498Szrj }
864*38fd1498Szrj 
865*38fd1498Szrj /* Generate a MEM referring to fixed portions of the frame, e.g., register
866*38fd1498Szrj    save areas.  */
867*38fd1498Szrj 
868*38fd1498Szrj rtx
gen_frame_mem(machine_mode mode,rtx addr)869*38fd1498Szrj gen_frame_mem (machine_mode mode, rtx addr)
870*38fd1498Szrj {
871*38fd1498Szrj   rtx mem = gen_rtx_MEM (mode, addr);
872*38fd1498Szrj   MEM_NOTRAP_P (mem) = 1;
873*38fd1498Szrj   set_mem_alias_set (mem, get_frame_alias_set ());
874*38fd1498Szrj   return mem;
875*38fd1498Szrj }
876*38fd1498Szrj 
877*38fd1498Szrj /* Generate a MEM referring to a temporary use of the stack, not part
878*38fd1498Szrj     of the fixed stack frame.  For example, something which is pushed
879*38fd1498Szrj     by a target splitter.  */
880*38fd1498Szrj rtx
gen_tmp_stack_mem(machine_mode mode,rtx addr)881*38fd1498Szrj gen_tmp_stack_mem (machine_mode mode, rtx addr)
882*38fd1498Szrj {
883*38fd1498Szrj   rtx mem = gen_rtx_MEM (mode, addr);
884*38fd1498Szrj   MEM_NOTRAP_P (mem) = 1;
885*38fd1498Szrj   if (!cfun->calls_alloca)
886*38fd1498Szrj     set_mem_alias_set (mem, get_frame_alias_set ());
887*38fd1498Szrj   return mem;
888*38fd1498Szrj }
889*38fd1498Szrj 
890*38fd1498Szrj /* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
891*38fd1498Szrj    this construct would be valid, and false otherwise.  */
892*38fd1498Szrj 
bool
validate_subreg (machine_mode omode, machine_mode imode,
		 const_rtx reg, poly_uint64 offset)
{
  poly_uint64 isize = GET_MODE_SIZE (imode);
  poly_uint64 osize = GET_MODE_SIZE (omode);

  /* The sizes must be ordered, so that we know whether the subreg
     is partial, paradoxical or complete.  */
  if (!ordered_p (isize, osize))
    return false;

  /* All subregs must be aligned.  */
  if (!multiple_p (offset, osize))
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (maybe_ge (offset, isize))
    return false;

  /* Size of the hard registers the inner mode would occupy.  */
  poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (known_ge (osize, regsize) && known_ge (isize, osize))
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrarily mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (known_eq (isize, osize)
	     /* LRA can use subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of floating point mode can be less than the
		integer mode.  LRA also uses subregs for a register
		that should be used in different modes in one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (maybe_gt (osize, isize))
    return known_eq (offset, 0U);

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

      /* Component extraction from complex/vector values is always OK;
	 otherwise the target must allow the mode change.  */
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
	return false;

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* The outer size must be ordered wrt the register size, otherwise
     we wouldn't know at compile time how many registers the outer
     mode occupies.  */
  if (!ordered_p (osize, regsize))
    return false;

  /* For pseudo registers, we want most of the same checks.  Namely:

     Assume that the pseudo register will be allocated to hard registers
     that can hold REGSIZE bytes each.  If OSIZE is not a multiple of REGSIZE,
     the remainder must correspond to the lowpart of the containing hard
     register.  If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
     otherwise it is at the lowest offset.

     Given that we've already checked the mode and offset alignment,
     we only have to check subblock subregs here.  */
  if (maybe_lt (osize, regsize)
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      /* It is invalid for the target to pick a register size for a mode
	 that isn't ordered wrt to the size of that mode.  */
      poly_uint64 block_size = ordered_min (isize, regsize);
      unsigned int start_reg;
      poly_uint64 offset_within_reg;
      /* The subreg must select the lowpart of one containing register:
	 offset_within_reg is at the high end for big-endian targets and
	 zero otherwise.  */
      if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
	  || (BYTES_BIG_ENDIAN
	      ? maybe_ne (offset_within_reg, block_size - osize)
	      : maybe_ne (offset_within_reg, 0U)))
	return false;
    }
  return true;
}
1006*38fd1498Szrj 
1007*38fd1498Szrj rtx
gen_rtx_SUBREG(machine_mode mode,rtx reg,poly_uint64 offset)1008*38fd1498Szrj gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
1009*38fd1498Szrj {
1010*38fd1498Szrj   gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
1011*38fd1498Szrj   return gen_rtx_raw_SUBREG (mode, reg, offset);
1012*38fd1498Szrj }
1013*38fd1498Szrj 
1014*38fd1498Szrj /* Generate a SUBREG representing the least-significant part of REG if MODE
1015*38fd1498Szrj    is smaller than mode of REG, otherwise paradoxical SUBREG.  */
1016*38fd1498Szrj 
1017*38fd1498Szrj rtx
gen_lowpart_SUBREG(machine_mode mode,rtx reg)1018*38fd1498Szrj gen_lowpart_SUBREG (machine_mode mode, rtx reg)
1019*38fd1498Szrj {
1020*38fd1498Szrj   machine_mode inmode;
1021*38fd1498Szrj 
1022*38fd1498Szrj   inmode = GET_MODE (reg);
1023*38fd1498Szrj   if (inmode == VOIDmode)
1024*38fd1498Szrj     inmode = mode;
1025*38fd1498Szrj   return gen_rtx_SUBREG (mode, reg,
1026*38fd1498Szrj 			 subreg_lowpart_offset (mode, inmode));
1027*38fd1498Szrj }
1028*38fd1498Szrj 
1029*38fd1498Szrj rtx
gen_rtx_VAR_LOCATION(machine_mode mode,tree decl,rtx loc,enum var_init_status status)1030*38fd1498Szrj gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
1031*38fd1498Szrj 		      enum var_init_status status)
1032*38fd1498Szrj {
1033*38fd1498Szrj   rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
1034*38fd1498Szrj   PAT_VAR_LOCATION_STATUS (x) = status;
1035*38fd1498Szrj   return x;
1036*38fd1498Szrj }
1037*38fd1498Szrj 
1038*38fd1498Szrj 
1039*38fd1498Szrj /* Create an rtvec and stores within it the RTXen passed in the arguments.  */
1040*38fd1498Szrj 
1041*38fd1498Szrj rtvec
gen_rtvec(int n,...)1042*38fd1498Szrj gen_rtvec (int n, ...)
1043*38fd1498Szrj {
1044*38fd1498Szrj   int i;
1045*38fd1498Szrj   rtvec rt_val;
1046*38fd1498Szrj   va_list p;
1047*38fd1498Szrj 
1048*38fd1498Szrj   va_start (p, n);
1049*38fd1498Szrj 
1050*38fd1498Szrj   /* Don't allocate an empty rtvec...  */
1051*38fd1498Szrj   if (n == 0)
1052*38fd1498Szrj     {
1053*38fd1498Szrj       va_end (p);
1054*38fd1498Szrj       return NULL_RTVEC;
1055*38fd1498Szrj     }
1056*38fd1498Szrj 
1057*38fd1498Szrj   rt_val = rtvec_alloc (n);
1058*38fd1498Szrj 
1059*38fd1498Szrj   for (i = 0; i < n; i++)
1060*38fd1498Szrj     rt_val->elem[i] = va_arg (p, rtx);
1061*38fd1498Szrj 
1062*38fd1498Szrj   va_end (p);
1063*38fd1498Szrj   return rt_val;
1064*38fd1498Szrj }
1065*38fd1498Szrj 
1066*38fd1498Szrj rtvec
gen_rtvec_v(int n,rtx * argp)1067*38fd1498Szrj gen_rtvec_v (int n, rtx *argp)
1068*38fd1498Szrj {
1069*38fd1498Szrj   int i;
1070*38fd1498Szrj   rtvec rt_val;
1071*38fd1498Szrj 
1072*38fd1498Szrj   /* Don't allocate an empty rtvec...  */
1073*38fd1498Szrj   if (n == 0)
1074*38fd1498Szrj     return NULL_RTVEC;
1075*38fd1498Szrj 
1076*38fd1498Szrj   rt_val = rtvec_alloc (n);
1077*38fd1498Szrj 
1078*38fd1498Szrj   for (i = 0; i < n; i++)
1079*38fd1498Szrj     rt_val->elem[i] = *argp++;
1080*38fd1498Szrj 
1081*38fd1498Szrj   return rt_val;
1082*38fd1498Szrj }
1083*38fd1498Szrj 
1084*38fd1498Szrj rtvec
gen_rtvec_v(int n,rtx_insn ** argp)1085*38fd1498Szrj gen_rtvec_v (int n, rtx_insn **argp)
1086*38fd1498Szrj {
1087*38fd1498Szrj   int i;
1088*38fd1498Szrj   rtvec rt_val;
1089*38fd1498Szrj 
1090*38fd1498Szrj   /* Don't allocate an empty rtvec...  */
1091*38fd1498Szrj   if (n == 0)
1092*38fd1498Szrj     return NULL_RTVEC;
1093*38fd1498Szrj 
1094*38fd1498Szrj   rt_val = rtvec_alloc (n);
1095*38fd1498Szrj 
1096*38fd1498Szrj   for (i = 0; i < n; i++)
1097*38fd1498Szrj     rt_val->elem[i] = *argp++;
1098*38fd1498Szrj 
1099*38fd1498Szrj   return rt_val;
1100*38fd1498Szrj }
1101*38fd1498Szrj 
1102*38fd1498Szrj 
1103*38fd1498Szrj /* Return the number of bytes between the start of an OUTER_MODE
1104*38fd1498Szrj    in-memory value and the start of an INNER_MODE in-memory value,
1105*38fd1498Szrj    given that the former is a lowpart of the latter.  It may be a
1106*38fd1498Szrj    paradoxical lowpart, in which case the offset will be negative
1107*38fd1498Szrj    on big-endian targets.  */
1108*38fd1498Szrj 
1109*38fd1498Szrj poly_int64
byte_lowpart_offset(machine_mode outer_mode,machine_mode inner_mode)1110*38fd1498Szrj byte_lowpart_offset (machine_mode outer_mode,
1111*38fd1498Szrj 		     machine_mode inner_mode)
1112*38fd1498Szrj {
1113*38fd1498Szrj   if (paradoxical_subreg_p (outer_mode, inner_mode))
1114*38fd1498Szrj     return -subreg_lowpart_offset (inner_mode, outer_mode);
1115*38fd1498Szrj   else
1116*38fd1498Szrj     return subreg_lowpart_offset (outer_mode, inner_mode);
1117*38fd1498Szrj }
1118*38fd1498Szrj 
1119*38fd1498Szrj /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
1120*38fd1498Szrj    from address X.  For paradoxical big-endian subregs this is a
1121*38fd1498Szrj    negative value, otherwise it's the same as OFFSET.  */
1122*38fd1498Szrj 
1123*38fd1498Szrj poly_int64
subreg_memory_offset(machine_mode outer_mode,machine_mode inner_mode,poly_uint64 offset)1124*38fd1498Szrj subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
1125*38fd1498Szrj 		      poly_uint64 offset)
1126*38fd1498Szrj {
1127*38fd1498Szrj   if (paradoxical_subreg_p (outer_mode, inner_mode))
1128*38fd1498Szrj     {
1129*38fd1498Szrj       gcc_assert (known_eq (offset, 0U));
1130*38fd1498Szrj       return -subreg_lowpart_offset (inner_mode, outer_mode);
1131*38fd1498Szrj     }
1132*38fd1498Szrj   return offset;
1133*38fd1498Szrj }
1134*38fd1498Szrj 
1135*38fd1498Szrj /* As above, but return the offset that existing subreg X would have
1136*38fd1498Szrj    if SUBREG_REG (X) were stored in memory.  The only significant thing
1137*38fd1498Szrj    about the current SUBREG_REG is its mode.  */
1138*38fd1498Szrj 
1139*38fd1498Szrj poly_int64
subreg_memory_offset(const_rtx x)1140*38fd1498Szrj subreg_memory_offset (const_rtx x)
1141*38fd1498Szrj {
1142*38fd1498Szrj   return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
1143*38fd1498Szrj 			       SUBREG_BYTE (x));
1144*38fd1498Szrj }
1145*38fd1498Szrj 
1146*38fd1498Szrj /* Generate a REG rtx for a new pseudo register of mode MODE.
1147*38fd1498Szrj    This pseudo is assigned the next sequential register number.  */
1148*38fd1498Szrj 
rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  /* New pseudos may only be created while that is still allowed
     (i.e. not during/after register allocation).  */
  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  /* Grow the per-register tables if necessary, then hand out the next
     sequential pseudo register number.  */
  crtl->emit.ensure_regno_capacity ();
  gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
1196*38fd1498Szrj 
1197*38fd1498Szrj /* Make sure m_regno_pointer_align, and regno_reg_rtx are large
1198*38fd1498Szrj    enough to have elements in the range 0 <= idx <= reg_rtx_no.  */
1199*38fd1498Szrj 
void
emit_status::ensure_regno_capacity ()
{
  int old_size = regno_pointer_align_length;

  /* Nothing to do while reg_rtx_no still fits.  */
  if (reg_rtx_no < old_size)
    return;

  /* Grow geometrically until reg_rtx_no fits, to amortize the cost of
     repeated resizes.  */
  int new_size = old_size * 2;
  while (reg_rtx_no >= new_size)
    new_size *= 2;

  /* regno_pointer_align lives in ordinary heap memory; zero the newly
     added tail.  */
  char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
  memset (tmp + old_size, 0, new_size - old_size);
  regno_pointer_align = (unsigned char *) tmp;

  /* regno_reg_rtx is GC-allocated, so resize it through the GGC
     allocator and clear the new slots.  */
  rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
  memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
  regno_reg_rtx = new1;

  crtl->emit.regno_pointer_align_length = new_size;
}
1222*38fd1498Szrj 
1223*38fd1498Szrj /* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */
1224*38fd1498Szrj 
1225*38fd1498Szrj bool
reg_is_parm_p(rtx reg)1226*38fd1498Szrj reg_is_parm_p (rtx reg)
1227*38fd1498Szrj {
1228*38fd1498Szrj   tree decl;
1229*38fd1498Szrj 
1230*38fd1498Szrj   gcc_assert (REG_P (reg));
1231*38fd1498Szrj   decl = REG_EXPR (reg);
1232*38fd1498Szrj   return (decl && TREE_CODE (decl) == PARM_DECL);
1233*38fd1498Szrj }
1234*38fd1498Szrj 
1235*38fd1498Szrj /* Update NEW with the same attributes as REG, but with OFFSET added
1236*38fd1498Szrj    to the REG_OFFSET.  */
1237*38fd1498Szrj 
1238*38fd1498Szrj static void
update_reg_offset(rtx new_rtx,rtx reg,poly_int64 offset)1239*38fd1498Szrj update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
1240*38fd1498Szrj {
1241*38fd1498Szrj   REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
1242*38fd1498Szrj 				       REG_OFFSET (reg) + offset);
1243*38fd1498Szrj }
1244*38fd1498Szrj 
1245*38fd1498Szrj /* Generate a register with same attributes as REG, but with OFFSET
1246*38fd1498Szrj    added to the REG_OFFSET.  */
1247*38fd1498Szrj 
1248*38fd1498Szrj rtx
gen_rtx_REG_offset(rtx reg,machine_mode mode,unsigned int regno,poly_int64 offset)1249*38fd1498Szrj gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1250*38fd1498Szrj 		    poly_int64 offset)
1251*38fd1498Szrj {
1252*38fd1498Szrj   rtx new_rtx = gen_rtx_REG (mode, regno);
1253*38fd1498Szrj 
1254*38fd1498Szrj   update_reg_offset (new_rtx, reg, offset);
1255*38fd1498Szrj   return new_rtx;
1256*38fd1498Szrj }
1257*38fd1498Szrj 
1258*38fd1498Szrj /* Generate a new pseudo-register with the same attributes as REG, but
1259*38fd1498Szrj    with OFFSET added to the REG_OFFSET.  */
1260*38fd1498Szrj 
1261*38fd1498Szrj rtx
gen_reg_rtx_offset(rtx reg,machine_mode mode,int offset)1262*38fd1498Szrj gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
1263*38fd1498Szrj {
1264*38fd1498Szrj   rtx new_rtx = gen_reg_rtx (mode);
1265*38fd1498Szrj 
1266*38fd1498Szrj   update_reg_offset (new_rtx, reg, offset);
1267*38fd1498Szrj   return new_rtx;
1268*38fd1498Szrj }
1269*38fd1498Szrj 
1270*38fd1498Szrj /* Adjust REG in-place so that it has mode MODE.  It is assumed that the
1271*38fd1498Szrj    new register is a (possibly paradoxical) lowpart of the old one.  */
1272*38fd1498Szrj 
1273*38fd1498Szrj void
adjust_reg_mode(rtx reg,machine_mode mode)1274*38fd1498Szrj adjust_reg_mode (rtx reg, machine_mode mode)
1275*38fd1498Szrj {
1276*38fd1498Szrj   update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
1277*38fd1498Szrj   PUT_MODE (reg, mode);
1278*38fd1498Szrj }
1279*38fd1498Szrj 
1280*38fd1498Szrj /* Copy REG's attributes from X, if X has any attributes.  If REG and X
1281*38fd1498Szrj    have different modes, REG is a (possibly paradoxical) lowpart of X.  */
1282*38fd1498Szrj 
void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  poly_int64 offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  Peel off extension/truncation/lowpart-subreg wrappers
     to reach the underlying value.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED)
      /* A wrapper whose extension kind disagrees with how the target
	 extends pointers means the value cannot safely be treated as
	 a pointer (unless the target has a combined ptr_extend).  */
      if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	   || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
	   || (paradoxical_subreg_p (x)
	       && ! (SUBREG_PROMOTED_VAR_P (x)
		     && SUBREG_CHECK_PROMOTED_SIGN (x,
						    POINTERS_EXTEND_UNSIGNED))))
	  && !targetm.have_ptr_extend ())
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  /* Account for REG being a (possibly paradoxical) lowpart of X.  */
  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}
1332*38fd1498Szrj 
1333*38fd1498Szrj /* Generate a REG rtx for a new pseudo register, copying the mode
1334*38fd1498Szrj    and attributes from X.  */
1335*38fd1498Szrj 
1336*38fd1498Szrj rtx
gen_reg_rtx_and_attrs(rtx x)1337*38fd1498Szrj gen_reg_rtx_and_attrs (rtx x)
1338*38fd1498Szrj {
1339*38fd1498Szrj   rtx reg = gen_reg_rtx (GET_MODE (x));
1340*38fd1498Szrj   set_reg_attrs_from_value (reg, x);
1341*38fd1498Szrj   return reg;
1342*38fd1498Szrj }
1343*38fd1498Szrj 
1344*38fd1498Szrj /* Set the register attributes for registers contained in PARM_RTX.
1345*38fd1498Szrj    Use needed values from memory attributes of MEM.  */
1346*38fd1498Szrj 
1347*38fd1498Szrj void
set_reg_attrs_for_parm(rtx parm_rtx,rtx mem)1348*38fd1498Szrj set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1349*38fd1498Szrj {
1350*38fd1498Szrj   if (REG_P (parm_rtx))
1351*38fd1498Szrj     set_reg_attrs_from_value (parm_rtx, mem);
1352*38fd1498Szrj   else if (GET_CODE (parm_rtx) == PARALLEL)
1353*38fd1498Szrj     {
1354*38fd1498Szrj       /* Check for a NULL entry in the first slot, used to indicate that the
1355*38fd1498Szrj 	 parameter goes both on the stack and in registers.  */
1356*38fd1498Szrj       int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1357*38fd1498Szrj       for (; i < XVECLEN (parm_rtx, 0); i++)
1358*38fd1498Szrj 	{
1359*38fd1498Szrj 	  rtx x = XVECEXP (parm_rtx, 0, i);
1360*38fd1498Szrj 	  if (REG_P (XEXP (x, 0)))
1361*38fd1498Szrj 	    REG_ATTRS (XEXP (x, 0))
1362*38fd1498Szrj 	      = get_reg_attrs (MEM_EXPR (mem),
1363*38fd1498Szrj 			       INTVAL (XEXP (x, 1)));
1364*38fd1498Szrj 	}
1365*38fd1498Szrj     }
1366*38fd1498Szrj }
1367*38fd1498Szrj 
1368*38fd1498Szrj /* Set the REG_ATTRS for registers in value X, given that X represents
1369*38fd1498Szrj    decl T.  */
1370*38fd1498Szrj 
1371*38fd1498Szrj void
set_reg_attrs_for_decl_rtl(tree t,rtx x)1372*38fd1498Szrj set_reg_attrs_for_decl_rtl (tree t, rtx x)
1373*38fd1498Szrj {
1374*38fd1498Szrj   if (!t)
1375*38fd1498Szrj     return;
1376*38fd1498Szrj   tree tdecl = t;
1377*38fd1498Szrj   if (GET_CODE (x) == SUBREG)
1378*38fd1498Szrj     {
1379*38fd1498Szrj       gcc_assert (subreg_lowpart_p (x));
1380*38fd1498Szrj       x = SUBREG_REG (x);
1381*38fd1498Szrj     }
1382*38fd1498Szrj   if (REG_P (x))
1383*38fd1498Szrj     REG_ATTRS (x)
1384*38fd1498Szrj       = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1385*38fd1498Szrj 					       DECL_P (tdecl)
1386*38fd1498Szrj 					       ? DECL_MODE (tdecl)
1387*38fd1498Szrj 					       : TYPE_MODE (TREE_TYPE (tdecl))));
1388*38fd1498Szrj   if (GET_CODE (x) == CONCAT)
1389*38fd1498Szrj     {
1390*38fd1498Szrj       if (REG_P (XEXP (x, 0)))
1391*38fd1498Szrj         REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1392*38fd1498Szrj       if (REG_P (XEXP (x, 1)))
1393*38fd1498Szrj 	REG_ATTRS (XEXP (x, 1))
1394*38fd1498Szrj 	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1395*38fd1498Szrj     }
1396*38fd1498Szrj   if (GET_CODE (x) == PARALLEL)
1397*38fd1498Szrj     {
1398*38fd1498Szrj       int i, start;
1399*38fd1498Szrj 
1400*38fd1498Szrj       /* Check for a NULL entry, used to indicate that the parameter goes
1401*38fd1498Szrj 	 both on the stack and in registers.  */
1402*38fd1498Szrj       if (XEXP (XVECEXP (x, 0, 0), 0))
1403*38fd1498Szrj 	start = 0;
1404*38fd1498Szrj       else
1405*38fd1498Szrj 	start = 1;
1406*38fd1498Szrj 
1407*38fd1498Szrj       for (i = start; i < XVECLEN (x, 0); i++)
1408*38fd1498Szrj 	{
1409*38fd1498Szrj 	  rtx y = XVECEXP (x, 0, i);
1410*38fd1498Szrj 	  if (REG_P (XEXP (y, 0)))
1411*38fd1498Szrj 	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1412*38fd1498Szrj 	}
1413*38fd1498Szrj     }
1414*38fd1498Szrj }
1415*38fd1498Szrj 
1416*38fd1498Szrj /* Assign the RTX X to declaration T.  */
1417*38fd1498Szrj 
1418*38fd1498Szrj void
set_decl_rtl(tree t,rtx x)1419*38fd1498Szrj set_decl_rtl (tree t, rtx x)
1420*38fd1498Szrj {
1421*38fd1498Szrj   DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1422*38fd1498Szrj   if (x)
1423*38fd1498Szrj     set_reg_attrs_for_decl_rtl (t, x);
1424*38fd1498Szrj }
1425*38fd1498Szrj 
1426*38fd1498Szrj /* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
1427*38fd1498Szrj    if the ABI requires the parameter to be passed by reference.  */
1428*38fd1498Szrj 
1429*38fd1498Szrj void
set_decl_incoming_rtl(tree t,rtx x,bool by_reference_p)1430*38fd1498Szrj set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1431*38fd1498Szrj {
1432*38fd1498Szrj   DECL_INCOMING_RTL (t) = x;
1433*38fd1498Szrj   if (x && !by_reference_p)
1434*38fd1498Szrj     set_reg_attrs_for_decl_rtl (t, x);
1435*38fd1498Szrj }
1436*38fd1498Szrj 
1437*38fd1498Szrj /* Identify REG (which may be a CONCAT) as a user register.  */
1438*38fd1498Szrj 
1439*38fd1498Szrj void
mark_user_reg(rtx reg)1440*38fd1498Szrj mark_user_reg (rtx reg)
1441*38fd1498Szrj {
1442*38fd1498Szrj   if (GET_CODE (reg) == CONCAT)
1443*38fd1498Szrj     {
1444*38fd1498Szrj       REG_USERVAR_P (XEXP (reg, 0)) = 1;
1445*38fd1498Szrj       REG_USERVAR_P (XEXP (reg, 1)) = 1;
1446*38fd1498Szrj     }
1447*38fd1498Szrj   else
1448*38fd1498Szrj     {
1449*38fd1498Szrj       gcc_assert (REG_P (reg));
1450*38fd1498Szrj       REG_USERVAR_P (reg) = 1;
1451*38fd1498Szrj     }
1452*38fd1498Szrj }
1453*38fd1498Szrj 
1454*38fd1498Szrj /* Identify REG as a probable pointer register and show its alignment
1455*38fd1498Szrj    as ALIGN, if nonzero.  */
1456*38fd1498Szrj 
1457*38fd1498Szrj void
mark_reg_pointer(rtx reg,int align)1458*38fd1498Szrj mark_reg_pointer (rtx reg, int align)
1459*38fd1498Szrj {
1460*38fd1498Szrj   if (! REG_POINTER (reg))
1461*38fd1498Szrj     {
1462*38fd1498Szrj       REG_POINTER (reg) = 1;
1463*38fd1498Szrj 
1464*38fd1498Szrj       if (align)
1465*38fd1498Szrj 	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1466*38fd1498Szrj     }
1467*38fd1498Szrj   else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1468*38fd1498Szrj     /* We can no-longer be sure just how aligned this pointer is.  */
1469*38fd1498Szrj     REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1470*38fd1498Szrj }
1471*38fd1498Szrj 
1472*38fd1498Szrj /* Return 1 plus largest pseudo reg number used in the current function.  */
1473*38fd1498Szrj 
int
max_reg_num (void)
{
  /* reg_rtx_no is the number the next pseudo would get, i.e. 1 + the
     largest pseudo register number allocated so far.  */
  return reg_rtx_no;
}
1479*38fd1498Szrj 
1480*38fd1498Szrj /* Return 1 + the largest label number used so far in the current function.  */
1481*38fd1498Szrj 
int
max_label_num (void)
{
  /* label_num is the next label number to be handed out, i.e. 1 + the
     largest label number used so far.  */
  return label_num;
}
1487*38fd1498Szrj 
1488*38fd1498Szrj /* Return first label number used in this function (if any were used).  */
1489*38fd1498Szrj 
int
get_first_label_num (void)
{
  /* first_label_num is adjusted downwards by maybe_set_first_label_num
     when labels from nested-function expansion are seen.  */
  return first_label_num;
}
1495*38fd1498Szrj 
1496*38fd1498Szrj /* If the rtx for label was created during the expansion of a nested
1497*38fd1498Szrj    function, then first_label_num won't include this label number.
1498*38fd1498Szrj    Fix this now so that array indices work later.  */
1499*38fd1498Szrj 
1500*38fd1498Szrj void
maybe_set_first_label_num(rtx_code_label * x)1501*38fd1498Szrj maybe_set_first_label_num (rtx_code_label *x)
1502*38fd1498Szrj {
1503*38fd1498Szrj   if (CODE_LABEL_NUMBER (x) < first_label_num)
1504*38fd1498Szrj     first_label_num = CODE_LABEL_NUMBER (x);
1505*38fd1498Szrj }
1506*38fd1498Szrj 
1507*38fd1498Szrj /* For use by the RTL function loader, when mingling with normal
1508*38fd1498Szrj    functions.
1509*38fd1498Szrj    Ensure that label_num is greater than the label num of X, to avoid
1510*38fd1498Szrj    duplicate labels in the generated assembler.  */
1511*38fd1498Szrj 
1512*38fd1498Szrj void
maybe_set_max_label_num(rtx_code_label * x)1513*38fd1498Szrj maybe_set_max_label_num (rtx_code_label *x)
1514*38fd1498Szrj {
1515*38fd1498Szrj   if (CODE_LABEL_NUMBER (x) >= label_num)
1516*38fd1498Szrj     label_num = CODE_LABEL_NUMBER (x) + 1;
1517*38fd1498Szrj }
1518*38fd1498Szrj 
1519*38fd1498Szrj 
1520*38fd1498Szrj /* Return a value representing some low-order bits of X, where the number
1521*38fd1498Szrj    of low-order bits is given by MODE.  Note that no conversion is done
1522*38fd1498Szrj    between floating-point and fixed-point values, rather, the bit
1523*38fd1498Szrj    representation is returned.
1524*38fd1498Szrj 
1525*38fd1498Szrj    This function handles the cases in common between gen_lowpart, below,
1526*38fd1498Szrj    and two variants in cse.c and combine.c.  These are the cases that can
1527*38fd1498Szrj    be safely handled at all points in the compilation.
1528*38fd1498Szrj 
1529*38fd1498Szrj    If this is not a case we can handle, return 0.  */
1530*38fd1498Szrj 
rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  poly_uint64 msize = GET_MODE_SIZE (mode);
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  /* A CONST_INT has VOIDmode; if the requested piece fits in a host wide
     integer, treat the constant as having the host-wide-int-sized mode,
     otherwise fall back to a double-width integer mode below.  */
  if (CONST_INT_P (x)
      && known_le (msize * BITS_PER_UNIT,
		   (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
    innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
  else if (innermode == VOIDmode)
    innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  /* Nothing to do if X already has the requested mode.  */
  if (innermode == mode)
    return x;

  /* The size of the outer and inner modes must be ordered.  */
  poly_uint64 xsize = GET_MODE_SIZE (innermode);
  if (!ordered_p (msize, xsize))
    return 0;

  if (SCALAR_FLOAT_MODE_P (mode))
    {
      /* Don't allow paradoxical FLOAT_MODE subregs.  */
      if (maybe_gt (msize, xsize))
	return 0;
    }
  else
    {
      /* MODE must occupy no more of the underlying registers than X.
	 Compare the register counts after rounding each size up to a
	 whole number of natural-size registers.  */
      poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
      unsigned int mregs, xregs;
      if (!can_div_away_from_zero_p (msize, regsize, &mregs)
	  || !can_div_away_from_zero_p (xsize, regsize, &xregs)
	  || mregs > xregs)
	return 0;
    }

  scalar_int_mode int_mode, int_innermode, from_mode;
  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && is_a <scalar_int_mode> (innermode, &int_innermode)
      && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (from_mode == int_mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
	return gen_lowpart_common (int_mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
	return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
    }
  /* For the remaining representable cases, delegate to lowpart_subreg.  */
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
	   || CONST_POLY_INT_P (x))
    return lowpart_subreg (mode, x, innermode);

  /* Otherwise, we can't do this.  */
  return 0;
}
1604*38fd1498Szrj 
1605*38fd1498Szrj rtx
gen_highpart(machine_mode mode,rtx x)1606*38fd1498Szrj gen_highpart (machine_mode mode, rtx x)
1607*38fd1498Szrj {
1608*38fd1498Szrj   poly_uint64 msize = GET_MODE_SIZE (mode);
1609*38fd1498Szrj   rtx result;
1610*38fd1498Szrj 
1611*38fd1498Szrj   /* This case loses if X is a subreg.  To catch bugs early,
1612*38fd1498Szrj      complain if an invalid MODE is used even in other cases.  */
1613*38fd1498Szrj   gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
1614*38fd1498Szrj 	      || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));
1615*38fd1498Szrj 
1616*38fd1498Szrj   result = simplify_gen_subreg (mode, x, GET_MODE (x),
1617*38fd1498Szrj 				subreg_highpart_offset (mode, GET_MODE (x)));
1618*38fd1498Szrj   gcc_assert (result);
1619*38fd1498Szrj 
1620*38fd1498Szrj   /* simplify_gen_subreg is not guaranteed to return a valid operand for
1621*38fd1498Szrj      the target if we have a MEM.  gen_highpart must return a valid operand,
1622*38fd1498Szrj      emitting code if necessary to do so.  */
1623*38fd1498Szrj   if (MEM_P (result))
1624*38fd1498Szrj     {
1625*38fd1498Szrj       result = validize_mem (result);
1626*38fd1498Szrj       gcc_assert (result);
1627*38fd1498Szrj     }
1628*38fd1498Szrj 
1629*38fd1498Szrj   return result;
1630*38fd1498Szrj }
1631*38fd1498Szrj 
1632*38fd1498Szrj /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1633*38fd1498Szrj    be VOIDmode constant.  */
1634*38fd1498Szrj rtx
gen_highpart_mode(machine_mode outermode,machine_mode innermode,rtx exp)1635*38fd1498Szrj gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
1636*38fd1498Szrj {
1637*38fd1498Szrj   if (GET_MODE (exp) != VOIDmode)
1638*38fd1498Szrj     {
1639*38fd1498Szrj       gcc_assert (GET_MODE (exp) == innermode);
1640*38fd1498Szrj       return gen_highpart (outermode, exp);
1641*38fd1498Szrj     }
1642*38fd1498Szrj   return simplify_gen_subreg (outermode, exp, innermode,
1643*38fd1498Szrj 			      subreg_highpart_offset (outermode, innermode));
1644*38fd1498Szrj }
1645*38fd1498Szrj 
1646*38fd1498Szrj /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
1647*38fd1498Szrj    OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */
1648*38fd1498Szrj 
1649*38fd1498Szrj poly_uint64
subreg_size_lowpart_offset(poly_uint64 outer_bytes,poly_uint64 inner_bytes)1650*38fd1498Szrj subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1651*38fd1498Szrj {
1652*38fd1498Szrj   gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
1653*38fd1498Szrj   if (maybe_gt (outer_bytes, inner_bytes))
1654*38fd1498Szrj     /* Paradoxical subregs always have a SUBREG_BYTE of 0.  */
1655*38fd1498Szrj     return 0;
1656*38fd1498Szrj 
1657*38fd1498Szrj   if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1658*38fd1498Szrj     return inner_bytes - outer_bytes;
1659*38fd1498Szrj   else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1660*38fd1498Szrj     return 0;
1661*38fd1498Szrj   else
1662*38fd1498Szrj     return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
1663*38fd1498Szrj }
1664*38fd1498Szrj 
1665*38fd1498Szrj /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
1666*38fd1498Szrj    OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */
1667*38fd1498Szrj 
1668*38fd1498Szrj poly_uint64
subreg_size_highpart_offset(poly_uint64 outer_bytes,poly_uint64 inner_bytes)1669*38fd1498Szrj subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
1670*38fd1498Szrj {
1671*38fd1498Szrj   gcc_assert (known_ge (inner_bytes, outer_bytes));
1672*38fd1498Szrj 
1673*38fd1498Szrj   if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
1674*38fd1498Szrj     return 0;
1675*38fd1498Szrj   else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
1676*38fd1498Szrj     return inner_bytes - outer_bytes;
1677*38fd1498Szrj   else
1678*38fd1498Szrj     return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
1679*38fd1498Szrj 					(inner_bytes - outer_bytes)
1680*38fd1498Szrj 					* BITS_PER_UNIT);
1681*38fd1498Szrj }
1682*38fd1498Szrj 
1683*38fd1498Szrj /* Return 1 iff X, assumed to be a SUBREG,
1684*38fd1498Szrj    refers to the least significant part of its containing reg.
1685*38fd1498Szrj    If X is not a SUBREG, always return 1 (it is its own low part!).  */
1686*38fd1498Szrj 
1687*38fd1498Szrj int
subreg_lowpart_p(const_rtx x)1688*38fd1498Szrj subreg_lowpart_p (const_rtx x)
1689*38fd1498Szrj {
1690*38fd1498Szrj   if (GET_CODE (x) != SUBREG)
1691*38fd1498Szrj     return 1;
1692*38fd1498Szrj   else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1693*38fd1498Szrj     return 0;
1694*38fd1498Szrj 
1695*38fd1498Szrj   return known_eq (subreg_lowpart_offset (GET_MODE (x),
1696*38fd1498Szrj 					  GET_MODE (SUBREG_REG (x))),
1697*38fd1498Szrj 		   SUBREG_BYTE (x));
1698*38fd1498Szrj }
1699*38fd1498Szrj 
1700*38fd1498Szrj /* Return subword OFFSET of operand OP.
1701*38fd1498Szrj    The word number, OFFSET, is interpreted as the word number starting
1702*38fd1498Szrj    at the low-order address.  OFFSET 0 is the low-order word if not
1703*38fd1498Szrj    WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1704*38fd1498Szrj 
1705*38fd1498Szrj    If we cannot extract the required word, we return zero.  Otherwise,
1706*38fd1498Szrj    an rtx corresponding to the requested word will be returned.
1707*38fd1498Szrj 
1708*38fd1498Szrj    VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
1709*38fd1498Szrj    reload has completed, a valid address will always be returned.  After
1710*38fd1498Szrj    reload, if a valid address cannot be returned, we return zero.
1711*38fd1498Szrj 
1712*38fd1498Szrj    If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1713*38fd1498Szrj    it is the responsibility of the caller.
1714*38fd1498Szrj 
1715*38fd1498Szrj    MODE is the mode of OP in case it is a CONST_INT.
1716*38fd1498Szrj 
1717*38fd1498Szrj    ??? This is still rather broken for some cases.  The problem for the
1718*38fd1498Szrj    moment is that all callers of this thing provide no 'goal mode' to
1719*38fd1498Szrj    tell us to work with.  This exists because all callers were written
1720*38fd1498Szrj    in a word based SUBREG world.
1721*38fd1498Szrj    Now use of this function can be deprecated by simplify_subreg in most
1722*38fd1498Szrj    cases.
1723*38fd1498Szrj  */
1724*38fd1498Szrj 
rtx
operand_subword (rtx op, poly_uint64 offset, int validate_address,
		 machine_mode mode)
{
  /* Default MODE from OP itself; OP must then have a real mode.  */
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  /* After reload the address must already be strictly valid;
	     if it is not, fail.  If it is valid, fall through to the
	     simplify_gen_subreg call below.  */
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	/* Before reload, force the address into valid form.  */
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
1766*38fd1498Szrj 
1767*38fd1498Szrj /* Similar to `operand_subword', but never return 0.  If we can't
1768*38fd1498Szrj    extract the required subword, put OP into a register and try again.
1769*38fd1498Szrj    The second attempt must succeed.  We always validate the address in
1770*38fd1498Szrj    this case.
1771*38fd1498Szrj 
1772*38fd1498Szrj    MODE is the mode of OP, in case it is CONST_INT.  */
1773*38fd1498Szrj 
1774*38fd1498Szrj rtx
operand_subword_force(rtx op,poly_uint64 offset,machine_mode mode)1775*38fd1498Szrj operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
1776*38fd1498Szrj {
1777*38fd1498Szrj   rtx result = operand_subword (op, offset, 1, mode);
1778*38fd1498Szrj 
1779*38fd1498Szrj   if (result)
1780*38fd1498Szrj     return result;
1781*38fd1498Szrj 
1782*38fd1498Szrj   if (mode != BLKmode && mode != VOIDmode)
1783*38fd1498Szrj     {
1784*38fd1498Szrj       /* If this is a register which can not be accessed by words, copy it
1785*38fd1498Szrj 	 to a pseudo register.  */
1786*38fd1498Szrj       if (REG_P (op))
1787*38fd1498Szrj 	op = copy_to_reg (op);
1788*38fd1498Szrj       else
1789*38fd1498Szrj 	op = force_reg (mode, op);
1790*38fd1498Szrj     }
1791*38fd1498Szrj 
1792*38fd1498Szrj   result = operand_subword (op, offset, 1, mode);
1793*38fd1498Szrj   gcc_assert (result);
1794*38fd1498Szrj 
1795*38fd1498Szrj   return result;
1796*38fd1498Szrj }
1797*38fd1498Szrj 
/* Default constructor: no known MEM_EXPR, offset or size, zero alias
   set and alignment, and the generic address space.  */
mem_attrs::mem_attrs ()
  : expr (NULL_TREE),
    offset (0),
    size (0),
    alias (0),
    align (0),
    addrspace (ADDR_SPACE_GENERIC),
    offset_known_p (false),
    size_known_p (false)
{}
1808*38fd1498Szrj 
1809*38fd1498Szrj /* Returns 1 if both MEM_EXPR can be considered equal
1810*38fd1498Szrj    and 0 otherwise.  */
1811*38fd1498Szrj 
1812*38fd1498Szrj int
mem_expr_equal_p(const_tree expr1,const_tree expr2)1813*38fd1498Szrj mem_expr_equal_p (const_tree expr1, const_tree expr2)
1814*38fd1498Szrj {
1815*38fd1498Szrj   if (expr1 == expr2)
1816*38fd1498Szrj     return 1;
1817*38fd1498Szrj 
1818*38fd1498Szrj   if (! expr1 || ! expr2)
1819*38fd1498Szrj     return 0;
1820*38fd1498Szrj 
1821*38fd1498Szrj   if (TREE_CODE (expr1) != TREE_CODE (expr2))
1822*38fd1498Szrj     return 0;
1823*38fd1498Szrj 
1824*38fd1498Szrj   return operand_equal_p (expr1, expr2, 0);
1825*38fd1498Szrj }
1826*38fd1498Szrj 
1827*38fd1498Szrj /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1828*38fd1498Szrj    bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1829*38fd1498Szrj    -1 if not known.  */
1830*38fd1498Szrj 
int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  poly_uint64 offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	 || (MAX (MEM_ALIGN (mem),
	          MAX (align, get_object_alignment (MEM_EXPR (mem))))
	     < align))
       return -1;
     else
       return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  /* Without a known expression and offset we cannot say anything.  */
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      /* A decl's own alignment must be at least ALIGN.  */
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      /* For an indirect reference, rely on the pointed-to type's
	 alignment.  */
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      /* Walk up through nested COMPONENT_REFs, accumulating each field's
	 byte offset, until we reach a decl (or a NULL inner operand,
	 meaning an anonymous <variable> base) whose alignment we can
	 check.  */
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  /* Give up on variable or unrepresentable field offsets.  */
	  poly_uint64 suboffset;
	  if (!byte_offset
	      || !poly_int_tree_p (byte_offset, &suboffset)
	      || !tree_fits_uhwi_p (bit_offset))
	    return -1;

	  offset += suboffset;
	  offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      /* NULL base: check the alignment of the containing record
		 type instead.  */
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  /* The accumulated offset must have a compile-time-known misalignment
     relative to ALIGN for the result to be meaningful.  */
  HOST_WIDE_INT misalign;
  if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
    return -1;
  return misalign;
}
1912*38fd1498Szrj 
1913*38fd1498Szrj /* Given REF (a MEM) and T, either the type of X or the expression
1914*38fd1498Szrj    corresponding to REF, set the memory attributes.  OBJECTP is nonzero
1915*38fd1498Szrj    if we are making a new object of this type.  BITPOS is nonzero if
1916*38fd1498Szrj    there is an offset outstanding on T that will be applied later.  */
1917*38fd1498Szrj 
1918*38fd1498Szrj void
set_mem_attributes_minus_bitpos(rtx ref,tree t,int objectp,poly_int64 bitpos)1919*38fd1498Szrj set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1920*38fd1498Szrj 				 poly_int64 bitpos)
1921*38fd1498Szrj {
1922*38fd1498Szrj   poly_int64 apply_bitpos = 0;
1923*38fd1498Szrj   tree type;
1924*38fd1498Szrj   struct mem_attrs attrs, *defattrs, *refattrs;
1925*38fd1498Szrj   addr_space_t as;
1926*38fd1498Szrj 
1927*38fd1498Szrj   /* It can happen that type_for_mode was given a mode for which there
1928*38fd1498Szrj      is no language-level type.  In which case it returns NULL, which
1929*38fd1498Szrj      we can see here.  */
1930*38fd1498Szrj   if (t == NULL_TREE)
1931*38fd1498Szrj     return;
1932*38fd1498Szrj 
1933*38fd1498Szrj   type = TYPE_P (t) ? t : TREE_TYPE (t);
1934*38fd1498Szrj   if (type == error_mark_node)
1935*38fd1498Szrj     return;
1936*38fd1498Szrj 
1937*38fd1498Szrj   /* If we have already set DECL_RTL = ref, get_alias_set will get the
1938*38fd1498Szrj      wrong answer, as it assumes that DECL_RTL already has the right alias
1939*38fd1498Szrj      info.  Callers should not set DECL_RTL until after the call to
1940*38fd1498Szrj      set_mem_attributes.  */
1941*38fd1498Szrj   gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1942*38fd1498Szrj 
1943*38fd1498Szrj   /* Get the alias set from the expression or type (perhaps using a
1944*38fd1498Szrj      front-end routine) and use it.  */
1945*38fd1498Szrj   attrs.alias = get_alias_set (t);
1946*38fd1498Szrj 
1947*38fd1498Szrj   MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1948*38fd1498Szrj   MEM_POINTER (ref) = POINTER_TYPE_P (type);
1949*38fd1498Szrj 
1950*38fd1498Szrj   /* Default values from pre-existing memory attributes if present.  */
1951*38fd1498Szrj   refattrs = MEM_ATTRS (ref);
1952*38fd1498Szrj   if (refattrs)
1953*38fd1498Szrj     {
1954*38fd1498Szrj       /* ??? Can this ever happen?  Calling this routine on a MEM that
1955*38fd1498Szrj 	 already carries memory attributes should probably be invalid.  */
1956*38fd1498Szrj       attrs.expr = refattrs->expr;
1957*38fd1498Szrj       attrs.offset_known_p = refattrs->offset_known_p;
1958*38fd1498Szrj       attrs.offset = refattrs->offset;
1959*38fd1498Szrj       attrs.size_known_p = refattrs->size_known_p;
1960*38fd1498Szrj       attrs.size = refattrs->size;
1961*38fd1498Szrj       attrs.align = refattrs->align;
1962*38fd1498Szrj     }
1963*38fd1498Szrj 
1964*38fd1498Szrj   /* Otherwise, default values from the mode of the MEM reference.  */
1965*38fd1498Szrj   else
1966*38fd1498Szrj     {
1967*38fd1498Szrj       defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1968*38fd1498Szrj       gcc_assert (!defattrs->expr);
1969*38fd1498Szrj       gcc_assert (!defattrs->offset_known_p);
1970*38fd1498Szrj 
1971*38fd1498Szrj       /* Respect mode size.  */
1972*38fd1498Szrj       attrs.size_known_p = defattrs->size_known_p;
1973*38fd1498Szrj       attrs.size = defattrs->size;
1974*38fd1498Szrj       /* ??? Is this really necessary?  We probably should always get
1975*38fd1498Szrj 	 the size from the type below.  */
1976*38fd1498Szrj 
1977*38fd1498Szrj       /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1978*38fd1498Szrj          if T is an object, always compute the object alignment below.  */
1979*38fd1498Szrj       if (TYPE_P (t))
1980*38fd1498Szrj 	attrs.align = defattrs->align;
1981*38fd1498Szrj       else
1982*38fd1498Szrj 	attrs.align = BITS_PER_UNIT;
1983*38fd1498Szrj       /* ??? If T is a type, respecting mode alignment may *also* be wrong
1984*38fd1498Szrj 	 e.g. if the type carries an alignment attribute.  Should we be
1985*38fd1498Szrj 	 able to simply always use TYPE_ALIGN?  */
1986*38fd1498Szrj     }
1987*38fd1498Szrj 
1988*38fd1498Szrj   /* We can set the alignment from the type if we are making an object or if
1989*38fd1498Szrj      this is an INDIRECT_REF.  */
1990*38fd1498Szrj   if (objectp || TREE_CODE (t) == INDIRECT_REF)
1991*38fd1498Szrj     attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1992*38fd1498Szrj 
1993*38fd1498Szrj   /* If the size is known, we can set that.  */
1994*38fd1498Szrj   tree new_size = TYPE_SIZE_UNIT (type);
1995*38fd1498Szrj 
1996*38fd1498Szrj   /* The address-space is that of the type.  */
1997*38fd1498Szrj   as = TYPE_ADDR_SPACE (type);
1998*38fd1498Szrj 
1999*38fd1498Szrj   /* If T is not a type, we may be able to deduce some more information about
2000*38fd1498Szrj      the expression.  */
2001*38fd1498Szrj   if (! TYPE_P (t))
2002*38fd1498Szrj     {
2003*38fd1498Szrj       tree base;
2004*38fd1498Szrj 
2005*38fd1498Szrj       if (TREE_THIS_VOLATILE (t))
2006*38fd1498Szrj 	MEM_VOLATILE_P (ref) = 1;
2007*38fd1498Szrj 
2008*38fd1498Szrj       /* Now remove any conversions: they don't change what the underlying
2009*38fd1498Szrj 	 object is.  Likewise for SAVE_EXPR.  */
2010*38fd1498Szrj       while (CONVERT_EXPR_P (t)
2011*38fd1498Szrj 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
2012*38fd1498Szrj 	     || TREE_CODE (t) == SAVE_EXPR)
2013*38fd1498Szrj 	t = TREE_OPERAND (t, 0);
2014*38fd1498Szrj 
2015*38fd1498Szrj       /* Note whether this expression can trap.  */
2016*38fd1498Szrj       MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
2017*38fd1498Szrj 
2018*38fd1498Szrj       base = get_base_address (t);
2019*38fd1498Szrj       if (base)
2020*38fd1498Szrj 	{
2021*38fd1498Szrj 	  if (DECL_P (base)
2022*38fd1498Szrj 	      && TREE_READONLY (base)
2023*38fd1498Szrj 	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
2024*38fd1498Szrj 	      && !TREE_THIS_VOLATILE (base))
2025*38fd1498Szrj 	    MEM_READONLY_P (ref) = 1;
2026*38fd1498Szrj 
2027*38fd1498Szrj 	  /* Mark static const strings readonly as well.  */
2028*38fd1498Szrj 	  if (TREE_CODE (base) == STRING_CST
2029*38fd1498Szrj 	      && TREE_READONLY (base)
2030*38fd1498Szrj 	      && TREE_STATIC (base))
2031*38fd1498Szrj 	    MEM_READONLY_P (ref) = 1;
2032*38fd1498Szrj 
2033*38fd1498Szrj 	  /* Address-space information is on the base object.  */
2034*38fd1498Szrj 	  if (TREE_CODE (base) == MEM_REF
2035*38fd1498Szrj 	      || TREE_CODE (base) == TARGET_MEM_REF)
2036*38fd1498Szrj 	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
2037*38fd1498Szrj 								      0))));
2038*38fd1498Szrj 	  else
2039*38fd1498Szrj 	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
2040*38fd1498Szrj 	}
2041*38fd1498Szrj 
2042*38fd1498Szrj       /* If this expression uses it's parent's alias set, mark it such
2043*38fd1498Szrj 	 that we won't change it.  */
2044*38fd1498Szrj       if (component_uses_parent_alias_set_from (t) != NULL_TREE)
2045*38fd1498Szrj 	MEM_KEEP_ALIAS_SET_P (ref) = 1;
2046*38fd1498Szrj 
2047*38fd1498Szrj       /* If this is a decl, set the attributes of the MEM from it.  */
2048*38fd1498Szrj       if (DECL_P (t))
2049*38fd1498Szrj 	{
2050*38fd1498Szrj 	  attrs.expr = t;
2051*38fd1498Szrj 	  attrs.offset_known_p = true;
2052*38fd1498Szrj 	  attrs.offset = 0;
2053*38fd1498Szrj 	  apply_bitpos = bitpos;
2054*38fd1498Szrj 	  new_size = DECL_SIZE_UNIT (t);
2055*38fd1498Szrj 	}
2056*38fd1498Szrj 
2057*38fd1498Szrj       /* ???  If we end up with a constant here do record a MEM_EXPR.  */
2058*38fd1498Szrj       else if (CONSTANT_CLASS_P (t))
2059*38fd1498Szrj 	;
2060*38fd1498Szrj 
2061*38fd1498Szrj       /* If this is a field reference, record it.  */
2062*38fd1498Szrj       else if (TREE_CODE (t) == COMPONENT_REF)
2063*38fd1498Szrj 	{
2064*38fd1498Szrj 	  attrs.expr = t;
2065*38fd1498Szrj 	  attrs.offset_known_p = true;
2066*38fd1498Szrj 	  attrs.offset = 0;
2067*38fd1498Szrj 	  apply_bitpos = bitpos;
2068*38fd1498Szrj 	  if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
2069*38fd1498Szrj 	    new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
2070*38fd1498Szrj 	}
2071*38fd1498Szrj 
2072*38fd1498Szrj       /* If this is an array reference, look for an outer field reference.  */
2073*38fd1498Szrj       else if (TREE_CODE (t) == ARRAY_REF)
2074*38fd1498Szrj 	{
2075*38fd1498Szrj 	  tree off_tree = size_zero_node;
2076*38fd1498Szrj 	  /* We can't modify t, because we use it at the end of the
2077*38fd1498Szrj 	     function.  */
2078*38fd1498Szrj 	  tree t2 = t;
2079*38fd1498Szrj 
2080*38fd1498Szrj 	  do
2081*38fd1498Szrj 	    {
2082*38fd1498Szrj 	      tree index = TREE_OPERAND (t2, 1);
2083*38fd1498Szrj 	      tree low_bound = array_ref_low_bound (t2);
2084*38fd1498Szrj 	      tree unit_size = array_ref_element_size (t2);
2085*38fd1498Szrj 
2086*38fd1498Szrj 	      /* We assume all arrays have sizes that are a multiple of a byte.
2087*38fd1498Szrj 		 First subtract the lower bound, if any, in the type of the
2088*38fd1498Szrj 		 index, then convert to sizetype and multiply by the size of
2089*38fd1498Szrj 		 the array element.  */
2090*38fd1498Szrj 	      if (! integer_zerop (low_bound))
2091*38fd1498Szrj 		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
2092*38fd1498Szrj 				     index, low_bound);
2093*38fd1498Szrj 
2094*38fd1498Szrj 	      off_tree = size_binop (PLUS_EXPR,
2095*38fd1498Szrj 				     size_binop (MULT_EXPR,
2096*38fd1498Szrj 						 fold_convert (sizetype,
2097*38fd1498Szrj 							       index),
2098*38fd1498Szrj 						 unit_size),
2099*38fd1498Szrj 				     off_tree);
2100*38fd1498Szrj 	      t2 = TREE_OPERAND (t2, 0);
2101*38fd1498Szrj 	    }
2102*38fd1498Szrj 	  while (TREE_CODE (t2) == ARRAY_REF);
2103*38fd1498Szrj 
2104*38fd1498Szrj 	  if (DECL_P (t2)
2105*38fd1498Szrj 	      || (TREE_CODE (t2) == COMPONENT_REF
2106*38fd1498Szrj 		  /* For trailing arrays t2 doesn't have a size that
2107*38fd1498Szrj 		     covers all valid accesses.  */
2108*38fd1498Szrj 		  && ! array_at_struct_end_p (t)))
2109*38fd1498Szrj 	    {
2110*38fd1498Szrj 	      attrs.expr = t2;
2111*38fd1498Szrj 	      attrs.offset_known_p = false;
2112*38fd1498Szrj 	      if (poly_int_tree_p (off_tree, &attrs.offset))
2113*38fd1498Szrj 		{
2114*38fd1498Szrj 		  attrs.offset_known_p = true;
2115*38fd1498Szrj 		  apply_bitpos = bitpos;
2116*38fd1498Szrj 		}
2117*38fd1498Szrj 	    }
2118*38fd1498Szrj 	  /* Else do not record a MEM_EXPR.  */
2119*38fd1498Szrj 	}
2120*38fd1498Szrj 
2121*38fd1498Szrj       /* If this is an indirect reference, record it.  */
2122*38fd1498Szrj       else if (TREE_CODE (t) == MEM_REF
2123*38fd1498Szrj 	       || TREE_CODE (t) == TARGET_MEM_REF)
2124*38fd1498Szrj 	{
2125*38fd1498Szrj 	  attrs.expr = t;
2126*38fd1498Szrj 	  attrs.offset_known_p = true;
2127*38fd1498Szrj 	  attrs.offset = 0;
2128*38fd1498Szrj 	  apply_bitpos = bitpos;
2129*38fd1498Szrj 	}
2130*38fd1498Szrj 
2131*38fd1498Szrj       /* Compute the alignment.  */
2132*38fd1498Szrj       unsigned int obj_align;
2133*38fd1498Szrj       unsigned HOST_WIDE_INT obj_bitpos;
2134*38fd1498Szrj       get_object_alignment_1 (t, &obj_align, &obj_bitpos);
2135*38fd1498Szrj       unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
2136*38fd1498Szrj       if (diff_align != 0)
2137*38fd1498Szrj 	obj_align = MIN (obj_align, diff_align);
2138*38fd1498Szrj       attrs.align = MAX (attrs.align, obj_align);
2139*38fd1498Szrj     }
2140*38fd1498Szrj 
2141*38fd1498Szrj   poly_uint64 const_size;
2142*38fd1498Szrj   if (poly_int_tree_p (new_size, &const_size))
2143*38fd1498Szrj     {
2144*38fd1498Szrj       attrs.size_known_p = true;
2145*38fd1498Szrj       attrs.size = const_size;
2146*38fd1498Szrj     }
2147*38fd1498Szrj 
2148*38fd1498Szrj   /* If we modified OFFSET based on T, then subtract the outstanding
2149*38fd1498Szrj      bit position offset.  Similarly, increase the size of the accessed
2150*38fd1498Szrj      object to contain the negative offset.  */
2151*38fd1498Szrj   if (maybe_ne (apply_bitpos, 0))
2152*38fd1498Szrj     {
2153*38fd1498Szrj       gcc_assert (attrs.offset_known_p);
2154*38fd1498Szrj       poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
2155*38fd1498Szrj       attrs.offset -= bytepos;
2156*38fd1498Szrj       if (attrs.size_known_p)
2157*38fd1498Szrj 	attrs.size += bytepos;
2158*38fd1498Szrj     }
2159*38fd1498Szrj 
2160*38fd1498Szrj   /* Now set the attributes we computed above.  */
2161*38fd1498Szrj   attrs.addrspace = as;
2162*38fd1498Szrj   set_mem_attrs (ref, &attrs);
2163*38fd1498Szrj }
2164*38fd1498Szrj 
/* Set the memory attributes of REF from the tree expression T.  OBJECTP
   is nonzero if T is to be treated as an actual object (affects how the
   alignment is derived).  Convenience wrapper that assumes the MEM
   address is not offset from the start of T.  */

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
2170*38fd1498Szrj 
2171*38fd1498Szrj /* Set the alias set of MEM to SET.  */
2172*38fd1498Szrj 
2173*38fd1498Szrj void
set_mem_alias_set(rtx mem,alias_set_type set)2174*38fd1498Szrj set_mem_alias_set (rtx mem, alias_set_type set)
2175*38fd1498Szrj {
2176*38fd1498Szrj   /* If the new and old alias sets don't conflict, something is wrong.  */
2177*38fd1498Szrj   gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2178*38fd1498Szrj   mem_attrs attrs (*get_mem_attrs (mem));
2179*38fd1498Szrj   attrs.alias = set;
2180*38fd1498Szrj   set_mem_attrs (mem, &attrs);
2181*38fd1498Szrj }
2182*38fd1498Szrj 
2183*38fd1498Szrj /* Set the address space of MEM to ADDRSPACE (target-defined).  */
2184*38fd1498Szrj 
2185*38fd1498Szrj void
set_mem_addr_space(rtx mem,addr_space_t addrspace)2186*38fd1498Szrj set_mem_addr_space (rtx mem, addr_space_t addrspace)
2187*38fd1498Szrj {
2188*38fd1498Szrj   mem_attrs attrs (*get_mem_attrs (mem));
2189*38fd1498Szrj   attrs.addrspace = addrspace;
2190*38fd1498Szrj   set_mem_attrs (mem, &attrs);
2191*38fd1498Szrj }
2192*38fd1498Szrj 
2193*38fd1498Szrj /* Set the alignment of MEM to ALIGN bits.  */
2194*38fd1498Szrj 
2195*38fd1498Szrj void
set_mem_align(rtx mem,unsigned int align)2196*38fd1498Szrj set_mem_align (rtx mem, unsigned int align)
2197*38fd1498Szrj {
2198*38fd1498Szrj   mem_attrs attrs (*get_mem_attrs (mem));
2199*38fd1498Szrj   attrs.align = align;
2200*38fd1498Szrj   set_mem_attrs (mem, &attrs);
2201*38fd1498Szrj }
2202*38fd1498Szrj 
2203*38fd1498Szrj /* Set the expr for MEM to EXPR.  */
2204*38fd1498Szrj 
2205*38fd1498Szrj void
set_mem_expr(rtx mem,tree expr)2206*38fd1498Szrj set_mem_expr (rtx mem, tree expr)
2207*38fd1498Szrj {
2208*38fd1498Szrj   mem_attrs attrs (*get_mem_attrs (mem));
2209*38fd1498Szrj   attrs.expr = expr;
2210*38fd1498Szrj   set_mem_attrs (mem, &attrs);
2211*38fd1498Szrj }
2212*38fd1498Szrj 
2213*38fd1498Szrj /* Set the offset of MEM to OFFSET.  */
2214*38fd1498Szrj 
2215*38fd1498Szrj void
set_mem_offset(rtx mem,poly_int64 offset)2216*38fd1498Szrj set_mem_offset (rtx mem, poly_int64 offset)
2217*38fd1498Szrj {
2218*38fd1498Szrj   mem_attrs attrs (*get_mem_attrs (mem));
2219*38fd1498Szrj   attrs.offset_known_p = true;
2220*38fd1498Szrj   attrs.offset = offset;
2221*38fd1498Szrj   set_mem_attrs (mem, &attrs);
2222*38fd1498Szrj }
2223*38fd1498Szrj 
2224*38fd1498Szrj /* Clear the offset of MEM.  */
2225*38fd1498Szrj 
2226*38fd1498Szrj void
clear_mem_offset(rtx mem)2227*38fd1498Szrj clear_mem_offset (rtx mem)
2228*38fd1498Szrj {
2229*38fd1498Szrj   mem_attrs attrs (*get_mem_attrs (mem));
2230*38fd1498Szrj   attrs.offset_known_p = false;
2231*38fd1498Szrj   set_mem_attrs (mem, &attrs);
2232*38fd1498Szrj }
2233*38fd1498Szrj 
2234*38fd1498Szrj /* Set the size of MEM to SIZE.  */
2235*38fd1498Szrj 
2236*38fd1498Szrj void
set_mem_size(rtx mem,poly_int64 size)2237*38fd1498Szrj set_mem_size (rtx mem, poly_int64 size)
2238*38fd1498Szrj {
2239*38fd1498Szrj   mem_attrs attrs (*get_mem_attrs (mem));
2240*38fd1498Szrj   attrs.size_known_p = true;
2241*38fd1498Szrj   attrs.size = size;
2242*38fd1498Szrj   set_mem_attrs (mem, &attrs);
2243*38fd1498Szrj }
2244*38fd1498Szrj 
2245*38fd1498Szrj /* Clear the size of MEM.  */
2246*38fd1498Szrj 
2247*38fd1498Szrj void
clear_mem_size(rtx mem)2248*38fd1498Szrj clear_mem_size (rtx mem)
2249*38fd1498Szrj {
2250*38fd1498Szrj   mem_attrs attrs (*get_mem_attrs (mem));
2251*38fd1498Szrj   attrs.size_known_p = false;
2252*38fd1498Szrj   set_mem_attrs (mem, &attrs);
2253*38fd1498Szrj }
2254*38fd1498Szrj 
2255*38fd1498Szrj /* Return a memory reference like MEMREF, but with its mode changed to MODE
2256*38fd1498Szrj    and its address changed to ADDR.  (VOIDmode means don't change the mode.
2257*38fd1498Szrj    NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
2258*38fd1498Szrj    returned memory location is required to be valid.  INPLACE is true if any
2259*38fd1498Szrj    changes can be made directly to MEMREF or false if MEMREF must be treated
2260*38fd1498Szrj    as immutable.
2261*38fd1498Szrj 
2262*38fd1498Szrj    The memory attributes are not changed.  */
2263*38fd1498Szrj 
2264*38fd1498Szrj static rtx
change_address_1(rtx memref,machine_mode mode,rtx addr,int validate,bool inplace)2265*38fd1498Szrj change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
2266*38fd1498Szrj 		  bool inplace)
2267*38fd1498Szrj {
2268*38fd1498Szrj   addr_space_t as;
2269*38fd1498Szrj   rtx new_rtx;
2270*38fd1498Szrj 
2271*38fd1498Szrj   gcc_assert (MEM_P (memref));
2272*38fd1498Szrj   as = MEM_ADDR_SPACE (memref);
2273*38fd1498Szrj   if (mode == VOIDmode)
2274*38fd1498Szrj     mode = GET_MODE (memref);
2275*38fd1498Szrj   if (addr == 0)
2276*38fd1498Szrj     addr = XEXP (memref, 0);
2277*38fd1498Szrj   if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
2278*38fd1498Szrj       && (!validate || memory_address_addr_space_p (mode, addr, as)))
2279*38fd1498Szrj     return memref;
2280*38fd1498Szrj 
2281*38fd1498Szrj   /* Don't validate address for LRA.  LRA can make the address valid
2282*38fd1498Szrj      by itself in most efficient way.  */
2283*38fd1498Szrj   if (validate && !lra_in_progress)
2284*38fd1498Szrj     {
2285*38fd1498Szrj       if (reload_in_progress || reload_completed)
2286*38fd1498Szrj 	gcc_assert (memory_address_addr_space_p (mode, addr, as));
2287*38fd1498Szrj       else
2288*38fd1498Szrj 	addr = memory_address_addr_space (mode, addr, as);
2289*38fd1498Szrj     }
2290*38fd1498Szrj 
2291*38fd1498Szrj   if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2292*38fd1498Szrj     return memref;
2293*38fd1498Szrj 
2294*38fd1498Szrj   if (inplace)
2295*38fd1498Szrj     {
2296*38fd1498Szrj       XEXP (memref, 0) = addr;
2297*38fd1498Szrj       return memref;
2298*38fd1498Szrj     }
2299*38fd1498Szrj 
2300*38fd1498Szrj   new_rtx = gen_rtx_MEM (mode, addr);
2301*38fd1498Szrj   MEM_COPY_ATTRIBUTES (new_rtx, memref);
2302*38fd1498Szrj   return new_rtx;
2303*38fd1498Szrj }
2304*38fd1498Szrj 
2305*38fd1498Szrj /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2306*38fd1498Szrj    way we are changing MEMREF, so we only preserve the alias set.  */
2307*38fd1498Szrj 
2308*38fd1498Szrj rtx
change_address(rtx memref,machine_mode mode,rtx addr)2309*38fd1498Szrj change_address (rtx memref, machine_mode mode, rtx addr)
2310*38fd1498Szrj {
2311*38fd1498Szrj   rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
2312*38fd1498Szrj   machine_mode mmode = GET_MODE (new_rtx);
2313*38fd1498Szrj   struct mem_attrs *defattrs;
2314*38fd1498Szrj 
2315*38fd1498Szrj   mem_attrs attrs (*get_mem_attrs (memref));
2316*38fd1498Szrj   defattrs = mode_mem_attrs[(int) mmode];
2317*38fd1498Szrj   attrs.expr = NULL_TREE;
2318*38fd1498Szrj   attrs.offset_known_p = false;
2319*38fd1498Szrj   attrs.size_known_p = defattrs->size_known_p;
2320*38fd1498Szrj   attrs.size = defattrs->size;
2321*38fd1498Szrj   attrs.align = defattrs->align;
2322*38fd1498Szrj 
2323*38fd1498Szrj   /* If there are no changes, just return the original memory reference.  */
2324*38fd1498Szrj   if (new_rtx == memref)
2325*38fd1498Szrj     {
2326*38fd1498Szrj       if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2327*38fd1498Szrj 	return new_rtx;
2328*38fd1498Szrj 
2329*38fd1498Szrj       new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2330*38fd1498Szrj       MEM_COPY_ATTRIBUTES (new_rtx, memref);
2331*38fd1498Szrj     }
2332*38fd1498Szrj 
2333*38fd1498Szrj   set_mem_attrs (new_rtx, &attrs);
2334*38fd1498Szrj   return new_rtx;
2335*38fd1498Szrj }
2336*38fd1498Szrj 
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting MEMREF base register.
   If ADJUST_OBJECT is zero, the underlying object associated with the
   memory reference is left unchanged and the caller is responsible for
   dealing with it.  Otherwise, if the new memory reference is outside
   the underlying object, even partially, then the object is dropped.
   SIZE, if nonzero, is the size of an access in cases where MODE
   has no inherent size.  */

rtx
adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
		  int validate, int adjust_address, int adjust_object,
		  poly_int64 size)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  scalar_int_mode address_mode;
  struct mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
  unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
  scalar_int_mode pointer_mode
    = targetm.addr_space.pointer_mode (attrs.addrspace);
#endif

  /* VOIDmode means no mode change for change_address_1.  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);

  /* Take the size of non-BLKmode accesses from the mode.  */
  defattrs = mode_mem_attrs[(int) mode];
  if (defattrs->size_known_p)
    size = defattrs->size;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref)
      && known_eq (offset, 0)
      && (known_eq (size, 0)
	  || (attrs.size_known_p && known_eq (attrs.size, size)))
      && (!validate || memory_address_addr_space_p (mode, addr,
						    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = get_address_mode (memref);
  offset = trunc_int_for_mode (offset, address_mode);

  if (adjust_address)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
	 object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode
	  && GET_CODE (addr) == LO_SUM
	  && known_in_range_p (offset,
			       0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
				   / BITS_PER_UNIT)))
	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
			       plus_constant (address_mode,
					      XEXP (addr, 1), offset));
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
	 in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
	 the fact that pointers are not allowed to overflow.  */
      else if (POINTERS_EXTEND_UNSIGNED > 0
	       && GET_CODE (addr) == ZERO_EXTEND
	       && GET_MODE (XEXP (addr, 0)) == pointer_mode
	       && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
	addr = gen_rtx_ZERO_EXTEND (address_mode,
				    plus_constant (pointer_mode,
						   XEXP (addr, 0), offset));
#endif
      else
	addr = plus_constant (address_mode, addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate, false);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && maybe_ne (offset, 0))
    new_rtx = copy_rtx (new_rtx);

  /* Conservatively drop the object if we don't know where we start from.  */
  if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
    {
      attrs.expr = NULL_TREE;
      attrs.alias = 0;
    }

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    {
      attrs.offset += offset;

      /* Drop the object if the new left end is not within its bounds.  */
      if (adjust_object && maybe_lt (attrs.offset, 0))
	{
	  attrs.expr = NULL_TREE;
	  attrs.alias = 0;
	}
    }

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (maybe_ne (offset, 0))
    {
      max_align = known_alignment (offset) * BITS_PER_UNIT;
      attrs.align = MIN (attrs.align, max_align);
    }

  if (maybe_ne (size, 0))
    {
      /* Drop the object if the new right end is not within its bounds.  */
      if (adjust_object && maybe_gt (offset + size, attrs.size))
	{
	  attrs.expr = NULL_TREE;
	  attrs.alias = 0;
	}
      attrs.size_known_p = true;
      attrs.size = size;
    }
  else if (attrs.size_known_p)
    {
      /* No explicit size: the access shrinks by the bytes we stepped over.
	 ADJUST_OBJECT callers must always supply a size.  */
      gcc_assert (!adjust_object);
      attrs.size -= offset;
      /* ??? The store_by_pieces machinery generates negative sizes,
	 so don't assert for that here.  */
    }

  set_mem_attrs (new_rtx, &attrs);

  return new_rtx;
}
2480*38fd1498Szrj 
2481*38fd1498Szrj /* Return a memory reference like MEMREF, but with its mode changed
2482*38fd1498Szrj    to MODE and its address changed to ADDR, which is assumed to be
2483*38fd1498Szrj    MEMREF offset by OFFSET bytes.  If VALIDATE is
2484*38fd1498Szrj    nonzero, the memory address is forced to be valid.  */
2485*38fd1498Szrj 
2486*38fd1498Szrj rtx
adjust_automodify_address_1(rtx memref,machine_mode mode,rtx addr,poly_int64 offset,int validate)2487*38fd1498Szrj adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2488*38fd1498Szrj 			     poly_int64 offset, int validate)
2489*38fd1498Szrj {
2490*38fd1498Szrj   memref = change_address_1 (memref, VOIDmode, addr, validate, false);
2491*38fd1498Szrj   return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2492*38fd1498Szrj }
2493*38fd1498Szrj 
/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  machine_mode address_mode;
  struct mem_attrs *defattrs;

  mem_attrs attrs (*get_mem_attrs (memref));
  address_mode = get_address_mode (memref);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
				     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      /* Force the PIC-base sum into a register and retry the addition.  */
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  Size falls back to the mode default; only POW2 bytes
     of alignment are guaranteed to survive the variable offset.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
2542*38fd1498Szrj 
/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  INPLACE is true if any changes
   can be made directly to MEMREF or false if MEMREF must be treated as
   immutable.  */

rtx
replace_equiv_address (rtx memref, rtx addr, bool inplace)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1, inplace);
}
2558*38fd1498Szrj 
/* Likewise, but the reference is not required to be valid
   (VALIDATE is 0, so the address is not legitimized).  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
{
  return change_address_1 (memref, VOIDmode, addr, 0, inplace);
}
2566*38fd1498Szrj 
/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
  poly_uint64 size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  mem_attrs attrs (*get_mem_attrs (new_rtx));

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  /* Walk outward through the MEM_EXPR until we find an enclosing object
     large enough to contain the widened access, accumulating the offset
     of each stripped component; give up (clear the expr) otherwise.  */
  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (attrs.expr, 1);
	  tree offset = component_ref_field_offset (attrs.expr);

	  if (! DECL_SIZE_UNIT (field))
	    {
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  /* Is the field at least as large as the access?  If so, ok,
	     otherwise strip back to the containing structure.  */
	  if (poly_int_tree_p (DECL_SIZE_UNIT (field))
	      && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
	      && known_ge (attrs.offset, 0))
	    break;

	  poly_uint64 suboffset;
	  if (!poly_int_tree_p (offset, &suboffset))
	    {
	      /* Variable field offset: position within the parent is
		 unknown, so no MEM_EXPR can be kept.  */
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  /* Step out to the containing object, folding the field's byte
	     offset (variable part + constant bit offset) into OFFSET.  */
	  attrs.expr = TREE_OPERAND (attrs.expr, 0);
	  attrs.offset += suboffset;
	  attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
			   / BITS_PER_UNIT);
	}
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
	       && DECL_SIZE_UNIT (attrs.expr)
	       && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
	       && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
			   size)
	       && known_ge (attrs.offset, 0))
	break;
      else
	{
	  /* The widened memory access overflows the expression, which means
	     that it could alias another expression.  Zap it.  */
	  attrs.expr = NULL_TREE;
	  break;
	}
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
2649*38fd1498Szrj 
2650*38fd1498Szrj /* A fake decl that is used as the MEM_EXPR of spill slots.  */
2651*38fd1498Szrj static GTY(()) tree spill_slot_decl;
2652*38fd1498Szrj 
2653*38fd1498Szrj tree
get_spill_slot_decl(bool force_build_p)2654*38fd1498Szrj get_spill_slot_decl (bool force_build_p)
2655*38fd1498Szrj {
2656*38fd1498Szrj   tree d = spill_slot_decl;
2657*38fd1498Szrj   rtx rd;
2658*38fd1498Szrj 
2659*38fd1498Szrj   if (d || !force_build_p)
2660*38fd1498Szrj     return d;
2661*38fd1498Szrj 
2662*38fd1498Szrj   d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2663*38fd1498Szrj 		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
2664*38fd1498Szrj   DECL_ARTIFICIAL (d) = 1;
2665*38fd1498Szrj   DECL_IGNORED_P (d) = 1;
2666*38fd1498Szrj   TREE_USED (d) = 1;
2667*38fd1498Szrj   spill_slot_decl = d;
2668*38fd1498Szrj 
2669*38fd1498Szrj   rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2670*38fd1498Szrj   MEM_NOTRAP_P (rd) = 1;
2671*38fd1498Szrj   mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
2672*38fd1498Szrj   attrs.alias = new_alias_set ();
2673*38fd1498Szrj   attrs.expr = d;
2674*38fd1498Szrj   set_mem_attrs (rd, &attrs);
2675*38fd1498Szrj   SET_DECL_RTL (d, rd);
2676*38fd1498Szrj 
2677*38fd1498Szrj   return d;
2678*38fd1498Szrj }
2679*38fd1498Szrj 
2680*38fd1498Szrj /* Given MEM, a result from assign_stack_local, fill in the memory
2681*38fd1498Szrj    attributes as appropriate for a register allocator spill slot.
2682*38fd1498Szrj    These slots are not aliasable by other memory.  We arrange for
2683*38fd1498Szrj    them all to use a single MEM_EXPR, so that the aliasing code can
2684*38fd1498Szrj    work properly in the case of shared spill slots.  */
2685*38fd1498Szrj 
2686*38fd1498Szrj void
set_mem_attrs_for_spill(rtx mem)2687*38fd1498Szrj set_mem_attrs_for_spill (rtx mem)
2688*38fd1498Szrj {
2689*38fd1498Szrj   rtx addr;
2690*38fd1498Szrj 
2691*38fd1498Szrj   mem_attrs attrs (*get_mem_attrs (mem));
2692*38fd1498Szrj   attrs.expr = get_spill_slot_decl (true);
2693*38fd1498Szrj   attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2694*38fd1498Szrj   attrs.addrspace = ADDR_SPACE_GENERIC;
2695*38fd1498Szrj 
2696*38fd1498Szrj   /* We expect the incoming memory to be of the form:
2697*38fd1498Szrj 	(mem:MODE (plus (reg sfp) (const_int offset)))
2698*38fd1498Szrj      with perhaps the plus missing for offset = 0.  */
2699*38fd1498Szrj   addr = XEXP (mem, 0);
2700*38fd1498Szrj   attrs.offset_known_p = true;
2701*38fd1498Szrj   strip_offset (addr, &attrs.offset);
2702*38fd1498Szrj 
2703*38fd1498Szrj   set_mem_attrs (mem, &attrs);
2704*38fd1498Szrj   MEM_NOTRAP_P (mem) = 1;
2705*38fd1498Szrj }
2706*38fd1498Szrj 
2707*38fd1498Szrj /* Return a newly created CODE_LABEL rtx with a unique label number.  */
2708*38fd1498Szrj 
2709*38fd1498Szrj rtx_code_label *
gen_label_rtx(void)2710*38fd1498Szrj gen_label_rtx (void)
2711*38fd1498Szrj {
2712*38fd1498Szrj   return as_a <rtx_code_label *> (
2713*38fd1498Szrj 	    gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
2714*38fd1498Szrj 				NULL, label_num++, NULL));
2715*38fd1498Szrj }
2716*38fd1498Szrj 
2717*38fd1498Szrj /* For procedure integration.  */
2718*38fd1498Szrj 
2719*38fd1498Szrj /* Install new pointers to the first and last insns in the chain.
2720*38fd1498Szrj    Also, set cur_insn_uid to one higher than the last in use.
2721*38fd1498Szrj    Used for an inline-procedure after copying the insn chain.  */
2722*38fd1498Szrj 
2723*38fd1498Szrj void
set_new_first_and_last_insn(rtx_insn * first,rtx_insn * last)2724*38fd1498Szrj set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
2725*38fd1498Szrj {
2726*38fd1498Szrj   rtx_insn *insn;
2727*38fd1498Szrj 
2728*38fd1498Szrj   set_first_insn (first);
2729*38fd1498Szrj   set_last_insn (last);
2730*38fd1498Szrj   cur_insn_uid = 0;
2731*38fd1498Szrj 
2732*38fd1498Szrj   if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2733*38fd1498Szrj     {
2734*38fd1498Szrj       int debug_count = 0;
2735*38fd1498Szrj 
2736*38fd1498Szrj       cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2737*38fd1498Szrj       cur_debug_insn_uid = 0;
2738*38fd1498Szrj 
2739*38fd1498Szrj       for (insn = first; insn; insn = NEXT_INSN (insn))
2740*38fd1498Szrj 	if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2741*38fd1498Szrj 	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2742*38fd1498Szrj 	else
2743*38fd1498Szrj 	  {
2744*38fd1498Szrj 	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2745*38fd1498Szrj 	    if (DEBUG_INSN_P (insn))
2746*38fd1498Szrj 	      debug_count++;
2747*38fd1498Szrj 	  }
2748*38fd1498Szrj 
2749*38fd1498Szrj       if (debug_count)
2750*38fd1498Szrj 	cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2751*38fd1498Szrj       else
2752*38fd1498Szrj 	cur_debug_insn_uid++;
2753*38fd1498Szrj     }
2754*38fd1498Szrj   else
2755*38fd1498Szrj     for (insn = first; insn; insn = NEXT_INSN (insn))
2756*38fd1498Szrj       cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2757*38fd1498Szrj 
2758*38fd1498Szrj   cur_insn_uid++;
2759*38fd1498Szrj }
2760*38fd1498Szrj 
2761*38fd1498Szrj /* Go through all the RTL insn bodies and copy any invalid shared
2762*38fd1498Szrj    structure.  This routine should only be called once.  */
2763*38fd1498Szrj 
2764*38fd1498Szrj static void
unshare_all_rtl_1(rtx_insn * insn)2765*38fd1498Szrj unshare_all_rtl_1 (rtx_insn *insn)
2766*38fd1498Szrj {
2767*38fd1498Szrj   /* Unshare just about everything else.  */
2768*38fd1498Szrj   unshare_all_rtl_in_chain (insn);
2769*38fd1498Szrj 
2770*38fd1498Szrj   /* Make sure the addresses of stack slots found outside the insn chain
2771*38fd1498Szrj      (such as, in DECL_RTL of a variable) are not shared
2772*38fd1498Szrj      with the insn chain.
2773*38fd1498Szrj 
2774*38fd1498Szrj      This special care is necessary when the stack slot MEM does not
2775*38fd1498Szrj      actually appear in the insn chain.  If it does appear, its address
2776*38fd1498Szrj      is unshared from all else at that point.  */
2777*38fd1498Szrj   unsigned int i;
2778*38fd1498Szrj   rtx temp;
2779*38fd1498Szrj   FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2780*38fd1498Szrj     (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
2781*38fd1498Szrj }
2782*38fd1498Szrj 
2783*38fd1498Szrj /* Go through all the RTL insn bodies and copy any invalid shared
2784*38fd1498Szrj    structure, again.  This is a fairly expensive thing to do so it
2785*38fd1498Szrj    should be done sparingly.  */
2786*38fd1498Szrj 
2787*38fd1498Szrj void
unshare_all_rtl_again(rtx_insn * insn)2788*38fd1498Szrj unshare_all_rtl_again (rtx_insn *insn)
2789*38fd1498Szrj {
2790*38fd1498Szrj   rtx_insn *p;
2791*38fd1498Szrj   tree decl;
2792*38fd1498Szrj 
2793*38fd1498Szrj   for (p = insn; p; p = NEXT_INSN (p))
2794*38fd1498Szrj     if (INSN_P (p))
2795*38fd1498Szrj       {
2796*38fd1498Szrj 	reset_used_flags (PATTERN (p));
2797*38fd1498Szrj 	reset_used_flags (REG_NOTES (p));
2798*38fd1498Szrj 	if (CALL_P (p))
2799*38fd1498Szrj 	  reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2800*38fd1498Szrj       }
2801*38fd1498Szrj 
2802*38fd1498Szrj   /* Make sure that virtual stack slots are not shared.  */
2803*38fd1498Szrj   set_used_decls (DECL_INITIAL (cfun->decl));
2804*38fd1498Szrj 
2805*38fd1498Szrj   /* Make sure that virtual parameters are not shared.  */
2806*38fd1498Szrj   for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2807*38fd1498Szrj     set_used_flags (DECL_RTL (decl));
2808*38fd1498Szrj 
2809*38fd1498Szrj   rtx temp;
2810*38fd1498Szrj   unsigned int i;
2811*38fd1498Szrj   FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
2812*38fd1498Szrj     reset_used_flags (temp);
2813*38fd1498Szrj 
2814*38fd1498Szrj   unshare_all_rtl_1 (insn);
2815*38fd1498Szrj }
2816*38fd1498Szrj 
2817*38fd1498Szrj unsigned int
unshare_all_rtl(void)2818*38fd1498Szrj unshare_all_rtl (void)
2819*38fd1498Szrj {
2820*38fd1498Szrj   unshare_all_rtl_1 (get_insns ());
2821*38fd1498Szrj 
2822*38fd1498Szrj   for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2823*38fd1498Szrj     {
2824*38fd1498Szrj       if (DECL_RTL_SET_P (decl))
2825*38fd1498Szrj 	SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2826*38fd1498Szrj       DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
2827*38fd1498Szrj     }
2828*38fd1498Szrj 
2829*38fd1498Szrj   return 0;
2830*38fd1498Szrj }
2831*38fd1498Szrj 
2832*38fd1498Szrj 
/* Check that ORIG is not marked when it should not be, and mark ORIG
   as in use.  Recursively does the same for subexpressions.  INSN is
   the containing insn, used only for diagnostics.  Assumes all used
   bits were cleared beforehand (see reset_all_used_flags).  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0))
	  && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
	return;
      break;

    case CONST:
      if (shared_const_p (orig))
	return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  || reload_completed || reload_in_progress)
	return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
  if (flag_checking && RTX_FLAG (x, used))
    {
      /* With checking enabled, print both the insn and the shared rtx
	 before dying, to make the report actionable.  */
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
  /* The assert also fires in !flag_checking builds, without the
     diagnostic dump above.  */
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  verify_rtx_sharing (XEXP (x, i), insn);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      for (j = 0; j < len; j++)
		{
		  /* We allow sharing of ASM_OPERANDS inside single
		     instruction, so skip the shared SET_SRC and only
		     verify the SET_DEST of such sets.  */
		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
			  == ASM_OPERANDS))
		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
		  else
		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
		}
	    }
	  break;
	}
    }
  return;
}
2943*38fd1498Szrj 
/* Reset used-flags for INSN: clear the used bit on every rtx
   reachable from its pattern, its register notes, and, for calls,
   its function-usage list.  */

static void
reset_insn_used_flags (rtx insn)
{
  gcc_assert (INSN_P (insn));
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  if (CALL_P (insn))
    reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
}
2955*38fd1498Szrj 
2956*38fd1498Szrj /* Go through all the RTL insn bodies and clear all the USED bits.  */
2957*38fd1498Szrj 
2958*38fd1498Szrj static void
reset_all_used_flags(void)2959*38fd1498Szrj reset_all_used_flags (void)
2960*38fd1498Szrj {
2961*38fd1498Szrj   rtx_insn *p;
2962*38fd1498Szrj 
2963*38fd1498Szrj   for (p = get_insns (); p; p = NEXT_INSN (p))
2964*38fd1498Szrj     if (INSN_P (p))
2965*38fd1498Szrj       {
2966*38fd1498Szrj 	rtx pat = PATTERN (p);
2967*38fd1498Szrj 	if (GET_CODE (pat) != SEQUENCE)
2968*38fd1498Szrj 	  reset_insn_used_flags (p);
2969*38fd1498Szrj 	else
2970*38fd1498Szrj 	  {
2971*38fd1498Szrj 	    gcc_assert (REG_NOTES (p) == NULL);
2972*38fd1498Szrj 	    for (int i = 0; i < XVECLEN (pat, 0); i++)
2973*38fd1498Szrj 	      {
2974*38fd1498Szrj 		rtx insn = XVECEXP (pat, 0, i);
2975*38fd1498Szrj 		if (INSN_P (insn))
2976*38fd1498Szrj 		  reset_insn_used_flags (insn);
2977*38fd1498Szrj 	      }
2978*38fd1498Szrj 	  }
2979*38fd1498Szrj       }
2980*38fd1498Szrj }
2981*38fd1498Szrj 
/* Verify sharing in INSN: check the pattern, the register notes,
   and, for calls, the function-usage list, marking each rtx seen.  */

static void
verify_insn_sharing (rtx insn)
{
  gcc_assert (INSN_P (insn));
  verify_rtx_sharing (PATTERN (insn), insn);
  verify_rtx_sharing (REG_NOTES (insn), insn);
  if (CALL_P (insn))
    verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
}
2993*38fd1498Szrj 
2994*38fd1498Szrj /* Go through all the RTL insn bodies and check that there is no unexpected
2995*38fd1498Szrj    sharing in between the subexpressions.  */
2996*38fd1498Szrj 
2997*38fd1498Szrj DEBUG_FUNCTION void
verify_rtl_sharing(void)2998*38fd1498Szrj verify_rtl_sharing (void)
2999*38fd1498Szrj {
3000*38fd1498Szrj   rtx_insn *p;
3001*38fd1498Szrj 
3002*38fd1498Szrj   timevar_push (TV_VERIFY_RTL_SHARING);
3003*38fd1498Szrj 
3004*38fd1498Szrj   reset_all_used_flags ();
3005*38fd1498Szrj 
3006*38fd1498Szrj   for (p = get_insns (); p; p = NEXT_INSN (p))
3007*38fd1498Szrj     if (INSN_P (p))
3008*38fd1498Szrj       {
3009*38fd1498Szrj 	rtx pat = PATTERN (p);
3010*38fd1498Szrj 	if (GET_CODE (pat) != SEQUENCE)
3011*38fd1498Szrj 	  verify_insn_sharing (p);
3012*38fd1498Szrj 	else
3013*38fd1498Szrj 	  for (int i = 0; i < XVECLEN (pat, 0); i++)
3014*38fd1498Szrj 	      {
3015*38fd1498Szrj 		rtx insn = XVECEXP (pat, 0, i);
3016*38fd1498Szrj 		if (INSN_P (insn))
3017*38fd1498Szrj 		  verify_insn_sharing (insn);
3018*38fd1498Szrj 	      }
3019*38fd1498Szrj       }
3020*38fd1498Szrj 
3021*38fd1498Szrj   reset_all_used_flags ();
3022*38fd1498Szrj 
3023*38fd1498Szrj   timevar_pop (TV_VERIFY_RTL_SHARING);
3024*38fd1498Szrj }
3025*38fd1498Szrj 
3026*38fd1498Szrj /* Go through all the RTL insn bodies and copy any invalid shared structure.
3027*38fd1498Szrj    Assumes the mark bits are cleared at entry.  */
3028*38fd1498Szrj 
3029*38fd1498Szrj void
unshare_all_rtl_in_chain(rtx_insn * insn)3030*38fd1498Szrj unshare_all_rtl_in_chain (rtx_insn *insn)
3031*38fd1498Szrj {
3032*38fd1498Szrj   for (; insn; insn = NEXT_INSN (insn))
3033*38fd1498Szrj     if (INSN_P (insn))
3034*38fd1498Szrj       {
3035*38fd1498Szrj 	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
3036*38fd1498Szrj 	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
3037*38fd1498Szrj 	if (CALL_P (insn))
3038*38fd1498Szrj 	  CALL_INSN_FUNCTION_USAGE (insn)
3039*38fd1498Szrj 	    = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
3040*38fd1498Szrj       }
3041*38fd1498Szrj }
3042*38fd1498Szrj 
3043*38fd1498Szrj /* Go through all virtual stack slots of a function and mark them as
3044*38fd1498Szrj    shared.  We never replace the DECL_RTLs themselves with a copy,
3045*38fd1498Szrj    but expressions mentioned into a DECL_RTL cannot be shared with
3046*38fd1498Szrj    expressions in the instruction stream.
3047*38fd1498Szrj 
3048*38fd1498Szrj    Note that reload may convert pseudo registers into memories in-place.
3049*38fd1498Szrj    Pseudo registers are always shared, but MEMs never are.  Thus if we
3050*38fd1498Szrj    reset the used flags on MEMs in the instruction stream, we must set
3051*38fd1498Szrj    them again on MEMs that appear in DECL_RTLs.  */
3052*38fd1498Szrj 
3053*38fd1498Szrj static void
set_used_decls(tree blk)3054*38fd1498Szrj set_used_decls (tree blk)
3055*38fd1498Szrj {
3056*38fd1498Szrj   tree t;
3057*38fd1498Szrj 
3058*38fd1498Szrj   /* Mark decls.  */
3059*38fd1498Szrj   for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
3060*38fd1498Szrj     if (DECL_RTL_SET_P (t))
3061*38fd1498Szrj       set_used_flags (DECL_RTL (t));
3062*38fd1498Szrj 
3063*38fd1498Szrj   /* Now process sub-blocks.  */
3064*38fd1498Szrj   for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
3065*38fd1498Szrj     set_used_decls (t);
3066*38fd1498Szrj }
3067*38fd1498Szrj 
3068*38fd1498Szrj /* Mark ORIG as in use, and return a copy of it if it was already in use.
3069*38fd1498Szrj    Recursively does the same for subexpressions.  Uses
3070*38fd1498Szrj    copy_rtx_if_shared_1 to reduce stack space.  */
3071*38fd1498Szrj 
3072*38fd1498Szrj rtx
copy_rtx_if_shared(rtx orig)3073*38fd1498Szrj copy_rtx_if_shared (rtx orig)
3074*38fd1498Szrj {
3075*38fd1498Szrj   copy_rtx_if_shared_1 (&orig);
3076*38fd1498Szrj   return orig;
3077*38fd1498Szrj }
3078*38fd1498Szrj 
/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.

   Implementation note: the final operand of each rtx is not recursed
   into directly; instead it is stashed in LAST_PTR and handled by
   looping back to REPEAT, turning the tail call into iteration and
   bounding stack depth on long operand chains.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;		/* Nonzero once X has been shallow-copied.  */
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0))
	  && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
	return;
      break;

    case CONST:
      if (shared_const_p (x))
	return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  /* LAST_PTR trails one 'e'/vector operand behind the loop so the
     final operand can be processed via GOTO REPEAT below.  */
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  /* Recurse into the previously deferred operand, then defer
	     this one.  */
          if (last_ptr)
            copy_rtx_if_shared_1 (last_ptr);
	  last_ptr = &XEXP (x, i);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

              /* Copy the vector iff I copied the rtx and the length
		 is nonzero.  */
	      if (copied && len > 0)
		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

              /* Call recursively on all inside the vector.  */
	      for (j = 0; j < len; j++)
                {
		  if (last_ptr)
		    copy_rtx_if_shared_1 (last_ptr);
                  last_ptr = &XVECEXP (x, i, j);
                }
	    }
	  break;
	}
    }
  /* Publish the (possibly copied) X before iterating on the last
     operand, so the caller's slot is updated exactly once.  */
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
  return;
}
3207*38fd1498Szrj 
/* Set the USED bit in X and its non-shareable subparts to FLAG.
   The recursion on the last 'e' operand is turned into iteration
   via GOTO REPEAT to bound stack depth on long operand chains.  */

static void
mark_used_flags (rtx x, int flag)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = flag;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  /* Handle the last operand iteratively instead of recursing.  */
          if (i == length-1)
            {
              x = XEXP (x, i);
	      goto repeat;
            }
	  mark_used_flags (XEXP (x, i), flag);
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_used_flags (XVECEXP (x, i, j), flag);
	  break;
	}
    }
}
3281*38fd1498Szrj 
/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  Thin wrapper over mark_used_flags.  */

void
reset_used_flags (rtx x)
{
  mark_used_flags (x, 0);
}
3290*38fd1498Szrj 
/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  Thin wrapper over mark_used_flags.  */

void
set_used_flags (rtx x)
{
  mark_used_flags (x, 1);
}
3299*38fd1498Szrj 
3300*38fd1498Szrj /* Copy X if necessary so that it won't be altered by changes in OTHER.
3301*38fd1498Szrj    Return X or the rtx for the pseudo reg the value of X was copied into.
3302*38fd1498Szrj    OTHER must be valid as a SET_DEST.  */
3303*38fd1498Szrj 
3304*38fd1498Szrj rtx
make_safe_from(rtx x,rtx other)3305*38fd1498Szrj make_safe_from (rtx x, rtx other)
3306*38fd1498Szrj {
3307*38fd1498Szrj   while (1)
3308*38fd1498Szrj     switch (GET_CODE (other))
3309*38fd1498Szrj       {
3310*38fd1498Szrj       case SUBREG:
3311*38fd1498Szrj 	other = SUBREG_REG (other);
3312*38fd1498Szrj 	break;
3313*38fd1498Szrj       case STRICT_LOW_PART:
3314*38fd1498Szrj       case SIGN_EXTEND:
3315*38fd1498Szrj       case ZERO_EXTEND:
3316*38fd1498Szrj 	other = XEXP (other, 0);
3317*38fd1498Szrj 	break;
3318*38fd1498Szrj       default:
3319*38fd1498Szrj 	goto done;
3320*38fd1498Szrj       }
3321*38fd1498Szrj  done:
3322*38fd1498Szrj   if ((MEM_P (other)
3323*38fd1498Szrj        && ! CONSTANT_P (x)
3324*38fd1498Szrj        && !REG_P (x)
3325*38fd1498Szrj        && GET_CODE (x) != SUBREG)
3326*38fd1498Szrj       || (REG_P (other)
3327*38fd1498Szrj 	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
3328*38fd1498Szrj 	      || reg_mentioned_p (other, x))))
3329*38fd1498Szrj     {
3330*38fd1498Szrj       rtx temp = gen_reg_rtx (GET_MODE (x));
3331*38fd1498Szrj       emit_move_insn (temp, x);
3332*38fd1498Szrj       return temp;
3333*38fd1498Szrj     }
3334*38fd1498Szrj   return x;
3335*38fd1498Szrj }
3336*38fd1498Szrj 
3337*38fd1498Szrj /* Emission of insns (adding them to the doubly-linked list).  */
3338*38fd1498Szrj 
3339*38fd1498Szrj /* Return the last insn emitted, even if it is in a sequence now pushed.  */
3340*38fd1498Szrj 
3341*38fd1498Szrj rtx_insn *
get_last_insn_anywhere(void)3342*38fd1498Szrj get_last_insn_anywhere (void)
3343*38fd1498Szrj {
3344*38fd1498Szrj   struct sequence_stack *seq;
3345*38fd1498Szrj   for (seq = get_current_sequence (); seq; seq = seq->next)
3346*38fd1498Szrj     if (seq->last != 0)
3347*38fd1498Szrj       return seq->last;
3348*38fd1498Szrj   return 0;
3349*38fd1498Szrj }
3350*38fd1498Szrj 
3351*38fd1498Szrj /* Return the first nonnote insn emitted in current sequence or current
3352*38fd1498Szrj    function.  This routine looks inside SEQUENCEs.  */
3353*38fd1498Szrj 
3354*38fd1498Szrj rtx_insn *
get_first_nonnote_insn(void)3355*38fd1498Szrj get_first_nonnote_insn (void)
3356*38fd1498Szrj {
3357*38fd1498Szrj   rtx_insn *insn = get_insns ();
3358*38fd1498Szrj 
3359*38fd1498Szrj   if (insn)
3360*38fd1498Szrj     {
3361*38fd1498Szrj       if (NOTE_P (insn))
3362*38fd1498Szrj 	for (insn = next_insn (insn);
3363*38fd1498Szrj 	     insn && NOTE_P (insn);
3364*38fd1498Szrj 	     insn = next_insn (insn))
3365*38fd1498Szrj 	  continue;
3366*38fd1498Szrj       else
3367*38fd1498Szrj 	{
3368*38fd1498Szrj 	  if (NONJUMP_INSN_P (insn)
3369*38fd1498Szrj 	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
3370*38fd1498Szrj 	    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3371*38fd1498Szrj 	}
3372*38fd1498Szrj     }
3373*38fd1498Szrj 
3374*38fd1498Szrj   return insn;
3375*38fd1498Szrj }
3376*38fd1498Szrj 
3377*38fd1498Szrj /* Return the last nonnote insn emitted in current sequence or current
3378*38fd1498Szrj    function.  This routine looks inside SEQUENCEs.  */
3379*38fd1498Szrj 
3380*38fd1498Szrj rtx_insn *
get_last_nonnote_insn(void)3381*38fd1498Szrj get_last_nonnote_insn (void)
3382*38fd1498Szrj {
3383*38fd1498Szrj   rtx_insn *insn = get_last_insn ();
3384*38fd1498Szrj 
3385*38fd1498Szrj   if (insn)
3386*38fd1498Szrj     {
3387*38fd1498Szrj       if (NOTE_P (insn))
3388*38fd1498Szrj 	for (insn = previous_insn (insn);
3389*38fd1498Szrj 	     insn && NOTE_P (insn);
3390*38fd1498Szrj 	     insn = previous_insn (insn))
3391*38fd1498Szrj 	  continue;
3392*38fd1498Szrj       else
3393*38fd1498Szrj 	{
3394*38fd1498Szrj 	  if (NONJUMP_INSN_P (insn))
3395*38fd1498Szrj 	    if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3396*38fd1498Szrj 	      insn = seq->insn (seq->len () - 1);
3397*38fd1498Szrj 	}
3398*38fd1498Szrj     }
3399*38fd1498Szrj 
3400*38fd1498Szrj   return insn;
3401*38fd1498Szrj }
3402*38fd1498Szrj 
3403*38fd1498Szrj /* Return the number of actual (non-debug) insns emitted in this
3404*38fd1498Szrj    function.  */
3405*38fd1498Szrj 
3406*38fd1498Szrj int
get_max_insn_count(void)3407*38fd1498Szrj get_max_insn_count (void)
3408*38fd1498Szrj {
3409*38fd1498Szrj   int n = cur_insn_uid;
3410*38fd1498Szrj 
3411*38fd1498Szrj   /* The table size must be stable across -g, to avoid codegen
3412*38fd1498Szrj      differences due to debug insns, and not be affected by
3413*38fd1498Szrj      -fmin-insn-uid, to avoid excessive table size and to simplify
3414*38fd1498Szrj      debugging of -fcompare-debug failures.  */
3415*38fd1498Szrj   if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3416*38fd1498Szrj     n -= cur_debug_insn_uid;
3417*38fd1498Szrj   else
3418*38fd1498Szrj     n -= MIN_NONDEBUG_INSN_UID;
3419*38fd1498Szrj 
3420*38fd1498Szrj   return n;
3421*38fd1498Szrj }
3422*38fd1498Szrj 
3423*38fd1498Szrj 
3424*38fd1498Szrj /* Return the next insn.  If it is a SEQUENCE, return the first insn
3425*38fd1498Szrj    of the sequence.  */
3426*38fd1498Szrj 
3427*38fd1498Szrj rtx_insn *
next_insn(rtx_insn * insn)3428*38fd1498Szrj next_insn (rtx_insn *insn)
3429*38fd1498Szrj {
3430*38fd1498Szrj   if (insn)
3431*38fd1498Szrj     {
3432*38fd1498Szrj       insn = NEXT_INSN (insn);
3433*38fd1498Szrj       if (insn && NONJUMP_INSN_P (insn)
3434*38fd1498Szrj 	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
3435*38fd1498Szrj 	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3436*38fd1498Szrj     }
3437*38fd1498Szrj 
3438*38fd1498Szrj   return insn;
3439*38fd1498Szrj }
3440*38fd1498Szrj 
3441*38fd1498Szrj /* Return the previous insn.  If it is a SEQUENCE, return the last insn
3442*38fd1498Szrj    of the sequence.  */
3443*38fd1498Szrj 
3444*38fd1498Szrj rtx_insn *
previous_insn(rtx_insn * insn)3445*38fd1498Szrj previous_insn (rtx_insn *insn)
3446*38fd1498Szrj {
3447*38fd1498Szrj   if (insn)
3448*38fd1498Szrj     {
3449*38fd1498Szrj       insn = PREV_INSN (insn);
3450*38fd1498Szrj       if (insn && NONJUMP_INSN_P (insn))
3451*38fd1498Szrj 	if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
3452*38fd1498Szrj 	  insn = seq->insn (seq->len () - 1);
3453*38fd1498Szrj     }
3454*38fd1498Szrj 
3455*38fd1498Szrj   return insn;
3456*38fd1498Szrj }
3457*38fd1498Szrj 
3458*38fd1498Szrj /* Return the next insn after INSN that is not a NOTE.  This routine does not
3459*38fd1498Szrj    look inside SEQUENCEs.  */
3460*38fd1498Szrj 
3461*38fd1498Szrj rtx_insn *
next_nonnote_insn(rtx_insn * insn)3462*38fd1498Szrj next_nonnote_insn (rtx_insn *insn)
3463*38fd1498Szrj {
3464*38fd1498Szrj   while (insn)
3465*38fd1498Szrj     {
3466*38fd1498Szrj       insn = NEXT_INSN (insn);
3467*38fd1498Szrj       if (insn == 0 || !NOTE_P (insn))
3468*38fd1498Szrj 	break;
3469*38fd1498Szrj     }
3470*38fd1498Szrj 
3471*38fd1498Szrj   return insn;
3472*38fd1498Szrj }
3473*38fd1498Szrj 
3474*38fd1498Szrj /* Return the next insn after INSN that is not a DEBUG_INSN.  This
3475*38fd1498Szrj    routine does not look inside SEQUENCEs.  */
3476*38fd1498Szrj 
3477*38fd1498Szrj rtx_insn *
next_nondebug_insn(rtx_insn * insn)3478*38fd1498Szrj next_nondebug_insn (rtx_insn *insn)
3479*38fd1498Szrj {
3480*38fd1498Szrj   while (insn)
3481*38fd1498Szrj     {
3482*38fd1498Szrj       insn = NEXT_INSN (insn);
3483*38fd1498Szrj       if (insn == 0 || !DEBUG_INSN_P (insn))
3484*38fd1498Szrj 	break;
3485*38fd1498Szrj     }
3486*38fd1498Szrj 
3487*38fd1498Szrj   return insn;
3488*38fd1498Szrj }
3489*38fd1498Szrj 
3490*38fd1498Szrj /* Return the previous insn before INSN that is not a NOTE.  This routine does
3491*38fd1498Szrj    not look inside SEQUENCEs.  */
3492*38fd1498Szrj 
3493*38fd1498Szrj rtx_insn *
prev_nonnote_insn(rtx_insn * insn)3494*38fd1498Szrj prev_nonnote_insn (rtx_insn *insn)
3495*38fd1498Szrj {
3496*38fd1498Szrj   while (insn)
3497*38fd1498Szrj     {
3498*38fd1498Szrj       insn = PREV_INSN (insn);
3499*38fd1498Szrj       if (insn == 0 || !NOTE_P (insn))
3500*38fd1498Szrj 	break;
3501*38fd1498Szrj     }
3502*38fd1498Szrj 
3503*38fd1498Szrj   return insn;
3504*38fd1498Szrj }
3505*38fd1498Szrj 
3506*38fd1498Szrj /* Return the previous insn before INSN that is not a DEBUG_INSN.
3507*38fd1498Szrj    This routine does not look inside SEQUENCEs.  */
3508*38fd1498Szrj 
3509*38fd1498Szrj rtx_insn *
prev_nondebug_insn(rtx_insn * insn)3510*38fd1498Szrj prev_nondebug_insn (rtx_insn *insn)
3511*38fd1498Szrj {
3512*38fd1498Szrj   while (insn)
3513*38fd1498Szrj     {
3514*38fd1498Szrj       insn = PREV_INSN (insn);
3515*38fd1498Szrj       if (insn == 0 || !DEBUG_INSN_P (insn))
3516*38fd1498Szrj 	break;
3517*38fd1498Szrj     }
3518*38fd1498Szrj 
3519*38fd1498Szrj   return insn;
3520*38fd1498Szrj }
3521*38fd1498Szrj 
3522*38fd1498Szrj /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3523*38fd1498Szrj    This routine does not look inside SEQUENCEs.  */
3524*38fd1498Szrj 
3525*38fd1498Szrj rtx_insn *
next_nonnote_nondebug_insn(rtx_insn * insn)3526*38fd1498Szrj next_nonnote_nondebug_insn (rtx_insn *insn)
3527*38fd1498Szrj {
3528*38fd1498Szrj   while (insn)
3529*38fd1498Szrj     {
3530*38fd1498Szrj       insn = NEXT_INSN (insn);
3531*38fd1498Szrj       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3532*38fd1498Szrj 	break;
3533*38fd1498Szrj     }
3534*38fd1498Szrj 
3535*38fd1498Szrj   return insn;
3536*38fd1498Szrj }
3537*38fd1498Szrj 
3538*38fd1498Szrj /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
3539*38fd1498Szrj    but stop the search before we enter another basic block.  This
3540*38fd1498Szrj    routine does not look inside SEQUENCEs.  */
3541*38fd1498Szrj 
3542*38fd1498Szrj rtx_insn *
next_nonnote_nondebug_insn_bb(rtx_insn * insn)3543*38fd1498Szrj next_nonnote_nondebug_insn_bb (rtx_insn *insn)
3544*38fd1498Szrj {
3545*38fd1498Szrj   while (insn)
3546*38fd1498Szrj     {
3547*38fd1498Szrj       insn = NEXT_INSN (insn);
3548*38fd1498Szrj       if (insn == 0)
3549*38fd1498Szrj 	break;
3550*38fd1498Szrj       if (DEBUG_INSN_P (insn))
3551*38fd1498Szrj 	continue;
3552*38fd1498Szrj       if (!NOTE_P (insn))
3553*38fd1498Szrj 	break;
3554*38fd1498Szrj       if (NOTE_INSN_BASIC_BLOCK_P (insn))
3555*38fd1498Szrj 	return NULL;
3556*38fd1498Szrj     }
3557*38fd1498Szrj 
3558*38fd1498Szrj   return insn;
3559*38fd1498Szrj }
3560*38fd1498Szrj 
3561*38fd1498Szrj /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3562*38fd1498Szrj    This routine does not look inside SEQUENCEs.  */
3563*38fd1498Szrj 
3564*38fd1498Szrj rtx_insn *
prev_nonnote_nondebug_insn(rtx_insn * insn)3565*38fd1498Szrj prev_nonnote_nondebug_insn (rtx_insn *insn)
3566*38fd1498Szrj {
3567*38fd1498Szrj   while (insn)
3568*38fd1498Szrj     {
3569*38fd1498Szrj       insn = PREV_INSN (insn);
3570*38fd1498Szrj       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3571*38fd1498Szrj 	break;
3572*38fd1498Szrj     }
3573*38fd1498Szrj 
3574*38fd1498Szrj   return insn;
3575*38fd1498Szrj }
3576*38fd1498Szrj 
3577*38fd1498Szrj /* Return the previous insn before INSN that is not a NOTE nor
3578*38fd1498Szrj    DEBUG_INSN, but stop the search before we enter another basic
3579*38fd1498Szrj    block.  This routine does not look inside SEQUENCEs.  */
3580*38fd1498Szrj 
3581*38fd1498Szrj rtx_insn *
prev_nonnote_nondebug_insn_bb(rtx_insn * insn)3582*38fd1498Szrj prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
3583*38fd1498Szrj {
3584*38fd1498Szrj   while (insn)
3585*38fd1498Szrj     {
3586*38fd1498Szrj       insn = PREV_INSN (insn);
3587*38fd1498Szrj       if (insn == 0)
3588*38fd1498Szrj 	break;
3589*38fd1498Szrj       if (DEBUG_INSN_P (insn))
3590*38fd1498Szrj 	continue;
3591*38fd1498Szrj       if (!NOTE_P (insn))
3592*38fd1498Szrj 	break;
3593*38fd1498Szrj       if (NOTE_INSN_BASIC_BLOCK_P (insn))
3594*38fd1498Szrj 	return NULL;
3595*38fd1498Szrj     }
3596*38fd1498Szrj 
3597*38fd1498Szrj   return insn;
3598*38fd1498Szrj }
3599*38fd1498Szrj 
3600*38fd1498Szrj /* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
3601*38fd1498Szrj    or 0, if there is none.  This routine does not look inside
3602*38fd1498Szrj    SEQUENCEs.  */
3603*38fd1498Szrj 
3604*38fd1498Szrj rtx_insn *
next_real_insn(rtx uncast_insn)3605*38fd1498Szrj next_real_insn (rtx uncast_insn)
3606*38fd1498Szrj {
3607*38fd1498Szrj   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3608*38fd1498Szrj 
3609*38fd1498Szrj   while (insn)
3610*38fd1498Szrj     {
3611*38fd1498Szrj       insn = NEXT_INSN (insn);
3612*38fd1498Szrj       if (insn == 0 || INSN_P (insn))
3613*38fd1498Szrj 	break;
3614*38fd1498Szrj     }
3615*38fd1498Szrj 
3616*38fd1498Szrj   return insn;
3617*38fd1498Szrj }
3618*38fd1498Szrj 
3619*38fd1498Szrj /* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
3620*38fd1498Szrj    or 0, if there is none.  This routine does not look inside
3621*38fd1498Szrj    SEQUENCEs.  */
3622*38fd1498Szrj 
3623*38fd1498Szrj rtx_insn *
prev_real_insn(rtx_insn * insn)3624*38fd1498Szrj prev_real_insn (rtx_insn *insn)
3625*38fd1498Szrj {
3626*38fd1498Szrj   while (insn)
3627*38fd1498Szrj     {
3628*38fd1498Szrj       insn = PREV_INSN (insn);
3629*38fd1498Szrj       if (insn == 0 || INSN_P (insn))
3630*38fd1498Szrj 	break;
3631*38fd1498Szrj     }
3632*38fd1498Szrj 
3633*38fd1498Szrj   return insn;
3634*38fd1498Szrj }
3635*38fd1498Szrj 
3636*38fd1498Szrj /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3637*38fd1498Szrj    or 0, if there is none.  This routine does not look inside
3638*38fd1498Szrj    SEQUENCEs.  */
3639*38fd1498Szrj 
3640*38fd1498Szrj rtx_insn *
next_real_nondebug_insn(rtx uncast_insn)3641*38fd1498Szrj next_real_nondebug_insn (rtx uncast_insn)
3642*38fd1498Szrj {
3643*38fd1498Szrj   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3644*38fd1498Szrj 
3645*38fd1498Szrj   while (insn)
3646*38fd1498Szrj     {
3647*38fd1498Szrj       insn = NEXT_INSN (insn);
3648*38fd1498Szrj       if (insn == 0 || NONDEBUG_INSN_P (insn))
3649*38fd1498Szrj 	break;
3650*38fd1498Szrj     }
3651*38fd1498Szrj 
3652*38fd1498Szrj   return insn;
3653*38fd1498Szrj }
3654*38fd1498Szrj 
3655*38fd1498Szrj /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3656*38fd1498Szrj    or 0, if there is none.  This routine does not look inside
3657*38fd1498Szrj    SEQUENCEs.  */
3658*38fd1498Szrj 
3659*38fd1498Szrj rtx_insn *
prev_real_nondebug_insn(rtx_insn * insn)3660*38fd1498Szrj prev_real_nondebug_insn (rtx_insn *insn)
3661*38fd1498Szrj {
3662*38fd1498Szrj   while (insn)
3663*38fd1498Szrj     {
3664*38fd1498Szrj       insn = PREV_INSN (insn);
3665*38fd1498Szrj       if (insn == 0 || NONDEBUG_INSN_P (insn))
3666*38fd1498Szrj 	break;
3667*38fd1498Szrj     }
3668*38fd1498Szrj 
3669*38fd1498Szrj   return insn;
3670*38fd1498Szrj }
3671*38fd1498Szrj 
3672*38fd1498Szrj /* Return the last CALL_INSN in the current list, or 0 if there is none.
3673*38fd1498Szrj    This routine does not look inside SEQUENCEs.  */
3674*38fd1498Szrj 
3675*38fd1498Szrj rtx_call_insn *
last_call_insn(void)3676*38fd1498Szrj last_call_insn (void)
3677*38fd1498Szrj {
3678*38fd1498Szrj   rtx_insn *insn;
3679*38fd1498Szrj 
3680*38fd1498Szrj   for (insn = get_last_insn ();
3681*38fd1498Szrj        insn && !CALL_P (insn);
3682*38fd1498Szrj        insn = PREV_INSN (insn))
3683*38fd1498Szrj     ;
3684*38fd1498Szrj 
3685*38fd1498Szrj   return safe_as_a <rtx_call_insn *> (insn);
3686*38fd1498Szrj }
3687*38fd1498Szrj 
3688*38fd1498Szrj /* Find the next insn after INSN that really does something.  This routine
3689*38fd1498Szrj    does not look inside SEQUENCEs.  After reload this also skips over
3690*38fd1498Szrj    standalone USE and CLOBBER insn.  */
3691*38fd1498Szrj 
3692*38fd1498Szrj int
active_insn_p(const rtx_insn * insn)3693*38fd1498Szrj active_insn_p (const rtx_insn *insn)
3694*38fd1498Szrj {
3695*38fd1498Szrj   return (CALL_P (insn) || JUMP_P (insn)
3696*38fd1498Szrj 	  || JUMP_TABLE_DATA_P (insn) /* FIXME */
3697*38fd1498Szrj 	  || (NONJUMP_INSN_P (insn)
3698*38fd1498Szrj 	      && (! reload_completed
3699*38fd1498Szrj 		  || (GET_CODE (PATTERN (insn)) != USE
3700*38fd1498Szrj 		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
3701*38fd1498Szrj }
3702*38fd1498Szrj 
3703*38fd1498Szrj rtx_insn *
next_active_insn(rtx_insn * insn)3704*38fd1498Szrj next_active_insn (rtx_insn *insn)
3705*38fd1498Szrj {
3706*38fd1498Szrj   while (insn)
3707*38fd1498Szrj     {
3708*38fd1498Szrj       insn = NEXT_INSN (insn);
3709*38fd1498Szrj       if (insn == 0 || active_insn_p (insn))
3710*38fd1498Szrj 	break;
3711*38fd1498Szrj     }
3712*38fd1498Szrj 
3713*38fd1498Szrj   return insn;
3714*38fd1498Szrj }
3715*38fd1498Szrj 
3716*38fd1498Szrj /* Find the last insn before INSN that really does something.  This routine
3717*38fd1498Szrj    does not look inside SEQUENCEs.  After reload this also skips over
3718*38fd1498Szrj    standalone USE and CLOBBER insn.  */
3719*38fd1498Szrj 
3720*38fd1498Szrj rtx_insn *
prev_active_insn(rtx_insn * insn)3721*38fd1498Szrj prev_active_insn (rtx_insn *insn)
3722*38fd1498Szrj {
3723*38fd1498Szrj   while (insn)
3724*38fd1498Szrj     {
3725*38fd1498Szrj       insn = PREV_INSN (insn);
3726*38fd1498Szrj       if (insn == 0 || active_insn_p (insn))
3727*38fd1498Szrj 	break;
3728*38fd1498Szrj     }
3729*38fd1498Szrj 
3730*38fd1498Szrj   return insn;
3731*38fd1498Szrj }
3732*38fd1498Szrj 
3733*38fd1498Szrj /* Return the next insn that uses CC0 after INSN, which is assumed to
3734*38fd1498Szrj    set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3735*38fd1498Szrj    applied to the result of this function should yield INSN).
3736*38fd1498Szrj 
3737*38fd1498Szrj    Normally, this is simply the next insn.  However, if a REG_CC_USER note
3738*38fd1498Szrj    is present, it contains the insn that uses CC0.
3739*38fd1498Szrj 
3740*38fd1498Szrj    Return 0 if we can't find the insn.  */
3741*38fd1498Szrj 
3742*38fd1498Szrj rtx_insn *
next_cc0_user(rtx_insn * insn)3743*38fd1498Szrj next_cc0_user (rtx_insn *insn)
3744*38fd1498Szrj {
3745*38fd1498Szrj   rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3746*38fd1498Szrj 
3747*38fd1498Szrj   if (note)
3748*38fd1498Szrj     return safe_as_a <rtx_insn *> (XEXP (note, 0));
3749*38fd1498Szrj 
3750*38fd1498Szrj   insn = next_nonnote_insn (insn);
3751*38fd1498Szrj   if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3752*38fd1498Szrj     insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
3753*38fd1498Szrj 
3754*38fd1498Szrj   if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3755*38fd1498Szrj     return insn;
3756*38fd1498Szrj 
3757*38fd1498Szrj   return 0;
3758*38fd1498Szrj }
3759*38fd1498Szrj 
3760*38fd1498Szrj /* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
3761*38fd1498Szrj    note, it is the previous insn.  */
3762*38fd1498Szrj 
3763*38fd1498Szrj rtx_insn *
prev_cc0_setter(rtx_insn * insn)3764*38fd1498Szrj prev_cc0_setter (rtx_insn *insn)
3765*38fd1498Szrj {
3766*38fd1498Szrj   rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3767*38fd1498Szrj 
3768*38fd1498Szrj   if (note)
3769*38fd1498Szrj     return safe_as_a <rtx_insn *> (XEXP (note, 0));
3770*38fd1498Szrj 
3771*38fd1498Szrj   insn = prev_nonnote_insn (insn);
3772*38fd1498Szrj   gcc_assert (sets_cc0_p (PATTERN (insn)));
3773*38fd1498Szrj 
3774*38fd1498Szrj   return insn;
3775*38fd1498Szrj }
3776*38fd1498Szrj 
3777*38fd1498Szrj /* Find a RTX_AUTOINC class rtx which matches DATA.  */
3778*38fd1498Szrj 
3779*38fd1498Szrj static int
find_auto_inc(const_rtx x,const_rtx reg)3780*38fd1498Szrj find_auto_inc (const_rtx x, const_rtx reg)
3781*38fd1498Szrj {
3782*38fd1498Szrj   subrtx_iterator::array_type array;
3783*38fd1498Szrj   FOR_EACH_SUBRTX (iter, array, x, NONCONST)
3784*38fd1498Szrj     {
3785*38fd1498Szrj       const_rtx x = *iter;
3786*38fd1498Szrj       if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
3787*38fd1498Szrj 	  && rtx_equal_p (reg, XEXP (x, 0)))
3788*38fd1498Szrj 	return true;
3789*38fd1498Szrj     }
3790*38fd1498Szrj   return false;
3791*38fd1498Szrj }
3792*38fd1498Szrj 
3793*38fd1498Szrj /* Increment the label uses for all labels present in rtx.  */
3794*38fd1498Szrj 
3795*38fd1498Szrj static void
mark_label_nuses(rtx x)3796*38fd1498Szrj mark_label_nuses (rtx x)
3797*38fd1498Szrj {
3798*38fd1498Szrj   enum rtx_code code;
3799*38fd1498Szrj   int i, j;
3800*38fd1498Szrj   const char *fmt;
3801*38fd1498Szrj 
3802*38fd1498Szrj   code = GET_CODE (x);
3803*38fd1498Szrj   if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
3804*38fd1498Szrj     LABEL_NUSES (label_ref_label (x))++;
3805*38fd1498Szrj 
3806*38fd1498Szrj   fmt = GET_RTX_FORMAT (code);
3807*38fd1498Szrj   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3808*38fd1498Szrj     {
3809*38fd1498Szrj       if (fmt[i] == 'e')
3810*38fd1498Szrj 	mark_label_nuses (XEXP (x, i));
3811*38fd1498Szrj       else if (fmt[i] == 'E')
3812*38fd1498Szrj 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3813*38fd1498Szrj 	  mark_label_nuses (XVECEXP (x, i, j));
3814*38fd1498Szrj     }
3815*38fd1498Szrj }
3816*38fd1498Szrj 
3817*38fd1498Szrj 
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx_insn *
try_split (rtx pat, rtx_insn *trial, int last)
{
  rtx_insn *before, *after;
  rtx note;
  rtx_insn *seq, *tem;
  profile_probability probability;
  rtx_insn *insn_last, *insn;
  int njumps = 0;
  rtx_insn *call_insn = NULL;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  /* If TRIAL is a conditional jump with a branch-probability note,
     expose that probability to the splitters via the global
     split_branch_probability, then clear it again afterwards.  */
  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability
      = profile_probability::from_reg_br_prob_note (XINT (note, 0));
  else
    split_branch_probability = profile_probability::uninitialized ();

  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = profile_probability::uninitialized ();

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
	  && rtx_equal_p (PATTERN (insn_last), pat))
	return trial;
      if (!NEXT_INSN (insn_last))
	break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels and copy flags.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  if (JUMP_P (trial))
	    CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
	  mark_jump_label (PATTERN (insn), insn, 0);
	  njumps++;
	  if (probability.initialized_p ()
	      && any_condjump_p (insn)
	      && !find_reg_note (insn, REG_BR_PROB, 0))
	    {
	      /* We can preserve the REG_BR_PROB notes only if exactly
		 one jump is created, otherwise the machine description
		 is responsible for this step using
		 split_branch_probability variable.  */
	      gcc_assert (njumps == 1);
	      add_reg_br_prob_note (insn, probability);
	    }
	}
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
	if (CALL_P (insn))
	  {
	    /* A split must produce at most one call.  */
	    gcc_assert (call_insn == NULL_RTX);
	    call_insn = insn;

	    /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
	       target may have explicitly specified.  */
	    rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
	    while (*p)
	      p = &XEXP (*p, 1);
	    *p = CALL_INSN_FUNCTION_USAGE (trial);

	    /* If the old call was a sibling call, the new one must
	       be too.  */
	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
	  }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
	{
	case REG_EH_REGION:
	  copy_reg_eh_region_note_backward (note, insn_last, NULL);
	  break;

	case REG_NORETURN:
	case REG_SETJMP:
	case REG_TM:
	case REG_CALL_NOCF_CHECK:
	case REG_CALL_ARG_LOCATION:
	  /* These notes belong on the call insn(s) of the split.  */
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (CALL_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

	case REG_NON_LOCAL_GOTO:
	  /* These notes belong on the jump insn(s) of the split.  */
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (JUMP_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

	case REG_INC:
	  if (!AUTO_INC_DEC)
	    break;

	  /* Re-attach the REG_INC note to whichever new insn actually
	     contains the auto-increment of the register.  */
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      rtx reg = XEXP (note, 0);
	      if (!FIND_REG_INC_NOTE (insn, reg)
		  && find_auto_inc (PATTERN (insn), reg))
		add_reg_note (insn, REG_INC, reg);
	    }
	  break;

	case REG_ARGS_SIZE:
	  fixup_args_size_notes (NULL, insn_last, get_args_size (note));
	  break;

	case REG_CALL_DECL:
	  gcc_assert (call_insn != NULL_RTX);
	  add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
	  break;

	default:
	  break;
	}
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
	{
	  /* JUMP_P insns have already been "marked" above.  */
	  if (NONJUMP_INSN_P (insn))
	    mark_label_nuses (PATTERN (insn));

	  insn = PREV_INSN (insn);
	}
    }

  /* Splice the new sequence into the chain in place of TRIAL, keeping
     TRIAL's source location, then delete TRIAL.  */
  before = PREV_INSN (trial);
  after = NEXT_INSN (trial);

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));

  delete_insn (trial);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can be occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! tem->deleted () && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : get_last_insn ())
    : NEXT_INSN (before);
}
4014*38fd1498Szrj 
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  The new insn is NOT linked into
   the insn chain; callers do that separately.  */

rtx_insn *
make_insn_raw (rtx pattern)
{
  rtx_insn *insn;

  insn = as_a <rtx_insn *> (rtx_alloc (INSN));

  /* Assign a fresh UID and initialize the standard slots;
     INSN_CODE of -1 means "not yet recognized".  */
  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  /* Diagnose emitting a jump-like pattern through emit_insn.
     NOTE(review): the GET_CODE (insn) == SET arm looks always-false,
     since INSN was just allocated with code INSN; PATTERN (insn) was
     probably intended -- confirm upstream.  */
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
	  || (GET_CODE (insn) == SET
	      && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}
4046*38fd1498Szrj 
4047*38fd1498Szrj /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */
4048*38fd1498Szrj 
4049*38fd1498Szrj static rtx_insn *
make_debug_insn_raw(rtx pattern)4050*38fd1498Szrj make_debug_insn_raw (rtx pattern)
4051*38fd1498Szrj {
4052*38fd1498Szrj   rtx_debug_insn *insn;
4053*38fd1498Szrj 
4054*38fd1498Szrj   insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
4055*38fd1498Szrj   INSN_UID (insn) = cur_debug_insn_uid++;
4056*38fd1498Szrj   if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
4057*38fd1498Szrj     INSN_UID (insn) = cur_insn_uid++;
4058*38fd1498Szrj 
4059*38fd1498Szrj   PATTERN (insn) = pattern;
4060*38fd1498Szrj   INSN_CODE (insn) = -1;
4061*38fd1498Szrj   REG_NOTES (insn) = NULL;
4062*38fd1498Szrj   INSN_LOCATION (insn) = curr_insn_location ();
4063*38fd1498Szrj   BLOCK_FOR_INSN (insn) = NULL;
4064*38fd1498Szrj 
4065*38fd1498Szrj   return insn;
4066*38fd1498Szrj }
4067*38fd1498Szrj 
4068*38fd1498Szrj /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
4069*38fd1498Szrj 
4070*38fd1498Szrj static rtx_insn *
make_jump_insn_raw(rtx pattern)4071*38fd1498Szrj make_jump_insn_raw (rtx pattern)
4072*38fd1498Szrj {
4073*38fd1498Szrj   rtx_jump_insn *insn;
4074*38fd1498Szrj 
4075*38fd1498Szrj   insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
4076*38fd1498Szrj   INSN_UID (insn) = cur_insn_uid++;
4077*38fd1498Szrj 
4078*38fd1498Szrj   PATTERN (insn) = pattern;
4079*38fd1498Szrj   INSN_CODE (insn) = -1;
4080*38fd1498Szrj   REG_NOTES (insn) = NULL;
4081*38fd1498Szrj   JUMP_LABEL (insn) = NULL;
4082*38fd1498Szrj   INSN_LOCATION (insn) = curr_insn_location ();
4083*38fd1498Szrj   BLOCK_FOR_INSN (insn) = NULL;
4084*38fd1498Szrj 
4085*38fd1498Szrj   return insn;
4086*38fd1498Szrj }
4087*38fd1498Szrj 
4088*38fd1498Szrj /* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
4089*38fd1498Szrj 
4090*38fd1498Szrj static rtx_insn *
make_call_insn_raw(rtx pattern)4091*38fd1498Szrj make_call_insn_raw (rtx pattern)
4092*38fd1498Szrj {
4093*38fd1498Szrj   rtx_call_insn *insn;
4094*38fd1498Szrj 
4095*38fd1498Szrj   insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
4096*38fd1498Szrj   INSN_UID (insn) = cur_insn_uid++;
4097*38fd1498Szrj 
4098*38fd1498Szrj   PATTERN (insn) = pattern;
4099*38fd1498Szrj   INSN_CODE (insn) = -1;
4100*38fd1498Szrj   REG_NOTES (insn) = NULL;
4101*38fd1498Szrj   CALL_INSN_FUNCTION_USAGE (insn) = NULL;
4102*38fd1498Szrj   INSN_LOCATION (insn) = curr_insn_location ();
4103*38fd1498Szrj   BLOCK_FOR_INSN (insn) = NULL;
4104*38fd1498Szrj 
4105*38fd1498Szrj   return insn;
4106*38fd1498Szrj }
4107*38fd1498Szrj 
4108*38fd1498Szrj /* Like `make_insn_raw' but make a NOTE instead of an insn.  */
4109*38fd1498Szrj 
4110*38fd1498Szrj static rtx_note *
make_note_raw(enum insn_note subtype)4111*38fd1498Szrj make_note_raw (enum insn_note subtype)
4112*38fd1498Szrj {
4113*38fd1498Szrj   /* Some notes are never created this way at all.  These notes are
4114*38fd1498Szrj      only created by patching out insns.  */
4115*38fd1498Szrj   gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
4116*38fd1498Szrj 	      && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
4117*38fd1498Szrj 
4118*38fd1498Szrj   rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
4119*38fd1498Szrj   INSN_UID (note) = cur_insn_uid++;
4120*38fd1498Szrj   NOTE_KIND (note) = subtype;
4121*38fd1498Szrj   BLOCK_FOR_INSN (note) = NULL;
4122*38fd1498Szrj   memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4123*38fd1498Szrj   return note;
4124*38fd1498Szrj }
4125*38fd1498Szrj 
/* Add INSN to the doubly-linked list, between PREV and NEXT.
   INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
   but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be NULL.  */

static inline void
link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
{
  SET_PREV_INSN (insn) = prev;
  SET_NEXT_INSN (insn) = next;
  if (prev != NULL)
    {
      SET_NEXT_INSN (prev) = insn;
      /* If PREV is a SEQUENCE, its last inner insn must also point
	 forward to INSN to keep the inner chain consistent.  */
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
	}
    }
  if (next != NULL)
    {
      SET_PREV_INSN (next) = insn;
      /* Likewise, the first insn inside a following SEQUENCE must point
	 back to INSN.  */
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
	  SET_PREV_INSN (sequence->insn (0)) = insn;
	}
    }

  /* If INSN itself is a SEQUENCE, its first and last inner insns mirror
     the outer PREV/NEXT links.  */
  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
      SET_PREV_INSN (sequence->insn (0)) = prev;
      SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
    }
}
4161*38fd1498Szrj 
4162*38fd1498Szrj /* Add INSN to the end of the doubly-linked list.
4163*38fd1498Szrj    INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */
4164*38fd1498Szrj 
4165*38fd1498Szrj void
add_insn(rtx_insn * insn)4166*38fd1498Szrj add_insn (rtx_insn *insn)
4167*38fd1498Szrj {
4168*38fd1498Szrj   rtx_insn *prev = get_last_insn ();
4169*38fd1498Szrj   link_insn_into_chain (insn, prev, NULL);
4170*38fd1498Szrj   if (get_insns () == NULL)
4171*38fd1498Szrj     set_first_insn (insn);
4172*38fd1498Szrj   set_last_insn (insn);
4173*38fd1498Szrj }
4174*38fd1498Szrj 
/* Add INSN into the doubly-linked list after insn AFTER.  Does not
   touch BLOCK_FOR_INSN; the *_nobb variants leave basic-block
   bookkeeping to the caller.  */

static void
add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *next = NEXT_INSN (after);

  /* Inserting after a deleted insn is only tolerated at -O0.  */
  gcc_assert (!optimize || !after->deleted ());

  link_insn_into_chain (insn, after, next);

  /* If AFTER was the last insn of a sequence on the sequence stack,
     INSN becomes the new last insn of that sequence.  */
  if (next == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (after == seq->last)
	  {
	    seq->last = insn;
	    break;
	  }
    }
}
4198*38fd1498Szrj 
/* Add INSN into the doubly-linked list before insn BEFORE.  Does not
   touch BLOCK_FOR_INSN; the *_nobb variants leave basic-block
   bookkeeping to the caller.  */

static void
add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
{
  rtx_insn *prev = PREV_INSN (before);

  /* Inserting before a deleted insn is only tolerated at -O0.  */
  gcc_assert (!optimize || !before->deleted ());

  link_insn_into_chain (insn, prev, before);

  /* If BEFORE headed a sequence on the sequence stack, INSN becomes the
     new first insn of that sequence.  A chain head with no PREV must
     belong to some sequence, hence the assert after the loop.  */
  if (prev == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (before == seq->first)
	  {
	    seq->first = insn;
	    break;
	  }

      gcc_assert (seq);
    }
}
4224*38fd1498Szrj 
/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from before.

   This and the next function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE. */

void
add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *after = as_a <rtx_insn *> (uncast_after);
  add_insn_after_nobb (insn, after);
  /* NOTE(review): unlike add_insn_before below, the incoming BB argument
     is overwritten unconditionally with BLOCK_FOR_INSN (after) here, so a
     non-NULL BB passed by the caller is ignored — confirm this is
     intentional before relying on the parameter.  */
  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
	 either NOTE or LABEL.  */
      if (BB_END (bb) == after
	  /* Avoid clobbering of structure when creating new BB.  */
	  && !BARRIER_P (insn)
	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
	BB_END (bb) = insn;
    }
}
4254*38fd1498Szrj 
/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from before.

   This and the previous function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE. */

void
add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  add_insn_before_nobb (insn, before);

  /* Infer the block from BEFORE only when the caller supplied none and
     neither insn is a barrier (barriers live outside blocks).  */
  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
	 LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
		  /* Avoid clobbering of structure when creating new BB.  */
		  || BARRIER_P (insn)
		  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }
}
4287*38fd1498Szrj 
/* Replace INSN in place with a NOTE_INSN_DELETED note, keeping its slot
   in the insn chain.  */

void
set_insn_deleted (rtx insn)
{
  /* Drop the dataflow information first, while INSN still looks like a
     real insn; PUT_CODE below rewrites it into a NOTE.  */
  if (INSN_P (insn))
    df_insn_delete (as_a <rtx_insn *> (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}
4298*38fd1498Szrj 
4299*38fd1498Szrj 
/* Unlink INSN from the insn chain.

   This function knows how to handle sequences.

   This function does not invalidate data flow information associated with
   INSN (i.e. does not call df_insn_delete).  That makes this function
   usable for only disconnecting an insn from the chain, and re-emit it
   elsewhere later.

   To later insert INSN elsewhere in the insn chain via add_insn and
   similar functions, PREV_INSN and NEXT_INSN must be nullified by
   the caller.  Nullifying them here breaks many insn chain walks.

   To really delete an insn and related DF information, use delete_insn.  */

void
remove_insn (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *next = NEXT_INSN (insn);
  rtx_insn *prev = PREV_INSN (insn);
  basic_block bb;

  /* Step 1: make the predecessor (or the sequence stack, if INSN headed
     a sequence) skip over INSN.  */
  if (prev)
    {
      SET_NEXT_INSN (prev) = next;
      /* A SEQUENCE predecessor also forwards from its last member.  */
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
	}
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (insn == seq->first)
	  {
	    seq->first = next;
	    break;
	  }

      gcc_assert (seq);
    }

  /* Step 2: make the successor (or the sequence stack, if INSN ended
     a sequence) skip back over INSN.  */
  if (next)
    {
      SET_PREV_INSN (next) = prev;
      /* A SEQUENCE successor also points back from its first member.  */
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
	  SET_PREV_INSN (sequence->insn (0)) = prev;
	}
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (insn == seq->last)
	  {
	    seq->last = prev;
	    break;
	  }

      gcc_assert (seq);
    }

  /* Fix up basic block boundaries, if necessary.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (BB_HEAD (bb) == insn)
	{
	  /* Never ever delete the basic block note without deleting whole
	     basic block.  */
	  gcc_assert (!NOTE_P (insn));
	  BB_HEAD (bb) = next;
	}
      if (BB_END (bb) == insn)
	BB_END (bb) = prev;
    }
}
4384*38fd1498Szrj 
4385*38fd1498Szrj /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */
4386*38fd1498Szrj 
4387*38fd1498Szrj void
add_function_usage_to(rtx call_insn,rtx call_fusage)4388*38fd1498Szrj add_function_usage_to (rtx call_insn, rtx call_fusage)
4389*38fd1498Szrj {
4390*38fd1498Szrj   gcc_assert (call_insn && CALL_P (call_insn));
4391*38fd1498Szrj 
4392*38fd1498Szrj   /* Put the register usage information on the CALL.  If there is already
4393*38fd1498Szrj      some usage information, put ours at the end.  */
4394*38fd1498Szrj   if (CALL_INSN_FUNCTION_USAGE (call_insn))
4395*38fd1498Szrj     {
4396*38fd1498Szrj       rtx link;
4397*38fd1498Szrj 
4398*38fd1498Szrj       for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4399*38fd1498Szrj 	   link = XEXP (link, 1))
4400*38fd1498Szrj 	;
4401*38fd1498Szrj 
4402*38fd1498Szrj       XEXP (link, 1) = call_fusage;
4403*38fd1498Szrj     }
4404*38fd1498Szrj   else
4405*38fd1498Szrj     CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4406*38fd1498Szrj }
4407*38fd1498Szrj 
4408*38fd1498Szrj /* Delete all insns made since FROM.
4409*38fd1498Szrj    FROM becomes the new last instruction.  */
4410*38fd1498Szrj 
4411*38fd1498Szrj void
delete_insns_since(rtx_insn * from)4412*38fd1498Szrj delete_insns_since (rtx_insn *from)
4413*38fd1498Szrj {
4414*38fd1498Szrj   if (from == 0)
4415*38fd1498Szrj     set_first_insn (0);
4416*38fd1498Szrj   else
4417*38fd1498Szrj     SET_NEXT_INSN (from) = 0;
4418*38fd1498Szrj   set_last_insn (from);
4419*38fd1498Szrj }
4420*38fd1498Szrj 
/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  /* Verify AFTER is outside the FROM..TO range being moved.  */
  if (flag_checking)
    {
      for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
	gcc_assert (after != x);
      gcc_assert (after != to);
    }

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  /* Keep the function-wide first/last pointers consistent.  */
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    SET_PREV_INSN (NEXT_INSN (after)) = to;

  SET_NEXT_INSN (to) = NEXT_INSN (after);
  SET_PREV_INSN (from) = after;
  SET_NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}
4461*38fd1498Szrj 
/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  /* Remember FROM's old predecessor before the chain is rewired; it is
     the new end of the source block if TO was that block's BB_END.  */
  rtx_insn *prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx_insn *x;
      df_set_bb_dirty (bb);

      /* Fix the boundary of the block the insns were moved out of.  */
      if (!BARRIER_P (from)
	  && (bb2 = BLOCK_FOR_INSN (from)))
	{
	  if (BB_END (bb2) == to)
	    BB_END (bb2) = prev;
	  df_set_bb_dirty (bb2);
	}

      if (BB_END (bb) == after)
	BB_END (bb) = to;

      /* Re-home every moved non-barrier insn into the target block.  */
      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
	if (!BARRIER_P (x))
	  df_insn_change_bb (x, bb);
    }
}
4493*38fd1498Szrj 
4494*38fd1498Szrj 
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

	start_sequence ();
	... emit the new instructions ...
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

/* Emit X before BEFORE.  If X is an insn list, link its members into the
   chain one by one; otherwise turn the bare pattern into a real insn via
   MAKE_RAW.  LAST is the fallback return value when X is NULL; the
   return value is the last insn emitted.  BB, if non-NULL, is passed to
   add_insn_before for block bookkeeping.  */

static rtx_insn *
emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
                           rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return safe_as_a <rtx_insn *> (last);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* X is already a chain of insns: move each one before BEFORE.  */
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn_before (insn, before, bb);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      /* X is a bare pattern: wrap it into a real insn first.  */
      last = (*make_raw) (x);
      add_insn_before (last, before, bb);
      break;
    }

  return safe_as_a <rtx_insn *> (last);
}
4564*38fd1498Szrj 
/* Make X be output before the instruction BEFORE.  Returns the last
   insn emitted (or BEFORE itself when X is NULL).  */

rtx_insn *
emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
{
  return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
}
4572*38fd1498Szrj 
/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx_jump_insn *
emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
{
  return as_a <rtx_jump_insn *> (
		emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
					   make_jump_insn_raw));
}
4583*38fd1498Szrj 
/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_call_insn_before_noloc (rtx x, rtx_insn *before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
				    make_call_insn_raw);
}
4593*38fd1498Szrj 
/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
				    make_debug_insn_raw);
}
4603*38fd1498Szrj 
4604*38fd1498Szrj /* Make an insn of code BARRIER
4605*38fd1498Szrj    and output it before the insn BEFORE.  */
4606*38fd1498Szrj 
4607*38fd1498Szrj rtx_barrier *
emit_barrier_before(rtx before)4608*38fd1498Szrj emit_barrier_before (rtx before)
4609*38fd1498Szrj {
4610*38fd1498Szrj   rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4611*38fd1498Szrj 
4612*38fd1498Szrj   INSN_UID (insn) = cur_insn_uid++;
4613*38fd1498Szrj 
4614*38fd1498Szrj   add_insn_before (insn, before, NULL);
4615*38fd1498Szrj   return insn;
4616*38fd1498Szrj }
4617*38fd1498Szrj 
/* Emit the label LABEL before the insn BEFORE.  LABEL must not have
   been emitted yet (UID still zero); it receives a fresh UID here.  */

rtx_code_label *
emit_label_before (rtx label, rtx_insn *before)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_before (label, before, NULL);
  return as_a <rtx_code_label *> (label);
}
4628*38fd1498Szrj 
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  Splices the whole chain starting at FIRST after AFTER
   and returns the last insn of the spliced chain.  */

static rtx_insn *
emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last;
  rtx_insn *after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      /* Assign every non-barrier insn in the list to BB.  The loop stops
	 on the final insn (the one with no NEXT), which the trailing IF
	 handles with the same bookkeeping.  */
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
	if (!BARRIER_P (last))
	  {
	    set_block_for_insn (last, bb);
	    df_insn_rescan (last);
	  }
      if (!BARRIER_P (last))
	{
	  set_block_for_insn (last, bb);
	  df_insn_rescan (last);
	}
      if (BB_END (bb) == after)
	BB_END (bb) = last;
    }
  else
    /* No block to update: just locate the last insn of the list.  */
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  /* Splice FIRST..LAST into the chain right after AFTER.  */
  after_after = NEXT_INSN (after);

  SET_NEXT_INSN (after) = first;
  SET_PREV_INSN (first) = after;
  SET_NEXT_INSN (last) = after_after;
  if (after_after)
    SET_PREV_INSN (after_after) = last;

  if (after == get_last_insn ())
    set_last_insn (last);

  return last;
}
4675*38fd1498Szrj 
/* Emit X after AFTER.  If X is an insn list, splice it into the chain
   via emit_insn_after_1; otherwise turn the bare pattern into a real
   insn via MAKE_RAW.  Returns the last insn emitted (AFTER itself when
   X is NULL).  */
static rtx_insn *
emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
			  rtx_insn *(*make_raw)(rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* X is already a chain of insns.  */
      last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      /* X is a bare pattern: wrap it into a real insn first.  */
      last = (*make_raw) (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}
4714*38fd1498Szrj 
/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx_insn *
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
}
4723*38fd1498Szrj 
4724*38fd1498Szrj 
/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx_jump_insn *
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  return as_a <rtx_jump_insn *> (
		emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
}
4734*38fd1498Szrj 
/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_call_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
}
4743*38fd1498Szrj 
/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
}
4752*38fd1498Szrj 
4753*38fd1498Szrj /* Make an insn of code BARRIER
4754*38fd1498Szrj    and output it after the insn AFTER.  */
4755*38fd1498Szrj 
4756*38fd1498Szrj rtx_barrier *
emit_barrier_after(rtx after)4757*38fd1498Szrj emit_barrier_after (rtx after)
4758*38fd1498Szrj {
4759*38fd1498Szrj   rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
4760*38fd1498Szrj 
4761*38fd1498Szrj   INSN_UID (insn) = cur_insn_uid++;
4762*38fd1498Szrj 
4763*38fd1498Szrj   add_insn_after (insn, after, NULL);
4764*38fd1498Szrj   return insn;
4765*38fd1498Szrj }
4766*38fd1498Szrj 
/* Emit the label LABEL after the insn AFTER.  LABEL must not have been
   emitted yet (UID still zero); it receives a fresh UID here.  */

rtx_insn *
emit_label_after (rtx label, rtx_insn *after)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_after (label, after, NULL);
  return as_a <rtx_insn *> (label);
}
4777*38fd1498Szrj 
4778*38fd1498Szrj /* Notes require a bit of special handling: Some notes need to have their
4779*38fd1498Szrj    BLOCK_FOR_INSN set, others should never have it set, and some should
4780*38fd1498Szrj    have it set or clear depending on the context.   */
4781*38fd1498Szrj 
4782*38fd1498Szrj /* Return true iff a note of kind SUBTYPE should be emitted with routines
4783*38fd1498Szrj    that never set BLOCK_FOR_INSN on NOTE.  BB_BOUNDARY is true if the
4784*38fd1498Szrj    caller is asked to emit a note before BB_HEAD, or after BB_END.  */
4785*38fd1498Szrj 
4786*38fd1498Szrj static bool
note_outside_basic_block_p(enum insn_note subtype,bool on_bb_boundary_p)4787*38fd1498Szrj note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4788*38fd1498Szrj {
4789*38fd1498Szrj   switch (subtype)
4790*38fd1498Szrj     {
4791*38fd1498Szrj       /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
4792*38fd1498Szrj       case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4793*38fd1498Szrj 	return true;
4794*38fd1498Szrj 
4795*38fd1498Szrj       /* Notes for var tracking and EH region markers can appear between or
4796*38fd1498Szrj 	 inside basic blocks.  If the caller is emitting on the basic block
4797*38fd1498Szrj 	 boundary, do not set BLOCK_FOR_INSN on the new note.  */
4798*38fd1498Szrj       case NOTE_INSN_VAR_LOCATION:
4799*38fd1498Szrj       case NOTE_INSN_EH_REGION_BEG:
4800*38fd1498Szrj       case NOTE_INSN_EH_REGION_END:
4801*38fd1498Szrj 	return on_bb_boundary_p;
4802*38fd1498Szrj 
4803*38fd1498Szrj       /* Otherwise, BLOCK_FOR_INSN must be set.  */
4804*38fd1498Szrj       default:
4805*38fd1498Szrj 	return false;
4806*38fd1498Szrj     }
4807*38fd1498Szrj }
4808*38fd1498Szrj 
/* Emit a note of subtype SUBTYPE after the insn AFTER.  Uses the
   *_nobb variant when the note kind should stay outside any basic
   block (see note_outside_basic_block_p).  */

rtx_note *
emit_note_after (enum insn_note subtype, rtx_insn *after)
{
  rtx_note *note = make_note_raw (subtype);
  /* Barriers have no block; otherwise take AFTER's block.  */
  basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
  bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_after_nobb (note, after);
  else
    add_insn_after (note, after, bb);
  return note;
}
4824*38fd1498Szrj 
/* Emit a note of subtype SUBTYPE before the insn BEFORE.  Uses the
   *_nobb variant when the note kind should stay outside any basic
   block (see note_outside_basic_block_p).  */

rtx_note *
emit_note_before (enum insn_note subtype, rtx_insn *before)
{
  rtx_note *note = make_note_raw (subtype);
  /* Barriers have no block; otherwise take BEFORE's block.  */
  basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
  bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_before_nobb (note, before);
  else
    add_insn_before (note, before, bb);
  return note;
}
4840*38fd1498Szrj 
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.
   Returns the last insn emitted (or AFTER's previous successor
   semantics as provided by emit_pattern_after_noloc).  */

static rtx_insn *
emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
			   rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  /* Nothing was emitted, or no location was requested: done.  */
  if (pattern == NULL_RTX || !loc)
    return last;

  /* Walk the newly emitted insns, i.e. those strictly after AFTER up
     to and including LAST, stamping LOC on every active insn that does
     not already carry a location.  */
  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after)
	  && !JUMP_TABLE_DATA_P (after) /* FIXME */
	  && !INSN_LOCATION (after))
	INSN_LOCATION (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}
4867*38fd1498Szrj 
/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx_insn *
emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
		    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *prev = after;

  /* Optionally look backwards past debug insns so that the location is
     taken from the previous nondebug insn.  */
  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  /* If we found a real insn, let the new insns inherit its location;
     otherwise emit without setting any location.  */
  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
				      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}
4889*38fd1498Szrj 
4890*38fd1498Szrj /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4891*38fd1498Szrj rtx_insn *
emit_insn_after_setloc(rtx pattern,rtx after,int loc)4892*38fd1498Szrj emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4893*38fd1498Szrj {
4894*38fd1498Szrj   return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4895*38fd1498Szrj }
4896*38fd1498Szrj 
4897*38fd1498Szrj /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4898*38fd1498Szrj rtx_insn *
emit_insn_after(rtx pattern,rtx after)4899*38fd1498Szrj emit_insn_after (rtx pattern, rtx after)
4900*38fd1498Szrj {
4901*38fd1498Szrj   return emit_pattern_after (pattern, after, true, make_insn_raw);
4902*38fd1498Szrj }
4903*38fd1498Szrj 
4904*38fd1498Szrj /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4905*38fd1498Szrj rtx_jump_insn *
emit_jump_insn_after_setloc(rtx pattern,rtx after,int loc)4906*38fd1498Szrj emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4907*38fd1498Szrj {
4908*38fd1498Szrj   return as_a <rtx_jump_insn *> (
4909*38fd1498Szrj 	emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
4910*38fd1498Szrj }
4911*38fd1498Szrj 
4912*38fd1498Szrj /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4913*38fd1498Szrj rtx_jump_insn *
emit_jump_insn_after(rtx pattern,rtx after)4914*38fd1498Szrj emit_jump_insn_after (rtx pattern, rtx after)
4915*38fd1498Szrj {
4916*38fd1498Szrj   return as_a <rtx_jump_insn *> (
4917*38fd1498Szrj 	emit_pattern_after (pattern, after, true, make_jump_insn_raw));
4918*38fd1498Szrj }
4919*38fd1498Szrj 
4920*38fd1498Szrj /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4921*38fd1498Szrj rtx_insn *
emit_call_insn_after_setloc(rtx pattern,rtx after,int loc)4922*38fd1498Szrj emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4923*38fd1498Szrj {
4924*38fd1498Szrj   return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4925*38fd1498Szrj }
4926*38fd1498Szrj 
4927*38fd1498Szrj /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4928*38fd1498Szrj rtx_insn *
emit_call_insn_after(rtx pattern,rtx after)4929*38fd1498Szrj emit_call_insn_after (rtx pattern, rtx after)
4930*38fd1498Szrj {
4931*38fd1498Szrj   return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4932*38fd1498Szrj }
4933*38fd1498Szrj 
4934*38fd1498Szrj /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
4935*38fd1498Szrj rtx_insn *
emit_debug_insn_after_setloc(rtx pattern,rtx after,int loc)4936*38fd1498Szrj emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4937*38fd1498Szrj {
4938*38fd1498Szrj   return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4939*38fd1498Szrj }
4940*38fd1498Szrj 
4941*38fd1498Szrj /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
4942*38fd1498Szrj rtx_insn *
emit_debug_insn_after(rtx pattern,rtx after)4943*38fd1498Szrj emit_debug_insn_after (rtx pattern, rtx after)
4944*38fd1498Szrj {
4945*38fd1498Szrj   return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4946*38fd1498Szrj }
4947*38fd1498Szrj 
/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
			    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  /* Remember the insn that preceded the insertion point so the newly
     emitted insns can be located afterwards.  */
  rtx_insn *first = PREV_INSN (before);
  rtx_insn *last = emit_pattern_before_noloc (pattern, before,
					      insnp ? before : NULL_RTX,
					      NULL, make_raw);

  /* Nothing was emitted, or no location was requested: done.  */
  if (pattern == NULL_RTX || !loc)
    return last;

  /* FIRST is NULL when BEFORE was the head of the chain; the new insns
     then start at the head of the current sequence.  */
  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  /* Stamp LOC on every newly emitted active insn that does not already
     carry a location, up to and including LAST.  */
  while (1)
    {
      if (active_insn_p (first)
	  && !JUMP_TABLE_DATA_P (first) /* FIXME */
	  && !INSN_LOCATION (first))
	INSN_LOCATION (first) = loc;
      if (first == last)
	break;
      first = NEXT_INSN (first);
    }
  return last;
}
4982*38fd1498Szrj 
/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
		     bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
  rtx_insn *next = before;

  /* Optionally look backwards past debug insns so that the location is
     taken from the previous nondebug insn.  */
  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  /* If we found a real insn, let the new insns inherit its location;
     otherwise emit without setting any location.  */
  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
				       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
				      insnp ? before : NULL_RTX,
                                      NULL, make_raw);
}
5007*38fd1498Szrj 
5008*38fd1498Szrj /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
5009*38fd1498Szrj rtx_insn *
emit_insn_before_setloc(rtx pattern,rtx_insn * before,int loc)5010*38fd1498Szrj emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
5011*38fd1498Szrj {
5012*38fd1498Szrj   return emit_pattern_before_setloc (pattern, before, loc, true,
5013*38fd1498Szrj 				     make_insn_raw);
5014*38fd1498Szrj }
5015*38fd1498Szrj 
5016*38fd1498Szrj /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
5017*38fd1498Szrj rtx_insn *
emit_insn_before(rtx pattern,rtx before)5018*38fd1498Szrj emit_insn_before (rtx pattern, rtx before)
5019*38fd1498Szrj {
5020*38fd1498Szrj   return emit_pattern_before (pattern, before, true, true, make_insn_raw);
5021*38fd1498Szrj }
5022*38fd1498Szrj 
5023*38fd1498Szrj /* like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
5024*38fd1498Szrj rtx_jump_insn *
emit_jump_insn_before_setloc(rtx pattern,rtx_insn * before,int loc)5025*38fd1498Szrj emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
5026*38fd1498Szrj {
5027*38fd1498Szrj   return as_a <rtx_jump_insn *> (
5028*38fd1498Szrj 	emit_pattern_before_setloc (pattern, before, loc, false,
5029*38fd1498Szrj 				    make_jump_insn_raw));
5030*38fd1498Szrj }
5031*38fd1498Szrj 
5032*38fd1498Szrj /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
5033*38fd1498Szrj rtx_jump_insn *
emit_jump_insn_before(rtx pattern,rtx before)5034*38fd1498Szrj emit_jump_insn_before (rtx pattern, rtx before)
5035*38fd1498Szrj {
5036*38fd1498Szrj   return as_a <rtx_jump_insn *> (
5037*38fd1498Szrj 	emit_pattern_before (pattern, before, true, false,
5038*38fd1498Szrj 			     make_jump_insn_raw));
5039*38fd1498Szrj }
5040*38fd1498Szrj 
5041*38fd1498Szrj /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
5042*38fd1498Szrj rtx_insn *
emit_call_insn_before_setloc(rtx pattern,rtx_insn * before,int loc)5043*38fd1498Szrj emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
5044*38fd1498Szrj {
5045*38fd1498Szrj   return emit_pattern_before_setloc (pattern, before, loc, false,
5046*38fd1498Szrj 				     make_call_insn_raw);
5047*38fd1498Szrj }
5048*38fd1498Szrj 
5049*38fd1498Szrj /* Like emit_call_insn_before_noloc,
5050*38fd1498Szrj    but set insn_location according to BEFORE.  */
5051*38fd1498Szrj rtx_insn *
emit_call_insn_before(rtx pattern,rtx_insn * before)5052*38fd1498Szrj emit_call_insn_before (rtx pattern, rtx_insn *before)
5053*38fd1498Szrj {
5054*38fd1498Szrj   return emit_pattern_before (pattern, before, true, false,
5055*38fd1498Szrj 			      make_call_insn_raw);
5056*38fd1498Szrj }
5057*38fd1498Szrj 
5058*38fd1498Szrj /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
5059*38fd1498Szrj rtx_insn *
emit_debug_insn_before_setloc(rtx pattern,rtx before,int loc)5060*38fd1498Szrj emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
5061*38fd1498Szrj {
5062*38fd1498Szrj   return emit_pattern_before_setloc (pattern, before, loc, false,
5063*38fd1498Szrj 				     make_debug_insn_raw);
5064*38fd1498Szrj }
5065*38fd1498Szrj 
5066*38fd1498Szrj /* Like emit_debug_insn_before_noloc,
5067*38fd1498Szrj    but set insn_location according to BEFORE.  */
5068*38fd1498Szrj rtx_insn *
emit_debug_insn_before(rtx pattern,rtx_insn * before)5069*38fd1498Szrj emit_debug_insn_before (rtx pattern, rtx_insn *before)
5070*38fd1498Szrj {
5071*38fd1498Szrj   return emit_pattern_before (pattern, before, false, false,
5072*38fd1498Szrj 			      make_debug_insn_raw);
5073*38fd1498Szrj }
5074*38fd1498Szrj 
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx_insn *
emit_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  /* Emitting nothing leaves the chain unchanged.  */
  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* X is already an insn (possibly the head of a chain linked via
	 NEXT_INSN); splice each element onto the end in order.  */
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      /* These must never be handed to emit_insn directly.  */
      gcc_unreachable ();
      break;
#endif

    default:
      /* X is a bare pattern; wrap it in a fresh INSN before adding.  */
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
5123*38fd1498Szrj 
/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.
   Returns the last insn emitted.  */

rtx_insn *
emit_debug_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  /* Emitting nothing leaves the chain unchanged.  */
  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* X is already an insn (possibly the head of a chain linked via
	 NEXT_INSN); splice each element onto the end in order.  */
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      /* These must never be handed to emit_debug_insn directly.  */
      gcc_unreachable ();
      break;
#endif

    default:
      /* X is a bare pattern; wrap it in a fresh DEBUG_INSN.  */
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
5170*38fd1498Szrj 
/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.
   Returns the last insn emitted.  */

rtx_insn *
emit_jump_insn (rtx x)
{
  /* Note: unlike emit_insn, a NULL X is not accepted here, and LAST
     starts as NULL rather than the current last insn.  */
  rtx_insn *last = NULL;
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* X is already an insn (possibly the head of a chain linked via
	 NEXT_INSN); splice each element onto the end in order.  */
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      /* These must never be handed to emit_jump_insn directly.  */
      gcc_unreachable ();
      break;
#endif

    default:
      /* X is a bare pattern; wrap it in a fresh JUMP_INSN.  */
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
5214*38fd1498Szrj 
/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.
   Returns the last insn emitted.  */

rtx_insn *
emit_call_insn (rtx x)
{
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      /* X is already an insn (or chain); delegate splicing to
	 emit_insn rather than duplicating its loop here.  */
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
    case JUMP_TABLE_DATA:
      /* These must never be handed to emit_call_insn directly.  */
      gcc_unreachable ();
      break;
#endif

    default:
      /* X is a bare pattern; wrap it in a fresh CALL_INSN.  */
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}
5250*38fd1498Szrj 
/* Add the label LABEL to the end of the doubly-linked list.  */

rtx_code_label *
emit_label (rtx uncast_label)
{
  rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);

  /* NOTE(review): the assert presumably catches a label being emitted
     twice — a not-yet-emitted label appears to carry INSN_UID 0.  */
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn (label);
  return label;
}
5263*38fd1498Szrj 
5264*38fd1498Szrj /* Make an insn of code JUMP_TABLE_DATA
5265*38fd1498Szrj    and add it to the end of the doubly-linked list.  */
5266*38fd1498Szrj 
5267*38fd1498Szrj rtx_jump_table_data *
emit_jump_table_data(rtx table)5268*38fd1498Szrj emit_jump_table_data (rtx table)
5269*38fd1498Szrj {
5270*38fd1498Szrj   rtx_jump_table_data *jump_table_data =
5271*38fd1498Szrj     as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
5272*38fd1498Szrj   INSN_UID (jump_table_data) = cur_insn_uid++;
5273*38fd1498Szrj   PATTERN (jump_table_data) = table;
5274*38fd1498Szrj   BLOCK_FOR_INSN (jump_table_data) = NULL;
5275*38fd1498Szrj   add_insn (jump_table_data);
5276*38fd1498Szrj   return jump_table_data;
5277*38fd1498Szrj }
5278*38fd1498Szrj 
5279*38fd1498Szrj /* Make an insn of code BARRIER
5280*38fd1498Szrj    and add it to the end of the doubly-linked list.  */
5281*38fd1498Szrj 
5282*38fd1498Szrj rtx_barrier *
emit_barrier(void)5283*38fd1498Szrj emit_barrier (void)
5284*38fd1498Szrj {
5285*38fd1498Szrj   rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
5286*38fd1498Szrj   INSN_UID (barrier) = cur_insn_uid++;
5287*38fd1498Szrj   add_insn (barrier);
5288*38fd1498Szrj   return barrier;
5289*38fd1498Szrj }
5290*38fd1498Szrj 
5291*38fd1498Szrj /* Emit a copy of note ORIG.  */
5292*38fd1498Szrj 
5293*38fd1498Szrj rtx_note *
emit_note_copy(rtx_note * orig)5294*38fd1498Szrj emit_note_copy (rtx_note *orig)
5295*38fd1498Szrj {
5296*38fd1498Szrj   enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
5297*38fd1498Szrj   rtx_note *note = make_note_raw (kind);
5298*38fd1498Szrj   NOTE_DATA (note) = NOTE_DATA (orig);
5299*38fd1498Szrj   add_insn (note);
5300*38fd1498Szrj   return note;
5301*38fd1498Szrj }
5302*38fd1498Szrj 
5303*38fd1498Szrj /* Make an insn of code NOTE or type NOTE_NO
5304*38fd1498Szrj    and add it to the end of the doubly-linked list.  */
5305*38fd1498Szrj 
5306*38fd1498Szrj rtx_note *
emit_note(enum insn_note kind)5307*38fd1498Szrj emit_note (enum insn_note kind)
5308*38fd1498Szrj {
5309*38fd1498Szrj   rtx_note *note = make_note_raw (kind);
5310*38fd1498Szrj   add_insn (note);
5311*38fd1498Szrj   return note;
5312*38fd1498Szrj }
5313*38fd1498Szrj 
5314*38fd1498Szrj /* Emit a clobber of lvalue X.  */
5315*38fd1498Szrj 
5316*38fd1498Szrj rtx_insn *
emit_clobber(rtx x)5317*38fd1498Szrj emit_clobber (rtx x)
5318*38fd1498Szrj {
5319*38fd1498Szrj   /* CONCATs should not appear in the insn stream.  */
5320*38fd1498Szrj   if (GET_CODE (x) == CONCAT)
5321*38fd1498Szrj     {
5322*38fd1498Szrj       emit_clobber (XEXP (x, 0));
5323*38fd1498Szrj       return emit_clobber (XEXP (x, 1));
5324*38fd1498Szrj     }
5325*38fd1498Szrj   return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5326*38fd1498Szrj }
5327*38fd1498Szrj 
5328*38fd1498Szrj /* Return a sequence of insns to clobber lvalue X.  */
5329*38fd1498Szrj 
5330*38fd1498Szrj rtx_insn *
gen_clobber(rtx x)5331*38fd1498Szrj gen_clobber (rtx x)
5332*38fd1498Szrj {
5333*38fd1498Szrj   rtx_insn *seq;
5334*38fd1498Szrj 
5335*38fd1498Szrj   start_sequence ();
5336*38fd1498Szrj   emit_clobber (x);
5337*38fd1498Szrj   seq = get_insns ();
5338*38fd1498Szrj   end_sequence ();
5339*38fd1498Szrj   return seq;
5340*38fd1498Szrj }
5341*38fd1498Szrj 
5342*38fd1498Szrj /* Emit a use of rvalue X.  */
5343*38fd1498Szrj 
5344*38fd1498Szrj rtx_insn *
emit_use(rtx x)5345*38fd1498Szrj emit_use (rtx x)
5346*38fd1498Szrj {
5347*38fd1498Szrj   /* CONCATs should not appear in the insn stream.  */
5348*38fd1498Szrj   if (GET_CODE (x) == CONCAT)
5349*38fd1498Szrj     {
5350*38fd1498Szrj       emit_use (XEXP (x, 0));
5351*38fd1498Szrj       return emit_use (XEXP (x, 1));
5352*38fd1498Szrj     }
5353*38fd1498Szrj   return emit_insn (gen_rtx_USE (VOIDmode, x));
5354*38fd1498Szrj }
5355*38fd1498Szrj 
5356*38fd1498Szrj /* Return a sequence of insns to use rvalue X.  */
5357*38fd1498Szrj 
5358*38fd1498Szrj rtx_insn *
gen_use(rtx x)5359*38fd1498Szrj gen_use (rtx x)
5360*38fd1498Szrj {
5361*38fd1498Szrj   rtx_insn *seq;
5362*38fd1498Szrj 
5363*38fd1498Szrj   start_sequence ();
5364*38fd1498Szrj   emit_use (x);
5365*38fd1498Szrj   seq = get_insns ();
5366*38fd1498Szrj   end_sequence ();
5367*38fd1498Szrj   return seq;
5368*38fd1498Szrj }
5369*38fd1498Szrj 
/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
   Return the set in INSN that such notes describe, or NULL if the notes
   have no meaning for INSN.  */

rtx
set_for_reg_notes (rtx insn)
{
  rtx pat, reg;

  if (!INSN_P (insn))
    return NULL_RTX;

  pat = PATTERN (insn);
  if (GET_CODE (pat) == PARALLEL)
    {
      /* We do not use single_set because that ignores SETs of unused
	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
	 PARALLEL to have a single SET.  */
      if (multiple_sets (insn))
	return NULL_RTX;
      pat = XVECEXP (pat, 0, 0);
    }

  if (GET_CODE (pat) != SET)
    return NULL_RTX;

  reg = SET_DEST (pat);

  /* Notes apply to the contents of a STRICT_LOW_PART.  */
  if (GET_CODE (reg) == STRICT_LOW_PART
      || GET_CODE (reg) == ZERO_EXTRACT)
    reg = XEXP (reg, 0);

  /* Check that we have a register.  */
  if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
    return NULL_RTX;

  /* Return the SET itself, not the destination register.  */
  return pat;
}
5409*38fd1498Szrj 
/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, its datum is replaced instead.
   Return the note, or NULL_RTX if no note could be attached.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  /* First, validate that DATUM may be attached at all.  */
  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* We need to support the REG_EQUAL on USE trick of find_reloads.  */
      if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
	return NULL_RTX;

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;

      /* Notes with side effects are dangerous.  Even if the side-effect
	 initially mirrors one in PATTERN (INSN), later optimizations
	 might alter the way that the final register value is calculated
	 and so move or alter the side-effect in some way.  The note would
	 then no longer be a valid substitution for SET_SRC.  */
      if (side_effects_p (datum))
	return NULL_RTX;
      break;

    default:
      break;
    }

  /* Replace the datum of an existing note, or add a fresh note.  */
  if (note)
    XEXP (note, 0) = datum;
  else
    {
      add_reg_note (insn, kind, datum);
      /* add_reg_note pushes onto the front of REG_NOTES.  */
      note = REG_NOTES (insn);
    }

  /* Equivalence notes affect dataflow; tell DF to rescan them.  */
  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (as_a <rtx_insn *> (insn));
      break;
    default:
      break;
    }

  return note;
}
5464*38fd1498Szrj 
5465*38fd1498Szrj /* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */
5466*38fd1498Szrj rtx
set_dst_reg_note(rtx insn,enum reg_note kind,rtx datum,rtx dst)5467*38fd1498Szrj set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5468*38fd1498Szrj {
5469*38fd1498Szrj   rtx set = set_for_reg_notes (insn);
5470*38fd1498Szrj 
5471*38fd1498Szrj   if (set && SET_DEST (set) == dst)
5472*38fd1498Szrj     return set_unique_reg_note (insn, kind, datum);
5473*38fd1498Szrj   return NULL_RTX;
5474*38fd1498Szrj }
5475*38fd1498Szrj 
/* Emit the rtl pattern X as an appropriate kind of insn.  Also emit a
   following barrier if the instruction needs one and if ALLOW_BARRIER_P
   is true.

   If X is a label, it is simply added into the insn chain.  */

rtx_insn *
emit (rtx x, bool allow_barrier_p)
{
  /* Let classify_insn decide what kind of insn the pattern wants.  */
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case  JUMP_INSN:
      {
	rtx_insn *insn = emit_jump_insn (x);
	/* Control cannot fall through an unconditional jump or a
	   return, so follow it with a barrier when allowed.  Note the
	   barrier, not the jump, is what gets returned in that case.  */
	if (allow_barrier_p
	    && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
	  return emit_barrier ();
	return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
5509*38fd1498Szrj 
/* Free list of sequence_stack entries; start_sequence pops from here
   before allocating a new one.  GTY((deletable)): the garbage
   collector is free to discard the cached entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5512*38fd1498Szrj 
/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  /* Reuse a cached stack entry if one is available, else allocate.  */
  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc<sequence_stack> ();

  /* Save the current insn chain on the sequence stack ...  */
  tem->next = get_current_sequence ()->next;
  tem->first = get_insns ();
  tem->last = get_last_insn ();
  get_current_sequence ()->next = tem;

  /* ... and start with an empty chain.  */
  set_first_insn (0);
  set_last_insn (0);
}
5541*38fd1498Szrj 
5542*38fd1498Szrj /* Set up the insn chain starting with FIRST as the current sequence,
5543*38fd1498Szrj    saving the previously current one.  See the documentation for
5544*38fd1498Szrj    start_sequence for more information about how to use this function.  */
5545*38fd1498Szrj 
5546*38fd1498Szrj void
push_to_sequence(rtx_insn * first)5547*38fd1498Szrj push_to_sequence (rtx_insn *first)
5548*38fd1498Szrj {
5549*38fd1498Szrj   rtx_insn *last;
5550*38fd1498Szrj 
5551*38fd1498Szrj   start_sequence ();
5552*38fd1498Szrj 
5553*38fd1498Szrj   for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5554*38fd1498Szrj     ;
5555*38fd1498Szrj 
5556*38fd1498Szrj   set_first_insn (first);
5557*38fd1498Szrj   set_last_insn (last);
5558*38fd1498Szrj }
5559*38fd1498Szrj 
5560*38fd1498Szrj /* Like push_to_sequence, but take the last insn as an argument to avoid
5561*38fd1498Szrj    looping through the list.  */
5562*38fd1498Szrj 
5563*38fd1498Szrj void
push_to_sequence2(rtx_insn * first,rtx_insn * last)5564*38fd1498Szrj push_to_sequence2 (rtx_insn *first, rtx_insn *last)
5565*38fd1498Szrj {
5566*38fd1498Szrj   start_sequence ();
5567*38fd1498Szrj 
5568*38fd1498Szrj   set_first_insn (first);
5569*38fd1498Szrj   set_last_insn (last);
5570*38fd1498Szrj }
5571*38fd1498Szrj 
5572*38fd1498Szrj /* Set up the outer-level insn chain
5573*38fd1498Szrj    as the current sequence, saving the previously current one.  */
5574*38fd1498Szrj 
5575*38fd1498Szrj void
push_topmost_sequence(void)5576*38fd1498Szrj push_topmost_sequence (void)
5577*38fd1498Szrj {
5578*38fd1498Szrj   struct sequence_stack *top;
5579*38fd1498Szrj 
5580*38fd1498Szrj   start_sequence ();
5581*38fd1498Szrj 
5582*38fd1498Szrj   top = get_topmost_sequence ();
5583*38fd1498Szrj   set_first_insn (top->first);
5584*38fd1498Szrj   set_last_insn (top->last);
5585*38fd1498Szrj }
5586*38fd1498Szrj 
5587*38fd1498Szrj /* After emitting to the outer-level insn chain, update the outer-level
5588*38fd1498Szrj    insn chain, and restore the previous saved state.  */
5589*38fd1498Szrj 
5590*38fd1498Szrj void
pop_topmost_sequence(void)5591*38fd1498Szrj pop_topmost_sequence (void)
5592*38fd1498Szrj {
5593*38fd1498Szrj   struct sequence_stack *top;
5594*38fd1498Szrj 
5595*38fd1498Szrj   top = get_topmost_sequence ();
5596*38fd1498Szrj   top->first = get_insns ();
5597*38fd1498Szrj   top->last = get_last_insn ();
5598*38fd1498Szrj 
5599*38fd1498Szrj   end_sequence ();
5600*38fd1498Szrj }
5601*38fd1498Szrj 
5602*38fd1498Szrj /* After emitting to a sequence, restore previous saved state.
5603*38fd1498Szrj 
5604*38fd1498Szrj    To get the contents of the sequence just made, you must call
5605*38fd1498Szrj    `get_insns' *before* calling here.
5606*38fd1498Szrj 
5607*38fd1498Szrj    If the compiler might have deferred popping arguments while
5608*38fd1498Szrj    generating this sequence, and this sequence will not be immediately
5609*38fd1498Szrj    inserted into the instruction stream, use do_pending_stack_adjust
5610*38fd1498Szrj    before calling get_insns.  That will ensure that the deferred
5611*38fd1498Szrj    pops are inserted into this sequence, and not into some random
5612*38fd1498Szrj    location in the instruction stream.  See INHIBIT_DEFER_POP for more
5613*38fd1498Szrj    information about deferred popping of arguments.  */
5614*38fd1498Szrj 
5615*38fd1498Szrj void
end_sequence(void)5616*38fd1498Szrj end_sequence (void)
5617*38fd1498Szrj {
5618*38fd1498Szrj   struct sequence_stack *tem = get_current_sequence ()->next;
5619*38fd1498Szrj 
5620*38fd1498Szrj   set_first_insn (tem->first);
5621*38fd1498Szrj   set_last_insn (tem->last);
5622*38fd1498Szrj   get_current_sequence ()->next = tem->next;
5623*38fd1498Szrj 
5624*38fd1498Szrj   memset (tem, 0, sizeof (*tem));
5625*38fd1498Szrj   tem->next = free_sequence_stack;
5626*38fd1498Szrj   free_sequence_stack = tem;
5627*38fd1498Szrj }
5628*38fd1498Szrj 
5629*38fd1498Szrj /* Return 1 if currently emitting into a sequence.  */
5630*38fd1498Szrj 
5631*38fd1498Szrj int
in_sequence_p(void)5632*38fd1498Szrj in_sequence_p (void)
5633*38fd1498Szrj {
5634*38fd1498Szrj   return get_current_sequence ()->next != 0;
5635*38fd1498Szrj }
5636*38fd1498Szrj 
5637*38fd1498Szrj /* Put the various virtual registers into REGNO_REG_RTX.  */
5638*38fd1498Szrj 
static void
init_virtual_regs (void)
{
  /* Install the shared virtual-register rtxes (created by
     init_emit_regs) into the pseudo-register table, so lookups by
     virtual register number find them.  */
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}
5650*38fd1498Szrj 
5651*38fd1498Szrj 
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.
   copy_insn resets this state before each top-level copy.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;
5670*38fd1498Szrj 
5671*38fd1498Szrj /* Recursively create a new copy of an rtx for copy_insn.
5672*38fd1498Szrj    This function differs from copy_rtx in that it handles SCRATCHes and
5673*38fd1498Szrj    ASM_OPERANDs properly.
5674*38fd1498Szrj    Normally, this function is not used directly; use copy_insn as front end.
5675*38fd1498Szrj    However, you could first copy an insn pattern with copy_insn and then use
5676*38fd1498Szrj    this function afterwards to properly copy any REG_NOTEs containing
5677*38fd1498Szrj    SCRATCHes.  */
5678*38fd1498Szrj 
rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  /* A null rtx copies to null.  */
  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      /* These codes are shared rather than copied.  */
      return orig;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0))
	  && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
	return orig;
      break;

    case SCRATCH:
      /* Reuse a SCRATCH copy recorded earlier during this copy_insn
	 invocation, so repeated references stay identical.  */
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  /* Recursively copy each operand according to its format letter.  */
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	/* Share the vectors of an already-copied ASM_OPERANDS rather
	   than copying them a second time.  */
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 'p':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  /* Record a freshly copied SCRATCH so later references share it.  */
  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  /* Remember the ASM_OPERANDS vectors so sibling references to the
     same vectors are shared with this copy.  */
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
5804*38fd1498Szrj 
5805*38fd1498Szrj /* Create a new copy of an rtx.
5806*38fd1498Szrj    This function differs from copy_rtx in that it handles SCRATCHes and
5807*38fd1498Szrj    ASM_OPERANDs properly.
5808*38fd1498Szrj    INSN doesn't really have to be a full INSN; it could be just the
5809*38fd1498Szrj    pattern.  */
5810*38fd1498Szrj rtx
copy_insn(rtx insn)5811*38fd1498Szrj copy_insn (rtx insn)
5812*38fd1498Szrj {
5813*38fd1498Szrj   copy_insn_n_scratches = 0;
5814*38fd1498Szrj   orig_asm_operands_vector = 0;
5815*38fd1498Szrj   orig_asm_constraints_vector = 0;
5816*38fd1498Szrj   copy_asm_operands_vector = 0;
5817*38fd1498Szrj   copy_asm_constraints_vector = 0;
5818*38fd1498Szrj   return copy_insn_1 (insn);
5819*38fd1498Szrj }
5820*38fd1498Szrj 
5821*38fd1498Szrj /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5822*38fd1498Szrj    on that assumption that INSN itself remains in its original place.  */
5823*38fd1498Szrj 
5824*38fd1498Szrj rtx_insn *
copy_delay_slot_insn(rtx_insn * insn)5825*38fd1498Szrj copy_delay_slot_insn (rtx_insn *insn)
5826*38fd1498Szrj {
5827*38fd1498Szrj   /* Copy INSN with its rtx_code, all its notes, location etc.  */
5828*38fd1498Szrj   insn = as_a <rtx_insn *> (copy_rtx (insn));
5829*38fd1498Szrj   INSN_UID (insn) = cur_insn_uid++;
5830*38fd1498Szrj   return insn;
5831*38fd1498Szrj }
5832*38fd1498Szrj 
5833*38fd1498Szrj /* Initialize data structures and variables in this file
5834*38fd1498Szrj    before generating rtl for each function.  */
5835*38fd1498Szrj 
void
init_emit (void)
{
  /* Start each function with an empty insn chain and fresh UID
     counters.  Nondebug insn UIDs start at MIN_NONDEBUG_INSN_UID when
     that is nonzero; debug insn UIDs count separately from 1.  */
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  /* Pseudo register numbers begin just past the virtual registers.  */
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  get_current_sequence ()->next = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  /* Initial capacity: the fixed registers plus headroom for 100
     pseudos — presumably grown elsewhere when exceeded (growth path
     not visible in this file chunk).  */
  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx
    = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  /* Record the known alignment of the stack-related pointers.  */
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
5899*38fd1498Szrj 
5900*38fd1498Szrj /* Return the value of element I of CONST_VECTOR X as a wide_int.  */
5901*38fd1498Szrj 
wide_int
const_vector_int_elt (const_rtx x, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
  if (i < (unsigned int) XVECLEN (x, 0))
    return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);

  /* Identify the pattern that contains element I and work out the index of
     the last encoded element for that pattern.  COUNT is I's position
     within the pattern's sequence of elements.  */
  unsigned int encoded_nelts = const_vector_encoded_nelts (x);
  unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
  unsigned int count = i / npatterns;
  unsigned int pattern = i % npatterns;
  unsigned int final_i = encoded_nelts - npatterns + pattern;

  /* If there are no steps, the final encoded value is the right one.  */
  if (!CONST_VECTOR_STEPPED_P (x))
    return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);

  /* Otherwise work out the value from the last two encoded elements:
     with V2 at sequence position 2 and step V2 - V1, element COUNT is
     V2 + (COUNT - 2) * (V2 - V1).  */
  rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
  rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
  wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
			   rtx_mode_t (v1, elt_mode));
  return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
}
5929*38fd1498Szrj 
5930*38fd1498Szrj /* Return the value of element I of CONST_VECTOR X.  */
5931*38fd1498Szrj 
5932*38fd1498Szrj rtx
const_vector_elt(const_rtx x,unsigned int i)5933*38fd1498Szrj const_vector_elt (const_rtx x, unsigned int i)
5934*38fd1498Szrj {
5935*38fd1498Szrj   /* First handle elements that are directly encoded.  */
5936*38fd1498Szrj   if (i < (unsigned int) XVECLEN (x, 0))
5937*38fd1498Szrj     return CONST_VECTOR_ENCODED_ELT (x, i);
5938*38fd1498Szrj 
5939*38fd1498Szrj   /* If there are no steps, the final encoded value is the right one.  */
5940*38fd1498Szrj   if (!CONST_VECTOR_STEPPED_P (x))
5941*38fd1498Szrj     {
5942*38fd1498Szrj       /* Identify the pattern that contains element I and work out the index of
5943*38fd1498Szrj 	 the last encoded element for that pattern.  */
5944*38fd1498Szrj       unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5945*38fd1498Szrj       unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5946*38fd1498Szrj       unsigned int pattern = i % npatterns;
5947*38fd1498Szrj       unsigned int final_i = encoded_nelts - npatterns + pattern;
5948*38fd1498Szrj       return CONST_VECTOR_ENCODED_ELT (x, final_i);
5949*38fd1498Szrj     }
5950*38fd1498Szrj 
5951*38fd1498Szrj   /* Otherwise work out the value from the last two encoded elements.  */
5952*38fd1498Szrj   return immed_wide_int_const (const_vector_int_elt (x, i),
5953*38fd1498Szrj 			       GET_MODE_INNER (GET_MODE (x)));
5954*38fd1498Szrj }
5955*38fd1498Szrj 
5956*38fd1498Szrj /* Return true if X is a valid element for a CONST_VECTOR of the given
5957*38fd1498Szrj   mode.  */
5958*38fd1498Szrj 
5959*38fd1498Szrj bool
valid_for_const_vector_p(machine_mode,rtx x)5960*38fd1498Szrj valid_for_const_vector_p (machine_mode, rtx x)
5961*38fd1498Szrj {
5962*38fd1498Szrj   return (CONST_SCALAR_INT_P (x)
5963*38fd1498Szrj 	  || CONST_DOUBLE_AS_FLOAT_P (x)
5964*38fd1498Szrj 	  || CONST_FIXED_P (x));
5965*38fd1498Szrj }
5966*38fd1498Szrj 
5967*38fd1498Szrj /* Generate a vector constant of mode MODE in which every element has
5968*38fd1498Szrj    value ELT.  */
5969*38fd1498Szrj 
5970*38fd1498Szrj rtx
gen_const_vec_duplicate(machine_mode mode,rtx elt)5971*38fd1498Szrj gen_const_vec_duplicate (machine_mode mode, rtx elt)
5972*38fd1498Szrj {
5973*38fd1498Szrj   rtx_vector_builder builder (mode, 1, 1);
5974*38fd1498Szrj   builder.quick_push (elt);
5975*38fd1498Szrj   return builder.build ();
5976*38fd1498Szrj }
5977*38fd1498Szrj 
5978*38fd1498Szrj /* Return a vector rtx of mode MODE in which every element has value X.
5979*38fd1498Szrj    The result will be a constant if X is constant.  */
5980*38fd1498Szrj 
5981*38fd1498Szrj rtx
gen_vec_duplicate(machine_mode mode,rtx x)5982*38fd1498Szrj gen_vec_duplicate (machine_mode mode, rtx x)
5983*38fd1498Szrj {
5984*38fd1498Szrj   if (valid_for_const_vector_p (mode, x))
5985*38fd1498Szrj     return gen_const_vec_duplicate (mode, x);
5986*38fd1498Szrj   return gen_rtx_VEC_DUPLICATE (mode, x);
5987*38fd1498Szrj }
5988*38fd1498Szrj 
5989*38fd1498Szrj /* A subroutine of const_vec_series_p that handles the case in which:
5990*38fd1498Szrj 
5991*38fd1498Szrj      (GET_CODE (X) == CONST_VECTOR
5992*38fd1498Szrj       && CONST_VECTOR_NPATTERNS (X) == 1
5993*38fd1498Szrj       && !CONST_VECTOR_DUPLICATE_P (X))
5994*38fd1498Szrj 
5995*38fd1498Szrj    is known to hold.  */
5996*38fd1498Szrj 
bool
const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
{
  /* Stepped sequences are only defined for integers, to avoid specifying
     rounding behavior.  */
  if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
    return false;

  /* A non-duplicated vector with two elements can always be seen as a
     series with a nonzero step.  Longer vectors must have a stepped
     encoding.  */
  if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
      && !CONST_VECTOR_STEPPED_P (x))
    return false;

  /* Calculate the step between the first and second elements.  */
  scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
  rtx base = CONST_VECTOR_ELT (x, 0);
  rtx step = simplify_binary_operation (MINUS, inner,
					CONST_VECTOR_ENCODED_ELT (x, 1), base);
  /* Reject a zero step; such a vector is a duplicate, not a series.  */
  if (rtx_equal_p (step, CONST0_RTX (inner)))
    return false;

  /* If we have a stepped encoding, check that the step between the
     second and third elements is the same as STEP.  */
  if (CONST_VECTOR_STEPPED_P (x))
    {
      rtx diff = simplify_binary_operation (MINUS, inner,
					    CONST_VECTOR_ENCODED_ELT (x, 2),
					    CONST_VECTOR_ENCODED_ELT (x, 1));
      if (!rtx_equal_p (step, diff))
	return false;
    }

  /* Report the series as BASE + I * STEP through the out parameters.  */
  *base_out = base;
  *step_out = step;
  return true;
}
6035*38fd1498Szrj 
6036*38fd1498Szrj /* Generate a vector constant of mode MODE in which element I has
6037*38fd1498Szrj    the value BASE + I * STEP.  */
6038*38fd1498Szrj 
6039*38fd1498Szrj rtx
gen_const_vec_series(machine_mode mode,rtx base,rtx step)6040*38fd1498Szrj gen_const_vec_series (machine_mode mode, rtx base, rtx step)
6041*38fd1498Szrj {
6042*38fd1498Szrj   gcc_assert (valid_for_const_vector_p (mode, base)
6043*38fd1498Szrj 	      && valid_for_const_vector_p (mode, step));
6044*38fd1498Szrj 
6045*38fd1498Szrj   rtx_vector_builder builder (mode, 1, 3);
6046*38fd1498Szrj   builder.quick_push (base);
6047*38fd1498Szrj   for (int i = 1; i < 3; ++i)
6048*38fd1498Szrj     builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
6049*38fd1498Szrj 					     builder[i - 1], step));
6050*38fd1498Szrj   return builder.build ();
6051*38fd1498Szrj }
6052*38fd1498Szrj 
6053*38fd1498Szrj /* Generate a vector of mode MODE in which element I has the value
6054*38fd1498Szrj    BASE + I * STEP.  The result will be a constant if BASE and STEP
6055*38fd1498Szrj    are both constants.  */
6056*38fd1498Szrj 
6057*38fd1498Szrj rtx
gen_vec_series(machine_mode mode,rtx base,rtx step)6058*38fd1498Szrj gen_vec_series (machine_mode mode, rtx base, rtx step)
6059*38fd1498Szrj {
6060*38fd1498Szrj   if (step == const0_rtx)
6061*38fd1498Szrj     return gen_vec_duplicate (mode, base);
6062*38fd1498Szrj   if (valid_for_const_vector_p (mode, base)
6063*38fd1498Szrj       && valid_for_const_vector_p (mode, step))
6064*38fd1498Szrj     return gen_const_vec_series (mode, base, step);
6065*38fd1498Szrj   return gen_rtx_VEC_SERIES (mode, base, step);
6066*38fd1498Szrj }
6067*38fd1498Szrj 
6068*38fd1498Szrj /* Generate a new vector constant for mode MODE and constant value
6069*38fd1498Szrj    CONSTANT.  */
6070*38fd1498Szrj 
6071*38fd1498Szrj static rtx
gen_const_vector(machine_mode mode,int constant)6072*38fd1498Szrj gen_const_vector (machine_mode mode, int constant)
6073*38fd1498Szrj {
6074*38fd1498Szrj   machine_mode inner = GET_MODE_INNER (mode);
6075*38fd1498Szrj 
6076*38fd1498Szrj   gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
6077*38fd1498Szrj 
6078*38fd1498Szrj   rtx el = const_tiny_rtx[constant][(int) inner];
6079*38fd1498Szrj   gcc_assert (el);
6080*38fd1498Szrj 
6081*38fd1498Szrj   return gen_const_vec_duplicate (mode, el);
6082*38fd1498Szrj }
6083*38fd1498Szrj 
6084*38fd1498Szrj /* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
6085*38fd1498Szrj    all elements are zero, and the one vector when all elements are one.  */
6086*38fd1498Szrj rtx
gen_rtx_CONST_VECTOR(machine_mode mode,rtvec v)6087*38fd1498Szrj gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
6088*38fd1498Szrj {
6089*38fd1498Szrj   gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));
6090*38fd1498Szrj 
6091*38fd1498Szrj   /* If the values are all the same, check to see if we can use one of the
6092*38fd1498Szrj      standard constant vectors.  */
6093*38fd1498Szrj   if (rtvec_all_equal_p (v))
6094*38fd1498Szrj     return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));
6095*38fd1498Szrj 
6096*38fd1498Szrj   unsigned int nunits = GET_NUM_ELEM (v);
6097*38fd1498Szrj   rtx_vector_builder builder (mode, nunits, 1);
6098*38fd1498Szrj   for (unsigned int i = 0; i < nunits; ++i)
6099*38fd1498Szrj     builder.quick_push (RTVEC_ELT (v, i));
6100*38fd1498Szrj   return builder.build (v);
6101*38fd1498Szrj }
6102*38fd1498Szrj 
6103*38fd1498Szrj /* Initialise global register information required by all functions.  */
6104*38fd1498Szrj 
void
init_emit_regs (void)
{
  int i;
  machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes */
  reg_attrs_htab->empty ();

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  /* Only targets with a valid PIC register number get a PIC rtx.  */
  pic_offset_table_rtx = NULL_RTX;
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  /* Build the default memory attributes for each machine mode; sized
     modes get a known size, and strict-alignment targets record the
     mode's natural alignment.  */
  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode && mode != VOIDmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }

  split_branch_probability = profile_probability::uninitialized ();
}
6167*38fd1498Szrj 
6168*38fd1498Szrj /* Initialize global machine_mode variables.  */
6169*38fd1498Szrj 
void
init_derived_machine_modes (void)
{
  /* Scan the integer modes once, remembering the first mode whose
     bitsize matches BITS_PER_UNIT (for byte_mode) and the first whose
     bitsize matches BITS_PER_WORD (for word_mode).  */
  opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();

      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && !opt_byte_mode.exists ())
	opt_byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && !opt_word_mode.exists ())
	opt_word_mode = mode;
    }

  /* Both modes must exist for any sane target; require () asserts so.  */
  byte_mode = opt_byte_mode.require ();
  word_mode = opt_word_mode.require ();
  /* ptr_mode is the integer mode matching the size of a pointer.  */
  ptr_mode = as_a <scalar_int_mode>
    (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
}
6192*38fd1498Szrj 
/* Create some permanent unique rtl objects shared between all functions.
   Called once at compiler startup; initializes the RTL constant-sharing
   hash tables and the const_tiny_rtx cache of shared 0/1/2/-1 constants
   for every machine mode.  */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  scalar_float_mode double_mode;
  opt_scalar_mode smode_iter;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  if (NUM_POLY_INT_COEFFS > 1)
    const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Process stack-limiting command-line options.  */
  if (opt_fstack_limit_symbol_arg != NULL)
    stack_limit_rtx
      = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
  if (opt_fstack_limit_register_no >= 0)
    stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  /* Reuse a cached CONST_INT for const_true_rtx when STORE_FLAG_VALUE
     is small enough; otherwise create (and hash) a fresh one.  */
  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();

  /* Set up the canonical REAL_VALUE_TYPE constants 0, 1, 2, -1, 0.5.  */
  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  /* Fill const_tiny_rtx[0..2] with the shared 0, 1 and 2 constants for
     every float, decimal-float, integer and partial-integer mode.  */
  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  /* Row 3 holds the shared -1 ("all ones") constants.  */
  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  /* For BImode, 1 and -1 are unsigned and signed interpretations
     of the same value.  (These overrides must follow the generic
     MODE_INT loops above.)  */
  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  const_tiny_rtx[1][(int) BImode] = const_true_rtx;
  const_tiny_rtx[3][(int) BImode] = const_true_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  /* A complex zero is a CONCAT of the inner mode's zero for both the
     real and imaginary part.  */
  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  /* As for BImode, "all 1" and "all -1" are unsigned and signed
     interpretations of the same value.  */
  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
      const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  /* Fixed-point scalar modes: build the shared 0 constant (and, for the
     accumulator classes below, also the shared 1) via the per-mode
     FCONST0/FCONST1 caches.  */
  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);

      /* We store the value 1.  */
      FCONST1 (smode).data.high = 0;
      FCONST1 (smode).data.low = 0;
      FCONST1 (smode).mode = smode;
      FCONST1 (smode).data
	= double_int_one.lshift (GET_MODE_FBIT (smode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (smode));
      const_tiny_rtx[1][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);

      /* We store the value 1.  */
      FCONST1 (smode).data.high = 0;
      FCONST1 (smode).data.low = 0;
      FCONST1 (smode).mode = smode;
      FCONST1 (smode).data
	= double_int_one.lshift (GET_MODE_FBIT (smode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (smode));
      const_tiny_rtx[1][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    }

  /* Fixed-point vector modes get their shared zeros (and ones for the
     accumulator classes).  */
  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  /* Condition-code modes share const0_rtx as their zero.  */
  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_POINTER_BOUNDS)
    {
      scalar_mode smode = smode_iter.require ();
      wide_int wi_zero = wi::zero (GET_MODE_PRECISION (smode));
      const_tiny_rtx[0][smode] = immed_wide_int_const (wi_zero, smode);
    }

  /* Unique rtxes for the special codes PC, RETURN, SIMPLE_RETURN, CC0,
     plus the shared "invalid insn" placeholder.  */
  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
  invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
				   /*prev_insn=*/NULL,
				   /*next_insn=*/NULL,
				   /*bb=*/NULL,
				   /*pattern=*/NULL_RTX,
				   /*location=*/-1,
				   CODE_FOR_nothing,
				   /*reg_notes=*/NULL_RTX);
}
6442*38fd1498Szrj 
/* Produce exact duplicate of insn INSN after AFTER.
   Care updating of libcall regions if present.
   Returns the newly emitted insn.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  /* Emit a copy of INSN's pattern with the emit_*_after variant that
     matches INSN's rtx code, and propagate the per-code flags that
     copy_insn itself does not copy.  */
  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      /* Only the four insn kinds above can be copied.  */
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Locate the end of existing REG_NOTES in NEW_RTX, so copied notes
     are appended after any notes mark_jump_label already created.  */
  rtx *ptail = &REG_NOTES (new_rtx);
  while (*ptail != NULL_RTX)
    ptail = &XEXP (*ptail, 1);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	*ptail = duplicate_reg_note (link);
	ptail = &XEXP (*ptail, 1);
      }

  /* The copy recognizes as the same pattern, so reuse INSN_CODE.  */
  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
6511*38fd1498Szrj 
6512*38fd1498Szrj static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6513*38fd1498Szrj rtx
gen_hard_reg_clobber(machine_mode mode,unsigned int regno)6514*38fd1498Szrj gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
6515*38fd1498Szrj {
6516*38fd1498Szrj   if (hard_reg_clobbers[mode][regno])
6517*38fd1498Szrj     return hard_reg_clobbers[mode][regno];
6518*38fd1498Szrj   else
6519*38fd1498Szrj     return (hard_reg_clobbers[mode][regno] =
6520*38fd1498Szrj 	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6521*38fd1498Szrj }
6522*38fd1498Szrj 
/* Locations attached to insns emitted for the function prologue and
   epilogue, respectively; set up lazily (see insn_locations_init and
   insn_locations_finalize below).  */
location_t prologue_location;
location_t epilogue_location;

/* Hold current location information and last location information, so the
   datastructures are built lazily only when some instructions in given
   place are needed.  */
static location_t curr_location;
6530*38fd1498Szrj 
6531*38fd1498Szrj /* Allocate insn location datastructure.  */
6532*38fd1498Szrj void
insn_locations_init(void)6533*38fd1498Szrj insn_locations_init (void)
6534*38fd1498Szrj {
6535*38fd1498Szrj   prologue_location = epilogue_location = 0;
6536*38fd1498Szrj   curr_location = UNKNOWN_LOCATION;
6537*38fd1498Szrj }
6538*38fd1498Szrj 
/* At the end of emit stage, clear current location.  */
void
insn_locations_finalize (void)
{
  /* The last location seen becomes the epilogue's location.  */
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}
6546*38fd1498Szrj 
/* Set current location to LOCATION; subsequently emitted insns pick
   this location up.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}
6553*38fd1498Szrj 
/* Get current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}
6560*38fd1498Szrj 
6561*38fd1498Szrj /* Return lexical scope block insn belongs to.  */
6562*38fd1498Szrj tree
insn_scope(const rtx_insn * insn)6563*38fd1498Szrj insn_scope (const rtx_insn *insn)
6564*38fd1498Szrj {
6565*38fd1498Szrj   return LOCATION_BLOCK (INSN_LOCATION (insn));
6566*38fd1498Szrj }
6567*38fd1498Szrj 
6568*38fd1498Szrj /* Return line number of the statement that produced this insn.  */
6569*38fd1498Szrj int
insn_line(const rtx_insn * insn)6570*38fd1498Szrj insn_line (const rtx_insn *insn)
6571*38fd1498Szrj {
6572*38fd1498Szrj   return LOCATION_LINE (INSN_LOCATION (insn));
6573*38fd1498Szrj }
6574*38fd1498Szrj 
6575*38fd1498Szrj /* Return source file of the statement that produced this insn.  */
6576*38fd1498Szrj const char *
insn_file(const rtx_insn * insn)6577*38fd1498Szrj insn_file (const rtx_insn *insn)
6578*38fd1498Szrj {
6579*38fd1498Szrj   return LOCATION_FILE (INSN_LOCATION (insn));
6580*38fd1498Szrj }
6581*38fd1498Szrj 
6582*38fd1498Szrj /* Return expanded location of the statement that produced this insn.  */
6583*38fd1498Szrj expanded_location
insn_location(const rtx_insn * insn)6584*38fd1498Szrj insn_location (const rtx_insn *insn)
6585*38fd1498Szrj {
6586*38fd1498Szrj   return expand_location (INSN_LOCATION (insn));
6587*38fd1498Szrj }
6588*38fd1498Szrj 
6589*38fd1498Szrj /* Return true if memory model MODEL requires a pre-operation (release-style)
6590*38fd1498Szrj    barrier or a post-operation (acquire-style) barrier.  While not universal,
6591*38fd1498Szrj    this function matches behavior of several targets.  */
6592*38fd1498Szrj 
6593*38fd1498Szrj bool
need_atomic_barrier_p(enum memmodel model,bool pre)6594*38fd1498Szrj need_atomic_barrier_p (enum memmodel model, bool pre)
6595*38fd1498Szrj {
6596*38fd1498Szrj   switch (model & MEMMODEL_BASE_MASK)
6597*38fd1498Szrj     {
6598*38fd1498Szrj     case MEMMODEL_RELAXED:
6599*38fd1498Szrj     case MEMMODEL_CONSUME:
6600*38fd1498Szrj       return false;
6601*38fd1498Szrj     case MEMMODEL_RELEASE:
6602*38fd1498Szrj       return pre;
6603*38fd1498Szrj     case MEMMODEL_ACQUIRE:
6604*38fd1498Szrj       return !pre;
6605*38fd1498Szrj     case MEMMODEL_ACQ_REL:
6606*38fd1498Szrj     case MEMMODEL_SEQ_CST:
6607*38fd1498Szrj       return true;
6608*38fd1498Szrj     default:
6609*38fd1498Szrj       gcc_unreachable ();
6610*38fd1498Szrj     }
6611*38fd1498Szrj }
6612*38fd1498Szrj 
6613*38fd1498Szrj /* Return a constant shift amount for shifting a value of mode MODE
6614*38fd1498Szrj    by VALUE bits.  */
6615*38fd1498Szrj 
6616*38fd1498Szrj rtx
gen_int_shift_amount(machine_mode,poly_int64 value)6617*38fd1498Szrj gen_int_shift_amount (machine_mode, poly_int64 value)
6618*38fd1498Szrj {
6619*38fd1498Szrj   /* Use a 64-bit mode, to avoid any truncation.
6620*38fd1498Szrj 
6621*38fd1498Szrj      ??? Perhaps this should be automatically derived from the .md files
6622*38fd1498Szrj      instead, or perhaps have a target hook.  */
6623*38fd1498Szrj   scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
6624*38fd1498Szrj 				? DImode
6625*38fd1498Szrj 				: int_mode_for_size (64, 0).require ());
6626*38fd1498Szrj   return gen_int_mode (value, shift_mode);
6627*38fd1498Szrj }
6628*38fd1498Szrj 
6629*38fd1498Szrj /* Initialize fields of rtl_data related to stack alignment.  */
6630*38fd1498Szrj 
6631*38fd1498Szrj void
init_stack_alignment()6632*38fd1498Szrj rtl_data::init_stack_alignment ()
6633*38fd1498Szrj {
6634*38fd1498Szrj   stack_alignment_needed = STACK_BOUNDARY;
6635*38fd1498Szrj   max_used_stack_slot_alignment = STACK_BOUNDARY;
6636*38fd1498Szrj   stack_alignment_estimated = 0;
6637*38fd1498Szrj   preferred_stack_boundary = STACK_BOUNDARY;
6638*38fd1498Szrj }
6639*38fd1498Szrj 
6640*38fd1498Szrj 
6641*38fd1498Szrj #include "gt-emit-rtl.h"
6642