xref: /netbsd/external/gpl3/gcc.old/dist/gcc/emit-rtl.c (revision c7a68eb7)
110d565efSmrg /* Emit RTL for the GCC expander.
2*c7a68eb7Smrg    Copyright (C) 1987-2018 Free Software Foundation, Inc.
310d565efSmrg 
410d565efSmrg This file is part of GCC.
510d565efSmrg 
610d565efSmrg GCC is free software; you can redistribute it and/or modify it under
710d565efSmrg the terms of the GNU General Public License as published by the Free
810d565efSmrg Software Foundation; either version 3, or (at your option) any later
910d565efSmrg version.
1010d565efSmrg 
1110d565efSmrg GCC is distributed in the hope that it will be useful, but WITHOUT ANY
1210d565efSmrg WARRANTY; without even the implied warranty of MERCHANTABILITY or
1310d565efSmrg FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
1410d565efSmrg for more details.
1510d565efSmrg 
1610d565efSmrg You should have received a copy of the GNU General Public License
1710d565efSmrg along with GCC; see the file COPYING3.  If not see
1810d565efSmrg <http://www.gnu.org/licenses/>.  */
1910d565efSmrg 
2010d565efSmrg 
2110d565efSmrg /* Middle-to-low level generation of rtx code and insns.
2210d565efSmrg 
2310d565efSmrg    This file contains support functions for creating rtl expressions
2410d565efSmrg    and manipulating them in the doubly-linked chain of insns.
2510d565efSmrg 
2610d565efSmrg    The patterns of the insns are created by machine-dependent
2710d565efSmrg    routines in insn-emit.c, which is generated automatically from
2810d565efSmrg    the machine description.  These routines make the individual rtx's
2910d565efSmrg    of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
3010d565efSmrg    which are automatically generated from rtl.def; what is machine
3110d565efSmrg    dependent is the kind of rtx's they make and what arguments they
3210d565efSmrg    use.  */
3310d565efSmrg 
3410d565efSmrg #include "config.h"
3510d565efSmrg #include "system.h"
3610d565efSmrg #include "coretypes.h"
3710d565efSmrg #include "memmodel.h"
3810d565efSmrg #include "backend.h"
3910d565efSmrg #include "target.h"
4010d565efSmrg #include "rtl.h"
4110d565efSmrg #include "tree.h"
4210d565efSmrg #include "df.h"
4310d565efSmrg #include "tm_p.h"
4410d565efSmrg #include "stringpool.h"
4510d565efSmrg #include "insn-config.h"
4610d565efSmrg #include "regs.h"
4710d565efSmrg #include "emit-rtl.h"
4810d565efSmrg #include "recog.h"
4910d565efSmrg #include "diagnostic-core.h"
5010d565efSmrg #include "alias.h"
5110d565efSmrg #include "fold-const.h"
5210d565efSmrg #include "varasm.h"
5310d565efSmrg #include "cfgrtl.h"
5410d565efSmrg #include "tree-eh.h"
5510d565efSmrg #include "explow.h"
5610d565efSmrg #include "expr.h"
5710d565efSmrg #include "params.h"
5810d565efSmrg #include "builtins.h"
5910d565efSmrg #include "rtl-iter.h"
6010d565efSmrg #include "stor-layout.h"
6110d565efSmrg #include "opts.h"
62*c7a68eb7Smrg #include "predict.h"
63*c7a68eb7Smrg #include "rtx-vector-builder.h"
6410d565efSmrg 
6510d565efSmrg struct target_rtl default_target_rtl;
6610d565efSmrg #if SWITCHABLE_TARGET
6710d565efSmrg struct target_rtl *this_target_rtl = &default_target_rtl;
6810d565efSmrg #endif
6910d565efSmrg 
7010d565efSmrg #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
7110d565efSmrg 
7210d565efSmrg /* Commonly used modes.  */
7310d565efSmrg 
74*c7a68eb7Smrg scalar_int_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
75*c7a68eb7Smrg scalar_int_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
76*c7a68eb7Smrg scalar_int_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */
7710d565efSmrg 
7810d565efSmrg /* Datastructures maintained for currently processed function in RTL form.  */
7910d565efSmrg 
8010d565efSmrg struct rtl_data x_rtl;
8110d565efSmrg 
8210d565efSmrg /* Indexed by pseudo register number, gives the rtx for that pseudo.
8310d565efSmrg    Allocated in parallel with regno_pointer_align.
8410d565efSmrg    FIXME: We could put it into the emit_status struct, but gengtype cannot
8510d565efSmrg    handle a length attribute nested inside a top-level structure.  */
8610d565efSmrg 
8710d565efSmrg rtx * regno_reg_rtx;
8810d565efSmrg 
8910d565efSmrg /* This is *not* reset after each function.  It gives each CODE_LABEL
9010d565efSmrg    in the entire compilation a unique label number.  */
9110d565efSmrg 
9210d565efSmrg static GTY(()) int label_num = 1;
9310d565efSmrg 
9410d565efSmrg /* We record floating-point CONST_DOUBLEs in each floating-point mode for
9510d565efSmrg    the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
9610d565efSmrg    record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
9710d565efSmrg    is set only for MODE_INT and MODE_VECTOR_INT modes.  */
9810d565efSmrg 
9910d565efSmrg rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
10010d565efSmrg 
10110d565efSmrg rtx const_true_rtx;
10210d565efSmrg 
10310d565efSmrg REAL_VALUE_TYPE dconst0;
10410d565efSmrg REAL_VALUE_TYPE dconst1;
10510d565efSmrg REAL_VALUE_TYPE dconst2;
10610d565efSmrg REAL_VALUE_TYPE dconstm1;
10710d565efSmrg REAL_VALUE_TYPE dconsthalf;
10810d565efSmrg 
10910d565efSmrg /* Record fixed-point constant 0 and 1.  */
11010d565efSmrg FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
11110d565efSmrg FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
11210d565efSmrg 
11310d565efSmrg /* We make one copy of (const_int C) where C is in
11410d565efSmrg    [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
11510d565efSmrg    to save space during the compilation and simplify comparisons of
11610d565efSmrg    integers.  */
11710d565efSmrg 
11810d565efSmrg rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
11910d565efSmrg 
12010d565efSmrg /* Standard pieces of rtx, to be substituted directly into things.  */
12110d565efSmrg rtx pc_rtx;
12210d565efSmrg rtx ret_rtx;
12310d565efSmrg rtx simple_return_rtx;
12410d565efSmrg rtx cc0_rtx;
12510d565efSmrg 
12610d565efSmrg /* Marker used for denoting an INSN, which should never be accessed (i.e.,
12710d565efSmrg    this pointer should normally never be dereferenced), but is required to be
12810d565efSmrg    distinct from NULL_RTX.  Currently used by peephole2 pass.  */
12910d565efSmrg rtx_insn *invalid_insn_rtx;
13010d565efSmrg 
13110d565efSmrg /* A hash table storing CONST_INTs whose absolute value is greater
13210d565efSmrg    than MAX_SAVED_CONST_INT.  */
13310d565efSmrg 
13410d565efSmrg struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
13510d565efSmrg {
13610d565efSmrg   typedef HOST_WIDE_INT compare_type;
13710d565efSmrg 
13810d565efSmrg   static hashval_t hash (rtx i);
13910d565efSmrg   static bool equal (rtx i, HOST_WIDE_INT h);
14010d565efSmrg };
14110d565efSmrg 
14210d565efSmrg static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
14310d565efSmrg 
14410d565efSmrg struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
14510d565efSmrg {
14610d565efSmrg   static hashval_t hash (rtx x);
14710d565efSmrg   static bool equal (rtx x, rtx y);
14810d565efSmrg };
14910d565efSmrg 
15010d565efSmrg static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
15110d565efSmrg 
152*c7a68eb7Smrg struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
153*c7a68eb7Smrg {
154*c7a68eb7Smrg   typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;
155*c7a68eb7Smrg 
156*c7a68eb7Smrg   static hashval_t hash (rtx x);
157*c7a68eb7Smrg   static bool equal (rtx x, const compare_type &y);
158*c7a68eb7Smrg };
159*c7a68eb7Smrg 
160*c7a68eb7Smrg static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;
161*c7a68eb7Smrg 
16210d565efSmrg /* A hash table storing register attribute structures.  */
16310d565efSmrg struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
16410d565efSmrg {
16510d565efSmrg   static hashval_t hash (reg_attrs *x);
16610d565efSmrg   static bool equal (reg_attrs *a, reg_attrs *b);
16710d565efSmrg };
16810d565efSmrg 
16910d565efSmrg static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
17010d565efSmrg 
17110d565efSmrg /* A hash table storing all CONST_DOUBLEs.  */
17210d565efSmrg struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
17310d565efSmrg {
17410d565efSmrg   static hashval_t hash (rtx x);
17510d565efSmrg   static bool equal (rtx x, rtx y);
17610d565efSmrg };
17710d565efSmrg 
17810d565efSmrg static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
17910d565efSmrg 
18010d565efSmrg /* A hash table storing all CONST_FIXEDs.  */
18110d565efSmrg struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
18210d565efSmrg {
18310d565efSmrg   static hashval_t hash (rtx x);
18410d565efSmrg   static bool equal (rtx x, rtx y);
18510d565efSmrg };
18610d565efSmrg 
18710d565efSmrg static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
18810d565efSmrg 
18910d565efSmrg #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
19010d565efSmrg #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
19110d565efSmrg #define first_label_num (crtl->emit.x_first_label_num)
19210d565efSmrg 
19310d565efSmrg static void set_used_decls (tree);
19410d565efSmrg static void mark_label_nuses (rtx);
19510d565efSmrg #if TARGET_SUPPORTS_WIDE_INT
19610d565efSmrg static rtx lookup_const_wide_int (rtx);
19710d565efSmrg #endif
19810d565efSmrg static rtx lookup_const_double (rtx);
19910d565efSmrg static rtx lookup_const_fixed (rtx);
20010d565efSmrg static rtx gen_const_vector (machine_mode, int);
20110d565efSmrg static void copy_rtx_if_shared_1 (rtx *orig);
20210d565efSmrg 
203*c7a68eb7Smrg /* Probability of the conditional branch currently being processed by try_split.  */
204*c7a68eb7Smrg profile_probability split_branch_probability;
20510d565efSmrg 
20610d565efSmrg /* Returns a hash code for X (which is really a CONST_INT).  */
20710d565efSmrg 
20810d565efSmrg hashval_t
20910d565efSmrg const_int_hasher::hash (rtx x)
21010d565efSmrg {
21110d565efSmrg   return (hashval_t) INTVAL (x);
21210d565efSmrg }
21310d565efSmrg 
21410d565efSmrg /* Returns nonzero if the value represented by X (which is really a
21510d565efSmrg    CONST_INT) is the same as that given by Y (which is really a
21610d565efSmrg    HOST_WIDE_INT).  */
21710d565efSmrg 
21810d565efSmrg bool
21910d565efSmrg const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
22010d565efSmrg {
22110d565efSmrg   return (INTVAL (x) == y);
22210d565efSmrg }
22310d565efSmrg 
22410d565efSmrg #if TARGET_SUPPORTS_WIDE_INT
22510d565efSmrg /* Returns a hash code for X (which is really a CONST_WIDE_INT).  */
22610d565efSmrg 
22710d565efSmrg hashval_t
22810d565efSmrg const_wide_int_hasher::hash (rtx x)
22910d565efSmrg {
23010d565efSmrg   int i;
23110d565efSmrg   unsigned HOST_WIDE_INT hash = 0;
23210d565efSmrg   const_rtx xr = x;
23310d565efSmrg 
23410d565efSmrg   for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
23510d565efSmrg     hash += CONST_WIDE_INT_ELT (xr, i);
23610d565efSmrg 
23710d565efSmrg   return (hashval_t) hash;
23810d565efSmrg }
23910d565efSmrg 
24010d565efSmrg /* Returns nonzero if the value represented by X (which is really a
24110d565efSmrg    CONST_WIDE_INT) is the same as that given by Y (which is really a
24210d565efSmrg    CONST_WIDE_INT).  */
24310d565efSmrg 
24410d565efSmrg bool
24510d565efSmrg const_wide_int_hasher::equal (rtx x, rtx y)
24610d565efSmrg {
24710d565efSmrg   int i;
24810d565efSmrg   const_rtx xr = x;
24910d565efSmrg   const_rtx yr = y;
25010d565efSmrg   if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
25110d565efSmrg     return false;
25210d565efSmrg 
25310d565efSmrg   for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
25410d565efSmrg     if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
25510d565efSmrg       return false;
25610d565efSmrg 
25710d565efSmrg   return true;
25810d565efSmrg }
25910d565efSmrg #endif
26010d565efSmrg 
261*c7a68eb7Smrg /* Returns a hash code for CONST_POLY_INT X.  */
262*c7a68eb7Smrg 
263*c7a68eb7Smrg hashval_t
264*c7a68eb7Smrg const_poly_int_hasher::hash (rtx x)
265*c7a68eb7Smrg {
266*c7a68eb7Smrg   inchash::hash h;
267*c7a68eb7Smrg   h.add_int (GET_MODE (x));
268*c7a68eb7Smrg   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
269*c7a68eb7Smrg     h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
270*c7a68eb7Smrg   return h.end ();
271*c7a68eb7Smrg }
272*c7a68eb7Smrg 
273*c7a68eb7Smrg /* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y.  */
274*c7a68eb7Smrg 
275*c7a68eb7Smrg bool
276*c7a68eb7Smrg const_poly_int_hasher::equal (rtx x, const compare_type &y)
277*c7a68eb7Smrg {
278*c7a68eb7Smrg   if (GET_MODE (x) != y.first)
279*c7a68eb7Smrg     return false;
280*c7a68eb7Smrg   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
281*c7a68eb7Smrg     if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
282*c7a68eb7Smrg       return false;
283*c7a68eb7Smrg   return true;
284*c7a68eb7Smrg }
285*c7a68eb7Smrg 
28610d565efSmrg /* Returns a hash code for X (which is really a CONST_DOUBLE).  */
28710d565efSmrg hashval_t
28810d565efSmrg const_double_hasher::hash (rtx x)
28910d565efSmrg {
29010d565efSmrg   const_rtx const value = x;
29110d565efSmrg   hashval_t h;
29210d565efSmrg 
29310d565efSmrg   if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
29410d565efSmrg     h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
29510d565efSmrg   else
29610d565efSmrg     {
29710d565efSmrg       h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
29810d565efSmrg       /* MODE is used in the comparison, so it should be in the hash.  */
29910d565efSmrg       h ^= GET_MODE (value);
30010d565efSmrg     }
30110d565efSmrg   return h;
30210d565efSmrg }
30310d565efSmrg 
30410d565efSmrg /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
30510d565efSmrg    is the same as that represented by Y (really a CONST_DOUBLE).  */
30610d565efSmrg bool
30710d565efSmrg const_double_hasher::equal (rtx x, rtx y)
30810d565efSmrg {
30910d565efSmrg   const_rtx const a = x, b = y;
31010d565efSmrg 
31110d565efSmrg   if (GET_MODE (a) != GET_MODE (b))
31210d565efSmrg     return 0;
31310d565efSmrg   if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
31410d565efSmrg     return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
31510d565efSmrg 	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
31610d565efSmrg   else
31710d565efSmrg     return real_identical (CONST_DOUBLE_REAL_VALUE (a),
31810d565efSmrg 			   CONST_DOUBLE_REAL_VALUE (b));
31910d565efSmrg }
32010d565efSmrg 
32110d565efSmrg /* Returns a hash code for X (which is really a CONST_FIXED).  */
32210d565efSmrg 
32310d565efSmrg hashval_t
32410d565efSmrg const_fixed_hasher::hash (rtx x)
32510d565efSmrg {
32610d565efSmrg   const_rtx const value = x;
32710d565efSmrg   hashval_t h;
32810d565efSmrg 
32910d565efSmrg   h = fixed_hash (CONST_FIXED_VALUE (value));
33010d565efSmrg   /* MODE is used in the comparison, so it should be in the hash.  */
33110d565efSmrg   h ^= GET_MODE (value);
33210d565efSmrg   return h;
33310d565efSmrg }
33410d565efSmrg 
33510d565efSmrg /* Returns nonzero if the value represented by X is the same as that
33610d565efSmrg    represented by Y.  */
33710d565efSmrg 
33810d565efSmrg bool
33910d565efSmrg const_fixed_hasher::equal (rtx x, rtx y)
34010d565efSmrg {
34110d565efSmrg   const_rtx const a = x, b = y;
34210d565efSmrg 
34310d565efSmrg   if (GET_MODE (a) != GET_MODE (b))
34410d565efSmrg     return 0;
34510d565efSmrg   return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
34610d565efSmrg }
34710d565efSmrg 
34810d565efSmrg /* Return true if the given memory attributes are equal.  */
34910d565efSmrg 
35010d565efSmrg bool
35110d565efSmrg mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
35210d565efSmrg {
35310d565efSmrg   if (p == q)
35410d565efSmrg     return true;
35510d565efSmrg   if (!p || !q)
35610d565efSmrg     return false;
35710d565efSmrg   return (p->alias == q->alias
35810d565efSmrg 	  && p->offset_known_p == q->offset_known_p
359*c7a68eb7Smrg 	  && (!p->offset_known_p || known_eq (p->offset, q->offset))
36010d565efSmrg 	  && p->size_known_p == q->size_known_p
361*c7a68eb7Smrg 	  && (!p->size_known_p || known_eq (p->size, q->size))
36210d565efSmrg 	  && p->align == q->align
36310d565efSmrg 	  && p->addrspace == q->addrspace
36410d565efSmrg 	  && (p->expr == q->expr
36510d565efSmrg 	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
36610d565efSmrg 		  && operand_equal_p (p->expr, q->expr, 0))));
36710d565efSmrg }
36810d565efSmrg 
36910d565efSmrg /* Set MEM's memory attributes so that they are the same as ATTRS.  */
37010d565efSmrg 
37110d565efSmrg static void
37210d565efSmrg set_mem_attrs (rtx mem, mem_attrs *attrs)
37310d565efSmrg {
37410d565efSmrg   /* If everything is the default, we can just clear the attributes.  */
37510d565efSmrg   if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
37610d565efSmrg     {
37710d565efSmrg       MEM_ATTRS (mem) = 0;
37810d565efSmrg       return;
37910d565efSmrg     }
38010d565efSmrg 
38110d565efSmrg   if (!MEM_ATTRS (mem)
38210d565efSmrg       || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
38310d565efSmrg     {
38410d565efSmrg       MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
38510d565efSmrg       memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
38610d565efSmrg     }
38710d565efSmrg }
38810d565efSmrg 
38910d565efSmrg /* Returns a hash code for X (which is really a reg_attrs *).  */
39010d565efSmrg 
39110d565efSmrg hashval_t
39210d565efSmrg reg_attr_hasher::hash (reg_attrs *x)
39310d565efSmrg {
39410d565efSmrg   const reg_attrs *const p = x;
39510d565efSmrg 
396*c7a68eb7Smrg   inchash::hash h;
397*c7a68eb7Smrg   h.add_ptr (p->decl);
398*c7a68eb7Smrg   h.add_poly_hwi (p->offset);
399*c7a68eb7Smrg   return h.end ();
40010d565efSmrg }
40110d565efSmrg 
40210d565efSmrg /* Returns nonzero if the value represented by X is the same as that given by
40310d565efSmrg    Y.  */
40410d565efSmrg 
40510d565efSmrg bool
40610d565efSmrg reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
40710d565efSmrg {
40810d565efSmrg   const reg_attrs *const p = x;
40910d565efSmrg   const reg_attrs *const q = y;
41010d565efSmrg 
411*c7a68eb7Smrg   return (p->decl == q->decl && known_eq (p->offset, q->offset));
41210d565efSmrg }
41310d565efSmrg /* Allocate a new reg_attrs structure for DECL and OFFSET and insert it
41410d565efSmrg    into the hash table, unless an identical structure is already in the
41510d565efSmrg    table, in which case the existing one is returned.  */
41610d565efSmrg 
41710d565efSmrg static reg_attrs *
418*c7a68eb7Smrg get_reg_attrs (tree decl, poly_int64 offset)
41910d565efSmrg {
42010d565efSmrg   reg_attrs attrs;
42110d565efSmrg 
42210d565efSmrg   /* If everything is the default, we can just return zero.  */
423*c7a68eb7Smrg   if (decl == 0 && known_eq (offset, 0))
42410d565efSmrg     return 0;
42510d565efSmrg 
42610d565efSmrg   attrs.decl = decl;
42710d565efSmrg   attrs.offset = offset;
42810d565efSmrg 
42910d565efSmrg   reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
43010d565efSmrg   if (*slot == 0)
43110d565efSmrg     {
43210d565efSmrg       *slot = ggc_alloc<reg_attrs> ();
43310d565efSmrg       memcpy (*slot, &attrs, sizeof (reg_attrs));
43410d565efSmrg     }
43510d565efSmrg 
43610d565efSmrg   return *slot;
43710d565efSmrg }
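
/* A minimal sketch (hypothetical helper, not used anywhere else): because
   get_reg_attrs interns identical (decl, offset) pairs into one structure,
   the result can be compared by pointer.  */
#if 0
static bool
example_reg_attrs_are_shared (tree decl)
{
  reg_attrs *a = get_reg_attrs (decl, 4);
  reg_attrs *b = get_reg_attrs (decl, 4);
  return a == b;
}
#endif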
43810d565efSmrg 
43910d565efSmrg 
44010d565efSmrg #if !HAVE_blockage
44110d565efSmrg /* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
44210d565efSmrg    and to prevent register equivalences from being seen across this insn.  */
44310d565efSmrg 
44410d565efSmrg rtx
44510d565efSmrg gen_blockage (void)
44610d565efSmrg {
44710d565efSmrg   rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
44810d565efSmrg   MEM_VOLATILE_P (x) = true;
44910d565efSmrg   return x;
45010d565efSmrg }
45110d565efSmrg #endif
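
/* A minimal sketch (hypothetical usage, assuming the target has no blockage
   pattern of its own): an expander can emit a blockage to keep the scheduler
   from moving accesses across this point.  */
#if 0
static void
example_emit_blockage (void)
{
  emit_insn (gen_blockage ());
}
#endif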
45210d565efSmrg 
45310d565efSmrg 
45410d565efSmrg /* Set the mode and register number of X to MODE and REGNO.  */
45510d565efSmrg 
45610d565efSmrg void
45710d565efSmrg set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
45810d565efSmrg {
45910d565efSmrg   unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
460*c7a68eb7Smrg 			? hard_regno_nregs (regno, mode)
46110d565efSmrg 			: 1);
46210d565efSmrg   PUT_MODE_RAW (x, mode);
46310d565efSmrg   set_regno_raw (x, regno, nregs);
46410d565efSmrg }
46510d565efSmrg 
46610d565efSmrg /* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
46710d565efSmrg    don't attempt to share with the various global pieces of rtl (such as
46810d565efSmrg    frame_pointer_rtx).  */
46910d565efSmrg 
47010d565efSmrg rtx
47110d565efSmrg gen_raw_REG (machine_mode mode, unsigned int regno)
47210d565efSmrg {
473*c7a68eb7Smrg   rtx x = rtx_alloc (REG MEM_STAT_INFO);
47410d565efSmrg   set_mode_and_regno (x, mode, regno);
47510d565efSmrg   REG_ATTRS (x) = NULL;
47610d565efSmrg   ORIGINAL_REGNO (x) = regno;
47710d565efSmrg   return x;
47810d565efSmrg }
47910d565efSmrg 
48010d565efSmrg /* There are some RTL codes that require special attention; the generation
48110d565efSmrg    functions do the raw handling.  If you add to this list, modify
48210d565efSmrg    special_rtx in gengenrtl.c as well.  */
48310d565efSmrg 
48410d565efSmrg rtx_expr_list *
48510d565efSmrg gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
48610d565efSmrg {
48710d565efSmrg   return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
48810d565efSmrg 						 expr_list));
48910d565efSmrg }
49010d565efSmrg 
49110d565efSmrg rtx_insn_list *
49210d565efSmrg gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
49310d565efSmrg {
49410d565efSmrg   return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
49510d565efSmrg 						 insn_list));
49610d565efSmrg }
49710d565efSmrg 
49810d565efSmrg rtx_insn *
49910d565efSmrg gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
50010d565efSmrg 	      basic_block bb, rtx pattern, int location, int code,
50110d565efSmrg 	      rtx reg_notes)
50210d565efSmrg {
50310d565efSmrg   return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
50410d565efSmrg 						 prev_insn, next_insn,
50510d565efSmrg 						 bb, pattern, location, code,
50610d565efSmrg 						 reg_notes));
50710d565efSmrg }
50810d565efSmrg 
50910d565efSmrg rtx
51010d565efSmrg gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
51110d565efSmrg {
51210d565efSmrg   if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
51310d565efSmrg     return const_int_rtx[arg + MAX_SAVED_CONST_INT];
51410d565efSmrg 
51510d565efSmrg #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
51610d565efSmrg   if (const_true_rtx && arg == STORE_FLAG_VALUE)
51710d565efSmrg     return const_true_rtx;
51810d565efSmrg #endif
51910d565efSmrg 
52010d565efSmrg   /* Look up the CONST_INT in the hash table.  */
52110d565efSmrg   rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
52210d565efSmrg 						   INSERT);
52310d565efSmrg   if (*slot == 0)
52410d565efSmrg     *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
52510d565efSmrg 
52610d565efSmrg   return *slot;
52710d565efSmrg }
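
/* A minimal sketch (hypothetical helper): small integers come from the
   preallocated const_int_rtx table, so equal requests return the same
   object and can be compared by pointer.  */
#if 0
static bool
example_const_int_sharing (void)
{
  rtx a = GEN_INT (0);
  rtx b = gen_rtx_CONST_INT (VOIDmode, 0);
  return a == b && a == const0_rtx;
}
#endif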
52810d565efSmrg 
52910d565efSmrg rtx
530*c7a68eb7Smrg gen_int_mode (poly_int64 c, machine_mode mode)
53110d565efSmrg {
532*c7a68eb7Smrg   c = trunc_int_for_mode (c, mode);
533*c7a68eb7Smrg   if (c.is_constant ())
534*c7a68eb7Smrg     return GEN_INT (c.coeffs[0]);
535*c7a68eb7Smrg   unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
536*c7a68eb7Smrg   return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
53710d565efSmrg }
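
/* A minimal sketch (hypothetical helper, assuming the usual 8-bit QImode):
   gen_int_mode truncates the value to the precision of MODE, so 511 in
   QImode becomes the shared -1 constant.  */
#if 0
static bool
example_gen_int_mode_truncates (void)
{
  return gen_int_mode (511, QImode) == constm1_rtx;
}
#endif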
53810d565efSmrg 
53910d565efSmrg /* CONST_DOUBLEs might be created from pairs of integers, or from
54010d565efSmrg    REAL_VALUE_TYPEs.  Also, their length is known only at run time,
54110d565efSmrg    so we cannot use gen_rtx_raw_CONST_DOUBLE.  */
54210d565efSmrg 
54310d565efSmrg /* Determine whether REAL, a CONST_DOUBLE, already exists in the
54410d565efSmrg    hash table.  If so, return its counterpart; otherwise add it
54510d565efSmrg    to the hash table and return it.  */
54610d565efSmrg static rtx
54710d565efSmrg lookup_const_double (rtx real)
54810d565efSmrg {
54910d565efSmrg   rtx *slot = const_double_htab->find_slot (real, INSERT);
55010d565efSmrg   if (*slot == 0)
55110d565efSmrg     *slot = real;
55210d565efSmrg 
55310d565efSmrg   return *slot;
55410d565efSmrg }
55510d565efSmrg 
55610d565efSmrg /* Return a CONST_DOUBLE rtx for a floating-point value specified by
55710d565efSmrg    VALUE in mode MODE.  */
55810d565efSmrg rtx
55910d565efSmrg const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
56010d565efSmrg {
56110d565efSmrg   rtx real = rtx_alloc (CONST_DOUBLE);
56210d565efSmrg   PUT_MODE (real, mode);
56310d565efSmrg 
56410d565efSmrg   real->u.rv = value;
56510d565efSmrg 
56610d565efSmrg   return lookup_const_double (real);
56710d565efSmrg }
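
/* A minimal sketch (hypothetical helper): building the DFmode constant 1.0
   twice yields the same rtx, since lookup_const_double interns the value.  */
#if 0
static bool
example_const_double_interning (void)
{
  rtx a = const_double_from_real_value (dconst1, DFmode);
  rtx b = const_double_from_real_value (dconst1, DFmode);
  return a == b;
}
#endif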
56810d565efSmrg 
56910d565efSmrg /* Determine whether FIXED, a CONST_FIXED, already exists in the
57010d565efSmrg    hash table.  If so, return its counterpart; otherwise add it
57110d565efSmrg    to the hash table and return it.  */
57210d565efSmrg 
57310d565efSmrg static rtx
57410d565efSmrg lookup_const_fixed (rtx fixed)
57510d565efSmrg {
57610d565efSmrg   rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
57710d565efSmrg   if (*slot == 0)
57810d565efSmrg     *slot = fixed;
57910d565efSmrg 
58010d565efSmrg   return *slot;
58110d565efSmrg }
58210d565efSmrg 
58310d565efSmrg /* Return a CONST_FIXED rtx for a fixed-point value specified by
58410d565efSmrg    VALUE in mode MODE.  */
58510d565efSmrg 
58610d565efSmrg rtx
58710d565efSmrg const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
58810d565efSmrg {
58910d565efSmrg   rtx fixed = rtx_alloc (CONST_FIXED);
59010d565efSmrg   PUT_MODE (fixed, mode);
59110d565efSmrg 
59210d565efSmrg   fixed->u.fv = value;
59310d565efSmrg 
59410d565efSmrg   return lookup_const_fixed (fixed);
59510d565efSmrg }
59610d565efSmrg 
59710d565efSmrg #if TARGET_SUPPORTS_WIDE_INT == 0
59810d565efSmrg /* Constructs double_int from rtx CST.  */
59910d565efSmrg 
60010d565efSmrg double_int
60110d565efSmrg rtx_to_double_int (const_rtx cst)
60210d565efSmrg {
60310d565efSmrg   double_int r;
60410d565efSmrg 
60510d565efSmrg   if (CONST_INT_P (cst))
60610d565efSmrg       r = double_int::from_shwi (INTVAL (cst));
60710d565efSmrg   else if (CONST_DOUBLE_AS_INT_P (cst))
60810d565efSmrg     {
60910d565efSmrg       r.low = CONST_DOUBLE_LOW (cst);
61010d565efSmrg       r.high = CONST_DOUBLE_HIGH (cst);
61110d565efSmrg     }
61210d565efSmrg   else
61310d565efSmrg     gcc_unreachable ();
61410d565efSmrg 
61510d565efSmrg   return r;
61610d565efSmrg }
61710d565efSmrg #endif
61810d565efSmrg 
61910d565efSmrg #if TARGET_SUPPORTS_WIDE_INT
62010d565efSmrg /* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
62110d565efSmrg    If so, return its counterpart; otherwise add it to the hash table and
62210d565efSmrg    return it.  */
62310d565efSmrg 
62410d565efSmrg static rtx
62510d565efSmrg lookup_const_wide_int (rtx wint)
62610d565efSmrg {
62710d565efSmrg   rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
62810d565efSmrg   if (*slot == 0)
62910d565efSmrg     *slot = wint;
63010d565efSmrg 
63110d565efSmrg   return *slot;
63210d565efSmrg }
63310d565efSmrg #endif
63410d565efSmrg 
63510d565efSmrg /* Return an rtx constant for V, given that the constant has mode MODE.
63610d565efSmrg    The returned rtx will be a CONST_INT if V fits, otherwise it will be
63710d565efSmrg    a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
63810d565efSmrg    (if TARGET_SUPPORTS_WIDE_INT).  */
63910d565efSmrg 
640*c7a68eb7Smrg static rtx
641*c7a68eb7Smrg immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
64210d565efSmrg {
64310d565efSmrg   unsigned int len = v.get_len ();
644*c7a68eb7Smrg   /* Not scalar_int_mode because we also allow pointer bound modes.  */
645*c7a68eb7Smrg   unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
64610d565efSmrg 
64710d565efSmrg   /* Allow truncation but not extension since we do not know if the
64810d565efSmrg      number is signed or unsigned.  */
64910d565efSmrg   gcc_assert (prec <= v.get_precision ());
65010d565efSmrg 
65110d565efSmrg   if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
65210d565efSmrg     return gen_int_mode (v.elt (0), mode);
65310d565efSmrg 
65410d565efSmrg #if TARGET_SUPPORTS_WIDE_INT
65510d565efSmrg   {
65610d565efSmrg     unsigned int i;
65710d565efSmrg     rtx value;
65810d565efSmrg     unsigned int blocks_needed
65910d565efSmrg       = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;
66010d565efSmrg 
66110d565efSmrg     if (len > blocks_needed)
66210d565efSmrg       len = blocks_needed;
66310d565efSmrg 
66410d565efSmrg     value = const_wide_int_alloc (len);
66510d565efSmrg 
66610d565efSmrg     /* It is so tempting to just put the mode in here.  Must control
66710d565efSmrg        myself ... */
66810d565efSmrg     PUT_MODE (value, VOIDmode);
66910d565efSmrg     CWI_PUT_NUM_ELEM (value, len);
67010d565efSmrg 
67110d565efSmrg     for (i = 0; i < len; i++)
67210d565efSmrg       CONST_WIDE_INT_ELT (value, i) = v.elt (i);
67310d565efSmrg 
67410d565efSmrg     return lookup_const_wide_int (value);
67510d565efSmrg   }
67610d565efSmrg #else
67710d565efSmrg   return immed_double_const (v.elt (0), v.elt (1), mode);
67810d565efSmrg #endif
67910d565efSmrg }
68010d565efSmrg 
68110d565efSmrg #if TARGET_SUPPORTS_WIDE_INT == 0
68210d565efSmrg /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
68310d565efSmrg    of ints: I0 is the low-order word and I1 is the high-order word.
68410d565efSmrg    For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
68510d565efSmrg    implied upper bits are copies of the high bit of i1.  The value
68610d565efSmrg    itself is neither signed nor unsigned.  Do not use this routine for
68710d565efSmrg    non-integer modes; convert to REAL_VALUE_TYPE and use
68810d565efSmrg    const_double_from_real_value.  */
68910d565efSmrg 
69010d565efSmrg rtx
69110d565efSmrg immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
69210d565efSmrg {
69310d565efSmrg   rtx value;
69410d565efSmrg   unsigned int i;
69510d565efSmrg 
69610d565efSmrg   /* There are the following cases (note that there are no modes with
69710d565efSmrg      HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):
69810d565efSmrg 
69910d565efSmrg      1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
70010d565efSmrg 	gen_int_mode.
70110d565efSmrg      2) If the value of the integer fits into HOST_WIDE_INT anyway
70210d565efSmrg         (i.e., i1 consists only of copies of the sign bit, and the
70310d565efSmrg	sign of i0 matches that of i1), then we return a CONST_INT for i0.
70410d565efSmrg      3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
705*c7a68eb7Smrg   scalar_mode smode;
706*c7a68eb7Smrg   if (is_a <scalar_mode> (mode, &smode)
707*c7a68eb7Smrg       && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
70810d565efSmrg     return gen_int_mode (i0, mode);
70910d565efSmrg 
71010d565efSmrg   /* If this integer fits in one word, return a CONST_INT.  */
71110d565efSmrg   if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
71210d565efSmrg     return GEN_INT (i0);
71310d565efSmrg 
71410d565efSmrg   /* We use VOIDmode for integers.  */
71510d565efSmrg   value = rtx_alloc (CONST_DOUBLE);
71610d565efSmrg   PUT_MODE (value, VOIDmode);
71710d565efSmrg 
71810d565efSmrg   CONST_DOUBLE_LOW (value) = i0;
71910d565efSmrg   CONST_DOUBLE_HIGH (value) = i1;
72010d565efSmrg 
72110d565efSmrg   for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
72210d565efSmrg     XWINT (value, i) = 0;
72310d565efSmrg 
72410d565efSmrg   return lookup_const_double (value);
72510d565efSmrg }
72610d565efSmrg #endif
72710d565efSmrg 
728*c7a68eb7Smrg /* Return an rtx representation of C in mode MODE.  */
729*c7a68eb7Smrg 
730*c7a68eb7Smrg rtx
731*c7a68eb7Smrg immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
732*c7a68eb7Smrg {
733*c7a68eb7Smrg   if (c.is_constant ())
734*c7a68eb7Smrg     return immed_wide_int_const_1 (c.coeffs[0], mode);
735*c7a68eb7Smrg 
736*c7a68eb7Smrg   /* Not scalar_int_mode because we also allow pointer bound modes.  */
737*c7a68eb7Smrg   unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
738*c7a68eb7Smrg 
739*c7a68eb7Smrg   /* Allow truncation but not extension since we do not know if the
740*c7a68eb7Smrg      number is signed or unsigned.  */
741*c7a68eb7Smrg   gcc_assert (prec <= c.coeffs[0].get_precision ());
742*c7a68eb7Smrg   poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);
743*c7a68eb7Smrg 
744*c7a68eb7Smrg   /* See whether we already have an rtx for this constant.  */
745*c7a68eb7Smrg   inchash::hash h;
746*c7a68eb7Smrg   h.add_int (mode);
747*c7a68eb7Smrg   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
748*c7a68eb7Smrg     h.add_wide_int (newc.coeffs[i]);
749*c7a68eb7Smrg   const_poly_int_hasher::compare_type typed_value (mode, newc);
750*c7a68eb7Smrg   rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
751*c7a68eb7Smrg 							h.end (), INSERT);
752*c7a68eb7Smrg   rtx x = *slot;
753*c7a68eb7Smrg   if (x)
754*c7a68eb7Smrg     return x;
755*c7a68eb7Smrg 
756*c7a68eb7Smrg   /* Create a new rtx.  There's a choice to be made here between installing
757*c7a68eb7Smrg      the actual mode of the rtx or leaving it as VOIDmode (for consistency
758*c7a68eb7Smrg      with CONST_INT).  In practice the handling of the codes is different
759*c7a68eb7Smrg      enough that we get no benefit from using VOIDmode, and various places
760*c7a68eb7Smrg      assume that VOIDmode implies CONST_INT.  Using the real mode seems like
761*c7a68eb7Smrg      the right long-term direction anyway.  */
762*c7a68eb7Smrg   typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
763*c7a68eb7Smrg   size_t extra_size = twi::extra_size (prec);
764*c7a68eb7Smrg   x = rtx_alloc_v (CONST_POLY_INT,
765*c7a68eb7Smrg 		   sizeof (struct const_poly_int_def) + extra_size);
766*c7a68eb7Smrg   PUT_MODE (x, mode);
767*c7a68eb7Smrg   CONST_POLY_INT_COEFFS (x).set_precision (prec);
768*c7a68eb7Smrg   for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
769*c7a68eb7Smrg     CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];
770*c7a68eb7Smrg 
771*c7a68eb7Smrg   *slot = x;
772*c7a68eb7Smrg   return x;
773*c7a68eb7Smrg }
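
/* A minimal sketch (hypothetical helper, assuming the target provides
   TImode): a value that needs more than one HOST_WIDE_INT comes back as a
   CONST_WIDE_INT or CONST_DOUBLE rather than a CONST_INT, depending on
   TARGET_SUPPORTS_WIDE_INT.  */
#if 0
static rtx
example_wide_constant (void)
{
  /* 2^100 does not fit in a HOST_WIDE_INT.  */
  wide_int w = wi::set_bit_in_zero (100, 128);
  return immed_wide_int_const (w, TImode);
}
#endif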
774*c7a68eb7Smrg 
77510d565efSmrg rtx
77610d565efSmrg gen_rtx_REG (machine_mode mode, unsigned int regno)
77710d565efSmrg {
77810d565efSmrg   /* In case the MD file explicitly references the frame pointer, have
77910d565efSmrg      all such references point to the same frame pointer.  This is
78010d565efSmrg      used during frame pointer elimination to distinguish the explicit
78110d565efSmrg      references to these registers from pseudos that happened to be
78210d565efSmrg      assigned to them.
78310d565efSmrg 
78410d565efSmrg      If we have eliminated the frame pointer or arg pointer, we will
78510d565efSmrg      be using it as a normal register, for example as a spill
78610d565efSmrg      register.  In such cases, we might be accessing it in a mode that
78710d565efSmrg      is not Pmode and therefore cannot use the pre-allocated rtx.
78810d565efSmrg 
78910d565efSmrg      Also don't do this when we are making new REGs in reload, since
79010d565efSmrg      we don't want to get confused with the real pointers.  */
79110d565efSmrg 
79210d565efSmrg   if (mode == Pmode && !reload_in_progress && !lra_in_progress)
79310d565efSmrg     {
79410d565efSmrg       if (regno == FRAME_POINTER_REGNUM
79510d565efSmrg 	  && (!reload_completed || frame_pointer_needed))
79610d565efSmrg 	return frame_pointer_rtx;
79710d565efSmrg 
79810d565efSmrg       if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
79910d565efSmrg 	  && regno == HARD_FRAME_POINTER_REGNUM
80010d565efSmrg 	  && (!reload_completed || frame_pointer_needed))
80110d565efSmrg 	return hard_frame_pointer_rtx;
80210d565efSmrg #if !HARD_FRAME_POINTER_IS_ARG_POINTER
80310d565efSmrg       if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
80410d565efSmrg 	  && regno == ARG_POINTER_REGNUM)
80510d565efSmrg 	return arg_pointer_rtx;
80610d565efSmrg #endif
80710d565efSmrg #ifdef RETURN_ADDRESS_POINTER_REGNUM
80810d565efSmrg       if (regno == RETURN_ADDRESS_POINTER_REGNUM)
80910d565efSmrg 	return return_address_pointer_rtx;
81010d565efSmrg #endif
81110d565efSmrg       if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
81210d565efSmrg 	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
81310d565efSmrg 	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
81410d565efSmrg 	return pic_offset_table_rtx;
81510d565efSmrg       if (regno == STACK_POINTER_REGNUM)
81610d565efSmrg 	return stack_pointer_rtx;
81710d565efSmrg     }
81810d565efSmrg 
81910d565efSmrg #if 0
82010d565efSmrg   /* If the per-function register table has been set up, try to re-use
82110d565efSmrg      an existing entry in that table to avoid useless generation of RTL.
82210d565efSmrg 
82310d565efSmrg      This code is disabled for now until we can fix the various backends
82410d565efSmrg      which depend on having non-shared hard registers in some cases.   Long
82510d565efSmrg      term we want to re-enable this code as it can significantly cut down
82610d565efSmrg      on the amount of useless RTL that gets generated.
82710d565efSmrg 
82810d565efSmrg      We'll also need to fix some code that runs after reload that wants to
82910d565efSmrg      set ORIGINAL_REGNO.  */
83010d565efSmrg 
83110d565efSmrg   if (cfun
83210d565efSmrg       && cfun->emit
83310d565efSmrg       && regno_reg_rtx
83410d565efSmrg       && regno < FIRST_PSEUDO_REGISTER
83510d565efSmrg       && reg_raw_mode[regno] == mode)
83610d565efSmrg     return regno_reg_rtx[regno];
83710d565efSmrg #endif
83810d565efSmrg 
83910d565efSmrg   return gen_raw_REG (mode, regno);
84010d565efSmrg }
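
/* A minimal sketch (hypothetical helper): before reload, asking for the
   frame pointer in Pmode returns the single shared frame_pointer_rtx,
   which is what frame pointer elimination relies on to spot explicit
   references.  */
#if 0
static bool
example_shared_frame_pointer (void)
{
  return gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM) == frame_pointer_rtx;
}
#endif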
84110d565efSmrg 
84210d565efSmrg rtx
84310d565efSmrg gen_rtx_MEM (machine_mode mode, rtx addr)
84410d565efSmrg {
84510d565efSmrg   rtx rt = gen_rtx_raw_MEM (mode, addr);
84610d565efSmrg 
84710d565efSmrg   /* This field is not cleared by the mere allocation of the rtx, so
84810d565efSmrg      we clear it here.  */
84910d565efSmrg   MEM_ATTRS (rt) = 0;
85010d565efSmrg 
85110d565efSmrg   return rt;
85210d565efSmrg }
85310d565efSmrg 
85410d565efSmrg /* Generate a memory referring to non-trapping constant memory.  */
85510d565efSmrg 
85610d565efSmrg rtx
85710d565efSmrg gen_const_mem (machine_mode mode, rtx addr)
85810d565efSmrg {
85910d565efSmrg   rtx mem = gen_rtx_MEM (mode, addr);
86010d565efSmrg   MEM_READONLY_P (mem) = 1;
86110d565efSmrg   MEM_NOTRAP_P (mem) = 1;
86210d565efSmrg   return mem;
86310d565efSmrg }
86410d565efSmrg 
86510d565efSmrg /* Generate a MEM referring to fixed portions of the frame, e.g., register
86610d565efSmrg    save areas.  */
86710d565efSmrg 
86810d565efSmrg rtx
86910d565efSmrg gen_frame_mem (machine_mode mode, rtx addr)
87010d565efSmrg {
87110d565efSmrg   rtx mem = gen_rtx_MEM (mode, addr);
87210d565efSmrg   MEM_NOTRAP_P (mem) = 1;
87310d565efSmrg   set_mem_alias_set (mem, get_frame_alias_set ());
87410d565efSmrg   return mem;
87510d565efSmrg }
87610d565efSmrg 
87710d565efSmrg /* Generate a MEM referring to a temporary use of the stack, not part
87810d565efSmrg     of the fixed stack frame.  For example, something which is pushed
87910d565efSmrg     by a target splitter.  */
88010d565efSmrg rtx
88110d565efSmrg gen_tmp_stack_mem (machine_mode mode, rtx addr)
88210d565efSmrg {
88310d565efSmrg   rtx mem = gen_rtx_MEM (mode, addr);
88410d565efSmrg   MEM_NOTRAP_P (mem) = 1;
88510d565efSmrg   if (!cfun->calls_alloca)
88610d565efSmrg     set_mem_alias_set (mem, get_frame_alias_set ());
88710d565efSmrg   return mem;
88810d565efSmrg }
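
/* A minimal sketch (hypothetical helper): a register-save slot just below
   the stack pointer, expressed as a frame MEM so it is known not to trap
   and lives in the frame alias set.  */
#if 0
static rtx
example_save_slot (void)
{
  rtx addr = plus_constant (Pmode, stack_pointer_rtx, -UNITS_PER_WORD);
  return gen_frame_mem (word_mode, addr);
}
#endif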
88910d565efSmrg 
89010d565efSmrg /* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
89110d565efSmrg    this construct would be valid, and false otherwise.  */
89210d565efSmrg 
89310d565efSmrg bool
89410d565efSmrg validate_subreg (machine_mode omode, machine_mode imode,
895*c7a68eb7Smrg 		 const_rtx reg, poly_uint64 offset)
89610d565efSmrg {
897*c7a68eb7Smrg   poly_uint64 isize = GET_MODE_SIZE (imode);
898*c7a68eb7Smrg   poly_uint64 osize = GET_MODE_SIZE (omode);
899*c7a68eb7Smrg 
900*c7a68eb7Smrg   /* The sizes must be ordered, so that we know whether the subreg
901*c7a68eb7Smrg      is partial, paradoxical or complete.  */
902*c7a68eb7Smrg   if (!ordered_p (isize, osize))
903*c7a68eb7Smrg     return false;
90410d565efSmrg 
90510d565efSmrg   /* All subregs must be aligned.  */
906*c7a68eb7Smrg   if (!multiple_p (offset, osize))
90710d565efSmrg     return false;
90810d565efSmrg 
90910d565efSmrg   /* The subreg offset cannot be outside the inner object.  */
910*c7a68eb7Smrg   if (maybe_ge (offset, isize))
91110d565efSmrg     return false;
91210d565efSmrg 
913*c7a68eb7Smrg   poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);
914*c7a68eb7Smrg 
91510d565efSmrg   /* ??? This should not be here.  Temporarily continue to allow word_mode
91610d565efSmrg      subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
91710d565efSmrg      Generally, backends are doing something sketchy but it'll take time to
91810d565efSmrg      fix them all.  */
91910d565efSmrg   if (omode == word_mode)
92010d565efSmrg     ;
92110d565efSmrg   /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
92210d565efSmrg      is the culprit here, and not the backends.  */
923*c7a68eb7Smrg   else if (known_ge (osize, regsize) && known_ge (isize, osize))
92410d565efSmrg     ;
92510d565efSmrg   /* Allow component subregs of complex and vector.  Though given the below
92610d565efSmrg      extraction rules, it's not always clear what that means.  */
92710d565efSmrg   else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
92810d565efSmrg 	   && GET_MODE_INNER (imode) == omode)
92910d565efSmrg     ;
93010d565efSmrg   /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
93110d565efSmrg      i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
93210d565efSmrg      represent this.  It's questionable if this ought to be represented at
93310d565efSmrg      all -- why can't this all be hidden in post-reload splitters that make
93410d565efSmrg      arbitrarily mode changes to the registers themselves.  */
93510d565efSmrg   else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
93610d565efSmrg     ;
93710d565efSmrg   /* Subregs involving floating point modes are not allowed to
93810d565efSmrg      change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
93910d565efSmrg      (subreg:SI (reg:DF) 0) isn't.  */
94010d565efSmrg   else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
94110d565efSmrg     {
942*c7a68eb7Smrg       if (! (known_eq (isize, osize)
94310d565efSmrg 	     /* LRA can use subreg to store a floating point value in
94410d565efSmrg 		an integer mode.  Although the floating point and the
94510d565efSmrg 		integer modes need the same number of hard registers,
94610d565efSmrg 		the size of floating point mode can be less than the
94710d565efSmrg 		integer mode.  LRA also uses subregs when a register must
94810d565efSmrg 		be used in different modes within a single insn.  */
94910d565efSmrg 	     || lra_in_progress))
95010d565efSmrg 	return false;
95110d565efSmrg     }
95210d565efSmrg 
95310d565efSmrg   /* Paradoxical subregs must have offset zero.  */
954*c7a68eb7Smrg   if (maybe_gt (osize, isize))
955*c7a68eb7Smrg     return known_eq (offset, 0U);
95610d565efSmrg 
95710d565efSmrg   /* This is a normal subreg.  Verify that the offset is representable.  */
95810d565efSmrg 
95910d565efSmrg   /* For hard registers, we already have most of these rules collected in
96010d565efSmrg      subreg_offset_representable_p.  */
96110d565efSmrg   if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
96210d565efSmrg     {
96310d565efSmrg       unsigned int regno = REGNO (reg);
96410d565efSmrg 
96510d565efSmrg       if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
96610d565efSmrg 	  && GET_MODE_INNER (imode) == omode)
96710d565efSmrg 	;
968*c7a68eb7Smrg       else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
96910d565efSmrg 	return false;
97010d565efSmrg 
97110d565efSmrg       return subreg_offset_representable_p (regno, imode, offset, omode);
97210d565efSmrg     }
97310d565efSmrg 
974*c7a68eb7Smrg   /* The outer size must be ordered wrt the register size, otherwise
975*c7a68eb7Smrg      we wouldn't know at compile time how many registers the outer
976*c7a68eb7Smrg      mode occupies.  */
977*c7a68eb7Smrg   if (!ordered_p (osize, regsize))
978*c7a68eb7Smrg     return false;
979*c7a68eb7Smrg 
98010d565efSmrg   /* For pseudo registers, we want most of the same checks.  Namely:
981*c7a68eb7Smrg 
982*c7a68eb7Smrg      Assume that the pseudo register will be allocated to hard registers
983*c7a68eb7Smrg      that can hold REGSIZE bytes each.  If OSIZE is not a multiple of REGSIZE,
984*c7a68eb7Smrg      the remainder must correspond to the lowpart of the containing hard
985*c7a68eb7Smrg      register.  If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
986*c7a68eb7Smrg      otherwise it is at the lowest offset.
987*c7a68eb7Smrg 
988*c7a68eb7Smrg      Given that we've already checked the mode and offset alignment,
989*c7a68eb7Smrg      we only have to check subblock subregs here.  */
990*c7a68eb7Smrg   if (maybe_lt (osize, regsize)
99110d565efSmrg       && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
99210d565efSmrg     {
993*c7a68eb7Smrg       /* It is invalid for the target to pick a register size for a mode
994*c7a68eb7Smrg 	 that isn't ordered wrt to the size of that mode.  */
995*c7a68eb7Smrg       poly_uint64 block_size = ordered_min (isize, regsize);
996*c7a68eb7Smrg       unsigned int start_reg;
997*c7a68eb7Smrg       poly_uint64 offset_within_reg;
998*c7a68eb7Smrg       if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
999*c7a68eb7Smrg 	  || (BYTES_BIG_ENDIAN
1000*c7a68eb7Smrg 	      ? maybe_ne (offset_within_reg, block_size - osize)
1001*c7a68eb7Smrg 	      : maybe_ne (offset_within_reg, 0U)))
100210d565efSmrg 	return false;
100310d565efSmrg     }
100410d565efSmrg   return true;
100510d565efSmrg }
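
/* A minimal sketch (hypothetical helper): the lowpart of a DImode pseudo
   taken in SImode is always a valid subreg, whereas size-changing subregs
   of floating-point values such as (subreg:SI (reg:DF) 0) are rejected
   unless SImode happens to be word_mode or LRA is in progress.  */
#if 0
static void
example_subreg_validity (void)
{
  rtx di = gen_reg_rtx (DImode);
  poly_uint64 off = subreg_lowpart_offset (SImode, DImode);
  gcc_checking_assert (validate_subreg (SImode, DImode, di, off));
}
#endif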
100610d565efSmrg 
100710d565efSmrg rtx
1008*c7a68eb7Smrg gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
100910d565efSmrg {
101010d565efSmrg   gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
101110d565efSmrg   return gen_rtx_raw_SUBREG (mode, reg, offset);
101210d565efSmrg }
101310d565efSmrg 
101410d565efSmrg /* Generate a SUBREG representing the least-significant part of REG if MODE
101510d565efSmrg    is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */
101610d565efSmrg 
101710d565efSmrg rtx
101810d565efSmrg gen_lowpart_SUBREG (machine_mode mode, rtx reg)
101910d565efSmrg {
102010d565efSmrg   machine_mode inmode;
102110d565efSmrg 
102210d565efSmrg   inmode = GET_MODE (reg);
102310d565efSmrg   if (inmode == VOIDmode)
102410d565efSmrg     inmode = mode;
102510d565efSmrg   return gen_rtx_SUBREG (mode, reg,
102610d565efSmrg 			 subreg_lowpart_offset (mode, inmode));
102710d565efSmrg }
102810d565efSmrg 
102910d565efSmrg rtx
103010d565efSmrg gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
103110d565efSmrg 		      enum var_init_status status)
103210d565efSmrg {
103310d565efSmrg   rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
103410d565efSmrg   PAT_VAR_LOCATION_STATUS (x) = status;
103510d565efSmrg   return x;
103610d565efSmrg }
103710d565efSmrg 
103810d565efSmrg 
103910d565efSmrg /* Create an rtvec and store within it the RTXen passed as arguments.  */
104010d565efSmrg 
104110d565efSmrg rtvec
104210d565efSmrg gen_rtvec (int n, ...)
104310d565efSmrg {
104410d565efSmrg   int i;
104510d565efSmrg   rtvec rt_val;
104610d565efSmrg   va_list p;
104710d565efSmrg 
104810d565efSmrg   va_start (p, n);
104910d565efSmrg 
105010d565efSmrg   /* Don't allocate an empty rtvec...  */
105110d565efSmrg   if (n == 0)
105210d565efSmrg     {
105310d565efSmrg       va_end (p);
105410d565efSmrg       return NULL_RTVEC;
105510d565efSmrg     }
105610d565efSmrg 
105710d565efSmrg   rt_val = rtvec_alloc (n);
105810d565efSmrg 
105910d565efSmrg   for (i = 0; i < n; i++)
106010d565efSmrg     rt_val->elem[i] = va_arg (p, rtx);
106110d565efSmrg 
106210d565efSmrg   va_end (p);
106310d565efSmrg   return rt_val;
106410d565efSmrg }
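
/* A minimal sketch (hypothetical helper): bundling two existing patterns
   X and Y into a single PARALLEL, as a define_expand might do.  */
#if 0
static rtx
example_parallel (rtx x, rtx y)
{
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, x, y));
}
#endif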
106510d565efSmrg 
106610d565efSmrg rtvec
106710d565efSmrg gen_rtvec_v (int n, rtx *argp)
106810d565efSmrg {
106910d565efSmrg   int i;
107010d565efSmrg   rtvec rt_val;
107110d565efSmrg 
107210d565efSmrg   /* Don't allocate an empty rtvec...  */
107310d565efSmrg   if (n == 0)
107410d565efSmrg     return NULL_RTVEC;
107510d565efSmrg 
107610d565efSmrg   rt_val = rtvec_alloc (n);
107710d565efSmrg 
107810d565efSmrg   for (i = 0; i < n; i++)
107910d565efSmrg     rt_val->elem[i] = *argp++;
108010d565efSmrg 
108110d565efSmrg   return rt_val;
108210d565efSmrg }
108310d565efSmrg 
108410d565efSmrg rtvec
108510d565efSmrg gen_rtvec_v (int n, rtx_insn **argp)
108610d565efSmrg {
108710d565efSmrg   int i;
108810d565efSmrg   rtvec rt_val;
108910d565efSmrg 
109010d565efSmrg   /* Don't allocate an empty rtvec...  */
109110d565efSmrg   if (n == 0)
109210d565efSmrg     return NULL_RTVEC;
109310d565efSmrg 
109410d565efSmrg   rt_val = rtvec_alloc (n);
109510d565efSmrg 
109610d565efSmrg   for (i = 0; i < n; i++)
109710d565efSmrg     rt_val->elem[i] = *argp++;
109810d565efSmrg 
109910d565efSmrg   return rt_val;
110010d565efSmrg }
110110d565efSmrg 
110210d565efSmrg 
110310d565efSmrg /* Return the number of bytes between the start of an OUTER_MODE
110410d565efSmrg    in-memory value and the start of an INNER_MODE in-memory value,
110510d565efSmrg    given that the former is a lowpart of the latter.  It may be a
110610d565efSmrg    paradoxical lowpart, in which case the offset will be negative
110710d565efSmrg    on big-endian targets.  */
110810d565efSmrg 
1109*c7a68eb7Smrg poly_int64
111010d565efSmrg byte_lowpart_offset (machine_mode outer_mode,
111110d565efSmrg 		     machine_mode inner_mode)
111210d565efSmrg {
1113*c7a68eb7Smrg   if (paradoxical_subreg_p (outer_mode, inner_mode))
111410d565efSmrg     return -subreg_lowpart_offset (inner_mode, outer_mode);
1115*c7a68eb7Smrg   else
1116*c7a68eb7Smrg     return subreg_lowpart_offset (outer_mode, inner_mode);
1117*c7a68eb7Smrg }
1118*c7a68eb7Smrg 
1119*c7a68eb7Smrg /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
1120*c7a68eb7Smrg    from address X.  For paradoxical big-endian subregs this is a
1121*c7a68eb7Smrg    negative value, otherwise it's the same as OFFSET.  */
1122*c7a68eb7Smrg 
1123*c7a68eb7Smrg poly_int64
1124*c7a68eb7Smrg subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
1125*c7a68eb7Smrg 		      poly_uint64 offset)
1126*c7a68eb7Smrg {
1127*c7a68eb7Smrg   if (paradoxical_subreg_p (outer_mode, inner_mode))
1128*c7a68eb7Smrg     {
1129*c7a68eb7Smrg       gcc_assert (known_eq (offset, 0U));
1130*c7a68eb7Smrg       return -subreg_lowpart_offset (inner_mode, outer_mode);
1131*c7a68eb7Smrg     }
1132*c7a68eb7Smrg   return offset;
1133*c7a68eb7Smrg }
1134*c7a68eb7Smrg 
1135*c7a68eb7Smrg /* As above, but return the offset that existing subreg X would have
1136*c7a68eb7Smrg    if SUBREG_REG (X) were stored in memory.  The only significant thing
1137*c7a68eb7Smrg    about the current SUBREG_REG is its mode.  */
1138*c7a68eb7Smrg 
1139*c7a68eb7Smrg poly_int64
1140*c7a68eb7Smrg subreg_memory_offset (const_rtx x)
1141*c7a68eb7Smrg {
1142*c7a68eb7Smrg   return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
1143*c7a68eb7Smrg 			       SUBREG_BYTE (x));
114410d565efSmrg }
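
/* A minimal sketch (hypothetical helper): on a little-endian target both
   offsets below are 0; on a 64-bit big-endian target the SImode lowpart of
   a DImode value sits at byte 4, and the paradoxical direction is the
   negation of that.  */
#if 0
static void
example_lowpart_offsets (void)
{
  poly_int64 narrow = byte_lowpart_offset (SImode, DImode);
  poly_int64 wide = byte_lowpart_offset (DImode, SImode);
  gcc_checking_assert (known_eq (wide, -narrow));
}
#endif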
114510d565efSmrg 
114610d565efSmrg /* Generate a REG rtx for a new pseudo register of mode MODE.
114710d565efSmrg    This pseudo is assigned the next sequential register number.  */
114810d565efSmrg 
114910d565efSmrg rtx
115010d565efSmrg gen_reg_rtx (machine_mode mode)
115110d565efSmrg {
115210d565efSmrg   rtx val;
115310d565efSmrg   unsigned int align = GET_MODE_ALIGNMENT (mode);
115410d565efSmrg 
115510d565efSmrg   gcc_assert (can_create_pseudo_p ());
115610d565efSmrg 
115710d565efSmrg   /* If a virtual register with bigger mode alignment is generated,
115810d565efSmrg      increase stack alignment estimation because it might be spilled
115910d565efSmrg      to stack later.  */
116010d565efSmrg   if (SUPPORTS_STACK_ALIGNMENT
116110d565efSmrg       && crtl->stack_alignment_estimated < align
116210d565efSmrg       && !crtl->stack_realign_processed)
116310d565efSmrg     {
116410d565efSmrg       unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
116510d565efSmrg       if (crtl->stack_alignment_estimated < min_align)
116610d565efSmrg 	crtl->stack_alignment_estimated = min_align;
116710d565efSmrg     }
116810d565efSmrg 
116910d565efSmrg   if (generating_concat_p
117010d565efSmrg       && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
117110d565efSmrg 	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
117210d565efSmrg     {
117310d565efSmrg       /* For complex modes, don't make a single pseudo.
117410d565efSmrg 	 Instead, make a CONCAT of two pseudos.
117510d565efSmrg 	 This allows noncontiguous allocation of the real and imaginary parts,
117610d565efSmrg 	 which makes much better code.  Besides, allocating DCmode
117710d565efSmrg 	 pseudos overstrains reload on some machines like the 386.  */
117810d565efSmrg       rtx realpart, imagpart;
117910d565efSmrg       machine_mode partmode = GET_MODE_INNER (mode);
118010d565efSmrg 
118110d565efSmrg       realpart = gen_reg_rtx (partmode);
118210d565efSmrg       imagpart = gen_reg_rtx (partmode);
118310d565efSmrg       return gen_rtx_CONCAT (mode, realpart, imagpart);
118410d565efSmrg     }
118510d565efSmrg 
118610d565efSmrg   /* Do not call gen_reg_rtx with uninitialized crtl.  */
118710d565efSmrg   gcc_assert (crtl->emit.regno_pointer_align_length);
118810d565efSmrg 
118910d565efSmrg   crtl->emit.ensure_regno_capacity ();
119010d565efSmrg   gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);
119110d565efSmrg 
119210d565efSmrg   val = gen_raw_REG (mode, reg_rtx_no);
119310d565efSmrg   regno_reg_rtx[reg_rtx_no++] = val;
119410d565efSmrg   return val;
119510d565efSmrg }
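
/* A minimal sketch (hypothetical helper, assuming generating_concat_p is
   set as it is during expansion): a complex-mode pseudo comes back as a
   CONCAT of two independent part pseudos rather than a single REG.  */
#if 0
static void
example_complex_pseudo (void)
{
  rtx c = gen_reg_rtx (DCmode);
  gcc_checking_assert (GET_CODE (c) == CONCAT
		       && GET_MODE (XEXP (c, 0)) == DFmode);
}
#endif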
119610d565efSmrg 
119710d565efSmrg /* Make sure regno_pointer_align and regno_reg_rtx are large
119810d565efSmrg    enough to have elements in the range 0 <= idx <= reg_rtx_no.  */
119910d565efSmrg 
120010d565efSmrg void
120110d565efSmrg emit_status::ensure_regno_capacity ()
120210d565efSmrg {
120310d565efSmrg   int old_size = regno_pointer_align_length;
120410d565efSmrg 
120510d565efSmrg   if (reg_rtx_no < old_size)
120610d565efSmrg     return;
120710d565efSmrg 
120810d565efSmrg   int new_size = old_size * 2;
120910d565efSmrg   while (reg_rtx_no >= new_size)
121010d565efSmrg     new_size *= 2;
121110d565efSmrg 
121210d565efSmrg   char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
121310d565efSmrg   memset (tmp + old_size, 0, new_size - old_size);
121410d565efSmrg   regno_pointer_align = (unsigned char *) tmp;
121510d565efSmrg 
121610d565efSmrg   rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
121710d565efSmrg   memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
121810d565efSmrg   regno_reg_rtx = new1;
121910d565efSmrg 
122010d565efSmrg   crtl->emit.regno_pointer_align_length = new_size;
122110d565efSmrg }
122210d565efSmrg 
122310d565efSmrg /* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */
122410d565efSmrg 
122510d565efSmrg bool
122610d565efSmrg reg_is_parm_p (rtx reg)
122710d565efSmrg {
122810d565efSmrg   tree decl;
122910d565efSmrg 
123010d565efSmrg   gcc_assert (REG_P (reg));
123110d565efSmrg   decl = REG_EXPR (reg);
123210d565efSmrg   return (decl && TREE_CODE (decl) == PARM_DECL);
123310d565efSmrg }
123410d565efSmrg 
123510d565efSmrg /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
123610d565efSmrg    to the REG_OFFSET.  */
123710d565efSmrg 
123810d565efSmrg static void
1239*c7a68eb7Smrg update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
124010d565efSmrg {
124110d565efSmrg   REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
124210d565efSmrg 				       REG_OFFSET (reg) + offset);
124310d565efSmrg }
124410d565efSmrg 
124510d565efSmrg /* Generate a register with same attributes as REG, but with OFFSET
124610d565efSmrg    added to the REG_OFFSET.  */
124710d565efSmrg 
124810d565efSmrg rtx
124910d565efSmrg gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1250*c7a68eb7Smrg 		    poly_int64 offset)
125110d565efSmrg {
125210d565efSmrg   rtx new_rtx = gen_rtx_REG (mode, regno);
125310d565efSmrg 
125410d565efSmrg   update_reg_offset (new_rtx, reg, offset);
125510d565efSmrg   return new_rtx;
125610d565efSmrg }
125710d565efSmrg 
125810d565efSmrg /* Generate a new pseudo-register with the same attributes as REG, but
125910d565efSmrg    with OFFSET added to the REG_OFFSET.  */
126010d565efSmrg 
126110d565efSmrg rtx
126210d565efSmrg gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
126310d565efSmrg {
126410d565efSmrg   rtx new_rtx = gen_reg_rtx (mode);
126510d565efSmrg 
126610d565efSmrg   update_reg_offset (new_rtx, reg, offset);
126710d565efSmrg   return new_rtx;
126810d565efSmrg }
126910d565efSmrg 
127010d565efSmrg /* Adjust REG in-place so that it has mode MODE.  It is assumed that the
127110d565efSmrg    new register is a (possibly paradoxical) lowpart of the old one.  */
127210d565efSmrg 
127310d565efSmrg void
127410d565efSmrg adjust_reg_mode (rtx reg, machine_mode mode)
127510d565efSmrg {
127610d565efSmrg   update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
127710d565efSmrg   PUT_MODE (reg, mode);
127810d565efSmrg }
127910d565efSmrg 
128010d565efSmrg /* Copy REG's attributes from X, if X has any attributes.  If REG and X
128110d565efSmrg    have different modes, REG is a (possibly paradoxical) lowpart of X.  */
128210d565efSmrg 
128310d565efSmrg void
128410d565efSmrg set_reg_attrs_from_value (rtx reg, rtx x)
128510d565efSmrg {
1286*c7a68eb7Smrg   poly_int64 offset;
128710d565efSmrg   bool can_be_reg_pointer = true;
128810d565efSmrg 
128910d565efSmrg   /* Don't call mark_reg_pointer for incompatible pointer sign
129010d565efSmrg      extension.  */
129110d565efSmrg   while (GET_CODE (x) == SIGN_EXTEND
129210d565efSmrg 	 || GET_CODE (x) == ZERO_EXTEND
129310d565efSmrg 	 || GET_CODE (x) == TRUNCATE
129410d565efSmrg 	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
129510d565efSmrg     {
129610d565efSmrg #if defined(POINTERS_EXTEND_UNSIGNED)
129710d565efSmrg       if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
129810d565efSmrg 	   || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
129910d565efSmrg 	   || (paradoxical_subreg_p (x)
130010d565efSmrg 	       && ! (SUBREG_PROMOTED_VAR_P (x)
130110d565efSmrg 		     && SUBREG_CHECK_PROMOTED_SIGN (x,
130210d565efSmrg 						    POINTERS_EXTEND_UNSIGNED))))
130310d565efSmrg 	  && !targetm.have_ptr_extend ())
130410d565efSmrg 	can_be_reg_pointer = false;
130510d565efSmrg #endif
130610d565efSmrg       x = XEXP (x, 0);
130710d565efSmrg     }
130810d565efSmrg 
130910d565efSmrg   /* Hard registers can be reused for multiple purposes within the same
131010d565efSmrg      function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
131110d565efSmrg      on them is wrong.  */
131210d565efSmrg   if (HARD_REGISTER_P (reg))
131310d565efSmrg     return;
131410d565efSmrg 
131510d565efSmrg   offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
131610d565efSmrg   if (MEM_P (x))
131710d565efSmrg     {
131810d565efSmrg       if (MEM_OFFSET_KNOWN_P (x))
131910d565efSmrg 	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
132010d565efSmrg 					 MEM_OFFSET (x) + offset);
132110d565efSmrg       if (can_be_reg_pointer && MEM_POINTER (x))
132210d565efSmrg 	mark_reg_pointer (reg, 0);
132310d565efSmrg     }
132410d565efSmrg   else if (REG_P (x))
132510d565efSmrg     {
132610d565efSmrg       if (REG_ATTRS (x))
132710d565efSmrg 	update_reg_offset (reg, x, offset);
132810d565efSmrg       if (can_be_reg_pointer && REG_POINTER (x))
132910d565efSmrg 	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
133010d565efSmrg     }
133110d565efSmrg }
133210d565efSmrg 
133310d565efSmrg /* Generate a REG rtx for a new pseudo register, copying the mode
133410d565efSmrg    and attributes from X.  */
133510d565efSmrg 
133610d565efSmrg rtx
133710d565efSmrg gen_reg_rtx_and_attrs (rtx x)
133810d565efSmrg {
133910d565efSmrg   rtx reg = gen_reg_rtx (GET_MODE (x));
134010d565efSmrg   set_reg_attrs_from_value (reg, x);
134110d565efSmrg   return reg;
134210d565efSmrg }
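
/* Illustrative sketch, not part of emit-rtl.c: gen_reg_rtx_and_attrs is the
   attribute-preserving variant of plain gen_reg_rtx; the new pseudo inherits
   REG_ATTRS and pointer-ness from the source value.  Hypothetical helper.  */

static rtx ATTRIBUTE_UNUSED
sketch_copy_with_attrs (rtx src)
{
  rtx tmp = gen_reg_rtx_and_attrs (src);  /* Same mode and attributes.  */
  emit_move_insn (tmp, src);
  return tmp;
}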
134310d565efSmrg 
134410d565efSmrg /* Set the register attributes for registers contained in PARM_RTX.
134510d565efSmrg    Use needed values from memory attributes of MEM.  */
134610d565efSmrg 
134710d565efSmrg void
134810d565efSmrg set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
134910d565efSmrg {
135010d565efSmrg   if (REG_P (parm_rtx))
135110d565efSmrg     set_reg_attrs_from_value (parm_rtx, mem);
135210d565efSmrg   else if (GET_CODE (parm_rtx) == PARALLEL)
135310d565efSmrg     {
135410d565efSmrg       /* Check for a NULL entry in the first slot, used to indicate that the
135510d565efSmrg 	 parameter goes both on the stack and in registers.  */
135610d565efSmrg       int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
135710d565efSmrg       for (; i < XVECLEN (parm_rtx, 0); i++)
135810d565efSmrg 	{
135910d565efSmrg 	  rtx x = XVECEXP (parm_rtx, 0, i);
136010d565efSmrg 	  if (REG_P (XEXP (x, 0)))
136110d565efSmrg 	    REG_ATTRS (XEXP (x, 0))
136210d565efSmrg 	      = get_reg_attrs (MEM_EXPR (mem),
136310d565efSmrg 			       INTVAL (XEXP (x, 1)));
136410d565efSmrg 	}
136510d565efSmrg     }
136610d565efSmrg }
136710d565efSmrg 
136810d565efSmrg /* Set the REG_ATTRS for registers in value X, given that X represents
136910d565efSmrg    decl T.  */
137010d565efSmrg 
137110d565efSmrg void
137210d565efSmrg set_reg_attrs_for_decl_rtl (tree t, rtx x)
137310d565efSmrg {
137410d565efSmrg   if (!t)
137510d565efSmrg     return;
137610d565efSmrg   tree tdecl = t;
137710d565efSmrg   if (GET_CODE (x) == SUBREG)
137810d565efSmrg     {
137910d565efSmrg       gcc_assert (subreg_lowpart_p (x));
138010d565efSmrg       x = SUBREG_REG (x);
138110d565efSmrg     }
138210d565efSmrg   if (REG_P (x))
138310d565efSmrg     REG_ATTRS (x)
138410d565efSmrg       = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
138510d565efSmrg 					       DECL_P (tdecl)
138610d565efSmrg 					       ? DECL_MODE (tdecl)
138710d565efSmrg 					       : TYPE_MODE (TREE_TYPE (tdecl))));
138810d565efSmrg   if (GET_CODE (x) == CONCAT)
138910d565efSmrg     {
139010d565efSmrg       if (REG_P (XEXP (x, 0)))
139110d565efSmrg         REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
139210d565efSmrg       if (REG_P (XEXP (x, 1)))
139310d565efSmrg 	REG_ATTRS (XEXP (x, 1))
139410d565efSmrg 	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
139510d565efSmrg     }
139610d565efSmrg   if (GET_CODE (x) == PARALLEL)
139710d565efSmrg     {
139810d565efSmrg       int i, start;
139910d565efSmrg 
140010d565efSmrg       /* Check for a NULL entry, used to indicate that the parameter goes
140110d565efSmrg 	 both on the stack and in registers.  */
140210d565efSmrg       if (XEXP (XVECEXP (x, 0, 0), 0))
140310d565efSmrg 	start = 0;
140410d565efSmrg       else
140510d565efSmrg 	start = 1;
140610d565efSmrg 
140710d565efSmrg       for (i = start; i < XVECLEN (x, 0); i++)
140810d565efSmrg 	{
140910d565efSmrg 	  rtx y = XVECEXP (x, 0, i);
141010d565efSmrg 	  if (REG_P (XEXP (y, 0)))
141110d565efSmrg 	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
141210d565efSmrg 	}
141310d565efSmrg     }
141410d565efSmrg }
141510d565efSmrg 
141610d565efSmrg /* Assign the RTX X to declaration T.  */
141710d565efSmrg 
141810d565efSmrg void
141910d565efSmrg set_decl_rtl (tree t, rtx x)
142010d565efSmrg {
142110d565efSmrg   DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
142210d565efSmrg   if (x)
142310d565efSmrg     set_reg_attrs_for_decl_rtl (t, x);
142410d565efSmrg }
142510d565efSmrg 
142610d565efSmrg /* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
142710d565efSmrg    if the ABI requires the parameter to be passed by reference.  */
142810d565efSmrg 
142910d565efSmrg void
143010d565efSmrg set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
143110d565efSmrg {
143210d565efSmrg   DECL_INCOMING_RTL (t) = x;
143310d565efSmrg   if (x && !by_reference_p)
143410d565efSmrg     set_reg_attrs_for_decl_rtl (t, x);
143510d565efSmrg }
143610d565efSmrg 
143710d565efSmrg /* Identify REG (which may be a CONCAT) as a user register.  */
143810d565efSmrg 
143910d565efSmrg void
144010d565efSmrg mark_user_reg (rtx reg)
144110d565efSmrg {
144210d565efSmrg   if (GET_CODE (reg) == CONCAT)
144310d565efSmrg     {
144410d565efSmrg       REG_USERVAR_P (XEXP (reg, 0)) = 1;
144510d565efSmrg       REG_USERVAR_P (XEXP (reg, 1)) = 1;
144610d565efSmrg     }
144710d565efSmrg   else
144810d565efSmrg     {
144910d565efSmrg       gcc_assert (REG_P (reg));
145010d565efSmrg       REG_USERVAR_P (reg) = 1;
145110d565efSmrg     }
145210d565efSmrg }
145310d565efSmrg 
145410d565efSmrg /* Identify REG as a probable pointer register and show its alignment
145510d565efSmrg    as ALIGN, if nonzero.  */
145610d565efSmrg 
145710d565efSmrg void
145810d565efSmrg mark_reg_pointer (rtx reg, int align)
145910d565efSmrg {
146010d565efSmrg   if (! REG_POINTER (reg))
146110d565efSmrg     {
146210d565efSmrg       REG_POINTER (reg) = 1;
146310d565efSmrg 
146410d565efSmrg       if (align)
146510d565efSmrg 	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
146610d565efSmrg     }
146710d565efSmrg   else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
146810d565efSmrg     /* We can no longer be sure just how aligned this pointer is.  */
146910d565efSmrg     REGNO_POINTER_ALIGN (REGNO (reg)) = align;
147010d565efSmrg }
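
/* Illustrative sketch, not part of emit-rtl.c: creating a Pmode pseudo and
   marking it as a pointer with word alignment (the alignment argument is in
   bits).  Hypothetical helper.  */

static rtx ATTRIBUTE_UNUSED
sketch_make_pointer_pseudo (void)
{
  rtx ptr = gen_reg_rtx (Pmode);
  mark_reg_pointer (ptr, BITS_PER_WORD);
  return ptr;
}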
147110d565efSmrg 
147210d565efSmrg /* Return 1 plus the largest pseudo reg number used in the current function.  */
147310d565efSmrg 
147410d565efSmrg int
147510d565efSmrg max_reg_num (void)
147610d565efSmrg {
147710d565efSmrg   return reg_rtx_no;
147810d565efSmrg }
147910d565efSmrg 
148010d565efSmrg /* Return 1 + the largest label number used so far in the current function.  */
148110d565efSmrg 
148210d565efSmrg int
148310d565efSmrg max_label_num (void)
148410d565efSmrg {
148510d565efSmrg   return label_num;
148610d565efSmrg }
148710d565efSmrg 
148810d565efSmrg /* Return first label number used in this function (if any were used).  */
148910d565efSmrg 
149010d565efSmrg int
149110d565efSmrg get_first_label_num (void)
149210d565efSmrg {
149310d565efSmrg   return first_label_num;
149410d565efSmrg }
149510d565efSmrg 
149610d565efSmrg /* If the rtx for label was created during the expansion of a nested
149710d565efSmrg    function, then first_label_num won't include this label number.
149810d565efSmrg    Fix this now so that array indices work later.  */
149910d565efSmrg 
150010d565efSmrg void
150110d565efSmrg maybe_set_first_label_num (rtx_code_label *x)
150210d565efSmrg {
150310d565efSmrg   if (CODE_LABEL_NUMBER (x) < first_label_num)
150410d565efSmrg     first_label_num = CODE_LABEL_NUMBER (x);
150510d565efSmrg }
150610d565efSmrg 
150710d565efSmrg /* For use by the RTL function loader, when mingling with normal
150810d565efSmrg    functions.
150910d565efSmrg    Ensure that label_num is greater than the label num of X, to avoid
151010d565efSmrg    duplicate labels in the generated assembler.  */
151110d565efSmrg 
151210d565efSmrg void
151310d565efSmrg maybe_set_max_label_num (rtx_code_label *x)
151410d565efSmrg {
151510d565efSmrg   if (CODE_LABEL_NUMBER (x) >= label_num)
151610d565efSmrg     label_num = CODE_LABEL_NUMBER (x) + 1;
151710d565efSmrg }
151810d565efSmrg 
151910d565efSmrg 
152010d565efSmrg /* Return a value representing some low-order bits of X, where the number
152110d565efSmrg    of low-order bits is given by MODE.  Note that no conversion is done
152210d565efSmrg    between floating-point and fixed-point values; rather, the bit
152310d565efSmrg    representation is returned.
152410d565efSmrg 
152510d565efSmrg    This function handles the cases in common between gen_lowpart, below,
152610d565efSmrg    and two variants in cse.c and combine.c.  These are the cases that can
152710d565efSmrg    be safely handled at all points in the compilation.
152810d565efSmrg 
152910d565efSmrg    If this is not a case we can handle, return 0.  */
153010d565efSmrg 
153110d565efSmrg rtx
153210d565efSmrg gen_lowpart_common (machine_mode mode, rtx x)
153310d565efSmrg {
1534*c7a68eb7Smrg   poly_uint64 msize = GET_MODE_SIZE (mode);
153510d565efSmrg   machine_mode innermode;
153610d565efSmrg 
153710d565efSmrg   /* Unfortunately, this routine doesn't take a parameter for the mode of X,
153810d565efSmrg      so we have to make one up.  Yuk.  */
153910d565efSmrg   innermode = GET_MODE (x);
154010d565efSmrg   if (CONST_INT_P (x)
1541*c7a68eb7Smrg       && known_le (msize * BITS_PER_UNIT,
1542*c7a68eb7Smrg 		   (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
1543*c7a68eb7Smrg     innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
154410d565efSmrg   else if (innermode == VOIDmode)
1545*c7a68eb7Smrg     innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();
154610d565efSmrg 
154710d565efSmrg   gcc_assert (innermode != VOIDmode && innermode != BLKmode);
154810d565efSmrg 
154910d565efSmrg   if (innermode == mode)
155010d565efSmrg     return x;
155110d565efSmrg 
1552*c7a68eb7Smrg   /* The size of the outer and inner modes must be ordered.  */
1553*c7a68eb7Smrg   poly_uint64 xsize = GET_MODE_SIZE (innermode);
1554*c7a68eb7Smrg   if (!ordered_p (msize, xsize))
155510d565efSmrg     return 0;
155610d565efSmrg 
1557*c7a68eb7Smrg   if (SCALAR_FLOAT_MODE_P (mode))
1558*c7a68eb7Smrg     {
1559*c7a68eb7Smrg       /* Don't allow paradoxical FLOAT_MODE subregs.  */
1560*c7a68eb7Smrg       if (maybe_gt (msize, xsize))
156110d565efSmrg 	return 0;
1562*c7a68eb7Smrg     }
1563*c7a68eb7Smrg   else
1564*c7a68eb7Smrg     {
1565*c7a68eb7Smrg       /* MODE must occupy no more of the underlying registers than X.  */
1566*c7a68eb7Smrg       poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
1567*c7a68eb7Smrg       unsigned int mregs, xregs;
1568*c7a68eb7Smrg       if (!can_div_away_from_zero_p (msize, regsize, &mregs)
1569*c7a68eb7Smrg 	  || !can_div_away_from_zero_p (xsize, regsize, &xregs)
1570*c7a68eb7Smrg 	  || mregs > xregs)
1571*c7a68eb7Smrg 	return 0;
1572*c7a68eb7Smrg     }
157310d565efSmrg 
1574*c7a68eb7Smrg   scalar_int_mode int_mode, int_innermode, from_mode;
157510d565efSmrg   if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1576*c7a68eb7Smrg       && is_a <scalar_int_mode> (mode, &int_mode)
1577*c7a68eb7Smrg       && is_a <scalar_int_mode> (innermode, &int_innermode)
1578*c7a68eb7Smrg       && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
157910d565efSmrg     {
158010d565efSmrg       /* If we are getting the low-order part of something that has been
158110d565efSmrg 	 sign- or zero-extended, we can either just use the object being
158210d565efSmrg 	 extended or make a narrower extension.  If we want an even smaller
158310d565efSmrg 	 piece than the size of the object being extended, call ourselves
158410d565efSmrg 	 recursively.
158510d565efSmrg 
158610d565efSmrg 	 This case is used mostly by combine and cse.  */
158710d565efSmrg 
1588*c7a68eb7Smrg       if (from_mode == int_mode)
158910d565efSmrg 	return XEXP (x, 0);
1590*c7a68eb7Smrg       else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
1591*c7a68eb7Smrg 	return gen_lowpart_common (int_mode, XEXP (x, 0));
1592*c7a68eb7Smrg       else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
1593*c7a68eb7Smrg 	return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
159410d565efSmrg     }
159510d565efSmrg   else if (GET_CODE (x) == SUBREG || REG_P (x)
159610d565efSmrg 	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1597*c7a68eb7Smrg 	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
1598*c7a68eb7Smrg 	   || CONST_POLY_INT_P (x))
159910d565efSmrg     return lowpart_subreg (mode, x, innermode);
160010d565efSmrg 
160110d565efSmrg   /* Otherwise, we can't do this.  */
160210d565efSmrg   return 0;
160310d565efSmrg }
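
/* Illustrative examples, not part of emit-rtl.c; the SUBREG_BYTE shown
   assumes a little-endian layout and differs on big-endian targets:

     gen_lowpart_common (SImode, (reg:DI 100))
       => (subreg:SI (reg:DI 100) 0)
     gen_lowpart_common (SImode, (sign_extend:DI (reg:SI 101)))
       => (reg:SI 101)
     gen_lowpart_common (DImode, (mem:SI ...))
       => 0, since MEMs are not handled here (see gen_lowpart).  */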
160410d565efSmrg 
160510d565efSmrg rtx
160610d565efSmrg gen_highpart (machine_mode mode, rtx x)
160710d565efSmrg {
1608*c7a68eb7Smrg   poly_uint64 msize = GET_MODE_SIZE (mode);
160910d565efSmrg   rtx result;
161010d565efSmrg 
161110d565efSmrg   /* This case loses if X is a subreg.  To catch bugs early,
161210d565efSmrg      complain if an invalid MODE is used even in other cases.  */
1613*c7a68eb7Smrg   gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
1614*c7a68eb7Smrg 	      || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));
161510d565efSmrg 
161610d565efSmrg   result = simplify_gen_subreg (mode, x, GET_MODE (x),
161710d565efSmrg 				subreg_highpart_offset (mode, GET_MODE (x)));
161810d565efSmrg   gcc_assert (result);
161910d565efSmrg 
162010d565efSmrg   /* simplify_gen_subreg is not guaranteed to return a valid operand for
162110d565efSmrg      the target if we have a MEM.  gen_highpart must return a valid operand,
162210d565efSmrg      emitting code if necessary to do so.  */
162310d565efSmrg   if (MEM_P (result))
162410d565efSmrg     {
162510d565efSmrg       result = validize_mem (result);
162610d565efSmrg       gcc_assert (result);
162710d565efSmrg     }
162810d565efSmrg 
162910d565efSmrg   return result;
163010d565efSmrg }
163110d565efSmrg 
163210d565efSmrg /* Like gen_highpart, but accept the mode of the EXP operand in case
163310d565efSmrg    EXP can be a VOIDmode constant.  */
163410d565efSmrg rtx
163510d565efSmrg gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
163610d565efSmrg {
163710d565efSmrg   if (GET_MODE (exp) != VOIDmode)
163810d565efSmrg     {
163910d565efSmrg       gcc_assert (GET_MODE (exp) == innermode);
164010d565efSmrg       return gen_highpart (outermode, exp);
164110d565efSmrg     }
164210d565efSmrg   return simplify_gen_subreg (outermode, exp, innermode,
164310d565efSmrg 			      subreg_highpart_offset (outermode, innermode));
164410d565efSmrg }
164510d565efSmrg 
164610d565efSmrg /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
164710d565efSmrg    OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */
164810d565efSmrg 
1649*c7a68eb7Smrg poly_uint64
1650*c7a68eb7Smrg subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
165110d565efSmrg {
1652*c7a68eb7Smrg   gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
1653*c7a68eb7Smrg   if (maybe_gt (outer_bytes, inner_bytes))
165410d565efSmrg     /* Paradoxical subregs always have a SUBREG_BYTE of 0.  */
165510d565efSmrg     return 0;
165610d565efSmrg 
165710d565efSmrg   if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
165810d565efSmrg     return inner_bytes - outer_bytes;
165910d565efSmrg   else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
166010d565efSmrg     return 0;
166110d565efSmrg   else
166210d565efSmrg     return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
166310d565efSmrg }
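
/* Illustrative worked example, not part of emit-rtl.c: for a 4-byte lowpart
   of an 8-byte inner value,

     subreg_size_lowpart_offset (4, 8) == 0  if !BYTES_BIG_ENDIAN
                                             && !WORDS_BIG_ENDIAN
     subreg_size_lowpart_offset (4, 8) == 4  if BYTES_BIG_ENDIAN
                                             && WORDS_BIG_ENDIAN

   and a paradoxical lowpart always has offset 0:

     subreg_size_lowpart_offset (8, 4) == 0.  */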
166410d565efSmrg 
166510d565efSmrg /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
166610d565efSmrg    OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */
166710d565efSmrg 
1668*c7a68eb7Smrg poly_uint64
1669*c7a68eb7Smrg subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
167010d565efSmrg {
1671*c7a68eb7Smrg   gcc_assert (known_ge (inner_bytes, outer_bytes));
167210d565efSmrg 
167310d565efSmrg   if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
167410d565efSmrg     return 0;
167510d565efSmrg   else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
167610d565efSmrg     return inner_bytes - outer_bytes;
167710d565efSmrg   else
167810d565efSmrg     return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
167910d565efSmrg 					(inner_bytes - outer_bytes)
168010d565efSmrg 					* BITS_PER_UNIT);
168110d565efSmrg }
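
/* Illustrative worked example, not part of emit-rtl.c: the highpart offsets
   mirror the lowpart ones,

     subreg_size_highpart_offset (4, 8) == 4  if !BYTES_BIG_ENDIAN
                                              && !WORDS_BIG_ENDIAN
     subreg_size_highpart_offset (4, 8) == 0  if BYTES_BIG_ENDIAN
                                              && WORDS_BIG_ENDIAN.  */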
168210d565efSmrg 
168310d565efSmrg /* Return 1 iff X, assumed to be a SUBREG,
168410d565efSmrg    refers to the least significant part of its containing reg.
168510d565efSmrg    If X is not a SUBREG, always return 1 (it is its own low part!).  */
168610d565efSmrg 
168710d565efSmrg int
168810d565efSmrg subreg_lowpart_p (const_rtx x)
168910d565efSmrg {
169010d565efSmrg   if (GET_CODE (x) != SUBREG)
169110d565efSmrg     return 1;
169210d565efSmrg   else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
169310d565efSmrg     return 0;
169410d565efSmrg 
1695*c7a68eb7Smrg   return known_eq (subreg_lowpart_offset (GET_MODE (x),
1696*c7a68eb7Smrg 					  GET_MODE (SUBREG_REG (x))),
1697*c7a68eb7Smrg 		   SUBREG_BYTE (x));
169810d565efSmrg }
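
/* Illustrative example, not part of emit-rtl.c: on a target where the
   lowpart offset is 0, subreg_lowpart_p returns 1 for
   (subreg:SI (reg:DI 100) 0) and 0 for (subreg:SI (reg:DI 100) 4).  */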
169910d565efSmrg 
170010d565efSmrg /* Return subword OFFSET of operand OP.
170110d565efSmrg    The word number, OFFSET, is interpreted as the word number starting
170210d565efSmrg    at the low-order address.  OFFSET 0 is the low-order word if not
170310d565efSmrg    WORDS_BIG_ENDIAN, otherwise it is the high-order word.
170410d565efSmrg 
170510d565efSmrg    If we cannot extract the required word, we return zero.  Otherwise,
170610d565efSmrg    an rtx corresponding to the requested word will be returned.
170710d565efSmrg 
170810d565efSmrg    VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
170910d565efSmrg    reload has completed, a valid address will always be returned.  After
171010d565efSmrg    reload, if a valid address cannot be returned, we return zero.
171110d565efSmrg 
171210d565efSmrg    If VALIDATE_ADDRESS is zero, we simply form the required address; validating
171310d565efSmrg    it is the responsibility of the caller.
171410d565efSmrg 
171510d565efSmrg    MODE is the mode of OP in case it is a CONST_INT.
171610d565efSmrg 
171710d565efSmrg    ??? This is still rather broken for some cases.  The problem for the
171810d565efSmrg    moment is that all callers of this function provide no 'goal mode'
171910d565efSmrg    for us to work with.  This exists because all callers were written
172010d565efSmrg    in a word-based SUBREG world.
172110d565efSmrg    Use of this function can now be replaced by simplify_subreg in
172210d565efSmrg    most cases.
172310d565efSmrg  */
172410d565efSmrg 
172510d565efSmrg rtx
1726*c7a68eb7Smrg operand_subword (rtx op, poly_uint64 offset, int validate_address,
1727*c7a68eb7Smrg 		 machine_mode mode)
172810d565efSmrg {
172910d565efSmrg   if (mode == VOIDmode)
173010d565efSmrg     mode = GET_MODE (op);
173110d565efSmrg 
173210d565efSmrg   gcc_assert (mode != VOIDmode);
173310d565efSmrg 
173410d565efSmrg   /* If OP is narrower than a word, fail.  */
173510d565efSmrg   if (mode != BLKmode
1736*c7a68eb7Smrg       && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
173710d565efSmrg     return 0;
173810d565efSmrg 
173910d565efSmrg   /* If we want a word outside OP, return zero.  */
174010d565efSmrg   if (mode != BLKmode
1741*c7a68eb7Smrg       && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
174210d565efSmrg     return const0_rtx;
174310d565efSmrg 
174410d565efSmrg   /* Form a new MEM at the requested address.  */
174510d565efSmrg   if (MEM_P (op))
174610d565efSmrg     {
174710d565efSmrg       rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
174810d565efSmrg 
174910d565efSmrg       if (! validate_address)
175010d565efSmrg 	return new_rtx;
175110d565efSmrg 
175210d565efSmrg       else if (reload_completed)
175310d565efSmrg 	{
175410d565efSmrg 	  if (! strict_memory_address_addr_space_p (word_mode,
175510d565efSmrg 						    XEXP (new_rtx, 0),
175610d565efSmrg 						    MEM_ADDR_SPACE (op)))
175710d565efSmrg 	    return 0;
175810d565efSmrg 	}
175910d565efSmrg       else
176010d565efSmrg 	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
176110d565efSmrg     }
176210d565efSmrg 
176310d565efSmrg   /* Rest can be handled by simplify_subreg.  */
176410d565efSmrg   return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
176510d565efSmrg }
176610d565efSmrg 
176710d565efSmrg /* Similar to `operand_subword', but never return 0.  If we can't
176810d565efSmrg    extract the required subword, put OP into a register and try again.
176910d565efSmrg    The second attempt must succeed.  We always validate the address in
177010d565efSmrg    this case.
177110d565efSmrg 
177210d565efSmrg    MODE is the mode of OP, in case it is CONST_INT.  */
177310d565efSmrg 
177410d565efSmrg rtx
1775*c7a68eb7Smrg operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
177610d565efSmrg {
177710d565efSmrg   rtx result = operand_subword (op, offset, 1, mode);
177810d565efSmrg 
177910d565efSmrg   if (result)
178010d565efSmrg     return result;
178110d565efSmrg 
178210d565efSmrg   if (mode != BLKmode && mode != VOIDmode)
178310d565efSmrg     {
178410d565efSmrg       /* If this is a register that cannot be accessed by words, copy it
178510d565efSmrg 	 to a pseudo register.  */
178610d565efSmrg       if (REG_P (op))
178710d565efSmrg 	op = copy_to_reg (op);
178810d565efSmrg       else
178910d565efSmrg 	op = force_reg (mode, op);
179010d565efSmrg     }
179110d565efSmrg 
179210d565efSmrg   result = operand_subword (op, offset, 1, mode);
179310d565efSmrg   gcc_assert (result);
179410d565efSmrg 
179510d565efSmrg   return result;
179610d565efSmrg }
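
/* Illustrative sketch, not part of emit-rtl.c: splitting a DImode value into
   its two word_mode halves on a 32-bit target (UNITS_PER_WORD == 4).  Word 0
   is the low-order word unless WORDS_BIG_ENDIAN.  Hypothetical helper.  */

static void ATTRIBUTE_UNUSED
sketch_split_into_words (rtx op, rtx *lo, rtx *hi)
{
  *lo = operand_subword_force (op, 0, DImode);
  *hi = operand_subword_force (op, 1, DImode);
}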
179710d565efSmrg 
1798*c7a68eb7Smrg mem_attrs::mem_attrs ()
1799*c7a68eb7Smrg   : expr (NULL_TREE),
1800*c7a68eb7Smrg     offset (0),
1801*c7a68eb7Smrg     size (0),
1802*c7a68eb7Smrg     alias (0),
1803*c7a68eb7Smrg     align (0),
1804*c7a68eb7Smrg     addrspace (ADDR_SPACE_GENERIC),
1805*c7a68eb7Smrg     offset_known_p (false),
1806*c7a68eb7Smrg     size_known_p (false)
1807*c7a68eb7Smrg {}
1808*c7a68eb7Smrg 
180910d565efSmrg /* Return 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered
181010d565efSmrg    equal, and 0 otherwise.  */
181110d565efSmrg 
181210d565efSmrg int
181310d565efSmrg mem_expr_equal_p (const_tree expr1, const_tree expr2)
181410d565efSmrg {
181510d565efSmrg   if (expr1 == expr2)
181610d565efSmrg     return 1;
181710d565efSmrg 
181810d565efSmrg   if (! expr1 || ! expr2)
181910d565efSmrg     return 0;
182010d565efSmrg 
182110d565efSmrg   if (TREE_CODE (expr1) != TREE_CODE (expr2))
182210d565efSmrg     return 0;
182310d565efSmrg 
182410d565efSmrg   return operand_equal_p (expr1, expr2, 0);
182510d565efSmrg }
182610d565efSmrg 
182710d565efSmrg /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
182810d565efSmrg    bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
182910d565efSmrg    -1 if not known.  */
183010d565efSmrg 
183110d565efSmrg int
183210d565efSmrg get_mem_align_offset (rtx mem, unsigned int align)
183310d565efSmrg {
183410d565efSmrg   tree expr;
1835*c7a68eb7Smrg   poly_uint64 offset;
183610d565efSmrg 
183710d565efSmrg   /* This function can't use
183810d565efSmrg      if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
183910d565efSmrg 	 || (MAX (MEM_ALIGN (mem),
184010d565efSmrg 	          MAX (align, get_object_alignment (MEM_EXPR (mem))))
184110d565efSmrg 	     < align))
184210d565efSmrg        return -1;
184310d565efSmrg      else
184410d565efSmrg        return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
184510d565efSmrg      for two reasons:
184610d565efSmrg      - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
184710d565efSmrg        for <variable>.  get_inner_reference doesn't handle it and
184810d565efSmrg        even if it did, the alignment in that case needs to be determined
184910d565efSmrg        from DECL_FIELD_CONTEXT's TYPE_ALIGN.
185010d565efSmrg      - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
185110d565efSmrg        isn't sufficiently aligned, the object it is in might be.  */
185210d565efSmrg   gcc_assert (MEM_P (mem));
185310d565efSmrg   expr = MEM_EXPR (mem);
185410d565efSmrg   if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
185510d565efSmrg     return -1;
185610d565efSmrg 
185710d565efSmrg   offset = MEM_OFFSET (mem);
185810d565efSmrg   if (DECL_P (expr))
185910d565efSmrg     {
186010d565efSmrg       if (DECL_ALIGN (expr) < align)
186110d565efSmrg 	return -1;
186210d565efSmrg     }
186310d565efSmrg   else if (INDIRECT_REF_P (expr))
186410d565efSmrg     {
186510d565efSmrg       if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
186610d565efSmrg 	return -1;
186710d565efSmrg     }
186810d565efSmrg   else if (TREE_CODE (expr) == COMPONENT_REF)
186910d565efSmrg     {
187010d565efSmrg       while (1)
187110d565efSmrg 	{
187210d565efSmrg 	  tree inner = TREE_OPERAND (expr, 0);
187310d565efSmrg 	  tree field = TREE_OPERAND (expr, 1);
187410d565efSmrg 	  tree byte_offset = component_ref_field_offset (expr);
187510d565efSmrg 	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
187610d565efSmrg 
1877*c7a68eb7Smrg 	  poly_uint64 suboffset;
187810d565efSmrg 	  if (!byte_offset
1879*c7a68eb7Smrg 	      || !poly_int_tree_p (byte_offset, &suboffset)
188010d565efSmrg 	      || !tree_fits_uhwi_p (bit_offset))
188110d565efSmrg 	    return -1;
188210d565efSmrg 
1883*c7a68eb7Smrg 	  offset += suboffset;
188410d565efSmrg 	  offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
188510d565efSmrg 
188610d565efSmrg 	  if (inner == NULL_TREE)
188710d565efSmrg 	    {
188810d565efSmrg 	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
188910d565efSmrg 		  < (unsigned int) align)
189010d565efSmrg 		return -1;
189110d565efSmrg 	      break;
189210d565efSmrg 	    }
189310d565efSmrg 	  else if (DECL_P (inner))
189410d565efSmrg 	    {
189510d565efSmrg 	      if (DECL_ALIGN (inner) < align)
189610d565efSmrg 		return -1;
189710d565efSmrg 	      break;
189810d565efSmrg 	    }
189910d565efSmrg 	  else if (TREE_CODE (inner) != COMPONENT_REF)
190010d565efSmrg 	    return -1;
190110d565efSmrg 	  expr = inner;
190210d565efSmrg 	}
190310d565efSmrg     }
190410d565efSmrg   else
190510d565efSmrg     return -1;
190610d565efSmrg 
1907*c7a68eb7Smrg   HOST_WIDE_INT misalign;
1908*c7a68eb7Smrg   if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
1909*c7a68eb7Smrg     return -1;
1910*c7a68eb7Smrg   return misalign;
191110d565efSmrg }
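
/* Illustrative worked example, not part of emit-rtl.c: if MEM_EXPR (mem) is
   a decl whose DECL_ALIGN is at least 32 bits and MEM_OFFSET (mem) is 6,
   then get_mem_align_offset (mem, 32) returns 2, i.e. the address minus
   2 bytes is 32-bit aligned.  */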
191210d565efSmrg 
191310d565efSmrg /* Given REF (a MEM) and T, either the type of X or the expression
191410d565efSmrg    corresponding to REF, set the memory attributes.  OBJECTP is nonzero
191510d565efSmrg    if we are making a new object of this type.  BITPOS is nonzero if
191610d565efSmrg    there is an offset outstanding on T that will be applied later.  */
191710d565efSmrg 
191810d565efSmrg void
191910d565efSmrg set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1920*c7a68eb7Smrg 				 poly_int64 bitpos)
192110d565efSmrg {
1922*c7a68eb7Smrg   poly_int64 apply_bitpos = 0;
192310d565efSmrg   tree type;
192410d565efSmrg   struct mem_attrs attrs, *defattrs, *refattrs;
192510d565efSmrg   addr_space_t as;
192610d565efSmrg 
192710d565efSmrg   /* It can happen that type_for_mode was given a mode for which there
192810d565efSmrg      is no language-level type.  In which case it returns NULL, which
192910d565efSmrg      we can see here.  */
193010d565efSmrg   if (t == NULL_TREE)
193110d565efSmrg     return;
193210d565efSmrg 
193310d565efSmrg   type = TYPE_P (t) ? t : TREE_TYPE (t);
193410d565efSmrg   if (type == error_mark_node)
193510d565efSmrg     return;
193610d565efSmrg 
193710d565efSmrg   /* If we have already set DECL_RTL = ref, get_alias_set will get the
193810d565efSmrg      wrong answer, as it assumes that DECL_RTL already has the right alias
193910d565efSmrg      info.  Callers should not set DECL_RTL until after the call to
194010d565efSmrg      set_mem_attributes.  */
194110d565efSmrg   gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
194210d565efSmrg 
194310d565efSmrg   /* Get the alias set from the expression or type (perhaps using a
194410d565efSmrg      front-end routine) and use it.  */
194510d565efSmrg   attrs.alias = get_alias_set (t);
194610d565efSmrg 
194710d565efSmrg   MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
194810d565efSmrg   MEM_POINTER (ref) = POINTER_TYPE_P (type);
194910d565efSmrg 
195010d565efSmrg   /* Default values from pre-existing memory attributes if present.  */
195110d565efSmrg   refattrs = MEM_ATTRS (ref);
195210d565efSmrg   if (refattrs)
195310d565efSmrg     {
195410d565efSmrg       /* ??? Can this ever happen?  Calling this routine on a MEM that
195510d565efSmrg 	 already carries memory attributes should probably be invalid.  */
195610d565efSmrg       attrs.expr = refattrs->expr;
195710d565efSmrg       attrs.offset_known_p = refattrs->offset_known_p;
195810d565efSmrg       attrs.offset = refattrs->offset;
195910d565efSmrg       attrs.size_known_p = refattrs->size_known_p;
196010d565efSmrg       attrs.size = refattrs->size;
196110d565efSmrg       attrs.align = refattrs->align;
196210d565efSmrg     }
196310d565efSmrg 
196410d565efSmrg   /* Otherwise, default values from the mode of the MEM reference.  */
196510d565efSmrg   else
196610d565efSmrg     {
196710d565efSmrg       defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
196810d565efSmrg       gcc_assert (!defattrs->expr);
196910d565efSmrg       gcc_assert (!defattrs->offset_known_p);
197010d565efSmrg 
197110d565efSmrg       /* Respect mode size.  */
197210d565efSmrg       attrs.size_known_p = defattrs->size_known_p;
197310d565efSmrg       attrs.size = defattrs->size;
197410d565efSmrg       /* ??? Is this really necessary?  We probably should always get
197510d565efSmrg 	 the size from the type below.  */
197610d565efSmrg 
197710d565efSmrg       /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
197810d565efSmrg          if T is an object, always compute the object alignment below.  */
197910d565efSmrg       if (TYPE_P (t))
198010d565efSmrg 	attrs.align = defattrs->align;
198110d565efSmrg       else
198210d565efSmrg 	attrs.align = BITS_PER_UNIT;
198310d565efSmrg       /* ??? If T is a type, respecting mode alignment may *also* be wrong
198410d565efSmrg 	 e.g. if the type carries an alignment attribute.  Should we be
198510d565efSmrg 	 able to simply always use TYPE_ALIGN?  */
198610d565efSmrg     }
198710d565efSmrg 
198810d565efSmrg   /* We can set the alignment from the type if we are making an object or if
198910d565efSmrg      this is an INDIRECT_REF.  */
199010d565efSmrg   if (objectp || TREE_CODE (t) == INDIRECT_REF)
199110d565efSmrg     attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
199210d565efSmrg 
199310d565efSmrg   /* If the size is known, we can set that.  */
199410d565efSmrg   tree new_size = TYPE_SIZE_UNIT (type);
199510d565efSmrg 
199610d565efSmrg   /* The address-space is that of the type.  */
199710d565efSmrg   as = TYPE_ADDR_SPACE (type);
199810d565efSmrg 
199910d565efSmrg   /* If T is not a type, we may be able to deduce some more information about
200010d565efSmrg      the expression.  */
200110d565efSmrg   if (! TYPE_P (t))
200210d565efSmrg     {
200310d565efSmrg       tree base;
200410d565efSmrg 
200510d565efSmrg       if (TREE_THIS_VOLATILE (t))
200610d565efSmrg 	MEM_VOLATILE_P (ref) = 1;
200710d565efSmrg 
200810d565efSmrg       /* Now remove any conversions: they don't change what the underlying
200910d565efSmrg 	 object is.  Likewise for SAVE_EXPR.  */
201010d565efSmrg       while (CONVERT_EXPR_P (t)
201110d565efSmrg 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
201210d565efSmrg 	     || TREE_CODE (t) == SAVE_EXPR)
201310d565efSmrg 	t = TREE_OPERAND (t, 0);
201410d565efSmrg 
201510d565efSmrg       /* Note whether this expression can trap.  */
201610d565efSmrg       MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
201710d565efSmrg 
201810d565efSmrg       base = get_base_address (t);
201910d565efSmrg       if (base)
202010d565efSmrg 	{
202110d565efSmrg 	  if (DECL_P (base)
202210d565efSmrg 	      && TREE_READONLY (base)
202310d565efSmrg 	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
202410d565efSmrg 	      && !TREE_THIS_VOLATILE (base))
202510d565efSmrg 	    MEM_READONLY_P (ref) = 1;
202610d565efSmrg 
202710d565efSmrg 	  /* Mark static const strings readonly as well.  */
202810d565efSmrg 	  if (TREE_CODE (base) == STRING_CST
202910d565efSmrg 	      && TREE_READONLY (base)
203010d565efSmrg 	      && TREE_STATIC (base))
203110d565efSmrg 	    MEM_READONLY_P (ref) = 1;
203210d565efSmrg 
203310d565efSmrg 	  /* Address-space information is on the base object.  */
203410d565efSmrg 	  if (TREE_CODE (base) == MEM_REF
203510d565efSmrg 	      || TREE_CODE (base) == TARGET_MEM_REF)
203610d565efSmrg 	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
203710d565efSmrg 								      0))));
203810d565efSmrg 	  else
203910d565efSmrg 	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
204010d565efSmrg 	}
204110d565efSmrg 
204210d565efSmrg       /* If this expression uses its parent's alias set, mark it such
204310d565efSmrg 	 that we won't change it.  */
204410d565efSmrg       if (component_uses_parent_alias_set_from (t) != NULL_TREE)
204510d565efSmrg 	MEM_KEEP_ALIAS_SET_P (ref) = 1;
204610d565efSmrg 
204710d565efSmrg       /* If this is a decl, set the attributes of the MEM from it.  */
204810d565efSmrg       if (DECL_P (t))
204910d565efSmrg 	{
205010d565efSmrg 	  attrs.expr = t;
205110d565efSmrg 	  attrs.offset_known_p = true;
205210d565efSmrg 	  attrs.offset = 0;
205310d565efSmrg 	  apply_bitpos = bitpos;
205410d565efSmrg 	  new_size = DECL_SIZE_UNIT (t);
205510d565efSmrg 	}
205610d565efSmrg 
205710d565efSmrg       /* ???  If we end up with a constant here, do record a MEM_EXPR.  */
205810d565efSmrg       else if (CONSTANT_CLASS_P (t))
205910d565efSmrg 	;
206010d565efSmrg 
206110d565efSmrg       /* If this is a field reference, record it.  */
206210d565efSmrg       else if (TREE_CODE (t) == COMPONENT_REF)
206310d565efSmrg 	{
206410d565efSmrg 	  attrs.expr = t;
206510d565efSmrg 	  attrs.offset_known_p = true;
206610d565efSmrg 	  attrs.offset = 0;
206710d565efSmrg 	  apply_bitpos = bitpos;
206810d565efSmrg 	  if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
206910d565efSmrg 	    new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
207010d565efSmrg 	}
207110d565efSmrg 
207210d565efSmrg       /* If this is an array reference, look for an outer field reference.  */
207310d565efSmrg       else if (TREE_CODE (t) == ARRAY_REF)
207410d565efSmrg 	{
207510d565efSmrg 	  tree off_tree = size_zero_node;
207610d565efSmrg 	  /* We can't modify t, because we use it at the end of the
207710d565efSmrg 	     function.  */
207810d565efSmrg 	  tree t2 = t;
207910d565efSmrg 
208010d565efSmrg 	  do
208110d565efSmrg 	    {
208210d565efSmrg 	      tree index = TREE_OPERAND (t2, 1);
208310d565efSmrg 	      tree low_bound = array_ref_low_bound (t2);
208410d565efSmrg 	      tree unit_size = array_ref_element_size (t2);
208510d565efSmrg 
208610d565efSmrg 	      /* We assume all arrays have sizes that are a multiple of a byte.
208710d565efSmrg 		 First subtract the lower bound, if any, in the type of the
208810d565efSmrg 		 index, then convert to sizetype and multiply by the size of
208910d565efSmrg 		 the array element.  */
209010d565efSmrg 	      if (! integer_zerop (low_bound))
209110d565efSmrg 		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
209210d565efSmrg 				     index, low_bound);
209310d565efSmrg 
209410d565efSmrg 	      off_tree = size_binop (PLUS_EXPR,
209510d565efSmrg 				     size_binop (MULT_EXPR,
209610d565efSmrg 						 fold_convert (sizetype,
209710d565efSmrg 							       index),
209810d565efSmrg 						 unit_size),
209910d565efSmrg 				     off_tree);
210010d565efSmrg 	      t2 = TREE_OPERAND (t2, 0);
210110d565efSmrg 	    }
210210d565efSmrg 	  while (TREE_CODE (t2) == ARRAY_REF);
210310d565efSmrg 
210410d565efSmrg 	  if (DECL_P (t2)
2105*c7a68eb7Smrg 	      || (TREE_CODE (t2) == COMPONENT_REF
2106*c7a68eb7Smrg 		  /* For trailing arrays t2 doesn't have a size that
2107*c7a68eb7Smrg 		     covers all valid accesses.  */
2108*c7a68eb7Smrg 		  && ! array_at_struct_end_p (t)))
210910d565efSmrg 	    {
211010d565efSmrg 	      attrs.expr = t2;
211110d565efSmrg 	      attrs.offset_known_p = false;
2112*c7a68eb7Smrg 	      if (poly_int_tree_p (off_tree, &attrs.offset))
211310d565efSmrg 		{
211410d565efSmrg 		  attrs.offset_known_p = true;
211510d565efSmrg 		  apply_bitpos = bitpos;
211610d565efSmrg 		}
211710d565efSmrg 	    }
211810d565efSmrg 	  /* Else do not record a MEM_EXPR.  */
211910d565efSmrg 	}
212010d565efSmrg 
212110d565efSmrg       /* If this is an indirect reference, record it.  */
212210d565efSmrg       else if (TREE_CODE (t) == MEM_REF
212310d565efSmrg 	       || TREE_CODE (t) == TARGET_MEM_REF)
212410d565efSmrg 	{
212510d565efSmrg 	  attrs.expr = t;
212610d565efSmrg 	  attrs.offset_known_p = true;
212710d565efSmrg 	  attrs.offset = 0;
212810d565efSmrg 	  apply_bitpos = bitpos;
212910d565efSmrg 	}
213010d565efSmrg 
213110d565efSmrg       /* Compute the alignment.  */
213210d565efSmrg       unsigned int obj_align;
213310d565efSmrg       unsigned HOST_WIDE_INT obj_bitpos;
213410d565efSmrg       get_object_alignment_1 (t, &obj_align, &obj_bitpos);
2135*c7a68eb7Smrg       unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
2136*c7a68eb7Smrg       if (diff_align != 0)
2137*c7a68eb7Smrg 	obj_align = MIN (obj_align, diff_align);
213810d565efSmrg       attrs.align = MAX (attrs.align, obj_align);
213910d565efSmrg     }
214010d565efSmrg 
2141*c7a68eb7Smrg   poly_uint64 const_size;
2142*c7a68eb7Smrg   if (poly_int_tree_p (new_size, &const_size))
214310d565efSmrg     {
214410d565efSmrg       attrs.size_known_p = true;
2145*c7a68eb7Smrg       attrs.size = const_size;
214610d565efSmrg     }
214710d565efSmrg 
214810d565efSmrg   /* If we modified OFFSET based on T, then subtract the outstanding
214910d565efSmrg      bit position offset.  Similarly, increase the size of the accessed
215010d565efSmrg      object to contain the negative offset.  */
2151*c7a68eb7Smrg   if (maybe_ne (apply_bitpos, 0))
215210d565efSmrg     {
215310d565efSmrg       gcc_assert (attrs.offset_known_p);
2154*c7a68eb7Smrg       poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
2155*c7a68eb7Smrg       attrs.offset -= bytepos;
215610d565efSmrg       if (attrs.size_known_p)
2157*c7a68eb7Smrg 	attrs.size += bytepos;
215810d565efSmrg     }
215910d565efSmrg 
216010d565efSmrg   /* Now set the attributes we computed above.  */
216110d565efSmrg   attrs.addrspace = as;
216210d565efSmrg   set_mem_attrs (ref, &attrs);
216310d565efSmrg }
216410d565efSmrg 
216510d565efSmrg void
216610d565efSmrg set_mem_attributes (rtx ref, tree t, int objectp)
216710d565efSmrg {
216810d565efSmrg   set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
216910d565efSmrg }
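
/* Illustrative sketch, not part of emit-rtl.c: the usual expander pattern is
   to build a bare MEM for a tree expression and let set_mem_attributes fill
   in MEM_EXPR, the alias set, alignment and size from it.  EXP and ADDR are
   hypothetical: a tree expression being expanded and an address rtx for it.  */

static rtx ATTRIBUTE_UNUSED
sketch_mem_for_tree (tree exp, rtx addr)
{
  rtx mem = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)), addr);
  set_mem_attributes (mem, exp, 0);
  return mem;
}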
217010d565efSmrg 
217110d565efSmrg /* Set the alias set of MEM to SET.  */
217210d565efSmrg 
217310d565efSmrg void
217410d565efSmrg set_mem_alias_set (rtx mem, alias_set_type set)
217510d565efSmrg {
217610d565efSmrg   /* If the new and old alias sets don't conflict, something is wrong.  */
217710d565efSmrg   gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2178*c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (mem));
217910d565efSmrg   attrs.alias = set;
218010d565efSmrg   set_mem_attrs (mem, &attrs);
218110d565efSmrg }
218210d565efSmrg 
218310d565efSmrg /* Set the address space of MEM to ADDRSPACE (target-defined).  */
218410d565efSmrg 
218510d565efSmrg void
218610d565efSmrg set_mem_addr_space (rtx mem, addr_space_t addrspace)
218710d565efSmrg {
2188*c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (mem));
218910d565efSmrg   attrs.addrspace = addrspace;
219010d565efSmrg   set_mem_attrs (mem, &attrs);
219110d565efSmrg }
219210d565efSmrg 
219310d565efSmrg /* Set the alignment of MEM to ALIGN bits.  */
219410d565efSmrg 
219510d565efSmrg void
219610d565efSmrg set_mem_align (rtx mem, unsigned int align)
219710d565efSmrg {
2198*c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (mem));
219910d565efSmrg   attrs.align = align;
220010d565efSmrg   set_mem_attrs (mem, &attrs);
220110d565efSmrg }
220210d565efSmrg 
220310d565efSmrg /* Set the expr for MEM to EXPR.  */
220410d565efSmrg 
220510d565efSmrg void
220610d565efSmrg set_mem_expr (rtx mem, tree expr)
220710d565efSmrg {
2208*c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (mem));
220910d565efSmrg   attrs.expr = expr;
221010d565efSmrg   set_mem_attrs (mem, &attrs);
221110d565efSmrg }
221210d565efSmrg 
221310d565efSmrg /* Set the offset of MEM to OFFSET.  */
221410d565efSmrg 
221510d565efSmrg void
2216*c7a68eb7Smrg set_mem_offset (rtx mem, poly_int64 offset)
221710d565efSmrg {
2218*c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (mem));
221910d565efSmrg   attrs.offset_known_p = true;
222010d565efSmrg   attrs.offset = offset;
222110d565efSmrg   set_mem_attrs (mem, &attrs);
222210d565efSmrg }
222310d565efSmrg 
222410d565efSmrg /* Clear the offset of MEM.  */
222510d565efSmrg 
222610d565efSmrg void
222710d565efSmrg clear_mem_offset (rtx mem)
222810d565efSmrg {
2229*c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (mem));
223010d565efSmrg   attrs.offset_known_p = false;
223110d565efSmrg   set_mem_attrs (mem, &attrs);
223210d565efSmrg }
223310d565efSmrg 
223410d565efSmrg /* Set the size of MEM to SIZE.  */
223510d565efSmrg 
223610d565efSmrg void
2237*c7a68eb7Smrg set_mem_size (rtx mem, poly_int64 size)
223810d565efSmrg {
2239*c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (mem));
224010d565efSmrg   attrs.size_known_p = true;
224110d565efSmrg   attrs.size = size;
224210d565efSmrg   set_mem_attrs (mem, &attrs);
224310d565efSmrg }
224410d565efSmrg 
224510d565efSmrg /* Clear the size of MEM.  */
224610d565efSmrg 
224710d565efSmrg void
224810d565efSmrg clear_mem_size (rtx mem)
224910d565efSmrg {
2250*c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (mem));
225110d565efSmrg   attrs.size_known_p = false;
225210d565efSmrg   set_mem_attrs (mem, &attrs);
225310d565efSmrg }
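
/* Illustrative sketch, not part of emit-rtl.c: the setters above all follow
   the same copy-modify-install pattern on mem_attrs.  A caller adjusting an
   existing MEM (hypothetical helper) might do:  */

static void ATTRIBUTE_UNUSED
sketch_retag_mem (rtx mem)
{
  set_mem_align (mem, 64);   /* Known 64-bit alignment.  */
  set_mem_size (mem, 16);    /* 16 bytes are accessed.  */
  clear_mem_offset (mem);    /* The offset is no longer known.  */
}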
225410d565efSmrg 
225510d565efSmrg /* Return a memory reference like MEMREF, but with its mode changed to MODE
225610d565efSmrg    and its address changed to ADDR.  (VOIDmode means don't change the mode.
225710d565efSmrg    NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
225810d565efSmrg    returned memory location is required to be valid.  INPLACE is true if any
225910d565efSmrg    changes can be made directly to MEMREF or false if MEMREF must be treated
226010d565efSmrg    as immutable.
226110d565efSmrg 
226210d565efSmrg    The memory attributes are not changed.  */
226310d565efSmrg 
226410d565efSmrg static rtx
226510d565efSmrg change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
226610d565efSmrg 		  bool inplace)
226710d565efSmrg {
226810d565efSmrg   addr_space_t as;
226910d565efSmrg   rtx new_rtx;
227010d565efSmrg 
227110d565efSmrg   gcc_assert (MEM_P (memref));
227210d565efSmrg   as = MEM_ADDR_SPACE (memref);
227310d565efSmrg   if (mode == VOIDmode)
227410d565efSmrg     mode = GET_MODE (memref);
227510d565efSmrg   if (addr == 0)
227610d565efSmrg     addr = XEXP (memref, 0);
227710d565efSmrg   if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
227810d565efSmrg       && (!validate || memory_address_addr_space_p (mode, addr, as)))
227910d565efSmrg     return memref;
228010d565efSmrg 
228110d565efSmrg   /* Don't validate the address for LRA.  LRA can make the address
228210d565efSmrg      valid by itself in the most efficient way.  */
228310d565efSmrg   if (validate && !lra_in_progress)
228410d565efSmrg     {
228510d565efSmrg       if (reload_in_progress || reload_completed)
228610d565efSmrg 	gcc_assert (memory_address_addr_space_p (mode, addr, as));
228710d565efSmrg       else
228810d565efSmrg 	addr = memory_address_addr_space (mode, addr, as);
228910d565efSmrg     }
229010d565efSmrg 
229110d565efSmrg   if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
229210d565efSmrg     return memref;
229310d565efSmrg 
229410d565efSmrg   if (inplace)
229510d565efSmrg     {
229610d565efSmrg       XEXP (memref, 0) = addr;
229710d565efSmrg       return memref;
229810d565efSmrg     }
229910d565efSmrg 
230010d565efSmrg   new_rtx = gen_rtx_MEM (mode, addr);
230110d565efSmrg   MEM_COPY_ATTRIBUTES (new_rtx, memref);
230210d565efSmrg   return new_rtx;
230310d565efSmrg }
230410d565efSmrg 
230510d565efSmrg /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
230610d565efSmrg    way we are changing MEMREF, so we only preserve the alias set.  */
230710d565efSmrg 
230810d565efSmrg rtx
230910d565efSmrg change_address (rtx memref, machine_mode mode, rtx addr)
231010d565efSmrg {
231110d565efSmrg   rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
231210d565efSmrg   machine_mode mmode = GET_MODE (new_rtx);
2313*c7a68eb7Smrg   struct mem_attrs *defattrs;
231410d565efSmrg 
2315*c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (memref));
231610d565efSmrg   defattrs = mode_mem_attrs[(int) mmode];
231710d565efSmrg   attrs.expr = NULL_TREE;
231810d565efSmrg   attrs.offset_known_p = false;
231910d565efSmrg   attrs.size_known_p = defattrs->size_known_p;
232010d565efSmrg   attrs.size = defattrs->size;
232110d565efSmrg   attrs.align = defattrs->align;
232210d565efSmrg 
232310d565efSmrg   /* If there are no changes, just return the original memory reference.  */
232410d565efSmrg   if (new_rtx == memref)
232510d565efSmrg     {
232610d565efSmrg       if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
232710d565efSmrg 	return new_rtx;
232810d565efSmrg 
232910d565efSmrg       new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
233010d565efSmrg       MEM_COPY_ATTRIBUTES (new_rtx, memref);
233110d565efSmrg     }
233210d565efSmrg 
233310d565efSmrg   set_mem_attrs (new_rtx, &attrs);
233410d565efSmrg   return new_rtx;
233510d565efSmrg }
233610d565efSmrg 
233710d565efSmrg /* Return a memory reference like MEMREF, but with its mode changed
233810d565efSmrg    to MODE and its address offset by OFFSET bytes.  If VALIDATE is
233910d565efSmrg    nonzero, the memory address is forced to be valid.
234010d565efSmrg    If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
234110d565efSmrg    and the caller is responsible for adjusting MEMREF base register.
234210d565efSmrg    If ADJUST_OBJECT is zero, the underlying object associated with the
234310d565efSmrg    memory reference is left unchanged and the caller is responsible for
234410d565efSmrg    dealing with it.  Otherwise, if the new memory reference is outside
234510d565efSmrg    the underlying object, even partially, then the object is dropped.
234610d565efSmrg    SIZE, if nonzero, is the size of an access in cases where MODE
234710d565efSmrg    has no inherent size.  */
234810d565efSmrg 
234910d565efSmrg rtx
2350*c7a68eb7Smrg adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
235110d565efSmrg 		  int validate, int adjust_address, int adjust_object,
2352*c7a68eb7Smrg 		  poly_int64 size)
235310d565efSmrg {
235410d565efSmrg   rtx addr = XEXP (memref, 0);
235510d565efSmrg   rtx new_rtx;
2356*c7a68eb7Smrg   scalar_int_mode address_mode;
2357*c7a68eb7Smrg   struct mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
235810d565efSmrg   unsigned HOST_WIDE_INT max_align;
235910d565efSmrg #ifdef POINTERS_EXTEND_UNSIGNED
2360*c7a68eb7Smrg   scalar_int_mode pointer_mode
236110d565efSmrg     = targetm.addr_space.pointer_mode (attrs.addrspace);
236210d565efSmrg #endif
236310d565efSmrg 
236410d565efSmrg   /* VOIDmode means no mode change for change_address_1.  */
236510d565efSmrg   if (mode == VOIDmode)
236610d565efSmrg     mode = GET_MODE (memref);
236710d565efSmrg 
236810d565efSmrg   /* Take the size of non-BLKmode accesses from the mode.  */
236910d565efSmrg   defattrs = mode_mem_attrs[(int) mode];
237010d565efSmrg   if (defattrs->size_known_p)
237110d565efSmrg     size = defattrs->size;
237210d565efSmrg 
237310d565efSmrg   /* If there are no changes, just return the original memory reference.  */
2374*c7a68eb7Smrg   if (mode == GET_MODE (memref)
2375*c7a68eb7Smrg       && known_eq (offset, 0)
2376*c7a68eb7Smrg       && (known_eq (size, 0)
2377*c7a68eb7Smrg 	  || (attrs.size_known_p && known_eq (attrs.size, size)))
237810d565efSmrg       && (!validate || memory_address_addr_space_p (mode, addr,
237910d565efSmrg 						    attrs.addrspace)))
238010d565efSmrg     return memref;
238110d565efSmrg 
238210d565efSmrg   /* ??? Prefer to create garbage instead of creating shared rtl.
238310d565efSmrg      This may happen even if offset is nonzero -- consider
238410d565efSmrg      (plus (plus reg reg) const_int) -- so do this always.  */
238510d565efSmrg   addr = copy_rtx (addr);
238610d565efSmrg 
238710d565efSmrg   /* Convert a possibly large offset to a signed value within the
238810d565efSmrg      range of the target address space.  */
238910d565efSmrg   address_mode = get_address_mode (memref);
2390*c7a68eb7Smrg   offset = trunc_int_for_mode (offset, address_mode);
239110d565efSmrg 
239210d565efSmrg   if (adjust_address)
239310d565efSmrg     {
239410d565efSmrg       /* If MEMREF is a LO_SUM and the offset is within the alignment of the
239510d565efSmrg 	 object, we can merge it into the LO_SUM.  */
2396*c7a68eb7Smrg       if (GET_MODE (memref) != BLKmode
2397*c7a68eb7Smrg 	  && GET_CODE (addr) == LO_SUM
2398*c7a68eb7Smrg 	  && known_in_range_p (offset,
2399*c7a68eb7Smrg 			       0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
2400*c7a68eb7Smrg 				   / BITS_PER_UNIT)))
240110d565efSmrg 	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
240210d565efSmrg 			       plus_constant (address_mode,
240310d565efSmrg 					      XEXP (addr, 1), offset));
240410d565efSmrg #ifdef POINTERS_EXTEND_UNSIGNED
240510d565efSmrg       /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
240610d565efSmrg 	 in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
240710d565efSmrg 	 the fact that pointers are not allowed to overflow.  */
240810d565efSmrg       else if (POINTERS_EXTEND_UNSIGNED > 0
240910d565efSmrg 	       && GET_CODE (addr) == ZERO_EXTEND
241010d565efSmrg 	       && GET_MODE (XEXP (addr, 0)) == pointer_mode
2411*c7a68eb7Smrg 	       && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
241210d565efSmrg 	addr = gen_rtx_ZERO_EXTEND (address_mode,
241310d565efSmrg 				    plus_constant (pointer_mode,
241410d565efSmrg 						   XEXP (addr, 0), offset));
241510d565efSmrg #endif
241610d565efSmrg       else
241710d565efSmrg 	addr = plus_constant (address_mode, addr, offset);
241810d565efSmrg     }
241910d565efSmrg 
242010d565efSmrg   new_rtx = change_address_1 (memref, mode, addr, validate, false);
242110d565efSmrg 
242210d565efSmrg   /* If the address is a REG, change_address_1 rightfully returns memref,
242310d565efSmrg      but this would destroy memref's MEM_ATTRS.  */
2424*c7a68eb7Smrg   if (new_rtx == memref && maybe_ne (offset, 0))
242510d565efSmrg     new_rtx = copy_rtx (new_rtx);
242610d565efSmrg 
242710d565efSmrg   /* Conservatively drop the object if we don't know where we start from.  */
242810d565efSmrg   if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
242910d565efSmrg     {
243010d565efSmrg       attrs.expr = NULL_TREE;
243110d565efSmrg       attrs.alias = 0;
243210d565efSmrg     }
243310d565efSmrg 
243410d565efSmrg   /* Compute the new values of the memory attributes due to this adjustment.
243510d565efSmrg      We add the offsets and update the alignment.  */
243610d565efSmrg   if (attrs.offset_known_p)
243710d565efSmrg     {
243810d565efSmrg       attrs.offset += offset;
243910d565efSmrg 
244010d565efSmrg       /* Drop the object if the new left end is not within its bounds.  */
2441*c7a68eb7Smrg       if (adjust_object && maybe_lt (attrs.offset, 0))
244210d565efSmrg 	{
244310d565efSmrg 	  attrs.expr = NULL_TREE;
244410d565efSmrg 	  attrs.alias = 0;
244510d565efSmrg 	}
244610d565efSmrg     }
244710d565efSmrg 
244810d565efSmrg   /* Compute the new alignment by taking the MIN of the alignment and the
244910d565efSmrg      lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
245010d565efSmrg      is zero.  */
2451*c7a68eb7Smrg   if (maybe_ne (offset, 0))
245210d565efSmrg     {
2453*c7a68eb7Smrg       max_align = known_alignment (offset) * BITS_PER_UNIT;
245410d565efSmrg       attrs.align = MIN (attrs.align, max_align);
245510d565efSmrg     }
245610d565efSmrg 
2457*c7a68eb7Smrg   if (maybe_ne (size, 0))
245810d565efSmrg     {
245910d565efSmrg       /* Drop the object if the new right end is not within its bounds.  */
2460*c7a68eb7Smrg       if (adjust_object && maybe_gt (offset + size, attrs.size))
246110d565efSmrg 	{
246210d565efSmrg 	  attrs.expr = NULL_TREE;
246310d565efSmrg 	  attrs.alias = 0;
246410d565efSmrg 	}
246510d565efSmrg       attrs.size_known_p = true;
246610d565efSmrg       attrs.size = size;
246710d565efSmrg     }
246810d565efSmrg   else if (attrs.size_known_p)
246910d565efSmrg     {
247010d565efSmrg       gcc_assert (!adjust_object);
247110d565efSmrg       attrs.size -= offset;
247210d565efSmrg       /* ??? The store_by_pieces machinery generates negative sizes,
247310d565efSmrg 	 so don't assert for that here.  */
247410d565efSmrg     }
247510d565efSmrg 
247610d565efSmrg   set_mem_attrs (new_rtx, &attrs);
247710d565efSmrg 
247810d565efSmrg   return new_rtx;
247910d565efSmrg }
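
/* Illustrative sketch, not part of emit-rtl.c: callers normally reach
   adjust_address_1 through the adjust_address/adjust_address_nv wrappers
   (as operand_subword does above) rather than calling it directly.  The
   sketch splits a DImode MEM into its two 4-byte halves; which half holds
   the low-order word depends on endianness.  Hypothetical helper.  */

static void ATTRIBUTE_UNUSED
sketch_split_di_mem (rtx mem, rtx *word0, rtx *word1)
{
  *word0 = adjust_address_nv (mem, SImode, 0);
  *word1 = adjust_address_nv (mem, SImode, 4);
}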
248010d565efSmrg 
248110d565efSmrg /* Return a memory reference like MEMREF, but with its mode changed
248210d565efSmrg    to MODE and its address changed to ADDR, which is assumed to be
248310d565efSmrg    MEMREF offset by OFFSET bytes.  If VALIDATE is
248410d565efSmrg    nonzero, the memory address is forced to be valid.  */
248510d565efSmrg 
248610d565efSmrg rtx
248710d565efSmrg adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2488*c7a68eb7Smrg 			     poly_int64 offset, int validate)
248910d565efSmrg {
249010d565efSmrg   memref = change_address_1 (memref, VOIDmode, addr, validate, false);
249110d565efSmrg   return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
249210d565efSmrg }
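
/* A minimal usage sketch, assuming MEM and ADDR already exist: a caller
   that has rewritten the address of MEM into ADDR and knows the access now
   lies 4 bytes further on might write

     rtx moved = adjust_automodify_address_1 (mem, SImode, addr, 4, 1);

   so the attributes are adjusted for the offset while the new address form
   is installed.  */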
249310d565efSmrg 
249410d565efSmrg /* Return a memory reference like MEMREF, but whose address is changed by
249510d565efSmrg    adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
249610d565efSmrg    known to be in OFFSET (possibly 1).  */
249710d565efSmrg 
249810d565efSmrg rtx
249910d565efSmrg offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
250010d565efSmrg {
250110d565efSmrg   rtx new_rtx, addr = XEXP (memref, 0);
250210d565efSmrg   machine_mode address_mode;
2503*c7a68eb7Smrg   struct mem_attrs *defattrs;
250410d565efSmrg 
2505*c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (memref));
250610d565efSmrg   address_mode = get_address_mode (memref);
250710d565efSmrg   new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
250810d565efSmrg 
250910d565efSmrg   /* At this point we don't know _why_ the address is invalid.  It
251010d565efSmrg      could have secondary memory references, multiplies or anything.
251110d565efSmrg 
251210d565efSmrg      However, if we did go and rearrange things, we can wind up not
251310d565efSmrg      being able to recognize the magic around pic_offset_table_rtx.
251410d565efSmrg      This stuff is fragile, and is yet another example of why it is
251510d565efSmrg      bad to expose PIC machinery too early.  */
251610d565efSmrg   if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
251710d565efSmrg 				     attrs.addrspace)
251810d565efSmrg       && GET_CODE (addr) == PLUS
251910d565efSmrg       && XEXP (addr, 0) == pic_offset_table_rtx)
252010d565efSmrg     {
252110d565efSmrg       addr = force_reg (GET_MODE (addr), addr);
252210d565efSmrg       new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
252310d565efSmrg     }
252410d565efSmrg 
252510d565efSmrg   update_temp_slot_address (XEXP (memref, 0), new_rtx);
252610d565efSmrg   new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
252710d565efSmrg 
252810d565efSmrg   /* If there are no changes, just return the original memory reference.  */
252910d565efSmrg   if (new_rtx == memref)
253010d565efSmrg     return new_rtx;
253110d565efSmrg 
253210d565efSmrg   /* Update the alignment to reflect the offset.  Reset the offset, which
253310d565efSmrg      we don't know.  */
253410d565efSmrg   defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
253510d565efSmrg   attrs.offset_known_p = false;
253610d565efSmrg   attrs.size_known_p = defattrs->size_known_p;
253710d565efSmrg   attrs.size = defattrs->size;
253810d565efSmrg   attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
253910d565efSmrg   set_mem_attrs (new_rtx, &attrs);
254010d565efSmrg   return new_rtx;
254110d565efSmrg }
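
/* A minimal usage sketch, assuming MEM and IDX already exist: a caller that
   indexes MEM by a register IDX known to be a multiple of 8 passes that
   factor as POW2 so the alignment information survives:

     rtx elt = offset_address (mem, idx, 8);

   The result keeps MIN (old alignment, 8 * BITS_PER_UNIT) and forgets the
   constant offset, as computed above.  */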
254210d565efSmrg 
254310d565efSmrg /* Return a memory reference like MEMREF, but with its address changed to
254410d565efSmrg    ADDR.  The caller is asserting that the actual piece of memory pointed
254510d565efSmrg    to is the same, just the form of the address is being changed, such as
254610d565efSmrg    by putting something into a register.  INPLACE is true if any changes
254710d565efSmrg    can be made directly to MEMREF or false if MEMREF must be treated as
254810d565efSmrg    immutable.  */
254910d565efSmrg 
255010d565efSmrg rtx
255110d565efSmrg replace_equiv_address (rtx memref, rtx addr, bool inplace)
255210d565efSmrg {
255310d565efSmrg   /* change_address_1 copies the memory attribute structure without change
255410d565efSmrg      and that's exactly what we want here.  */
255510d565efSmrg   update_temp_slot_address (XEXP (memref, 0), addr);
255610d565efSmrg   return change_address_1 (memref, VOIDmode, addr, 1, inplace);
255710d565efSmrg }
255810d565efSmrg 
255910d565efSmrg /* Likewise, but the reference is not required to be valid.  */
256010d565efSmrg 
256110d565efSmrg rtx
256210d565efSmrg replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
256310d565efSmrg {
256410d565efSmrg   return change_address_1 (memref, VOIDmode, addr, 0, inplace);
256510d565efSmrg }
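
/* A minimal usage sketch: the classic use of this pair is to change only
   the form of an address, e.g. forcing it into a register (force_reg as
   used earlier in this file; Pmode is assumed to be the pointer mode):

     mem = replace_equiv_address (mem, force_reg (Pmode, XEXP (mem, 0)),
				  false);

   replace_equiv_address_nv does the same without validating the result.  */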
256610d565efSmrg 
256710d565efSmrg /* Return a memory reference like MEMREF, but with its mode widened to
256810d565efSmrg    MODE and offset by OFFSET.  This would be used by targets that e.g.
256910d565efSmrg    cannot issue QImode memory operations and have to use SImode memory
257010d565efSmrg    operations plus masking logic.  */
257110d565efSmrg 
257210d565efSmrg rtx
2573*c7a68eb7Smrg widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
257410d565efSmrg {
257510d565efSmrg   rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2576*c7a68eb7Smrg   poly_uint64 size = GET_MODE_SIZE (mode);
257710d565efSmrg 
257810d565efSmrg   /* If there are no changes, just return the original memory reference.  */
257910d565efSmrg   if (new_rtx == memref)
258010d565efSmrg     return new_rtx;
258110d565efSmrg 
2582*c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (new_rtx));
258310d565efSmrg 
258410d565efSmrg   /* If we don't know what offset we were at within the expression, then
258510d565efSmrg      we can't know if we've overstepped the bounds.  */
258610d565efSmrg   if (! attrs.offset_known_p)
258710d565efSmrg     attrs.expr = NULL_TREE;
258810d565efSmrg 
258910d565efSmrg   while (attrs.expr)
259010d565efSmrg     {
259110d565efSmrg       if (TREE_CODE (attrs.expr) == COMPONENT_REF)
259210d565efSmrg 	{
259310d565efSmrg 	  tree field = TREE_OPERAND (attrs.expr, 1);
259410d565efSmrg 	  tree offset = component_ref_field_offset (attrs.expr);
259510d565efSmrg 
259610d565efSmrg 	  if (! DECL_SIZE_UNIT (field))
259710d565efSmrg 	    {
259810d565efSmrg 	      attrs.expr = NULL_TREE;
259910d565efSmrg 	      break;
260010d565efSmrg 	    }
260110d565efSmrg 
260210d565efSmrg 	  /* Is the field at least as large as the access?  If so, ok,
260310d565efSmrg 	     otherwise strip back to the containing structure.  */
2604*c7a68eb7Smrg 	  if (poly_int_tree_p (DECL_SIZE_UNIT (field))
2605*c7a68eb7Smrg 	      && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
2606*c7a68eb7Smrg 	      && known_ge (attrs.offset, 0))
260710d565efSmrg 	    break;
260810d565efSmrg 
2609*c7a68eb7Smrg 	  poly_uint64 suboffset;
2610*c7a68eb7Smrg 	  if (!poly_int_tree_p (offset, &suboffset))
261110d565efSmrg 	    {
261210d565efSmrg 	      attrs.expr = NULL_TREE;
261310d565efSmrg 	      break;
261410d565efSmrg 	    }
261510d565efSmrg 
261610d565efSmrg 	  attrs.expr = TREE_OPERAND (attrs.expr, 0);
2617*c7a68eb7Smrg 	  attrs.offset += suboffset;
261810d565efSmrg 	  attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
261910d565efSmrg 			   / BITS_PER_UNIT);
262010d565efSmrg 	}
262110d565efSmrg       /* Similarly for the decl.  */
262210d565efSmrg       else if (DECL_P (attrs.expr)
262310d565efSmrg 	       && DECL_SIZE_UNIT (attrs.expr)
2624*c7a68eb7Smrg 	       && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
2625*c7a68eb7Smrg 	       && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
2626*c7a68eb7Smrg 			   size)
2627*c7a68eb7Smrg 	       && known_ge (attrs.offset, 0))
262810d565efSmrg 	break;
262910d565efSmrg       else
263010d565efSmrg 	{
263110d565efSmrg 	  /* The widened memory access overflows the expression, which means
263210d565efSmrg 	     that it could alias another expression.  Zap it.  */
263310d565efSmrg 	  attrs.expr = NULL_TREE;
263410d565efSmrg 	  break;
263510d565efSmrg 	}
263610d565efSmrg     }
263710d565efSmrg 
263810d565efSmrg   if (! attrs.expr)
263910d565efSmrg     attrs.offset_known_p = false;
264010d565efSmrg 
264110d565efSmrg   /* The widened memory may alias other stuff, so zap the alias set.  */
264210d565efSmrg   /* ??? Maybe use get_alias_set on any remaining expression.  */
264310d565efSmrg   attrs.alias = 0;
264410d565efSmrg   attrs.size_known_p = true;
264510d565efSmrg   attrs.size = size;
264610d565efSmrg   set_mem_attrs (new_rtx, &attrs);
264710d565efSmrg   return new_rtx;
264810d565efSmrg }
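
/* A minimal usage sketch, assuming MEM is an existing QImode reference on
   a target that cannot issue QImode loads directly:

     rtx wide = widen_memory_access (mem, SImode, 0);

   The widened reference gets size GET_MODE_SIZE (SImode) and alias set 0,
   as computed above; the caller is responsible for the masking logic.  */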
264910d565efSmrg 
265010d565efSmrg /* A fake decl that is used as the MEM_EXPR of spill slots.  */
265110d565efSmrg static GTY(()) tree spill_slot_decl;
265210d565efSmrg 
265310d565efSmrg tree
265410d565efSmrg get_spill_slot_decl (bool force_build_p)
265510d565efSmrg {
265610d565efSmrg   tree d = spill_slot_decl;
265710d565efSmrg   rtx rd;
265810d565efSmrg 
265910d565efSmrg   if (d || !force_build_p)
266010d565efSmrg     return d;
266110d565efSmrg 
266210d565efSmrg   d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
266310d565efSmrg 		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
266410d565efSmrg   DECL_ARTIFICIAL (d) = 1;
266510d565efSmrg   DECL_IGNORED_P (d) = 1;
266610d565efSmrg   TREE_USED (d) = 1;
266710d565efSmrg   spill_slot_decl = d;
266810d565efSmrg 
266910d565efSmrg   rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
267010d565efSmrg   MEM_NOTRAP_P (rd) = 1;
2671*c7a68eb7Smrg   mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
267210d565efSmrg   attrs.alias = new_alias_set ();
267310d565efSmrg   attrs.expr = d;
267410d565efSmrg   set_mem_attrs (rd, &attrs);
267510d565efSmrg   SET_DECL_RTL (d, rd);
267610d565efSmrg 
267710d565efSmrg   return d;
267810d565efSmrg }
267910d565efSmrg 
268010d565efSmrg /* Given MEM, a result from assign_stack_local, fill in the memory
268110d565efSmrg    attributes as appropriate for a register allocator spill slot.
268210d565efSmrg    These slots are not aliasable by other memory.  We arrange for
268310d565efSmrg    them all to use a single MEM_EXPR, so that the aliasing code can
268410d565efSmrg    work properly in the case of shared spill slots.  */
268510d565efSmrg 
268610d565efSmrg void
268710d565efSmrg set_mem_attrs_for_spill (rtx mem)
268810d565efSmrg {
268910d565efSmrg   rtx addr;
269010d565efSmrg 
2691*c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (mem));
269210d565efSmrg   attrs.expr = get_spill_slot_decl (true);
269310d565efSmrg   attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
269410d565efSmrg   attrs.addrspace = ADDR_SPACE_GENERIC;
269510d565efSmrg 
269610d565efSmrg   /* We expect the incoming memory to be of the form:
269710d565efSmrg 	(mem:MODE (plus (reg sfp) (const_int offset)))
269810d565efSmrg      with perhaps the plus missing for offset = 0.  */
269910d565efSmrg   addr = XEXP (mem, 0);
270010d565efSmrg   attrs.offset_known_p = true;
2701*c7a68eb7Smrg   strip_offset (addr, &attrs.offset);
270210d565efSmrg 
270310d565efSmrg   set_mem_attrs (mem, &attrs);
270410d565efSmrg   MEM_NOTRAP_P (mem) = 1;
270510d565efSmrg }
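
/* A minimal usage sketch, with assign_stack_local assumed to be the stack
   slot allocator from function.c: a spill pass that has just created a
   slot would tag it like so

     rtx slot = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
     set_mem_attrs_for_spill (slot);

   after which every spill slot shares the single %sfp MEM_EXPR built by
   get_spill_slot_decl.  */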
270610d565efSmrg 
270710d565efSmrg /* Return a newly created CODE_LABEL rtx with a unique label number.  */
270810d565efSmrg 
270910d565efSmrg rtx_code_label *
271010d565efSmrg gen_label_rtx (void)
271110d565efSmrg {
271210d565efSmrg   return as_a <rtx_code_label *> (
271310d565efSmrg 	    gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
271410d565efSmrg 				NULL, label_num++, NULL));
271510d565efSmrg }
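
/* A minimal usage sketch, with emit_label assumed to be the standard label
   emitter declared alongside this file: the label is created first and
   emitted later at its target position.

     rtx_code_label *done = gen_label_rtx ();
     ... emit the code to be skipped ...
     emit_label (done);  */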
271610d565efSmrg 
271710d565efSmrg /* For procedure integration.  */
271810d565efSmrg 
271910d565efSmrg /* Install new pointers to the first and last insns in the chain.
272010d565efSmrg    Also, set cur_insn_uid to one higher than the last in use.
272110d565efSmrg    Used for an inline-procedure after copying the insn chain.  */
272210d565efSmrg 
272310d565efSmrg void
272410d565efSmrg set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
272510d565efSmrg {
272610d565efSmrg   rtx_insn *insn;
272710d565efSmrg 
272810d565efSmrg   set_first_insn (first);
272910d565efSmrg   set_last_insn (last);
273010d565efSmrg   cur_insn_uid = 0;
273110d565efSmrg 
273210d565efSmrg   if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
273310d565efSmrg     {
273410d565efSmrg       int debug_count = 0;
273510d565efSmrg 
273610d565efSmrg       cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
273710d565efSmrg       cur_debug_insn_uid = 0;
273810d565efSmrg 
273910d565efSmrg       for (insn = first; insn; insn = NEXT_INSN (insn))
274010d565efSmrg 	if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
274110d565efSmrg 	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
274210d565efSmrg 	else
274310d565efSmrg 	  {
274410d565efSmrg 	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
274510d565efSmrg 	    if (DEBUG_INSN_P (insn))
274610d565efSmrg 	      debug_count++;
274710d565efSmrg 	  }
274810d565efSmrg 
274910d565efSmrg       if (debug_count)
275010d565efSmrg 	cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
275110d565efSmrg       else
275210d565efSmrg 	cur_debug_insn_uid++;
275310d565efSmrg     }
275410d565efSmrg   else
275510d565efSmrg     for (insn = first; insn; insn = NEXT_INSN (insn))
275610d565efSmrg       cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
275710d565efSmrg 
275810d565efSmrg   cur_insn_uid++;
275910d565efSmrg }
276010d565efSmrg 
276110d565efSmrg /* Go through all the RTL insn bodies and copy any invalid shared
276210d565efSmrg    structure.  This routine should only be called once.  */
276310d565efSmrg 
276410d565efSmrg static void
276510d565efSmrg unshare_all_rtl_1 (rtx_insn *insn)
276610d565efSmrg {
276710d565efSmrg   /* Unshare just about everything else.  */
276810d565efSmrg   unshare_all_rtl_in_chain (insn);
276910d565efSmrg 
277010d565efSmrg   /* Make sure the addresses of stack slots found outside the insn chain
277110d565efSmrg      (such as, in DECL_RTL of a variable) are not shared
277210d565efSmrg      with the insn chain.
277310d565efSmrg 
277410d565efSmrg      This special care is necessary when the stack slot MEM does not
277510d565efSmrg      actually appear in the insn chain.  If it does appear, its address
277610d565efSmrg      is unshared from all else at that point.  */
277710d565efSmrg   unsigned int i;
277810d565efSmrg   rtx temp;
277910d565efSmrg   FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
278010d565efSmrg     (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
278110d565efSmrg }
278210d565efSmrg 
278310d565efSmrg /* Go through all the RTL insn bodies and copy any invalid shared
278410d565efSmrg    structure, again.  This is a fairly expensive thing to do so it
278510d565efSmrg    should be done sparingly.  */
278610d565efSmrg 
278710d565efSmrg void
278810d565efSmrg unshare_all_rtl_again (rtx_insn *insn)
278910d565efSmrg {
279010d565efSmrg   rtx_insn *p;
279110d565efSmrg   tree decl;
279210d565efSmrg 
279310d565efSmrg   for (p = insn; p; p = NEXT_INSN (p))
279410d565efSmrg     if (INSN_P (p))
279510d565efSmrg       {
279610d565efSmrg 	reset_used_flags (PATTERN (p));
279710d565efSmrg 	reset_used_flags (REG_NOTES (p));
279810d565efSmrg 	if (CALL_P (p))
279910d565efSmrg 	  reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
280010d565efSmrg       }
280110d565efSmrg 
280210d565efSmrg   /* Make sure that virtual stack slots are not shared.  */
280310d565efSmrg   set_used_decls (DECL_INITIAL (cfun->decl));
280410d565efSmrg 
280510d565efSmrg   /* Make sure that virtual parameters are not shared.  */
280610d565efSmrg   for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
280710d565efSmrg     set_used_flags (DECL_RTL (decl));
280810d565efSmrg 
280910d565efSmrg   rtx temp;
281010d565efSmrg   unsigned int i;
281110d565efSmrg   FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
281210d565efSmrg     reset_used_flags (temp);
281310d565efSmrg 
281410d565efSmrg   unshare_all_rtl_1 (insn);
281510d565efSmrg }
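
/* In outline (a sketch of the calls made above), the unsharing protocol is
   reset, mark, copy:

     reset_used_flags (PATTERN (insn));
     set_used_flags (DECL_RTL (decl));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   First clear the marks inside the insn chain, then mark rtxes that live
   outside it (such as DECL_RTLs) so that references to them from the chain
   get copied, then copy anything in the chain that is reached twice.  */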
281610d565efSmrg 
281710d565efSmrg unsigned int
281810d565efSmrg unshare_all_rtl (void)
281910d565efSmrg {
282010d565efSmrg   unshare_all_rtl_1 (get_insns ());
282110d565efSmrg 
282210d565efSmrg   for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
282310d565efSmrg     {
282410d565efSmrg       if (DECL_RTL_SET_P (decl))
282510d565efSmrg 	SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
282610d565efSmrg       DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
282710d565efSmrg     }
282810d565efSmrg 
282910d565efSmrg   return 0;
283010d565efSmrg }
283110d565efSmrg 
283210d565efSmrg 
283310d565efSmrg /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
283410d565efSmrg    Recursively does the same for subexpressions.  */
283510d565efSmrg 
283610d565efSmrg static void
283710d565efSmrg verify_rtx_sharing (rtx orig, rtx insn)
283810d565efSmrg {
283910d565efSmrg   rtx x = orig;
284010d565efSmrg   int i;
284110d565efSmrg   enum rtx_code code;
284210d565efSmrg   const char *format_ptr;
284310d565efSmrg 
284410d565efSmrg   if (x == 0)
284510d565efSmrg     return;
284610d565efSmrg 
284710d565efSmrg   code = GET_CODE (x);
284810d565efSmrg 
284910d565efSmrg   /* These types may be freely shared.  */
285010d565efSmrg 
285110d565efSmrg   switch (code)
285210d565efSmrg     {
285310d565efSmrg     case REG:
285410d565efSmrg     case DEBUG_EXPR:
285510d565efSmrg     case VALUE:
285610d565efSmrg     CASE_CONST_ANY:
285710d565efSmrg     case SYMBOL_REF:
285810d565efSmrg     case LABEL_REF:
285910d565efSmrg     case CODE_LABEL:
286010d565efSmrg     case PC:
286110d565efSmrg     case CC0:
286210d565efSmrg     case RETURN:
286310d565efSmrg     case SIMPLE_RETURN:
286410d565efSmrg     case SCRATCH:
286510d565efSmrg       /* A SCRATCH must be shared because each represents a distinct value.  */
286610d565efSmrg       return;
286710d565efSmrg     case CLOBBER:
286810d565efSmrg       /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
286910d565efSmrg          clobbers or clobbers of hard registers that originated as pseudos.
287010d565efSmrg          This is needed to allow safe register renaming.  */
287110d565efSmrg       if (REG_P (XEXP (x, 0))
287210d565efSmrg 	  && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
287310d565efSmrg 	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
287410d565efSmrg 	return;
287510d565efSmrg       break;
287610d565efSmrg 
287710d565efSmrg     case CONST:
287810d565efSmrg       if (shared_const_p (orig))
287910d565efSmrg 	return;
288010d565efSmrg       break;
288110d565efSmrg 
288210d565efSmrg     case MEM:
288310d565efSmrg       /* A MEM is allowed to be shared if its address is constant.  */
288410d565efSmrg       if (CONSTANT_ADDRESS_P (XEXP (x, 0))
288510d565efSmrg 	  || reload_completed || reload_in_progress)
288610d565efSmrg 	return;
288710d565efSmrg 
288810d565efSmrg       break;
288910d565efSmrg 
289010d565efSmrg     default:
289110d565efSmrg       break;
289210d565efSmrg     }
289310d565efSmrg 
289410d565efSmrg   /* This rtx may not be shared.  If it has already been seen,
289510d565efSmrg      replace it with a copy of itself.  */
289610d565efSmrg   if (flag_checking && RTX_FLAG (x, used))
289710d565efSmrg     {
289810d565efSmrg       error ("invalid rtl sharing found in the insn");
289910d565efSmrg       debug_rtx (insn);
290010d565efSmrg       error ("shared rtx");
290110d565efSmrg       debug_rtx (x);
290210d565efSmrg       internal_error ("internal consistency failure");
290310d565efSmrg     }
290410d565efSmrg   gcc_assert (!RTX_FLAG (x, used));
290510d565efSmrg 
290610d565efSmrg   RTX_FLAG (x, used) = 1;
290710d565efSmrg 
290810d565efSmrg   /* Now scan the subexpressions recursively.  */
290910d565efSmrg 
291010d565efSmrg   format_ptr = GET_RTX_FORMAT (code);
291110d565efSmrg 
291210d565efSmrg   for (i = 0; i < GET_RTX_LENGTH (code); i++)
291310d565efSmrg     {
291410d565efSmrg       switch (*format_ptr++)
291510d565efSmrg 	{
291610d565efSmrg 	case 'e':
291710d565efSmrg 	  verify_rtx_sharing (XEXP (x, i), insn);
291810d565efSmrg 	  break;
291910d565efSmrg 
292010d565efSmrg 	case 'E':
292110d565efSmrg 	  if (XVEC (x, i) != NULL)
292210d565efSmrg 	    {
292310d565efSmrg 	      int j;
292410d565efSmrg 	      int len = XVECLEN (x, i);
292510d565efSmrg 
292610d565efSmrg 	      for (j = 0; j < len; j++)
292710d565efSmrg 		{
292810d565efSmrg 		  /* We allow sharing of ASM_OPERANDS inside a single
292910d565efSmrg 		     instruction.  */
293010d565efSmrg 		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
293110d565efSmrg 		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
293210d565efSmrg 			  == ASM_OPERANDS))
293310d565efSmrg 		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
293410d565efSmrg 		  else
293510d565efSmrg 		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
293610d565efSmrg 		}
293710d565efSmrg 	    }
293810d565efSmrg 	  break;
293910d565efSmrg 	}
294010d565efSmrg     }
294110d565efSmrg   return;
294210d565efSmrg }
294310d565efSmrg 
294410d565efSmrg /* Reset used-flags for INSN.  */
294510d565efSmrg 
294610d565efSmrg static void
294710d565efSmrg reset_insn_used_flags (rtx insn)
294810d565efSmrg {
294910d565efSmrg   gcc_assert (INSN_P (insn));
295010d565efSmrg   reset_used_flags (PATTERN (insn));
295110d565efSmrg   reset_used_flags (REG_NOTES (insn));
295210d565efSmrg   if (CALL_P (insn))
295310d565efSmrg     reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
295410d565efSmrg }
295510d565efSmrg 
295610d565efSmrg /* Go through all the RTL insn bodies and clear all the USED bits.  */
295710d565efSmrg 
295810d565efSmrg static void
295910d565efSmrg reset_all_used_flags (void)
296010d565efSmrg {
296110d565efSmrg   rtx_insn *p;
296210d565efSmrg 
296310d565efSmrg   for (p = get_insns (); p; p = NEXT_INSN (p))
296410d565efSmrg     if (INSN_P (p))
296510d565efSmrg       {
296610d565efSmrg 	rtx pat = PATTERN (p);
296710d565efSmrg 	if (GET_CODE (pat) != SEQUENCE)
296810d565efSmrg 	  reset_insn_used_flags (p);
296910d565efSmrg 	else
297010d565efSmrg 	  {
297110d565efSmrg 	    gcc_assert (REG_NOTES (p) == NULL);
297210d565efSmrg 	    for (int i = 0; i < XVECLEN (pat, 0); i++)
297310d565efSmrg 	      {
297410d565efSmrg 		rtx insn = XVECEXP (pat, 0, i);
297510d565efSmrg 		if (INSN_P (insn))
297610d565efSmrg 		  reset_insn_used_flags (insn);
297710d565efSmrg 	      }
297810d565efSmrg 	  }
297910d565efSmrg       }
298010d565efSmrg }
298110d565efSmrg 
298210d565efSmrg /* Verify sharing in INSN.  */
298310d565efSmrg 
298410d565efSmrg static void
298510d565efSmrg verify_insn_sharing (rtx insn)
298610d565efSmrg {
298710d565efSmrg   gcc_assert (INSN_P (insn));
298810d565efSmrg   verify_rtx_sharing (PATTERN (insn), insn);
298910d565efSmrg   verify_rtx_sharing (REG_NOTES (insn), insn);
299010d565efSmrg   if (CALL_P (insn))
299110d565efSmrg     verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
299210d565efSmrg }
299310d565efSmrg 
299410d565efSmrg /* Go through all the RTL insn bodies and check that there is no unexpected
299510d565efSmrg    sharing in between the subexpressions.  */
299610d565efSmrg 
299710d565efSmrg DEBUG_FUNCTION void
299810d565efSmrg verify_rtl_sharing (void)
299910d565efSmrg {
300010d565efSmrg   rtx_insn *p;
300110d565efSmrg 
300210d565efSmrg   timevar_push (TV_VERIFY_RTL_SHARING);
300310d565efSmrg 
300410d565efSmrg   reset_all_used_flags ();
300510d565efSmrg 
300610d565efSmrg   for (p = get_insns (); p; p = NEXT_INSN (p))
300710d565efSmrg     if (INSN_P (p))
300810d565efSmrg       {
300910d565efSmrg 	rtx pat = PATTERN (p);
301010d565efSmrg 	if (GET_CODE (pat) != SEQUENCE)
301110d565efSmrg 	  verify_insn_sharing (p);
301210d565efSmrg 	else
301310d565efSmrg 	  for (int i = 0; i < XVECLEN (pat, 0); i++)
301410d565efSmrg 	      {
301510d565efSmrg 		rtx insn = XVECEXP (pat, 0, i);
301610d565efSmrg 		if (INSN_P (insn))
301710d565efSmrg 		  verify_insn_sharing (insn);
301810d565efSmrg 	      }
301910d565efSmrg       }
302010d565efSmrg 
302110d565efSmrg   reset_all_used_flags ();
302210d565efSmrg 
302310d565efSmrg   timevar_pop (TV_VERIFY_RTL_SHARING);
302410d565efSmrg }
302510d565efSmrg 
302610d565efSmrg /* Go through all the RTL insn bodies and copy any invalid shared structure.
302710d565efSmrg    Assumes the mark bits are cleared at entry.  */
302810d565efSmrg 
302910d565efSmrg void
303010d565efSmrg unshare_all_rtl_in_chain (rtx_insn *insn)
303110d565efSmrg {
303210d565efSmrg   for (; insn; insn = NEXT_INSN (insn))
303310d565efSmrg     if (INSN_P (insn))
303410d565efSmrg       {
303510d565efSmrg 	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
303610d565efSmrg 	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
303710d565efSmrg 	if (CALL_P (insn))
303810d565efSmrg 	  CALL_INSN_FUNCTION_USAGE (insn)
303910d565efSmrg 	    = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
304010d565efSmrg       }
304110d565efSmrg }
304210d565efSmrg 
304310d565efSmrg /* Go through all virtual stack slots of a function and mark them as
304410d565efSmrg    shared.  We never replace the DECL_RTLs themselves with a copy,
304510d565efSmrg    but expressions mentioned into a DECL_RTL cannot be shared with
304610d565efSmrg    expressions in the instruction stream.
304710d565efSmrg 
304810d565efSmrg    Note that reload may convert pseudo registers into memories in-place.
304910d565efSmrg    Pseudo registers are always shared, but MEMs never are.  Thus if we
305010d565efSmrg    reset the used flags on MEMs in the instruction stream, we must set
305110d565efSmrg    them again on MEMs that appear in DECL_RTLs.  */
305210d565efSmrg 
305310d565efSmrg static void
305410d565efSmrg set_used_decls (tree blk)
305510d565efSmrg {
305610d565efSmrg   tree t;
305710d565efSmrg 
305810d565efSmrg   /* Mark decls.  */
305910d565efSmrg   for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
306010d565efSmrg     if (DECL_RTL_SET_P (t))
306110d565efSmrg       set_used_flags (DECL_RTL (t));
306210d565efSmrg 
306310d565efSmrg   /* Now process sub-blocks.  */
306410d565efSmrg   for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
306510d565efSmrg     set_used_decls (t);
306610d565efSmrg }
306710d565efSmrg 
306810d565efSmrg /* Mark ORIG as in use, and return a copy of it if it was already in use.
306910d565efSmrg    Recursively does the same for subexpressions.  Uses
307010d565efSmrg    copy_rtx_if_shared_1 to reduce stack space.  */
307110d565efSmrg 
307210d565efSmrg rtx
307310d565efSmrg copy_rtx_if_shared (rtx orig)
307410d565efSmrg {
307510d565efSmrg   copy_rtx_if_shared_1 (&orig);
307610d565efSmrg   return orig;
307710d565efSmrg }
307810d565efSmrg 
307910d565efSmrg /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
308010d565efSmrg    use.  Recursively does the same for subexpressions.  */
308110d565efSmrg 
308210d565efSmrg static void
308310d565efSmrg copy_rtx_if_shared_1 (rtx *orig1)
308410d565efSmrg {
308510d565efSmrg   rtx x;
308610d565efSmrg   int i;
308710d565efSmrg   enum rtx_code code;
308810d565efSmrg   rtx *last_ptr;
308910d565efSmrg   const char *format_ptr;
309010d565efSmrg   int copied = 0;
309110d565efSmrg   int length;
309210d565efSmrg 
309310d565efSmrg   /* Repeat is used to turn tail-recursion into iteration.  */
309410d565efSmrg repeat:
309510d565efSmrg   x = *orig1;
309610d565efSmrg 
309710d565efSmrg   if (x == 0)
309810d565efSmrg     return;
309910d565efSmrg 
310010d565efSmrg   code = GET_CODE (x);
310110d565efSmrg 
310210d565efSmrg   /* These types may be freely shared.  */
310310d565efSmrg 
310410d565efSmrg   switch (code)
310510d565efSmrg     {
310610d565efSmrg     case REG:
310710d565efSmrg     case DEBUG_EXPR:
310810d565efSmrg     case VALUE:
310910d565efSmrg     CASE_CONST_ANY:
311010d565efSmrg     case SYMBOL_REF:
311110d565efSmrg     case LABEL_REF:
311210d565efSmrg     case CODE_LABEL:
311310d565efSmrg     case PC:
311410d565efSmrg     case CC0:
311510d565efSmrg     case RETURN:
311610d565efSmrg     case SIMPLE_RETURN:
311710d565efSmrg     case SCRATCH:
311810d565efSmrg       /* A SCRATCH must be shared because each represents a distinct value.  */
311910d565efSmrg       return;
312010d565efSmrg     case CLOBBER:
312110d565efSmrg       /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
312210d565efSmrg          clobbers or clobbers of hard registers that originated as pseudos.
312310d565efSmrg          This is needed to allow safe register renaming.  */
312410d565efSmrg       if (REG_P (XEXP (x, 0))
312510d565efSmrg 	  && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
312610d565efSmrg 	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
312710d565efSmrg 	return;
312810d565efSmrg       break;
312910d565efSmrg 
313010d565efSmrg     case CONST:
313110d565efSmrg       if (shared_const_p (x))
313210d565efSmrg 	return;
313310d565efSmrg       break;
313410d565efSmrg 
313510d565efSmrg     case DEBUG_INSN:
313610d565efSmrg     case INSN:
313710d565efSmrg     case JUMP_INSN:
313810d565efSmrg     case CALL_INSN:
313910d565efSmrg     case NOTE:
314010d565efSmrg     case BARRIER:
314110d565efSmrg       /* The chain of insns is not being copied.  */
314210d565efSmrg       return;
314310d565efSmrg 
314410d565efSmrg     default:
314510d565efSmrg       break;
314610d565efSmrg     }
314710d565efSmrg 
314810d565efSmrg   /* This rtx may not be shared.  If it has already been seen,
314910d565efSmrg      replace it with a copy of itself.  */
315010d565efSmrg 
315110d565efSmrg   if (RTX_FLAG (x, used))
315210d565efSmrg     {
315310d565efSmrg       x = shallow_copy_rtx (x);
315410d565efSmrg       copied = 1;
315510d565efSmrg     }
315610d565efSmrg   RTX_FLAG (x, used) = 1;
315710d565efSmrg 
315810d565efSmrg   /* Now scan the subexpressions recursively.
315910d565efSmrg      We can store any replaced subexpressions directly into X
316010d565efSmrg      since we know X is not shared!  Any vectors in X
316110d565efSmrg      must be copied if X was copied.  */
316210d565efSmrg 
316310d565efSmrg   format_ptr = GET_RTX_FORMAT (code);
316410d565efSmrg   length = GET_RTX_LENGTH (code);
316510d565efSmrg   last_ptr = NULL;
316610d565efSmrg 
316710d565efSmrg   for (i = 0; i < length; i++)
316810d565efSmrg     {
316910d565efSmrg       switch (*format_ptr++)
317010d565efSmrg 	{
317110d565efSmrg 	case 'e':
317210d565efSmrg           if (last_ptr)
317310d565efSmrg             copy_rtx_if_shared_1 (last_ptr);
317410d565efSmrg 	  last_ptr = &XEXP (x, i);
317510d565efSmrg 	  break;
317610d565efSmrg 
317710d565efSmrg 	case 'E':
317810d565efSmrg 	  if (XVEC (x, i) != NULL)
317910d565efSmrg 	    {
318010d565efSmrg 	      int j;
318110d565efSmrg 	      int len = XVECLEN (x, i);
318210d565efSmrg 
318310d565efSmrg               /* Copy the vector iff I copied the rtx and the length
318410d565efSmrg 		 is nonzero.  */
318510d565efSmrg 	      if (copied && len > 0)
318610d565efSmrg 		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
318710d565efSmrg 
318810d565efSmrg               /* Call recursively on all inside the vector.  */
318910d565efSmrg 	      for (j = 0; j < len; j++)
319010d565efSmrg                 {
319110d565efSmrg 		  if (last_ptr)
319210d565efSmrg 		    copy_rtx_if_shared_1 (last_ptr);
319310d565efSmrg                   last_ptr = &XVECEXP (x, i, j);
319410d565efSmrg                 }
319510d565efSmrg 	    }
319610d565efSmrg 	  break;
319710d565efSmrg 	}
319810d565efSmrg     }
319910d565efSmrg   *orig1 = x;
320010d565efSmrg   if (last_ptr)
320110d565efSmrg     {
320210d565efSmrg       orig1 = last_ptr;
320310d565efSmrg       goto repeat;
320410d565efSmrg     }
320510d565efSmrg   return;
320610d565efSmrg }
320710d565efSmrg 
320810d565efSmrg /* Set the USED bit in X and its non-shareable subparts to FLAG.  */
320910d565efSmrg 
321010d565efSmrg static void
321110d565efSmrg mark_used_flags (rtx x, int flag)
321210d565efSmrg {
321310d565efSmrg   int i, j;
321410d565efSmrg   enum rtx_code code;
321510d565efSmrg   const char *format_ptr;
321610d565efSmrg   int length;
321710d565efSmrg 
321810d565efSmrg   /* Repeat is used to turn tail-recursion into iteration.  */
321910d565efSmrg repeat:
322010d565efSmrg   if (x == 0)
322110d565efSmrg     return;
322210d565efSmrg 
322310d565efSmrg   code = GET_CODE (x);
322410d565efSmrg 
322510d565efSmrg   /* These types may be freely shared so we needn't do any resetting
322610d565efSmrg      for them.  */
322710d565efSmrg 
322810d565efSmrg   switch (code)
322910d565efSmrg     {
323010d565efSmrg     case REG:
323110d565efSmrg     case DEBUG_EXPR:
323210d565efSmrg     case VALUE:
323310d565efSmrg     CASE_CONST_ANY:
323410d565efSmrg     case SYMBOL_REF:
323510d565efSmrg     case CODE_LABEL:
323610d565efSmrg     case PC:
323710d565efSmrg     case CC0:
323810d565efSmrg     case RETURN:
323910d565efSmrg     case SIMPLE_RETURN:
324010d565efSmrg       return;
324110d565efSmrg 
324210d565efSmrg     case DEBUG_INSN:
324310d565efSmrg     case INSN:
324410d565efSmrg     case JUMP_INSN:
324510d565efSmrg     case CALL_INSN:
324610d565efSmrg     case NOTE:
324710d565efSmrg     case LABEL_REF:
324810d565efSmrg     case BARRIER:
324910d565efSmrg       /* The chain of insns is not being copied.  */
325010d565efSmrg       return;
325110d565efSmrg 
325210d565efSmrg     default:
325310d565efSmrg       break;
325410d565efSmrg     }
325510d565efSmrg 
325610d565efSmrg   RTX_FLAG (x, used) = flag;
325710d565efSmrg 
325810d565efSmrg   format_ptr = GET_RTX_FORMAT (code);
325910d565efSmrg   length = GET_RTX_LENGTH (code);
326010d565efSmrg 
326110d565efSmrg   for (i = 0; i < length; i++)
326210d565efSmrg     {
326310d565efSmrg       switch (*format_ptr++)
326410d565efSmrg 	{
326510d565efSmrg 	case 'e':
326610d565efSmrg           if (i == length-1)
326710d565efSmrg             {
326810d565efSmrg               x = XEXP (x, i);
326910d565efSmrg 	      goto repeat;
327010d565efSmrg             }
327110d565efSmrg 	  mark_used_flags (XEXP (x, i), flag);
327210d565efSmrg 	  break;
327310d565efSmrg 
327410d565efSmrg 	case 'E':
327510d565efSmrg 	  for (j = 0; j < XVECLEN (x, i); j++)
327610d565efSmrg 	    mark_used_flags (XVECEXP (x, i, j), flag);
327710d565efSmrg 	  break;
327810d565efSmrg 	}
327910d565efSmrg     }
328010d565efSmrg }
328110d565efSmrg 
328210d565efSmrg /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
328310d565efSmrg    to look for shared sub-parts.  */
328410d565efSmrg 
328510d565efSmrg void
328610d565efSmrg reset_used_flags (rtx x)
328710d565efSmrg {
328810d565efSmrg   mark_used_flags (x, 0);
328910d565efSmrg }
329010d565efSmrg 
329110d565efSmrg /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
329210d565efSmrg    to look for shared sub-parts.  */
329310d565efSmrg 
329410d565efSmrg void
329510d565efSmrg set_used_flags (rtx x)
329610d565efSmrg {
329710d565efSmrg   mark_used_flags (x, 1);
329810d565efSmrg }
329910d565efSmrg 
330010d565efSmrg /* Copy X if necessary so that it won't be altered by changes in OTHER.
330110d565efSmrg    Return X or the rtx for the pseudo reg the value of X was copied into.
330210d565efSmrg    OTHER must be valid as a SET_DEST.  */
330310d565efSmrg 
330410d565efSmrg rtx
330510d565efSmrg make_safe_from (rtx x, rtx other)
330610d565efSmrg {
330710d565efSmrg   while (1)
330810d565efSmrg     switch (GET_CODE (other))
330910d565efSmrg       {
331010d565efSmrg       case SUBREG:
331110d565efSmrg 	other = SUBREG_REG (other);
331210d565efSmrg 	break;
331310d565efSmrg       case STRICT_LOW_PART:
331410d565efSmrg       case SIGN_EXTEND:
331510d565efSmrg       case ZERO_EXTEND:
331610d565efSmrg 	other = XEXP (other, 0);
331710d565efSmrg 	break;
331810d565efSmrg       default:
331910d565efSmrg 	goto done;
332010d565efSmrg       }
332110d565efSmrg  done:
332210d565efSmrg   if ((MEM_P (other)
332310d565efSmrg        && ! CONSTANT_P (x)
332410d565efSmrg        && !REG_P (x)
332510d565efSmrg        && GET_CODE (x) != SUBREG)
332610d565efSmrg       || (REG_P (other)
332710d565efSmrg 	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
332810d565efSmrg 	      || reg_mentioned_p (other, x))))
332910d565efSmrg     {
333010d565efSmrg       rtx temp = gen_reg_rtx (GET_MODE (x));
333110d565efSmrg       emit_move_insn (temp, x);
333210d565efSmrg       return temp;
333310d565efSmrg     }
333410d565efSmrg   return x;
333510d565efSmrg }
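
/* A minimal usage sketch, assuming X and TARGET already exist: an expander
   that is about to clobber TARGET but still needs the value of X afterwards
   would write

     x = make_safe_from (x, target);

   If X overlaps TARGET (or is a MEM that the store might disturb), X is
   copied into a fresh pseudo first; otherwise it is returned unchanged.  */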
333610d565efSmrg 
333710d565efSmrg /* Emission of insns (adding them to the doubly-linked list).  */
333810d565efSmrg 
333910d565efSmrg /* Return the last insn emitted, even if it is in a sequence now pushed.  */
334010d565efSmrg 
334110d565efSmrg rtx_insn *
334210d565efSmrg get_last_insn_anywhere (void)
334310d565efSmrg {
334410d565efSmrg   struct sequence_stack *seq;
334510d565efSmrg   for (seq = get_current_sequence (); seq; seq = seq->next)
334610d565efSmrg     if (seq->last != 0)
334710d565efSmrg       return seq->last;
334810d565efSmrg   return 0;
334910d565efSmrg }
335010d565efSmrg 
335110d565efSmrg /* Return the first nonnote insn emitted in current sequence or current
335210d565efSmrg    function.  This routine looks inside SEQUENCEs.  */
335310d565efSmrg 
335410d565efSmrg rtx_insn *
335510d565efSmrg get_first_nonnote_insn (void)
335610d565efSmrg {
335710d565efSmrg   rtx_insn *insn = get_insns ();
335810d565efSmrg 
335910d565efSmrg   if (insn)
336010d565efSmrg     {
336110d565efSmrg       if (NOTE_P (insn))
336210d565efSmrg 	for (insn = next_insn (insn);
336310d565efSmrg 	     insn && NOTE_P (insn);
336410d565efSmrg 	     insn = next_insn (insn))
336510d565efSmrg 	  continue;
336610d565efSmrg       else
336710d565efSmrg 	{
336810d565efSmrg 	  if (NONJUMP_INSN_P (insn)
336910d565efSmrg 	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
337010d565efSmrg 	    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
337110d565efSmrg 	}
337210d565efSmrg     }
337310d565efSmrg 
337410d565efSmrg   return insn;
337510d565efSmrg }
337610d565efSmrg 
337710d565efSmrg /* Return the last nonnote insn emitted in current sequence or current
337810d565efSmrg    function.  This routine looks inside SEQUENCEs.  */
337910d565efSmrg 
338010d565efSmrg rtx_insn *
338110d565efSmrg get_last_nonnote_insn (void)
338210d565efSmrg {
338310d565efSmrg   rtx_insn *insn = get_last_insn ();
338410d565efSmrg 
338510d565efSmrg   if (insn)
338610d565efSmrg     {
338710d565efSmrg       if (NOTE_P (insn))
338810d565efSmrg 	for (insn = previous_insn (insn);
338910d565efSmrg 	     insn && NOTE_P (insn);
339010d565efSmrg 	     insn = previous_insn (insn))
339110d565efSmrg 	  continue;
339210d565efSmrg       else
339310d565efSmrg 	{
339410d565efSmrg 	  if (NONJUMP_INSN_P (insn))
339510d565efSmrg 	    if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
339610d565efSmrg 	      insn = seq->insn (seq->len () - 1);
339710d565efSmrg 	}
339810d565efSmrg     }
339910d565efSmrg 
340010d565efSmrg   return insn;
340110d565efSmrg }
340210d565efSmrg 
340310d565efSmrg /* Return the number of actual (non-debug) insns emitted in this
340410d565efSmrg    function.  */
340510d565efSmrg 
340610d565efSmrg int
340710d565efSmrg get_max_insn_count (void)
340810d565efSmrg {
340910d565efSmrg   int n = cur_insn_uid;
341010d565efSmrg 
341110d565efSmrg   /* The table size must be stable across -g, to avoid codegen
341210d565efSmrg      differences due to debug insns, and not be affected by
341310d565efSmrg      -fmin-insn-uid, to avoid excessive table size and to simplify
341410d565efSmrg      debugging of -fcompare-debug failures.  */
341510d565efSmrg   if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
341610d565efSmrg     n -= cur_debug_insn_uid;
341710d565efSmrg   else
341810d565efSmrg     n -= MIN_NONDEBUG_INSN_UID;
341910d565efSmrg 
342010d565efSmrg   return n;
342110d565efSmrg }
342210d565efSmrg 
342310d565efSmrg 
342410d565efSmrg /* Return the next insn.  If it is a SEQUENCE, return the first insn
342510d565efSmrg    of the sequence.  */
342610d565efSmrg 
342710d565efSmrg rtx_insn *
342810d565efSmrg next_insn (rtx_insn *insn)
342910d565efSmrg {
343010d565efSmrg   if (insn)
343110d565efSmrg     {
343210d565efSmrg       insn = NEXT_INSN (insn);
343310d565efSmrg       if (insn && NONJUMP_INSN_P (insn)
343410d565efSmrg 	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
343510d565efSmrg 	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
343610d565efSmrg     }
343710d565efSmrg 
343810d565efSmrg   return insn;
343910d565efSmrg }
344010d565efSmrg 
344110d565efSmrg /* Return the previous insn.  If it is a SEQUENCE, return the last insn
344210d565efSmrg    of the sequence.  */
344310d565efSmrg 
344410d565efSmrg rtx_insn *
344510d565efSmrg previous_insn (rtx_insn *insn)
344610d565efSmrg {
344710d565efSmrg   if (insn)
344810d565efSmrg     {
344910d565efSmrg       insn = PREV_INSN (insn);
345010d565efSmrg       if (insn && NONJUMP_INSN_P (insn))
345110d565efSmrg 	if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
345210d565efSmrg 	  insn = seq->insn (seq->len () - 1);
345310d565efSmrg     }
345410d565efSmrg 
345510d565efSmrg   return insn;
345610d565efSmrg }
345710d565efSmrg 
345810d565efSmrg /* Return the next insn after INSN that is not a NOTE.  This routine does not
345910d565efSmrg    look inside SEQUENCEs.  */
346010d565efSmrg 
346110d565efSmrg rtx_insn *
346210d565efSmrg next_nonnote_insn (rtx_insn *insn)
346310d565efSmrg {
346410d565efSmrg   while (insn)
346510d565efSmrg     {
346610d565efSmrg       insn = NEXT_INSN (insn);
346710d565efSmrg       if (insn == 0 || !NOTE_P (insn))
346810d565efSmrg 	break;
346910d565efSmrg     }
347010d565efSmrg 
347110d565efSmrg   return insn;
347210d565efSmrg }
347310d565efSmrg 
3474*c7a68eb7Smrg /* Return the next insn after INSN that is not a DEBUG_INSN.  This
3475*c7a68eb7Smrg    routine does not look inside SEQUENCEs.  */
347610d565efSmrg 
347710d565efSmrg rtx_insn *
3478*c7a68eb7Smrg next_nondebug_insn (rtx_insn *insn)
347910d565efSmrg {
348010d565efSmrg   while (insn)
348110d565efSmrg     {
348210d565efSmrg       insn = NEXT_INSN (insn);
3483*c7a68eb7Smrg       if (insn == 0 || !DEBUG_INSN_P (insn))
348410d565efSmrg 	break;
348510d565efSmrg     }
348610d565efSmrg 
348710d565efSmrg   return insn;
348810d565efSmrg }
348910d565efSmrg 
349010d565efSmrg /* Return the previous insn before INSN that is not a NOTE.  This routine does
349110d565efSmrg    not look inside SEQUENCEs.  */
349210d565efSmrg 
349310d565efSmrg rtx_insn *
349410d565efSmrg prev_nonnote_insn (rtx_insn *insn)
349510d565efSmrg {
349610d565efSmrg   while (insn)
349710d565efSmrg     {
349810d565efSmrg       insn = PREV_INSN (insn);
349910d565efSmrg       if (insn == 0 || !NOTE_P (insn))
350010d565efSmrg 	break;
350110d565efSmrg     }
350210d565efSmrg 
350310d565efSmrg   return insn;
350410d565efSmrg }
350510d565efSmrg 
350610d565efSmrg /* Return the previous insn before INSN that is not a DEBUG_INSN.
350710d565efSmrg    This routine does not look inside SEQUENCEs.  */
350810d565efSmrg 
350910d565efSmrg rtx_insn *
351010d565efSmrg prev_nondebug_insn (rtx_insn *insn)
351110d565efSmrg {
351210d565efSmrg   while (insn)
351310d565efSmrg     {
351410d565efSmrg       insn = PREV_INSN (insn);
351510d565efSmrg       if (insn == 0 || !DEBUG_INSN_P (insn))
351610d565efSmrg 	break;
351710d565efSmrg     }
351810d565efSmrg 
351910d565efSmrg   return insn;
352010d565efSmrg }
352110d565efSmrg 
352210d565efSmrg /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
352310d565efSmrg    This routine does not look inside SEQUENCEs.  */
352410d565efSmrg 
352510d565efSmrg rtx_insn *
352610d565efSmrg next_nonnote_nondebug_insn (rtx_insn *insn)
352710d565efSmrg {
352810d565efSmrg   while (insn)
352910d565efSmrg     {
353010d565efSmrg       insn = NEXT_INSN (insn);
353110d565efSmrg       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
353210d565efSmrg 	break;
353310d565efSmrg     }
353410d565efSmrg 
353510d565efSmrg   return insn;
353610d565efSmrg }
353710d565efSmrg 
3538*c7a68eb7Smrg /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
3539*c7a68eb7Smrg    but stop the search before we enter another basic block.  This
3540*c7a68eb7Smrg    routine does not look inside SEQUENCEs.  */
3541*c7a68eb7Smrg 
3542*c7a68eb7Smrg rtx_insn *
3543*c7a68eb7Smrg next_nonnote_nondebug_insn_bb (rtx_insn *insn)
3544*c7a68eb7Smrg {
3545*c7a68eb7Smrg   while (insn)
3546*c7a68eb7Smrg     {
3547*c7a68eb7Smrg       insn = NEXT_INSN (insn);
3548*c7a68eb7Smrg       if (insn == 0)
3549*c7a68eb7Smrg 	break;
3550*c7a68eb7Smrg       if (DEBUG_INSN_P (insn))
3551*c7a68eb7Smrg 	continue;
3552*c7a68eb7Smrg       if (!NOTE_P (insn))
3553*c7a68eb7Smrg 	break;
3554*c7a68eb7Smrg       if (NOTE_INSN_BASIC_BLOCK_P (insn))
3555*c7a68eb7Smrg 	return NULL;
3556*c7a68eb7Smrg     }
3557*c7a68eb7Smrg 
3558*c7a68eb7Smrg   return insn;
3559*c7a68eb7Smrg }
3560*c7a68eb7Smrg 
356110d565efSmrg /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
356210d565efSmrg    This routine does not look inside SEQUENCEs.  */
356310d565efSmrg 
356410d565efSmrg rtx_insn *
356510d565efSmrg prev_nonnote_nondebug_insn (rtx_insn *insn)
356610d565efSmrg {
356710d565efSmrg   while (insn)
356810d565efSmrg     {
356910d565efSmrg       insn = PREV_INSN (insn);
357010d565efSmrg       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
357110d565efSmrg 	break;
357210d565efSmrg     }
357310d565efSmrg 
357410d565efSmrg   return insn;
357510d565efSmrg }
357610d565efSmrg 
3577*c7a68eb7Smrg /* Return the previous insn before INSN that is not a NOTE nor
3578*c7a68eb7Smrg    DEBUG_INSN, but stop the search before we enter another basic
3579*c7a68eb7Smrg    block.  This routine does not look inside SEQUENCEs.  */
3580*c7a68eb7Smrg 
3581*c7a68eb7Smrg rtx_insn *
3582*c7a68eb7Smrg prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
3583*c7a68eb7Smrg {
3584*c7a68eb7Smrg   while (insn)
3585*c7a68eb7Smrg     {
3586*c7a68eb7Smrg       insn = PREV_INSN (insn);
3587*c7a68eb7Smrg       if (insn == 0)
3588*c7a68eb7Smrg 	break;
3589*c7a68eb7Smrg       if (DEBUG_INSN_P (insn))
3590*c7a68eb7Smrg 	continue;
3591*c7a68eb7Smrg       if (!NOTE_P (insn))
3592*c7a68eb7Smrg 	break;
3593*c7a68eb7Smrg       if (NOTE_INSN_BASIC_BLOCK_P (insn))
3594*c7a68eb7Smrg 	return NULL;
3595*c7a68eb7Smrg     }
3596*c7a68eb7Smrg 
3597*c7a68eb7Smrg   return insn;
3598*c7a68eb7Smrg }
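
/* A minimal usage sketch: the _bb variants above let a caller scan real
   insns without crossing into another basic block, e.g.

     for (rtx_insn *i = next_nonnote_nondebug_insn_bb (insn);
	  i != NULL;
	  i = next_nonnote_nondebug_insn_bb (i))
       ... process I ...

   A NULL result means the walk hit a NOTE_INSN_BASIC_BLOCK or the end of
   the chain.  */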
3599*c7a68eb7Smrg 
3600*c7a68eb7Smrg /* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
360110d565efSmrg    or 0, if there is none.  This routine does not look inside
360210d565efSmrg    SEQUENCEs.  */
360310d565efSmrg 
360410d565efSmrg rtx_insn *
360510d565efSmrg next_real_insn (rtx uncast_insn)
360610d565efSmrg {
360710d565efSmrg   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
360810d565efSmrg 
360910d565efSmrg   while (insn)
361010d565efSmrg     {
361110d565efSmrg       insn = NEXT_INSN (insn);
361210d565efSmrg       if (insn == 0 || INSN_P (insn))
361310d565efSmrg 	break;
361410d565efSmrg     }
361510d565efSmrg 
361610d565efSmrg   return insn;
361710d565efSmrg }
361810d565efSmrg 
3619*c7a68eb7Smrg /* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
362010d565efSmrg    or 0, if there is none.  This routine does not look inside
362110d565efSmrg    SEQUENCEs.  */
362210d565efSmrg 
362310d565efSmrg rtx_insn *
362410d565efSmrg prev_real_insn (rtx_insn *insn)
362510d565efSmrg {
362610d565efSmrg   while (insn)
362710d565efSmrg     {
362810d565efSmrg       insn = PREV_INSN (insn);
362910d565efSmrg       if (insn == 0 || INSN_P (insn))
363010d565efSmrg 	break;
363110d565efSmrg     }
363210d565efSmrg 
363310d565efSmrg   return insn;
363410d565efSmrg }
363510d565efSmrg 
3636*c7a68eb7Smrg /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3637*c7a68eb7Smrg    or 0, if there is none.  This routine does not look inside
3638*c7a68eb7Smrg    SEQUENCEs.  */
3639*c7a68eb7Smrg 
3640*c7a68eb7Smrg rtx_insn *
3641*c7a68eb7Smrg next_real_nondebug_insn (rtx uncast_insn)
3642*c7a68eb7Smrg {
3643*c7a68eb7Smrg   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3644*c7a68eb7Smrg 
3645*c7a68eb7Smrg   while (insn)
3646*c7a68eb7Smrg     {
3647*c7a68eb7Smrg       insn = NEXT_INSN (insn);
3648*c7a68eb7Smrg       if (insn == 0 || NONDEBUG_INSN_P (insn))
3649*c7a68eb7Smrg 	break;
3650*c7a68eb7Smrg     }
3651*c7a68eb7Smrg 
3652*c7a68eb7Smrg   return insn;
3653*c7a68eb7Smrg }
3654*c7a68eb7Smrg 
3655*c7a68eb7Smrg /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3656*c7a68eb7Smrg    or 0, if there is none.  This routine does not look inside
3657*c7a68eb7Smrg    SEQUENCEs.  */
3658*c7a68eb7Smrg 
3659*c7a68eb7Smrg rtx_insn *
3660*c7a68eb7Smrg prev_real_nondebug_insn (rtx_insn *insn)
3661*c7a68eb7Smrg {
3662*c7a68eb7Smrg   while (insn)
3663*c7a68eb7Smrg     {
3664*c7a68eb7Smrg       insn = PREV_INSN (insn);
3665*c7a68eb7Smrg       if (insn == 0 || NONDEBUG_INSN_P (insn))
3666*c7a68eb7Smrg 	break;
3667*c7a68eb7Smrg     }
3668*c7a68eb7Smrg 
3669*c7a68eb7Smrg   return insn;
3670*c7a68eb7Smrg }
3671*c7a68eb7Smrg 
367210d565efSmrg /* Return the last CALL_INSN in the current list, or 0 if there is none.
367310d565efSmrg    This routine does not look inside SEQUENCEs.  */
367410d565efSmrg 
367510d565efSmrg rtx_call_insn *
367610d565efSmrg last_call_insn (void)
367710d565efSmrg {
367810d565efSmrg   rtx_insn *insn;
367910d565efSmrg 
368010d565efSmrg   for (insn = get_last_insn ();
368110d565efSmrg        insn && !CALL_P (insn);
368210d565efSmrg        insn = PREV_INSN (insn))
368310d565efSmrg     ;
368410d565efSmrg 
368510d565efSmrg   return safe_as_a <rtx_call_insn *> (insn);
368610d565efSmrg }
368710d565efSmrg 
368810d565efSmrg /* Find the next insn after INSN that really does something.  This routine
368910d565efSmrg    does not look inside SEQUENCEs.  After reload this also skips over
369010d565efSmrg    standalone USE and CLOBBER insns.  */
369110d565efSmrg 
369210d565efSmrg int
369310d565efSmrg active_insn_p (const rtx_insn *insn)
369410d565efSmrg {
369510d565efSmrg   return (CALL_P (insn) || JUMP_P (insn)
369610d565efSmrg 	  || JUMP_TABLE_DATA_P (insn) /* FIXME */
369710d565efSmrg 	  || (NONJUMP_INSN_P (insn)
369810d565efSmrg 	      && (! reload_completed
369910d565efSmrg 		  || (GET_CODE (PATTERN (insn)) != USE
370010d565efSmrg 		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
370110d565efSmrg }
370210d565efSmrg 
370310d565efSmrg rtx_insn *
370410d565efSmrg next_active_insn (rtx_insn *insn)
370510d565efSmrg {
370610d565efSmrg   while (insn)
370710d565efSmrg     {
370810d565efSmrg       insn = NEXT_INSN (insn);
370910d565efSmrg       if (insn == 0 || active_insn_p (insn))
371010d565efSmrg 	break;
371110d565efSmrg     }
371210d565efSmrg 
371310d565efSmrg   return insn;
371410d565efSmrg }
371510d565efSmrg 
371610d565efSmrg /* Find the last insn before INSN that really does something.  This routine
371710d565efSmrg    does not look inside SEQUENCEs.  After reload this also skips over
371810d565efSmrg    standalone USE and CLOBBER insns.  */
371910d565efSmrg 
372010d565efSmrg rtx_insn *
372110d565efSmrg prev_active_insn (rtx_insn *insn)
372210d565efSmrg {
372310d565efSmrg   while (insn)
372410d565efSmrg     {
372510d565efSmrg       insn = PREV_INSN (insn);
372610d565efSmrg       if (insn == 0 || active_insn_p (insn))
372710d565efSmrg 	break;
372810d565efSmrg     }
372910d565efSmrg 
373010d565efSmrg   return insn;
373110d565efSmrg }
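
/* A minimal usage sketch: active_insn_p and the two walkers above find the
   neighbouring instructions that will actually generate code, e.g.

     rtx_insn *next = next_active_insn (insn);
     if (next && JUMP_P (next))
       ... the insn is immediately followed by a jump ...

   Notes, debug insns and (after reload) bare USE/CLOBBER insns are all
   skipped.  */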
373210d565efSmrg 
373310d565efSmrg /* Return the next insn that uses CC0 after INSN, which is assumed to
373410d565efSmrg    set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
373510d565efSmrg    applied to the result of this function should yield INSN).
373610d565efSmrg 
373710d565efSmrg    Normally, this is simply the next insn.  However, if a REG_CC_USER note
373810d565efSmrg    is present, it contains the insn that uses CC0.
373910d565efSmrg 
374010d565efSmrg    Return 0 if we can't find the insn.  */
374110d565efSmrg 
374210d565efSmrg rtx_insn *
374310d565efSmrg next_cc0_user (rtx_insn *insn)
374410d565efSmrg {
374510d565efSmrg   rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
374610d565efSmrg 
374710d565efSmrg   if (note)
374810d565efSmrg     return safe_as_a <rtx_insn *> (XEXP (note, 0));
374910d565efSmrg 
375010d565efSmrg   insn = next_nonnote_insn (insn);
375110d565efSmrg   if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
375210d565efSmrg     insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
375310d565efSmrg 
375410d565efSmrg   if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
375510d565efSmrg     return insn;
375610d565efSmrg 
375710d565efSmrg   return 0;
375810d565efSmrg }
375910d565efSmrg 
376010d565efSmrg /* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
376110d565efSmrg    note, it is the previous insn.  */
376210d565efSmrg 
376310d565efSmrg rtx_insn *
376410d565efSmrg prev_cc0_setter (rtx_insn *insn)
376510d565efSmrg {
376610d565efSmrg   rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
376710d565efSmrg 
376810d565efSmrg   if (note)
376910d565efSmrg     return safe_as_a <rtx_insn *> (XEXP (note, 0));
377010d565efSmrg 
377110d565efSmrg   insn = prev_nonnote_insn (insn);
377210d565efSmrg   gcc_assert (sets_cc0_p (PATTERN (insn)));
377310d565efSmrg 
377410d565efSmrg   return insn;
377510d565efSmrg }
377610d565efSmrg 
377710d565efSmrg /* Return true if X contains an RTX_AUTOINC rtx whose operand is REG.  */
377810d565efSmrg 
377910d565efSmrg static int
378010d565efSmrg find_auto_inc (const_rtx x, const_rtx reg)
378110d565efSmrg {
378210d565efSmrg   subrtx_iterator::array_type array;
378310d565efSmrg   FOR_EACH_SUBRTX (iter, array, x, NONCONST)
378410d565efSmrg     {
378510d565efSmrg       const_rtx x = *iter;
378610d565efSmrg       if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
378710d565efSmrg 	  && rtx_equal_p (reg, XEXP (x, 0)))
378810d565efSmrg 	return true;
378910d565efSmrg     }
379010d565efSmrg   return false;
379110d565efSmrg }
379210d565efSmrg 
379310d565efSmrg /* Increment the label uses for all labels present in rtx.  */
379410d565efSmrg 
379510d565efSmrg static void
379610d565efSmrg mark_label_nuses (rtx x)
379710d565efSmrg {
379810d565efSmrg   enum rtx_code code;
379910d565efSmrg   int i, j;
380010d565efSmrg   const char *fmt;
380110d565efSmrg 
380210d565efSmrg   code = GET_CODE (x);
380310d565efSmrg   if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
380410d565efSmrg     LABEL_NUSES (label_ref_label (x))++;
380510d565efSmrg 
380610d565efSmrg   fmt = GET_RTX_FORMAT (code);
380710d565efSmrg   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
380810d565efSmrg     {
380910d565efSmrg       if (fmt[i] == 'e')
381010d565efSmrg 	mark_label_nuses (XEXP (x, i));
381110d565efSmrg       else if (fmt[i] == 'E')
381210d565efSmrg 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
381310d565efSmrg 	  mark_label_nuses (XVECEXP (x, i, j));
381410d565efSmrg     }
381510d565efSmrg }
381610d565efSmrg 
381710d565efSmrg 
381810d565efSmrg /* Try splitting insns that can be split for better scheduling.
381910d565efSmrg    PAT is the pattern which might split.
382010d565efSmrg    TRIAL is the insn providing PAT.
382110d565efSmrg    LAST is nonzero if we should return the last insn of the sequence produced.
382210d565efSmrg 
382310d565efSmrg    If this routine succeeds in splitting, it returns the first or last
382410d565efSmrg    replacement insn depending on the value of LAST.  Otherwise, it
382510d565efSmrg    returns TRIAL.  If the insn to be returned can be split, it will be.  */
382610d565efSmrg 
382710d565efSmrg rtx_insn *
382810d565efSmrg try_split (rtx pat, rtx_insn *trial, int last)
382910d565efSmrg {
383010d565efSmrg   rtx_insn *before, *after;
383110d565efSmrg   rtx note;
383210d565efSmrg   rtx_insn *seq, *tem;
3833*c7a68eb7Smrg   profile_probability probability;
383410d565efSmrg   rtx_insn *insn_last, *insn;
383510d565efSmrg   int njumps = 0;
383610d565efSmrg   rtx_insn *call_insn = NULL;
383710d565efSmrg 
383810d565efSmrg   /* We're not good at redistributing frame information.  */
383910d565efSmrg   if (RTX_FRAME_RELATED_P (trial))
384010d565efSmrg     return trial;
384110d565efSmrg 
384210d565efSmrg   if (any_condjump_p (trial)
384310d565efSmrg       && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3844*c7a68eb7Smrg     split_branch_probability
3845*c7a68eb7Smrg       = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3846*c7a68eb7Smrg   else
3847*c7a68eb7Smrg     split_branch_probability = profile_probability::uninitialized ();
3848*c7a68eb7Smrg 
384910d565efSmrg   probability = split_branch_probability;
385010d565efSmrg 
385110d565efSmrg   seq = split_insns (pat, trial);
385210d565efSmrg 
3853*c7a68eb7Smrg   split_branch_probability = profile_probability::uninitialized ();
385410d565efSmrg 
385510d565efSmrg   if (!seq)
385610d565efSmrg     return trial;
385710d565efSmrg 
385810d565efSmrg   /* Avoid infinite loop if any insn of the result matches
385910d565efSmrg      the original pattern.  */
386010d565efSmrg   insn_last = seq;
386110d565efSmrg   while (1)
386210d565efSmrg     {
386310d565efSmrg       if (INSN_P (insn_last)
386410d565efSmrg 	  && rtx_equal_p (PATTERN (insn_last), pat))
386510d565efSmrg 	return trial;
386610d565efSmrg       if (!NEXT_INSN (insn_last))
386710d565efSmrg 	break;
386810d565efSmrg       insn_last = NEXT_INSN (insn_last);
386910d565efSmrg     }
387010d565efSmrg 
387110d565efSmrg   /* We will be adding the new sequence to the function.  The splitters
387210d565efSmrg      may have introduced invalid RTL sharing, so unshare the sequence now.  */
387310d565efSmrg   unshare_all_rtl_in_chain (seq);
387410d565efSmrg 
387510d565efSmrg   /* Mark labels and copy flags.  */
387610d565efSmrg   for (insn = insn_last; insn ; insn = PREV_INSN (insn))
387710d565efSmrg     {
387810d565efSmrg       if (JUMP_P (insn))
387910d565efSmrg 	{
388010d565efSmrg 	  if (JUMP_P (trial))
388110d565efSmrg 	    CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
388210d565efSmrg 	  mark_jump_label (PATTERN (insn), insn, 0);
388310d565efSmrg 	  njumps++;
3884*c7a68eb7Smrg 	  if (probability.initialized_p ()
388510d565efSmrg 	      && any_condjump_p (insn)
388610d565efSmrg 	      && !find_reg_note (insn, REG_BR_PROB, 0))
388710d565efSmrg 	    {
388810d565efSmrg 	      /* We can preserve the REG_BR_PROB notes only if exactly
388910d565efSmrg 		 one jump is created, otherwise the machine description
389010d565efSmrg 		 is responsible for this step using the
389110d565efSmrg 		 split_branch_probability variable.  */
389210d565efSmrg 	      gcc_assert (njumps == 1);
3893*c7a68eb7Smrg 	      add_reg_br_prob_note (insn, probability);
389410d565efSmrg 	    }
389510d565efSmrg 	}
389610d565efSmrg     }
389710d565efSmrg 
389810d565efSmrg   /* If we are splitting a CALL_INSN, look for the CALL_INSN
389910d565efSmrg      in SEQ and copy any additional information across.  */
390010d565efSmrg   if (CALL_P (trial))
390110d565efSmrg     {
390210d565efSmrg       for (insn = insn_last; insn ; insn = PREV_INSN (insn))
390310d565efSmrg 	if (CALL_P (insn))
390410d565efSmrg 	  {
390510d565efSmrg 	    gcc_assert (call_insn == NULL_RTX);
390610d565efSmrg 	    call_insn = insn;
390710d565efSmrg 
390810d565efSmrg 	    /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
390910d565efSmrg 	       target may have explicitly specified.  */
3910*c7a68eb7Smrg 	    rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
391110d565efSmrg 	    while (*p)
391210d565efSmrg 	      p = &XEXP (*p, 1);
391310d565efSmrg 	    *p = CALL_INSN_FUNCTION_USAGE (trial);
391410d565efSmrg 
391510d565efSmrg 	    /* If the old call was a sibling call, the new one must
391610d565efSmrg 	       be too.  */
391710d565efSmrg 	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
391810d565efSmrg 	  }
391910d565efSmrg     }
392010d565efSmrg 
392110d565efSmrg   /* Copy notes, particularly those related to the CFG.  */
392210d565efSmrg   for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
392310d565efSmrg     {
392410d565efSmrg       switch (REG_NOTE_KIND (note))
392510d565efSmrg 	{
392610d565efSmrg 	case REG_EH_REGION:
392710d565efSmrg 	  copy_reg_eh_region_note_backward (note, insn_last, NULL);
392810d565efSmrg 	  break;
392910d565efSmrg 
393010d565efSmrg 	case REG_NORETURN:
393110d565efSmrg 	case REG_SETJMP:
393210d565efSmrg 	case REG_TM:
3933*c7a68eb7Smrg 	case REG_CALL_NOCF_CHECK:
3934*c7a68eb7Smrg 	case REG_CALL_ARG_LOCATION:
393510d565efSmrg 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
393610d565efSmrg 	    {
393710d565efSmrg 	      if (CALL_P (insn))
393810d565efSmrg 		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
393910d565efSmrg 	    }
394010d565efSmrg 	  break;
394110d565efSmrg 
394210d565efSmrg 	case REG_NON_LOCAL_GOTO:
3943*c7a68eb7Smrg 	case REG_LABEL_TARGET:
394410d565efSmrg 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
394510d565efSmrg 	    {
394610d565efSmrg 	      if (JUMP_P (insn))
394710d565efSmrg 		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
394810d565efSmrg 	    }
394910d565efSmrg 	  break;
395010d565efSmrg 
395110d565efSmrg 	case REG_INC:
395210d565efSmrg 	  if (!AUTO_INC_DEC)
395310d565efSmrg 	    break;
395410d565efSmrg 
395510d565efSmrg 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
395610d565efSmrg 	    {
395710d565efSmrg 	      rtx reg = XEXP (note, 0);
395810d565efSmrg 	      if (!FIND_REG_INC_NOTE (insn, reg)
395910d565efSmrg 		  && find_auto_inc (PATTERN (insn), reg))
396010d565efSmrg 		add_reg_note (insn, REG_INC, reg);
396110d565efSmrg 	    }
396210d565efSmrg 	  break;
396310d565efSmrg 
396410d565efSmrg 	case REG_ARGS_SIZE:
3965*c7a68eb7Smrg 	  fixup_args_size_notes (NULL, insn_last, get_args_size (note));
396610d565efSmrg 	  break;
396710d565efSmrg 
396810d565efSmrg 	case REG_CALL_DECL:
396910d565efSmrg 	  gcc_assert (call_insn != NULL_RTX);
397010d565efSmrg 	  add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
397110d565efSmrg 	  break;
397210d565efSmrg 
397310d565efSmrg 	default:
397410d565efSmrg 	  break;
397510d565efSmrg 	}
397610d565efSmrg     }
397710d565efSmrg 
397810d565efSmrg   /* If there are LABELS inside the split insns, increment the
397910d565efSmrg      usage count so we don't delete the label.  */
398010d565efSmrg   if (INSN_P (trial))
398110d565efSmrg     {
398210d565efSmrg       insn = insn_last;
398310d565efSmrg       while (insn != NULL_RTX)
398410d565efSmrg 	{
398510d565efSmrg 	  /* JUMP_P insns have already been "marked" above.  */
398610d565efSmrg 	  if (NONJUMP_INSN_P (insn))
398710d565efSmrg 	    mark_label_nuses (PATTERN (insn));
398810d565efSmrg 
398910d565efSmrg 	  insn = PREV_INSN (insn);
399010d565efSmrg 	}
399110d565efSmrg     }
399210d565efSmrg 
399310d565efSmrg   before = PREV_INSN (trial);
399410d565efSmrg   after = NEXT_INSN (trial);
399510d565efSmrg 
399610d565efSmrg   tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
399710d565efSmrg 
399810d565efSmrg   delete_insn (trial);
399910d565efSmrg 
400010d565efSmrg   /* Recursively call try_split for each new insn created; by the
400110d565efSmrg      time control returns here that insn will be fully split, so
400210d565efSmrg      set LAST and continue from the insn after the one returned.
400310d565efSmrg      We can't use next_active_insn here since AFTER may be a note.
400410d565efSmrg      Ignore deleted insns, which can occur if not optimizing.  */
400510d565efSmrg   for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
400610d565efSmrg     if (! tem->deleted () && INSN_P (tem))
400710d565efSmrg       tem = try_split (PATTERN (tem), tem, 1);
400810d565efSmrg 
400910d565efSmrg   /* Return either the first or the last insn, depending on which was
401010d565efSmrg      requested.  */
401110d565efSmrg   return last
401210d565efSmrg     ? (after ? PREV_INSN (after) : get_last_insn ())
401310d565efSmrg     : NEXT_INSN (before);
401410d565efSmrg }
401510d565efSmrg 
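/* Illustrative usage sketch (an editorial addition, not part of the original
   file): a split pass would typically walk the insn chain and let try_split
   replace each splittable insn in place, e.g.

     for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
       if (INSN_P (insn))
	 insn = try_split (PATTERN (insn), insn, 1);

   Passing LAST == 1 makes try_split return the last insn of the sequence it
   emitted, so the walk resumes after everything produced by the split.  */
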
401610d565efSmrg /* Make and return an INSN rtx, initializing all its slots.
401710d565efSmrg    Store PATTERN in the pattern slots.  */
401810d565efSmrg 
401910d565efSmrg rtx_insn *
402010d565efSmrg make_insn_raw (rtx pattern)
402110d565efSmrg {
402210d565efSmrg   rtx_insn *insn;
402310d565efSmrg 
402410d565efSmrg   insn = as_a <rtx_insn *> (rtx_alloc (INSN));
402510d565efSmrg 
402610d565efSmrg   INSN_UID (insn) = cur_insn_uid++;
402710d565efSmrg   PATTERN (insn) = pattern;
402810d565efSmrg   INSN_CODE (insn) = -1;
402910d565efSmrg   REG_NOTES (insn) = NULL;
403010d565efSmrg   INSN_LOCATION (insn) = curr_insn_location ();
403110d565efSmrg   BLOCK_FOR_INSN (insn) = NULL;
403210d565efSmrg 
403310d565efSmrg #ifdef ENABLE_RTL_CHECKING
403410d565efSmrg   if (insn
403510d565efSmrg       && INSN_P (insn)
403610d565efSmrg       && (returnjump_p (insn)
403710d565efSmrg 	  || (GET_CODE (insn) == SET
403810d565efSmrg 	      && SET_DEST (insn) == pc_rtx)))
403910d565efSmrg     {
404010d565efSmrg       warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
404110d565efSmrg       debug_rtx (insn);
404210d565efSmrg     }
404310d565efSmrg #endif
404410d565efSmrg 
404510d565efSmrg   return insn;
404610d565efSmrg }
404710d565efSmrg 
404810d565efSmrg /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */
404910d565efSmrg 
405010d565efSmrg static rtx_insn *
405110d565efSmrg make_debug_insn_raw (rtx pattern)
405210d565efSmrg {
405310d565efSmrg   rtx_debug_insn *insn;
405410d565efSmrg 
405510d565efSmrg   insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
405610d565efSmrg   INSN_UID (insn) = cur_debug_insn_uid++;
405710d565efSmrg   if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
405810d565efSmrg     INSN_UID (insn) = cur_insn_uid++;
405910d565efSmrg 
406010d565efSmrg   PATTERN (insn) = pattern;
406110d565efSmrg   INSN_CODE (insn) = -1;
406210d565efSmrg   REG_NOTES (insn) = NULL;
406310d565efSmrg   INSN_LOCATION (insn) = curr_insn_location ();
406410d565efSmrg   BLOCK_FOR_INSN (insn) = NULL;
406510d565efSmrg 
406610d565efSmrg   return insn;
406710d565efSmrg }
406810d565efSmrg 
406910d565efSmrg /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
407010d565efSmrg 
407110d565efSmrg static rtx_insn *
407210d565efSmrg make_jump_insn_raw (rtx pattern)
407310d565efSmrg {
407410d565efSmrg   rtx_jump_insn *insn;
407510d565efSmrg 
407610d565efSmrg   insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
407710d565efSmrg   INSN_UID (insn) = cur_insn_uid++;
407810d565efSmrg 
407910d565efSmrg   PATTERN (insn) = pattern;
408010d565efSmrg   INSN_CODE (insn) = -1;
408110d565efSmrg   REG_NOTES (insn) = NULL;
408210d565efSmrg   JUMP_LABEL (insn) = NULL;
408310d565efSmrg   INSN_LOCATION (insn) = curr_insn_location ();
408410d565efSmrg   BLOCK_FOR_INSN (insn) = NULL;
408510d565efSmrg 
408610d565efSmrg   return insn;
408710d565efSmrg }
408810d565efSmrg 
408910d565efSmrg /* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
409010d565efSmrg 
409110d565efSmrg static rtx_insn *
409210d565efSmrg make_call_insn_raw (rtx pattern)
409310d565efSmrg {
409410d565efSmrg   rtx_call_insn *insn;
409510d565efSmrg 
409610d565efSmrg   insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
409710d565efSmrg   INSN_UID (insn) = cur_insn_uid++;
409810d565efSmrg 
409910d565efSmrg   PATTERN (insn) = pattern;
410010d565efSmrg   INSN_CODE (insn) = -1;
410110d565efSmrg   REG_NOTES (insn) = NULL;
410210d565efSmrg   CALL_INSN_FUNCTION_USAGE (insn) = NULL;
410310d565efSmrg   INSN_LOCATION (insn) = curr_insn_location ();
410410d565efSmrg   BLOCK_FOR_INSN (insn) = NULL;
410510d565efSmrg 
410610d565efSmrg   return insn;
410710d565efSmrg }
410810d565efSmrg 
410910d565efSmrg /* Like `make_insn_raw' but make a NOTE instead of an insn.  */
411010d565efSmrg 
411110d565efSmrg static rtx_note *
411210d565efSmrg make_note_raw (enum insn_note subtype)
411310d565efSmrg {
411410d565efSmrg   /* Some notes are never created this way at all.  These notes are
411510d565efSmrg      only created by patching out insns.  */
411610d565efSmrg   gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
411710d565efSmrg 	      && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
411810d565efSmrg 
411910d565efSmrg   rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
412010d565efSmrg   INSN_UID (note) = cur_insn_uid++;
412110d565efSmrg   NOTE_KIND (note) = subtype;
412210d565efSmrg   BLOCK_FOR_INSN (note) = NULL;
412310d565efSmrg   memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
412410d565efSmrg   return note;
412510d565efSmrg }
412610d565efSmrg 
412710d565efSmrg /* Link INSN into the doubly-linked list between PREV and NEXT.
412810d565efSmrg    INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
412910d565efSmrg    but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be NULL.  */
413010d565efSmrg 
413110d565efSmrg static inline void
413210d565efSmrg link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
413310d565efSmrg {
413410d565efSmrg   SET_PREV_INSN (insn) = prev;
413510d565efSmrg   SET_NEXT_INSN (insn) = next;
413610d565efSmrg   if (prev != NULL)
413710d565efSmrg     {
413810d565efSmrg       SET_NEXT_INSN (prev) = insn;
413910d565efSmrg       if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
414010d565efSmrg 	{
414110d565efSmrg 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
414210d565efSmrg 	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
414310d565efSmrg 	}
414410d565efSmrg     }
414510d565efSmrg   if (next != NULL)
414610d565efSmrg     {
414710d565efSmrg       SET_PREV_INSN (next) = insn;
414810d565efSmrg       if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
414910d565efSmrg 	{
415010d565efSmrg 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
415110d565efSmrg 	  SET_PREV_INSN (sequence->insn (0)) = insn;
415210d565efSmrg 	}
415310d565efSmrg     }
415410d565efSmrg 
415510d565efSmrg   if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
415610d565efSmrg     {
415710d565efSmrg       rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
415810d565efSmrg       SET_PREV_INSN (sequence->insn (0)) = prev;
415910d565efSmrg       SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
416010d565efSmrg     }
416110d565efSmrg }
416210d565efSmrg 
416310d565efSmrg /* Add INSN to the end of the doubly-linked list.
416410d565efSmrg    INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */
416510d565efSmrg 
416610d565efSmrg void
416710d565efSmrg add_insn (rtx_insn *insn)
416810d565efSmrg {
416910d565efSmrg   rtx_insn *prev = get_last_insn ();
417010d565efSmrg   link_insn_into_chain (insn, prev, NULL);
4171*c7a68eb7Smrg   if (get_insns () == NULL)
417210d565efSmrg     set_first_insn (insn);
417310d565efSmrg   set_last_insn (insn);
417410d565efSmrg }
417510d565efSmrg 
417610d565efSmrg /* Add INSN into the doubly-linked list after insn AFTER.  */
417710d565efSmrg 
417810d565efSmrg static void
417910d565efSmrg add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
418010d565efSmrg {
418110d565efSmrg   rtx_insn *next = NEXT_INSN (after);
418210d565efSmrg 
418310d565efSmrg   gcc_assert (!optimize || !after->deleted ());
418410d565efSmrg 
418510d565efSmrg   link_insn_into_chain (insn, after, next);
418610d565efSmrg 
418710d565efSmrg   if (next == NULL)
418810d565efSmrg     {
418910d565efSmrg       struct sequence_stack *seq;
419010d565efSmrg 
419110d565efSmrg       for (seq = get_current_sequence (); seq; seq = seq->next)
419210d565efSmrg 	if (after == seq->last)
419310d565efSmrg 	  {
419410d565efSmrg 	    seq->last = insn;
419510d565efSmrg 	    break;
419610d565efSmrg 	  }
419710d565efSmrg     }
419810d565efSmrg }
419910d565efSmrg 
420010d565efSmrg /* Add INSN into the doubly-linked list before insn BEFORE.  */
420110d565efSmrg 
420210d565efSmrg static void
420310d565efSmrg add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
420410d565efSmrg {
420510d565efSmrg   rtx_insn *prev = PREV_INSN (before);
420610d565efSmrg 
420710d565efSmrg   gcc_assert (!optimize || !before->deleted ());
420810d565efSmrg 
420910d565efSmrg   link_insn_into_chain (insn, prev, before);
421010d565efSmrg 
421110d565efSmrg   if (prev == NULL)
421210d565efSmrg     {
421310d565efSmrg       struct sequence_stack *seq;
421410d565efSmrg 
421510d565efSmrg       for (seq = get_current_sequence (); seq; seq = seq->next)
421610d565efSmrg 	if (before == seq->first)
421710d565efSmrg 	  {
421810d565efSmrg 	    seq->first = insn;
421910d565efSmrg 	    break;
422010d565efSmrg 	  }
422110d565efSmrg 
422210d565efSmrg       gcc_assert (seq);
422310d565efSmrg     }
422410d565efSmrg }
422510d565efSmrg 
422610d565efSmrg /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
422710d565efSmrg    If BB is NULL, an attempt is made to infer the bb from AFTER.
422810d565efSmrg 
422910d565efSmrg    This and the next function should be the only functions called
423010d565efSmrg    to insert an insn once delay slots have been filled since only
423110d565efSmrg    they know how to update a SEQUENCE. */
423210d565efSmrg 
423310d565efSmrg void
423410d565efSmrg add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
423510d565efSmrg {
423610d565efSmrg   rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
423710d565efSmrg   rtx_insn *after = as_a <rtx_insn *> (uncast_after);
423810d565efSmrg   add_insn_after_nobb (insn, after);
423910d565efSmrg   if (!BARRIER_P (after)
424010d565efSmrg       && !BARRIER_P (insn)
424110d565efSmrg       && (bb = BLOCK_FOR_INSN (after)))
424210d565efSmrg     {
424310d565efSmrg       set_block_for_insn (insn, bb);
424410d565efSmrg       if (INSN_P (insn))
424510d565efSmrg 	df_insn_rescan (insn);
424610d565efSmrg       /* Should not happen as first in the BB is always
424710d565efSmrg 	 either NOTE or LABEL.  */
424810d565efSmrg       if (BB_END (bb) == after
424910d565efSmrg 	  /* Avoid clobbering of structure when creating new BB.  */
425010d565efSmrg 	  && !BARRIER_P (insn)
425110d565efSmrg 	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
425210d565efSmrg 	BB_END (bb) = insn;
425310d565efSmrg     }
425410d565efSmrg }
425510d565efSmrg 
425610d565efSmrg /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
425710d565efSmrg    If BB is NULL, an attempt is made to infer the bb from before.
425810d565efSmrg 
425910d565efSmrg    This and the previous function should be the only functions called
426010d565efSmrg    to insert an insn once delay slots have been filled since only
426110d565efSmrg    they know how to update a SEQUENCE. */
426210d565efSmrg 
426310d565efSmrg void
426410d565efSmrg add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
426510d565efSmrg {
426610d565efSmrg   rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
426710d565efSmrg   rtx_insn *before = as_a <rtx_insn *> (uncast_before);
426810d565efSmrg   add_insn_before_nobb (insn, before);
426910d565efSmrg 
427010d565efSmrg   if (!bb
427110d565efSmrg       && !BARRIER_P (before)
427210d565efSmrg       && !BARRIER_P (insn))
427310d565efSmrg     bb = BLOCK_FOR_INSN (before);
427410d565efSmrg 
427510d565efSmrg   if (bb)
427610d565efSmrg     {
427710d565efSmrg       set_block_for_insn (insn, bb);
427810d565efSmrg       if (INSN_P (insn))
427910d565efSmrg 	df_insn_rescan (insn);
428010d565efSmrg       /* Should not happen as first in the BB is always either NOTE or
428110d565efSmrg 	 LABEL.  */
428210d565efSmrg       gcc_assert (BB_HEAD (bb) != insn
428310d565efSmrg 		  /* Avoid clobbering of structure when creating new BB.  */
428410d565efSmrg 		  || BARRIER_P (insn)
428510d565efSmrg 		  || NOTE_INSN_BASIC_BLOCK_P (insn));
428610d565efSmrg     }
428710d565efSmrg }
428810d565efSmrg 
428910d565efSmrg /* Replace INSN with a deleted instruction note.  */
429010d565efSmrg 
429110d565efSmrg void
429210d565efSmrg set_insn_deleted (rtx insn)
429310d565efSmrg {
429410d565efSmrg   if (INSN_P (insn))
429510d565efSmrg     df_insn_delete (as_a <rtx_insn *> (insn));
429610d565efSmrg   PUT_CODE (insn, NOTE);
429710d565efSmrg   NOTE_KIND (insn) = NOTE_INSN_DELETED;
429810d565efSmrg }
429910d565efSmrg 
430010d565efSmrg 
430110d565efSmrg /* Unlink INSN from the insn chain.
430210d565efSmrg 
430310d565efSmrg    This function knows how to handle sequences.
430410d565efSmrg 
430510d565efSmrg    This function does not invalidate data flow information associated with
430610d565efSmrg    INSN (i.e. does not call df_insn_delete).  That makes this function
430710d565efSmrg    usable for only disconnecting an insn from the chain, and re-emit it
430810d565efSmrg    elsewhere later.
430910d565efSmrg 
431010d565efSmrg    To later insert INSN elsewhere in the insn chain via add_insn and
431110d565efSmrg    similar functions, PREV_INSN and NEXT_INSN must be nullified by
431210d565efSmrg    the caller.  Nullifying them here breaks many insn chain walks.
431310d565efSmrg 
431410d565efSmrg    To really delete an insn and related DF information, use delete_insn.  */
431510d565efSmrg 
431610d565efSmrg void
431710d565efSmrg remove_insn (rtx uncast_insn)
431810d565efSmrg {
431910d565efSmrg   rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
432010d565efSmrg   rtx_insn *next = NEXT_INSN (insn);
432110d565efSmrg   rtx_insn *prev = PREV_INSN (insn);
432210d565efSmrg   basic_block bb;
432310d565efSmrg 
432410d565efSmrg   if (prev)
432510d565efSmrg     {
432610d565efSmrg       SET_NEXT_INSN (prev) = next;
432710d565efSmrg       if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
432810d565efSmrg 	{
432910d565efSmrg 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
433010d565efSmrg 	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
433110d565efSmrg 	}
433210d565efSmrg     }
433310d565efSmrg   else
433410d565efSmrg     {
433510d565efSmrg       struct sequence_stack *seq;
433610d565efSmrg 
433710d565efSmrg       for (seq = get_current_sequence (); seq; seq = seq->next)
433810d565efSmrg 	if (insn == seq->first)
433910d565efSmrg 	  {
434010d565efSmrg 	    seq->first = next;
434110d565efSmrg 	    break;
434210d565efSmrg 	  }
434310d565efSmrg 
434410d565efSmrg       gcc_assert (seq);
434510d565efSmrg     }
434610d565efSmrg 
434710d565efSmrg   if (next)
434810d565efSmrg     {
434910d565efSmrg       SET_PREV_INSN (next) = prev;
435010d565efSmrg       if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
435110d565efSmrg 	{
435210d565efSmrg 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
435310d565efSmrg 	  SET_PREV_INSN (sequence->insn (0)) = prev;
435410d565efSmrg 	}
435510d565efSmrg     }
435610d565efSmrg   else
435710d565efSmrg     {
435810d565efSmrg       struct sequence_stack *seq;
435910d565efSmrg 
436010d565efSmrg       for (seq = get_current_sequence (); seq; seq = seq->next)
436110d565efSmrg 	if (insn == seq->last)
436210d565efSmrg 	  {
436310d565efSmrg 	    seq->last = prev;
436410d565efSmrg 	    break;
436510d565efSmrg 	  }
436610d565efSmrg 
436710d565efSmrg       gcc_assert (seq);
436810d565efSmrg     }
436910d565efSmrg 
437010d565efSmrg   /* Fix up basic block boundaries, if necessary.  */
437110d565efSmrg   if (!BARRIER_P (insn)
437210d565efSmrg       && (bb = BLOCK_FOR_INSN (insn)))
437310d565efSmrg     {
437410d565efSmrg       if (BB_HEAD (bb) == insn)
437510d565efSmrg 	{
437610d565efSmrg 	  /* Never ever delete the basic block note without deleting whole
437710d565efSmrg 	     basic block.  */
437810d565efSmrg 	  gcc_assert (!NOTE_P (insn));
437910d565efSmrg 	  BB_HEAD (bb) = next;
438010d565efSmrg 	}
438110d565efSmrg       if (BB_END (bb) == insn)
438210d565efSmrg 	BB_END (bb) = prev;
438310d565efSmrg     }
438410d565efSmrg }
438510d565efSmrg 
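/* Editorial sketch (not part of the original file): moving an insn by
   disconnecting it and re-emitting it elsewhere, as described in the
   comment above remove_insn.

     remove_insn (insn);
     SET_PREV_INSN (insn) = NULL;
     SET_NEXT_INSN (insn) = NULL;
     add_insn_after (insn, new_place, NULL);

   The caller nullifies PREV_INSN and NEXT_INSN before linking the insn in
   again, exactly as the function comment requires; NEW_PLACE is a
   placeholder for wherever the insn should land.  */
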
438610d565efSmrg /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */
438710d565efSmrg 
438810d565efSmrg void
438910d565efSmrg add_function_usage_to (rtx call_insn, rtx call_fusage)
439010d565efSmrg {
439110d565efSmrg   gcc_assert (call_insn && CALL_P (call_insn));
439210d565efSmrg 
439310d565efSmrg   /* Put the register usage information on the CALL.  If there is already
439410d565efSmrg      some usage information, put ours at the end.  */
439510d565efSmrg   if (CALL_INSN_FUNCTION_USAGE (call_insn))
439610d565efSmrg     {
439710d565efSmrg       rtx link;
439810d565efSmrg 
439910d565efSmrg       for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
440010d565efSmrg 	   link = XEXP (link, 1))
440110d565efSmrg 	;
440210d565efSmrg 
440310d565efSmrg       XEXP (link, 1) = call_fusage;
440410d565efSmrg     }
440510d565efSmrg   else
440610d565efSmrg     CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
440710d565efSmrg }
440810d565efSmrg 
440910d565efSmrg /* Delete all insns made since FROM.
441010d565efSmrg    FROM becomes the new last instruction.  */
441110d565efSmrg 
441210d565efSmrg void
441310d565efSmrg delete_insns_since (rtx_insn *from)
441410d565efSmrg {
441510d565efSmrg   if (from == 0)
441610d565efSmrg     set_first_insn (0);
441710d565efSmrg   else
441810d565efSmrg     SET_NEXT_INSN (from) = 0;
441910d565efSmrg   set_last_insn (from);
442010d565efSmrg }
442110d565efSmrg 
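/* A common rollback idiom built on delete_insns_since (editorial sketch,
   not part of the original file): remember the current last insn, emit
   speculatively, and discard everything on failure.

     rtx_insn *last = get_last_insn ();
     ... emit tentative insns ...
     if (failed)
       delete_insns_since (last);

   Everything emitted after LAST is unlinked from the chain and LAST becomes
   the last insn again.  */
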
442210d565efSmrg /* This function is deprecated, please use sequences instead.
442310d565efSmrg 
442410d565efSmrg    Move a consecutive bunch of insns to a different place in the chain.
442510d565efSmrg    The insns to be moved are those between FROM and TO.
442610d565efSmrg    They are moved to a new position after the insn AFTER.
442710d565efSmrg    AFTER must not be FROM or TO or any insn in between.
442810d565efSmrg 
442910d565efSmrg    This function does not know about SEQUENCEs and hence should not be
443010d565efSmrg    called after delay-slot filling has been done.  */
443110d565efSmrg 
443210d565efSmrg void
443310d565efSmrg reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
443410d565efSmrg {
443510d565efSmrg   if (flag_checking)
443610d565efSmrg     {
443710d565efSmrg       for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
443810d565efSmrg 	gcc_assert (after != x);
443910d565efSmrg       gcc_assert (after != to);
444010d565efSmrg     }
444110d565efSmrg 
444210d565efSmrg   /* Splice this bunch out of where it is now.  */
444310d565efSmrg   if (PREV_INSN (from))
444410d565efSmrg     SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
444510d565efSmrg   if (NEXT_INSN (to))
444610d565efSmrg     SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
444710d565efSmrg   if (get_last_insn () == to)
444810d565efSmrg     set_last_insn (PREV_INSN (from));
444910d565efSmrg   if (get_insns () == from)
445010d565efSmrg     set_first_insn (NEXT_INSN (to));
445110d565efSmrg 
445210d565efSmrg   /* Make the new neighbors point to it and it to them.  */
445310d565efSmrg   if (NEXT_INSN (after))
445410d565efSmrg     SET_PREV_INSN (NEXT_INSN (after)) = to;
445510d565efSmrg 
445610d565efSmrg   SET_NEXT_INSN (to) = NEXT_INSN (after);
445710d565efSmrg   SET_PREV_INSN (from) = after;
445810d565efSmrg   SET_NEXT_INSN (after) = from;
445910d565efSmrg   if (after == get_last_insn ())
446010d565efSmrg     set_last_insn (to);
446110d565efSmrg }
446210d565efSmrg 
446310d565efSmrg /* Same as function above, but take care to update BB boundaries.  */
446410d565efSmrg void
446510d565efSmrg reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
446610d565efSmrg {
446710d565efSmrg   rtx_insn *prev = PREV_INSN (from);
446810d565efSmrg   basic_block bb, bb2;
446910d565efSmrg 
447010d565efSmrg   reorder_insns_nobb (from, to, after);
447110d565efSmrg 
447210d565efSmrg   if (!BARRIER_P (after)
447310d565efSmrg       && (bb = BLOCK_FOR_INSN (after)))
447410d565efSmrg     {
447510d565efSmrg       rtx_insn *x;
447610d565efSmrg       df_set_bb_dirty (bb);
447710d565efSmrg 
447810d565efSmrg       if (!BARRIER_P (from)
447910d565efSmrg 	  && (bb2 = BLOCK_FOR_INSN (from)))
448010d565efSmrg 	{
448110d565efSmrg 	  if (BB_END (bb2) == to)
448210d565efSmrg 	    BB_END (bb2) = prev;
448310d565efSmrg 	  df_set_bb_dirty (bb2);
448410d565efSmrg 	}
448510d565efSmrg 
448610d565efSmrg       if (BB_END (bb) == after)
448710d565efSmrg 	BB_END (bb) = to;
448810d565efSmrg 
448910d565efSmrg       for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
449010d565efSmrg 	if (!BARRIER_P (x))
449110d565efSmrg 	  df_insn_change_bb (x, bb);
449210d565efSmrg     }
449310d565efSmrg }
449410d565efSmrg 
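/* Editorial sketch of the sequence-based replacement recommended above (not
   part of the original file; see also the fuller discussion below): instead
   of moving existing insns, build the new insns inside a sequence and emit
   the whole list at the desired place.

     start_sequence ();
     ... emit the new insns ...
     rtx_insn *insns = get_insns ();
     end_sequence ();
     emit_insn_after (insns, after);

   This goes through the SEQUENCE-aware insertion helpers rather than the
   raw splicing done by reorder_insns_nobb.  */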
449510d565efSmrg 
449610d565efSmrg /* Emit insn(s) of given code and pattern
449710d565efSmrg    at a specified place within the doubly-linked list.
449810d565efSmrg 
449910d565efSmrg    All of the emit_foo global entry points accept an object
450010d565efSmrg    X which is either an insn list or a PATTERN of a single
450110d565efSmrg    instruction.
450210d565efSmrg 
450310d565efSmrg    There are thus a few canonical ways to generate code and
450410d565efSmrg    emit it at a specific place in the instruction stream.  For
450510d565efSmrg    example, consider the instruction named SPOT and the fact that
450610d565efSmrg    we would like to emit some instructions before SPOT.  We might
450710d565efSmrg    do it like this:
450810d565efSmrg 
450910d565efSmrg 	start_sequence ();
451010d565efSmrg 	... emit the new instructions ...
451110d565efSmrg 	insns_head = get_insns ();
451210d565efSmrg 	end_sequence ();
451310d565efSmrg 
451410d565efSmrg 	emit_insn_before (insns_head, SPOT);
451510d565efSmrg 
451610d565efSmrg    It used to be common to generate SEQUENCE rtl instead, but that
451710d565efSmrg    is a relic of the past which no longer occurs.  The reason is that
451810d565efSmrg    SEQUENCE rtl results in heavily fragmented RTL memory since the SEQUENCE
451910d565efSmrg    generated would almost certainly die right after it was created.  */
452010d565efSmrg 
452110d565efSmrg static rtx_insn *
452210d565efSmrg emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
452310d565efSmrg                            rtx_insn *(*make_raw) (rtx))
452410d565efSmrg {
452510d565efSmrg   rtx_insn *insn;
452610d565efSmrg 
452710d565efSmrg   gcc_assert (before);
452810d565efSmrg 
452910d565efSmrg   if (x == NULL_RTX)
453010d565efSmrg     return safe_as_a <rtx_insn *> (last);
453110d565efSmrg 
453210d565efSmrg   switch (GET_CODE (x))
453310d565efSmrg     {
453410d565efSmrg     case DEBUG_INSN:
453510d565efSmrg     case INSN:
453610d565efSmrg     case JUMP_INSN:
453710d565efSmrg     case CALL_INSN:
453810d565efSmrg     case CODE_LABEL:
453910d565efSmrg     case BARRIER:
454010d565efSmrg     case NOTE:
454110d565efSmrg       insn = as_a <rtx_insn *> (x);
454210d565efSmrg       while (insn)
454310d565efSmrg 	{
454410d565efSmrg 	  rtx_insn *next = NEXT_INSN (insn);
454510d565efSmrg 	  add_insn_before (insn, before, bb);
454610d565efSmrg 	  last = insn;
454710d565efSmrg 	  insn = next;
454810d565efSmrg 	}
454910d565efSmrg       break;
455010d565efSmrg 
455110d565efSmrg #ifdef ENABLE_RTL_CHECKING
455210d565efSmrg     case SEQUENCE:
455310d565efSmrg       gcc_unreachable ();
455410d565efSmrg       break;
455510d565efSmrg #endif
455610d565efSmrg 
455710d565efSmrg     default:
455810d565efSmrg       last = (*make_raw) (x);
455910d565efSmrg       add_insn_before (last, before, bb);
456010d565efSmrg       break;
456110d565efSmrg     }
456210d565efSmrg 
456310d565efSmrg   return safe_as_a <rtx_insn *> (last);
456410d565efSmrg }
456510d565efSmrg 
456610d565efSmrg /* Make X be output before the instruction BEFORE.  */
456710d565efSmrg 
456810d565efSmrg rtx_insn *
456910d565efSmrg emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
457010d565efSmrg {
457110d565efSmrg   return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
457210d565efSmrg }
457310d565efSmrg 
457410d565efSmrg /* Make an instruction with body X and code JUMP_INSN
457510d565efSmrg    and output it before the instruction BEFORE.  */
457610d565efSmrg 
457710d565efSmrg rtx_jump_insn *
457810d565efSmrg emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
457910d565efSmrg {
458010d565efSmrg   return as_a <rtx_jump_insn *> (
458110d565efSmrg 		emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
458210d565efSmrg 					   make_jump_insn_raw));
458310d565efSmrg }
458410d565efSmrg 
458510d565efSmrg /* Make an instruction with body X and code CALL_INSN
458610d565efSmrg    and output it before the instruction BEFORE.  */
458710d565efSmrg 
458810d565efSmrg rtx_insn *
458910d565efSmrg emit_call_insn_before_noloc (rtx x, rtx_insn *before)
459010d565efSmrg {
459110d565efSmrg   return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
459210d565efSmrg 				    make_call_insn_raw);
459310d565efSmrg }
459410d565efSmrg 
459510d565efSmrg /* Make an instruction with body X and code DEBUG_INSN
459610d565efSmrg    and output it before the instruction BEFORE.  */
459710d565efSmrg 
459810d565efSmrg rtx_insn *
459910d565efSmrg emit_debug_insn_before_noloc (rtx x, rtx before)
460010d565efSmrg {
460110d565efSmrg   return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
460210d565efSmrg 				    make_debug_insn_raw);
460310d565efSmrg }
460410d565efSmrg 
460510d565efSmrg /* Make an insn of code BARRIER
460610d565efSmrg    and output it before the insn BEFORE.  */
460710d565efSmrg 
460810d565efSmrg rtx_barrier *
460910d565efSmrg emit_barrier_before (rtx before)
461010d565efSmrg {
461110d565efSmrg   rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
461210d565efSmrg 
461310d565efSmrg   INSN_UID (insn) = cur_insn_uid++;
461410d565efSmrg 
461510d565efSmrg   add_insn_before (insn, before, NULL);
461610d565efSmrg   return insn;
461710d565efSmrg }
461810d565efSmrg 
461910d565efSmrg /* Emit the label LABEL before the insn BEFORE.  */
462010d565efSmrg 
462110d565efSmrg rtx_code_label *
462210d565efSmrg emit_label_before (rtx label, rtx_insn *before)
462310d565efSmrg {
462410d565efSmrg   gcc_checking_assert (INSN_UID (label) == 0);
462510d565efSmrg   INSN_UID (label) = cur_insn_uid++;
462610d565efSmrg   add_insn_before (label, before, NULL);
462710d565efSmrg   return as_a <rtx_code_label *> (label);
462810d565efSmrg }
462910d565efSmrg 
463010d565efSmrg /* Helper for emit_insn_after, handles lists of instructions
463110d565efSmrg    efficiently.  */
463210d565efSmrg 
463310d565efSmrg static rtx_insn *
463410d565efSmrg emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
463510d565efSmrg {
463610d565efSmrg   rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
463710d565efSmrg   rtx_insn *last;
463810d565efSmrg   rtx_insn *after_after;
463910d565efSmrg   if (!bb && !BARRIER_P (after))
464010d565efSmrg     bb = BLOCK_FOR_INSN (after);
464110d565efSmrg 
464210d565efSmrg   if (bb)
464310d565efSmrg     {
464410d565efSmrg       df_set_bb_dirty (bb);
464510d565efSmrg       for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
464610d565efSmrg 	if (!BARRIER_P (last))
464710d565efSmrg 	  {
464810d565efSmrg 	    set_block_for_insn (last, bb);
464910d565efSmrg 	    df_insn_rescan (last);
465010d565efSmrg 	  }
465110d565efSmrg       if (!BARRIER_P (last))
465210d565efSmrg 	{
465310d565efSmrg 	  set_block_for_insn (last, bb);
465410d565efSmrg 	  df_insn_rescan (last);
465510d565efSmrg 	}
465610d565efSmrg       if (BB_END (bb) == after)
465710d565efSmrg 	BB_END (bb) = last;
465810d565efSmrg     }
465910d565efSmrg   else
466010d565efSmrg     for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
466110d565efSmrg       continue;
466210d565efSmrg 
466310d565efSmrg   after_after = NEXT_INSN (after);
466410d565efSmrg 
466510d565efSmrg   SET_NEXT_INSN (after) = first;
466610d565efSmrg   SET_PREV_INSN (first) = after;
466710d565efSmrg   SET_NEXT_INSN (last) = after_after;
466810d565efSmrg   if (after_after)
466910d565efSmrg     SET_PREV_INSN (after_after) = last;
467010d565efSmrg 
467110d565efSmrg   if (after == get_last_insn ())
467210d565efSmrg     set_last_insn (last);
467310d565efSmrg 
467410d565efSmrg   return last;
467510d565efSmrg }
467610d565efSmrg 
467710d565efSmrg static rtx_insn *
467810d565efSmrg emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
467910d565efSmrg 			  rtx_insn *(*make_raw)(rtx))
468010d565efSmrg {
468110d565efSmrg   rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
468210d565efSmrg   rtx_insn *last = after;
468310d565efSmrg 
468410d565efSmrg   gcc_assert (after);
468510d565efSmrg 
468610d565efSmrg   if (x == NULL_RTX)
468710d565efSmrg     return last;
468810d565efSmrg 
468910d565efSmrg   switch (GET_CODE (x))
469010d565efSmrg     {
469110d565efSmrg     case DEBUG_INSN:
469210d565efSmrg     case INSN:
469310d565efSmrg     case JUMP_INSN:
469410d565efSmrg     case CALL_INSN:
469510d565efSmrg     case CODE_LABEL:
469610d565efSmrg     case BARRIER:
469710d565efSmrg     case NOTE:
469810d565efSmrg       last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
469910d565efSmrg       break;
470010d565efSmrg 
470110d565efSmrg #ifdef ENABLE_RTL_CHECKING
470210d565efSmrg     case SEQUENCE:
470310d565efSmrg       gcc_unreachable ();
470410d565efSmrg       break;
470510d565efSmrg #endif
470610d565efSmrg 
470710d565efSmrg     default:
470810d565efSmrg       last = (*make_raw) (x);
470910d565efSmrg       add_insn_after (last, after, bb);
471010d565efSmrg       break;
471110d565efSmrg     }
471210d565efSmrg 
471310d565efSmrg   return last;
471410d565efSmrg }
471510d565efSmrg 
471610d565efSmrg /* Make X be output after the insn AFTER and set the BB of insn.  If
471710d565efSmrg    BB is NULL, an attempt is made to infer the BB from AFTER.  */
471810d565efSmrg 
471910d565efSmrg rtx_insn *
472010d565efSmrg emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
472110d565efSmrg {
472210d565efSmrg   return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
472310d565efSmrg }
472410d565efSmrg 
472510d565efSmrg 
472610d565efSmrg /* Make an insn of code JUMP_INSN with body X
472710d565efSmrg    and output it after the insn AFTER.  */
472810d565efSmrg 
472910d565efSmrg rtx_jump_insn *
473010d565efSmrg emit_jump_insn_after_noloc (rtx x, rtx after)
473110d565efSmrg {
473210d565efSmrg   return as_a <rtx_jump_insn *> (
473310d565efSmrg 		emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
473410d565efSmrg }
473510d565efSmrg 
473610d565efSmrg /* Make an instruction with body X and code CALL_INSN
473710d565efSmrg    and output it after the instruction AFTER.  */
473810d565efSmrg 
473910d565efSmrg rtx_insn *
474010d565efSmrg emit_call_insn_after_noloc (rtx x, rtx after)
474110d565efSmrg {
474210d565efSmrg   return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
474310d565efSmrg }
474410d565efSmrg 
474510d565efSmrg /* Make an instruction with body X and code DEBUG_INSN
474610d565efSmrg    and output it after the instruction AFTER.  */
474710d565efSmrg 
474810d565efSmrg rtx_insn *
474910d565efSmrg emit_debug_insn_after_noloc (rtx x, rtx after)
475010d565efSmrg {
475110d565efSmrg   return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
475210d565efSmrg }
475310d565efSmrg 
475410d565efSmrg /* Make an insn of code BARRIER
475510d565efSmrg    and output it after the insn AFTER.  */
475610d565efSmrg 
475710d565efSmrg rtx_barrier *
475810d565efSmrg emit_barrier_after (rtx after)
475910d565efSmrg {
476010d565efSmrg   rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
476110d565efSmrg 
476210d565efSmrg   INSN_UID (insn) = cur_insn_uid++;
476310d565efSmrg 
476410d565efSmrg   add_insn_after (insn, after, NULL);
476510d565efSmrg   return insn;
476610d565efSmrg }
476710d565efSmrg 
476810d565efSmrg /* Emit the label LABEL after the insn AFTER.  */
476910d565efSmrg 
477010d565efSmrg rtx_insn *
477110d565efSmrg emit_label_after (rtx label, rtx_insn *after)
477210d565efSmrg {
477310d565efSmrg   gcc_checking_assert (INSN_UID (label) == 0);
477410d565efSmrg   INSN_UID (label) = cur_insn_uid++;
477510d565efSmrg   add_insn_after (label, after, NULL);
477610d565efSmrg   return as_a <rtx_insn *> (label);
477710d565efSmrg }
477810d565efSmrg 
477910d565efSmrg /* Notes require a bit of special handling: Some notes need to have their
478010d565efSmrg    BLOCK_FOR_INSN set, others should never have it set, and some should
478110d565efSmrg    have it set or clear depending on the context.   */
478210d565efSmrg 
478310d565efSmrg /* Return true iff a note of kind SUBTYPE should be emitted with routines
478410d565efSmrg    that never set BLOCK_FOR_INSN on NOTE.  BB_BOUNDARY is true if the
478510d565efSmrg    caller is asked to emit a note before BB_HEAD, or after BB_END.  */
478610d565efSmrg 
478710d565efSmrg static bool
478810d565efSmrg note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
478910d565efSmrg {
479010d565efSmrg   switch (subtype)
479110d565efSmrg     {
479210d565efSmrg       /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
479310d565efSmrg       case NOTE_INSN_SWITCH_TEXT_SECTIONS:
479410d565efSmrg 	return true;
479510d565efSmrg 
479610d565efSmrg       /* Notes for var tracking and EH region markers can appear between or
479710d565efSmrg 	 inside basic blocks.  If the caller is emitting on the basic block
479810d565efSmrg 	 boundary, do not set BLOCK_FOR_INSN on the new note.  */
479910d565efSmrg       case NOTE_INSN_VAR_LOCATION:
480010d565efSmrg       case NOTE_INSN_EH_REGION_BEG:
480110d565efSmrg       case NOTE_INSN_EH_REGION_END:
480210d565efSmrg 	return on_bb_boundary_p;
480310d565efSmrg 
480410d565efSmrg       /* Otherwise, BLOCK_FOR_INSN must be set.  */
480510d565efSmrg       default:
480610d565efSmrg 	return false;
480710d565efSmrg     }
480810d565efSmrg }
480910d565efSmrg 
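/* For example (editorial illustration, not part of the original file):

     emit_note_after (NOTE_INSN_EH_REGION_END, BB_END (bb));

   emits the note on a basic block boundary, so the helper above returns
   true and the *_nobb path is taken, leaving BLOCK_FOR_INSN clear on the
   new note.  The same note emitted in the middle of a block gets its
   BLOCK_FOR_INSN set instead.  */
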
481010d565efSmrg /* Emit a note of subtype SUBTYPE after the insn AFTER.  */
481110d565efSmrg 
481210d565efSmrg rtx_note *
481310d565efSmrg emit_note_after (enum insn_note subtype, rtx_insn *after)
481410d565efSmrg {
481510d565efSmrg   rtx_note *note = make_note_raw (subtype);
481610d565efSmrg   basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
481710d565efSmrg   bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
481810d565efSmrg 
481910d565efSmrg   if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
482010d565efSmrg     add_insn_after_nobb (note, after);
482110d565efSmrg   else
482210d565efSmrg     add_insn_after (note, after, bb);
482310d565efSmrg   return note;
482410d565efSmrg }
482510d565efSmrg 
482610d565efSmrg /* Emit a note of subtype SUBTYPE before the insn BEFORE.  */
482710d565efSmrg 
482810d565efSmrg rtx_note *
482910d565efSmrg emit_note_before (enum insn_note subtype, rtx_insn *before)
483010d565efSmrg {
483110d565efSmrg   rtx_note *note = make_note_raw (subtype);
483210d565efSmrg   basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
483310d565efSmrg   bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
483410d565efSmrg 
483510d565efSmrg   if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
483610d565efSmrg     add_insn_before_nobb (note, before);
483710d565efSmrg   else
483810d565efSmrg     add_insn_before (note, before, bb);
483910d565efSmrg   return note;
484010d565efSmrg }
484110d565efSmrg 
484210d565efSmrg /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
484310d565efSmrg    MAKE_RAW indicates how to turn PATTERN into a real insn.  */
484410d565efSmrg 
484510d565efSmrg static rtx_insn *
484610d565efSmrg emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
484710d565efSmrg 			   rtx_insn *(*make_raw) (rtx))
484810d565efSmrg {
484910d565efSmrg   rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
485010d565efSmrg   rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
485110d565efSmrg 
485210d565efSmrg   if (pattern == NULL_RTX || !loc)
485310d565efSmrg     return last;
485410d565efSmrg 
485510d565efSmrg   after = NEXT_INSN (after);
485610d565efSmrg   while (1)
485710d565efSmrg     {
485810d565efSmrg       if (active_insn_p (after)
485910d565efSmrg 	  && !JUMP_TABLE_DATA_P (after) /* FIXME */
486010d565efSmrg 	  && !INSN_LOCATION (after))
486110d565efSmrg 	INSN_LOCATION (after) = loc;
486210d565efSmrg       if (after == last)
486310d565efSmrg 	break;
486410d565efSmrg       after = NEXT_INSN (after);
486510d565efSmrg     }
486610d565efSmrg   return last;
486710d565efSmrg }
486810d565efSmrg 
486910d565efSmrg /* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
487010d565efSmrg    into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
487110d565efSmrg    any DEBUG_INSNs.  */
487210d565efSmrg 
487310d565efSmrg static rtx_insn *
487410d565efSmrg emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
487510d565efSmrg 		    rtx_insn *(*make_raw) (rtx))
487610d565efSmrg {
487710d565efSmrg   rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
487810d565efSmrg   rtx_insn *prev = after;
487910d565efSmrg 
488010d565efSmrg   if (skip_debug_insns)
488110d565efSmrg     while (DEBUG_INSN_P (prev))
488210d565efSmrg       prev = PREV_INSN (prev);
488310d565efSmrg 
488410d565efSmrg   if (INSN_P (prev))
488510d565efSmrg     return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
488610d565efSmrg 				      make_raw);
488710d565efSmrg   else
488810d565efSmrg     return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
488910d565efSmrg }
489010d565efSmrg 
489110d565efSmrg /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
489210d565efSmrg rtx_insn *
489310d565efSmrg emit_insn_after_setloc (rtx pattern, rtx after, int loc)
489410d565efSmrg {
489510d565efSmrg   return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
489610d565efSmrg }
489710d565efSmrg 
489810d565efSmrg /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
489910d565efSmrg rtx_insn *
490010d565efSmrg emit_insn_after (rtx pattern, rtx after)
490110d565efSmrg {
490210d565efSmrg   return emit_pattern_after (pattern, after, true, make_insn_raw);
490310d565efSmrg }
490410d565efSmrg 
490510d565efSmrg /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
490610d565efSmrg rtx_jump_insn *
490710d565efSmrg emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
490810d565efSmrg {
490910d565efSmrg   return as_a <rtx_jump_insn *> (
491010d565efSmrg 	emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
491110d565efSmrg }
491210d565efSmrg 
491310d565efSmrg /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
491410d565efSmrg rtx_jump_insn *
491510d565efSmrg emit_jump_insn_after (rtx pattern, rtx after)
491610d565efSmrg {
491710d565efSmrg   return as_a <rtx_jump_insn *> (
491810d565efSmrg 	emit_pattern_after (pattern, after, true, make_jump_insn_raw));
491910d565efSmrg }
492010d565efSmrg 
492110d565efSmrg /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
492210d565efSmrg rtx_insn *
492310d565efSmrg emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
492410d565efSmrg {
492510d565efSmrg   return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
492610d565efSmrg }
492710d565efSmrg 
492810d565efSmrg /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
492910d565efSmrg rtx_insn *
493010d565efSmrg emit_call_insn_after (rtx pattern, rtx after)
493110d565efSmrg {
493210d565efSmrg   return emit_pattern_after (pattern, after, true, make_call_insn_raw);
493310d565efSmrg }
493410d565efSmrg 
493510d565efSmrg /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
493610d565efSmrg rtx_insn *
493710d565efSmrg emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
493810d565efSmrg {
493910d565efSmrg   return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
494010d565efSmrg }
494110d565efSmrg 
494210d565efSmrg /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
494310d565efSmrg rtx_insn *
494410d565efSmrg emit_debug_insn_after (rtx pattern, rtx after)
494510d565efSmrg {
494610d565efSmrg   return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
494710d565efSmrg }
494810d565efSmrg 
494910d565efSmrg /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
495010d565efSmrg    MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
495110d565efSmrg    indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
495210d565efSmrg    CALL_INSN, etc.  */
495310d565efSmrg 
495410d565efSmrg static rtx_insn *
495510d565efSmrg emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
495610d565efSmrg 			    rtx_insn *(*make_raw) (rtx))
495710d565efSmrg {
495810d565efSmrg   rtx_insn *before = as_a <rtx_insn *> (uncast_before);
495910d565efSmrg   rtx_insn *first = PREV_INSN (before);
496010d565efSmrg   rtx_insn *last = emit_pattern_before_noloc (pattern, before,
496110d565efSmrg 					      insnp ? before : NULL_RTX,
496210d565efSmrg 					      NULL, make_raw);
496310d565efSmrg 
496410d565efSmrg   if (pattern == NULL_RTX || !loc)
496510d565efSmrg     return last;
496610d565efSmrg 
496710d565efSmrg   if (!first)
496810d565efSmrg     first = get_insns ();
496910d565efSmrg   else
497010d565efSmrg     first = NEXT_INSN (first);
497110d565efSmrg   while (1)
497210d565efSmrg     {
497310d565efSmrg       if (active_insn_p (first)
497410d565efSmrg 	  && !JUMP_TABLE_DATA_P (first) /* FIXME */
497510d565efSmrg 	  && !INSN_LOCATION (first))
497610d565efSmrg 	INSN_LOCATION (first) = loc;
497710d565efSmrg       if (first == last)
497810d565efSmrg 	break;
497910d565efSmrg       first = NEXT_INSN (first);
498010d565efSmrg     }
498110d565efSmrg   return last;
498210d565efSmrg }
498310d565efSmrg 
498410d565efSmrg /* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
498510d565efSmrg    into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
498610d565efSmrg    before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
498710d565efSmrg    INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */
498810d565efSmrg 
498910d565efSmrg static rtx_insn *
499010d565efSmrg emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
499110d565efSmrg 		     bool insnp, rtx_insn *(*make_raw) (rtx))
499210d565efSmrg {
499310d565efSmrg   rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
499410d565efSmrg   rtx_insn *next = before;
499510d565efSmrg 
499610d565efSmrg   if (skip_debug_insns)
499710d565efSmrg     while (DEBUG_INSN_P (next))
499810d565efSmrg       next = PREV_INSN (next);
499910d565efSmrg 
500010d565efSmrg   if (INSN_P (next))
500110d565efSmrg     return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
500210d565efSmrg 				       insnp, make_raw);
500310d565efSmrg   else
500410d565efSmrg     return emit_pattern_before_noloc (pattern, before,
500510d565efSmrg 				      insnp ? before : NULL_RTX,
500610d565efSmrg                                       NULL, make_raw);
500710d565efSmrg }
500810d565efSmrg 
500910d565efSmrg /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
501010d565efSmrg rtx_insn *
501110d565efSmrg emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
501210d565efSmrg {
501310d565efSmrg   return emit_pattern_before_setloc (pattern, before, loc, true,
501410d565efSmrg 				     make_insn_raw);
501510d565efSmrg }
501610d565efSmrg 
501710d565efSmrg /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
501810d565efSmrg rtx_insn *
501910d565efSmrg emit_insn_before (rtx pattern, rtx before)
502010d565efSmrg {
502110d565efSmrg   return emit_pattern_before (pattern, before, true, true, make_insn_raw);
502210d565efSmrg }
502310d565efSmrg 
502410d565efSmrg /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
502510d565efSmrg rtx_jump_insn *
502610d565efSmrg emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
502710d565efSmrg {
502810d565efSmrg   return as_a <rtx_jump_insn *> (
502910d565efSmrg 	emit_pattern_before_setloc (pattern, before, loc, false,
503010d565efSmrg 				    make_jump_insn_raw));
503110d565efSmrg }
503210d565efSmrg 
503310d565efSmrg /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
503410d565efSmrg rtx_jump_insn *
503510d565efSmrg emit_jump_insn_before (rtx pattern, rtx before)
503610d565efSmrg {
503710d565efSmrg   return as_a <rtx_jump_insn *> (
503810d565efSmrg 	emit_pattern_before (pattern, before, true, false,
503910d565efSmrg 			     make_jump_insn_raw));
504010d565efSmrg }
504110d565efSmrg 
504210d565efSmrg /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
504310d565efSmrg rtx_insn *
504410d565efSmrg emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
504510d565efSmrg {
504610d565efSmrg   return emit_pattern_before_setloc (pattern, before, loc, false,
504710d565efSmrg 				     make_call_insn_raw);
504810d565efSmrg }
504910d565efSmrg 
505010d565efSmrg /* Like emit_call_insn_before_noloc,
505110d565efSmrg    but set INSN_LOCATION according to BEFORE.  */
505210d565efSmrg rtx_insn *
505310d565efSmrg emit_call_insn_before (rtx pattern, rtx_insn *before)
505410d565efSmrg {
505510d565efSmrg   return emit_pattern_before (pattern, before, true, false,
505610d565efSmrg 			      make_call_insn_raw);
505710d565efSmrg }
505810d565efSmrg 
505910d565efSmrg /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
506010d565efSmrg rtx_insn *
506110d565efSmrg emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
506210d565efSmrg {
506310d565efSmrg   return emit_pattern_before_setloc (pattern, before, loc, false,
506410d565efSmrg 				     make_debug_insn_raw);
506510d565efSmrg }
506610d565efSmrg 
506710d565efSmrg /* Like emit_debug_insn_before_noloc,
506810d565efSmrg    but set INSN_LOCATION according to BEFORE.  */
506910d565efSmrg rtx_insn *
507010d565efSmrg emit_debug_insn_before (rtx pattern, rtx_insn *before)
507110d565efSmrg {
507210d565efSmrg   return emit_pattern_before (pattern, before, false, false,
507310d565efSmrg 			      make_debug_insn_raw);
507410d565efSmrg }
507510d565efSmrg 
507610d565efSmrg /* Take X and emit it at the end of the doubly-linked
507710d565efSmrg    INSN list.
507810d565efSmrg 
507910d565efSmrg    Returns the last insn emitted.  */
508010d565efSmrg 
508110d565efSmrg rtx_insn *
508210d565efSmrg emit_insn (rtx x)
508310d565efSmrg {
508410d565efSmrg   rtx_insn *last = get_last_insn ();
508510d565efSmrg   rtx_insn *insn;
508610d565efSmrg 
508710d565efSmrg   if (x == NULL_RTX)
508810d565efSmrg     return last;
508910d565efSmrg 
509010d565efSmrg   switch (GET_CODE (x))
509110d565efSmrg     {
509210d565efSmrg     case DEBUG_INSN:
509310d565efSmrg     case INSN:
509410d565efSmrg     case JUMP_INSN:
509510d565efSmrg     case CALL_INSN:
509610d565efSmrg     case CODE_LABEL:
509710d565efSmrg     case BARRIER:
509810d565efSmrg     case NOTE:
509910d565efSmrg       insn = as_a <rtx_insn *> (x);
510010d565efSmrg       while (insn)
510110d565efSmrg 	{
510210d565efSmrg 	  rtx_insn *next = NEXT_INSN (insn);
510310d565efSmrg 	  add_insn (insn);
510410d565efSmrg 	  last = insn;
510510d565efSmrg 	  insn = next;
510610d565efSmrg 	}
510710d565efSmrg       break;
510810d565efSmrg 
510910d565efSmrg #ifdef ENABLE_RTL_CHECKING
511010d565efSmrg     case JUMP_TABLE_DATA:
511110d565efSmrg     case SEQUENCE:
511210d565efSmrg       gcc_unreachable ();
511310d565efSmrg       break;
511410d565efSmrg #endif
511510d565efSmrg 
511610d565efSmrg     default:
511710d565efSmrg       last = make_insn_raw (x);
511810d565efSmrg       add_insn (last);
511910d565efSmrg       break;
512010d565efSmrg     }
512110d565efSmrg 
512210d565efSmrg   return last;
512310d565efSmrg }
512410d565efSmrg 
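/* Illustrative use (editorial sketch; DEST and SRC are placeholder rtxes,
   not part of the original file):

     emit_insn (gen_rtx_SET (dest, src));

   A bare pattern like this is wrapped via make_insn_raw, while passing the
   head of an insn list (e.g. the insns of a completed sequence) appends
   every insn in the list.  */
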
512510d565efSmrg /* Make an insn of code DEBUG_INSN with pattern X
512610d565efSmrg    and add it to the end of the doubly-linked list.  */
512710d565efSmrg 
512810d565efSmrg rtx_insn *
512910d565efSmrg emit_debug_insn (rtx x)
513010d565efSmrg {
513110d565efSmrg   rtx_insn *last = get_last_insn ();
513210d565efSmrg   rtx_insn *insn;
513310d565efSmrg 
513410d565efSmrg   if (x == NULL_RTX)
513510d565efSmrg     return last;
513610d565efSmrg 
513710d565efSmrg   switch (GET_CODE (x))
513810d565efSmrg     {
513910d565efSmrg     case DEBUG_INSN:
514010d565efSmrg     case INSN:
514110d565efSmrg     case JUMP_INSN:
514210d565efSmrg     case CALL_INSN:
514310d565efSmrg     case CODE_LABEL:
514410d565efSmrg     case BARRIER:
514510d565efSmrg     case NOTE:
514610d565efSmrg       insn = as_a <rtx_insn *> (x);
514710d565efSmrg       while (insn)
514810d565efSmrg 	{
514910d565efSmrg 	  rtx_insn *next = NEXT_INSN (insn);
515010d565efSmrg 	  add_insn (insn);
515110d565efSmrg 	  last = insn;
515210d565efSmrg 	  insn = next;
515310d565efSmrg 	}
515410d565efSmrg       break;
515510d565efSmrg 
515610d565efSmrg #ifdef ENABLE_RTL_CHECKING
515710d565efSmrg     case JUMP_TABLE_DATA:
515810d565efSmrg     case SEQUENCE:
515910d565efSmrg       gcc_unreachable ();
516010d565efSmrg       break;
516110d565efSmrg #endif
516210d565efSmrg 
516310d565efSmrg     default:
516410d565efSmrg       last = make_debug_insn_raw (x);
516510d565efSmrg       add_insn (last);
516610d565efSmrg       break;
516710d565efSmrg     }
516810d565efSmrg 
516910d565efSmrg   return last;
517010d565efSmrg }
517110d565efSmrg 
517210d565efSmrg /* Make an insn of code JUMP_INSN with pattern X
517310d565efSmrg    and add it to the end of the doubly-linked list.  */
517410d565efSmrg 
517510d565efSmrg rtx_insn *
517610d565efSmrg emit_jump_insn (rtx x)
517710d565efSmrg {
517810d565efSmrg   rtx_insn *last = NULL;
517910d565efSmrg   rtx_insn *insn;
518010d565efSmrg 
518110d565efSmrg   switch (GET_CODE (x))
518210d565efSmrg     {
518310d565efSmrg     case DEBUG_INSN:
518410d565efSmrg     case INSN:
518510d565efSmrg     case JUMP_INSN:
518610d565efSmrg     case CALL_INSN:
518710d565efSmrg     case CODE_LABEL:
518810d565efSmrg     case BARRIER:
518910d565efSmrg     case NOTE:
519010d565efSmrg       insn = as_a <rtx_insn *> (x);
519110d565efSmrg       while (insn)
519210d565efSmrg 	{
519310d565efSmrg 	  rtx_insn *next = NEXT_INSN (insn);
519410d565efSmrg 	  add_insn (insn);
519510d565efSmrg 	  last = insn;
519610d565efSmrg 	  insn = next;
519710d565efSmrg 	}
519810d565efSmrg       break;
519910d565efSmrg 
520010d565efSmrg #ifdef ENABLE_RTL_CHECKING
520110d565efSmrg     case JUMP_TABLE_DATA:
520210d565efSmrg     case SEQUENCE:
520310d565efSmrg       gcc_unreachable ();
520410d565efSmrg       break;
520510d565efSmrg #endif
520610d565efSmrg 
520710d565efSmrg     default:
520810d565efSmrg       last = make_jump_insn_raw (x);
520910d565efSmrg       add_insn (last);
521010d565efSmrg       break;
521110d565efSmrg     }
521210d565efSmrg 
521310d565efSmrg   return last;
521410d565efSmrg }
521510d565efSmrg 
521610d565efSmrg /* Make an insn of code CALL_INSN with pattern X
521710d565efSmrg    and add it to the end of the doubly-linked list.  */
521810d565efSmrg 
521910d565efSmrg rtx_insn *
522010d565efSmrg emit_call_insn (rtx x)
522110d565efSmrg {
522210d565efSmrg   rtx_insn *insn;
522310d565efSmrg 
522410d565efSmrg   switch (GET_CODE (x))
522510d565efSmrg     {
522610d565efSmrg     case DEBUG_INSN:
522710d565efSmrg     case INSN:
522810d565efSmrg     case JUMP_INSN:
522910d565efSmrg     case CALL_INSN:
523010d565efSmrg     case CODE_LABEL:
523110d565efSmrg     case BARRIER:
523210d565efSmrg     case NOTE:
523310d565efSmrg       insn = emit_insn (x);
523410d565efSmrg       break;
523510d565efSmrg 
523610d565efSmrg #ifdef ENABLE_RTL_CHECKING
523710d565efSmrg     case SEQUENCE:
523810d565efSmrg     case JUMP_TABLE_DATA:
523910d565efSmrg       gcc_unreachable ();
524010d565efSmrg       break;
524110d565efSmrg #endif
524210d565efSmrg 
524310d565efSmrg     default:
524410d565efSmrg       insn = make_call_insn_raw (x);
524510d565efSmrg       add_insn (insn);
524610d565efSmrg       break;
524710d565efSmrg     }
524810d565efSmrg 
524910d565efSmrg   return insn;
525010d565efSmrg }
525110d565efSmrg 
525210d565efSmrg /* Add the label LABEL to the end of the doubly-linked list.  */
525310d565efSmrg 
525410d565efSmrg rtx_code_label *
525510d565efSmrg emit_label (rtx uncast_label)
525610d565efSmrg {
525710d565efSmrg   rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);
525810d565efSmrg 
525910d565efSmrg   gcc_checking_assert (INSN_UID (label) == 0);
526010d565efSmrg   INSN_UID (label) = cur_insn_uid++;
526110d565efSmrg   add_insn (label);
526210d565efSmrg   return label;
526310d565efSmrg }
526410d565efSmrg 
526510d565efSmrg /* Make an insn of code JUMP_TABLE_DATA
526610d565efSmrg    and add it to the end of the doubly-linked list.  */
526710d565efSmrg 
526810d565efSmrg rtx_jump_table_data *
526910d565efSmrg emit_jump_table_data (rtx table)
527010d565efSmrg {
527110d565efSmrg   rtx_jump_table_data *jump_table_data =
527210d565efSmrg     as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
527310d565efSmrg   INSN_UID (jump_table_data) = cur_insn_uid++;
527410d565efSmrg   PATTERN (jump_table_data) = table;
527510d565efSmrg   BLOCK_FOR_INSN (jump_table_data) = NULL;
527610d565efSmrg   add_insn (jump_table_data);
527710d565efSmrg   return jump_table_data;
527810d565efSmrg }
527910d565efSmrg 
528010d565efSmrg /* Make an insn of code BARRIER
528110d565efSmrg    and add it to the end of the doubly-linked list.  */
528210d565efSmrg 
528310d565efSmrg rtx_barrier *
528410d565efSmrg emit_barrier (void)
528510d565efSmrg {
528610d565efSmrg   rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
528710d565efSmrg   INSN_UID (barrier) = cur_insn_uid++;
528810d565efSmrg   add_insn (barrier);
528910d565efSmrg   return barrier;
529010d565efSmrg }
529110d565efSmrg 
529210d565efSmrg /* Emit a copy of note ORIG.  */
529310d565efSmrg 
529410d565efSmrg rtx_note *
529510d565efSmrg emit_note_copy (rtx_note *orig)
529610d565efSmrg {
529710d565efSmrg   enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
529810d565efSmrg   rtx_note *note = make_note_raw (kind);
529910d565efSmrg   NOTE_DATA (note) = NOTE_DATA (orig);
530010d565efSmrg   add_insn (note);
530110d565efSmrg   return note;
530210d565efSmrg }
530310d565efSmrg 
530410d565efSmrg /* Make an insn of code NOTE with kind KIND
530510d565efSmrg    and add it to the end of the doubly-linked list.  */
530610d565efSmrg 
530710d565efSmrg rtx_note *
530810d565efSmrg emit_note (enum insn_note kind)
530910d565efSmrg {
531010d565efSmrg   rtx_note *note = make_note_raw (kind);
531110d565efSmrg   add_insn (note);
531210d565efSmrg   return note;
531310d565efSmrg }
531410d565efSmrg 
531510d565efSmrg /* Emit a clobber of lvalue X.  */
531610d565efSmrg 
531710d565efSmrg rtx_insn *
531810d565efSmrg emit_clobber (rtx x)
531910d565efSmrg {
532010d565efSmrg   /* CONCATs should not appear in the insn stream.  */
532110d565efSmrg   if (GET_CODE (x) == CONCAT)
532210d565efSmrg     {
532310d565efSmrg       emit_clobber (XEXP (x, 0));
532410d565efSmrg       return emit_clobber (XEXP (x, 1));
532510d565efSmrg     }
532610d565efSmrg   return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
532710d565efSmrg }
532810d565efSmrg 
532910d565efSmrg /* Return a sequence of insns to clobber lvalue X.  */
533010d565efSmrg 
533110d565efSmrg rtx_insn *
533210d565efSmrg gen_clobber (rtx x)
533310d565efSmrg {
533410d565efSmrg   rtx_insn *seq;
533510d565efSmrg 
533610d565efSmrg   start_sequence ();
533710d565efSmrg   emit_clobber (x);
533810d565efSmrg   seq = get_insns ();
533910d565efSmrg   end_sequence ();
534010d565efSmrg   return seq;
534110d565efSmrg }
534210d565efSmrg 
534310d565efSmrg /* Emit a use of rvalue X.  */
534410d565efSmrg 
534510d565efSmrg rtx_insn *
534610d565efSmrg emit_use (rtx x)
534710d565efSmrg {
534810d565efSmrg   /* CONCATs should not appear in the insn stream.  */
534910d565efSmrg   if (GET_CODE (x) == CONCAT)
535010d565efSmrg     {
535110d565efSmrg       emit_use (XEXP (x, 0));
535210d565efSmrg       return emit_use (XEXP (x, 1));
535310d565efSmrg     }
535410d565efSmrg   return emit_insn (gen_rtx_USE (VOIDmode, x));
535510d565efSmrg }
535610d565efSmrg 
535710d565efSmrg /* Return a sequence of insns to use rvalue X.  */
535810d565efSmrg 
535910d565efSmrg rtx_insn *
536010d565efSmrg gen_use (rtx x)
536110d565efSmrg {
536210d565efSmrg   rtx_insn *seq;
536310d565efSmrg 
536410d565efSmrg   start_sequence ();
536510d565efSmrg   emit_use (x);
536610d565efSmrg   seq = get_insns ();
536710d565efSmrg   end_sequence ();
536810d565efSmrg   return seq;
536910d565efSmrg }
537010d565efSmrg 
537110d565efSmrg /* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
537210d565efSmrg    Return the set in INSN that such notes describe, or NULL if the notes
537310d565efSmrg    have no meaning for INSN.  */
537410d565efSmrg 
537510d565efSmrg rtx
537610d565efSmrg set_for_reg_notes (rtx insn)
537710d565efSmrg {
537810d565efSmrg   rtx pat, reg;
537910d565efSmrg 
538010d565efSmrg   if (!INSN_P (insn))
538110d565efSmrg     return NULL_RTX;
538210d565efSmrg 
538310d565efSmrg   pat = PATTERN (insn);
538410d565efSmrg   if (GET_CODE (pat) == PARALLEL)
538510d565efSmrg     {
538610d565efSmrg       /* We do not use single_set because that ignores SETs of unused
538710d565efSmrg 	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
538810d565efSmrg 	 PARALLEL to have a single SET.  */
538910d565efSmrg       if (multiple_sets (insn))
539010d565efSmrg 	return NULL_RTX;
539110d565efSmrg       pat = XVECEXP (pat, 0, 0);
539210d565efSmrg     }
539310d565efSmrg 
539410d565efSmrg   if (GET_CODE (pat) != SET)
539510d565efSmrg     return NULL_RTX;
539610d565efSmrg 
539710d565efSmrg   reg = SET_DEST (pat);
539810d565efSmrg 
539910d565efSmrg   /* Notes apply to the contents of a STRICT_LOW_PART.  */
540010d565efSmrg   if (GET_CODE (reg) == STRICT_LOW_PART
540110d565efSmrg       || GET_CODE (reg) == ZERO_EXTRACT)
540210d565efSmrg     reg = XEXP (reg, 0);
540310d565efSmrg 
540410d565efSmrg   /* Check that we have a register.  */
540510d565efSmrg   if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
540610d565efSmrg     return NULL_RTX;
540710d565efSmrg 
540810d565efSmrg   return pat;
540910d565efSmrg }
541010d565efSmrg 
541110d565efSmrg /* Place a note of KIND on insn INSN with DATUM as the datum.  If a
541210d565efSmrg    note of this type already exists, its datum is replaced with DATUM.  */
541310d565efSmrg 
541410d565efSmrg rtx
541510d565efSmrg set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
541610d565efSmrg {
541710d565efSmrg   rtx note = find_reg_note (insn, kind, NULL_RTX);
541810d565efSmrg 
541910d565efSmrg   switch (kind)
542010d565efSmrg     {
542110d565efSmrg     case REG_EQUAL:
542210d565efSmrg     case REG_EQUIV:
542310d565efSmrg       /* We need to support the REG_EQUAL on USE trick of find_reloads.  */
542410d565efSmrg       if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
542510d565efSmrg 	return NULL_RTX;
542610d565efSmrg 
542710d565efSmrg       /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
542810d565efSmrg 	 It serves no useful purpose and breaks eliminate_regs.  */
542910d565efSmrg       if (GET_CODE (datum) == ASM_OPERANDS)
543010d565efSmrg 	return NULL_RTX;
543110d565efSmrg 
543210d565efSmrg       /* Notes with side effects are dangerous.  Even if the side-effect
543310d565efSmrg 	 initially mirrors one in PATTERN (INSN), later optimizations
543410d565efSmrg 	 might alter the way that the final register value is calculated
543510d565efSmrg 	 and so move or alter the side-effect in some way.  The note would
543610d565efSmrg 	 then no longer be a valid substitution for SET_SRC.  */
543710d565efSmrg       if (side_effects_p (datum))
543810d565efSmrg 	return NULL_RTX;
543910d565efSmrg       break;
544010d565efSmrg 
544110d565efSmrg     default:
544210d565efSmrg       break;
544310d565efSmrg     }
544410d565efSmrg 
544510d565efSmrg   if (note)
544610d565efSmrg     XEXP (note, 0) = datum;
544710d565efSmrg   else
544810d565efSmrg     {
544910d565efSmrg       add_reg_note (insn, kind, datum);
545010d565efSmrg       note = REG_NOTES (insn);
545110d565efSmrg     }
545210d565efSmrg 
545310d565efSmrg   switch (kind)
545410d565efSmrg     {
545510d565efSmrg     case REG_EQUAL:
545610d565efSmrg     case REG_EQUIV:
545710d565efSmrg       df_notes_rescan (as_a <rtx_insn *> (insn));
545810d565efSmrg       break;
545910d565efSmrg     default:
546010d565efSmrg       break;
546110d565efSmrg     }
546210d565efSmrg 
546310d565efSmrg   return note;
546410d565efSmrg }
546510d565efSmrg 
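/* Illustrative sketch (editor's addition, not in the original source):
   attaching a REG_EQUAL note that records the value computed by INSN,
   where INSN is assumed to be a single-set insn obtained earlier.

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   If INSN already carries a REG_EQUAL note, the call above simply
   replaces its datum.  */
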
546610d565efSmrg /* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */
546710d565efSmrg rtx
546810d565efSmrg set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
546910d565efSmrg {
547010d565efSmrg   rtx set = set_for_reg_notes (insn);
547110d565efSmrg 
547210d565efSmrg   if (set && SET_DEST (set) == dst)
547310d565efSmrg     return set_unique_reg_note (insn, kind, datum);
547410d565efSmrg   return NULL_RTX;
547510d565efSmrg }
547610d565efSmrg 
547710d565efSmrg /* Emit the rtl pattern X as an appropriate kind of insn.  Also emit a
547810d565efSmrg    following barrier if the instruction needs one and if ALLOW_BARRIER_P
547910d565efSmrg    is true.
548010d565efSmrg 
548110d565efSmrg    If X is a label, it is simply added into the insn chain.  */
548210d565efSmrg 
548310d565efSmrg rtx_insn *
548410d565efSmrg emit (rtx x, bool allow_barrier_p)
548510d565efSmrg {
548610d565efSmrg   enum rtx_code code = classify_insn (x);
548710d565efSmrg 
548810d565efSmrg   switch (code)
548910d565efSmrg     {
549010d565efSmrg     case CODE_LABEL:
549110d565efSmrg       return emit_label (x);
549210d565efSmrg     case INSN:
549310d565efSmrg       return emit_insn (x);
549410d565efSmrg     case JUMP_INSN:
549510d565efSmrg       {
549610d565efSmrg 	rtx_insn *insn = emit_jump_insn (x);
549710d565efSmrg 	if (allow_barrier_p
549810d565efSmrg 	    && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
549910d565efSmrg 	  return emit_barrier ();
550010d565efSmrg 	return insn;
550110d565efSmrg       }
550210d565efSmrg     case CALL_INSN:
550310d565efSmrg       return emit_call_insn (x);
550410d565efSmrg     case DEBUG_INSN:
550510d565efSmrg       return emit_debug_insn (x);
550610d565efSmrg     default:
550710d565efSmrg       gcc_unreachable ();
550810d565efSmrg     }
550910d565efSmrg }
551010d565efSmrg 
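/* Illustrative sketch (editor's addition, not in the original source):
   emitting an unconditional jump through emit.  classify_insn treats a
   SET of pc as a JUMP_INSN, and because ALLOW_BARRIER_P is true the
   barrier that follows an unconditional jump is emitted as well.  LABEL
   is an assumed code label created with gen_label_rtx.

     rtx label = gen_label_rtx ();
     emit (gen_rtx_SET (pc_rtx, gen_rtx_LABEL_REF (VOIDmode, label)), true);
*/
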
551110d565efSmrg /* Space for free sequence stack entries.  */
551210d565efSmrg static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
551310d565efSmrg 
551410d565efSmrg /* Begin emitting insns to a sequence.  If this sequence will contain
551510d565efSmrg    something that might cause the compiler to pop arguments to function
551610d565efSmrg    calls (because those pops have previously been deferred; see
551710d565efSmrg    INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
551810d565efSmrg    before calling this function.  That will ensure that the deferred
551910d565efSmrg    pops are not accidentally emitted in the middle of this sequence.  */
552010d565efSmrg 
552110d565efSmrg void
552210d565efSmrg start_sequence (void)
552310d565efSmrg {
552410d565efSmrg   struct sequence_stack *tem;
552510d565efSmrg 
552610d565efSmrg   if (free_sequence_stack != NULL)
552710d565efSmrg     {
552810d565efSmrg       tem = free_sequence_stack;
552910d565efSmrg       free_sequence_stack = tem->next;
553010d565efSmrg     }
553110d565efSmrg   else
553210d565efSmrg     tem = ggc_alloc<sequence_stack> ();
553310d565efSmrg 
553410d565efSmrg   tem->next = get_current_sequence ()->next;
553510d565efSmrg   tem->first = get_insns ();
553610d565efSmrg   tem->last = get_last_insn ();
553710d565efSmrg   get_current_sequence ()->next = tem;
553810d565efSmrg 
553910d565efSmrg   set_first_insn (0);
554010d565efSmrg   set_last_insn (0);
554110d565efSmrg }
554210d565efSmrg 
554310d565efSmrg /* Set up the insn chain starting with FIRST as the current sequence,
554410d565efSmrg    saving the previously current one.  See the documentation for
554510d565efSmrg    start_sequence for more information about how to use this function.  */
554610d565efSmrg 
554710d565efSmrg void
554810d565efSmrg push_to_sequence (rtx_insn *first)
554910d565efSmrg {
555010d565efSmrg   rtx_insn *last;
555110d565efSmrg 
555210d565efSmrg   start_sequence ();
555310d565efSmrg 
555410d565efSmrg   for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
555510d565efSmrg     ;
555610d565efSmrg 
555710d565efSmrg   set_first_insn (first);
555810d565efSmrg   set_last_insn (last);
555910d565efSmrg }
556010d565efSmrg 
556110d565efSmrg /* Like push_to_sequence, but take the last insn as an argument to avoid
556210d565efSmrg    looping through the list.  */
556310d565efSmrg 
556410d565efSmrg void
556510d565efSmrg push_to_sequence2 (rtx_insn *first, rtx_insn *last)
556610d565efSmrg {
556710d565efSmrg   start_sequence ();
556810d565efSmrg 
556910d565efSmrg   set_first_insn (first);
557010d565efSmrg   set_last_insn (last);
557110d565efSmrg }
557210d565efSmrg 
557310d565efSmrg /* Set up the outer-level insn chain
557410d565efSmrg    as the current sequence, saving the previously current one.  */
557510d565efSmrg 
557610d565efSmrg void
557710d565efSmrg push_topmost_sequence (void)
557810d565efSmrg {
557910d565efSmrg   struct sequence_stack *top;
558010d565efSmrg 
558110d565efSmrg   start_sequence ();
558210d565efSmrg 
558310d565efSmrg   top = get_topmost_sequence ();
558410d565efSmrg   set_first_insn (top->first);
558510d565efSmrg   set_last_insn (top->last);
558610d565efSmrg }
558710d565efSmrg 
558810d565efSmrg /* After emitting to the outer-level insn chain, update the outer-level
558910d565efSmrg    insn chain, and restore the previous saved state.  */
559010d565efSmrg 
559110d565efSmrg void
559210d565efSmrg pop_topmost_sequence (void)
559310d565efSmrg {
559410d565efSmrg   struct sequence_stack *top;
559510d565efSmrg 
559610d565efSmrg   top = get_topmost_sequence ();
559710d565efSmrg   top->first = get_insns ();
559810d565efSmrg   top->last = get_last_insn ();
559910d565efSmrg 
560010d565efSmrg   end_sequence ();
560110d565efSmrg }
560210d565efSmrg 
560310d565efSmrg /* After emitting to a sequence, restore previous saved state.
560410d565efSmrg 
560510d565efSmrg    To get the contents of the sequence just made, you must call
560610d565efSmrg    `get_insns' *before* calling here.
560710d565efSmrg 
560810d565efSmrg    If the compiler might have deferred popping arguments while
560910d565efSmrg    generating this sequence, and this sequence will not be immediately
561010d565efSmrg    inserted into the instruction stream, use do_pending_stack_adjust
561110d565efSmrg    before calling get_insns.  That will ensure that the deferred
561210d565efSmrg    pops are inserted into this sequence, and not into some random
561310d565efSmrg    location in the instruction stream.  See INHIBIT_DEFER_POP for more
561410d565efSmrg    information about deferred popping of arguments.  */
561510d565efSmrg 
561610d565efSmrg void
561710d565efSmrg end_sequence (void)
561810d565efSmrg {
561910d565efSmrg   struct sequence_stack *tem = get_current_sequence ()->next;
562010d565efSmrg 
562110d565efSmrg   set_first_insn (tem->first);
562210d565efSmrg   set_last_insn (tem->last);
562310d565efSmrg   get_current_sequence ()->next = tem->next;
562410d565efSmrg 
562510d565efSmrg   memset (tem, 0, sizeof (*tem));
562610d565efSmrg   tem->next = free_sequence_stack;
562710d565efSmrg   free_sequence_stack = tem;
562810d565efSmrg }
562910d565efSmrg 
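/* Illustrative sketch (editor's addition, not in the original source):
   the usual pairing of start_sequence, get_insns and end_sequence to
   build a detached insn chain and then splice it into the main stream.
   TARGET_REG and SOURCE_REG are assumed to be pseudo registers created
   elsewhere.

     start_sequence ();
     emit_move_insn (target_reg, source_reg);
     rtx_insn *seq = get_insns ();
     end_sequence ();
     emit_insn (seq);

   Note that get_insns must be called before end_sequence, as described
   above.  */
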
563010d565efSmrg /* Return 1 if currently emitting into a sequence.  */
563110d565efSmrg 
563210d565efSmrg int
563310d565efSmrg in_sequence_p (void)
563410d565efSmrg {
563510d565efSmrg   return get_current_sequence ()->next != 0;
563610d565efSmrg }
563710d565efSmrg 
563810d565efSmrg /* Put the various virtual registers into REGNO_REG_RTX.  */
563910d565efSmrg 
564010d565efSmrg static void
564110d565efSmrg init_virtual_regs (void)
564210d565efSmrg {
564310d565efSmrg   regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
564410d565efSmrg   regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
564510d565efSmrg   regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
564610d565efSmrg   regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
564710d565efSmrg   regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
564810d565efSmrg   regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
564910d565efSmrg     = virtual_preferred_stack_boundary_rtx;
565010d565efSmrg }
565110d565efSmrg 
565210d565efSmrg 
565310d565efSmrg /* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
565410d565efSmrg static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
565510d565efSmrg static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
565610d565efSmrg static int copy_insn_n_scratches;
565710d565efSmrg 
565810d565efSmrg /* When an insn is being copied by copy_insn_1, this is nonzero if we have
565910d565efSmrg    copied an ASM_OPERANDS.
566010d565efSmrg    In that case, it is the original input-operand vector.  */
566110d565efSmrg static rtvec orig_asm_operands_vector;
566210d565efSmrg 
566310d565efSmrg /* When an insn is being copied by copy_insn_1, this is nonzero if we have
566410d565efSmrg    copied an ASM_OPERANDS.
566510d565efSmrg    In that case, it is the copied input-operand vector.  */
566610d565efSmrg static rtvec copy_asm_operands_vector;
566710d565efSmrg 
566810d565efSmrg /* Likewise for the constraints vector.  */
566910d565efSmrg static rtvec orig_asm_constraints_vector;
567010d565efSmrg static rtvec copy_asm_constraints_vector;
567110d565efSmrg 
567210d565efSmrg /* Recursively create a new copy of an rtx for copy_insn.
567310d565efSmrg    This function differs from copy_rtx in that it handles SCRATCHes and
567410d565efSmrg    ASM_OPERANDs properly.
567510d565efSmrg    Normally, this function is not used directly; use copy_insn as front end.
567610d565efSmrg    However, you could first copy an insn pattern with copy_insn and then use
567710d565efSmrg    this function afterwards to properly copy any REG_NOTEs containing
567810d565efSmrg    SCRATCHes.  */
567910d565efSmrg 
568010d565efSmrg rtx
568110d565efSmrg copy_insn_1 (rtx orig)
568210d565efSmrg {
568310d565efSmrg   rtx copy;
568410d565efSmrg   int i, j;
568510d565efSmrg   RTX_CODE code;
568610d565efSmrg   const char *format_ptr;
568710d565efSmrg 
568810d565efSmrg   if (orig == NULL)
568910d565efSmrg     return NULL;
569010d565efSmrg 
569110d565efSmrg   code = GET_CODE (orig);
569210d565efSmrg 
569310d565efSmrg   switch (code)
569410d565efSmrg     {
569510d565efSmrg     case REG:
569610d565efSmrg     case DEBUG_EXPR:
569710d565efSmrg     CASE_CONST_ANY:
569810d565efSmrg     case SYMBOL_REF:
569910d565efSmrg     case CODE_LABEL:
570010d565efSmrg     case PC:
570110d565efSmrg     case CC0:
570210d565efSmrg     case RETURN:
570310d565efSmrg     case SIMPLE_RETURN:
570410d565efSmrg       return orig;
570510d565efSmrg     case CLOBBER:
570610d565efSmrg       /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
570710d565efSmrg          clobbers or clobbers of hard registers that originated as pseudos.
570810d565efSmrg          This is needed to allow safe register renaming.  */
570910d565efSmrg       if (REG_P (XEXP (orig, 0))
571010d565efSmrg 	  && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
571110d565efSmrg 	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
571210d565efSmrg 	return orig;
571310d565efSmrg       break;
571410d565efSmrg 
571510d565efSmrg     case SCRATCH:
571610d565efSmrg       for (i = 0; i < copy_insn_n_scratches; i++)
571710d565efSmrg 	if (copy_insn_scratch_in[i] == orig)
571810d565efSmrg 	  return copy_insn_scratch_out[i];
571910d565efSmrg       break;
572010d565efSmrg 
572110d565efSmrg     case CONST:
572210d565efSmrg       if (shared_const_p (orig))
572310d565efSmrg 	return orig;
572410d565efSmrg       break;
572510d565efSmrg 
572610d565efSmrg       /* A MEM with a constant address is not sharable.  The problem is that
572710d565efSmrg 	 the constant address may need to be reloaded.  If the mem is shared,
572810d565efSmrg 	 then reloading one copy of this mem will cause all copies to appear
572910d565efSmrg 	 to have been reloaded.  */
573010d565efSmrg 
573110d565efSmrg     default:
573210d565efSmrg       break;
573310d565efSmrg     }
573410d565efSmrg 
573510d565efSmrg   /* Copy the various flags, fields, and other information.  We assume
573610d565efSmrg      that all fields need copying, and then clear the fields that should
573710d565efSmrg      not be copied.  That is the sensible default behavior, and forces
573810d565efSmrg      us to explicitly document why we are *not* copying a flag.  */
573910d565efSmrg   copy = shallow_copy_rtx (orig);
574010d565efSmrg 
574110d565efSmrg   /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
574210d565efSmrg   if (INSN_P (orig))
574310d565efSmrg     {
574410d565efSmrg       RTX_FLAG (copy, jump) = 0;
574510d565efSmrg       RTX_FLAG (copy, call) = 0;
574610d565efSmrg       RTX_FLAG (copy, frame_related) = 0;
574710d565efSmrg     }
574810d565efSmrg 
574910d565efSmrg   format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
575010d565efSmrg 
575110d565efSmrg   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
575210d565efSmrg     switch (*format_ptr++)
575310d565efSmrg       {
575410d565efSmrg       case 'e':
575510d565efSmrg 	if (XEXP (orig, i) != NULL)
575610d565efSmrg 	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
575710d565efSmrg 	break;
575810d565efSmrg 
575910d565efSmrg       case 'E':
576010d565efSmrg       case 'V':
576110d565efSmrg 	if (XVEC (orig, i) == orig_asm_constraints_vector)
576210d565efSmrg 	  XVEC (copy, i) = copy_asm_constraints_vector;
576310d565efSmrg 	else if (XVEC (orig, i) == orig_asm_operands_vector)
576410d565efSmrg 	  XVEC (copy, i) = copy_asm_operands_vector;
576510d565efSmrg 	else if (XVEC (orig, i) != NULL)
576610d565efSmrg 	  {
576710d565efSmrg 	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
576810d565efSmrg 	    for (j = 0; j < XVECLEN (copy, i); j++)
576910d565efSmrg 	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
577010d565efSmrg 	  }
577110d565efSmrg 	break;
577210d565efSmrg 
577310d565efSmrg       case 't':
577410d565efSmrg       case 'w':
577510d565efSmrg       case 'i':
5776*c7a68eb7Smrg       case 'p':
577710d565efSmrg       case 's':
577810d565efSmrg       case 'S':
577910d565efSmrg       case 'u':
578010d565efSmrg       case '0':
578110d565efSmrg 	/* These are left unchanged.  */
578210d565efSmrg 	break;
578310d565efSmrg 
578410d565efSmrg       default:
578510d565efSmrg 	gcc_unreachable ();
578610d565efSmrg       }
578710d565efSmrg 
578810d565efSmrg   if (code == SCRATCH)
578910d565efSmrg     {
579010d565efSmrg       i = copy_insn_n_scratches++;
579110d565efSmrg       gcc_assert (i < MAX_RECOG_OPERANDS);
579210d565efSmrg       copy_insn_scratch_in[i] = orig;
579310d565efSmrg       copy_insn_scratch_out[i] = copy;
579410d565efSmrg     }
579510d565efSmrg   else if (code == ASM_OPERANDS)
579610d565efSmrg     {
579710d565efSmrg       orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
579810d565efSmrg       copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
579910d565efSmrg       orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
580010d565efSmrg       copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
580110d565efSmrg     }
580210d565efSmrg 
580310d565efSmrg   return copy;
580410d565efSmrg }
580510d565efSmrg 
580610d565efSmrg /* Create a new copy of an rtx.
580710d565efSmrg    This function differs from copy_rtx in that it handles SCRATCHes and
580810d565efSmrg    ASM_OPERANDs properly.
580910d565efSmrg    INSN doesn't really have to be a full INSN; it could be just the
581010d565efSmrg    pattern.  */
581110d565efSmrg rtx
581210d565efSmrg copy_insn (rtx insn)
581310d565efSmrg {
581410d565efSmrg   copy_insn_n_scratches = 0;
581510d565efSmrg   orig_asm_operands_vector = 0;
581610d565efSmrg   orig_asm_constraints_vector = 0;
581710d565efSmrg   copy_asm_operands_vector = 0;
581810d565efSmrg   copy_asm_constraints_vector = 0;
581910d565efSmrg   return copy_insn_1 (insn);
582010d565efSmrg }
582110d565efSmrg 
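/* Illustrative sketch (editor's addition, not in the original source):
   the two-step use described above copy_insn_1, where INSN is an assumed
   rtx_insn whose pattern and REG_NOTES are copied so that any SCRATCHes
   stay shared between the two copies.

     rtx pat_copy  = copy_insn (PATTERN (insn));
     rtx note_copy = copy_insn_1 (REG_NOTES (insn));
*/
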
582210d565efSmrg /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
582310d565efSmrg    on the assumption that INSN itself remains in its original place.  */
582410d565efSmrg 
582510d565efSmrg rtx_insn *
582610d565efSmrg copy_delay_slot_insn (rtx_insn *insn)
582710d565efSmrg {
582810d565efSmrg   /* Copy INSN with its rtx_code, all its notes, location etc.  */
582910d565efSmrg   insn = as_a <rtx_insn *> (copy_rtx (insn));
583010d565efSmrg   INSN_UID (insn) = cur_insn_uid++;
583110d565efSmrg   return insn;
583210d565efSmrg }
583310d565efSmrg 
583410d565efSmrg /* Initialize data structures and variables in this file
583510d565efSmrg    before generating rtl for each function.  */
583610d565efSmrg 
583710d565efSmrg void
583810d565efSmrg init_emit (void)
583910d565efSmrg {
584010d565efSmrg   set_first_insn (NULL);
584110d565efSmrg   set_last_insn (NULL);
584210d565efSmrg   if (MIN_NONDEBUG_INSN_UID)
584310d565efSmrg     cur_insn_uid = MIN_NONDEBUG_INSN_UID;
584410d565efSmrg   else
584510d565efSmrg     cur_insn_uid = 1;
584610d565efSmrg   cur_debug_insn_uid = 1;
584710d565efSmrg   reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
584810d565efSmrg   first_label_num = label_num;
584910d565efSmrg   get_current_sequence ()->next = NULL;
585010d565efSmrg 
585110d565efSmrg   /* Init the tables that describe all the pseudo regs.  */
585210d565efSmrg 
585310d565efSmrg   crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
585410d565efSmrg 
585510d565efSmrg   crtl->emit.regno_pointer_align
585610d565efSmrg     = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
585710d565efSmrg 
585810d565efSmrg   regno_reg_rtx
585910d565efSmrg     = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);
586010d565efSmrg 
586110d565efSmrg   /* Put copies of all the hard registers into regno_reg_rtx.  */
586210d565efSmrg   memcpy (regno_reg_rtx,
586310d565efSmrg 	  initial_regno_reg_rtx,
586410d565efSmrg 	  FIRST_PSEUDO_REGISTER * sizeof (rtx));
586510d565efSmrg 
586610d565efSmrg   /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
586710d565efSmrg   init_virtual_regs ();
586810d565efSmrg 
586910d565efSmrg   /* Indicate that the virtual registers and stack locations are
587010d565efSmrg      all pointers.  */
587110d565efSmrg   REG_POINTER (stack_pointer_rtx) = 1;
587210d565efSmrg   REG_POINTER (frame_pointer_rtx) = 1;
587310d565efSmrg   REG_POINTER (hard_frame_pointer_rtx) = 1;
587410d565efSmrg   REG_POINTER (arg_pointer_rtx) = 1;
587510d565efSmrg 
587610d565efSmrg   REG_POINTER (virtual_incoming_args_rtx) = 1;
587710d565efSmrg   REG_POINTER (virtual_stack_vars_rtx) = 1;
587810d565efSmrg   REG_POINTER (virtual_stack_dynamic_rtx) = 1;
587910d565efSmrg   REG_POINTER (virtual_outgoing_args_rtx) = 1;
588010d565efSmrg   REG_POINTER (virtual_cfa_rtx) = 1;
588110d565efSmrg 
588210d565efSmrg #ifdef STACK_BOUNDARY
588310d565efSmrg   REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
588410d565efSmrg   REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
588510d565efSmrg   REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
588610d565efSmrg   REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
588710d565efSmrg 
588810d565efSmrg   REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
588910d565efSmrg   REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
589010d565efSmrg   REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
589110d565efSmrg   REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
589210d565efSmrg 
589310d565efSmrg   REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
589410d565efSmrg #endif
589510d565efSmrg 
589610d565efSmrg #ifdef INIT_EXPANDERS
589710d565efSmrg   INIT_EXPANDERS;
589810d565efSmrg #endif
589910d565efSmrg }
590010d565efSmrg 
5901*c7a68eb7Smrg /* Return the value of element I of CONST_VECTOR X as a wide_int.  */
5902*c7a68eb7Smrg 
5903*c7a68eb7Smrg wide_int
5904*c7a68eb7Smrg const_vector_int_elt (const_rtx x, unsigned int i)
5905*c7a68eb7Smrg {
5906*c7a68eb7Smrg   /* First handle elements that are directly encoded.  */
5907*c7a68eb7Smrg   machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
5908*c7a68eb7Smrg   if (i < (unsigned int) XVECLEN (x, 0))
5909*c7a68eb7Smrg     return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);
5910*c7a68eb7Smrg 
5911*c7a68eb7Smrg   /* Identify the pattern that contains element I and work out the index of
5912*c7a68eb7Smrg      the last encoded element for that pattern.  */
5913*c7a68eb7Smrg   unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5914*c7a68eb7Smrg   unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5915*c7a68eb7Smrg   unsigned int count = i / npatterns;
5916*c7a68eb7Smrg   unsigned int pattern = i % npatterns;
5917*c7a68eb7Smrg   unsigned int final_i = encoded_nelts - npatterns + pattern;
5918*c7a68eb7Smrg 
5919*c7a68eb7Smrg   /* If there are no steps, the final encoded value is the right one.  */
5920*c7a68eb7Smrg   if (!CONST_VECTOR_STEPPED_P (x))
5921*c7a68eb7Smrg     return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);
5922*c7a68eb7Smrg 
5923*c7a68eb7Smrg   /* Otherwise work out the value from the last two encoded elements.  */
5924*c7a68eb7Smrg   rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
5925*c7a68eb7Smrg   rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
5926*c7a68eb7Smrg   wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
5927*c7a68eb7Smrg 			   rtx_mode_t (v1, elt_mode));
5928*c7a68eb7Smrg   return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
5929*c7a68eb7Smrg }
5930*c7a68eb7Smrg 
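/* Worked example (editor's addition, not in the original source): take a
   hypothetical V8SImode constant { 1, 2, 3, 4, 5, 6, 7, 8 } encoded with a
   single pattern of three elements { 1, 2, 3 }.  Asking for element 6 gives
   npatterns = 1, count = 6, pattern = 0 and final_i = 2.  Since the encoding
   is stepped, the value comes from the last two encoded elements:
   diff = 3 - 2 = 1, so the result is 3 + (6 - 2) * 1 = 7, which matches
   element 6 of the full vector.  */
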
5931*c7a68eb7Smrg /* Return the value of element I of CONST_VECTOR X.  */
5932*c7a68eb7Smrg 
5933*c7a68eb7Smrg rtx
5934*c7a68eb7Smrg const_vector_elt (const_rtx x, unsigned int i)
5935*c7a68eb7Smrg {
5936*c7a68eb7Smrg   /* First handle elements that are directly encoded.  */
5937*c7a68eb7Smrg   if (i < (unsigned int) XVECLEN (x, 0))
5938*c7a68eb7Smrg     return CONST_VECTOR_ENCODED_ELT (x, i);
5939*c7a68eb7Smrg 
5940*c7a68eb7Smrg   /* If there are no steps, the final encoded value is the right one.  */
5941*c7a68eb7Smrg   if (!CONST_VECTOR_STEPPED_P (x))
5942*c7a68eb7Smrg     {
5943*c7a68eb7Smrg       /* Identify the pattern that contains element I and work out the index of
5944*c7a68eb7Smrg 	 the last encoded element for that pattern.  */
5945*c7a68eb7Smrg       unsigned int encoded_nelts = const_vector_encoded_nelts (x);
5946*c7a68eb7Smrg       unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
5947*c7a68eb7Smrg       unsigned int pattern = i % npatterns;
5948*c7a68eb7Smrg       unsigned int final_i = encoded_nelts - npatterns + pattern;
5949*c7a68eb7Smrg       return CONST_VECTOR_ENCODED_ELT (x, final_i);
5950*c7a68eb7Smrg     }
5951*c7a68eb7Smrg 
5952*c7a68eb7Smrg   /* Otherwise work out the value from the last two encoded elements.  */
5953*c7a68eb7Smrg   return immed_wide_int_const (const_vector_int_elt (x, i),
5954*c7a68eb7Smrg 			       GET_MODE_INNER (GET_MODE (x)));
5955*c7a68eb7Smrg }
5956*c7a68eb7Smrg 
5957*c7a68eb7Smrg /* Return true if X is a valid element for a CONST_VECTOR of the given
5958*c7a68eb7Smrg    mode.  */
5959*c7a68eb7Smrg 
5960*c7a68eb7Smrg bool
5961*c7a68eb7Smrg valid_for_const_vector_p (machine_mode, rtx x)
5962*c7a68eb7Smrg {
5963*c7a68eb7Smrg   return (CONST_SCALAR_INT_P (x)
5964*c7a68eb7Smrg 	  || CONST_DOUBLE_AS_FLOAT_P (x)
5965*c7a68eb7Smrg 	  || CONST_FIXED_P (x));
5966*c7a68eb7Smrg }
5967*c7a68eb7Smrg 
5968*c7a68eb7Smrg /* Generate a vector constant of mode MODE in which every element has
5969*c7a68eb7Smrg    value ELT.  */
5970*c7a68eb7Smrg 
5971*c7a68eb7Smrg rtx
5972*c7a68eb7Smrg gen_const_vec_duplicate (machine_mode mode, rtx elt)
5973*c7a68eb7Smrg {
5974*c7a68eb7Smrg   rtx_vector_builder builder (mode, 1, 1);
5975*c7a68eb7Smrg   builder.quick_push (elt);
5976*c7a68eb7Smrg   return builder.build ();
5977*c7a68eb7Smrg }
5978*c7a68eb7Smrg 
5979*c7a68eb7Smrg /* Return a vector rtx of mode MODE in which every element has value X.
5980*c7a68eb7Smrg    The result will be a constant if X is constant.  */
5981*c7a68eb7Smrg 
5982*c7a68eb7Smrg rtx
5983*c7a68eb7Smrg gen_vec_duplicate (machine_mode mode, rtx x)
5984*c7a68eb7Smrg {
5985*c7a68eb7Smrg   if (valid_for_const_vector_p (mode, x))
5986*c7a68eb7Smrg     return gen_const_vec_duplicate (mode, x);
5987*c7a68eb7Smrg   return gen_rtx_VEC_DUPLICATE (mode, x);
5988*c7a68eb7Smrg }
5989*c7a68eb7Smrg 
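/* Illustrative sketch (editor's addition, not in the original source):
   building an all-ones integer vector, assuming the target provides
   V4SImode.  Because const1_rtx is a valid constant element, the result
   is a CONST_VECTOR rather than a VEC_DUPLICATE expression.

     rtx ones = gen_vec_duplicate (V4SImode, const1_rtx);
*/
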
5990*c7a68eb7Smrg /* A subroutine of const_vec_series_p that handles the case in which:
5991*c7a68eb7Smrg 
5992*c7a68eb7Smrg      (GET_CODE (X) == CONST_VECTOR
5993*c7a68eb7Smrg       && CONST_VECTOR_NPATTERNS (X) == 1
5994*c7a68eb7Smrg       && !CONST_VECTOR_DUPLICATE_P (X))
5995*c7a68eb7Smrg 
5996*c7a68eb7Smrg    is known to hold.  */
5997*c7a68eb7Smrg 
5998*c7a68eb7Smrg bool
5999*c7a68eb7Smrg const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
6000*c7a68eb7Smrg {
6001*c7a68eb7Smrg   /* Stepped sequences are only defined for integers, to avoid specifying
6002*c7a68eb7Smrg      rounding behavior.  */
6003*c7a68eb7Smrg   if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
6004*c7a68eb7Smrg     return false;
6005*c7a68eb7Smrg 
6006*c7a68eb7Smrg   /* A non-duplicated vector with two elements can always be seen as a
6007*c7a68eb7Smrg      series with a nonzero step.  Longer vectors must have a stepped
6008*c7a68eb7Smrg      encoding.  */
6009*c7a68eb7Smrg   if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
6010*c7a68eb7Smrg       && !CONST_VECTOR_STEPPED_P (x))
6011*c7a68eb7Smrg     return false;
6012*c7a68eb7Smrg 
6013*c7a68eb7Smrg   /* Calculate the step between the first and second elements.  */
6014*c7a68eb7Smrg   scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
6015*c7a68eb7Smrg   rtx base = CONST_VECTOR_ELT (x, 0);
6016*c7a68eb7Smrg   rtx step = simplify_binary_operation (MINUS, inner,
6017*c7a68eb7Smrg 					CONST_VECTOR_ENCODED_ELT (x, 1), base);
6018*c7a68eb7Smrg   if (rtx_equal_p (step, CONST0_RTX (inner)))
6019*c7a68eb7Smrg     return false;
6020*c7a68eb7Smrg 
6021*c7a68eb7Smrg   /* If we have a stepped encoding, check that the step between the
6022*c7a68eb7Smrg      second and third elements is the same as STEP.  */
6023*c7a68eb7Smrg   if (CONST_VECTOR_STEPPED_P (x))
6024*c7a68eb7Smrg     {
6025*c7a68eb7Smrg       rtx diff = simplify_binary_operation (MINUS, inner,
6026*c7a68eb7Smrg 					    CONST_VECTOR_ENCODED_ELT (x, 2),
6027*c7a68eb7Smrg 					    CONST_VECTOR_ENCODED_ELT (x, 1));
6028*c7a68eb7Smrg       if (!rtx_equal_p (step, diff))
6029*c7a68eb7Smrg 	return false;
6030*c7a68eb7Smrg     }
6031*c7a68eb7Smrg 
6032*c7a68eb7Smrg   *base_out = base;
6033*c7a68eb7Smrg   *step_out = step;
6034*c7a68eb7Smrg   return true;
6035*c7a68eb7Smrg }
6036*c7a68eb7Smrg 
6037*c7a68eb7Smrg /* Generate a vector constant of mode MODE in which element I has
6038*c7a68eb7Smrg    the value BASE + I * STEP.  */
6039*c7a68eb7Smrg 
6040*c7a68eb7Smrg rtx
6041*c7a68eb7Smrg gen_const_vec_series (machine_mode mode, rtx base, rtx step)
6042*c7a68eb7Smrg {
6043*c7a68eb7Smrg   gcc_assert (valid_for_const_vector_p (mode, base)
6044*c7a68eb7Smrg 	      && valid_for_const_vector_p (mode, step));
6045*c7a68eb7Smrg 
6046*c7a68eb7Smrg   rtx_vector_builder builder (mode, 1, 3);
6047*c7a68eb7Smrg   builder.quick_push (base);
6048*c7a68eb7Smrg   for (int i = 1; i < 3; ++i)
6049*c7a68eb7Smrg     builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
6050*c7a68eb7Smrg 					     builder[i - 1], step));
6051*c7a68eb7Smrg   return builder.build ();
6052*c7a68eb7Smrg }
6053*c7a68eb7Smrg 
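/* Illustrative sketch (editor's addition, not in the original source):
   building the V4SImode series { 10, 13, 16, 19 }, assuming the target
   provides that mode.  Only the first three elements are pushed; the
   builder encodes the remainder implicitly as a stepped sequence.

     rtx series = gen_const_vec_series (V4SImode, GEN_INT (10), GEN_INT (3));
*/
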
6054*c7a68eb7Smrg /* Generate a vector of mode MODE in which element I has the value
6055*c7a68eb7Smrg    BASE + I * STEP.  The result will be a constant if BASE and STEP
6056*c7a68eb7Smrg    are both constants.  */
6057*c7a68eb7Smrg 
6058*c7a68eb7Smrg rtx
6059*c7a68eb7Smrg gen_vec_series (machine_mode mode, rtx base, rtx step)
6060*c7a68eb7Smrg {
6061*c7a68eb7Smrg   if (step == const0_rtx)
6062*c7a68eb7Smrg     return gen_vec_duplicate (mode, base);
6063*c7a68eb7Smrg   if (valid_for_const_vector_p (mode, base)
6064*c7a68eb7Smrg       && valid_for_const_vector_p (mode, step))
6065*c7a68eb7Smrg     return gen_const_vec_series (mode, base, step);
6066*c7a68eb7Smrg   return gen_rtx_VEC_SERIES (mode, base, step);
6067*c7a68eb7Smrg }
6068*c7a68eb7Smrg 
6069*c7a68eb7Smrg /* Generate a new vector constant for mode MODE and constant value
6070*c7a68eb7Smrg    CONSTANT.  */
607110d565efSmrg 
607210d565efSmrg static rtx
607310d565efSmrg gen_const_vector (machine_mode mode, int constant)
607410d565efSmrg {
6075*c7a68eb7Smrg   machine_mode inner = GET_MODE_INNER (mode);
607610d565efSmrg 
607710d565efSmrg   gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
607810d565efSmrg 
6079*c7a68eb7Smrg   rtx el = const_tiny_rtx[constant][(int) inner];
6080*c7a68eb7Smrg   gcc_assert (el);
608110d565efSmrg 
6082*c7a68eb7Smrg   return gen_const_vec_duplicate (mode, el);
608310d565efSmrg }
608410d565efSmrg 
608510d565efSmrg /* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
608610d565efSmrg    all elements are zero, and the one vector when all elements are one.  */
608710d565efSmrg rtx
608810d565efSmrg gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
608910d565efSmrg {
6090*c7a68eb7Smrg   gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));
609110d565efSmrg 
609210d565efSmrg   /* If the values are all the same, check to see if we can use one of the
609310d565efSmrg      standard constant vectors.  */
6094*c7a68eb7Smrg   if (rtvec_all_equal_p (v))
6095*c7a68eb7Smrg     return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));
609610d565efSmrg 
6097*c7a68eb7Smrg   unsigned int nunits = GET_NUM_ELEM (v);
6098*c7a68eb7Smrg   rtx_vector_builder builder (mode, nunits, 1);
6099*c7a68eb7Smrg   for (unsigned int i = 0; i < nunits; ++i)
6100*c7a68eb7Smrg     builder.quick_push (RTVEC_ELT (v, i));
6101*c7a68eb7Smrg   return builder.build (v);
610210d565efSmrg }
610310d565efSmrg 
610410d565efSmrg /* Initialize global register information required by all functions.  */
610510d565efSmrg 
610610d565efSmrg void
610710d565efSmrg init_emit_regs (void)
610810d565efSmrg {
610910d565efSmrg   int i;
611010d565efSmrg   machine_mode mode;
611110d565efSmrg   mem_attrs *attrs;
611210d565efSmrg 
611310d565efSmrg   /* Reset register attributes.  */
611410d565efSmrg   reg_attrs_htab->empty ();
611510d565efSmrg 
611610d565efSmrg   /* We need reg_raw_mode, so initialize the modes now.  */
611710d565efSmrg   init_reg_modes_target ();
611810d565efSmrg 
611910d565efSmrg   /* Assign register numbers to the globally defined register rtx.  */
612010d565efSmrg   stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
612110d565efSmrg   frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
612210d565efSmrg   hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
612310d565efSmrg   arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
612410d565efSmrg   virtual_incoming_args_rtx =
612510d565efSmrg     gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
612610d565efSmrg   virtual_stack_vars_rtx =
612710d565efSmrg     gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
612810d565efSmrg   virtual_stack_dynamic_rtx =
612910d565efSmrg     gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
613010d565efSmrg   virtual_outgoing_args_rtx =
613110d565efSmrg     gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
613210d565efSmrg   virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
613310d565efSmrg   virtual_preferred_stack_boundary_rtx =
613410d565efSmrg     gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
613510d565efSmrg 
613610d565efSmrg   /* Initialize RTL for commonly used hard registers.  These are
613710d565efSmrg      copied into regno_reg_rtx as we begin to compile each function.  */
613810d565efSmrg   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
613910d565efSmrg     initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
614010d565efSmrg 
614110d565efSmrg #ifdef RETURN_ADDRESS_POINTER_REGNUM
614210d565efSmrg   return_address_pointer_rtx
614310d565efSmrg     = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
614410d565efSmrg #endif
614510d565efSmrg 
614610d565efSmrg   pic_offset_table_rtx = NULL_RTX;
614710d565efSmrg   if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
614810d565efSmrg     pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
614910d565efSmrg 
615010d565efSmrg   for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
615110d565efSmrg     {
615210d565efSmrg       mode = (machine_mode) i;
615310d565efSmrg       attrs = ggc_cleared_alloc<mem_attrs> ();
615410d565efSmrg       attrs->align = BITS_PER_UNIT;
615510d565efSmrg       attrs->addrspace = ADDR_SPACE_GENERIC;
615610d565efSmrg       if (mode != BLKmode && mode != VOIDmode)
615710d565efSmrg 	{
615810d565efSmrg 	  attrs->size_known_p = true;
615910d565efSmrg 	  attrs->size = GET_MODE_SIZE (mode);
616010d565efSmrg 	  if (STRICT_ALIGNMENT)
616110d565efSmrg 	    attrs->align = GET_MODE_ALIGNMENT (mode);
616210d565efSmrg 	}
616310d565efSmrg       mode_mem_attrs[i] = attrs;
616410d565efSmrg     }
6165*c7a68eb7Smrg 
6166*c7a68eb7Smrg   split_branch_probability = profile_probability::uninitialized ();
616710d565efSmrg }
616810d565efSmrg 
616910d565efSmrg /* Initialize global machine_mode variables.  */
617010d565efSmrg 
617110d565efSmrg void
617210d565efSmrg init_derived_machine_modes (void)
617310d565efSmrg {
6174*c7a68eb7Smrg   opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
6175*c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
617610d565efSmrg     {
6177*c7a68eb7Smrg       scalar_int_mode mode = mode_iter.require ();
6178*c7a68eb7Smrg 
617910d565efSmrg       if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
6180*c7a68eb7Smrg 	  && !opt_byte_mode.exists ())
6181*c7a68eb7Smrg 	opt_byte_mode = mode;
618210d565efSmrg 
618310d565efSmrg       if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
6184*c7a68eb7Smrg 	  && !opt_word_mode.exists ())
6185*c7a68eb7Smrg 	opt_word_mode = mode;
618610d565efSmrg     }
618710d565efSmrg 
6188*c7a68eb7Smrg   byte_mode = opt_byte_mode.require ();
6189*c7a68eb7Smrg   word_mode = opt_word_mode.require ();
6190*c7a68eb7Smrg   ptr_mode = as_a <scalar_int_mode>
6191*c7a68eb7Smrg     (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
619210d565efSmrg }
619310d565efSmrg 
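/* Editor's note (not in the original source): on a typical LP64 target
   with BITS_PER_UNIT == 8, BITS_PER_WORD == 64 and POINTER_SIZE == 64,
   the code above ends up with byte_mode == QImode, word_mode == DImode,
   and ptr_mode == DImode as well.  */
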
619410d565efSmrg /* Create some permanent unique rtl objects shared between all functions.  */
619510d565efSmrg 
619610d565efSmrg void
619710d565efSmrg init_emit_once (void)
619810d565efSmrg {
619910d565efSmrg   int i;
620010d565efSmrg   machine_mode mode;
6201*c7a68eb7Smrg   scalar_float_mode double_mode;
6202*c7a68eb7Smrg   opt_scalar_mode smode_iter;
620310d565efSmrg 
620410d565efSmrg   /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
620510d565efSmrg      CONST_FIXED, and memory attribute hash tables.  */
620610d565efSmrg   const_int_htab = hash_table<const_int_hasher>::create_ggc (37);
620710d565efSmrg 
620810d565efSmrg #if TARGET_SUPPORTS_WIDE_INT
620910d565efSmrg   const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
621010d565efSmrg #endif
621110d565efSmrg   const_double_htab = hash_table<const_double_hasher>::create_ggc (37);
621210d565efSmrg 
6213*c7a68eb7Smrg   if (NUM_POLY_INT_COEFFS > 1)
6214*c7a68eb7Smrg     const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);
6215*c7a68eb7Smrg 
621610d565efSmrg   const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);
621710d565efSmrg 
621810d565efSmrg   reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);
621910d565efSmrg 
622010d565efSmrg #ifdef INIT_EXPANDERS
622110d565efSmrg   /* This is to initialize {init|mark|free}_machine_status before the first
622210d565efSmrg      call to push_function_context_to.  This is needed by the Chill front
622310d565efSmrg      end which calls push_function_context_to before the first call to
622410d565efSmrg      init_function_start.  */
622510d565efSmrg   INIT_EXPANDERS;
622610d565efSmrg #endif
622710d565efSmrg 
622810d565efSmrg   /* Create the unique rtx's for certain rtx codes and operand values.  */
622910d565efSmrg 
623010d565efSmrg   /* Process stack-limiting command-line options.  */
623110d565efSmrg   if (opt_fstack_limit_symbol_arg != NULL)
623210d565efSmrg     stack_limit_rtx
623310d565efSmrg       = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
623410d565efSmrg   if (opt_fstack_limit_register_no >= 0)
623510d565efSmrg     stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);
623610d565efSmrg 
623710d565efSmrg   /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
623810d565efSmrg      tries to use these variables.  */
623910d565efSmrg   for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
624010d565efSmrg     const_int_rtx[i + MAX_SAVED_CONST_INT] =
624110d565efSmrg       gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
624210d565efSmrg 
624310d565efSmrg   if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
624410d565efSmrg       && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
624510d565efSmrg     const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
624610d565efSmrg   else
624710d565efSmrg     const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
624810d565efSmrg 
6249*c7a68eb7Smrg   double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();
625010d565efSmrg 
625110d565efSmrg   real_from_integer (&dconst0, double_mode, 0, SIGNED);
625210d565efSmrg   real_from_integer (&dconst1, double_mode, 1, SIGNED);
625310d565efSmrg   real_from_integer (&dconst2, double_mode, 2, SIGNED);
625410d565efSmrg 
625510d565efSmrg   dconstm1 = dconst1;
625610d565efSmrg   dconstm1.sign = 1;
625710d565efSmrg 
625810d565efSmrg   dconsthalf = dconst1;
625910d565efSmrg   SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
626010d565efSmrg 
626110d565efSmrg   for (i = 0; i < 3; i++)
626210d565efSmrg     {
626310d565efSmrg       const REAL_VALUE_TYPE *const r =
626410d565efSmrg 	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
626510d565efSmrg 
6266*c7a68eb7Smrg       FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
626710d565efSmrg 	const_tiny_rtx[i][(int) mode] =
626810d565efSmrg 	  const_double_from_real_value (*r, mode);
626910d565efSmrg 
6270*c7a68eb7Smrg       FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
627110d565efSmrg 	const_tiny_rtx[i][(int) mode] =
627210d565efSmrg 	  const_double_from_real_value (*r, mode);
627310d565efSmrg 
627410d565efSmrg       const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
627510d565efSmrg 
6276*c7a68eb7Smrg       FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
627710d565efSmrg 	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
627810d565efSmrg 
627910d565efSmrg       for (mode = MIN_MODE_PARTIAL_INT;
628010d565efSmrg 	   mode <= MAX_MODE_PARTIAL_INT;
628110d565efSmrg 	   mode = (machine_mode)((int)(mode) + 1))
628210d565efSmrg 	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
628310d565efSmrg     }
628410d565efSmrg 
628510d565efSmrg   const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
628610d565efSmrg 
6287*c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
628810d565efSmrg     const_tiny_rtx[3][(int) mode] = constm1_rtx;
628910d565efSmrg 
6290*c7a68eb7Smrg   /* For BImode, 1 and -1 are unsigned and signed interpretations
6291*c7a68eb7Smrg      of the same value.  */
6292*c7a68eb7Smrg   const_tiny_rtx[0][(int) BImode] = const0_rtx;
6293*c7a68eb7Smrg   const_tiny_rtx[1][(int) BImode] = const_true_rtx;
6294*c7a68eb7Smrg   const_tiny_rtx[3][(int) BImode] = const_true_rtx;
6295*c7a68eb7Smrg 
629610d565efSmrg   for (mode = MIN_MODE_PARTIAL_INT;
629710d565efSmrg        mode <= MAX_MODE_PARTIAL_INT;
629810d565efSmrg        mode = (machine_mode)((int)(mode) + 1))
629910d565efSmrg     const_tiny_rtx[3][(int) mode] = constm1_rtx;
630010d565efSmrg 
6301*c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
630210d565efSmrg     {
630310d565efSmrg       rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
630410d565efSmrg       const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
630510d565efSmrg     }
630610d565efSmrg 
6307*c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
630810d565efSmrg     {
630910d565efSmrg       rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
631010d565efSmrg       const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
631110d565efSmrg     }
631210d565efSmrg 
6313*c7a68eb7Smrg   /* As for BImode, "all 1" and "all -1" are unsigned and signed
6314*c7a68eb7Smrg      interpretations of the same value.  */
6315*c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
6316*c7a68eb7Smrg     {
6317*c7a68eb7Smrg       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
6318*c7a68eb7Smrg       const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
6319*c7a68eb7Smrg       const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
6320*c7a68eb7Smrg     }
6321*c7a68eb7Smrg 
6322*c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
632310d565efSmrg     {
632410d565efSmrg       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
632510d565efSmrg       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
632610d565efSmrg       const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
632710d565efSmrg     }
632810d565efSmrg 
6329*c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
633010d565efSmrg     {
633110d565efSmrg       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
633210d565efSmrg       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
633310d565efSmrg     }
633410d565efSmrg 
6335*c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
633610d565efSmrg     {
6337*c7a68eb7Smrg       scalar_mode smode = smode_iter.require ();
6338*c7a68eb7Smrg       FCONST0 (smode).data.high = 0;
6339*c7a68eb7Smrg       FCONST0 (smode).data.low = 0;
6340*c7a68eb7Smrg       FCONST0 (smode).mode = smode;
6341*c7a68eb7Smrg       const_tiny_rtx[0][(int) smode]
6342*c7a68eb7Smrg 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
634310d565efSmrg     }
634410d565efSmrg 
6345*c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
634610d565efSmrg     {
6347*c7a68eb7Smrg       scalar_mode smode = smode_iter.require ();
6348*c7a68eb7Smrg       FCONST0 (smode).data.high = 0;
6349*c7a68eb7Smrg       FCONST0 (smode).data.low = 0;
6350*c7a68eb7Smrg       FCONST0 (smode).mode = smode;
6351*c7a68eb7Smrg       const_tiny_rtx[0][(int) smode]
6352*c7a68eb7Smrg 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
635310d565efSmrg     }
635410d565efSmrg 
6355*c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
635610d565efSmrg     {
6357*c7a68eb7Smrg       scalar_mode smode = smode_iter.require ();
6358*c7a68eb7Smrg       FCONST0 (smode).data.high = 0;
6359*c7a68eb7Smrg       FCONST0 (smode).data.low = 0;
6360*c7a68eb7Smrg       FCONST0 (smode).mode = smode;
6361*c7a68eb7Smrg       const_tiny_rtx[0][(int) smode]
6362*c7a68eb7Smrg 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
636310d565efSmrg 
636410d565efSmrg       /* We store the value 1.  */
6365*c7a68eb7Smrg       FCONST1 (smode).data.high = 0;
6366*c7a68eb7Smrg       FCONST1 (smode).data.low = 0;
6367*c7a68eb7Smrg       FCONST1 (smode).mode = smode;
6368*c7a68eb7Smrg       FCONST1 (smode).data
6369*c7a68eb7Smrg 	= double_int_one.lshift (GET_MODE_FBIT (smode),
637010d565efSmrg 				 HOST_BITS_PER_DOUBLE_INT,
6371*c7a68eb7Smrg 				 SIGNED_FIXED_POINT_MODE_P (smode));
6372*c7a68eb7Smrg       const_tiny_rtx[1][(int) smode]
6373*c7a68eb7Smrg 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
637410d565efSmrg     }
637510d565efSmrg 
6376*c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
637710d565efSmrg     {
6378*c7a68eb7Smrg       scalar_mode smode = smode_iter.require ();
6379*c7a68eb7Smrg       FCONST0 (smode).data.high = 0;
6380*c7a68eb7Smrg       FCONST0 (smode).data.low = 0;
6381*c7a68eb7Smrg       FCONST0 (smode).mode = smode;
6382*c7a68eb7Smrg       const_tiny_rtx[0][(int) smode]
6383*c7a68eb7Smrg 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
638410d565efSmrg 
638510d565efSmrg       /* We store the value 1.  */
6386*c7a68eb7Smrg       FCONST1 (smode).data.high = 0;
6387*c7a68eb7Smrg       FCONST1 (smode).data.low = 0;
6388*c7a68eb7Smrg       FCONST1 (smode).mode = smode;
6389*c7a68eb7Smrg       FCONST1 (smode).data
6390*c7a68eb7Smrg 	= double_int_one.lshift (GET_MODE_FBIT (smode),
639110d565efSmrg 				 HOST_BITS_PER_DOUBLE_INT,
6392*c7a68eb7Smrg 				 SIGNED_FIXED_POINT_MODE_P (smode));
6393*c7a68eb7Smrg       const_tiny_rtx[1][(int) smode]
6394*c7a68eb7Smrg 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
639510d565efSmrg     }
639610d565efSmrg 
6397*c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
639810d565efSmrg     {
639910d565efSmrg       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
640010d565efSmrg     }
640110d565efSmrg 
6402*c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
640310d565efSmrg     {
640410d565efSmrg       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
640510d565efSmrg     }
640610d565efSmrg 
6407*c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
640810d565efSmrg     {
640910d565efSmrg       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
641010d565efSmrg       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
641110d565efSmrg     }
641210d565efSmrg 
6413*c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
641410d565efSmrg     {
641510d565efSmrg       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
641610d565efSmrg       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
641710d565efSmrg     }
641810d565efSmrg 
641910d565efSmrg   for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
642010d565efSmrg     if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
642110d565efSmrg       const_tiny_rtx[0][i] = const0_rtx;
642210d565efSmrg 
6423*c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_POINTER_BOUNDS)
642410d565efSmrg     {
6425*c7a68eb7Smrg       scalar_mode smode = smode_iter.require ();
6426*c7a68eb7Smrg       wide_int wi_zero = wi::zero (GET_MODE_PRECISION (smode));
6427*c7a68eb7Smrg       const_tiny_rtx[0][smode] = immed_wide_int_const (wi_zero, smode);
642810d565efSmrg     }
642910d565efSmrg 
643010d565efSmrg   pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
643110d565efSmrg   ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
643210d565efSmrg   simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
643310d565efSmrg   cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
643410d565efSmrg   invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
643510d565efSmrg 				   /*prev_insn=*/NULL,
643610d565efSmrg 				   /*next_insn=*/NULL,
643710d565efSmrg 				   /*bb=*/NULL,
643810d565efSmrg 				   /*pattern=*/NULL_RTX,
643910d565efSmrg 				   /*location=*/-1,
644010d565efSmrg 				   CODE_FOR_nothing,
644110d565efSmrg 				   /*reg_notes=*/NULL_RTX);
644210d565efSmrg }
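/* A minimal sketch of how the const_tiny_rtx tables initialized above
   are normally consumed.  Most code does not index the array directly;
   it goes through the CONST0_RTX/CONST1_RTX/CONST2_RTX macros from
   rtl.h, for example

     rtx zero = CONST0_RTX (V4SImode);
     rtx one = CONST1_RTX (SImode);

   where `zero' is the shared all-zeros vector constant and `one' is the
   shared (const_int 1).  The modes are only illustrative; any mode whose
   entry was filled in above may be used.  */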
644310d565efSmrg 
644410d565efSmrg /* Produce an exact duplicate of insn INSN after AFTER.
644510d565efSmrg    Take care to update libcall regions if present.  */
644610d565efSmrg 
644710d565efSmrg rtx_insn *
644810d565efSmrg emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
644910d565efSmrg {
645010d565efSmrg   rtx_insn *new_rtx;
645110d565efSmrg   rtx link;
645210d565efSmrg 
645310d565efSmrg   switch (GET_CODE (insn))
645410d565efSmrg     {
645510d565efSmrg     case INSN:
645610d565efSmrg       new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
645710d565efSmrg       break;
645810d565efSmrg 
645910d565efSmrg     case JUMP_INSN:
646010d565efSmrg       new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
646110d565efSmrg       CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
646210d565efSmrg       break;
646310d565efSmrg 
646410d565efSmrg     case DEBUG_INSN:
646510d565efSmrg       new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
646610d565efSmrg       break;
646710d565efSmrg 
646810d565efSmrg     case CALL_INSN:
646910d565efSmrg       new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
647010d565efSmrg       if (CALL_INSN_FUNCTION_USAGE (insn))
647110d565efSmrg 	CALL_INSN_FUNCTION_USAGE (new_rtx)
647210d565efSmrg 	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
647310d565efSmrg       SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
647410d565efSmrg       RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
647510d565efSmrg       RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
647610d565efSmrg       RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
647710d565efSmrg 	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
647810d565efSmrg       break;
647910d565efSmrg 
648010d565efSmrg     default:
648110d565efSmrg       gcc_unreachable ();
648210d565efSmrg     }
648310d565efSmrg 
648410d565efSmrg   /* Update LABEL_NUSES.  */
648510d565efSmrg   mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
648610d565efSmrg 
648710d565efSmrg   INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
648810d565efSmrg 
648910d565efSmrg   /* If the old insn is frame related, then so is the new one.  This is
649010d565efSmrg      primarily needed for IA-64 unwind info which marks epilogue insns,
649110d565efSmrg      which may be duplicated by the basic block reordering code.  */
649210d565efSmrg   RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
649310d565efSmrg 
649410d565efSmrg   /* Locate the end of existing REG_NOTES in NEW_RTX.  */
649510d565efSmrg   rtx *ptail = &REG_NOTES (new_rtx);
649610d565efSmrg   while (*ptail != NULL_RTX)
649710d565efSmrg     ptail = &XEXP (*ptail, 1);
649810d565efSmrg 
649910d565efSmrg   /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
650010d565efSmrg      will make them.  REG_LABEL_TARGETs are created there too, but are
650110d565efSmrg      supposed to be sticky, so we copy them.  */
650210d565efSmrg   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
650310d565efSmrg     if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
650410d565efSmrg       {
650510d565efSmrg 	*ptail = duplicate_reg_note (link);
650610d565efSmrg 	ptail = &XEXP (*ptail, 1);
650710d565efSmrg       }
650810d565efSmrg 
650910d565efSmrg   INSN_CODE (new_rtx) = INSN_CODE (insn);
651010d565efSmrg   return new_rtx;
651110d565efSmrg }
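/* Illustrative use: a pass that wants to duplicate INSN at the end of
   basic block BB (both names are hypothetical locals, not part of this
   file) could write

     rtx_insn *copy = emit_copy_of_insn_after (insn, BB_END (bb));

   INSN must be an INSN, JUMP_INSN, DEBUG_INSN or CALL_INSN; anything
   else hits the gcc_unreachable above.  */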
651210d565efSmrg 
651310d565efSmrg static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
651410d565efSmrg rtx
651510d565efSmrg gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
651610d565efSmrg {
651710d565efSmrg   if (hard_reg_clobbers[mode][regno])
651810d565efSmrg     return hard_reg_clobbers[mode][regno];
651910d565efSmrg   else
652010d565efSmrg     return (hard_reg_clobbers[mode][regno] =
652110d565efSmrg 	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
652210d565efSmrg }
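/* Sketch of a typical caller: a target expander that needs to express
   a clobber of a fixed hard register, say the condition-code register
   (TARGET_FLAGS_REGNUM below stands in for whatever register number the
   target actually uses), might write

     rtx clobber = gen_hard_reg_clobber (CCmode, TARGET_FLAGS_REGNUM);

   and include that rtx in a PARALLEL pattern.  Because the result is
   cached in hard_reg_clobbers, repeated requests for the same
   (mode, regno) pair return the same shared CLOBBER.  */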
652310d565efSmrg 
652410d565efSmrg location_t prologue_location;
652510d565efSmrg location_t epilogue_location;
652610d565efSmrg 
652710d565efSmrg /* Hold the current location information and the last location information,
652810d565efSmrg    so that the data structures are built lazily, only when instructions at a
652910d565efSmrg    given place are actually needed.  */
653010d565efSmrg static location_t curr_location;
653110d565efSmrg 
653210d565efSmrg /* Allocate the insn location data structure.  */
653310d565efSmrg void
653410d565efSmrg insn_locations_init (void)
653510d565efSmrg {
653610d565efSmrg   prologue_location = epilogue_location = 0;
653710d565efSmrg   curr_location = UNKNOWN_LOCATION;
653810d565efSmrg }
653910d565efSmrg 
654010d565efSmrg /* At the end of the emit stage, clear the current location.  */
654110d565efSmrg void
654210d565efSmrg insn_locations_finalize (void)
654310d565efSmrg {
654410d565efSmrg   epilogue_location = curr_location;
654510d565efSmrg   curr_location = UNKNOWN_LOCATION;
654610d565efSmrg }
654710d565efSmrg 
654810d565efSmrg /* Set current location.  */
654910d565efSmrg void
655010d565efSmrg set_curr_insn_location (location_t location)
655110d565efSmrg {
655210d565efSmrg   curr_location = location;
655310d565efSmrg }
655410d565efSmrg 
655510d565efSmrg /* Get current location.  */
655610d565efSmrg location_t
655710d565efSmrg curr_insn_location (void)
655810d565efSmrg {
655910d565efSmrg   return curr_location;
656010d565efSmrg }
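/* A minimal sketch of the usual pattern during expansion: before
   expanding a statement, the expander records the statement's location,

     set_curr_insn_location (gimple_location (stmt));

   and every insn emitted afterwards picks that location up when it is
   created (see make_insn_raw earlier in this file).  Here `stmt' is a
   hypothetical gimple statement; gimple_location is the accessor
   declared in gimple.h.  */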
656110d565efSmrg 
656210d565efSmrg /* Return the lexical scope block INSN belongs to.  */
656310d565efSmrg tree
656410d565efSmrg insn_scope (const rtx_insn *insn)
656510d565efSmrg {
656610d565efSmrg   return LOCATION_BLOCK (INSN_LOCATION (insn));
656710d565efSmrg }
656810d565efSmrg 
656910d565efSmrg /* Return line number of the statement that produced this insn.  */
657010d565efSmrg int
657110d565efSmrg insn_line (const rtx_insn *insn)
657210d565efSmrg {
657310d565efSmrg   return LOCATION_LINE (INSN_LOCATION (insn));
657410d565efSmrg }
657510d565efSmrg 
657610d565efSmrg /* Return source file of the statement that produced this insn.  */
657710d565efSmrg const char *
657810d565efSmrg insn_file (const rtx_insn *insn)
657910d565efSmrg {
658010d565efSmrg   return LOCATION_FILE (INSN_LOCATION (insn));
658110d565efSmrg }
658210d565efSmrg 
658310d565efSmrg /* Return expanded location of the statement that produced this insn.  */
658410d565efSmrg expanded_location
658510d565efSmrg insn_location (const rtx_insn *insn)
658610d565efSmrg {
658710d565efSmrg   return expand_location (INSN_LOCATION (insn));
658810d565efSmrg }
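/* For example, a dump routine could report where an insn came from
   (purely illustrative):

     expanded_location xloc = insn_location (insn);
     fprintf (dump_file, "%s:%d\n", xloc.file, xloc.line);

   which is equivalent to calling insn_file and insn_line separately.  */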
658910d565efSmrg 
659010d565efSmrg /* Return true if memory model MODEL requires a pre-operation (release-style)
659110d565efSmrg    barrier or a post-operation (acquire-style) barrier.  While not universal,
659210d565efSmrg    this function matches the behavior of several targets.  */
659310d565efSmrg 
659410d565efSmrg bool
659510d565efSmrg need_atomic_barrier_p (enum memmodel model, bool pre)
659610d565efSmrg {
659710d565efSmrg   switch (model & MEMMODEL_BASE_MASK)
659810d565efSmrg     {
659910d565efSmrg     case MEMMODEL_RELAXED:
660010d565efSmrg     case MEMMODEL_CONSUME:
660110d565efSmrg       return false;
660210d565efSmrg     case MEMMODEL_RELEASE:
660310d565efSmrg       return pre;
660410d565efSmrg     case MEMMODEL_ACQUIRE:
660510d565efSmrg       return !pre;
660610d565efSmrg     case MEMMODEL_ACQ_REL:
660710d565efSmrg     case MEMMODEL_SEQ_CST:
660810d565efSmrg       return true;
660910d565efSmrg     default:
661010d565efSmrg       gcc_unreachable ();
661110d565efSmrg     }
661210d565efSmrg }
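/* Sketch of how a target's atomic expander typically uses this
   predicate when it implements an atomic operation as
   "barrier; relaxed op; barrier" (expand_mem_thread_fence is the
   helper from optabs.c; MODEL is the memmodel operand):

     if (need_atomic_barrier_p (model, true))
       expand_mem_thread_fence (model);
     ...emit the relaxed operation...
     if (need_atomic_barrier_p (model, false))
       expand_mem_thread_fence (model);

   so that relaxed and consume accesses get no fences at all.  */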
661310d565efSmrg 
6614*c7a68eb7Smrg /* Return a constant shift amount for shifting a value of mode MODE
6615*c7a68eb7Smrg    by VALUE bits.  */
6616*c7a68eb7Smrg 
6617*c7a68eb7Smrg rtx
6618*c7a68eb7Smrg gen_int_shift_amount (machine_mode, poly_int64 value)
6619*c7a68eb7Smrg {
6620*c7a68eb7Smrg   /* Use a 64-bit mode, to avoid any truncation.
6621*c7a68eb7Smrg 
6622*c7a68eb7Smrg      ??? Perhaps this should be automatically derived from the .md files
6623*c7a68eb7Smrg      instead, or perhaps have a target hook.  */
6624*c7a68eb7Smrg   scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
6625*c7a68eb7Smrg 				? DImode
6626*c7a68eb7Smrg 				: int_mode_for_size (64, 0).require ());
6627*c7a68eb7Smrg   return gen_int_mode (value, shift_mode);
6628*c7a68eb7Smrg }
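/* Illustrative call: building (ashift:SI X (const_int 3)) with a shift
   amount whose mode is chosen by gen_int_shift_amount rather than
   hard-coded (`x' is a hypothetical SImode operand):

     rtx amount = gen_int_shift_amount (SImode, 3);
     rtx shift = gen_rtx_ASHIFT (SImode, x, amount);

   Since CONST_INTs carry no mode, the result is simply (const_int 3);
   the 64-bit mode chosen above only guards against truncation of large
   shift amounts.  */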
6629*c7a68eb7Smrg 
663010d565efSmrg /* Initialize fields of rtl_data related to stack alignment.  */
663110d565efSmrg 
663210d565efSmrg void
663310d565efSmrg rtl_data::init_stack_alignment ()
663410d565efSmrg {
663510d565efSmrg   stack_alignment_needed = STACK_BOUNDARY;
663610d565efSmrg   max_used_stack_slot_alignment = STACK_BOUNDARY;
663710d565efSmrg   stack_alignment_estimated = 0;
663810d565efSmrg   preferred_stack_boundary = STACK_BOUNDARY;
663910d565efSmrg }
664010d565efSmrg 
664110d565efSmrg 
664210d565efSmrg #include "gt-emit-rtl.h"
6643