/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "memmodel.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "tm_p.h"
#include "stringpool.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "varasm.h"
#include "cfgrtl.h"
#include "tree-eh.h"
#include "explow.h"
#include "expr.h"
#include "builtins.h"
#include "rtl-iter.h"
#include "stor-layout.h"
#include "opts.h"
#include "predict.h"
#include "rtx-vector-builder.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "gimplify.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

scalar_int_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
scalar_int_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
scalar_int_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is not
   able to deal with a length attribute nested in top-level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* Marker used for denoting an INSN which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required
   to be distinct from NULL_RTX.  Currently used by the peephole2 pass.  */
rtx_insn *invalid_insn_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;

struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;

struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;

  static hashval_t hash (rtx x);
  static bool equal (rtx x, const compare_type &y);
};

static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;

/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  */
profile_probability split_branch_probability;

/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif

/* Returns a hash code for CONST_POLY_INT X.  */

hashval_t
const_poly_int_hasher::hash (rtx x)
{
  inchash::hash h;
  h.add_int (GET_MODE (x));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
  return h.end ();
}

/* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y.  */

bool
const_poly_int_hasher::equal (rtx x, const compare_type &y)
{
  if (GET_MODE (x) != y.first)
    return false;
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
      return false;
  return true;
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const class mem_attrs *p, const class mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || known_eq (p->offset, q->offset))
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || known_eq (p->size, q->size))
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}
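
/* Illustrative sketch (hypothetical MEM "mem", not exercised here):
   passing attributes equal to the mode's defaults just clears the
   attribute pointer,

     mem_attrs a = *mode_mem_attrs[(int) GET_MODE (mem)];
     set_mem_attrs (mem, &a);   // leaves MEM_ATTRS (mem) == 0

   so only genuinely non-default attributes cause a GC allocation.  */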

/* Returns a hash code for X (which is really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  inchash::hash h;
  h.add_ptr (p->decl);
  h.add_poly_hwi (p->offset);
  return h.end ();
}

/* Returns nonzero if the value represented by X is the same as that
   given by Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && known_eq (p->offset, q->offset));
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static reg_attrs *
get_reg_attrs (tree decl, poly_int64 offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && known_eq (offset, 0))
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}
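
/* Illustrative sketch (hypothetical DECL, not exercised here): the table
   hash-conses the attributes, so repeated requests share one structure:

     reg_attrs *a = get_reg_attrs (decl, 8);
     reg_attrs *b = get_reg_attrs (decl, 8);
     gcc_assert (a == b);   // pointer equality, not just value equality

   This is why reg_attrs pointers can be compared directly elsewhere.  */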


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences from being seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif
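
/* Illustrative sketch (standard idiom elsewhere in GCC, not used in this
   file): a target emits the barrier as an ordinary insn, e.g.

     emit_insn (gen_blockage ());

   so that the scheduler and register-equivalence machinery treat that
   point as opaque.  */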


/* Set the mode and register number of X to MODE and REGNO.  */

void
set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
{
  unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
			? hard_regno_nregs (regno, mode)
			: 1);
  PUT_MODE_RAW (x, mode);
  set_regno_raw (x, regno, nregs);
}

/* Initialize a fresh REG rtx with mode MODE and register REGNO.  */

rtx
init_raw_REG (rtx x, machine_mode mode, unsigned int regno)
{
  set_mode_and_regno (x, mode, regno);
  REG_ATTRS (x) = NULL;
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (machine_mode mode, unsigned int regno)
{
  rtx x = rtx_alloc (REG MEM_STAT_INFO);
  init_raw_REG (x, mode, regno);
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
						 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
						 insn_list));
}

rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
						 prev_insn, next_insn,
						 bb, pattern, location, code,
						 reg_notes));
}

rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}

rtx
gen_int_mode (poly_int64 c, machine_mode mode)
{
  c = trunc_int_for_mode (c, mode);
  if (c.is_constant ())
    return GEN_INT (c.coeffs[0]);
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
  return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
}
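
/* Illustrative sketch (not exercised here): callers use gen_int_mode
   rather than GEN_INT when the value needs truncating to MODE first.
   Because CONST_INTs are shared, the result compares with pointer
   equality:

     rtx x = gen_int_mode (0xff, QImode);   // truncates to -1 in QImode
     gcc_assert (x == constm1_rtx);

   GEN_INT (0xff) would instead yield the untruncated (const_int 255).  */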

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}
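
/* Illustrative sketch (not exercised here): the dconst* values declared
   above give ready-made REAL_VALUE_TYPEs, e.g.

     rtx one = const_double_from_real_value (dconst1, DFmode);

   Repeated calls with an identical value and mode return the same rtx,
   since lookup_const_double hash-conses the result.  */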

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
      r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

static rtx
immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   const_double_from_real_value.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
        (i.e., i1 consists only of copies of the sign bit, and the signs
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  scalar_mode smode;
  if (is_a <scalar_mode> (mode, &smode)
      && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (i0, mode);

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif

/* Return an rtx representation of C in mode MODE.  */

rtx
immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
{
  if (c.is_constant ())
    return immed_wide_int_const_1 (c.coeffs[0], mode);

  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= c.coeffs[0].get_precision ());
  poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);

  /* See whether we already have an rtx for this constant.  */
  inchash::hash h;
  h.add_int (mode);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (newc.coeffs[i]);
  const_poly_int_hasher::compare_type typed_value (mode, newc);
  rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
							h.end (), INSERT);
  rtx x = *slot;
  if (x)
    return x;

  /* Create a new rtx.  There's a choice to be made here between installing
     the actual mode of the rtx or leaving it as VOIDmode (for consistency
     with CONST_INT).  In practice the handling of the codes is different
     enough that we get no benefit from using VOIDmode, and various places
     assume that VOIDmode implies CONST_INT.  Using the real mode seems like
     the right long-term direction anyway.  */
  typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
  size_t extra_size = twi::extra_size (prec);
  x = rtx_alloc_v (CONST_POLY_INT,
		   sizeof (struct const_poly_int_def) + extra_size);
  PUT_MODE (x, mode);
  CONST_POLY_INT_COEFFS (x).set_precision (prec);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];

  *slot = x;
  return x;
}
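
/* Illustrative sketch (assumes the wide-int helper wi::shwi declared
   elsewhere in GCC): a value that fits in a host word comes back as a
   shared CONST_INT,

     rtx x = immed_wide_int_const (wi::shwi (42, GET_MODE_PRECISION (SImode)),
				   SImode);   // same rtx as GEN_INT (42)

   while a genuinely wider value becomes a CONST_WIDE_INT (or, without
   TARGET_SUPPORTS_WIDE_INT, a CONST_DOUBLE).  */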

rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.   Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
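
/* Illustrative consequence (not exercised here): outside of reload,
   requesting a well-known pointer register in Pmode yields the shared
   global rather than a fresh rtx:

     rtx fp = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);
     gcc_assert (fp == frame_pointer_rtx);

   Any other mode/register combination falls through to gen_raw_REG.  */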

rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a MEM referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
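
/* Illustrative sketch (hypothetical symbol name, not exercised here):
   loads that can never trap and never change, such as constant-pool
   references, use gen_const_mem so the optimizers may move them freely:

     rtx addr = gen_rtx_SYMBOL_REF (Pmode, "*.LC0");
     rtx mem = gen_const_mem (SImode, addr);
     // MEM_READONLY_P and MEM_NOTRAP_P are both set on MEM.

   gen_frame_mem and gen_tmp_stack_mem differ only in the alias set and
   trap assumptions they record.  */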

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
		 const_rtx reg, poly_uint64 offset)
{
  poly_uint64 isize = GET_MODE_SIZE (imode);
  poly_uint64 osize = GET_MODE_SIZE (omode);

  /* The sizes must be ordered, so that we know whether the subreg
     is partial, paradoxical or complete.  */
  if (!ordered_p (isize, osize))
    return false;

  /* All subregs must be aligned.  */
  if (!multiple_p (offset, osize))
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (maybe_ge (offset, isize))
    return false;

  poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (known_ge (osize, regsize) && known_ge (isize, osize))
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (known_eq (isize, osize)
	     /* LRA can use a subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of the floating point mode can be less than the
		integer mode.  LRA also uses subregs for a register that
		should be used in different modes in the same insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (maybe_gt (osize, isize))
    return known_eq (offset, 0U);

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
	return false;

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* The outer size must be ordered wrt the register size, otherwise
     we wouldn't know at compile time how many registers the outer
     mode occupies.  */
  if (!ordered_p (osize, regsize))
    return false;

  /* For pseudo registers, we want most of the same checks.  Namely:

     Assume that the pseudo register will be allocated to hard registers
     that can hold REGSIZE bytes each.  If OSIZE is not a multiple of REGSIZE,
     the remainder must correspond to the lowpart of the containing hard
     register.  If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
     otherwise it is at the lowest offset.

     Given that we've already checked the mode and offset alignment,
     we only have to check subblock subregs here.  */
  if (maybe_lt (osize, regsize)
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      /* It is invalid for the target to pick a register size for a mode
	 that isn't ordered wrt to the size of that mode.  */
      poly_uint64 block_size = ordered_min (isize, regsize);
      unsigned int start_reg;
      poly_uint64 offset_within_reg;
      if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
	  || (BYTES_BIG_ENDIAN
	      ? maybe_ne (offset_within_reg, block_size - osize)
	      : maybe_ne (offset_within_reg, 0U)))
	return false;
    }
  return true;
}
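
/* Illustrative consequences of the rules above (not exercised here):

     validate_subreg (DImode, DFmode, reg, 0)   // ok: float, same size
     validate_subreg (SImode, DFmode, reg, 0)   // rejected by the float
						// rule, unless SImode is
						// word_mode on the target

   so (subreg:DI (reg:DF) 0) is representable while (subreg:SI (reg:DF) 0)
   generally is not.  */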

rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}


/* Create an rtvec and store within it the rtxes passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
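
/* Illustrative sketch (hypothetical SETs x and y, not exercised here):
   the usual consumer is a PARALLEL wrapping several side effects:

     rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, x, y));

   gen_rtvec_v below does the same from an existing array instead of
   varargs.  */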

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}


/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

poly_int64
byte_lowpart_offset (machine_mode outer_mode,
		     machine_mode inner_mode)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    return -subreg_lowpart_offset (inner_mode, outer_mode);
  else
    return subreg_lowpart_offset (outer_mode, inner_mode);
}
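
/* Worked example (standard subreg arithmetic, not exercised here): for
   SImode within DImode, byte_lowpart_offset (SImode, DImode) is 4 on a
   big-endian target and 0 on a little-endian one, since the lowpart of
   a big-endian value sits at the highest address.  */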

/* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
   from address X.  For paradoxical big-endian subregs this is a
   negative value, otherwise it's the same as OFFSET.  */

poly_int64
subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
		      poly_uint64 offset)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    {
      gcc_assert (known_eq (offset, 0U));
      return -subreg_lowpart_offset (inner_mode, outer_mode);
    }
  return offset;
}

/* As above, but return the offset that existing subreg X would have
   if SUBREG_REG (X) were stored in memory.  The only significant thing
   about the current SUBREG_REG is its mode.  */

poly_int64
subreg_memory_offset (const_rtx x)
{
  return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
			       SUBREG_BYTE (x));
}

/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  crtl->emit.ensure_regno_capacity ();
  gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
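
/* Illustrative consequence (not exercised here): with generating_concat_p
   set, a complex-mode request produces two independent pseudos,

     rtx c = gen_reg_rtx (DCmode);   // (concat:DC (reg:DF n) (reg:DF n+1))

   so the real and imaginary parts can be allocated to noncontiguous
   registers.  */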
120710d565efSmrg 
120810d565efSmrg /* Make sure m_regno_pointer_align and regno_reg_rtx are large
120910d565efSmrg    enough to have elements in the range 0 <= idx <= reg_rtx_no.  */
121010d565efSmrg 
121110d565efSmrg void
121210d565efSmrg emit_status::ensure_regno_capacity ()
121310d565efSmrg {
121410d565efSmrg   int old_size = regno_pointer_align_length;
121510d565efSmrg 
121610d565efSmrg   if (reg_rtx_no < old_size)
121710d565efSmrg     return;
121810d565efSmrg 
121910d565efSmrg   int new_size = old_size * 2;
122010d565efSmrg   while (reg_rtx_no >= new_size)
122110d565efSmrg     new_size *= 2;
122210d565efSmrg 
122310d565efSmrg   char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
122410d565efSmrg   memset (tmp + old_size, 0, new_size - old_size);
122510d565efSmrg   regno_pointer_align = (unsigned char *) tmp;
122610d565efSmrg 
122710d565efSmrg   rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
122810d565efSmrg   memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
122910d565efSmrg   regno_reg_rtx = new1;
123010d565efSmrg 
123110d565efSmrg   crtl->emit.regno_pointer_align_length = new_size;
123210d565efSmrg }
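/* Worked example of the doubling above: with
   regno_pointer_align_length == 100 and reg_rtx_no == 250, new_size
   grows 200 -> 400, both arrays are reallocated to 400 elements and
   the new tail is zero-initialized.  */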
123310d565efSmrg 
123410d565efSmrg /* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */
123510d565efSmrg 
123610d565efSmrg bool
123710d565efSmrg reg_is_parm_p (rtx reg)
123810d565efSmrg {
123910d565efSmrg   tree decl;
124010d565efSmrg 
124110d565efSmrg   gcc_assert (REG_P (reg));
124210d565efSmrg   decl = REG_EXPR (reg);
124310d565efSmrg   return (decl && TREE_CODE (decl) == PARM_DECL);
124410d565efSmrg }
124510d565efSmrg 
124610d565efSmrg /* Update NEW_RTX with the same attributes as REG, but with OFFSET
124710d565efSmrg    added to the REG_OFFSET.  */
124810d565efSmrg 
124910d565efSmrg static void
1250c7a68eb7Smrg update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
125110d565efSmrg {
125210d565efSmrg   REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
125310d565efSmrg 				       REG_OFFSET (reg) + offset);
125410d565efSmrg }
125510d565efSmrg 
125610d565efSmrg /* Generate a register with same attributes as REG, but with OFFSET
125710d565efSmrg    added to the REG_OFFSET.  */
125810d565efSmrg 
125910d565efSmrg rtx
126010d565efSmrg gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
1261c7a68eb7Smrg 		    poly_int64 offset)
126210d565efSmrg {
126310d565efSmrg   rtx new_rtx = gen_rtx_REG (mode, regno);
126410d565efSmrg 
126510d565efSmrg   update_reg_offset (new_rtx, reg, offset);
126610d565efSmrg   return new_rtx;
126710d565efSmrg }
126810d565efSmrg 
126910d565efSmrg /* Generate a new pseudo-register with the same attributes as REG, but
127010d565efSmrg    with OFFSET added to the REG_OFFSET.  */
127110d565efSmrg 
127210d565efSmrg rtx
127310d565efSmrg gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
127410d565efSmrg {
127510d565efSmrg   rtx new_rtx = gen_reg_rtx (mode);
127610d565efSmrg 
127710d565efSmrg   update_reg_offset (new_rtx, reg, offset);
127810d565efSmrg   return new_rtx;
127910d565efSmrg }
128010d565efSmrg 
128110d565efSmrg /* Adjust REG in-place so that it has mode MODE.  It is assumed that the
128210d565efSmrg    new register is a (possibly paradoxical) lowpart of the old one.  */
128310d565efSmrg 
128410d565efSmrg void
128510d565efSmrg adjust_reg_mode (rtx reg, machine_mode mode)
128610d565efSmrg {
128710d565efSmrg   update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
128810d565efSmrg   PUT_MODE (reg, mode);
128910d565efSmrg }
129010d565efSmrg 
129110d565efSmrg /* Copy REG's attributes from X, if X has any attributes.  If REG and X
129210d565efSmrg    have different modes, REG is a (possibly paradoxical) lowpart of X.  */
129310d565efSmrg 
129410d565efSmrg void
129510d565efSmrg set_reg_attrs_from_value (rtx reg, rtx x)
129610d565efSmrg {
1297c7a68eb7Smrg   poly_int64 offset;
129810d565efSmrg   bool can_be_reg_pointer = true;
129910d565efSmrg 
130010d565efSmrg   /* Don't call mark_reg_pointer for incompatible pointer sign
130110d565efSmrg      extension.  */
130210d565efSmrg   while (GET_CODE (x) == SIGN_EXTEND
130310d565efSmrg 	 || GET_CODE (x) == ZERO_EXTEND
130410d565efSmrg 	 || GET_CODE (x) == TRUNCATE
130510d565efSmrg 	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
130610d565efSmrg     {
130710d565efSmrg #if defined(POINTERS_EXTEND_UNSIGNED)
130810d565efSmrg       if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
130910d565efSmrg 	   || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
131010d565efSmrg 	   || (paradoxical_subreg_p (x)
131110d565efSmrg 	       && ! (SUBREG_PROMOTED_VAR_P (x)
131210d565efSmrg 		     && SUBREG_CHECK_PROMOTED_SIGN (x,
131310d565efSmrg 						    POINTERS_EXTEND_UNSIGNED))))
131410d565efSmrg 	  && !targetm.have_ptr_extend ())
131510d565efSmrg 	can_be_reg_pointer = false;
131610d565efSmrg #endif
131710d565efSmrg       x = XEXP (x, 0);
131810d565efSmrg     }
131910d565efSmrg 
132010d565efSmrg   /* Hard registers can be reused for multiple purposes within the same
132110d565efSmrg      function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
132210d565efSmrg      on them is wrong.  */
132310d565efSmrg   if (HARD_REGISTER_P (reg))
132410d565efSmrg     return;
132510d565efSmrg 
132610d565efSmrg   offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
132710d565efSmrg   if (MEM_P (x))
132810d565efSmrg     {
132910d565efSmrg       if (MEM_OFFSET_KNOWN_P (x))
133010d565efSmrg 	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
133110d565efSmrg 					 MEM_OFFSET (x) + offset);
133210d565efSmrg       if (can_be_reg_pointer && MEM_POINTER (x))
133310d565efSmrg 	mark_reg_pointer (reg, 0);
133410d565efSmrg     }
133510d565efSmrg   else if (REG_P (x))
133610d565efSmrg     {
133710d565efSmrg       if (REG_ATTRS (x))
133810d565efSmrg 	update_reg_offset (reg, x, offset);
133910d565efSmrg       if (can_be_reg_pointer && REG_POINTER (x))
134010d565efSmrg 	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
134110d565efSmrg     }
134210d565efSmrg }
134310d565efSmrg 
134410d565efSmrg /* Generate a REG rtx for a new pseudo register, copying the mode
134510d565efSmrg    and attributes from X.  */
134610d565efSmrg 
134710d565efSmrg rtx
134810d565efSmrg gen_reg_rtx_and_attrs (rtx x)
134910d565efSmrg {
135010d565efSmrg   rtx reg = gen_reg_rtx (GET_MODE (x));
135110d565efSmrg   set_reg_attrs_from_value (reg, x);
135210d565efSmrg   return reg;
135310d565efSmrg }
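/* Typical use, sketched: load a value from memory while keeping its
   attributes on the new pseudo:

     rtx tmp = gen_reg_rtx_and_attrs (mem);
     emit_move_insn (tmp, mem);

   TMP then carries MEM's MEM_EXPR and MEM_OFFSET as REG_ATTRS, and
   is marked REG_POINTER if MEM was known to hold a pointer.  */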
135410d565efSmrg 
135510d565efSmrg /* Set the register attributes for registers contained in PARM_RTX.
135610d565efSmrg    Use needed values from memory attributes of MEM.  */
135710d565efSmrg 
135810d565efSmrg void
135910d565efSmrg set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
136010d565efSmrg {
136110d565efSmrg   if (REG_P (parm_rtx))
136210d565efSmrg     set_reg_attrs_from_value (parm_rtx, mem);
136310d565efSmrg   else if (GET_CODE (parm_rtx) == PARALLEL)
136410d565efSmrg     {
136510d565efSmrg       /* Check for a NULL entry in the first slot, used to indicate that the
136610d565efSmrg 	 parameter goes both on the stack and in registers.  */
136710d565efSmrg       int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
136810d565efSmrg       for (; i < XVECLEN (parm_rtx, 0); i++)
136910d565efSmrg 	{
137010d565efSmrg 	  rtx x = XVECEXP (parm_rtx, 0, i);
137110d565efSmrg 	  if (REG_P (XEXP (x, 0)))
137210d565efSmrg 	    REG_ATTRS (XEXP (x, 0))
137310d565efSmrg 	      = get_reg_attrs (MEM_EXPR (mem),
137410d565efSmrg 			       INTVAL (XEXP (x, 1)));
137510d565efSmrg 	}
137610d565efSmrg     }
137710d565efSmrg }
137810d565efSmrg 
137910d565efSmrg /* Set the REG_ATTRS for registers in value X, given that X represents
138010d565efSmrg    decl T.  */
138110d565efSmrg 
138210d565efSmrg void
138310d565efSmrg set_reg_attrs_for_decl_rtl (tree t, rtx x)
138410d565efSmrg {
138510d565efSmrg   if (!t)
138610d565efSmrg     return;
138710d565efSmrg   tree tdecl = t;
138810d565efSmrg   if (GET_CODE (x) == SUBREG)
138910d565efSmrg     {
139010d565efSmrg       gcc_assert (subreg_lowpart_p (x));
139110d565efSmrg       x = SUBREG_REG (x);
139210d565efSmrg     }
139310d565efSmrg   if (REG_P (x))
139410d565efSmrg     REG_ATTRS (x)
139510d565efSmrg       = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
139610d565efSmrg 					       DECL_P (tdecl)
139710d565efSmrg 					       ? DECL_MODE (tdecl)
139810d565efSmrg 					       : TYPE_MODE (TREE_TYPE (tdecl))));
139910d565efSmrg   if (GET_CODE (x) == CONCAT)
140010d565efSmrg     {
140110d565efSmrg       if (REG_P (XEXP (x, 0)))
140210d565efSmrg         REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
140310d565efSmrg       if (REG_P (XEXP (x, 1)))
140410d565efSmrg 	REG_ATTRS (XEXP (x, 1))
140510d565efSmrg 	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
140610d565efSmrg     }
140710d565efSmrg   if (GET_CODE (x) == PARALLEL)
140810d565efSmrg     {
140910d565efSmrg       int i, start;
141010d565efSmrg 
141110d565efSmrg       /* Check for a NULL entry, used to indicate that the parameter goes
141210d565efSmrg 	 both on the stack and in registers.  */
141310d565efSmrg       if (XEXP (XVECEXP (x, 0, 0), 0))
141410d565efSmrg 	start = 0;
141510d565efSmrg       else
141610d565efSmrg 	start = 1;
141710d565efSmrg 
141810d565efSmrg       for (i = start; i < XVECLEN (x, 0); i++)
141910d565efSmrg 	{
142010d565efSmrg 	  rtx y = XVECEXP (x, 0, i);
142110d565efSmrg 	  if (REG_P (XEXP (y, 0)))
142210d565efSmrg 	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
142310d565efSmrg 	}
142410d565efSmrg     }
142510d565efSmrg }
142610d565efSmrg 
142710d565efSmrg /* Assign the RTX X to declaration T.  */
142810d565efSmrg 
142910d565efSmrg void
143010d565efSmrg set_decl_rtl (tree t, rtx x)
143110d565efSmrg {
143210d565efSmrg   DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
143310d565efSmrg   if (x)
143410d565efSmrg     set_reg_attrs_for_decl_rtl (t, x);
143510d565efSmrg }
143610d565efSmrg 
143710d565efSmrg /* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
143810d565efSmrg    if the ABI requires the parameter to be passed by reference.  */
143910d565efSmrg 
144010d565efSmrg void
144110d565efSmrg set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
144210d565efSmrg {
144310d565efSmrg   DECL_INCOMING_RTL (t) = x;
144410d565efSmrg   if (x && !by_reference_p)
144510d565efSmrg     set_reg_attrs_for_decl_rtl (t, x);
144610d565efSmrg }
144710d565efSmrg 
144810d565efSmrg /* Identify REG (which may be a CONCAT) as a user register.  */
144910d565efSmrg 
145010d565efSmrg void
145110d565efSmrg mark_user_reg (rtx reg)
145210d565efSmrg {
145310d565efSmrg   if (GET_CODE (reg) == CONCAT)
145410d565efSmrg     {
145510d565efSmrg       REG_USERVAR_P (XEXP (reg, 0)) = 1;
145610d565efSmrg       REG_USERVAR_P (XEXP (reg, 1)) = 1;
145710d565efSmrg     }
145810d565efSmrg   else
145910d565efSmrg     {
146010d565efSmrg       gcc_assert (REG_P (reg));
146110d565efSmrg       REG_USERVAR_P (reg) = 1;
146210d565efSmrg     }
146310d565efSmrg }
146410d565efSmrg 
146510d565efSmrg /* Identify REG as a probable pointer register and show its alignment
146610d565efSmrg    as ALIGN, if nonzero.  */
146710d565efSmrg 
146810d565efSmrg void
146910d565efSmrg mark_reg_pointer (rtx reg, int align)
147010d565efSmrg {
147110d565efSmrg   if (! REG_POINTER (reg))
147210d565efSmrg     {
147310d565efSmrg       REG_POINTER (reg) = 1;
147410d565efSmrg 
147510d565efSmrg       if (align)
147610d565efSmrg 	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
147710d565efSmrg     }
147810d565efSmrg   else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
147910d565efSmrg     /* We can no longer be sure just how aligned this pointer is.  */
148010d565efSmrg     REGNO_POINTER_ALIGN (REGNO (reg)) = align;
148110d565efSmrg }
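/* For example, mark_reg_pointer (reg, 64) on a pseudo not yet known
   to be a pointer records 64-bit alignment; a later
   mark_reg_pointer (reg, 32) on the same pseudo lowers the recorded
   alignment to 32, since the weaker claim must win.  */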
148210d565efSmrg 
148310d565efSmrg /* Return 1 plus largest pseudo reg number used in the current function.  */
148410d565efSmrg 
148510d565efSmrg int
148610d565efSmrg max_reg_num (void)
148710d565efSmrg {
148810d565efSmrg   return reg_rtx_no;
148910d565efSmrg }
149010d565efSmrg 
149110d565efSmrg /* Return 1 + the largest label number used so far in the current function.  */
149210d565efSmrg 
149310d565efSmrg int
149410d565efSmrg max_label_num (void)
149510d565efSmrg {
149610d565efSmrg   return label_num;
149710d565efSmrg }
149810d565efSmrg 
149910d565efSmrg /* Return first label number used in this function (if any were used).  */
150010d565efSmrg 
150110d565efSmrg int
150210d565efSmrg get_first_label_num (void)
150310d565efSmrg {
150410d565efSmrg   return first_label_num;
150510d565efSmrg }
150610d565efSmrg 
150710d565efSmrg /* If the rtx for label was created during the expansion of a nested
150810d565efSmrg    function, then first_label_num won't include this label number.
150910d565efSmrg    Fix this now so that array indices work later.  */
151010d565efSmrg 
151110d565efSmrg void
151210d565efSmrg maybe_set_first_label_num (rtx_code_label *x)
151310d565efSmrg {
151410d565efSmrg   if (CODE_LABEL_NUMBER (x) < first_label_num)
151510d565efSmrg     first_label_num = CODE_LABEL_NUMBER (x);
151610d565efSmrg }
151710d565efSmrg 
151810d565efSmrg /* For use by the RTL function loader, when mingling with normal
151910d565efSmrg    functions.
152010d565efSmrg    Ensure that label_num is greater than the label num of X, to avoid
152110d565efSmrg    duplicate labels in the generated assembler.  */
152210d565efSmrg 
152310d565efSmrg void
152410d565efSmrg maybe_set_max_label_num (rtx_code_label *x)
152510d565efSmrg {
152610d565efSmrg   if (CODE_LABEL_NUMBER (x) >= label_num)
152710d565efSmrg     label_num = CODE_LABEL_NUMBER (x) + 1;
152810d565efSmrg }
152910d565efSmrg 
153010d565efSmrg 
153110d565efSmrg /* Return a value representing some low-order bits of X, where the number
153210d565efSmrg    of low-order bits is given by MODE.  Note that no conversion is done
153310d565efSmrg    between floating-point and fixed-point values, rather, the bit
153410d565efSmrg    between floating-point and fixed-point values; rather, the bit
153510d565efSmrg 
153610d565efSmrg    This function handles the cases in common between gen_lowpart, below,
153710d565efSmrg    and two variants in cse.c and combine.c.  These are the cases that can
153810d565efSmrg    be safely handled at all points in the compilation.
153910d565efSmrg 
154010d565efSmrg    If this is not a case we can handle, return 0.  */
154110d565efSmrg 
154210d565efSmrg rtx
154310d565efSmrg gen_lowpart_common (machine_mode mode, rtx x)
154410d565efSmrg {
1545c7a68eb7Smrg   poly_uint64 msize = GET_MODE_SIZE (mode);
154610d565efSmrg   machine_mode innermode;
154710d565efSmrg 
154810d565efSmrg   /* Unfortunately, this routine doesn't take a parameter for the mode of X,
154910d565efSmrg      so we have to make one up.  Yuk.  */
155010d565efSmrg   innermode = GET_MODE (x);
155110d565efSmrg   if (CONST_INT_P (x)
1552c7a68eb7Smrg       && known_le (msize * BITS_PER_UNIT,
1553c7a68eb7Smrg 		   (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
1554c7a68eb7Smrg     innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
155510d565efSmrg   else if (innermode == VOIDmode)
1556c7a68eb7Smrg     innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();
155710d565efSmrg 
155810d565efSmrg   gcc_assert (innermode != VOIDmode && innermode != BLKmode);
155910d565efSmrg 
156010d565efSmrg   if (innermode == mode)
156110d565efSmrg     return x;
156210d565efSmrg 
1563c7a68eb7Smrg   /* The size of the outer and inner modes must be ordered.  */
1564c7a68eb7Smrg   poly_uint64 xsize = GET_MODE_SIZE (innermode);
1565c7a68eb7Smrg   if (!ordered_p (msize, xsize))
156610d565efSmrg     return 0;
156710d565efSmrg 
1568c7a68eb7Smrg   if (SCALAR_FLOAT_MODE_P (mode))
1569c7a68eb7Smrg     {
1570c7a68eb7Smrg       /* Don't allow paradoxical FLOAT_MODE subregs.  */
1571c7a68eb7Smrg       if (maybe_gt (msize, xsize))
157210d565efSmrg 	return 0;
1573c7a68eb7Smrg     }
1574c7a68eb7Smrg   else
1575c7a68eb7Smrg     {
1576c7a68eb7Smrg       /* MODE must occupy no more of the underlying registers than X.  */
1577c7a68eb7Smrg       poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
1578c7a68eb7Smrg       unsigned int mregs, xregs;
1579c7a68eb7Smrg       if (!can_div_away_from_zero_p (msize, regsize, &mregs)
1580c7a68eb7Smrg 	  || !can_div_away_from_zero_p (xsize, regsize, &xregs)
1581c7a68eb7Smrg 	  || mregs > xregs)
1582c7a68eb7Smrg 	return 0;
1583c7a68eb7Smrg     }
158410d565efSmrg 
1585c7a68eb7Smrg   scalar_int_mode int_mode, int_innermode, from_mode;
158610d565efSmrg   if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1587c7a68eb7Smrg       && is_a <scalar_int_mode> (mode, &int_mode)
1588c7a68eb7Smrg       && is_a <scalar_int_mode> (innermode, &int_innermode)
1589c7a68eb7Smrg       && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
159010d565efSmrg     {
159110d565efSmrg       /* If we are getting the low-order part of something that has been
159210d565efSmrg 	 sign- or zero-extended, we can either just use the object being
159310d565efSmrg 	 extended or make a narrower extension.  If we want an even smaller
159410d565efSmrg 	 piece than the size of the object being extended, call ourselves
159510d565efSmrg 	 recursively.
159610d565efSmrg 
159710d565efSmrg 	 This case is used mostly by combine and cse.  */
159810d565efSmrg 
1599c7a68eb7Smrg       if (from_mode == int_mode)
160010d565efSmrg 	return XEXP (x, 0);
1601c7a68eb7Smrg       else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
1602c7a68eb7Smrg 	return gen_lowpart_common (int_mode, XEXP (x, 0));
1603c7a68eb7Smrg       else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
1604c7a68eb7Smrg 	return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
160510d565efSmrg     }
160610d565efSmrg   else if (GET_CODE (x) == SUBREG || REG_P (x)
160710d565efSmrg 	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1608c7a68eb7Smrg 	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
1609c7a68eb7Smrg 	   || CONST_POLY_INT_P (x))
161010d565efSmrg     return lowpart_subreg (mode, x, innermode);
161110d565efSmrg 
161210d565efSmrg   /* Otherwise, we can't do this.  */
161310d565efSmrg   return 0;
161410d565efSmrg }
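/* A worked example of the extension case above, assuming the usual
   2/4/8-byte HI/SI/DI sizes: with x = (sign_extend:DI (reg:SI r)),
   gen_lowpart_common (SImode, x) returns (reg:SI r) itself, while
   gen_lowpart_common (HImode, x) recurses on (reg:SI r) and yields
   its HImode lowpart subreg.  */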
161510d565efSmrg 
161610d565efSmrg rtx
161710d565efSmrg gen_highpart (machine_mode mode, rtx x)
161810d565efSmrg {
1619c7a68eb7Smrg   poly_uint64 msize = GET_MODE_SIZE (mode);
162010d565efSmrg   rtx result;
162110d565efSmrg 
162210d565efSmrg   /* This case loses if X is a subreg.  To catch bugs early,
162310d565efSmrg      complain if an invalid MODE is used even in other cases.  */
1624c7a68eb7Smrg   gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
1625c7a68eb7Smrg 	      || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));
162610d565efSmrg 
162710d565efSmrg   result = simplify_gen_subreg (mode, x, GET_MODE (x),
162810d565efSmrg 				subreg_highpart_offset (mode, GET_MODE (x)));
162910d565efSmrg   gcc_assert (result);
163010d565efSmrg 
163110d565efSmrg   /* simplify_gen_subreg is not guaranteed to return a valid operand for
163210d565efSmrg      the target if we have a MEM.  gen_highpart must return a valid operand,
163310d565efSmrg      emitting code if necessary to do so.  */
163410d565efSmrg   if (MEM_P (result))
163510d565efSmrg     {
163610d565efSmrg       result = validize_mem (result);
163710d565efSmrg       gcc_assert (result);
163810d565efSmrg     }
163910d565efSmrg 
164010d565efSmrg   return result;
164110d565efSmrg }
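/* E.g. with 4-byte words, gen_highpart (SImode, (reg:DI r)) gives
   (subreg:SI (reg:DI r) 4) on a little-endian target and
   (subreg:SI (reg:DI r) 0) on a big-endian one: in both cases the
   word holding the most significant half.  */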
164210d565efSmrg 
164310d565efSmrg /* Like gen_highpart, but accept mode of EXP operand in case EXP can
164410d565efSmrg    be VOIDmode constant.  */
164510d565efSmrg rtx
164610d565efSmrg gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
164710d565efSmrg {
164810d565efSmrg   if (GET_MODE (exp) != VOIDmode)
164910d565efSmrg     {
165010d565efSmrg       gcc_assert (GET_MODE (exp) == innermode);
165110d565efSmrg       return gen_highpart (outermode, exp);
165210d565efSmrg     }
165310d565efSmrg   return simplify_gen_subreg (outermode, exp, innermode,
165410d565efSmrg 			      subreg_highpart_offset (outermode, innermode));
165510d565efSmrg }
165610d565efSmrg 
165710d565efSmrg /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
165810d565efSmrg    OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */
165910d565efSmrg 
1660c7a68eb7Smrg poly_uint64
1661c7a68eb7Smrg subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
166210d565efSmrg {
1663c7a68eb7Smrg   gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
1664c7a68eb7Smrg   if (maybe_gt (outer_bytes, inner_bytes))
166510d565efSmrg     /* Paradoxical subregs always have a SUBREG_BYTE of 0.  */
166610d565efSmrg     return 0;
166710d565efSmrg 
166810d565efSmrg   if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
166910d565efSmrg     return inner_bytes - outer_bytes;
167010d565efSmrg   else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
167110d565efSmrg     return 0;
167210d565efSmrg   else
167310d565efSmrg     return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
167410d565efSmrg }
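/* For instance, with outer_bytes == 4 and inner_bytes == 8 this
   returns 0 when BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN are both
   false and 4 when both are true; a paradoxical request with
   outer_bytes > inner_bytes always returns 0.  */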
167510d565efSmrg 
167610d565efSmrg /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
167710d565efSmrg    OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */
167810d565efSmrg 
1679c7a68eb7Smrg poly_uint64
1680c7a68eb7Smrg subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
168110d565efSmrg {
1682c7a68eb7Smrg   gcc_assert (known_ge (inner_bytes, outer_bytes));
168310d565efSmrg 
168410d565efSmrg   if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
168510d565efSmrg     return 0;
168610d565efSmrg   else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
168710d565efSmrg     return inner_bytes - outer_bytes;
168810d565efSmrg   else
168910d565efSmrg     return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
169010d565efSmrg 					(inner_bytes - outer_bytes)
169110d565efSmrg 					* BITS_PER_UNIT);
169210d565efSmrg }
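/* The mirror image of the lowpart case: with outer_bytes == 4 and
   inner_bytes == 8 this returns 4 on a fully little-endian target
   and 0 on a fully big-endian one.  */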
169310d565efSmrg 
169410d565efSmrg /* Return 1 iff X, assumed to be a SUBREG,
169510d565efSmrg    refers to the least significant part of its containing reg.
169610d565efSmrg    If X is not a SUBREG, always return 1 (it is its own low part!).  */
169710d565efSmrg 
169810d565efSmrg int
169910d565efSmrg subreg_lowpart_p (const_rtx x)
170010d565efSmrg {
170110d565efSmrg   if (GET_CODE (x) != SUBREG)
170210d565efSmrg     return 1;
170310d565efSmrg   else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
170410d565efSmrg     return 0;
170510d565efSmrg 
1706c7a68eb7Smrg   return known_eq (subreg_lowpart_offset (GET_MODE (x),
1707c7a68eb7Smrg 					  GET_MODE (SUBREG_REG (x))),
1708c7a68eb7Smrg 		   SUBREG_BYTE (x));
170910d565efSmrg }
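/* E.g. on a fully little-endian target (subreg:SI (reg:DI r) 0) is
   the lowpart and (subreg:SI (reg:DI r) 4) is not; on a fully
   big-endian target the two answers swap.  */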
171010d565efSmrg 
171110d565efSmrg /* Return subword OFFSET of operand OP.
171210d565efSmrg    The word number, OFFSET, is interpreted as the word number starting
171310d565efSmrg    at the low-order address.  OFFSET 0 is the low-order word if not
171410d565efSmrg    WORDS_BIG_ENDIAN, otherwise it is the high-order word.
171510d565efSmrg 
171610d565efSmrg    If we cannot extract the required word, we return zero.  Otherwise,
171710d565efSmrg    an rtx corresponding to the requested word will be returned.
171810d565efSmrg 
171910d565efSmrg    VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
172010d565efSmrg    reload has completed, a valid address will always be returned.  After
172110d565efSmrg    reload, if a valid address cannot be returned, we return zero.
172210d565efSmrg 
172310d565efSmrg    If VALIDATE_ADDRESS is zero, we simply form the required address; validating
172410d565efSmrg    it is the responsibility of the caller.
172510d565efSmrg 
172610d565efSmrg    MODE is the mode of OP in case it is a CONST_INT.
172710d565efSmrg 
172810d565efSmrg    ??? This is still rather broken for some cases.  The problem for the
172910d565efSmrg    moment is that all callers of this thing provide no 'goal mode' for
173010d565efSmrg    us to work with.  This exists because all callers were written
173110d565efSmrg    in a word-based SUBREG world.
173210d565efSmrg    Now most uses of this function can be replaced by simplify_subreg.
173310d565efSmrg  */
173510d565efSmrg 
173610d565efSmrg rtx
1737c7a68eb7Smrg operand_subword (rtx op, poly_uint64 offset, int validate_address,
1738c7a68eb7Smrg 		 machine_mode mode)
173910d565efSmrg {
174010d565efSmrg   if (mode == VOIDmode)
174110d565efSmrg     mode = GET_MODE (op);
174210d565efSmrg 
174310d565efSmrg   gcc_assert (mode != VOIDmode);
174410d565efSmrg 
174510d565efSmrg   /* If OP is narrower than a word, fail.  */
174610d565efSmrg   if (mode != BLKmode
1747c7a68eb7Smrg       && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
174810d565efSmrg     return 0;
174910d565efSmrg 
175010d565efSmrg   /* If we want a word outside OP, return zero.  */
175110d565efSmrg   if (mode != BLKmode
1752c7a68eb7Smrg       && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
175310d565efSmrg     return const0_rtx;
175410d565efSmrg 
175510d565efSmrg   /* Form a new MEM at the requested address.  */
175610d565efSmrg   if (MEM_P (op))
175710d565efSmrg     {
175810d565efSmrg       rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
175910d565efSmrg 
176010d565efSmrg       if (! validate_address)
176110d565efSmrg 	return new_rtx;
176210d565efSmrg 
176310d565efSmrg       else if (reload_completed)
176410d565efSmrg 	{
176510d565efSmrg 	  if (! strict_memory_address_addr_space_p (word_mode,
176610d565efSmrg 						    XEXP (new_rtx, 0),
176710d565efSmrg 						    MEM_ADDR_SPACE (op)))
176810d565efSmrg 	    return 0;
176910d565efSmrg 	}
177010d565efSmrg       else
177110d565efSmrg 	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
177210d565efSmrg     }
177310d565efSmrg 
177410d565efSmrg   /* Rest can be handled by simplify_subreg.  */
177510d565efSmrg   return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
177610d565efSmrg }
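/* A usage sketch, assuming 4-byte words so that word_mode is SImode:
   for a DImode register OP,

     operand_subword (op, 0, 1, DImode);
     operand_subword (op, 1, 1, DImode);

   return SImode subregs for the two halves, word 0 being the
   low-order word unless WORDS_BIG_ENDIAN.  For a DImode MEM, the
   results are instead SImode MEMs at the original address and at
   the address 4 bytes beyond it.  */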
177710d565efSmrg 
177810d565efSmrg /* Similar to `operand_subword', but never return 0.  If we can't
177910d565efSmrg    extract the required subword, put OP into a register and try again.
178010d565efSmrg    The second attempt must succeed.  We always validate the address in
178110d565efSmrg    this case.
178210d565efSmrg 
178310d565efSmrg    MODE is the mode of OP, in case it is a CONST_INT.  */
178410d565efSmrg 
178510d565efSmrg rtx
1786c7a68eb7Smrg operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
178710d565efSmrg {
178810d565efSmrg   rtx result = operand_subword (op, offset, 1, mode);
178910d565efSmrg 
179010d565efSmrg   if (result)
179110d565efSmrg     return result;
179210d565efSmrg 
179310d565efSmrg   if (mode != BLKmode && mode != VOIDmode)
179410d565efSmrg     {
179510d565efSmrg       /* If this is a register which cannot be accessed by words, copy it
179610d565efSmrg 	 to a pseudo register.  */
179710d565efSmrg       if (REG_P (op))
179810d565efSmrg 	op = copy_to_reg (op);
179910d565efSmrg       else
180010d565efSmrg 	op = force_reg (mode, op);
180110d565efSmrg     }
180210d565efSmrg 
180310d565efSmrg   result = operand_subword (op, offset, 1, mode);
180410d565efSmrg   gcc_assert (result);
180510d565efSmrg 
180610d565efSmrg   return result;
180710d565efSmrg }
180810d565efSmrg 
1809c7a68eb7Smrg mem_attrs::mem_attrs ()
1810c7a68eb7Smrg   : expr (NULL_TREE),
1811c7a68eb7Smrg     offset (0),
1812c7a68eb7Smrg     size (0),
1813c7a68eb7Smrg     alias (0),
1814c7a68eb7Smrg     align (0),
1815c7a68eb7Smrg     addrspace (ADDR_SPACE_GENERIC),
1816c7a68eb7Smrg     offset_known_p (false),
1817c7a68eb7Smrg     size_known_p (false)
1818c7a68eb7Smrg {}
1819c7a68eb7Smrg 
182010d565efSmrg /* Return 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered
182110d565efSmrg    equal and 0 otherwise.  */
182210d565efSmrg 
182310d565efSmrg int
182410d565efSmrg mem_expr_equal_p (const_tree expr1, const_tree expr2)
182510d565efSmrg {
182610d565efSmrg   if (expr1 == expr2)
182710d565efSmrg     return 1;
182810d565efSmrg 
182910d565efSmrg   if (! expr1 || ! expr2)
183010d565efSmrg     return 0;
183110d565efSmrg 
183210d565efSmrg   if (TREE_CODE (expr1) != TREE_CODE (expr2))
183310d565efSmrg     return 0;
183410d565efSmrg 
183510d565efSmrg   return operand_equal_p (expr1, expr2, 0);
183610d565efSmrg }
183710d565efSmrg 
183810d565efSmrg /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
183910d565efSmrg    bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
184010d565efSmrg    -1 if not known.  */
184110d565efSmrg 
184210d565efSmrg int
184310d565efSmrg get_mem_align_offset (rtx mem, unsigned int align)
184410d565efSmrg {
184510d565efSmrg   tree expr;
1846c7a68eb7Smrg   poly_uint64 offset;
184710d565efSmrg 
184810d565efSmrg   /* This function can't use
184910d565efSmrg      if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
185010d565efSmrg 	 || (MAX (MEM_ALIGN (mem),
185110d565efSmrg 	          MAX (align, get_object_alignment (MEM_EXPR (mem))))
185210d565efSmrg 	     < align))
185310d565efSmrg        return -1;
185410d565efSmrg      else
185510d565efSmrg        return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
185610d565efSmrg      for two reasons:
185710d565efSmrg      - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
185810d565efSmrg        for <variable>.  get_inner_reference doesn't handle it and
185910d565efSmrg        even if it did, the alignment in that case needs to be determined
186010d565efSmrg        from DECL_FIELD_CONTEXT's TYPE_ALIGN.
186110d565efSmrg      - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
186210d565efSmrg        isn't sufficiently aligned, the object it is in might be.  */
186310d565efSmrg   gcc_assert (MEM_P (mem));
186410d565efSmrg   expr = MEM_EXPR (mem);
186510d565efSmrg   if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
186610d565efSmrg     return -1;
186710d565efSmrg 
186810d565efSmrg   offset = MEM_OFFSET (mem);
186910d565efSmrg   if (DECL_P (expr))
187010d565efSmrg     {
187110d565efSmrg       if (DECL_ALIGN (expr) < align)
187210d565efSmrg 	return -1;
187310d565efSmrg     }
187410d565efSmrg   else if (INDIRECT_REF_P (expr))
187510d565efSmrg     {
187610d565efSmrg       if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
187710d565efSmrg 	return -1;
187810d565efSmrg     }
187910d565efSmrg   else if (TREE_CODE (expr) == COMPONENT_REF)
188010d565efSmrg     {
188110d565efSmrg       while (1)
188210d565efSmrg 	{
188310d565efSmrg 	  tree inner = TREE_OPERAND (expr, 0);
188410d565efSmrg 	  tree field = TREE_OPERAND (expr, 1);
188510d565efSmrg 	  tree byte_offset = component_ref_field_offset (expr);
188610d565efSmrg 	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
188710d565efSmrg 
1888c7a68eb7Smrg 	  poly_uint64 suboffset;
188910d565efSmrg 	  if (!byte_offset
1890c7a68eb7Smrg 	      || !poly_int_tree_p (byte_offset, &suboffset)
189110d565efSmrg 	      || !tree_fits_uhwi_p (bit_offset))
189210d565efSmrg 	    return -1;
189310d565efSmrg 
1894c7a68eb7Smrg 	  offset += suboffset;
189510d565efSmrg 	  offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;
189610d565efSmrg 
189710d565efSmrg 	  if (inner == NULL_TREE)
189810d565efSmrg 	    {
189910d565efSmrg 	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
190010d565efSmrg 		  < (unsigned int) align)
190110d565efSmrg 		return -1;
190210d565efSmrg 	      break;
190310d565efSmrg 	    }
190410d565efSmrg 	  else if (DECL_P (inner))
190510d565efSmrg 	    {
190610d565efSmrg 	      if (DECL_ALIGN (inner) < align)
190710d565efSmrg 		return -1;
190810d565efSmrg 	      break;
190910d565efSmrg 	    }
191010d565efSmrg 	  else if (TREE_CODE (inner) != COMPONENT_REF)
191110d565efSmrg 	    return -1;
191210d565efSmrg 	  expr = inner;
191310d565efSmrg 	}
191410d565efSmrg     }
191510d565efSmrg   else
191610d565efSmrg     return -1;
191710d565efSmrg 
1918c7a68eb7Smrg   HOST_WIDE_INT misalign;
1919c7a68eb7Smrg   if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
1920c7a68eb7Smrg     return -1;
1921c7a68eb7Smrg   return misalign;
192210d565efSmrg }
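/* For example, if MEM_EXPR is a decl with 128-bit DECL_ALIGN and
   MEM_OFFSET is 4, get_mem_align_offset (mem, 128) returns 4: the
   address minus 4 bytes is known to be 16-byte aligned.  When no
   such derivation is possible, -1 is returned.  */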
192310d565efSmrg 
192410d565efSmrg /* Given REF (a MEM) and T, either the type of X or the expression
192510d565efSmrg /* Given REF (a MEM) and T, either the type of REF or the expression
192610d565efSmrg    if we are making a new object of this type.  BITPOS is nonzero if
192710d565efSmrg    there is an offset outstanding on T that will be applied later.  */
192810d565efSmrg 
192910d565efSmrg void
193010d565efSmrg set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1931c7a68eb7Smrg 				 poly_int64 bitpos)
193210d565efSmrg {
1933c7a68eb7Smrg   poly_int64 apply_bitpos = 0;
193410d565efSmrg   tree type;
1935*ec02198aSmrg   class mem_attrs attrs, *defattrs, *refattrs;
193610d565efSmrg   addr_space_t as;
193710d565efSmrg 
193810d565efSmrg   /* It can happen that type_for_mode was given a mode for which there
193910d565efSmrg      is no language-level type.  In which case it returns NULL, which
194010d565efSmrg      we can see here.  */
194110d565efSmrg   if (t == NULL_TREE)
194210d565efSmrg     return;
194310d565efSmrg 
194410d565efSmrg   type = TYPE_P (t) ? t : TREE_TYPE (t);
194510d565efSmrg   if (type == error_mark_node)
194610d565efSmrg     return;
194710d565efSmrg 
194810d565efSmrg   /* If we have already set DECL_RTL = ref, get_alias_set will get the
194910d565efSmrg      wrong answer, as it assumes that DECL_RTL already has the right alias
195010d565efSmrg      info.  Callers should not set DECL_RTL until after the call to
195110d565efSmrg      set_mem_attributes.  */
195210d565efSmrg   gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
195310d565efSmrg 
195410d565efSmrg   /* Get the alias set from the expression or type (perhaps using a
195510d565efSmrg      front-end routine) and use it.  */
195610d565efSmrg   attrs.alias = get_alias_set (t);
195710d565efSmrg 
195810d565efSmrg   MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
195910d565efSmrg   MEM_POINTER (ref) = POINTER_TYPE_P (type);
196010d565efSmrg 
196110d565efSmrg   /* Default values from pre-existing memory attributes if present.  */
196210d565efSmrg   refattrs = MEM_ATTRS (ref);
196310d565efSmrg   if (refattrs)
196410d565efSmrg     {
196510d565efSmrg       /* ??? Can this ever happen?  Calling this routine on a MEM that
196610d565efSmrg 	 already carries memory attributes should probably be invalid.  */
196710d565efSmrg       attrs.expr = refattrs->expr;
196810d565efSmrg       attrs.offset_known_p = refattrs->offset_known_p;
196910d565efSmrg       attrs.offset = refattrs->offset;
197010d565efSmrg       attrs.size_known_p = refattrs->size_known_p;
197110d565efSmrg       attrs.size = refattrs->size;
197210d565efSmrg       attrs.align = refattrs->align;
197310d565efSmrg     }
197410d565efSmrg 
197510d565efSmrg   /* Otherwise, default values from the mode of the MEM reference.  */
197610d565efSmrg   else
197710d565efSmrg     {
197810d565efSmrg       defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
197910d565efSmrg       gcc_assert (!defattrs->expr);
198010d565efSmrg       gcc_assert (!defattrs->offset_known_p);
198110d565efSmrg 
198210d565efSmrg       /* Respect mode size.  */
198310d565efSmrg       attrs.size_known_p = defattrs->size_known_p;
198410d565efSmrg       attrs.size = defattrs->size;
198510d565efSmrg       /* ??? Is this really necessary?  We probably should always get
198610d565efSmrg 	 the size from the type below.  */
198710d565efSmrg 
198810d565efSmrg       /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
198910d565efSmrg          if T is an object, always compute the object alignment below.  */
199010d565efSmrg       if (TYPE_P (t))
199110d565efSmrg 	attrs.align = defattrs->align;
199210d565efSmrg       else
199310d565efSmrg 	attrs.align = BITS_PER_UNIT;
199410d565efSmrg       /* ??? If T is a type, respecting mode alignment may *also* be wrong
199510d565efSmrg 	 e.g. if the type carries an alignment attribute.  Should we be
199610d565efSmrg 	 able to simply always use TYPE_ALIGN?  */
199710d565efSmrg     }
199810d565efSmrg 
199910d565efSmrg   /* We can set the alignment from the type if we are making an object or if
200010d565efSmrg      this is an INDIRECT_REF.  */
200110d565efSmrg   if (objectp || TREE_CODE (t) == INDIRECT_REF)
200210d565efSmrg     attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
200310d565efSmrg 
200410d565efSmrg   /* If the size is known, we can set that.  */
200510d565efSmrg   tree new_size = TYPE_SIZE_UNIT (type);
200610d565efSmrg 
200710d565efSmrg   /* The address-space is that of the type.  */
200810d565efSmrg   as = TYPE_ADDR_SPACE (type);
200910d565efSmrg 
201010d565efSmrg   /* If T is not a type, we may be able to deduce some more information about
201110d565efSmrg      the expression.  */
201210d565efSmrg   if (! TYPE_P (t))
201310d565efSmrg     {
201410d565efSmrg       tree base;
201510d565efSmrg 
201610d565efSmrg       if (TREE_THIS_VOLATILE (t))
201710d565efSmrg 	MEM_VOLATILE_P (ref) = 1;
201810d565efSmrg 
201910d565efSmrg       /* Now remove any conversions: they don't change what the underlying
202010d565efSmrg 	 object is.  Likewise for SAVE_EXPR.  */
202110d565efSmrg       while (CONVERT_EXPR_P (t)
202210d565efSmrg 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
202310d565efSmrg 	     || TREE_CODE (t) == SAVE_EXPR)
202410d565efSmrg 	t = TREE_OPERAND (t, 0);
202510d565efSmrg 
202610d565efSmrg       /* Note whether this expression can trap.  */
202710d565efSmrg       MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
202810d565efSmrg 
202910d565efSmrg       base = get_base_address (t);
203010d565efSmrg       if (base)
203110d565efSmrg 	{
203210d565efSmrg 	  if (DECL_P (base)
203310d565efSmrg 	      && TREE_READONLY (base)
203410d565efSmrg 	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
203510d565efSmrg 	      && !TREE_THIS_VOLATILE (base))
203610d565efSmrg 	    MEM_READONLY_P (ref) = 1;
203710d565efSmrg 
203810d565efSmrg 	  /* Mark static const strings readonly as well.  */
203910d565efSmrg 	  if (TREE_CODE (base) == STRING_CST
204010d565efSmrg 	      && TREE_READONLY (base)
204110d565efSmrg 	      && TREE_STATIC (base))
204210d565efSmrg 	    MEM_READONLY_P (ref) = 1;
204310d565efSmrg 
204410d565efSmrg 	  /* Address-space information is on the base object.  */
204510d565efSmrg 	  if (TREE_CODE (base) == MEM_REF
204610d565efSmrg 	      || TREE_CODE (base) == TARGET_MEM_REF)
204710d565efSmrg 	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
204810d565efSmrg 								      0))));
204910d565efSmrg 	  else
205010d565efSmrg 	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
205110d565efSmrg 	}
205210d565efSmrg 
205310d565efSmrg       /* If this expression uses its parent's alias set, mark it such
205410d565efSmrg 	 that we won't change it.  */
205510d565efSmrg       if (component_uses_parent_alias_set_from (t) != NULL_TREE)
205610d565efSmrg 	MEM_KEEP_ALIAS_SET_P (ref) = 1;
205710d565efSmrg 
205810d565efSmrg       /* If this is a decl, set the attributes of the MEM from it.  */
205910d565efSmrg       if (DECL_P (t))
206010d565efSmrg 	{
206110d565efSmrg 	  attrs.expr = t;
206210d565efSmrg 	  attrs.offset_known_p = true;
206310d565efSmrg 	  attrs.offset = 0;
206410d565efSmrg 	  apply_bitpos = bitpos;
206510d565efSmrg 	  new_size = DECL_SIZE_UNIT (t);
206610d565efSmrg 	}
206710d565efSmrg 
2068*ec02198aSmrg       /* ???  If we end up with a constant or a descriptor do not
2069*ec02198aSmrg 	 record a MEM_EXPR.  */
2070*ec02198aSmrg       else if (CONSTANT_CLASS_P (t)
2071*ec02198aSmrg 	       || TREE_CODE (t) == CONSTRUCTOR)
207210d565efSmrg 	;
207310d565efSmrg 
207410d565efSmrg       /* If this is a field reference, record it.  */
207510d565efSmrg       else if (TREE_CODE (t) == COMPONENT_REF)
207610d565efSmrg 	{
207710d565efSmrg 	  attrs.expr = t;
207810d565efSmrg 	  attrs.offset_known_p = true;
207910d565efSmrg 	  attrs.offset = 0;
208010d565efSmrg 	  apply_bitpos = bitpos;
208110d565efSmrg 	  if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
208210d565efSmrg 	    new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
208310d565efSmrg 	}
208410d565efSmrg 
2085*ec02198aSmrg       /* Else record it.  */
2086*ec02198aSmrg       else
208710d565efSmrg 	{
2088*ec02198aSmrg 	  gcc_assert (handled_component_p (t)
2089*ec02198aSmrg 		      || TREE_CODE (t) == MEM_REF
2090*ec02198aSmrg 		      || TREE_CODE (t) == TARGET_MEM_REF);
209110d565efSmrg 	  attrs.expr = t;
209210d565efSmrg 	  attrs.offset_known_p = true;
209310d565efSmrg 	  attrs.offset = 0;
209410d565efSmrg 	  apply_bitpos = bitpos;
209510d565efSmrg 	}
209610d565efSmrg 
2097*ec02198aSmrg       /* If this is a reference based on a partitioned decl replace the
2098*ec02198aSmrg 	 base with a MEM_REF of the pointer representative we created
2099*ec02198aSmrg 	 during stack slot partitioning.  */
2100*ec02198aSmrg       if (attrs.expr
2101*ec02198aSmrg 	  && VAR_P (base)
2102*ec02198aSmrg 	  && ! is_global_var (base)
2103*ec02198aSmrg 	  && cfun->gimple_df->decls_to_pointers != NULL)
2104*ec02198aSmrg 	{
2105*ec02198aSmrg 	  tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
2106*ec02198aSmrg 	  if (namep)
2107*ec02198aSmrg 	    {
2108*ec02198aSmrg 	      attrs.expr = unshare_expr (attrs.expr);
2109*ec02198aSmrg 	      tree *orig_base = &attrs.expr;
2110*ec02198aSmrg 	      while (handled_component_p (*orig_base))
2111*ec02198aSmrg 		orig_base = &TREE_OPERAND (*orig_base, 0);
2112*ec02198aSmrg 	      tree aptrt = reference_alias_ptr_type (*orig_base);
2113*ec02198aSmrg 	      *orig_base = build2 (MEM_REF, TREE_TYPE (*orig_base), *namep,
2114*ec02198aSmrg 				   build_int_cst (aptrt, 0));
2115*ec02198aSmrg 	    }
2116*ec02198aSmrg 	}
2117*ec02198aSmrg 
211810d565efSmrg       /* Compute the alignment.  */
211910d565efSmrg       unsigned int obj_align;
212010d565efSmrg       unsigned HOST_WIDE_INT obj_bitpos;
212110d565efSmrg       get_object_alignment_1 (t, &obj_align, &obj_bitpos);
2122c7a68eb7Smrg       unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
2123c7a68eb7Smrg       if (diff_align != 0)
2124c7a68eb7Smrg 	obj_align = MIN (obj_align, diff_align);
212510d565efSmrg       attrs.align = MAX (attrs.align, obj_align);
212610d565efSmrg     }
212710d565efSmrg 
2128c7a68eb7Smrg   poly_uint64 const_size;
2129c7a68eb7Smrg   if (poly_int_tree_p (new_size, &const_size))
213010d565efSmrg     {
213110d565efSmrg       attrs.size_known_p = true;
2132c7a68eb7Smrg       attrs.size = const_size;
213310d565efSmrg     }
213410d565efSmrg 
213510d565efSmrg   /* If we modified OFFSET based on T, then subtract the outstanding
213610d565efSmrg      bit position offset.  Similarly, increase the size of the accessed
213710d565efSmrg      object to contain the negative offset.  */
2138c7a68eb7Smrg   if (maybe_ne (apply_bitpos, 0))
213910d565efSmrg     {
214010d565efSmrg       gcc_assert (attrs.offset_known_p);
2141c7a68eb7Smrg       poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
2142c7a68eb7Smrg       attrs.offset -= bytepos;
214310d565efSmrg       if (attrs.size_known_p)
2144c7a68eb7Smrg 	attrs.size += bytepos;
214510d565efSmrg     }
214610d565efSmrg 
214710d565efSmrg   /* Now set the attributes we computed above.  */
214810d565efSmrg   attrs.addrspace = as;
214910d565efSmrg   set_mem_attrs (ref, &attrs);
215010d565efSmrg }
215110d565efSmrg 
215210d565efSmrg void
215310d565efSmrg set_mem_attributes (rtx ref, tree t, int objectp)
215410d565efSmrg {
215510d565efSmrg   set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
215610d565efSmrg }
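/* E.g. (sketch): for a decl D being expanded into a fresh MEM,
   set_mem_attributes (mem, D, 1) records D as the MEM_EXPR with
   offset 0, takes MEM_SIZE from DECL_SIZE_UNIT, the alias set from
   get_alias_set (D), and an alignment no smaller than that of D's
   type.  */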
215710d565efSmrg 
215810d565efSmrg /* Set the alias set of MEM to SET.  */
215910d565efSmrg 
216010d565efSmrg void
216110d565efSmrg set_mem_alias_set (rtx mem, alias_set_type set)
216210d565efSmrg {
216310d565efSmrg   /* If the new and old alias sets don't conflict, something is wrong.  */
216410d565efSmrg   gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
2165c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (mem));
216610d565efSmrg   attrs.alias = set;
216710d565efSmrg   set_mem_attrs (mem, &attrs);
216810d565efSmrg }
216910d565efSmrg 
217010d565efSmrg /* Set the address space of MEM to ADDRSPACE (target-defined).  */
217110d565efSmrg 
217210d565efSmrg void
217310d565efSmrg set_mem_addr_space (rtx mem, addr_space_t addrspace)
217410d565efSmrg {
2175c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (mem));
217610d565efSmrg   attrs.addrspace = addrspace;
217710d565efSmrg   set_mem_attrs (mem, &attrs);
217810d565efSmrg }
217910d565efSmrg 
218010d565efSmrg /* Set the alignment of MEM to ALIGN bits.  */
218110d565efSmrg 
218210d565efSmrg void
218310d565efSmrg set_mem_align (rtx mem, unsigned int align)
218410d565efSmrg {
2185c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (mem));
218610d565efSmrg   attrs.align = align;
218710d565efSmrg   set_mem_attrs (mem, &attrs);
218810d565efSmrg }
218910d565efSmrg 
219010d565efSmrg /* Set the expr for MEM to EXPR.  */
219110d565efSmrg 
219210d565efSmrg void
219310d565efSmrg set_mem_expr (rtx mem, tree expr)
219410d565efSmrg {
2195c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (mem));
219610d565efSmrg   attrs.expr = expr;
219710d565efSmrg   set_mem_attrs (mem, &attrs);
219810d565efSmrg }
219910d565efSmrg 
220010d565efSmrg /* Set the offset of MEM to OFFSET.  */
220110d565efSmrg 
220210d565efSmrg void
2203c7a68eb7Smrg set_mem_offset (rtx mem, poly_int64 offset)
220410d565efSmrg {
2205c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (mem));
220610d565efSmrg   attrs.offset_known_p = true;
220710d565efSmrg   attrs.offset = offset;
220810d565efSmrg   set_mem_attrs (mem, &attrs);
220910d565efSmrg }
221010d565efSmrg 
221110d565efSmrg /* Clear the offset of MEM.  */
221210d565efSmrg 
221310d565efSmrg void
221410d565efSmrg clear_mem_offset (rtx mem)
221510d565efSmrg {
2216c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (mem));
221710d565efSmrg   attrs.offset_known_p = false;
221810d565efSmrg   set_mem_attrs (mem, &attrs);
221910d565efSmrg }
222010d565efSmrg 
222110d565efSmrg /* Set the size of MEM to SIZE.  */
222210d565efSmrg 
222310d565efSmrg void
2224c7a68eb7Smrg set_mem_size (rtx mem, poly_int64 size)
222510d565efSmrg {
2226c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (mem));
222710d565efSmrg   attrs.size_known_p = true;
222810d565efSmrg   attrs.size = size;
222910d565efSmrg   set_mem_attrs (mem, &attrs);
223010d565efSmrg }
223110d565efSmrg 
223210d565efSmrg /* Clear the size of MEM.  */
223310d565efSmrg 
223410d565efSmrg void
223510d565efSmrg clear_mem_size (rtx mem)
223610d565efSmrg {
2237c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (mem));
223810d565efSmrg   attrs.size_known_p = false;
223910d565efSmrg   set_mem_attrs (mem, &attrs);
224010d565efSmrg }
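/* All the set_mem_* and clear_mem_* routines above share one
   copy-modify-install pattern.  A hypothetical helper that grows
   MEM_SIZE would follow the same shape:

     static void
     grow_mem_size (rtx mem, poly_int64 extra)
     {
       mem_attrs attrs (*get_mem_attrs (mem));
       gcc_assert (attrs.size_known_p);
       attrs.size += extra;
       set_mem_attrs (mem, &attrs);
     }
*/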
224110d565efSmrg 
224210d565efSmrg /* Return a memory reference like MEMREF, but with its mode changed to MODE
224310d565efSmrg    and its address changed to ADDR.  (VOIDmode means don't change the mode.
224410d565efSmrg    NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
224510d565efSmrg    returned memory location is required to be valid.  INPLACE is true if any
224610d565efSmrg    changes can be made directly to MEMREF or false if MEMREF must be treated
224710d565efSmrg    as immutable.
224810d565efSmrg 
224910d565efSmrg    The memory attributes are not changed.  */
225010d565efSmrg 
225110d565efSmrg static rtx
225210d565efSmrg change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
225310d565efSmrg 		  bool inplace)
225410d565efSmrg {
225510d565efSmrg   addr_space_t as;
225610d565efSmrg   rtx new_rtx;
225710d565efSmrg 
225810d565efSmrg   gcc_assert (MEM_P (memref));
225910d565efSmrg   as = MEM_ADDR_SPACE (memref);
226010d565efSmrg   if (mode == VOIDmode)
226110d565efSmrg     mode = GET_MODE (memref);
226210d565efSmrg   if (addr == 0)
226310d565efSmrg     addr = XEXP (memref, 0);
226410d565efSmrg   if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
226510d565efSmrg       && (!validate || memory_address_addr_space_p (mode, addr, as)))
226610d565efSmrg     return memref;
226710d565efSmrg 
226810d565efSmrg   /* Don't validate the address for LRA.  LRA can make the address
226910d565efSmrg      valid by itself in the most efficient way.  */
227010d565efSmrg   if (validate && !lra_in_progress)
227110d565efSmrg     {
227210d565efSmrg       if (reload_in_progress || reload_completed)
227310d565efSmrg 	gcc_assert (memory_address_addr_space_p (mode, addr, as));
227410d565efSmrg       else
227510d565efSmrg 	addr = memory_address_addr_space (mode, addr, as);
227610d565efSmrg     }
227710d565efSmrg 
227810d565efSmrg   if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
227910d565efSmrg     return memref;
228010d565efSmrg 
228110d565efSmrg   if (inplace)
228210d565efSmrg     {
228310d565efSmrg       XEXP (memref, 0) = addr;
228410d565efSmrg       return memref;
228510d565efSmrg     }
228610d565efSmrg 
228710d565efSmrg   new_rtx = gen_rtx_MEM (mode, addr);
228810d565efSmrg   MEM_COPY_ATTRIBUTES (new_rtx, memref);
228910d565efSmrg   return new_rtx;
229010d565efSmrg }
229110d565efSmrg 
229210d565efSmrg /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
229310d565efSmrg    way we are changing MEMREF, so we only preserve the alias set.  */
229410d565efSmrg 
229510d565efSmrg rtx
229610d565efSmrg change_address (rtx memref, machine_mode mode, rtx addr)
229710d565efSmrg {
229810d565efSmrg   rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
229910d565efSmrg   machine_mode mmode = GET_MODE (new_rtx);
2300*ec02198aSmrg   class mem_attrs *defattrs;
230110d565efSmrg 
2302c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (memref));
230310d565efSmrg   defattrs = mode_mem_attrs[(int) mmode];
230410d565efSmrg   attrs.expr = NULL_TREE;
230510d565efSmrg   attrs.offset_known_p = false;
230610d565efSmrg   attrs.size_known_p = defattrs->size_known_p;
230710d565efSmrg   attrs.size = defattrs->size;
230810d565efSmrg   attrs.align = defattrs->align;
230910d565efSmrg 
231010d565efSmrg   /* If there are no changes, just return the original memory reference.  */
231110d565efSmrg   if (new_rtx == memref)
231210d565efSmrg     {
231310d565efSmrg       if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
231410d565efSmrg 	return new_rtx;
231510d565efSmrg 
231610d565efSmrg       new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
231710d565efSmrg       MEM_COPY_ATTRIBUTES (new_rtx, memref);
231810d565efSmrg     }
231910d565efSmrg 
232010d565efSmrg   set_mem_attrs (new_rtx, &attrs);
232110d565efSmrg   return new_rtx;
232210d565efSmrg }
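/* A sketch of one use: rereference the same memory in HImode two
   bytes further on,

     rtx new_mem = change_address (mem, HImode,
				   plus_constant (Pmode, XEXP (mem, 0), 2));

   Only the alias set survives; expr, offset and size are reset to
   the HImode defaults, precisely because the caller has not said
   how the new reference relates to the old one.  */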
232310d565efSmrg 
232410d565efSmrg /* Return a memory reference like MEMREF, but with its mode changed
232510d565efSmrg    to MODE and its address offset by OFFSET bytes.  If VALIDATE is
232610d565efSmrg    nonzero, the memory address is forced to be valid.
232710d565efSmrg    If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
232810d565efSmrg    and the caller is responsible for adjusting MEMREF base register.
232910d565efSmrg    If ADJUST_OBJECT is zero, the underlying object associated with the
233010d565efSmrg    memory reference is left unchanged and the caller is responsible for
233110d565efSmrg    dealing with it.  Otherwise, if the new memory reference is outside
233210d565efSmrg    the underlying object, even partially, then the object is dropped.
233310d565efSmrg    SIZE, if nonzero, is the size of an access in cases where MODE
233410d565efSmrg    has no inherent size.  */
233510d565efSmrg 
233610d565efSmrg rtx
2337c7a68eb7Smrg adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
233810d565efSmrg 		  int validate, int adjust_address, int adjust_object,
2339c7a68eb7Smrg 		  poly_int64 size)
234010d565efSmrg {
234110d565efSmrg   rtx addr = XEXP (memref, 0);
234210d565efSmrg   rtx new_rtx;
2343c7a68eb7Smrg   scalar_int_mode address_mode;
2344*ec02198aSmrg   class mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
234510d565efSmrg   unsigned HOST_WIDE_INT max_align;
234610d565efSmrg #ifdef POINTERS_EXTEND_UNSIGNED
2347c7a68eb7Smrg   scalar_int_mode pointer_mode
234810d565efSmrg     = targetm.addr_space.pointer_mode (attrs.addrspace);
234910d565efSmrg #endif
235010d565efSmrg 
235110d565efSmrg   /* VOIDmode means no mode change for change_address_1.  */
235210d565efSmrg   if (mode == VOIDmode)
235310d565efSmrg     mode = GET_MODE (memref);
235410d565efSmrg 
235510d565efSmrg   /* Take the size of non-BLKmode accesses from the mode.  */
235610d565efSmrg   defattrs = mode_mem_attrs[(int) mode];
235710d565efSmrg   if (defattrs->size_known_p)
235810d565efSmrg     size = defattrs->size;
235910d565efSmrg 
236010d565efSmrg   /* If there are no changes, just return the original memory reference.  */
2361c7a68eb7Smrg   if (mode == GET_MODE (memref)
2362c7a68eb7Smrg       && known_eq (offset, 0)
2363c7a68eb7Smrg       && (known_eq (size, 0)
2364c7a68eb7Smrg 	  || (attrs.size_known_p && known_eq (attrs.size, size)))
236510d565efSmrg       && (!validate || memory_address_addr_space_p (mode, addr,
236610d565efSmrg 						    attrs.addrspace)))
236710d565efSmrg     return memref;
236810d565efSmrg 
236910d565efSmrg   /* ??? Prefer to create garbage instead of creating shared rtl.
237010d565efSmrg      This may happen even if offset is nonzero -- consider
237110d565efSmrg      (plus (plus reg reg) const_int) -- so do this always.  */
237210d565efSmrg   addr = copy_rtx (addr);
237310d565efSmrg 
237410d565efSmrg   /* Convert a possibly large offset to a signed value within the
237510d565efSmrg      range of the target address space.  */
237610d565efSmrg   address_mode = get_address_mode (memref);
2377c7a68eb7Smrg   offset = trunc_int_for_mode (offset, address_mode);
237810d565efSmrg 
237910d565efSmrg   if (adjust_address)
238010d565efSmrg     {
238110d565efSmrg       /* If MEMREF is a LO_SUM and the offset is within the alignment of the
238210d565efSmrg 	 object, we can merge it into the LO_SUM.  */
2383c7a68eb7Smrg       if (GET_MODE (memref) != BLKmode
2384c7a68eb7Smrg 	  && GET_CODE (addr) == LO_SUM
2385c7a68eb7Smrg 	  && known_in_range_p (offset,
2386c7a68eb7Smrg 			       0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
2387c7a68eb7Smrg 				   / BITS_PER_UNIT)))
238810d565efSmrg 	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
238910d565efSmrg 			       plus_constant (address_mode,
239010d565efSmrg 					      XEXP (addr, 1), offset));
239110d565efSmrg #ifdef POINTERS_EXTEND_UNSIGNED
239210d565efSmrg       /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
239310d565efSmrg 	 in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
239410d565efSmrg 	 the fact that pointers are not allowed to overflow.  */
239510d565efSmrg       else if (POINTERS_EXTEND_UNSIGNED > 0
239610d565efSmrg 	       && GET_CODE (addr) == ZERO_EXTEND
239710d565efSmrg 	       && GET_MODE (XEXP (addr, 0)) == pointer_mode
2398c7a68eb7Smrg 	       && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
239910d565efSmrg 	addr = gen_rtx_ZERO_EXTEND (address_mode,
240010d565efSmrg 				    plus_constant (pointer_mode,
240110d565efSmrg 						   XEXP (addr, 0), offset));
240210d565efSmrg #endif
240310d565efSmrg       else
240410d565efSmrg 	addr = plus_constant (address_mode, addr, offset);
240510d565efSmrg     }
240610d565efSmrg 
240710d565efSmrg   new_rtx = change_address_1 (memref, mode, addr, validate, false);
240810d565efSmrg 
240910d565efSmrg   /* If the address is a REG, change_address_1 rightfully returns memref,
241010d565efSmrg      but this would destroy memref's MEM_ATTRS.  */
2411c7a68eb7Smrg   if (new_rtx == memref && maybe_ne (offset, 0))
241210d565efSmrg     new_rtx = copy_rtx (new_rtx);
241310d565efSmrg 
241410d565efSmrg   /* Conservatively drop the object if we don't know where we start from.  */
241510d565efSmrg   if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
241610d565efSmrg     {
241710d565efSmrg       attrs.expr = NULL_TREE;
241810d565efSmrg       attrs.alias = 0;
241910d565efSmrg     }
242010d565efSmrg 
242110d565efSmrg   /* Compute the new values of the memory attributes due to this adjustment.
242210d565efSmrg      We add the offsets and update the alignment.  */
242310d565efSmrg   if (attrs.offset_known_p)
242410d565efSmrg     {
242510d565efSmrg       attrs.offset += offset;
242610d565efSmrg 
242710d565efSmrg       /* Drop the object if the new left end is not within its bounds.  */
2428c7a68eb7Smrg       if (adjust_object && maybe_lt (attrs.offset, 0))
242910d565efSmrg 	{
243010d565efSmrg 	  attrs.expr = NULL_TREE;
243110d565efSmrg 	  attrs.alias = 0;
243210d565efSmrg 	}
243310d565efSmrg     }
243410d565efSmrg 
243510d565efSmrg   /* Compute the new alignment by taking the MIN of the alignment and the
243610d565efSmrg      lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
243710d565efSmrg      is zero.  */
2438c7a68eb7Smrg   if (maybe_ne (offset, 0))
243910d565efSmrg     {
2440c7a68eb7Smrg       max_align = known_alignment (offset) * BITS_PER_UNIT;
244110d565efSmrg       attrs.align = MIN (attrs.align, max_align);
244210d565efSmrg     }
244310d565efSmrg 
2444c7a68eb7Smrg   if (maybe_ne (size, 0))
244510d565efSmrg     {
244610d565efSmrg       /* Drop the object if the new right end is not within its bounds.  */
2447c7a68eb7Smrg       if (adjust_object && maybe_gt (offset + size, attrs.size))
244810d565efSmrg 	{
244910d565efSmrg 	  attrs.expr = NULL_TREE;
245010d565efSmrg 	  attrs.alias = 0;
245110d565efSmrg 	}
245210d565efSmrg       attrs.size_known_p = true;
245310d565efSmrg       attrs.size = size;
245410d565efSmrg     }
245510d565efSmrg   else if (attrs.size_known_p)
245610d565efSmrg     {
245710d565efSmrg       gcc_assert (!adjust_object);
245810d565efSmrg       attrs.size -= offset;
245910d565efSmrg       /* ??? The store_by_pieces machinery generates negative sizes,
246010d565efSmrg 	 so don't assert for that here.  */
246110d565efSmrg     }
246210d565efSmrg 
246310d565efSmrg   set_mem_attrs (new_rtx, &attrs);
246410d565efSmrg 
246510d565efSmrg   return new_rtx;
246610d565efSmrg }
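
/* Example (an illustrative sketch): the usual entry points are the
   emit-rtl.h wrappers adjust_address (validate) and adjust_address_nv
   (no validate).  Given a DImode MEM:

     rtx high = adjust_address (mem, SImode, 4);
     == adjust_address_1 (mem, SImode, 4, 1, 1, 0, 0)

   The result is an SImode MEM four bytes in: MEM_OFFSET grows by 4,
   MEM_SIZE is taken from SImode, and MEM_ALIGN is capped at
   known_alignment (4) * BITS_PER_UNIT == 32 bits.  */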
246710d565efSmrg 
246810d565efSmrg /* Return a memory reference like MEMREF, but with its mode changed
246910d565efSmrg    to MODE and its address changed to ADDR, which is assumed to be
247010d565efSmrg    MEMREF offset by OFFSET bytes.  If VALIDATE is
247110d565efSmrg    nonzero, the memory address is forced to be valid.  */
247210d565efSmrg 
247310d565efSmrg rtx
247410d565efSmrg adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
2475c7a68eb7Smrg 			     poly_int64 offset, int validate)
247610d565efSmrg {
247710d565efSmrg   memref = change_address_1 (memref, VOIDmode, addr, validate, false);
247810d565efSmrg   return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
247910d565efSmrg }
248010d565efSmrg 
248110d565efSmrg /* Return a memory reference like MEMREF, but whose address is changed by
248210d565efSmrg    adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
248310d565efSmrg    known to be in OFFSET (possibly 1).  */
248410d565efSmrg 
248510d565efSmrg rtx
248610d565efSmrg offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
248710d565efSmrg {
248810d565efSmrg   rtx new_rtx, addr = XEXP (memref, 0);
248910d565efSmrg   machine_mode address_mode;
2490*ec02198aSmrg   class mem_attrs *defattrs;
249110d565efSmrg 
2492c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (memref));
249310d565efSmrg   address_mode = get_address_mode (memref);
249410d565efSmrg   new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
249510d565efSmrg 
249610d565efSmrg   /* At this point we don't know _why_ the address is invalid.  It
249710d565efSmrg      could have secondary memory references, multiplies or anything.
249810d565efSmrg 
249910d565efSmrg      However, if we did go and rearrange things, we can wind up not
250010d565efSmrg      being able to recognize the magic around pic_offset_table_rtx.
250110d565efSmrg      This stuff is fragile, and is yet another example of why it is
250210d565efSmrg      bad to expose PIC machinery too early.  */
250310d565efSmrg   if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
250410d565efSmrg 				     attrs.addrspace)
250510d565efSmrg       && GET_CODE (addr) == PLUS
250610d565efSmrg       && XEXP (addr, 0) == pic_offset_table_rtx)
250710d565efSmrg     {
250810d565efSmrg       addr = force_reg (GET_MODE (addr), addr);
250910d565efSmrg       new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
251010d565efSmrg     }
251110d565efSmrg 
251210d565efSmrg   update_temp_slot_address (XEXP (memref, 0), new_rtx);
251310d565efSmrg   new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);
251410d565efSmrg 
251510d565efSmrg   /* If there are no changes, just return the original memory reference.  */
251610d565efSmrg   if (new_rtx == memref)
251710d565efSmrg     return new_rtx;
251810d565efSmrg 
251910d565efSmrg   /* Update the alignment to reflect the offset.  Reset the offset, which
252010d565efSmrg      we don't know.  */
252110d565efSmrg   defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
252210d565efSmrg   attrs.offset_known_p = false;
252310d565efSmrg   attrs.size_known_p = defattrs->size_known_p;
252410d565efSmrg   attrs.size = defattrs->size;
252510d565efSmrg   attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
252610d565efSmrg   set_mem_attrs (new_rtx, &attrs);
252710d565efSmrg   return new_rtx;
252810d565efSmrg }
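
/* Illustrative example: for an index operand already scaled by 4,
   e.g. (mult (reg i) (const_int 4)), the caller knows the offset is
   a multiple of 4 and passes POW2 == 4:

     rtx elt = offset_address (array_mem, scaled_index, 4);

   The new MEM forgets its MEM_OFFSET (now unknown) and its MEM_ALIGN
   is capped at POW2 * BITS_PER_UNIT == 32 bits.  */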
252910d565efSmrg 
253010d565efSmrg /* Return a memory reference like MEMREF, but with its address changed to
253110d565efSmrg    ADDR.  The caller is asserting that the actual piece of memory pointed
253210d565efSmrg    to is the same, just the form of the address is being changed, such as
253310d565efSmrg    by putting something into a register.  INPLACE is true if any changes
253410d565efSmrg    can be made directly to MEMREF or false if MEMREF must be treated as
253510d565efSmrg    immutable.  */
253610d565efSmrg 
253710d565efSmrg rtx
253810d565efSmrg replace_equiv_address (rtx memref, rtx addr, bool inplace)
253910d565efSmrg {
254010d565efSmrg   /* change_address_1 copies the memory attribute structure without change
254110d565efSmrg      and that's exactly what we want here.  */
254210d565efSmrg   update_temp_slot_address (XEXP (memref, 0), addr);
254310d565efSmrg   return change_address_1 (memref, VOIDmode, addr, 1, inplace);
254410d565efSmrg }
254510d565efSmrg 
254610d565efSmrg /* Likewise, but the reference is not required to be valid.  */
254710d565efSmrg 
254810d565efSmrg rtx
254910d565efSmrg replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
255010d565efSmrg {
255110d565efSmrg   return change_address_1 (memref, VOIDmode, addr, 0, inplace);
255210d565efSmrg }
255310d565efSmrg 
255410d565efSmrg /* Return a memory reference like MEMREF, but with its mode widened to
255510d565efSmrg    MODE and offset by OFFSET.  This would be used by targets that e.g.
255610d565efSmrg    cannot issue QImode memory operations and have to use SImode memory
255710d565efSmrg    operations plus masking logic.  */
255810d565efSmrg 
255910d565efSmrg rtx
2560c7a68eb7Smrg widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
256110d565efSmrg {
256210d565efSmrg   rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2563c7a68eb7Smrg   poly_uint64 size = GET_MODE_SIZE (mode);
256410d565efSmrg 
256510d565efSmrg   /* If there are no changes, just return the original memory reference.  */
256610d565efSmrg   if (new_rtx == memref)
256710d565efSmrg     return new_rtx;
256810d565efSmrg 
2569c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (new_rtx));
257010d565efSmrg 
257110d565efSmrg   /* If we don't know what offset we were at within the expression, then
257210d565efSmrg      we can't know if we've overstepped the bounds.  */
257310d565efSmrg   if (! attrs.offset_known_p)
257410d565efSmrg     attrs.expr = NULL_TREE;
257510d565efSmrg 
257610d565efSmrg   while (attrs.expr)
257710d565efSmrg     {
257810d565efSmrg       if (TREE_CODE (attrs.expr) == COMPONENT_REF)
257910d565efSmrg 	{
258010d565efSmrg 	  tree field = TREE_OPERAND (attrs.expr, 1);
258110d565efSmrg 	  tree offset = component_ref_field_offset (attrs.expr);
258210d565efSmrg 
258310d565efSmrg 	  if (! DECL_SIZE_UNIT (field))
258410d565efSmrg 	    {
258510d565efSmrg 	      attrs.expr = NULL_TREE;
258610d565efSmrg 	      break;
258710d565efSmrg 	    }
258810d565efSmrg 
258910d565efSmrg 	  /* Is the field at least as large as the access?  If so, ok;
259010d565efSmrg 	     otherwise strip back to the containing structure.  */
2591c7a68eb7Smrg 	  if (poly_int_tree_p (DECL_SIZE_UNIT (field))
2592c7a68eb7Smrg 	      && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
2593c7a68eb7Smrg 	      && known_ge (attrs.offset, 0))
259410d565efSmrg 	    break;
259510d565efSmrg 
2596c7a68eb7Smrg 	  poly_uint64 suboffset;
2597c7a68eb7Smrg 	  if (!poly_int_tree_p (offset, &suboffset))
259810d565efSmrg 	    {
259910d565efSmrg 	      attrs.expr = NULL_TREE;
260010d565efSmrg 	      break;
260110d565efSmrg 	    }
260210d565efSmrg 
260310d565efSmrg 	  attrs.expr = TREE_OPERAND (attrs.expr, 0);
2604c7a68eb7Smrg 	  attrs.offset += suboffset;
260510d565efSmrg 	  attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
260610d565efSmrg 			   / BITS_PER_UNIT);
260710d565efSmrg 	}
260810d565efSmrg       /* Similarly for the decl.  */
260910d565efSmrg       else if (DECL_P (attrs.expr)
261010d565efSmrg 	       && DECL_SIZE_UNIT (attrs.expr)
2611c7a68eb7Smrg 	       && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
2612c7a68eb7Smrg 	       && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
2613c7a68eb7Smrg 			   size)
2614c7a68eb7Smrg 	       && known_ge (attrs.offset, 0))
261510d565efSmrg 	break;
261610d565efSmrg       else
261710d565efSmrg 	{
261810d565efSmrg 	  /* The widened memory access overflows the expression, which means
261910d565efSmrg 	     that it could alias another expression.  Zap it.  */
262010d565efSmrg 	  attrs.expr = NULL_TREE;
262110d565efSmrg 	  break;
262210d565efSmrg 	}
262310d565efSmrg     }
262410d565efSmrg 
262510d565efSmrg   if (! attrs.expr)
262610d565efSmrg     attrs.offset_known_p = false;
262710d565efSmrg 
262810d565efSmrg   /* The widened memory may alias other stuff, so zap the alias set.  */
262910d565efSmrg   /* ??? Maybe use get_alias_set on any remaining expression.  */
263010d565efSmrg   attrs.alias = 0;
263110d565efSmrg   attrs.size_known_p = true;
263210d565efSmrg   attrs.size = size;
263310d565efSmrg   set_mem_attrs (new_rtx, &attrs);
263410d565efSmrg   return new_rtx;
263510d565efSmrg }
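
/* Illustrative example: a target that cannot issue QImode loads can
   widen a byte access to a full word:

     rtx wide = widen_memory_access (byte_mem, SImode, 0);

   The result is an SImode MEM at the same address with MEM_SIZE taken
   from SImode.  MEM_EXPR survives only when the loop above proves the
   containing field or decl covers the widened size, and MEM_ALIAS_SET
   is reset to 0 since the wider access may touch neighbouring
   objects.  */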
263610d565efSmrg 
263710d565efSmrg /* A fake decl that is used as the MEM_EXPR of spill slots.  */
263810d565efSmrg static GTY(()) tree spill_slot_decl;
263910d565efSmrg 
264010d565efSmrg tree
264110d565efSmrg get_spill_slot_decl (bool force_build_p)
264210d565efSmrg {
264310d565efSmrg   tree d = spill_slot_decl;
264410d565efSmrg   rtx rd;
264510d565efSmrg 
264610d565efSmrg   if (d || !force_build_p)
264710d565efSmrg     return d;
264810d565efSmrg 
264910d565efSmrg   d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
265010d565efSmrg 		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
265110d565efSmrg   DECL_ARTIFICIAL (d) = 1;
265210d565efSmrg   DECL_IGNORED_P (d) = 1;
265310d565efSmrg   TREE_USED (d) = 1;
265410d565efSmrg   spill_slot_decl = d;
265510d565efSmrg 
265610d565efSmrg   rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
265710d565efSmrg   MEM_NOTRAP_P (rd) = 1;
2658c7a68eb7Smrg   mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
265910d565efSmrg   attrs.alias = new_alias_set ();
266010d565efSmrg   attrs.expr = d;
266110d565efSmrg   set_mem_attrs (rd, &attrs);
266210d565efSmrg   SET_DECL_RTL (d, rd);
266310d565efSmrg 
266410d565efSmrg   return d;
266510d565efSmrg }
266610d565efSmrg 
266710d565efSmrg /* Given MEM, a result from assign_stack_local, fill in the memory
266810d565efSmrg    attributes as appropriate for a register allocator spill slot.
266910d565efSmrg    These slots are not aliasable by other memory.  We arrange for
267010d565efSmrg    them all to use a single MEM_EXPR, so that the aliasing code can
267110d565efSmrg    work properly in the case of shared spill slots.  */
267210d565efSmrg 
267310d565efSmrg void
267410d565efSmrg set_mem_attrs_for_spill (rtx mem)
267510d565efSmrg {
267610d565efSmrg   rtx addr;
267710d565efSmrg 
2678c7a68eb7Smrg   mem_attrs attrs (*get_mem_attrs (mem));
267910d565efSmrg   attrs.expr = get_spill_slot_decl (true);
268010d565efSmrg   attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
268110d565efSmrg   attrs.addrspace = ADDR_SPACE_GENERIC;
268210d565efSmrg 
268310d565efSmrg   /* We expect the incoming memory to be of the form:
268410d565efSmrg 	(mem:MODE (plus (reg sfp) (const_int offset)))
268510d565efSmrg      with perhaps the plus missing for offset = 0.  */
268610d565efSmrg   addr = XEXP (mem, 0);
268710d565efSmrg   attrs.offset_known_p = true;
2688c7a68eb7Smrg   strip_offset (addr, &attrs.offset);
268910d565efSmrg 
269010d565efSmrg   set_mem_attrs (mem, &attrs);
269110d565efSmrg   MEM_NOTRAP_P (mem) = 1;
269210d565efSmrg }
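
/* Illustrative sketch: a register allocator spilling a pseudo to the
   stack might do

     rtx slot = assign_stack_local (SImode, 4, 32);
     set_mem_attrs_for_spill (slot);

   after which the slot's MEM_EXPR is the shared %sfp decl, its alias
   set is the one allocated in get_spill_slot_decl, and MEM_OFFSET is
   the constant displacement stripped from its address.  */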
269310d565efSmrg 
269410d565efSmrg /* Return a newly created CODE_LABEL rtx with a unique label number.  */
269510d565efSmrg 
269610d565efSmrg rtx_code_label *
269710d565efSmrg gen_label_rtx (void)
269810d565efSmrg {
269910d565efSmrg   return as_a <rtx_code_label *> (
270010d565efSmrg 	    gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
270110d565efSmrg 				NULL, label_num++, NULL));
270210d565efSmrg }
270310d565efSmrg 
270410d565efSmrg /* For procedure integration.  */
270510d565efSmrg 
270610d565efSmrg /* Install new pointers to the first and last insns in the chain.
270710d565efSmrg    Also, set cur_insn_uid to one higher than the last in use.
270810d565efSmrg    Used for an inline-procedure after copying the insn chain.  */
270910d565efSmrg 
271010d565efSmrg void
271110d565efSmrg set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
271210d565efSmrg {
271310d565efSmrg   rtx_insn *insn;
271410d565efSmrg 
271510d565efSmrg   set_first_insn (first);
271610d565efSmrg   set_last_insn (last);
271710d565efSmrg   cur_insn_uid = 0;
271810d565efSmrg 
2719*ec02198aSmrg   if (param_min_nondebug_insn_uid || MAY_HAVE_DEBUG_INSNS)
272010d565efSmrg     {
272110d565efSmrg       int debug_count = 0;
272210d565efSmrg 
2723*ec02198aSmrg       cur_insn_uid = param_min_nondebug_insn_uid - 1;
272410d565efSmrg       cur_debug_insn_uid = 0;
272510d565efSmrg 
272610d565efSmrg       for (insn = first; insn; insn = NEXT_INSN (insn))
2727*ec02198aSmrg 	if (INSN_UID (insn) < param_min_nondebug_insn_uid)
272810d565efSmrg 	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
272910d565efSmrg 	else
273010d565efSmrg 	  {
273110d565efSmrg 	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
273210d565efSmrg 	    if (DEBUG_INSN_P (insn))
273310d565efSmrg 	      debug_count++;
273410d565efSmrg 	  }
273510d565efSmrg 
273610d565efSmrg       if (debug_count)
2737*ec02198aSmrg 	cur_debug_insn_uid = param_min_nondebug_insn_uid + debug_count;
273810d565efSmrg       else
273910d565efSmrg 	cur_debug_insn_uid++;
274010d565efSmrg     }
274110d565efSmrg   else
274210d565efSmrg     for (insn = first; insn; insn = NEXT_INSN (insn))
274310d565efSmrg       cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
274410d565efSmrg 
274510d565efSmrg   cur_insn_uid++;
274610d565efSmrg }
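
/* Worked example, assuming --param min-nondebug-insn-uid=20 so UIDs
   0..19 are reserved for debug insns: if the copied chain has debug
   insns with UIDs 3 and 7 and a largest nondebug UID of 57, the loop
   above computes cur_debug_insn_uid == 7 and cur_insn_uid == 57, and
   the trailing increments leave them at 8 and 58 for the next insns
   to be made.  */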
274710d565efSmrg 
274810d565efSmrg /* Go through all the RTL insn bodies and copy any invalid shared
274910d565efSmrg    structure.  This routine should only be called once.  */
275010d565efSmrg 
275110d565efSmrg static void
275210d565efSmrg unshare_all_rtl_1 (rtx_insn *insn)
275310d565efSmrg {
275410d565efSmrg   /* Unshare just about everything else.  */
275510d565efSmrg   unshare_all_rtl_in_chain (insn);
275610d565efSmrg 
275710d565efSmrg   /* Make sure the addresses of stack slots found outside the insn chain
275810d565efSmrg      (such as, in DECL_RTL of a variable) are not shared
275910d565efSmrg      with the insn chain.
276010d565efSmrg 
276110d565efSmrg      This special care is necessary when the stack slot MEM does not
276210d565efSmrg      actually appear in the insn chain.  If it does appear, its address
276310d565efSmrg      is unshared from all else at that point.  */
276410d565efSmrg   unsigned int i;
276510d565efSmrg   rtx temp;
276610d565efSmrg   FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
276710d565efSmrg     (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
276810d565efSmrg }
276910d565efSmrg 
277010d565efSmrg /* Go through all the RTL insn bodies and copy any invalid shared
277110d565efSmrg    structure, again.  This is a fairly expensive thing to do so it
277210d565efSmrg    should be done sparingly.  */
277310d565efSmrg 
277410d565efSmrg void
277510d565efSmrg unshare_all_rtl_again (rtx_insn *insn)
277610d565efSmrg {
277710d565efSmrg   rtx_insn *p;
277810d565efSmrg   tree decl;
277910d565efSmrg 
278010d565efSmrg   for (p = insn; p; p = NEXT_INSN (p))
278110d565efSmrg     if (INSN_P (p))
278210d565efSmrg       {
278310d565efSmrg 	reset_used_flags (PATTERN (p));
278410d565efSmrg 	reset_used_flags (REG_NOTES (p));
278510d565efSmrg 	if (CALL_P (p))
278610d565efSmrg 	  reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
278710d565efSmrg       }
278810d565efSmrg 
278910d565efSmrg   /* Make sure that virtual stack slots are not shared.  */
279010d565efSmrg   set_used_decls (DECL_INITIAL (cfun->decl));
279110d565efSmrg 
279210d565efSmrg   /* Make sure that virtual parameters are not shared.  */
279310d565efSmrg   for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
279410d565efSmrg     set_used_flags (DECL_RTL (decl));
279510d565efSmrg 
279610d565efSmrg   rtx temp;
279710d565efSmrg   unsigned int i;
279810d565efSmrg   FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
279910d565efSmrg     reset_used_flags (temp);
280010d565efSmrg 
280110d565efSmrg   unshare_all_rtl_1 (insn);
280210d565efSmrg }
280310d565efSmrg 
280410d565efSmrg unsigned int
280510d565efSmrg unshare_all_rtl (void)
280610d565efSmrg {
280710d565efSmrg   unshare_all_rtl_1 (get_insns ());
280810d565efSmrg 
280910d565efSmrg   for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
281010d565efSmrg     {
281110d565efSmrg       if (DECL_RTL_SET_P (decl))
281210d565efSmrg 	SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
281310d565efSmrg       DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
281410d565efSmrg     }
281510d565efSmrg 
281610d565efSmrg   return 0;
281710d565efSmrg }
281810d565efSmrg 
281910d565efSmrg 
282010d565efSmrg /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
282110d565efSmrg    Recursively does the same for subexpressions.  */
282210d565efSmrg 
282310d565efSmrg static void
282410d565efSmrg verify_rtx_sharing (rtx orig, rtx insn)
282510d565efSmrg {
282610d565efSmrg   rtx x = orig;
282710d565efSmrg   int i;
282810d565efSmrg   enum rtx_code code;
282910d565efSmrg   const char *format_ptr;
283010d565efSmrg 
283110d565efSmrg   if (x == 0)
283210d565efSmrg     return;
283310d565efSmrg 
283410d565efSmrg   code = GET_CODE (x);
283510d565efSmrg 
283610d565efSmrg   /* These types may be freely shared.  */
283710d565efSmrg 
283810d565efSmrg   switch (code)
283910d565efSmrg     {
284010d565efSmrg     case REG:
284110d565efSmrg     case DEBUG_EXPR:
284210d565efSmrg     case VALUE:
284310d565efSmrg     CASE_CONST_ANY:
284410d565efSmrg     case SYMBOL_REF:
284510d565efSmrg     case LABEL_REF:
284610d565efSmrg     case CODE_LABEL:
284710d565efSmrg     case PC:
284810d565efSmrg     case CC0:
284910d565efSmrg     case RETURN:
285010d565efSmrg     case SIMPLE_RETURN:
285110d565efSmrg     case SCRATCH:
285210d565efSmrg       /* SCRATCH rtxes must be shared because they represent distinct values.  */
285310d565efSmrg       return;
285410d565efSmrg     case CLOBBER:
285510d565efSmrg       /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
285610d565efSmrg          clobbers or clobbers of hard registers that originated as pseudos.
285710d565efSmrg          This is needed to allow safe register renaming.  */
285810d565efSmrg       if (REG_P (XEXP (x, 0))
285910d565efSmrg 	  && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
286010d565efSmrg 	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
286110d565efSmrg 	return;
286210d565efSmrg       break;
286310d565efSmrg 
286410d565efSmrg     case CONST:
286510d565efSmrg       if (shared_const_p (orig))
286610d565efSmrg 	return;
286710d565efSmrg       break;
286810d565efSmrg 
286910d565efSmrg     case MEM:
287010d565efSmrg       /* A MEM is allowed to be shared if its address is constant.  */
287110d565efSmrg       if (CONSTANT_ADDRESS_P (XEXP (x, 0))
287210d565efSmrg 	  || reload_completed || reload_in_progress)
287310d565efSmrg 	return;
287410d565efSmrg 
287510d565efSmrg       break;
287610d565efSmrg 
287710d565efSmrg     default:
287810d565efSmrg       break;
287910d565efSmrg     }
288010d565efSmrg 
288110d565efSmrg   /* This rtx may not be shared.  If it has already been seen,
288210d565efSmrg      report invalid rtl sharing.  */
288310d565efSmrg   if (flag_checking && RTX_FLAG (x, used))
288410d565efSmrg     {
288510d565efSmrg       error ("invalid rtl sharing found in the insn");
288610d565efSmrg       debug_rtx (insn);
288710d565efSmrg       error ("shared rtx");
288810d565efSmrg       debug_rtx (x);
288910d565efSmrg       internal_error ("internal consistency failure");
289010d565efSmrg     }
289110d565efSmrg   gcc_assert (!RTX_FLAG (x, used));
289210d565efSmrg 
289310d565efSmrg   RTX_FLAG (x, used) = 1;
289410d565efSmrg 
289510d565efSmrg   /* Now scan the subexpressions recursively.  */
289610d565efSmrg 
289710d565efSmrg   format_ptr = GET_RTX_FORMAT (code);
289810d565efSmrg 
289910d565efSmrg   for (i = 0; i < GET_RTX_LENGTH (code); i++)
290010d565efSmrg     {
290110d565efSmrg       switch (*format_ptr++)
290210d565efSmrg 	{
290310d565efSmrg 	case 'e':
290410d565efSmrg 	  verify_rtx_sharing (XEXP (x, i), insn);
290510d565efSmrg 	  break;
290610d565efSmrg 
290710d565efSmrg 	case 'E':
290810d565efSmrg 	  if (XVEC (x, i) != NULL)
290910d565efSmrg 	    {
291010d565efSmrg 	      int j;
291110d565efSmrg 	      int len = XVECLEN (x, i);
291210d565efSmrg 
291310d565efSmrg 	      for (j = 0; j < len; j++)
291410d565efSmrg 		{
291510d565efSmrg 		  /* We allow sharing of ASM_OPERANDS inside a single
291610d565efSmrg 		     instruction.  */
291710d565efSmrg 		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
291810d565efSmrg 		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
291910d565efSmrg 			  == ASM_OPERANDS))
292010d565efSmrg 		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
292110d565efSmrg 		  else
292210d565efSmrg 		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
292310d565efSmrg 		}
292410d565efSmrg 	    }
292510d565efSmrg 	  break;
292610d565efSmrg 	}
292710d565efSmrg     }
292810d565efSmrg   return;
292910d565efSmrg }
293010d565efSmrg 
293110d565efSmrg /* Reset used-flags for INSN.  */
293210d565efSmrg 
293310d565efSmrg static void
293410d565efSmrg reset_insn_used_flags (rtx insn)
293510d565efSmrg {
293610d565efSmrg   gcc_assert (INSN_P (insn));
293710d565efSmrg   reset_used_flags (PATTERN (insn));
293810d565efSmrg   reset_used_flags (REG_NOTES (insn));
293910d565efSmrg   if (CALL_P (insn))
294010d565efSmrg     reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
294110d565efSmrg }
294210d565efSmrg 
294310d565efSmrg /* Go through all the RTL insn bodies and clear all the USED bits.  */
294410d565efSmrg 
294510d565efSmrg static void
294610d565efSmrg reset_all_used_flags (void)
294710d565efSmrg {
294810d565efSmrg   rtx_insn *p;
294910d565efSmrg 
295010d565efSmrg   for (p = get_insns (); p; p = NEXT_INSN (p))
295110d565efSmrg     if (INSN_P (p))
295210d565efSmrg       {
295310d565efSmrg 	rtx pat = PATTERN (p);
295410d565efSmrg 	if (GET_CODE (pat) != SEQUENCE)
295510d565efSmrg 	  reset_insn_used_flags (p);
295610d565efSmrg 	else
295710d565efSmrg 	  {
295810d565efSmrg 	    gcc_assert (REG_NOTES (p) == NULL);
295910d565efSmrg 	    for (int i = 0; i < XVECLEN (pat, 0); i++)
296010d565efSmrg 	      {
296110d565efSmrg 		rtx insn = XVECEXP (pat, 0, i);
296210d565efSmrg 		if (INSN_P (insn))
296310d565efSmrg 		  reset_insn_used_flags (insn);
296410d565efSmrg 	      }
296510d565efSmrg 	  }
296610d565efSmrg       }
296710d565efSmrg }
296810d565efSmrg 
296910d565efSmrg /* Verify sharing in INSN.  */
297010d565efSmrg 
297110d565efSmrg static void
297210d565efSmrg verify_insn_sharing (rtx insn)
297310d565efSmrg {
297410d565efSmrg   gcc_assert (INSN_P (insn));
297510d565efSmrg   verify_rtx_sharing (PATTERN (insn), insn);
297610d565efSmrg   verify_rtx_sharing (REG_NOTES (insn), insn);
297710d565efSmrg   if (CALL_P (insn))
297810d565efSmrg     verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
297910d565efSmrg }
298010d565efSmrg 
298110d565efSmrg /* Go through all the RTL insn bodies and check that there is no unexpected
298210d565efSmrg    sharing in between the subexpressions.  */
298310d565efSmrg 
298410d565efSmrg DEBUG_FUNCTION void
298510d565efSmrg verify_rtl_sharing (void)
298610d565efSmrg {
298710d565efSmrg   rtx_insn *p;
298810d565efSmrg 
298910d565efSmrg   timevar_push (TV_VERIFY_RTL_SHARING);
299010d565efSmrg 
299110d565efSmrg   reset_all_used_flags ();
299210d565efSmrg 
299310d565efSmrg   for (p = get_insns (); p; p = NEXT_INSN (p))
299410d565efSmrg     if (INSN_P (p))
299510d565efSmrg       {
299610d565efSmrg 	rtx pat = PATTERN (p);
299710d565efSmrg 	if (GET_CODE (pat) != SEQUENCE)
299810d565efSmrg 	  verify_insn_sharing (p);
299910d565efSmrg 	else
300010d565efSmrg 	  for (int i = 0; i < XVECLEN (pat, 0); i++)
300110d565efSmrg 	      {
300210d565efSmrg 		rtx insn = XVECEXP (pat, 0, i);
300310d565efSmrg 		if (INSN_P (insn))
300410d565efSmrg 		  verify_insn_sharing (insn);
300510d565efSmrg 	      }
300610d565efSmrg       }
300710d565efSmrg 
300810d565efSmrg   reset_all_used_flags ();
300910d565efSmrg 
301010d565efSmrg   timevar_pop (TV_VERIFY_RTL_SHARING);
301110d565efSmrg }
301210d565efSmrg 
301310d565efSmrg /* Go through all the RTL insn bodies and copy any invalid shared structure.
301410d565efSmrg    Assumes the mark bits are cleared at entry.  */
301510d565efSmrg 
301610d565efSmrg void
301710d565efSmrg unshare_all_rtl_in_chain (rtx_insn *insn)
301810d565efSmrg {
301910d565efSmrg   for (; insn; insn = NEXT_INSN (insn))
302010d565efSmrg     if (INSN_P (insn))
302110d565efSmrg       {
302210d565efSmrg 	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
302310d565efSmrg 	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
302410d565efSmrg 	if (CALL_P (insn))
302510d565efSmrg 	  CALL_INSN_FUNCTION_USAGE (insn)
302610d565efSmrg 	    = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
302710d565efSmrg       }
302810d565efSmrg }
302910d565efSmrg 
303010d565efSmrg /* Go through all virtual stack slots of a function and mark them as
303110d565efSmrg    shared.  We never replace the DECL_RTLs themselves with a copy,
303210d565efSmrg    but expressions mentioned in a DECL_RTL cannot be shared with
303310d565efSmrg    expressions in the instruction stream.
303410d565efSmrg 
303510d565efSmrg    Note that reload may convert pseudo registers into memories in-place.
303610d565efSmrg    Pseudo registers are always shared, but MEMs never are.  Thus if we
303710d565efSmrg    reset the used flags on MEMs in the instruction stream, we must set
303810d565efSmrg    them again on MEMs that appear in DECL_RTLs.  */
303910d565efSmrg 
304010d565efSmrg static void
304110d565efSmrg set_used_decls (tree blk)
304210d565efSmrg {
304310d565efSmrg   tree t;
304410d565efSmrg 
304510d565efSmrg   /* Mark decls.  */
304610d565efSmrg   for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
304710d565efSmrg     if (DECL_RTL_SET_P (t))
304810d565efSmrg       set_used_flags (DECL_RTL (t));
304910d565efSmrg 
305010d565efSmrg   /* Now process sub-blocks.  */
305110d565efSmrg   for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
305210d565efSmrg     set_used_decls (t);
305310d565efSmrg }
305410d565efSmrg 
305510d565efSmrg /* Mark ORIG as in use, and return a copy of it if it was already in use.
305610d565efSmrg    Recursively does the same for subexpressions.  Uses
305710d565efSmrg    copy_rtx_if_shared_1 to reduce stack space.  */
305810d565efSmrg 
305910d565efSmrg rtx
306010d565efSmrg copy_rtx_if_shared (rtx orig)
306110d565efSmrg {
306210d565efSmrg   copy_rtx_if_shared_1 (&orig);
306310d565efSmrg   return orig;
306410d565efSmrg }
306510d565efSmrg 
306610d565efSmrg /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
306710d565efSmrg    use.  Recursively does the same for subexpressions.  */
306810d565efSmrg 
306910d565efSmrg static void
307010d565efSmrg copy_rtx_if_shared_1 (rtx *orig1)
307110d565efSmrg {
307210d565efSmrg   rtx x;
307310d565efSmrg   int i;
307410d565efSmrg   enum rtx_code code;
307510d565efSmrg   rtx *last_ptr;
307610d565efSmrg   const char *format_ptr;
307710d565efSmrg   int copied = 0;
307810d565efSmrg   int length;
307910d565efSmrg 
308010d565efSmrg   /* Repeat is used to turn tail-recursion into iteration.  */
308110d565efSmrg repeat:
308210d565efSmrg   x = *orig1;
308310d565efSmrg 
308410d565efSmrg   if (x == 0)
308510d565efSmrg     return;
308610d565efSmrg 
308710d565efSmrg   code = GET_CODE (x);
308810d565efSmrg 
308910d565efSmrg   /* These types may be freely shared.  */
309010d565efSmrg 
309110d565efSmrg   switch (code)
309210d565efSmrg     {
309310d565efSmrg     case REG:
309410d565efSmrg     case DEBUG_EXPR:
309510d565efSmrg     case VALUE:
309610d565efSmrg     CASE_CONST_ANY:
309710d565efSmrg     case SYMBOL_REF:
309810d565efSmrg     case LABEL_REF:
309910d565efSmrg     case CODE_LABEL:
310010d565efSmrg     case PC:
310110d565efSmrg     case CC0:
310210d565efSmrg     case RETURN:
310310d565efSmrg     case SIMPLE_RETURN:
310410d565efSmrg     case SCRATCH:
310510d565efSmrg       /* SCRATCH rtxes must be shared because they represent distinct values.  */
310610d565efSmrg       return;
310710d565efSmrg     case CLOBBER:
310810d565efSmrg       /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
310910d565efSmrg          clobbers or clobbers of hard registers that originated as pseudos.
311010d565efSmrg          This is needed to allow safe register renaming.  */
311110d565efSmrg       if (REG_P (XEXP (x, 0))
311210d565efSmrg 	  && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
311310d565efSmrg 	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
311410d565efSmrg 	return;
311510d565efSmrg       break;
311610d565efSmrg 
311710d565efSmrg     case CONST:
311810d565efSmrg       if (shared_const_p (x))
311910d565efSmrg 	return;
312010d565efSmrg       break;
312110d565efSmrg 
312210d565efSmrg     case DEBUG_INSN:
312310d565efSmrg     case INSN:
312410d565efSmrg     case JUMP_INSN:
312510d565efSmrg     case CALL_INSN:
312610d565efSmrg     case NOTE:
312710d565efSmrg     case BARRIER:
312810d565efSmrg       /* The chain of insns is not being copied.  */
312910d565efSmrg       return;
313010d565efSmrg 
313110d565efSmrg     default:
313210d565efSmrg       break;
313310d565efSmrg     }
313410d565efSmrg 
313510d565efSmrg   /* This rtx may not be shared.  If it has already been seen,
313610d565efSmrg      replace it with a copy of itself.  */
313710d565efSmrg 
313810d565efSmrg   if (RTX_FLAG (x, used))
313910d565efSmrg     {
314010d565efSmrg       x = shallow_copy_rtx (x);
314110d565efSmrg       copied = 1;
314210d565efSmrg     }
314310d565efSmrg   RTX_FLAG (x, used) = 1;
314410d565efSmrg 
314510d565efSmrg   /* Now scan the subexpressions recursively.
314610d565efSmrg      We can store any replaced subexpressions directly into X
314710d565efSmrg      since we know X is not shared!  Any vectors in X
314810d565efSmrg      must be copied if X was copied.  */
314910d565efSmrg 
315010d565efSmrg   format_ptr = GET_RTX_FORMAT (code);
315110d565efSmrg   length = GET_RTX_LENGTH (code);
315210d565efSmrg   last_ptr = NULL;
315310d565efSmrg 
315410d565efSmrg   for (i = 0; i < length; i++)
315510d565efSmrg     {
315610d565efSmrg       switch (*format_ptr++)
315710d565efSmrg 	{
315810d565efSmrg 	case 'e':
315910d565efSmrg           if (last_ptr)
316010d565efSmrg             copy_rtx_if_shared_1 (last_ptr);
316110d565efSmrg 	  last_ptr = &XEXP (x, i);
316210d565efSmrg 	  break;
316310d565efSmrg 
316410d565efSmrg 	case 'E':
316510d565efSmrg 	  if (XVEC (x, i) != NULL)
316610d565efSmrg 	    {
316710d565efSmrg 	      int j;
316810d565efSmrg 	      int len = XVECLEN (x, i);
316910d565efSmrg 
317010d565efSmrg               /* Copy the vector iff we copied the rtx and the length
317110d565efSmrg 		 is nonzero.  */
317210d565efSmrg 	      if (copied && len > 0)
317310d565efSmrg 		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
317410d565efSmrg 
317510d565efSmrg               /* Call recursively on all inside the vector.  */
317610d565efSmrg 	      for (j = 0; j < len; j++)
317710d565efSmrg                 {
317810d565efSmrg 		  if (last_ptr)
317910d565efSmrg 		    copy_rtx_if_shared_1 (last_ptr);
318010d565efSmrg                   last_ptr = &XVECEXP (x, i, j);
318110d565efSmrg                 }
318210d565efSmrg 	    }
318310d565efSmrg 	  break;
318410d565efSmrg 	}
318510d565efSmrg     }
318610d565efSmrg   *orig1 = x;
318710d565efSmrg   if (last_ptr)
318810d565efSmrg     {
318910d565efSmrg       orig1 = last_ptr;
319010d565efSmrg       goto repeat;
319110d565efSmrg     }
319210d565efSmrg   return;
319310d565efSmrg }
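
/* Illustrative example: if the address (plus (reg 100) (const_int 4))
   is reachable from the patterns of two different insns, the first
   walk over it just sets its used flag; the second walk sees the
   flag, makes a shallow copy and stores it back through *ORIG1, so
   each insn ends up with its own PLUS node.  The REG itself is on the
   freely-shared list above and stays shared.  */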
319410d565efSmrg 
319510d565efSmrg /* Set the USED bit in X and its non-shareable subparts to FLAG.  */
319610d565efSmrg 
319710d565efSmrg static void
319810d565efSmrg mark_used_flags (rtx x, int flag)
319910d565efSmrg {
320010d565efSmrg   int i, j;
320110d565efSmrg   enum rtx_code code;
320210d565efSmrg   const char *format_ptr;
320310d565efSmrg   int length;
320410d565efSmrg 
320510d565efSmrg   /* Repeat is used to turn tail-recursion into iteration.  */
320610d565efSmrg repeat:
320710d565efSmrg   if (x == 0)
320810d565efSmrg     return;
320910d565efSmrg 
321010d565efSmrg   code = GET_CODE (x);
321110d565efSmrg 
321210d565efSmrg   /* These types may be freely shared so we needn't do any resetting
321310d565efSmrg      for them.  */
321410d565efSmrg 
321510d565efSmrg   switch (code)
321610d565efSmrg     {
321710d565efSmrg     case REG:
321810d565efSmrg     case DEBUG_EXPR:
321910d565efSmrg     case VALUE:
322010d565efSmrg     CASE_CONST_ANY:
322110d565efSmrg     case SYMBOL_REF:
322210d565efSmrg     case CODE_LABEL:
322310d565efSmrg     case PC:
322410d565efSmrg     case CC0:
322510d565efSmrg     case RETURN:
322610d565efSmrg     case SIMPLE_RETURN:
322710d565efSmrg       return;
322810d565efSmrg 
322910d565efSmrg     case DEBUG_INSN:
323010d565efSmrg     case INSN:
323110d565efSmrg     case JUMP_INSN:
323210d565efSmrg     case CALL_INSN:
323310d565efSmrg     case NOTE:
323410d565efSmrg     case LABEL_REF:
323510d565efSmrg     case BARRIER:
323610d565efSmrg       /* The chain of insns is not being copied.  */
323710d565efSmrg       return;
323810d565efSmrg 
323910d565efSmrg     default:
324010d565efSmrg       break;
324110d565efSmrg     }
324210d565efSmrg 
324310d565efSmrg   RTX_FLAG (x, used) = flag;
324410d565efSmrg 
324510d565efSmrg   format_ptr = GET_RTX_FORMAT (code);
324610d565efSmrg   length = GET_RTX_LENGTH (code);
324710d565efSmrg 
324810d565efSmrg   for (i = 0; i < length; i++)
324910d565efSmrg     {
325010d565efSmrg       switch (*format_ptr++)
325110d565efSmrg 	{
325210d565efSmrg 	case 'e':
325310d565efSmrg           if (i == length-1)
325410d565efSmrg             {
325510d565efSmrg               x = XEXP (x, i);
325610d565efSmrg 	      goto repeat;
325710d565efSmrg             }
325810d565efSmrg 	  mark_used_flags (XEXP (x, i), flag);
325910d565efSmrg 	  break;
326010d565efSmrg 
326110d565efSmrg 	case 'E':
326210d565efSmrg 	  for (j = 0; j < XVECLEN (x, i); j++)
326310d565efSmrg 	    mark_used_flags (XVECEXP (x, i, j), flag);
326410d565efSmrg 	  break;
326510d565efSmrg 	}
326610d565efSmrg     }
326710d565efSmrg }
326810d565efSmrg 
326910d565efSmrg /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
327010d565efSmrg    to look for shared sub-parts.  */
327110d565efSmrg 
327210d565efSmrg void
327310d565efSmrg reset_used_flags (rtx x)
327410d565efSmrg {
327510d565efSmrg   mark_used_flags (x, 0);
327610d565efSmrg }
327710d565efSmrg 
327810d565efSmrg /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
327910d565efSmrg    to look for shared sub-parts.  */
328010d565efSmrg 
328110d565efSmrg void
328210d565efSmrg set_used_flags (rtx x)
328310d565efSmrg {
328410d565efSmrg   mark_used_flags (x, 1);
328510d565efSmrg }
328610d565efSmrg 
328710d565efSmrg /* Copy X if necessary so that it won't be altered by changes in OTHER.
328810d565efSmrg    Return X or the rtx for the pseudo reg the value of X was copied into.
328910d565efSmrg    OTHER must be valid as a SET_DEST.  */
329010d565efSmrg 
329110d565efSmrg rtx
329210d565efSmrg make_safe_from (rtx x, rtx other)
329310d565efSmrg {
329410d565efSmrg   while (1)
329510d565efSmrg     switch (GET_CODE (other))
329610d565efSmrg       {
329710d565efSmrg       case SUBREG:
329810d565efSmrg 	other = SUBREG_REG (other);
329910d565efSmrg 	break;
330010d565efSmrg       case STRICT_LOW_PART:
330110d565efSmrg       case SIGN_EXTEND:
330210d565efSmrg       case ZERO_EXTEND:
330310d565efSmrg 	other = XEXP (other, 0);
330410d565efSmrg 	break;
330510d565efSmrg       default:
330610d565efSmrg 	goto done;
330710d565efSmrg       }
330810d565efSmrg  done:
330910d565efSmrg   if ((MEM_P (other)
331010d565efSmrg        && ! CONSTANT_P (x)
331110d565efSmrg        && !REG_P (x)
331210d565efSmrg        && GET_CODE (x) != SUBREG)
331310d565efSmrg       || (REG_P (other)
331410d565efSmrg 	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
331510d565efSmrg 	      || reg_mentioned_p (other, x))))
331610d565efSmrg     {
331710d565efSmrg       rtx temp = gen_reg_rtx (GET_MODE (x));
331810d565efSmrg       emit_move_insn (temp, x);
331910d565efSmrg       return temp;
332010d565efSmrg     }
332110d565efSmrg   return x;
332210d565efSmrg }
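
/* Illustrative example: with OTHER == (mem:SI (reg 200)) and
   X == (plus:SI (mem:SI (reg 200)) (const_int 1)), storing into OTHER
   could change X, so

     x = make_safe_from (x, other);

   emits a move of X into a fresh pseudo and returns that pseudo.
   A CONST_INT or an unrelated REG would be returned unchanged.  */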
332310d565efSmrg 
332410d565efSmrg /* Emission of insns (adding them to the doubly-linked list).  */
332510d565efSmrg 
332610d565efSmrg /* Return the last insn emitted, even if it is in a sequence now pushed.  */
332710d565efSmrg 
332810d565efSmrg rtx_insn *
332910d565efSmrg get_last_insn_anywhere (void)
333010d565efSmrg {
333110d565efSmrg   struct sequence_stack *seq;
333210d565efSmrg   for (seq = get_current_sequence (); seq; seq = seq->next)
333310d565efSmrg     if (seq->last != 0)
333410d565efSmrg       return seq->last;
333510d565efSmrg   return 0;
333610d565efSmrg }
333710d565efSmrg 
333810d565efSmrg /* Return the first nonnote insn emitted in current sequence or current
333910d565efSmrg    function.  This routine looks inside SEQUENCEs.  */
334010d565efSmrg 
334110d565efSmrg rtx_insn *
334210d565efSmrg get_first_nonnote_insn (void)
334310d565efSmrg {
334410d565efSmrg   rtx_insn *insn = get_insns ();
334510d565efSmrg 
334610d565efSmrg   if (insn)
334710d565efSmrg     {
334810d565efSmrg       if (NOTE_P (insn))
334910d565efSmrg 	for (insn = next_insn (insn);
335010d565efSmrg 	     insn && NOTE_P (insn);
335110d565efSmrg 	     insn = next_insn (insn))
335210d565efSmrg 	  continue;
335310d565efSmrg       else
335410d565efSmrg 	{
335510d565efSmrg 	  if (NONJUMP_INSN_P (insn)
335610d565efSmrg 	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
335710d565efSmrg 	    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
335810d565efSmrg 	}
335910d565efSmrg     }
336010d565efSmrg 
336110d565efSmrg   return insn;
336210d565efSmrg }
336310d565efSmrg 
336410d565efSmrg /* Return the last nonnote insn emitted in current sequence or current
336510d565efSmrg    function.  This routine looks inside SEQUENCEs.  */
336610d565efSmrg 
336710d565efSmrg rtx_insn *
336810d565efSmrg get_last_nonnote_insn (void)
336910d565efSmrg {
337010d565efSmrg   rtx_insn *insn = get_last_insn ();
337110d565efSmrg 
337210d565efSmrg   if (insn)
337310d565efSmrg     {
337410d565efSmrg       if (NOTE_P (insn))
337510d565efSmrg 	for (insn = previous_insn (insn);
337610d565efSmrg 	     insn && NOTE_P (insn);
337710d565efSmrg 	     insn = previous_insn (insn))
337810d565efSmrg 	  continue;
337910d565efSmrg       else
338010d565efSmrg 	{
338110d565efSmrg 	  if (NONJUMP_INSN_P (insn))
338210d565efSmrg 	    if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
338310d565efSmrg 	      insn = seq->insn (seq->len () - 1);
338410d565efSmrg 	}
338510d565efSmrg     }
338610d565efSmrg 
338710d565efSmrg   return insn;
338810d565efSmrg }
338910d565efSmrg 
339010d565efSmrg /* Return the number of actual (non-debug) insns emitted in this
339110d565efSmrg    function.  */
339210d565efSmrg 
339310d565efSmrg int
339410d565efSmrg get_max_insn_count (void)
339510d565efSmrg {
339610d565efSmrg   int n = cur_insn_uid;
339710d565efSmrg 
339810d565efSmrg   /* The table size must be stable across -g, to avoid codegen
339910d565efSmrg      differences due to debug insns, and not be affected by
340010d565efSmrg      --param min-nondebug-insn-uid, to avoid excessive table size and
340110d565efSmrg      to simplify debugging of -fcompare-debug failures.  */
3402*ec02198aSmrg   if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
340310d565efSmrg     n -= cur_debug_insn_uid;
340410d565efSmrg   else
3405*ec02198aSmrg     n -= param_min_nondebug_insn_uid;
340610d565efSmrg 
340710d565efSmrg   return n;
340810d565efSmrg }
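
/* Worked example: with cur_insn_uid == 105, cur_debug_insn_uid == 30
   and --param min-nondebug-insn-uid=20, the result is 105 - 30 == 75;
   if no debug insn UIDs were handed out past the reserved range
   (cur_debug_insn_uid <= 20), it is 105 - 20 == 85.  Debug insn UIDs
   are subtracted out so that -g does not change the count.  */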
340910d565efSmrg 
341010d565efSmrg 
341110d565efSmrg /* Return the next insn.  If it is a SEQUENCE, return the first insn
341210d565efSmrg    of the sequence.  */
341310d565efSmrg 
341410d565efSmrg rtx_insn *
341510d565efSmrg next_insn (rtx_insn *insn)
341610d565efSmrg {
341710d565efSmrg   if (insn)
341810d565efSmrg     {
341910d565efSmrg       insn = NEXT_INSN (insn);
342010d565efSmrg       if (insn && NONJUMP_INSN_P (insn)
342110d565efSmrg 	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
342210d565efSmrg 	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
342310d565efSmrg     }
342410d565efSmrg 
342510d565efSmrg   return insn;
342610d565efSmrg }
342710d565efSmrg 
342810d565efSmrg /* Return the previous insn.  If it is a SEQUENCE, return the last insn
342910d565efSmrg    of the sequence.  */
343010d565efSmrg 
343110d565efSmrg rtx_insn *
343210d565efSmrg previous_insn (rtx_insn *insn)
343310d565efSmrg {
343410d565efSmrg   if (insn)
343510d565efSmrg     {
343610d565efSmrg       insn = PREV_INSN (insn);
343710d565efSmrg       if (insn && NONJUMP_INSN_P (insn))
343810d565efSmrg 	if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
343910d565efSmrg 	  insn = seq->insn (seq->len () - 1);
344010d565efSmrg     }
344110d565efSmrg 
344210d565efSmrg   return insn;
344310d565efSmrg }
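
/* Illustrative example: if INSN is followed by a delay-slot SEQUENCE
   holding insns i1 i2 i3, plain NEXT_INSN (INSN) returns the insn
   wrapping the whole SEQUENCE, whereas next_insn (INSN) descends into
   it and returns i1.  previous_insn is the mirror image and returns
   i3 when stepping backwards over the same SEQUENCE.  */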
344410d565efSmrg 
344510d565efSmrg /* Return the next insn after INSN that is not a NOTE.  This routine does not
344610d565efSmrg    look inside SEQUENCEs.  */
344710d565efSmrg 
344810d565efSmrg rtx_insn *
344910d565efSmrg next_nonnote_insn (rtx_insn *insn)
345010d565efSmrg {
345110d565efSmrg   while (insn)
345210d565efSmrg     {
345310d565efSmrg       insn = NEXT_INSN (insn);
345410d565efSmrg       if (insn == 0 || !NOTE_P (insn))
345510d565efSmrg 	break;
345610d565efSmrg     }
345710d565efSmrg 
345810d565efSmrg   return insn;
345910d565efSmrg }
346010d565efSmrg 
3461c7a68eb7Smrg /* Return the next insn after INSN that is not a DEBUG_INSN.  This
3462c7a68eb7Smrg    routine does not look inside SEQUENCEs.  */
346310d565efSmrg 
346410d565efSmrg rtx_insn *
3465c7a68eb7Smrg next_nondebug_insn (rtx_insn *insn)
346610d565efSmrg {
346710d565efSmrg   while (insn)
346810d565efSmrg     {
346910d565efSmrg       insn = NEXT_INSN (insn);
3470c7a68eb7Smrg       if (insn == 0 || !DEBUG_INSN_P (insn))
347110d565efSmrg 	break;
347210d565efSmrg     }
347310d565efSmrg 
347410d565efSmrg   return insn;
347510d565efSmrg }
347610d565efSmrg 
347710d565efSmrg /* Return the previous insn before INSN that is not a NOTE.  This routine does
347810d565efSmrg    not look inside SEQUENCEs.  */
347910d565efSmrg 
348010d565efSmrg rtx_insn *
348110d565efSmrg prev_nonnote_insn (rtx_insn *insn)
348210d565efSmrg {
348310d565efSmrg   while (insn)
348410d565efSmrg     {
348510d565efSmrg       insn = PREV_INSN (insn);
348610d565efSmrg       if (insn == 0 || !NOTE_P (insn))
348710d565efSmrg 	break;
348810d565efSmrg     }
348910d565efSmrg 
349010d565efSmrg   return insn;
349110d565efSmrg }
349210d565efSmrg 
349310d565efSmrg /* Return the previous insn before INSN that is not a DEBUG_INSN.
349410d565efSmrg    This routine does not look inside SEQUENCEs.  */
349510d565efSmrg 
349610d565efSmrg rtx_insn *
349710d565efSmrg prev_nondebug_insn (rtx_insn *insn)
349810d565efSmrg {
349910d565efSmrg   while (insn)
350010d565efSmrg     {
350110d565efSmrg       insn = PREV_INSN (insn);
350210d565efSmrg       if (insn == 0 || !DEBUG_INSN_P (insn))
350310d565efSmrg 	break;
350410d565efSmrg     }
350510d565efSmrg 
350610d565efSmrg   return insn;
350710d565efSmrg }
350810d565efSmrg 
350910d565efSmrg /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
351010d565efSmrg    This routine does not look inside SEQUENCEs.  */
351110d565efSmrg 
351210d565efSmrg rtx_insn *
351310d565efSmrg next_nonnote_nondebug_insn (rtx_insn *insn)
351410d565efSmrg {
351510d565efSmrg   while (insn)
351610d565efSmrg     {
351710d565efSmrg       insn = NEXT_INSN (insn);
351810d565efSmrg       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
351910d565efSmrg 	break;
352010d565efSmrg     }
352110d565efSmrg 
352210d565efSmrg   return insn;
352310d565efSmrg }
352410d565efSmrg 
3525c7a68eb7Smrg /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
3526c7a68eb7Smrg    but stop the search before we enter another basic block.  This
3527c7a68eb7Smrg    routine does not look inside SEQUENCEs.  */
3528c7a68eb7Smrg 
3529c7a68eb7Smrg rtx_insn *
3530c7a68eb7Smrg next_nonnote_nondebug_insn_bb (rtx_insn *insn)
3531c7a68eb7Smrg {
3532c7a68eb7Smrg   while (insn)
3533c7a68eb7Smrg     {
3534c7a68eb7Smrg       insn = NEXT_INSN (insn);
3535c7a68eb7Smrg       if (insn == 0)
3536c7a68eb7Smrg 	break;
3537c7a68eb7Smrg       if (DEBUG_INSN_P (insn))
3538c7a68eb7Smrg 	continue;
3539c7a68eb7Smrg       if (!NOTE_P (insn))
3540c7a68eb7Smrg 	break;
3541c7a68eb7Smrg       if (NOTE_INSN_BASIC_BLOCK_P (insn))
3542c7a68eb7Smrg 	return NULL;
3543c7a68eb7Smrg     }
3544c7a68eb7Smrg 
3545c7a68eb7Smrg   return insn;
3546c7a68eb7Smrg }
3547c7a68eb7Smrg 
354810d565efSmrg /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
354910d565efSmrg    This routine does not look inside SEQUENCEs.  */
355010d565efSmrg 
355110d565efSmrg rtx_insn *
355210d565efSmrg prev_nonnote_nondebug_insn (rtx_insn *insn)
355310d565efSmrg {
355410d565efSmrg   while (insn)
355510d565efSmrg     {
355610d565efSmrg       insn = PREV_INSN (insn);
355710d565efSmrg       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
355810d565efSmrg 	break;
355910d565efSmrg     }
356010d565efSmrg 
356110d565efSmrg   return insn;
356210d565efSmrg }
356310d565efSmrg 
3564c7a68eb7Smrg /* Return the previous insn before INSN that is not a NOTE nor
3565c7a68eb7Smrg    DEBUG_INSN, but stop the search before we enter another basic
3566c7a68eb7Smrg    block.  This routine does not look inside SEQUENCEs.  */
3567c7a68eb7Smrg 
3568c7a68eb7Smrg rtx_insn *
3569c7a68eb7Smrg prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
3570c7a68eb7Smrg {
3571c7a68eb7Smrg   while (insn)
3572c7a68eb7Smrg     {
3573c7a68eb7Smrg       insn = PREV_INSN (insn);
3574c7a68eb7Smrg       if (insn == 0)
3575c7a68eb7Smrg 	break;
3576c7a68eb7Smrg       if (DEBUG_INSN_P (insn))
3577c7a68eb7Smrg 	continue;
3578c7a68eb7Smrg       if (!NOTE_P (insn))
3579c7a68eb7Smrg 	break;
3580c7a68eb7Smrg       if (NOTE_INSN_BASIC_BLOCK_P (insn))
3581c7a68eb7Smrg 	return NULL;
3582c7a68eb7Smrg     }
3583c7a68eb7Smrg 
3584c7a68eb7Smrg   return insn;
3585c7a68eb7Smrg }
3586c7a68eb7Smrg 
3587c7a68eb7Smrg /* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
358810d565efSmrg    or 0, if there is none.  This routine does not look inside
358910d565efSmrg    SEQUENCEs.  */
359010d565efSmrg 
359110d565efSmrg rtx_insn *
35920fc04c29Smrg next_real_insn (rtx_insn *insn)
359310d565efSmrg {
359410d565efSmrg   while (insn)
359510d565efSmrg     {
359610d565efSmrg       insn = NEXT_INSN (insn);
359710d565efSmrg       if (insn == 0 || INSN_P (insn))
359810d565efSmrg 	break;
359910d565efSmrg     }
360010d565efSmrg 
360110d565efSmrg   return insn;
360210d565efSmrg }
360310d565efSmrg 
3604c7a68eb7Smrg /* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
360510d565efSmrg    or 0, if there is none.  This routine does not look inside
360610d565efSmrg    SEQUENCEs.  */
360710d565efSmrg 
360810d565efSmrg rtx_insn *
360910d565efSmrg prev_real_insn (rtx_insn *insn)
361010d565efSmrg {
361110d565efSmrg   while (insn)
361210d565efSmrg     {
361310d565efSmrg       insn = PREV_INSN (insn);
361410d565efSmrg       if (insn == 0 || INSN_P (insn))
361510d565efSmrg 	break;
361610d565efSmrg     }
361710d565efSmrg 
361810d565efSmrg   return insn;
361910d565efSmrg }
362010d565efSmrg 
3621c7a68eb7Smrg /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3622c7a68eb7Smrg    or 0, if there is none.  This routine does not look inside
3623c7a68eb7Smrg    SEQUENCEs.  */
3624c7a68eb7Smrg 
3625c7a68eb7Smrg rtx_insn *
3626c7a68eb7Smrg next_real_nondebug_insn (rtx uncast_insn)
3627c7a68eb7Smrg {
3628c7a68eb7Smrg   rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3629c7a68eb7Smrg 
3630c7a68eb7Smrg   while (insn)
3631c7a68eb7Smrg     {
3632c7a68eb7Smrg       insn = NEXT_INSN (insn);
3633c7a68eb7Smrg       if (insn == 0 || NONDEBUG_INSN_P (insn))
3634c7a68eb7Smrg 	break;
3635c7a68eb7Smrg     }
3636c7a68eb7Smrg 
3637c7a68eb7Smrg   return insn;
3638c7a68eb7Smrg }
3639c7a68eb7Smrg 
3640c7a68eb7Smrg /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3641c7a68eb7Smrg    or 0, if there is none.  This routine does not look inside
3642c7a68eb7Smrg    SEQUENCEs.  */
3643c7a68eb7Smrg 
3644c7a68eb7Smrg rtx_insn *
3645c7a68eb7Smrg prev_real_nondebug_insn (rtx_insn *insn)
3646c7a68eb7Smrg {
3647c7a68eb7Smrg   while (insn)
3648c7a68eb7Smrg     {
3649c7a68eb7Smrg       insn = PREV_INSN (insn);
3650c7a68eb7Smrg       if (insn == 0 || NONDEBUG_INSN_P (insn))
3651c7a68eb7Smrg 	break;
3652c7a68eb7Smrg     }
3653c7a68eb7Smrg 
3654c7a68eb7Smrg   return insn;
3655c7a68eb7Smrg }
3656c7a68eb7Smrg 
365710d565efSmrg /* Return the last CALL_INSN in the current list, or 0 if there is none.
365810d565efSmrg    This routine does not look inside SEQUENCEs.  */
365910d565efSmrg 
366010d565efSmrg rtx_call_insn *
366110d565efSmrg last_call_insn (void)
366210d565efSmrg {
366310d565efSmrg   rtx_insn *insn;
366410d565efSmrg 
366510d565efSmrg   for (insn = get_last_insn ();
366610d565efSmrg        insn && !CALL_P (insn);
366710d565efSmrg        insn = PREV_INSN (insn))
366810d565efSmrg     ;
366910d565efSmrg 
367010d565efSmrg   return safe_as_a <rtx_call_insn *> (insn);
367110d565efSmrg }
367210d565efSmrg 
367310d565efSmrg /* Return nonzero if INSN really does something; used by the active
367410d565efSmrg    insn walkers below.  After reload, standalone USE and CLOBBER
367510d565efSmrg    insns do not count as active.  */
367610d565efSmrg 
367710d565efSmrg int
367810d565efSmrg active_insn_p (const rtx_insn *insn)
367910d565efSmrg {
368010d565efSmrg   return (CALL_P (insn) || JUMP_P (insn)
368110d565efSmrg 	  || JUMP_TABLE_DATA_P (insn) /* FIXME */
368210d565efSmrg 	  || (NONJUMP_INSN_P (insn)
368310d565efSmrg 	      && (! reload_completed
368410d565efSmrg 		  || (GET_CODE (PATTERN (insn)) != USE
368510d565efSmrg 		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
368610d565efSmrg }
368710d565efSmrg 
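/* Return the next active insn after INSN, or 0 if there is none.  This
   routine does not look inside SEQUENCEs.  */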
368810d565efSmrg rtx_insn *
368910d565efSmrg next_active_insn (rtx_insn *insn)
369010d565efSmrg {
369110d565efSmrg   while (insn)
369210d565efSmrg     {
369310d565efSmrg       insn = NEXT_INSN (insn);
369410d565efSmrg       if (insn == 0 || active_insn_p (insn))
369510d565efSmrg 	break;
369610d565efSmrg     }
369710d565efSmrg 
369810d565efSmrg   return insn;
369910d565efSmrg }
370010d565efSmrg 
370110d565efSmrg /* Find the last insn before INSN that really does something.  This routine
370210d565efSmrg    does not look inside SEQUENCEs.  After reload this also skips over
370310d565efSmrg    standalone USE and CLOBBER insns.  */
370410d565efSmrg 
370510d565efSmrg rtx_insn *
370610d565efSmrg prev_active_insn (rtx_insn *insn)
370710d565efSmrg {
370810d565efSmrg   while (insn)
370910d565efSmrg     {
371010d565efSmrg       insn = PREV_INSN (insn);
371110d565efSmrg       if (insn == 0 || active_insn_p (insn))
371210d565efSmrg 	break;
371310d565efSmrg     }
371410d565efSmrg 
371510d565efSmrg   return insn;
371610d565efSmrg }
371710d565efSmrg 
371810d565efSmrg /* Return the next insn that uses CC0 after INSN, which is assumed to
371910d565efSmrg    set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
372010d565efSmrg    applied to the result of this function should yield INSN).
372110d565efSmrg 
372210d565efSmrg    Normally, this is simply the next insn.  However, if a REG_CC_USER note
372310d565efSmrg    is present, it contains the insn that uses CC0.
372410d565efSmrg 
372510d565efSmrg    Return 0 if we can't find the insn.  */
372610d565efSmrg 
372710d565efSmrg rtx_insn *
372810d565efSmrg next_cc0_user (rtx_insn *insn)
372910d565efSmrg {
373010d565efSmrg   rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
373110d565efSmrg 
373210d565efSmrg   if (note)
373310d565efSmrg     return safe_as_a <rtx_insn *> (XEXP (note, 0));
373410d565efSmrg 
373510d565efSmrg   insn = next_nonnote_insn (insn);
373610d565efSmrg   if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
373710d565efSmrg     insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
373810d565efSmrg 
373910d565efSmrg   if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
374010d565efSmrg     return insn;
374110d565efSmrg 
374210d565efSmrg   return 0;
374310d565efSmrg }
374410d565efSmrg 
374510d565efSmrg /* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
374610d565efSmrg    note, it is the previous insn.  */
374710d565efSmrg 
374810d565efSmrg rtx_insn *
374910d565efSmrg prev_cc0_setter (rtx_insn *insn)
375010d565efSmrg {
375110d565efSmrg   rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
375210d565efSmrg 
375310d565efSmrg   if (note)
375410d565efSmrg     return safe_as_a <rtx_insn *> (XEXP (note, 0));
375510d565efSmrg 
375610d565efSmrg   insn = prev_nonnote_insn (insn);
375710d565efSmrg   gcc_assert (sets_cc0_p (PATTERN (insn)));
375810d565efSmrg 
375910d565efSmrg   return insn;
376010d565efSmrg }
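
/* Illustrative note (a sketch, not from this file): on a cc0 target a
   comparison and its branch form a setter/user pair, e.g.

       (set (cc0) (compare (reg X) (const_int 0)))      ; setter
       (set (pc) (if_then_else (eq (cc0) ...) ...))     ; user

   so next_cc0_user applied to the setter yields the branch, and
   prev_cc0_setter applied to the branch yields the comparison, either
   by adjacency or through the REG_CC_SETTER/REG_CC_USER notes.  */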
376110d565efSmrg 
376210d565efSmrg /* Return true if X contains an RTX_AUTOINC class rtx whose operand matches REG.  */
376310d565efSmrg 
376410d565efSmrg static int
376510d565efSmrg find_auto_inc (const_rtx x, const_rtx reg)
376610d565efSmrg {
376710d565efSmrg   subrtx_iterator::array_type array;
376810d565efSmrg   FOR_EACH_SUBRTX (iter, array, x, NONCONST)
376910d565efSmrg     {
377010d565efSmrg       const_rtx x = *iter;
377110d565efSmrg       if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
377210d565efSmrg 	  && rtx_equal_p (reg, XEXP (x, 0)))
377310d565efSmrg 	return true;
377410d565efSmrg     }
377510d565efSmrg   return false;
377610d565efSmrg }
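
/* Example (for exposition only): with X the pattern

       (set (reg A) (mem (post_inc (reg B))))

   find_auto_inc (x, b) returns true, because POST_INC is of class
   RTX_AUTOINC and its operand 0 is REG B.  */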
377710d565efSmrg 
377810d565efSmrg /* Increment the label uses for all labels present in X.  */
377910d565efSmrg 
378010d565efSmrg static void
378110d565efSmrg mark_label_nuses (rtx x)
378210d565efSmrg {
378310d565efSmrg   enum rtx_code code;
378410d565efSmrg   int i, j;
378510d565efSmrg   const char *fmt;
378610d565efSmrg 
378710d565efSmrg   code = GET_CODE (x);
378810d565efSmrg   if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
378910d565efSmrg     LABEL_NUSES (label_ref_label (x))++;
379010d565efSmrg 
379110d565efSmrg   fmt = GET_RTX_FORMAT (code);
379210d565efSmrg   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
379310d565efSmrg     {
379410d565efSmrg       if (fmt[i] == 'e')
379510d565efSmrg 	mark_label_nuses (XEXP (x, i));
379610d565efSmrg       else if (fmt[i] == 'E')
379710d565efSmrg 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
379810d565efSmrg 	  mark_label_nuses (XVECEXP (x, i, j));
379910d565efSmrg     }
380010d565efSmrg }
380110d565efSmrg 
380210d565efSmrg 
380310d565efSmrg /* Try splitting insns that can be split for better scheduling.
380410d565efSmrg    PAT is the pattern which might split.
380510d565efSmrg    TRIAL is the insn providing PAT.
380610d565efSmrg    LAST is nonzero if we should return the last insn of the sequence produced.
380710d565efSmrg 
380810d565efSmrg    If this routine succeeds in splitting, it returns the first or last
380910d565efSmrg    replacement insn depending on the value of LAST.  Otherwise, it
381010d565efSmrg    returns TRIAL.  If the insn to be returned can be split, it will be.  */
381110d565efSmrg 
381210d565efSmrg rtx_insn *
381310d565efSmrg try_split (rtx pat, rtx_insn *trial, int last)
381410d565efSmrg {
381510d565efSmrg   rtx_insn *before, *after;
381610d565efSmrg   rtx note;
381710d565efSmrg   rtx_insn *seq, *tem;
3818c7a68eb7Smrg   profile_probability probability;
381910d565efSmrg   rtx_insn *insn_last, *insn;
382010d565efSmrg   int njumps = 0;
382110d565efSmrg   rtx_insn *call_insn = NULL;
382210d565efSmrg 
382310d565efSmrg   /* We're not good at redistributing frame information.  */
382410d565efSmrg   if (RTX_FRAME_RELATED_P (trial))
382510d565efSmrg     return trial;
382610d565efSmrg 
382710d565efSmrg   if (any_condjump_p (trial)
382810d565efSmrg       && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3829c7a68eb7Smrg     split_branch_probability
3830c7a68eb7Smrg       = profile_probability::from_reg_br_prob_note (XINT (note, 0));
3831c7a68eb7Smrg   else
3832c7a68eb7Smrg     split_branch_probability = profile_probability::uninitialized ();
3833c7a68eb7Smrg 
383410d565efSmrg   probability = split_branch_probability;
383510d565efSmrg 
383610d565efSmrg   seq = split_insns (pat, trial);
383710d565efSmrg 
3838c7a68eb7Smrg   split_branch_probability = profile_probability::uninitialized ();
383910d565efSmrg 
384010d565efSmrg   if (!seq)
384110d565efSmrg     return trial;
384210d565efSmrg 
384310d565efSmrg   /* Avoid infinite loop if any insn of the result matches
384410d565efSmrg      the original pattern.  */
384510d565efSmrg   insn_last = seq;
384610d565efSmrg   while (1)
384710d565efSmrg     {
384810d565efSmrg       if (INSN_P (insn_last)
384910d565efSmrg 	  && rtx_equal_p (PATTERN (insn_last), pat))
385010d565efSmrg 	return trial;
385110d565efSmrg       if (!NEXT_INSN (insn_last))
385210d565efSmrg 	break;
385310d565efSmrg       insn_last = NEXT_INSN (insn_last);
385410d565efSmrg     }
385510d565efSmrg 
385610d565efSmrg   /* We will be adding the new sequence to the function.  The splitters
385710d565efSmrg      may have introduced invalid RTL sharing, so unshare the sequence now.  */
385810d565efSmrg   unshare_all_rtl_in_chain (seq);
385910d565efSmrg 
386010d565efSmrg   /* Mark labels and copy flags.  */
386110d565efSmrg   for (insn = insn_last; insn ; insn = PREV_INSN (insn))
386210d565efSmrg     {
386310d565efSmrg       if (JUMP_P (insn))
386410d565efSmrg 	{
386510d565efSmrg 	  if (JUMP_P (trial))
386610d565efSmrg 	    CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
386710d565efSmrg 	  mark_jump_label (PATTERN (insn), insn, 0);
386810d565efSmrg 	  njumps++;
3869c7a68eb7Smrg 	  if (probability.initialized_p ()
387010d565efSmrg 	      && any_condjump_p (insn)
387110d565efSmrg 	      && !find_reg_note (insn, REG_BR_PROB, 0))
387210d565efSmrg 	    {
387310d565efSmrg 	      /* We can preserve the REG_BR_PROB notes only if exactly
387410d565efSmrg 		 one jump is created, otherwise the machine description
387510d565efSmrg 		 is responsible for this step using the
387610d565efSmrg 		 split_branch_probability variable.  */
387710d565efSmrg 	      gcc_assert (njumps == 1);
3878c7a68eb7Smrg 	      add_reg_br_prob_note (insn, probability);
387910d565efSmrg 	    }
388010d565efSmrg 	}
388110d565efSmrg     }
388210d565efSmrg 
388310d565efSmrg   /* If we are splitting a CALL_INSN, look for the CALL_INSN
388410d565efSmrg      in SEQ and copy any additional information across.  */
388510d565efSmrg   if (CALL_P (trial))
388610d565efSmrg     {
388710d565efSmrg       for (insn = insn_last; insn ; insn = PREV_INSN (insn))
388810d565efSmrg 	if (CALL_P (insn))
388910d565efSmrg 	  {
389010d565efSmrg 	    gcc_assert (call_insn == NULL_RTX);
389110d565efSmrg 	    call_insn = insn;
389210d565efSmrg 
389310d565efSmrg 	    /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
389410d565efSmrg 	       target may have explicitly specified.  */
3895c7a68eb7Smrg 	    rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
389610d565efSmrg 	    while (*p)
389710d565efSmrg 	      p = &XEXP (*p, 1);
389810d565efSmrg 	    *p = CALL_INSN_FUNCTION_USAGE (trial);
389910d565efSmrg 
390010d565efSmrg 	    /* If the old call was a sibling call, the new one must
390110d565efSmrg 	       be too.  */
390210d565efSmrg 	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
390310d565efSmrg 	  }
390410d565efSmrg     }
390510d565efSmrg 
390610d565efSmrg   /* Copy notes, particularly those related to the CFG.  */
390710d565efSmrg   for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
390810d565efSmrg     {
390910d565efSmrg       switch (REG_NOTE_KIND (note))
391010d565efSmrg 	{
391110d565efSmrg 	case REG_EH_REGION:
391210d565efSmrg 	  copy_reg_eh_region_note_backward (note, insn_last, NULL);
391310d565efSmrg 	  break;
391410d565efSmrg 
391510d565efSmrg 	case REG_NORETURN:
391610d565efSmrg 	case REG_SETJMP:
391710d565efSmrg 	case REG_TM:
3918c7a68eb7Smrg 	case REG_CALL_NOCF_CHECK:
3919c7a68eb7Smrg 	case REG_CALL_ARG_LOCATION:
392010d565efSmrg 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
392110d565efSmrg 	    {
392210d565efSmrg 	      if (CALL_P (insn))
392310d565efSmrg 		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
392410d565efSmrg 	    }
392510d565efSmrg 	  break;
392610d565efSmrg 
392710d565efSmrg 	case REG_NON_LOCAL_GOTO:
3928c7a68eb7Smrg 	case REG_LABEL_TARGET:
392910d565efSmrg 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
393010d565efSmrg 	    {
393110d565efSmrg 	      if (JUMP_P (insn))
393210d565efSmrg 		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
393310d565efSmrg 	    }
393410d565efSmrg 	  break;
393510d565efSmrg 
393610d565efSmrg 	case REG_INC:
393710d565efSmrg 	  if (!AUTO_INC_DEC)
393810d565efSmrg 	    break;
393910d565efSmrg 
394010d565efSmrg 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
394110d565efSmrg 	    {
394210d565efSmrg 	      rtx reg = XEXP (note, 0);
394310d565efSmrg 	      if (!FIND_REG_INC_NOTE (insn, reg)
394410d565efSmrg 		  && find_auto_inc (PATTERN (insn), reg))
394510d565efSmrg 		add_reg_note (insn, REG_INC, reg);
394610d565efSmrg 	    }
394710d565efSmrg 	  break;
394810d565efSmrg 
394910d565efSmrg 	case REG_ARGS_SIZE:
3950c7a68eb7Smrg 	  fixup_args_size_notes (NULL, insn_last, get_args_size (note));
395110d565efSmrg 	  break;
395210d565efSmrg 
395310d565efSmrg 	case REG_CALL_DECL:
395410d565efSmrg 	  gcc_assert (call_insn != NULL_RTX);
395510d565efSmrg 	  add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
395610d565efSmrg 	  break;
395710d565efSmrg 
395810d565efSmrg 	default:
395910d565efSmrg 	  break;
396010d565efSmrg 	}
396110d565efSmrg     }
396210d565efSmrg 
396310d565efSmrg   /* If there are LABELS inside the split insns, increment the
396410d565efSmrg      usage count so we don't delete the label.  */
396510d565efSmrg   if (INSN_P (trial))
396610d565efSmrg     {
396710d565efSmrg       insn = insn_last;
396810d565efSmrg       while (insn != NULL_RTX)
396910d565efSmrg 	{
397010d565efSmrg 	  /* JUMP_P insns have already been "marked" above.  */
397110d565efSmrg 	  if (NONJUMP_INSN_P (insn))
397210d565efSmrg 	    mark_label_nuses (PATTERN (insn));
397310d565efSmrg 
397410d565efSmrg 	  insn = PREV_INSN (insn);
397510d565efSmrg 	}
397610d565efSmrg     }
397710d565efSmrg 
397810d565efSmrg   before = PREV_INSN (trial);
397910d565efSmrg   after = NEXT_INSN (trial);
398010d565efSmrg 
3981*ec02198aSmrg   emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
398210d565efSmrg 
398310d565efSmrg   delete_insn (trial);
398410d565efSmrg 
398510d565efSmrg   /* Recursively call try_split for each new insn created; by the
398610d565efSmrg      time control returns here that insn will be fully split, so
398710d565efSmrg      set LAST and continue from the insn after the one returned.
398810d565efSmrg      We can't use next_active_insn here since AFTER may be a note.
398910d565efSmrg      Ignore deleted insns, which can occur if not optimizing.  */
399010d565efSmrg   for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
399110d565efSmrg     if (! tem->deleted () && INSN_P (tem))
399210d565efSmrg       tem = try_split (PATTERN (tem), tem, 1);
399310d565efSmrg 
399410d565efSmrg   /* Return either the first or the last insn, depending on which was
399510d565efSmrg      requested.  */
399610d565efSmrg   return last
399710d565efSmrg     ? (after ? PREV_INSN (after) : get_last_insn ())
399810d565efSmrg     : NEXT_INSN (before);
399910d565efSmrg }
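
/* A hedged usage sketch (hypothetical caller): a pass that wants an
   insn split in place, keeping its position in the stream, might do

       rtx_insn *first = try_split (PATTERN (insn), insn, 0);

   If the splitter produced a sequence, FIRST is the first replacement
   insn and INSN has been deleted; otherwise FIRST == INSN and the
   chain is unchanged.  */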
400010d565efSmrg 
400110d565efSmrg /* Make and return an INSN rtx, initializing all its slots.
400210d565efSmrg    Store PATTERN in the pattern slots.  */
400310d565efSmrg 
400410d565efSmrg rtx_insn *
400510d565efSmrg make_insn_raw (rtx pattern)
400610d565efSmrg {
400710d565efSmrg   rtx_insn *insn;
400810d565efSmrg 
400910d565efSmrg   insn = as_a <rtx_insn *> (rtx_alloc (INSN));
401010d565efSmrg 
401110d565efSmrg   INSN_UID (insn) = cur_insn_uid++;
401210d565efSmrg   PATTERN (insn) = pattern;
401310d565efSmrg   INSN_CODE (insn) = -1;
401410d565efSmrg   REG_NOTES (insn) = NULL;
401510d565efSmrg   INSN_LOCATION (insn) = curr_insn_location ();
401610d565efSmrg   BLOCK_FOR_INSN (insn) = NULL;
401710d565efSmrg 
401810d565efSmrg #ifdef ENABLE_RTL_CHECKING
401910d565efSmrg   if (insn
402010d565efSmrg       && INSN_P (insn)
402110d565efSmrg       && (returnjump_p (insn)
402210d565efSmrg 	  || (GET_CODE (insn) == SET
402310d565efSmrg 	      && SET_DEST (insn) == pc_rtx)))
402410d565efSmrg     {
402510d565efSmrg       warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
402610d565efSmrg       debug_rtx (insn);
402710d565efSmrg     }
402810d565efSmrg #endif
402910d565efSmrg 
403010d565efSmrg   return insn;
403110d565efSmrg }
403210d565efSmrg 
403310d565efSmrg /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */
403410d565efSmrg 
403510d565efSmrg static rtx_insn *
403610d565efSmrg make_debug_insn_raw (rtx pattern)
403710d565efSmrg {
403810d565efSmrg   rtx_debug_insn *insn;
403910d565efSmrg 
404010d565efSmrg   insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
404110d565efSmrg   INSN_UID (insn) = cur_debug_insn_uid++;
4042*ec02198aSmrg   if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
404310d565efSmrg     INSN_UID (insn) = cur_insn_uid++;
404410d565efSmrg 
404510d565efSmrg   PATTERN (insn) = pattern;
404610d565efSmrg   INSN_CODE (insn) = -1;
404710d565efSmrg   REG_NOTES (insn) = NULL;
404810d565efSmrg   INSN_LOCATION (insn) = curr_insn_location ();
404910d565efSmrg   BLOCK_FOR_INSN (insn) = NULL;
405010d565efSmrg 
405110d565efSmrg   return insn;
405210d565efSmrg }
405310d565efSmrg 
405410d565efSmrg /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
405510d565efSmrg 
405610d565efSmrg static rtx_insn *
405710d565efSmrg make_jump_insn_raw (rtx pattern)
405810d565efSmrg {
405910d565efSmrg   rtx_jump_insn *insn;
406010d565efSmrg 
406110d565efSmrg   insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
406210d565efSmrg   INSN_UID (insn) = cur_insn_uid++;
406310d565efSmrg 
406410d565efSmrg   PATTERN (insn) = pattern;
406510d565efSmrg   INSN_CODE (insn) = -1;
406610d565efSmrg   REG_NOTES (insn) = NULL;
406710d565efSmrg   JUMP_LABEL (insn) = NULL;
406810d565efSmrg   INSN_LOCATION (insn) = curr_insn_location ();
406910d565efSmrg   BLOCK_FOR_INSN (insn) = NULL;
407010d565efSmrg 
407110d565efSmrg   return insn;
407210d565efSmrg }
407310d565efSmrg 
407410d565efSmrg /* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
407510d565efSmrg 
407610d565efSmrg static rtx_insn *
407710d565efSmrg make_call_insn_raw (rtx pattern)
407810d565efSmrg {
407910d565efSmrg   rtx_call_insn *insn;
408010d565efSmrg 
408110d565efSmrg   insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
408210d565efSmrg   INSN_UID (insn) = cur_insn_uid++;
408310d565efSmrg 
408410d565efSmrg   PATTERN (insn) = pattern;
408510d565efSmrg   INSN_CODE (insn) = -1;
408610d565efSmrg   REG_NOTES (insn) = NULL;
408710d565efSmrg   CALL_INSN_FUNCTION_USAGE (insn) = NULL;
408810d565efSmrg   INSN_LOCATION (insn) = curr_insn_location ();
408910d565efSmrg   BLOCK_FOR_INSN (insn) = NULL;
409010d565efSmrg 
409110d565efSmrg   return insn;
409210d565efSmrg }
409310d565efSmrg 
409410d565efSmrg /* Like `make_insn_raw' but make a NOTE instead of an insn.  */
409510d565efSmrg 
409610d565efSmrg static rtx_note *
409710d565efSmrg make_note_raw (enum insn_note subtype)
409810d565efSmrg {
409910d565efSmrg   /* Some notes are never created this way at all.  These notes are
410010d565efSmrg      only created by patching out insns.  */
410110d565efSmrg   gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
410210d565efSmrg 	      && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
410310d565efSmrg 
410410d565efSmrg   rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
410510d565efSmrg   INSN_UID (note) = cur_insn_uid++;
410610d565efSmrg   NOTE_KIND (note) = subtype;
410710d565efSmrg   BLOCK_FOR_INSN (note) = NULL;
410810d565efSmrg   memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
410910d565efSmrg   return note;
411010d565efSmrg }
411110d565efSmrg 
411210d565efSmrg /* Link INSN into the doubly-linked list between PREV and NEXT.
411310d565efSmrg    INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
411410d565efSmrg    but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be NULL.  */
411510d565efSmrg 
411610d565efSmrg static inline void
411710d565efSmrg link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
411810d565efSmrg {
411910d565efSmrg   SET_PREV_INSN (insn) = prev;
412010d565efSmrg   SET_NEXT_INSN (insn) = next;
412110d565efSmrg   if (prev != NULL)
412210d565efSmrg     {
412310d565efSmrg       SET_NEXT_INSN (prev) = insn;
412410d565efSmrg       if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
412510d565efSmrg 	{
412610d565efSmrg 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
412710d565efSmrg 	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
412810d565efSmrg 	}
412910d565efSmrg     }
413010d565efSmrg   if (next != NULL)
413110d565efSmrg     {
413210d565efSmrg       SET_PREV_INSN (next) = insn;
413310d565efSmrg       if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
413410d565efSmrg 	{
413510d565efSmrg 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
413610d565efSmrg 	  SET_PREV_INSN (sequence->insn (0)) = insn;
413710d565efSmrg 	}
413810d565efSmrg     }
413910d565efSmrg 
414010d565efSmrg   if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
414110d565efSmrg     {
414210d565efSmrg       rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
414310d565efSmrg       SET_PREV_INSN (sequence->insn (0)) = prev;
414410d565efSmrg       SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
414510d565efSmrg     }
414610d565efSmrg }
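
/* For illustration (not from this file): linking I between P and N
   updates four pointers,

       P <-> I <-> N

   and, when P, N, or I is a delay-slot SEQUENCE, also patches the
   PREV/NEXT pointers of the first and last insns inside the sequence,
   so chain walks that descend into sequences stay consistent.  */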
414710d565efSmrg 
414810d565efSmrg /* Add INSN to the end of the doubly-linked list.
414910d565efSmrg    INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */
415010d565efSmrg 
415110d565efSmrg void
415210d565efSmrg add_insn (rtx_insn *insn)
415310d565efSmrg {
415410d565efSmrg   rtx_insn *prev = get_last_insn ();
415510d565efSmrg   link_insn_into_chain (insn, prev, NULL);
4156c7a68eb7Smrg   if (get_insns () == NULL)
415710d565efSmrg     set_first_insn (insn);
415810d565efSmrg   set_last_insn (insn);
415910d565efSmrg }
416010d565efSmrg 
416110d565efSmrg /* Add INSN into the doubly-linked list after insn AFTER.  */
416210d565efSmrg 
416310d565efSmrg static void
416410d565efSmrg add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
416510d565efSmrg {
416610d565efSmrg   rtx_insn *next = NEXT_INSN (after);
416710d565efSmrg 
416810d565efSmrg   gcc_assert (!optimize || !after->deleted ());
416910d565efSmrg 
417010d565efSmrg   link_insn_into_chain (insn, after, next);
417110d565efSmrg 
417210d565efSmrg   if (next == NULL)
417310d565efSmrg     {
417410d565efSmrg       struct sequence_stack *seq;
417510d565efSmrg 
417610d565efSmrg       for (seq = get_current_sequence (); seq; seq = seq->next)
417710d565efSmrg 	if (after == seq->last)
417810d565efSmrg 	  {
417910d565efSmrg 	    seq->last = insn;
418010d565efSmrg 	    break;
418110d565efSmrg 	  }
418210d565efSmrg     }
418310d565efSmrg }
418410d565efSmrg 
418510d565efSmrg /* Add INSN into the doubly-linked list before insn BEFORE.  */
418610d565efSmrg 
418710d565efSmrg static void
418810d565efSmrg add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
418910d565efSmrg {
419010d565efSmrg   rtx_insn *prev = PREV_INSN (before);
419110d565efSmrg 
419210d565efSmrg   gcc_assert (!optimize || !before->deleted ());
419310d565efSmrg 
419410d565efSmrg   link_insn_into_chain (insn, prev, before);
419510d565efSmrg 
419610d565efSmrg   if (prev == NULL)
419710d565efSmrg     {
419810d565efSmrg       struct sequence_stack *seq;
419910d565efSmrg 
420010d565efSmrg       for (seq = get_current_sequence (); seq; seq = seq->next)
420110d565efSmrg 	if (before == seq->first)
420210d565efSmrg 	  {
420310d565efSmrg 	    seq->first = insn;
420410d565efSmrg 	    break;
420510d565efSmrg 	  }
420610d565efSmrg 
420710d565efSmrg       gcc_assert (seq);
420810d565efSmrg     }
420910d565efSmrg }
421010d565efSmrg 
421110d565efSmrg /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
421210d565efSmrg    If BB is NULL, an attempt is made to infer the bb from AFTER.
421310d565efSmrg 
421410d565efSmrg    This and the next function should be the only functions called
421510d565efSmrg    to insert an insn once delay slots have been filled since only
421610d565efSmrg    they know how to update a SEQUENCE. */
421710d565efSmrg 
421810d565efSmrg void
42190fc04c29Smrg add_insn_after (rtx_insn *insn, rtx_insn *after, basic_block bb)
422010d565efSmrg {
422110d565efSmrg   add_insn_after_nobb (insn, after);
422210d565efSmrg   if (!BARRIER_P (after)
422310d565efSmrg       && !BARRIER_P (insn)
422410d565efSmrg       && (bb = BLOCK_FOR_INSN (after)))
422510d565efSmrg     {
422610d565efSmrg       set_block_for_insn (insn, bb);
422710d565efSmrg       if (INSN_P (insn))
422810d565efSmrg 	df_insn_rescan (insn);
422910d565efSmrg       /* Should not happen, as the first insn in the BB is always
423010d565efSmrg	 either a NOTE or a LABEL.  */
423110d565efSmrg       if (BB_END (bb) == after
423210d565efSmrg 	  /* Avoid clobbering of structure when creating new BB.  */
423310d565efSmrg 	  && !BARRIER_P (insn)
423410d565efSmrg 	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
423510d565efSmrg 	BB_END (bb) = insn;
423610d565efSmrg     }
423710d565efSmrg }
423810d565efSmrg 
423910d565efSmrg /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
424010d565efSmrg    If BB is NULL, an attempt is made to infer the bb from BEFORE.
424110d565efSmrg 
424210d565efSmrg    This and the previous function should be the only functions called
424310d565efSmrg    to insert an insn once delay slots have been filled since only
424410d565efSmrg    they know how to update a SEQUENCE. */
424510d565efSmrg 
424610d565efSmrg void
42470fc04c29Smrg add_insn_before (rtx_insn *insn, rtx_insn *before, basic_block bb)
424810d565efSmrg {
424910d565efSmrg   add_insn_before_nobb (insn, before);
425010d565efSmrg 
425110d565efSmrg   if (!bb
425210d565efSmrg       && !BARRIER_P (before)
425310d565efSmrg       && !BARRIER_P (insn))
425410d565efSmrg     bb = BLOCK_FOR_INSN (before);
425510d565efSmrg 
425610d565efSmrg   if (bb)
425710d565efSmrg     {
425810d565efSmrg       set_block_for_insn (insn, bb);
425910d565efSmrg       if (INSN_P (insn))
426010d565efSmrg 	df_insn_rescan (insn);
426110d565efSmrg       /* Should not happen, as the first insn in the BB is always
426210d565efSmrg	 either a NOTE or a LABEL.  */
426310d565efSmrg       gcc_assert (BB_HEAD (bb) != insn
426410d565efSmrg 		  /* Avoid clobbering of structure when creating new BB.  */
426510d565efSmrg 		  || BARRIER_P (insn)
426610d565efSmrg 		  || NOTE_INSN_BASIC_BLOCK_P (insn));
426710d565efSmrg     }
426810d565efSmrg }
426910d565efSmrg 
427010d565efSmrg /* Replace INSN with a deleted instruction note.  */
427110d565efSmrg 
427210d565efSmrg void
42730fc04c29Smrg set_insn_deleted (rtx_insn *insn)
427410d565efSmrg {
427510d565efSmrg   if (INSN_P (insn))
42760fc04c29Smrg     df_insn_delete (insn);
427710d565efSmrg   PUT_CODE (insn, NOTE);
427810d565efSmrg   NOTE_KIND (insn) = NOTE_INSN_DELETED;
427910d565efSmrg }
428010d565efSmrg 
428110d565efSmrg 
428210d565efSmrg /* Unlink INSN from the insn chain.
428310d565efSmrg 
428410d565efSmrg    This function knows how to handle sequences.
428510d565efSmrg 
428610d565efSmrg    This function does not invalidate data flow information associated with
428710d565efSmrg    INSN (i.e. does not call df_insn_delete).  That makes this function
428810d565efSmrg    usable for merely disconnecting an insn from the chain, so that it
428910d565efSmrg    can be re-emitted elsewhere later.
429010d565efSmrg 
429110d565efSmrg    To later insert INSN elsewhere in the insn chain via add_insn and
429210d565efSmrg    similar functions, PREV_INSN and NEXT_INSN must be nullified by
429310d565efSmrg    the caller.  Nullifying them here breaks many insn chain walks.
429410d565efSmrg 
429510d565efSmrg    To really delete an insn and related DF information, use delete_insn.  */
429610d565efSmrg 
429710d565efSmrg void
42980fc04c29Smrg remove_insn (rtx_insn *insn)
429910d565efSmrg {
430010d565efSmrg   rtx_insn *next = NEXT_INSN (insn);
430110d565efSmrg   rtx_insn *prev = PREV_INSN (insn);
430210d565efSmrg   basic_block bb;
430310d565efSmrg 
430410d565efSmrg   if (prev)
430510d565efSmrg     {
430610d565efSmrg       SET_NEXT_INSN (prev) = next;
430710d565efSmrg       if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
430810d565efSmrg 	{
430910d565efSmrg 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
431010d565efSmrg 	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
431110d565efSmrg 	}
431210d565efSmrg     }
431310d565efSmrg   else
431410d565efSmrg     {
431510d565efSmrg       struct sequence_stack *seq;
431610d565efSmrg 
431710d565efSmrg       for (seq = get_current_sequence (); seq; seq = seq->next)
431810d565efSmrg 	if (insn == seq->first)
431910d565efSmrg 	  {
432010d565efSmrg 	    seq->first = next;
432110d565efSmrg 	    break;
432210d565efSmrg 	  }
432310d565efSmrg 
432410d565efSmrg       gcc_assert (seq);
432510d565efSmrg     }
432610d565efSmrg 
432710d565efSmrg   if (next)
432810d565efSmrg     {
432910d565efSmrg       SET_PREV_INSN (next) = prev;
433010d565efSmrg       if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
433110d565efSmrg 	{
433210d565efSmrg 	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
433310d565efSmrg 	  SET_PREV_INSN (sequence->insn (0)) = prev;
433410d565efSmrg 	}
433510d565efSmrg     }
433610d565efSmrg   else
433710d565efSmrg     {
433810d565efSmrg       struct sequence_stack *seq;
433910d565efSmrg 
434010d565efSmrg       for (seq = get_current_sequence (); seq; seq = seq->next)
434110d565efSmrg 	if (insn == seq->last)
434210d565efSmrg 	  {
434310d565efSmrg 	    seq->last = prev;
434410d565efSmrg 	    break;
434510d565efSmrg 	  }
434610d565efSmrg 
434710d565efSmrg       gcc_assert (seq);
434810d565efSmrg     }
434910d565efSmrg 
435010d565efSmrg   /* Fix up basic block boundaries, if necessary.  */
435110d565efSmrg   if (!BARRIER_P (insn)
435210d565efSmrg       && (bb = BLOCK_FOR_INSN (insn)))
435310d565efSmrg     {
435410d565efSmrg       if (BB_HEAD (bb) == insn)
435510d565efSmrg 	{
435610d565efSmrg 	  /* Never ever delete the basic block note without deleting whole
435710d565efSmrg 	     basic block.  */
435810d565efSmrg 	  gcc_assert (!NOTE_P (insn));
435910d565efSmrg 	  BB_HEAD (bb) = next;
436010d565efSmrg 	}
436110d565efSmrg       if (BB_END (bb) == insn)
436210d565efSmrg 	BB_END (bb) = prev;
436310d565efSmrg     }
436410d565efSmrg }
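
/* A minimal move sketch (hypothetical caller), following the comment
   above remove_insn:

       remove_insn (insn);
       SET_PREV_INSN (insn) = NULL;
       SET_NEXT_INSN (insn) = NULL;
       add_insn_after (insn, new_place, NULL);

   NEW_PLACE is assumed to be an insn already in the chain; the
   explicit nullification is the caller's responsibility.  */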
436510d565efSmrg 
436610d565efSmrg /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */
436710d565efSmrg 
436810d565efSmrg void
436910d565efSmrg add_function_usage_to (rtx call_insn, rtx call_fusage)
437010d565efSmrg {
437110d565efSmrg   gcc_assert (call_insn && CALL_P (call_insn));
437210d565efSmrg 
437310d565efSmrg   /* Put the register usage information on the CALL.  If there is already
437410d565efSmrg      some usage information, put ours at the end.  */
437510d565efSmrg   if (CALL_INSN_FUNCTION_USAGE (call_insn))
437610d565efSmrg     {
437710d565efSmrg       rtx link;
437810d565efSmrg 
437910d565efSmrg       for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
438010d565efSmrg 	   link = XEXP (link, 1))
438110d565efSmrg 	;
438210d565efSmrg 
438310d565efSmrg       XEXP (link, 1) = call_fusage;
438410d565efSmrg     }
438510d565efSmrg   else
438610d565efSmrg     CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
438710d565efSmrg }
438810d565efSmrg 
438910d565efSmrg /* Delete all insns made since FROM.
439010d565efSmrg    FROM becomes the new last instruction.  */
439110d565efSmrg 
439210d565efSmrg void
439310d565efSmrg delete_insns_since (rtx_insn *from)
439410d565efSmrg {
439510d565efSmrg   if (from == 0)
439610d565efSmrg     set_first_insn (0);
439710d565efSmrg   else
439810d565efSmrg     SET_NEXT_INSN (from) = 0;
439910d565efSmrg   set_last_insn (from);
440010d565efSmrg }
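
/* A common rollback sketch (illustrative only): record the last insn,
   emit tentatively, and discard everything on failure:

       rtx_insn *last = get_last_insn ();
       ... emit a tentative sequence ...
       if (expansion_failed)
	 delete_insns_since (last);

   leaving LAST as the last instruction again; EXPANSION_FAILED is a
   stand-in for whatever failure test the caller uses.  */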
440110d565efSmrg 
440210d565efSmrg /* This function is deprecated; please use sequences instead.
440310d565efSmrg 
440410d565efSmrg    Move a consecutive bunch of insns to a different place in the chain.
440510d565efSmrg    The insns to be moved are those between FROM and TO.
440610d565efSmrg    They are moved to a new position after the insn AFTER.
440710d565efSmrg    AFTER must not be FROM or TO or any insn in between.
440810d565efSmrg 
440910d565efSmrg    This function does not know about SEQUENCEs and hence should not be
441010d565efSmrg    called after delay-slot filling has been done.  */
441110d565efSmrg 
441210d565efSmrg void
441310d565efSmrg reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
441410d565efSmrg {
441510d565efSmrg   if (flag_checking)
441610d565efSmrg     {
441710d565efSmrg       for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
441810d565efSmrg 	gcc_assert (after != x);
441910d565efSmrg       gcc_assert (after != to);
442010d565efSmrg     }
442110d565efSmrg 
442210d565efSmrg   /* Splice this bunch out of where it is now.  */
442310d565efSmrg   if (PREV_INSN (from))
442410d565efSmrg     SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
442510d565efSmrg   if (NEXT_INSN (to))
442610d565efSmrg     SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
442710d565efSmrg   if (get_last_insn () == to)
442810d565efSmrg     set_last_insn (PREV_INSN (from));
442910d565efSmrg   if (get_insns () == from)
443010d565efSmrg     set_first_insn (NEXT_INSN (to));
443110d565efSmrg 
443210d565efSmrg   /* Make the new neighbors point to it and it to them.  */
443310d565efSmrg   if (NEXT_INSN (after))
443410d565efSmrg     SET_PREV_INSN (NEXT_INSN (after)) = to;
443510d565efSmrg 
443610d565efSmrg   SET_NEXT_INSN (to) = NEXT_INSN (after);
443710d565efSmrg   SET_PREV_INSN (from) = after;
443810d565efSmrg   SET_NEXT_INSN (after) = from;
443910d565efSmrg   if (after == get_last_insn ())
444010d565efSmrg     set_last_insn (to);
444110d565efSmrg }
444210d565efSmrg 
444310d565efSmrg /* Same as the function above, but take care to update BB boundaries.  */
444410d565efSmrg void
444510d565efSmrg reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
444610d565efSmrg {
444710d565efSmrg   rtx_insn *prev = PREV_INSN (from);
444810d565efSmrg   basic_block bb, bb2;
444910d565efSmrg 
445010d565efSmrg   reorder_insns_nobb (from, to, after);
445110d565efSmrg 
445210d565efSmrg   if (!BARRIER_P (after)
445310d565efSmrg       && (bb = BLOCK_FOR_INSN (after)))
445410d565efSmrg     {
445510d565efSmrg       rtx_insn *x;
445610d565efSmrg       df_set_bb_dirty (bb);
445710d565efSmrg 
445810d565efSmrg       if (!BARRIER_P (from)
445910d565efSmrg 	  && (bb2 = BLOCK_FOR_INSN (from)))
446010d565efSmrg 	{
446110d565efSmrg 	  if (BB_END (bb2) == to)
446210d565efSmrg 	    BB_END (bb2) = prev;
446310d565efSmrg 	  df_set_bb_dirty (bb2);
446410d565efSmrg 	}
446510d565efSmrg 
446610d565efSmrg       if (BB_END (bb) == after)
446710d565efSmrg 	BB_END (bb) = to;
446810d565efSmrg 
446910d565efSmrg       for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
447010d565efSmrg 	if (!BARRIER_P (x))
447110d565efSmrg 	  df_insn_change_bb (x, bb);
447210d565efSmrg     }
447310d565efSmrg }
447410d565efSmrg 
447510d565efSmrg 
447610d565efSmrg /* Emit insn(s) of given code and pattern
447710d565efSmrg    at a specified place within the doubly-linked list.
447810d565efSmrg 
447910d565efSmrg    All of the emit_foo global entry points accept an object
448010d565efSmrg    X which is either an insn list or a PATTERN of a single
448110d565efSmrg    instruction.
448210d565efSmrg 
448310d565efSmrg    There are thus a few canonical ways to generate code and
448410d565efSmrg    emit it at a specific place in the instruction stream.  For
448510d565efSmrg    example, consider the instruction named SPOT and the fact that
448610d565efSmrg    we would like to emit some instructions before SPOT.  We might
448710d565efSmrg    do it like this:
448810d565efSmrg 
448910d565efSmrg 	start_sequence ();
449010d565efSmrg 	... emit the new instructions ...
449110d565efSmrg 	insns_head = get_insns ();
449210d565efSmrg 	end_sequence ();
449310d565efSmrg 
449410d565efSmrg 	emit_insn_before (insns_head, SPOT);
449510d565efSmrg 
449610d565efSmrg    It used to be common to generate SEQUENCE rtl instead, but that
449710d565efSmrg    SEQUENCE rtl results in badly fragmented RTL memory since the SEQUENCE
449810d565efSmrg    SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
449910d565efSmrg    generated would almost certainly die right after it was created.  */
450010d565efSmrg 
450110d565efSmrg static rtx_insn *
45020fc04c29Smrg emit_pattern_before_noloc (rtx x, rtx_insn *before, rtx_insn *last,
45030fc04c29Smrg 			   basic_block bb,
450410d565efSmrg                            rtx_insn *(*make_raw) (rtx))
450510d565efSmrg {
450610d565efSmrg   rtx_insn *insn;
450710d565efSmrg 
450810d565efSmrg   gcc_assert (before);
450910d565efSmrg 
451010d565efSmrg   if (x == NULL_RTX)
45110fc04c29Smrg     return last;
451210d565efSmrg 
451310d565efSmrg   switch (GET_CODE (x))
451410d565efSmrg     {
451510d565efSmrg     case DEBUG_INSN:
451610d565efSmrg     case INSN:
451710d565efSmrg     case JUMP_INSN:
451810d565efSmrg     case CALL_INSN:
451910d565efSmrg     case CODE_LABEL:
452010d565efSmrg     case BARRIER:
452110d565efSmrg     case NOTE:
452210d565efSmrg       insn = as_a <rtx_insn *> (x);
452310d565efSmrg       while (insn)
452410d565efSmrg 	{
452510d565efSmrg 	  rtx_insn *next = NEXT_INSN (insn);
452610d565efSmrg 	  add_insn_before (insn, before, bb);
452710d565efSmrg 	  last = insn;
452810d565efSmrg 	  insn = next;
452910d565efSmrg 	}
453010d565efSmrg       break;
453110d565efSmrg 
453210d565efSmrg #ifdef ENABLE_RTL_CHECKING
453310d565efSmrg     case SEQUENCE:
453410d565efSmrg       gcc_unreachable ();
453510d565efSmrg       break;
453610d565efSmrg #endif
453710d565efSmrg 
453810d565efSmrg     default:
453910d565efSmrg       last = (*make_raw) (x);
454010d565efSmrg       add_insn_before (last, before, bb);
454110d565efSmrg       break;
454210d565efSmrg     }
454310d565efSmrg 
45440fc04c29Smrg   return last;
454510d565efSmrg }
454610d565efSmrg 
454710d565efSmrg /* Make X be output before the instruction BEFORE.  */
454810d565efSmrg 
454910d565efSmrg rtx_insn *
455010d565efSmrg emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
455110d565efSmrg {
455210d565efSmrg   return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
455310d565efSmrg }
455410d565efSmrg 
455510d565efSmrg /* Make an instruction with body X and code JUMP_INSN
455610d565efSmrg    and output it before the instruction BEFORE.  */
455710d565efSmrg 
455810d565efSmrg rtx_jump_insn *
455910d565efSmrg emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
456010d565efSmrg {
456110d565efSmrg   return as_a <rtx_jump_insn *> (
45620fc04c29Smrg 		emit_pattern_before_noloc (x, before, NULL, NULL,
456310d565efSmrg 					   make_jump_insn_raw));
456410d565efSmrg }
456510d565efSmrg 
456610d565efSmrg /* Make an instruction with body X and code CALL_INSN
456710d565efSmrg    and output it before the instruction BEFORE.  */
456810d565efSmrg 
456910d565efSmrg rtx_insn *
457010d565efSmrg emit_call_insn_before_noloc (rtx x, rtx_insn *before)
457110d565efSmrg {
45720fc04c29Smrg   return emit_pattern_before_noloc (x, before, NULL, NULL,
457310d565efSmrg 				    make_call_insn_raw);
457410d565efSmrg }
457510d565efSmrg 
457610d565efSmrg /* Make an instruction with body X and code DEBUG_INSN
457710d565efSmrg    and output it before the instruction BEFORE.  */
457810d565efSmrg 
457910d565efSmrg rtx_insn *
45800fc04c29Smrg emit_debug_insn_before_noloc (rtx x, rtx_insn *before)
458110d565efSmrg {
45820fc04c29Smrg   return emit_pattern_before_noloc (x, before, NULL, NULL,
458310d565efSmrg 				    make_debug_insn_raw);
458410d565efSmrg }
458510d565efSmrg 
458610d565efSmrg /* Make an insn of code BARRIER
458710d565efSmrg    and output it before the insn BEFORE.  */
458810d565efSmrg 
458910d565efSmrg rtx_barrier *
45900fc04c29Smrg emit_barrier_before (rtx_insn *before)
459110d565efSmrg {
459210d565efSmrg   rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
459310d565efSmrg 
459410d565efSmrg   INSN_UID (insn) = cur_insn_uid++;
459510d565efSmrg 
459610d565efSmrg   add_insn_before (insn, before, NULL);
459710d565efSmrg   return insn;
459810d565efSmrg }
459910d565efSmrg 
460010d565efSmrg /* Emit the label LABEL before the insn BEFORE.  */
460110d565efSmrg 
460210d565efSmrg rtx_code_label *
46030fc04c29Smrg emit_label_before (rtx_code_label *label, rtx_insn *before)
460410d565efSmrg {
460510d565efSmrg   gcc_checking_assert (INSN_UID (label) == 0);
460610d565efSmrg   INSN_UID (label) = cur_insn_uid++;
460710d565efSmrg   add_insn_before (label, before, NULL);
46080fc04c29Smrg   return label;
460910d565efSmrg }
461010d565efSmrg 
461110d565efSmrg /* Helper for emit_insn_after, handles lists of instructions
461210d565efSmrg    efficiently.  */
461310d565efSmrg 
461410d565efSmrg static rtx_insn *
46150fc04c29Smrg emit_insn_after_1 (rtx_insn *first, rtx_insn *after, basic_block bb)
461610d565efSmrg {
461710d565efSmrg   rtx_insn *last;
461810d565efSmrg   rtx_insn *after_after;
461910d565efSmrg   if (!bb && !BARRIER_P (after))
462010d565efSmrg     bb = BLOCK_FOR_INSN (after);
462110d565efSmrg 
462210d565efSmrg   if (bb)
462310d565efSmrg     {
462410d565efSmrg       df_set_bb_dirty (bb);
462510d565efSmrg       for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
462610d565efSmrg 	if (!BARRIER_P (last))
462710d565efSmrg 	  {
462810d565efSmrg 	    set_block_for_insn (last, bb);
462910d565efSmrg 	    df_insn_rescan (last);
463010d565efSmrg 	  }
463110d565efSmrg       if (!BARRIER_P (last))
463210d565efSmrg 	{
463310d565efSmrg 	  set_block_for_insn (last, bb);
463410d565efSmrg 	  df_insn_rescan (last);
463510d565efSmrg 	}
463610d565efSmrg       if (BB_END (bb) == after)
463710d565efSmrg 	BB_END (bb) = last;
463810d565efSmrg     }
463910d565efSmrg   else
464010d565efSmrg     for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
464110d565efSmrg       continue;
464210d565efSmrg 
464310d565efSmrg   after_after = NEXT_INSN (after);
464410d565efSmrg 
464510d565efSmrg   SET_NEXT_INSN (after) = first;
464610d565efSmrg   SET_PREV_INSN (first) = after;
464710d565efSmrg   SET_NEXT_INSN (last) = after_after;
464810d565efSmrg   if (after_after)
464910d565efSmrg     SET_PREV_INSN (after_after) = last;
465010d565efSmrg 
465110d565efSmrg   if (after == get_last_insn ())
465210d565efSmrg     set_last_insn (last);
465310d565efSmrg 
465410d565efSmrg   return last;
465510d565efSmrg }
465610d565efSmrg 
465710d565efSmrg static rtx_insn *
46580fc04c29Smrg emit_pattern_after_noloc (rtx x, rtx_insn *after, basic_block bb,
465910d565efSmrg 			  rtx_insn *(*make_raw)(rtx))
466010d565efSmrg {
466110d565efSmrg   rtx_insn *last = after;
466210d565efSmrg 
466310d565efSmrg   gcc_assert (after);
466410d565efSmrg 
466510d565efSmrg   if (x == NULL_RTX)
466610d565efSmrg     return last;
466710d565efSmrg 
466810d565efSmrg   switch (GET_CODE (x))
466910d565efSmrg     {
467010d565efSmrg     case DEBUG_INSN:
467110d565efSmrg     case INSN:
467210d565efSmrg     case JUMP_INSN:
467310d565efSmrg     case CALL_INSN:
467410d565efSmrg     case CODE_LABEL:
467510d565efSmrg     case BARRIER:
467610d565efSmrg     case NOTE:
467710d565efSmrg       last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
467810d565efSmrg       break;
467910d565efSmrg 
468010d565efSmrg #ifdef ENABLE_RTL_CHECKING
468110d565efSmrg     case SEQUENCE:
468210d565efSmrg       gcc_unreachable ();
468310d565efSmrg       break;
468410d565efSmrg #endif
468510d565efSmrg 
468610d565efSmrg     default:
468710d565efSmrg       last = (*make_raw) (x);
468810d565efSmrg       add_insn_after (last, after, bb);
468910d565efSmrg       break;
469010d565efSmrg     }
469110d565efSmrg 
469210d565efSmrg   return last;
469310d565efSmrg }
469410d565efSmrg 
469510d565efSmrg /* Make X be output after the insn AFTER and set its basic block.  If
469610d565efSmrg    BB is NULL, an attempt is made to infer the BB from AFTER.  */
469710d565efSmrg 
469810d565efSmrg rtx_insn *
46990fc04c29Smrg emit_insn_after_noloc (rtx x, rtx_insn *after, basic_block bb)
470010d565efSmrg {
470110d565efSmrg   return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
470210d565efSmrg }
470310d565efSmrg 
470410d565efSmrg 
470510d565efSmrg /* Make an insn of code JUMP_INSN with body X
470610d565efSmrg    and output it after the insn AFTER.  */
470710d565efSmrg 
470810d565efSmrg rtx_jump_insn *
47090fc04c29Smrg emit_jump_insn_after_noloc (rtx x, rtx_insn *after)
471010d565efSmrg {
471110d565efSmrg   return as_a <rtx_jump_insn *> (
471210d565efSmrg 		emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
471310d565efSmrg }
471410d565efSmrg 
471510d565efSmrg /* Make an instruction with body X and code CALL_INSN
471610d565efSmrg    and output it after the instruction AFTER.  */
471710d565efSmrg 
471810d565efSmrg rtx_insn *
47190fc04c29Smrg emit_call_insn_after_noloc (rtx x, rtx_insn *after)
472010d565efSmrg {
472110d565efSmrg   return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
472210d565efSmrg }
472310d565efSmrg 
472410d565efSmrg /* Make an instruction with body X and code DEBUG_INSN
472510d565efSmrg    and output it after the instruction AFTER.  */
472610d565efSmrg 
472710d565efSmrg rtx_insn *
47280fc04c29Smrg emit_debug_insn_after_noloc (rtx x, rtx_insn *after)
472910d565efSmrg {
473010d565efSmrg   return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
473110d565efSmrg }
473210d565efSmrg 
473310d565efSmrg /* Make an insn of code BARRIER
473410d565efSmrg    and output it after the insn AFTER.  */
473510d565efSmrg 
473610d565efSmrg rtx_barrier *
47370fc04c29Smrg emit_barrier_after (rtx_insn *after)
473810d565efSmrg {
473910d565efSmrg   rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
474010d565efSmrg 
474110d565efSmrg   INSN_UID (insn) = cur_insn_uid++;
474210d565efSmrg 
474310d565efSmrg   add_insn_after (insn, after, NULL);
474410d565efSmrg   return insn;
474510d565efSmrg }
474610d565efSmrg 
474710d565efSmrg /* Emit the label LABEL after the insn AFTER.  */
474810d565efSmrg 
474910d565efSmrg rtx_insn *
47500fc04c29Smrg emit_label_after (rtx_insn *label, rtx_insn *after)
475110d565efSmrg {
475210d565efSmrg   gcc_checking_assert (INSN_UID (label) == 0);
475310d565efSmrg   INSN_UID (label) = cur_insn_uid++;
475410d565efSmrg   add_insn_after (label, after, NULL);
47550fc04c29Smrg   return label;
475610d565efSmrg }
475710d565efSmrg 
475810d565efSmrg /* Notes require a bit of special handling: Some notes need to have their
475910d565efSmrg    BLOCK_FOR_INSN set, others should never have it set, and some should
476010d565efSmrg    have it set or cleared depending on the context.  */
476110d565efSmrg 
476210d565efSmrg /* Return true iff a note of kind SUBTYPE should be emitted with routines
476310d565efSmrg    that never set BLOCK_FOR_INSN on NOTE.  BB_BOUNDARY is true if the
476410d565efSmrg    that never set BLOCK_FOR_INSN on NOTE.  ON_BB_BOUNDARY_P is true if the
476510d565efSmrg 
476610d565efSmrg static bool
476710d565efSmrg note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
476810d565efSmrg {
476910d565efSmrg   switch (subtype)
477010d565efSmrg     {
477110d565efSmrg       /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
477210d565efSmrg       case NOTE_INSN_SWITCH_TEXT_SECTIONS:
477310d565efSmrg 	return true;
477410d565efSmrg 
477510d565efSmrg       /* Notes for var tracking and EH region markers can appear between or
477610d565efSmrg 	 inside basic blocks.  If the caller is emitting on the basic block
477710d565efSmrg 	 boundary, do not set BLOCK_FOR_INSN on the new note.  */
477810d565efSmrg       case NOTE_INSN_VAR_LOCATION:
477910d565efSmrg       case NOTE_INSN_EH_REGION_BEG:
478010d565efSmrg       case NOTE_INSN_EH_REGION_END:
478110d565efSmrg 	return on_bb_boundary_p;
478210d565efSmrg 
478310d565efSmrg       /* Otherwise, BLOCK_FOR_INSN must be set.  */
478410d565efSmrg       default:
478510d565efSmrg 	return false;
478610d565efSmrg     }
478710d565efSmrg }
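
/* Example (for exposition): a NOTE_INSN_VAR_LOCATION emitted after
   BB_END (bb) is a boundary emission, so the predicate above returns
   true and emit_note_after below uses add_insn_after_nobb, leaving
   BLOCK_FOR_INSN unset; the same note emitted mid-block gets its
   BLOCK_FOR_INSN set via add_insn_after.  */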
478810d565efSmrg 
478910d565efSmrg /* Emit a note of subtype SUBTYPE after the insn AFTER.  */
479010d565efSmrg 
479110d565efSmrg rtx_note *
479210d565efSmrg emit_note_after (enum insn_note subtype, rtx_insn *after)
479310d565efSmrg {
479410d565efSmrg   rtx_note *note = make_note_raw (subtype);
479510d565efSmrg   basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
479610d565efSmrg   bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
479710d565efSmrg 
479810d565efSmrg   if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
479910d565efSmrg     add_insn_after_nobb (note, after);
480010d565efSmrg   else
480110d565efSmrg     add_insn_after (note, after, bb);
480210d565efSmrg   return note;
480310d565efSmrg }
480410d565efSmrg 
480510d565efSmrg /* Emit a note of subtype SUBTYPE before the insn BEFORE.  */
480610d565efSmrg 
480710d565efSmrg rtx_note *
480810d565efSmrg emit_note_before (enum insn_note subtype, rtx_insn *before)
480910d565efSmrg {
481010d565efSmrg   rtx_note *note = make_note_raw (subtype);
481110d565efSmrg   basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
481210d565efSmrg   bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
481310d565efSmrg 
481410d565efSmrg   if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
481510d565efSmrg     add_insn_before_nobb (note, before);
481610d565efSmrg   else
481710d565efSmrg     add_insn_before (note, before, bb);
481810d565efSmrg   return note;
481910d565efSmrg }
482010d565efSmrg 
482110d565efSmrg /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
482210d565efSmrg    MAKE_RAW indicates how to turn PATTERN into a real insn.  */
482310d565efSmrg 
482410d565efSmrg static rtx_insn *
48250fc04c29Smrg emit_pattern_after_setloc (rtx pattern, rtx_insn *after, location_t loc,
482610d565efSmrg 			   rtx_insn *(*make_raw) (rtx))
482710d565efSmrg {
482810d565efSmrg   rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
482910d565efSmrg 
483010d565efSmrg   if (pattern == NULL_RTX || !loc)
483110d565efSmrg     return last;
483210d565efSmrg 
483310d565efSmrg   after = NEXT_INSN (after);
483410d565efSmrg   while (1)
483510d565efSmrg     {
483610d565efSmrg       if (active_insn_p (after)
483710d565efSmrg 	  && !JUMP_TABLE_DATA_P (after) /* FIXME */
483810d565efSmrg 	  && !INSN_LOCATION (after))
483910d565efSmrg 	INSN_LOCATION (after) = loc;
484010d565efSmrg       if (after == last)
484110d565efSmrg 	break;
484210d565efSmrg       after = NEXT_INSN (after);
484310d565efSmrg     }
484410d565efSmrg   return last;
484510d565efSmrg }
484610d565efSmrg 
484710d565efSmrg /* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
484810d565efSmrg    into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
484910d565efSmrg    any DEBUG_INSNs.  */
485010d565efSmrg 
485110d565efSmrg static rtx_insn *
48520fc04c29Smrg emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns,
485310d565efSmrg 		    rtx_insn *(*make_raw) (rtx))
485410d565efSmrg {
485510d565efSmrg   rtx_insn *prev = after;
485610d565efSmrg 
485710d565efSmrg   if (skip_debug_insns)
485810d565efSmrg     while (DEBUG_INSN_P (prev))
485910d565efSmrg       prev = PREV_INSN (prev);
486010d565efSmrg 
486110d565efSmrg   if (INSN_P (prev))
486210d565efSmrg     return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
486310d565efSmrg 				      make_raw);
486410d565efSmrg   else
486510d565efSmrg     return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
486610d565efSmrg }
486710d565efSmrg 
486810d565efSmrg /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
486910d565efSmrg rtx_insn *
48700fc04c29Smrg emit_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
487110d565efSmrg {
487210d565efSmrg   return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
487310d565efSmrg }
487410d565efSmrg 
487510d565efSmrg /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
487610d565efSmrg rtx_insn *
48770fc04c29Smrg emit_insn_after (rtx pattern, rtx_insn *after)
487810d565efSmrg {
487910d565efSmrg   return emit_pattern_after (pattern, after, true, make_insn_raw);
488010d565efSmrg }
488110d565efSmrg 
488210d565efSmrg /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
488310d565efSmrg rtx_jump_insn *
48840fc04c29Smrg emit_jump_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
488510d565efSmrg {
488610d565efSmrg   return as_a <rtx_jump_insn *> (
488710d565efSmrg 	emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
488810d565efSmrg }
488910d565efSmrg 
489010d565efSmrg /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
489110d565efSmrg rtx_jump_insn *
48920fc04c29Smrg emit_jump_insn_after (rtx pattern, rtx_insn *after)
489310d565efSmrg {
489410d565efSmrg   return as_a <rtx_jump_insn *> (
489510d565efSmrg 	emit_pattern_after (pattern, after, true, make_jump_insn_raw));
489610d565efSmrg }
489710d565efSmrg 
489810d565efSmrg /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
489910d565efSmrg rtx_insn *
49000fc04c29Smrg emit_call_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
490110d565efSmrg {
490210d565efSmrg   return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
490310d565efSmrg }
490410d565efSmrg 
490510d565efSmrg /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
490610d565efSmrg rtx_insn *
49070fc04c29Smrg emit_call_insn_after (rtx pattern, rtx_insn *after)
490810d565efSmrg {
490910d565efSmrg   return emit_pattern_after (pattern, after, true, make_call_insn_raw);
491010d565efSmrg }
491110d565efSmrg 
491210d565efSmrg /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
491310d565efSmrg rtx_insn *
49140fc04c29Smrg emit_debug_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
491510d565efSmrg {
491610d565efSmrg   return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
491710d565efSmrg }
491810d565efSmrg 
491910d565efSmrg /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
492010d565efSmrg rtx_insn *
49210fc04c29Smrg emit_debug_insn_after (rtx pattern, rtx_insn *after)
492210d565efSmrg {
492310d565efSmrg   return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
492410d565efSmrg }
492510d565efSmrg 
492610d565efSmrg /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
492710d565efSmrg    MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
492810d565efSmrg    indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
492910d565efSmrg    CALL_INSN, etc.  */
493010d565efSmrg 
493110d565efSmrg static rtx_insn *
49320fc04c29Smrg emit_pattern_before_setloc (rtx pattern, rtx_insn *before, location_t loc,
49330fc04c29Smrg 			    bool insnp, rtx_insn *(*make_raw) (rtx))
493410d565efSmrg {
493510d565efSmrg   rtx_insn *first = PREV_INSN (before);
493610d565efSmrg   rtx_insn *last = emit_pattern_before_noloc (pattern, before,
49370fc04c29Smrg 					      insnp ? before : NULL,
493810d565efSmrg 					      NULL, make_raw);
493910d565efSmrg 
494010d565efSmrg   if (pattern == NULL_RTX || !loc)
494110d565efSmrg     return last;
494210d565efSmrg 
494310d565efSmrg   if (!first)
494410d565efSmrg     first = get_insns ();
494510d565efSmrg   else
494610d565efSmrg     first = NEXT_INSN (first);
494710d565efSmrg   while (1)
494810d565efSmrg     {
494910d565efSmrg       if (active_insn_p (first)
495010d565efSmrg 	  && !JUMP_TABLE_DATA_P (first) /* FIXME */
495110d565efSmrg 	  && !INSN_LOCATION (first))
495210d565efSmrg 	INSN_LOCATION (first) = loc;
495310d565efSmrg       if (first == last)
495410d565efSmrg 	break;
495510d565efSmrg       first = NEXT_INSN (first);
495610d565efSmrg     }
495710d565efSmrg   return last;
495810d565efSmrg }
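
/* A note on INSNP (an observation about the helpers here, hedged):
   when PATTERN is NULL_RTX, emit_pattern_before_noloc returns its LAST
   argument, so the INSN variants, which pass BEFORE as LAST, return
   BEFORE itself for an empty emission, while the JUMP/CALL/DEBUG
   variants return NULL.  */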
495910d565efSmrg 
496010d565efSmrg /* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
496110d565efSmrg    into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
496210d565efSmrg    before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
496310d565efSmrg    INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */
496410d565efSmrg 
496510d565efSmrg static rtx_insn *
49660fc04c29Smrg emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns,
496710d565efSmrg 		     bool insnp, rtx_insn *(*make_raw) (rtx))
496810d565efSmrg {
496910d565efSmrg   rtx_insn *next = before;
497010d565efSmrg 
497110d565efSmrg   if (skip_debug_insns)
497210d565efSmrg     while (DEBUG_INSN_P (next))
497310d565efSmrg       next = PREV_INSN (next);
497410d565efSmrg 
497510d565efSmrg   if (INSN_P (next))
497610d565efSmrg     return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
497710d565efSmrg 				       insnp, make_raw);
497810d565efSmrg   else
497910d565efSmrg     return emit_pattern_before_noloc (pattern, before,
49800fc04c29Smrg 				      insnp ? before : NULL,
498110d565efSmrg                                       NULL, make_raw);
498210d565efSmrg }
498310d565efSmrg 
498410d565efSmrg /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
498510d565efSmrg rtx_insn *
emit_insn_before_setloc(rtx pattern,rtx_insn * before,location_t loc)49860fc04c29Smrg emit_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
498710d565efSmrg {
498810d565efSmrg   return emit_pattern_before_setloc (pattern, before, loc, true,
498910d565efSmrg 				     make_insn_raw);
499010d565efSmrg }
499110d565efSmrg 
499210d565efSmrg /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
499310d565efSmrg rtx_insn *
emit_insn_before(rtx pattern,rtx_insn * before)49940fc04c29Smrg emit_insn_before (rtx pattern, rtx_insn *before)
499510d565efSmrg {
499610d565efSmrg   return emit_pattern_before (pattern, before, true, true, make_insn_raw);
499710d565efSmrg }
499810d565efSmrg 
499910d565efSmrg /* like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
500010d565efSmrg rtx_jump_insn *
emit_jump_insn_before_setloc(rtx pattern,rtx_insn * before,location_t loc)50010fc04c29Smrg emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
500210d565efSmrg {
500310d565efSmrg   return as_a <rtx_jump_insn *> (
500410d565efSmrg 	emit_pattern_before_setloc (pattern, before, loc, false,
500510d565efSmrg 				    make_jump_insn_raw));
500610d565efSmrg }
500710d565efSmrg 
500810d565efSmrg /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
500910d565efSmrg rtx_jump_insn *
emit_jump_insn_before(rtx pattern,rtx_insn * before)50100fc04c29Smrg emit_jump_insn_before (rtx pattern, rtx_insn *before)
501110d565efSmrg {
501210d565efSmrg   return as_a <rtx_jump_insn *> (
501310d565efSmrg 	emit_pattern_before (pattern, before, true, false,
501410d565efSmrg 			     make_jump_insn_raw));
501510d565efSmrg }
501610d565efSmrg 
501710d565efSmrg /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
501810d565efSmrg rtx_insn *
emit_call_insn_before_setloc(rtx pattern,rtx_insn * before,location_t loc)50190fc04c29Smrg emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
502010d565efSmrg {
502110d565efSmrg   return emit_pattern_before_setloc (pattern, before, loc, false,
502210d565efSmrg 				     make_call_insn_raw);
502310d565efSmrg }
502410d565efSmrg 
502510d565efSmrg /* Like emit_call_insn_before_noloc,
502610d565efSmrg    but set insn_location according to BEFORE.  */
502710d565efSmrg rtx_insn *
emit_call_insn_before(rtx pattern,rtx_insn * before)502810d565efSmrg emit_call_insn_before (rtx pattern, rtx_insn *before)
502910d565efSmrg {
503010d565efSmrg   return emit_pattern_before (pattern, before, true, false,
503110d565efSmrg 			      make_call_insn_raw);
503210d565efSmrg }
503310d565efSmrg 
503410d565efSmrg /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
503510d565efSmrg rtx_insn *
emit_debug_insn_before_setloc(rtx pattern,rtx_insn * before,location_t loc)50360fc04c29Smrg emit_debug_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
503710d565efSmrg {
503810d565efSmrg   return emit_pattern_before_setloc (pattern, before, loc, false,
503910d565efSmrg 				     make_debug_insn_raw);
504010d565efSmrg }
504110d565efSmrg 
504210d565efSmrg /* Like emit_debug_insn_before_noloc,
504310d565efSmrg    but set insn_location according to BEFORE.  */
504410d565efSmrg rtx_insn *
emit_debug_insn_before(rtx pattern,rtx_insn * before)504510d565efSmrg emit_debug_insn_before (rtx pattern, rtx_insn *before)
504610d565efSmrg {
504710d565efSmrg   return emit_pattern_before (pattern, before, false, false,
504810d565efSmrg 			      make_debug_insn_raw);
504910d565efSmrg }
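
/* Summary of the wrapper family above (PAT and INSN are hypothetical
   operands): each emit_<kind>_before variant either inherits a
   location from its neighborhood or takes an explicit one.

     emit_insn_before (pat, insn);              location taken from INSN
     emit_insn_before_setloc (pat, insn, loc);  explicit location LOC

   All of the non-debug variants pass SKIP_DEBUG_INSNS as true, so the
   location donor is the first non-debug insn at or before BEFORE; the
   debug emitters pass false and may inherit from a DEBUG_INSN.  */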

/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx_insn *
emit_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
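
/* Note: when X is already an insn -- or the head of a chain of insns,
   e.g. one captured with get_insns from a completed sequence -- the
   switch above splices every insn of the chain onto the end of the
   current list instead of wrapping X in a new INSN.  Only a bare
   pattern falls through to make_insn_raw.  */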

/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_debug_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_jump_insn (rtx x)
{
  rtx_insn *last = NULL;
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_call_insn (rtx x)
{
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
    case JUMP_TABLE_DATA:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}

/* Add the label LABEL to the end of the doubly-linked list.  */

rtx_code_label *
emit_label (rtx uncast_label)
{
  rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);

  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn (label);
  return label;
}
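
/* Usage sketch (illustrative only): a label is normally created with
   gen_label_rtx and only later linked into the chain at the point
   where control should land:

     rtx_code_label *l = gen_label_rtx ();
     ... emit the insns that branch to L ...
     emit_label (l);

   emit_label is what assigns the label's INSN_UID and splices it into
   the current insn list; until then the CODE_LABEL floats free, which
   is why the assert above insists on a zero UID.  */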

/* Make an insn of code JUMP_TABLE_DATA
   and add it to the end of the doubly-linked list.  */

rtx_jump_table_data *
emit_jump_table_data (rtx table)
{
  rtx_jump_table_data *jump_table_data =
    as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
  INSN_UID (jump_table_data) = cur_insn_uid++;
  PATTERN (jump_table_data) = table;
  BLOCK_FOR_INSN (jump_table_data) = NULL;
  add_insn (jump_table_data);
  return jump_table_data;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx_barrier *
emit_barrier (void)
{
  rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}

/* Emit a copy of note ORIG.  */

rtx_note *
emit_note_copy (rtx_note *orig)
{
  enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
  rtx_note *note = make_note_raw (kind);
  NOTE_DATA (note) = NOTE_DATA (orig);
  add_insn (note);
  return note;
}

/* Make an insn of code NOTE with kind KIND
   and add it to the end of the doubly-linked list.  */

rtx_note *
emit_note (enum insn_note kind)
{
  rtx_note *note = make_note_raw (kind);
  add_insn (note);
  return note;
}

/* Emit a clobber of lvalue X.  */

rtx_insn *
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx_insn *
gen_clobber (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Emit a use of rvalue X.  */

rtx_insn *
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx_insn *
gen_use (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
   Return the set in INSN that such notes describe, or NULL if the notes
   have no meaning for INSN.  */

rtx
set_for_reg_notes (rtx insn)
{
  rtx pat, reg;

  if (!INSN_P (insn))
    return NULL_RTX;

  pat = PATTERN (insn);
  if (GET_CODE (pat) == PARALLEL)
    {
      /* We do not use single_set because that ignores SETs of unused
	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
	 PARALLEL to have a single SET.  */
      if (multiple_sets (insn))
	return NULL_RTX;
      pat = XVECEXP (pat, 0, 0);
    }

  if (GET_CODE (pat) != SET)
    return NULL_RTX;

  reg = SET_DEST (pat);

  /* Notes apply to the contents of a STRICT_LOW_PART.  */
  if (GET_CODE (reg) == STRICT_LOW_PART
      || GET_CODE (reg) == ZERO_EXTRACT)
    reg = XEXP (reg, 0);

  /* Check that we have a register.  */
  if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
    return NULL_RTX;

  return pat;
}

/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* We need to support the REG_EQUAL on USE trick of find_reloads.  */
      if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
	return NULL_RTX;

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;

      /* Notes with side effects are dangerous.  Even if the side-effect
	 initially mirrors one in PATTERN (INSN), later optimizations
	 might alter the way that the final register value is calculated
	 and so move or alter the side-effect in some way.  The note would
	 then no longer be a valid substitution for SET_SRC.  */
      if (side_effects_p (datum))
	return NULL_RTX;
      break;

    default:
      break;
    }

  if (note)
    XEXP (note, 0) = datum;
  else
    {
      add_reg_note (insn, kind, datum);
      note = REG_NOTES (insn);
    }

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (as_a <rtx_insn *> (insn));
      break;
    default:
      break;
    }

  return note;
}
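
/* Illustration (hypothetical operands): after expanding `reg = x * 9'
   into a shift-and-add pair, a caller might record the simple
   equivalence on the last insn so later passes can substitute it:

     set_unique_reg_note (last, REG_EQUAL,
			  gen_rtx_MULT (mode, x, GEN_INT (9)));

   Because of the checks above, the note may be refused (ASM_OPERANDS
   datum, side effects, no usable SET), so callers must tolerate a
   NULL_RTX return.  */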

/* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */
rtx
set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
{
  rtx set = set_for_reg_notes (insn);

  if (set && SET_DEST (set) == dst)
    return set_unique_reg_note (insn, kind, datum);
  return NULL_RTX;
}

/* Emit the rtl pattern X as an appropriate kind of insn.  Also emit a
   following barrier if the instruction needs one and if ALLOW_BARRIER_P
   is true.

   If X is a label, it is simply added into the insn chain.  */

rtx_insn *
emit (rtx x, bool allow_barrier_p)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
	rtx_insn *insn = emit_jump_insn (x);
	if (allow_barrier_p
	    && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
	  return emit_barrier ();
	return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
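
/* Sketch (LABEL is a hypothetical CODE_LABEL): emitting an
   unconditional jump through this generic entry point,

     emit (gen_rtx_SET (pc_rtx, gen_rtx_LABEL_REF (VOIDmode, label)),
	   true);

   lets classify_insn recognize the pattern as a JUMP_INSN; with
   ALLOW_BARRIER_P true and the jump unconditional, the BARRIER that
   must follow it is emitted as well, and that barrier is what gets
   returned.  */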

/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc<sequence_stack> ();

  tem->next = get_current_sequence ()->next;
  tem->first = get_insns ();
  tem->last = get_last_insn ();
  get_current_sequence ()->next = tem;

  set_first_insn (0);
  set_last_insn (0);
}

/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx_insn *first)
{
  rtx_insn *last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}

/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx_insn *first, rtx_insn *last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}

/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *top;

  start_sequence ();

  top = get_topmost_sequence ();
  set_first_insn (top->first);
  set_last_insn (top->last);
}

/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *top;

  top = get_topmost_sequence ();
  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}

/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = get_current_sequence ()->next;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  get_current_sequence ()->next = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}
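
/* The canonical idiom for the sequence stack described above, shown
   with a hypothetical operand X (this is exactly the shape of gen_use
   and gen_clobber earlier in this file):

     rtx_insn *seq;
     start_sequence ();
     emit_use (x);
     seq = get_insns ();   <- must happen before end_sequence
     end_sequence ();

   SEQ now heads a detached insn chain that can be spliced in wherever
   it is needed, e.g. via emit_insn_before (seq, some_insn).  */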

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return get_current_sequence ()->next != 0;
}

/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}


/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
	 clobbers or clobbers of hard registers that originated as pseudos.
	 This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0))
	  && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 'p':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
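
/* Example of the intended division of labor (INSN hypothetical),
   following the note above copy_insn_1: copy the pattern first, then
   the notes, within one copy_insn "session":

     rtx pat  = copy_insn (PATTERN (insn));
     rtx note = copy_insn_1 (REG_NOTES (insn));

   The copy_insn call resets the SCRATCH/ASM_OPERANDS bookkeeping; the
   follow-up copy_insn_1 call then maps any SCRATCHes in the notes onto
   the same copies already used in PAT, keeping the two consistent.  */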

/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx_insn *
copy_delay_slot_insn (rtx_insn *insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = as_a <rtx_insn *> (copy_rtx (insn));
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}

/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (param_min_nondebug_insn_uid)
    cur_insn_uid = param_min_nondebug_insn_uid;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  get_current_sequence ()->next = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx
    = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Return the value of element I of CONST_VECTOR X as a wide_int.  */

wide_int
const_vector_int_elt (const_rtx x, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
  if (i < (unsigned int) XVECLEN (x, 0))
    return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);

  /* Identify the pattern that contains element I and work out the index of
     the last encoded element for that pattern.  */
  unsigned int encoded_nelts = const_vector_encoded_nelts (x);
  unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
  unsigned int count = i / npatterns;
  unsigned int pattern = i % npatterns;
  unsigned int final_i = encoded_nelts - npatterns + pattern;

  /* If there are no steps, the final encoded value is the right one.  */
  if (!CONST_VECTOR_STEPPED_P (x))
    return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);

  /* Otherwise work out the value from the last two encoded elements.  */
  rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
  rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
  wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
			   rtx_mode_t (v1, elt_mode));
  return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
}
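
/* Worked example of the stepped decoding above, assuming one pattern
   (npatterns == 1) encoded as { 1, 3, 5 }: for I == 6 we get
   COUNT == 6, FINAL_I == 2, V1 == 3, V2 == 5 and DIFF == 2, so the
   result is 5 + (6 - 2) * 2 == 13 -- element 6 of the implied series
   1, 3, 5, 7, 9, 11, 13, ...  */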

/* Return the value of element I of CONST_VECTOR X.  */

rtx
const_vector_elt (const_rtx x, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  if (i < (unsigned int) XVECLEN (x, 0))
    return CONST_VECTOR_ENCODED_ELT (x, i);

  /* If there are no steps, the final encoded value is the right one.  */
  if (!CONST_VECTOR_STEPPED_P (x))
    {
      /* Identify the pattern that contains element I and work out the index of
	 the last encoded element for that pattern.  */
      unsigned int encoded_nelts = const_vector_encoded_nelts (x);
      unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
      unsigned int pattern = i % npatterns;
      unsigned int final_i = encoded_nelts - npatterns + pattern;
      return CONST_VECTOR_ENCODED_ELT (x, final_i);
    }

  /* Otherwise work out the value from the last two encoded elements.  */
  return immed_wide_int_const (const_vector_int_elt (x, i),
			       GET_MODE_INNER (GET_MODE (x)));
}

/* Return true if X is a valid element for a CONST_VECTOR of the given
   mode.  */

bool
valid_for_const_vector_p (machine_mode, rtx x)
{
  return (CONST_SCALAR_INT_P (x)
	  || CONST_POLY_INT_P (x)
	  || CONST_DOUBLE_AS_FLOAT_P (x)
	  || CONST_FIXED_P (x));
}

/* Generate a vector constant of mode MODE in which every element has
   value ELT.  */

rtx
gen_const_vec_duplicate (machine_mode mode, rtx elt)
{
  rtx_vector_builder builder (mode, 1, 1);
  builder.quick_push (elt);
  return builder.build ();
}

/* Return a vector rtx of mode MODE in which every element has value X.
   The result will be a constant if X is constant.  */

rtx
gen_vec_duplicate (machine_mode mode, rtx x)
{
  if (valid_for_const_vector_p (mode, x))
    return gen_const_vec_duplicate (mode, x);
  return gen_rtx_VEC_DUPLICATE (mode, x);
}
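
/* Sketch (V4SImode chosen arbitrarily): gen_vec_duplicate (V4SImode,
   const1_rtx) yields the CONST_VECTOR {1, 1, 1, 1}, encoded as a
   single one-element pattern, whereas duplicating a pseudo register
   yields a VEC_DUPLICATE rtx for the backend to match instead.  */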

/* A subroutine of const_vec_series_p that handles the case in which:

     (GET_CODE (X) == CONST_VECTOR
      && CONST_VECTOR_NPATTERNS (X) == 1
      && !CONST_VECTOR_DUPLICATE_P (X))

   is known to hold.  */

bool
const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
{
  /* Stepped sequences are only defined for integers, to avoid specifying
     rounding behavior.  */
  if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
    return false;

  /* A non-duplicated vector with two elements can always be seen as a
     series with a nonzero step.  Longer vectors must have a stepped
     encoding.  */
  if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
      && !CONST_VECTOR_STEPPED_P (x))
    return false;

  /* Calculate the step between the first and second elements.  */
  scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
  rtx base = CONST_VECTOR_ELT (x, 0);
  rtx step = simplify_binary_operation (MINUS, inner,
					CONST_VECTOR_ENCODED_ELT (x, 1), base);
  if (rtx_equal_p (step, CONST0_RTX (inner)))
    return false;

  /* If we have a stepped encoding, check that the step between the
     second and third elements is the same as STEP.  */
  if (CONST_VECTOR_STEPPED_P (x))
    {
      rtx diff = simplify_binary_operation (MINUS, inner,
					    CONST_VECTOR_ENCODED_ELT (x, 2),
					    CONST_VECTOR_ENCODED_ELT (x, 1));
      if (!rtx_equal_p (step, diff))
	return false;
    }

  *base_out = base;
  *step_out = step;
  return true;
}
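
/* Example of the predicate above (integer elements assumed): a stepped
   single-pattern encoding { 0, 2, 4 } passes -- BASE 0, STEP 2, and
   the second difference (4 - 2) matches STEP -- whereas { 0, 2, 5 }
   fails the final rtx_equal_p check and is not a series.  */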

/* Generate a vector constant of mode MODE in which element I has
   the value BASE + I * STEP.  */

rtx
gen_const_vec_series (machine_mode mode, rtx base, rtx step)
{
  gcc_assert (valid_for_const_vector_p (mode, base)
	      && valid_for_const_vector_p (mode, step));

  rtx_vector_builder builder (mode, 1, 3);
  builder.quick_push (base);
  for (int i = 1; i < 3; ++i)
    builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
					     builder[i - 1], step));
  return builder.build ();
}

/* Generate a vector of mode MODE in which element I has the value
   BASE + I * STEP.  The result will be a constant if BASE and STEP
   are both constants.  */

rtx
gen_vec_series (machine_mode mode, rtx base, rtx step)
{
  if (step == const0_rtx)
    return gen_vec_duplicate (mode, base);
  if (valid_for_const_vector_p (mode, base)
      && valid_for_const_vector_p (mode, step))
    return gen_const_vec_series (mode, base, step);
  return gen_rtx_VEC_SERIES (mode, base, step);
}
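
/* Sketch (V4SImode is just an example mode):

     gen_const_vec_series (V4SImode, const0_rtx, const2_rtx)

   builds {0, 2, 4, 6} from the three encoded elements {0, 2, 4}; with
   a non-constant BASE or STEP, gen_vec_series falls back to a
   VEC_SERIES rtx for the backend to expand.  */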

/* Generate a new vector constant for mode MODE and constant value
   CONSTANT.  */

static rtx
gen_const_vector (machine_mode mode, int constant)
{
  machine_mode inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  rtx el = const_tiny_rtx[constant][(int) inner];
  gcc_assert (el);

  return gen_const_vec_duplicate (mode, el);
}

/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
   all elements are zero, and the one vector when all elements are one.  */
rtx
gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
{
  gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (rtvec_all_equal_p (v))
    return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));

  unsigned int nunits = GET_NUM_ELEM (v);
  rtx_vector_builder builder (mode, nunits, 1);
  for (unsigned int i = 0; i < nunits; ++i)
    builder.quick_push (RTVEC_ELT (v, i));
  return builder.build (v);
}

/* Initialize global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  reg_attrs_htab->empty ();

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  pic_offset_table_rtx = NULL_RTX;
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode && mode != VOIDmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }

  split_branch_probability = profile_probability::uninitialized ();
}

/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();

      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && !opt_byte_mode.exists ())
	opt_byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && !opt_word_mode.exists ())
	opt_word_mode = mode;
    }

  byte_mode = opt_byte_mode.require ();
  word_mode = opt_word_mode.require ();
  ptr_mode = as_a <scalar_int_mode>
    (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
}

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  scalar_float_mode double_mode;
  opt_scalar_mode smode_iter;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  if (NUM_POLY_INT_COEFFS > 1)
    const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Process stack-limiting command-line options.  */
  if (opt_fstack_limit_symbol_arg != NULL)
    stack_limit_rtx
      = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
  if (opt_fstack_limit_register_no >= 0)
    stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  /* For BImode, 1 and -1 are unsigned and signed interpretations
     of the same value.  */
  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  const_tiny_rtx[1][(int) BImode] = const_true_rtx;
  const_tiny_rtx[3][(int) BImode] = const_true_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  /* As for BImode, "all 1" and "all -1" are unsigned and signed
     interpretations of the same value.  */
  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
      const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
630310d565efSmrg     }
630410d565efSmrg 
6305c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
630610d565efSmrg     {
630710d565efSmrg       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
630810d565efSmrg       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
630910d565efSmrg     }
631010d565efSmrg 
6311c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
631210d565efSmrg     {
6313c7a68eb7Smrg       scalar_mode smode = smode_iter.require ();
6314c7a68eb7Smrg       FCONST0 (smode).data.high = 0;
6315c7a68eb7Smrg       FCONST0 (smode).data.low = 0;
6316c7a68eb7Smrg       FCONST0 (smode).mode = smode;
6317c7a68eb7Smrg       const_tiny_rtx[0][(int) smode]
6318c7a68eb7Smrg 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
631910d565efSmrg     }
632010d565efSmrg 
6321c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
632210d565efSmrg     {
6323c7a68eb7Smrg       scalar_mode smode = smode_iter.require ();
6324c7a68eb7Smrg       FCONST0 (smode).data.high = 0;
6325c7a68eb7Smrg       FCONST0 (smode).data.low = 0;
6326c7a68eb7Smrg       FCONST0 (smode).mode = smode;
6327c7a68eb7Smrg       const_tiny_rtx[0][(int) smode]
6328c7a68eb7Smrg 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
632910d565efSmrg     }
633010d565efSmrg 
6331c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
633210d565efSmrg     {
6333c7a68eb7Smrg       scalar_mode smode = smode_iter.require ();
6334c7a68eb7Smrg       FCONST0 (smode).data.high = 0;
6335c7a68eb7Smrg       FCONST0 (smode).data.low = 0;
6336c7a68eb7Smrg       FCONST0 (smode).mode = smode;
6337c7a68eb7Smrg       const_tiny_rtx[0][(int) smode]
6338c7a68eb7Smrg 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
633910d565efSmrg 
634010d565efSmrg       /* We store the value 1.  */
6341c7a68eb7Smrg       FCONST1 (smode).data.high = 0;
6342c7a68eb7Smrg       FCONST1 (smode).data.low = 0;
6343c7a68eb7Smrg       FCONST1 (smode).mode = smode;
6344c7a68eb7Smrg       FCONST1 (smode).data
6345c7a68eb7Smrg 	= double_int_one.lshift (GET_MODE_FBIT (smode),
634610d565efSmrg 				 HOST_BITS_PER_DOUBLE_INT,
6347c7a68eb7Smrg 				 SIGNED_FIXED_POINT_MODE_P (smode));
6348c7a68eb7Smrg       const_tiny_rtx[1][(int) smode]
6349c7a68eb7Smrg 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
635010d565efSmrg     }
635110d565efSmrg 
6352c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
635310d565efSmrg     {
6354c7a68eb7Smrg       scalar_mode smode = smode_iter.require ();
6355c7a68eb7Smrg       FCONST0 (smode).data.high = 0;
6356c7a68eb7Smrg       FCONST0 (smode).data.low = 0;
6357c7a68eb7Smrg       FCONST0 (smode).mode = smode;
6358c7a68eb7Smrg       const_tiny_rtx[0][(int) smode]
6359c7a68eb7Smrg 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
636010d565efSmrg 
636110d565efSmrg       /* We store the value 1.  */
6362c7a68eb7Smrg       FCONST1 (smode).data.high = 0;
6363c7a68eb7Smrg       FCONST1 (smode).data.low = 0;
6364c7a68eb7Smrg       FCONST1 (smode).mode = smode;
6365c7a68eb7Smrg       FCONST1 (smode).data
6366c7a68eb7Smrg 	= double_int_one.lshift (GET_MODE_FBIT (smode),
636710d565efSmrg 				 HOST_BITS_PER_DOUBLE_INT,
6368c7a68eb7Smrg 				 SIGNED_FIXED_POINT_MODE_P (smode));
6369c7a68eb7Smrg       const_tiny_rtx[1][(int) smode]
6370c7a68eb7Smrg 	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
637110d565efSmrg     }
637210d565efSmrg 
6373c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
637410d565efSmrg     {
637510d565efSmrg       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
637610d565efSmrg     }
637710d565efSmrg 
6378c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
637910d565efSmrg     {
638010d565efSmrg       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
638110d565efSmrg     }
638210d565efSmrg 
6383c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
638410d565efSmrg     {
638510d565efSmrg       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
638610d565efSmrg       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
638710d565efSmrg     }
638810d565efSmrg 
6389c7a68eb7Smrg   FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
639010d565efSmrg     {
639110d565efSmrg       const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
639210d565efSmrg       const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
639310d565efSmrg     }
639410d565efSmrg 
639510d565efSmrg   for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
639610d565efSmrg     if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
639710d565efSmrg       const_tiny_rtx[0][i] = const0_rtx;
639810d565efSmrg 
639910d565efSmrg   pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
640010d565efSmrg   ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
640110d565efSmrg   simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
640210d565efSmrg   cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
640310d565efSmrg   invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
640410d565efSmrg 				   /*prev_insn=*/NULL,
640510d565efSmrg 				   /*next_insn=*/NULL,
640610d565efSmrg 				   /*bb=*/NULL,
640710d565efSmrg 				   /*pattern=*/NULL_RTX,
640810d565efSmrg 				   /*location=*/-1,
640910d565efSmrg 				   CODE_FOR_nothing,
641010d565efSmrg 				   /*reg_notes=*/NULL_RTX);
641110d565efSmrg }
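/* Editorial note: the const_tiny_rtx table filled in above backs the
   CONST0_RTX/CONST1_RTX/CONST2_RTX accessors from rtl.h, e.g.
   CONST0_RTX (mode) expands to const_tiny_rtx[0][(int) (mode)].
   A minimal usage sketch:

     rtx zero = CONST0_RTX (DFmode);   -- shared 0.0 CONST_DOUBLE
     rtx one  = CONST1_RTX (SImode);   -- shared (const_int 1)

   Both return objects created once by init_emit_once, so no allocation
   happens at the use site.  */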
641210d565efSmrg 
641310d565efSmrg /* Produce exact duplicate of insn INSN after AFTER.
641410d565efSmrg    Take care to update libcall regions if present.  */
641510d565efSmrg 
641610d565efSmrg rtx_insn *
641710d565efSmrg emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
641810d565efSmrg {
641910d565efSmrg   rtx_insn *new_rtx;
642010d565efSmrg   rtx link;
642110d565efSmrg 
642210d565efSmrg   switch (GET_CODE (insn))
642310d565efSmrg     {
642410d565efSmrg     case INSN:
642510d565efSmrg       new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
642610d565efSmrg       break;
642710d565efSmrg 
642810d565efSmrg     case JUMP_INSN:
642910d565efSmrg       new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
643010d565efSmrg       CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
643110d565efSmrg       break;
643210d565efSmrg 
643310d565efSmrg     case DEBUG_INSN:
643410d565efSmrg       new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
643510d565efSmrg       break;
643610d565efSmrg 
643710d565efSmrg     case CALL_INSN:
643810d565efSmrg       new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
643910d565efSmrg       if (CALL_INSN_FUNCTION_USAGE (insn))
644010d565efSmrg 	CALL_INSN_FUNCTION_USAGE (new_rtx)
644110d565efSmrg 	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
644210d565efSmrg       SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
644310d565efSmrg       RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
644410d565efSmrg       RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
644510d565efSmrg       RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
644610d565efSmrg 	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
644710d565efSmrg       break;
644810d565efSmrg 
644910d565efSmrg     default:
645010d565efSmrg       gcc_unreachable ();
645110d565efSmrg     }
645210d565efSmrg 
645310d565efSmrg   /* Update LABEL_NUSES.  */
645410d565efSmrg   mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
645510d565efSmrg 
645610d565efSmrg   INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
645710d565efSmrg 
645810d565efSmrg   /* If the old insn is frame related, then so is the new one.  This is
645910d565efSmrg      primarily needed for IA-64 unwind info which marks epilogue insns,
646010d565efSmrg      which may be duplicated by the basic block reordering code.  */
646110d565efSmrg   RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
646210d565efSmrg 
646310d565efSmrg   /* Locate the end of existing REG_NOTES in NEW_RTX.  */
646410d565efSmrg   rtx *ptail = &REG_NOTES (new_rtx);
646510d565efSmrg   while (*ptail != NULL_RTX)
646610d565efSmrg     ptail = &XEXP (*ptail, 1);
646710d565efSmrg 
646810d565efSmrg   /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
646910d565efSmrg      will make them.  REG_LABEL_TARGETs are created there too, but are
647010d565efSmrg      supposed to be sticky, so we copy them.  */
647110d565efSmrg   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
647210d565efSmrg     if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
647310d565efSmrg       {
647410d565efSmrg 	*ptail = duplicate_reg_note (link);
647510d565efSmrg 	ptail = &XEXP (*ptail, 1);
647610d565efSmrg       }
647710d565efSmrg 
647810d565efSmrg   INSN_CODE (new_rtx) = INSN_CODE (insn);
647910d565efSmrg   return new_rtx;
648010d565efSmrg }
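/* Editorial sketch: a typical caller duplicates an existing insn when
   replicating code, e.g. during basic block reordering (insn and after
   are assumed to be valid insns in the current chain):

     rtx_insn *copy = emit_copy_of_insn_after (insn, after);

   The copy inherits the location, the frame-related flag and most
   REG_NOTES of the original, as set up above.  */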
648110d565efSmrg 
648210d565efSmrg static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
648310d565efSmrg rtx
648410d565efSmrg gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
648510d565efSmrg {
648610d565efSmrg   if (hard_reg_clobbers[mode][regno])
648710d565efSmrg     return hard_reg_clobbers[mode][regno];
648810d565efSmrg   else
648910d565efSmrg     return (hard_reg_clobbers[mode][regno] =
649010d565efSmrg 	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
649110d565efSmrg }
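/* Editorial note: gen_hard_reg_clobber memoizes one CLOBBER rtx per
   (mode, regno) pair, so repeated requests share a single object:

     rtx c1 = gen_hard_reg_clobber (word_mode, 0);
     rtx c2 = gen_hard_reg_clobber (word_mode, 0);
     gcc_checking_assert (c1 == c2);

   Because the cache is GTY((deletable)), the garbage collector may
   clear it; entries are then simply recreated on demand.  */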
649210d565efSmrg 
649310d565efSmrg location_t prologue_location;
649410d565efSmrg location_t epilogue_location;
649510d565efSmrg 
649610d565efSmrg /* Hold the current and last location information, so that the data
649710d565efSmrg    structures are built lazily, only when instructions at a given
649810d565efSmrg    location are actually needed.  */
649910d565efSmrg static location_t curr_location;
650010d565efSmrg 
650110d565efSmrg /* Allocate insn location datastructure.  */
650210d565efSmrg void
650310d565efSmrg insn_locations_init (void)
650410d565efSmrg {
650510d565efSmrg   prologue_location = epilogue_location = 0;
650610d565efSmrg   curr_location = UNKNOWN_LOCATION;
650710d565efSmrg }
650810d565efSmrg 
650910d565efSmrg /* At the end of the emit stage, clear the current location.  */
651010d565efSmrg void
651110d565efSmrg insn_locations_finalize (void)
651210d565efSmrg {
651310d565efSmrg   epilogue_location = curr_location;
651410d565efSmrg   curr_location = UNKNOWN_LOCATION;
651510d565efSmrg }
651610d565efSmrg 
651710d565efSmrg /* Set current location.  */
651810d565efSmrg void
651910d565efSmrg set_curr_insn_location (location_t location)
652010d565efSmrg {
652110d565efSmrg   curr_location = location;
652210d565efSmrg }
652310d565efSmrg 
652410d565efSmrg /* Get current location.  */
652510d565efSmrg location_t
652610d565efSmrg curr_insn_location (void)
652710d565efSmrg {
652810d565efSmrg   return curr_location;
652910d565efSmrg }
653010d565efSmrg 
6531*ec02198aSmrg /* Set the location of the insn chain starting at INSN to LOC.  */
6532*ec02198aSmrg void
6533*ec02198aSmrg set_insn_locations (rtx_insn *insn, location_t loc)
6534*ec02198aSmrg {
6535*ec02198aSmrg   while (insn)
6536*ec02198aSmrg     {
6537*ec02198aSmrg       if (INSN_P (insn))
6538*ec02198aSmrg 	INSN_LOCATION (insn) = loc;
6539*ec02198aSmrg       insn = NEXT_INSN (insn);
6540*ec02198aSmrg     }
6541*ec02198aSmrg }
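/* Editorial sketch: passes use this to stamp a freshly emitted
   sequence with one location, e.g. (seq is an assumed insn chain):

     set_insn_locations (seq, prologue_location);

   Only real insns (those satisfying INSN_P) are touched; notes and
   barriers are left alone.  */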
6542*ec02198aSmrg 
654310d565efSmrg /* Return the lexical scope block INSN belongs to.  */
654410d565efSmrg tree
654510d565efSmrg insn_scope (const rtx_insn *insn)
654610d565efSmrg {
654710d565efSmrg   return LOCATION_BLOCK (INSN_LOCATION (insn));
654810d565efSmrg }
654910d565efSmrg 
655010d565efSmrg /* Return line number of the statement that produced this insn.  */
655110d565efSmrg int
655210d565efSmrg insn_line (const rtx_insn *insn)
655310d565efSmrg {
655410d565efSmrg   return LOCATION_LINE (INSN_LOCATION (insn));
655510d565efSmrg }
655610d565efSmrg 
655710d565efSmrg /* Return source file of the statement that produced this insn.  */
655810d565efSmrg const char *
655910d565efSmrg insn_file (const rtx_insn *insn)
656010d565efSmrg {
656110d565efSmrg   return LOCATION_FILE (INSN_LOCATION (insn));
656210d565efSmrg }
656310d565efSmrg 
656410d565efSmrg /* Return expanded location of the statement that produced this insn.  */
656510d565efSmrg expanded_location
656610d565efSmrg insn_location (const rtx_insn *insn)
656710d565efSmrg {
656810d565efSmrg   return expand_location (INSN_LOCATION (insn));
656910d565efSmrg }
657010d565efSmrg 
657110d565efSmrg /* Return true if memory model MODEL requires a pre-operation (release-style)
657210d565efSmrg    barrier or a post-operation (acquire-style) barrier.  While not universal,
657310d565efSmrg    this function matches the behavior of several targets.  */
657410d565efSmrg 
657510d565efSmrg bool
657610d565efSmrg need_atomic_barrier_p (enum memmodel model, bool pre)
657710d565efSmrg {
657810d565efSmrg   switch (model & MEMMODEL_BASE_MASK)
657910d565efSmrg     {
658010d565efSmrg     case MEMMODEL_RELAXED:
658110d565efSmrg     case MEMMODEL_CONSUME:
658210d565efSmrg       return false;
658310d565efSmrg     case MEMMODEL_RELEASE:
658410d565efSmrg       return pre;
658510d565efSmrg     case MEMMODEL_ACQUIRE:
658610d565efSmrg       return !pre;
658710d565efSmrg     case MEMMODEL_ACQ_REL:
658810d565efSmrg     case MEMMODEL_SEQ_CST:
658910d565efSmrg       return true;
659010d565efSmrg     default:
659110d565efSmrg       gcc_unreachable ();
659210d565efSmrg     }
659310d565efSmrg }
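/* Editorial sketch of how a target expander might consult this
   predicate; gen_memory_barrier stands in for a target-specific
   barrier pattern and is an assumption here:

     if (need_atomic_barrier_p (model, true))
       emit_insn (gen_memory_barrier ());    -- release-style, before
     emit_move_insn (mem, val);              -- the access itself
     if (need_atomic_barrier_p (model, false))
       emit_insn (gen_memory_barrier ());    -- acquire-style, after

   MEMMODEL_SEQ_CST thus gets both barriers, MEMMODEL_RELAXED neither.  */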
659410d565efSmrg 
6595c7a68eb7Smrg /* Return a constant shift amount for shifting a value of mode MODE
6596c7a68eb7Smrg    by VALUE bits.  */
6597c7a68eb7Smrg 
6598c7a68eb7Smrg rtx
6599c7a68eb7Smrg gen_int_shift_amount (machine_mode, poly_int64 value)
6600c7a68eb7Smrg {
6601c7a68eb7Smrg   /* Use a 64-bit mode to avoid any truncation.
6602c7a68eb7Smrg 
6603c7a68eb7Smrg      ??? Perhaps this should be automatically derived from the .md files
6604c7a68eb7Smrg      instead, or perhaps have a target hook.  */
6605c7a68eb7Smrg   scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
6606c7a68eb7Smrg 				? DImode
6607c7a68eb7Smrg 				: int_mode_for_size (64, 0).require ());
6608c7a68eb7Smrg   return gen_int_mode (value, shift_mode);
6609c7a68eb7Smrg }
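/* Editorial example: with 8-bit units this yields a DImode CONST_INT,
   so even a shift amount that would overflow the shifted mode is
   representable without truncation:

     rtx amt = gen_int_shift_amount (QImode, 255);

   The machine_mode parameter is currently unnamed and unused; see the
   ??? comment above.  */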
6610c7a68eb7Smrg 
661110d565efSmrg /* Initialize fields of rtl_data related to stack alignment.  */
661210d565efSmrg 
661310d565efSmrg void
661410d565efSmrg rtl_data::init_stack_alignment ()
661510d565efSmrg {
661610d565efSmrg   stack_alignment_needed = STACK_BOUNDARY;
661710d565efSmrg   max_used_stack_slot_alignment = STACK_BOUNDARY;
661810d565efSmrg   stack_alignment_estimated = 0;
661910d565efSmrg   preferred_stack_boundary = STACK_BOUNDARY;
662010d565efSmrg }
662110d565efSmrg 
662210d565efSmrg 
662310d565efSmrg #include "gt-emit-rtl.h"