/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"
#include "df.h"
#include "params.h"
#include "target.h"
#include "tree-flow.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */
/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   not able to deal with a length attribute nested in a top-level
   structure.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
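
/* Note that this sharing makes pointer equality a valid test for small
   constants: GEN_INT (0) always returns the same object as const0_rtx,
   so "x == const0_rtx" suffices to recognize a zero CONST_INT.  */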

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define last_location (crtl->emit.x_last_location)
#define first_label_num (crtl->emit.x_first_label_num)

static rtx make_call_insn_raw (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being split by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;

/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ (p->addrspace * 4000)
	  ^ ((p->offset_known_p ? p->offset : 0) * 50000)
	  ^ ((p->size_known_p ? p->size : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Return true if the given memory attributes are equal.  */

static bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  return mem_attrs_eq_p ((const mem_attrs *) x, (const mem_attrs *) y);
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  void **slot;

  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  slot = htab_find_slot (mem_attrs_htab, attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, attrs, sizeof (mem_attrs));
    }

  MEM_ATTRS (mem) = (mem_attrs *) *slot;
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a REG with decl DECL and offset OFFSET.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}


#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn. */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif


/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
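
/* For instance, gen_int_mode first truncates and sign-extends C from the
   width of MODE, so gen_int_mode (0xff, QImode) yields (const_int -1)
   rather than (const_int 255) on a target with an 8-bit QImode; a bare
   GEN_INT would not canonicalize the value.  */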

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = shwi_to_double_int (INTVAL (cst));
  else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}


/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the
	value of the integer fits into HOST_WIDE_INT anyway (i.e., i1
	consists only of copies of the sign bit, and the signs of i0 and
	i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
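
/* As an illustration (assuming a 64-bit HOST_WIDE_INT host and a target
   with a 128-bit TImode): immed_double_const (-1, -1, TImode) hits case 2
   above and returns the shared (const_int -1), while a value whose high
   word is not a sign extension of the low word gets a VOIDmode
   CONST_DOUBLE carrying both words.  */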

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be a lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
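
/* To make the lowpart rule concrete (assuming little-endian layout and
   64-bit words): for a DImode pseudo, (subreg:HI (reg:DI) 0) is valid,
   but (subreg:HI (reg:DI) 2) is not, even though the offset is aligned;
   a subreg may only name the lowpart of a word, not an arbitrary
   bit-field.  */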

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG; otherwise generate a paradoxical
   SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}


/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
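
/* For example (assuming a fully big-endian target with 4-byte words):
   the SImode lowpart of a DImode value starts 4 bytes in, so
   byte_lowpart_offset (SImode, DImode) is 4, while the paradoxical case
   byte_lowpart_offset (DImode, SImode) is -4.  On a little-endian
   target both offsets are 0.  */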

/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				   REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values; rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
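
/* A couple of concrete cases: gen_lowpart_common (QImode, GEN_INT (0x1234))
   returns (const_int 0x34), and gen_lowpart_common (SImode,
   (sign_extend:DI (reg:SI r))) simply returns (reg:SI r).  */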

rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of the EXP operand in case EXP
   can be a VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
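
/* E.g., for SImode inside DImode with 4-byte words, the lowpart offset
   is 0 when both WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN are 0, and 4 when
   both are 1, matching where the least significant word lives in
   memory.  */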

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
	  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.
 */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
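
/* For example, for a DImode pseudo on a 32-bit little-endian target,
   operand_subword (op, 1, 0, DImode) produces (subreg:SI (reg:DI) 4),
   the high-order word of the register.  */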

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}

/* Return 1 if the two MEM_EXPRs can be considered equal,
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	 || (MAX (MEM_ALIGN (mem),
	          MAX (align, get_object_alignment (MEM_EXPR (mem))))
	     < align))
       return -1;
     else
       return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do a suboptimal job for COMPONENT_REFs: even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !host_integerp (byte_offset, 1)
	      || !host_integerp (bit_offset, 1))
	    return -1;

	  offset += tree_low_cst (byte_offset, 1);
	  offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}

/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type; in that case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
	 already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
	 the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
         if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
	attrs.align = defattrs->align;
      else
	attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
	 e.g. if the type carries an alignment attribute.  Should we be
	 able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     if this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  else if (TREE_CODE (t) == MEM_REF)
    {
      tree op0 = TREE_OPERAND (t, 0);
      if (TREE_CODE (op0) == ADDR_EXPR
	  && (DECL_P (TREE_OPERAND (op0, 0))
	      || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
	{
	  if (DECL_P (TREE_OPERAND (op0, 0)))
	    attrs.align = DECL_ALIGN (TREE_OPERAND (op0, 0));
	  else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
	    {
	      attrs.align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
#ifdef CONSTANT_ALIGNMENT
	      attrs.align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0),
						attrs.align);
#endif
	    }
	  if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
	    {
	      unsigned HOST_WIDE_INT ioff
		= TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
	      unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
	      attrs.align = MIN (aoff, attrs.align);
	    }
	}
      else
	/* ??? This isn't fully correct, we can't set the alignment from the
	   type in all cases.  */
	attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
    }

  else if (TREE_CODE (t) == TARGET_MEM_REF)
    /* ??? This isn't fully correct, we can't set the alignment from the
       type in all cases.  */
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    {
      attrs.size_known_p = true;
      attrs.size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
    }

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;
      bool align_computed = false;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
	{
	  if (DECL_P (base)
	      && TREE_READONLY (base)
	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
	      && !TREE_THIS_VOLATILE (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Mark static const strings readonly as well.  */
	  if (TREE_CODE (base) == STRING_CST
	      && TREE_READONLY (base)
	      && TREE_STATIC (base))
	    MEM_READONLY_P (ref) = 1;

	  if (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF)
	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
								      0))));
	  else
	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
	}
      else
	as = TYPE_ADDR_SPACE (type);

1709       /* If this expression uses its parent's alias set, mark it such
1710 	 that we won't change it.  */
1711       if (component_uses_parent_alias_set (t))
1712 	MEM_KEEP_ALIAS_SET_P (ref) = 1;
1713 
1714       /* If this is a decl, set the attributes of the MEM from it.  */
1715       if (DECL_P (t))
1716 	{
1717 	  attrs.expr = t;
1718 	  attrs.offset_known_p = true;
1719 	  attrs.offset = 0;
1720 	  apply_bitpos = bitpos;
1721 	  if (DECL_SIZE_UNIT (t) && host_integerp (DECL_SIZE_UNIT (t), 1))
1722 	    {
1723 	      attrs.size_known_p = true;
1724 	      attrs.size = tree_low_cst (DECL_SIZE_UNIT (t), 1);
1725 	    }
1726 	  else
1727 	    attrs.size_known_p = false;
1728 	  attrs.align = DECL_ALIGN (t);
1729 	  align_computed = true;
1730 	}
1731 
1732       /* If this is a constant, we know the alignment.  */
1733       else if (CONSTANT_CLASS_P (t))
1734 	{
1735 	  attrs.align = TYPE_ALIGN (type);
1736 #ifdef CONSTANT_ALIGNMENT
1737 	  attrs.align = CONSTANT_ALIGNMENT (t, attrs.align);
1738 #endif
1739 	  align_computed = true;
1740 	}
1741 
1742       /* If this is a field reference and not a bit-field, record it.  */
1743       /* ??? There is some information that can be gleaned from bit-fields,
1744 	 such as the word offset in the structure that might be modified.
1745 	 But skip it for now.  */
1746       else if (TREE_CODE (t) == COMPONENT_REF
1747 	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1748 	{
1749 	  attrs.expr = t;
1750 	  attrs.offset_known_p = true;
1751 	  attrs.offset = 0;
1752 	  apply_bitpos = bitpos;
1753 	  /* ??? Any reason the field size would be different than
1754 	     the size we got from the type?  */
1755 	}
1756 
1757       /* If this is an array reference, look for an outer field reference.  */
1758       else if (TREE_CODE (t) == ARRAY_REF)
1759 	{
1760 	  tree off_tree = size_zero_node;
1761 	  /* We can't modify t, because we use it at the end of the
1762 	     function.  */
1763 	  tree t2 = t;
1764 
1765 	  do
1766 	    {
1767 	      tree index = TREE_OPERAND (t2, 1);
1768 	      tree low_bound = array_ref_low_bound (t2);
1769 	      tree unit_size = array_ref_element_size (t2);
1770 
1771 	      /* We assume all arrays have sizes that are a multiple of a byte.
1772 		 First subtract the lower bound, if any, in the type of the
1773 		 index, then convert to sizetype and multiply by the size of
1774 		 the array element.  */
1775 	      if (! integer_zerop (low_bound))
1776 		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1777 				     index, low_bound);
1778 
1779 	      off_tree = size_binop (PLUS_EXPR,
1780 				     size_binop (MULT_EXPR,
1781 						 fold_convert (sizetype,
1782 							       index),
1783 						 unit_size),
1784 				     off_tree);
1785 	      t2 = TREE_OPERAND (t2, 0);
1786 	    }
1787 	  while (TREE_CODE (t2) == ARRAY_REF);
1788 
1789 	  if (DECL_P (t2))
1790 	    {
1791 	      attrs.expr = t2;
1792 	      attrs.offset_known_p = false;
1793 	      if (host_integerp (off_tree, 1))
1794 		{
1795 		  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1796 		  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1797 		  attrs.align = DECL_ALIGN (t2);
1798 		  if (aoff && (unsigned HOST_WIDE_INT) aoff < attrs.align)
1799 	            attrs.align = aoff;
1800 		  align_computed = true;
1801 		  attrs.offset_known_p = true;
1802 		  attrs.offset = ioff;
1803 		  apply_bitpos = bitpos;
1804 		}
1805 	    }
1806 	  else if (TREE_CODE (t2) == COMPONENT_REF)
1807 	    {
1808 	      attrs.expr = t2;
1809 	      attrs.offset_known_p = false;
1810 	      if (host_integerp (off_tree, 1))
1811 		{
1812 		  attrs.offset_known_p = true;
1813 		  attrs.offset = tree_low_cst (off_tree, 1);
1814 		  apply_bitpos = bitpos;
1815 		}
1816 	      /* ??? Any reason the field size would be different than
1817 		 the size we got from the type?  */
1818 	    }
1819 
1820 	  /* If this is an indirect reference, record it.  */
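	  /* ??? T is known to be an ARRAY_REF at this point, so this
	     condition can never hold; T2 was probably intended.  */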
1821 	  else if (TREE_CODE (t) == MEM_REF)
1822 	    {
1823 	      attrs.expr = t;
1824 	      attrs.offset_known_p = true;
1825 	      attrs.offset = 0;
1826 	      apply_bitpos = bitpos;
1827 	    }
1828 	}
1829 
1830       /* If this is an indirect reference, record it.  */
1831       else if (TREE_CODE (t) == MEM_REF
1832 	       || TREE_CODE (t) == TARGET_MEM_REF)
1833 	{
1834 	  attrs.expr = t;
1835 	  attrs.offset_known_p = true;
1836 	  attrs.offset = 0;
1837 	  apply_bitpos = bitpos;
1838 	}
1839 
1840       if (!align_computed)
1841 	{
1842 	  unsigned int obj_align;
1843 	  unsigned HOST_WIDE_INT obj_bitpos;
1844 	  obj_align = get_object_alignment_1 (t, &obj_bitpos);
1845 	  obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
1846 	  if (obj_bitpos != 0)
1847 	    obj_align = (obj_bitpos & -obj_bitpos);
1848 	  attrs.align = MAX (attrs.align, obj_align);
1849 	}
1850     }
1851   else
1852     as = TYPE_ADDR_SPACE (type);
1853 
1854   /* If we modified OFFSET based on T, then subtract the outstanding
1855      bit position offset.  Similarly, increase the size of the accessed
1856      object to contain the negative offset.  */
1857   if (apply_bitpos)
1858     {
1859       gcc_assert (attrs.offset_known_p);
1860       attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1861       if (attrs.size_known_p)
1862 	attrs.size += apply_bitpos / BITS_PER_UNIT;
1863     }
1864 
1865   /* Now set the attributes we computed above.  */
1866   attrs.addrspace = as;
1867   set_mem_attrs (ref, &attrs);
1868 }
1869 
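/* Set the memory attributes of REF from the tree expression T, as for
   set_mem_attributes_minus_bitpos, with no outstanding bit position.  */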
1870 void
1871 set_mem_attributes (rtx ref, tree t, int objectp)
1872 {
1873   set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1874 }
1875 
1876 /* Set the alias set of MEM to SET.  */
1877 
1878 void
1879 set_mem_alias_set (rtx mem, alias_set_type set)
1880 {
1881   struct mem_attrs attrs;
1882 
1883   /* If the new and old alias sets don't conflict, something is wrong.  */
1884   gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1885   attrs = *get_mem_attrs (mem);
1886   attrs.alias = set;
1887   set_mem_attrs (mem, &attrs);
1888 }
1889 
1890 /* Set the address space of MEM to ADDRSPACE (target-defined).  */
1891 
1892 void
1893 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1894 {
1895   struct mem_attrs attrs;
1896 
1897   attrs = *get_mem_attrs (mem);
1898   attrs.addrspace = addrspace;
1899   set_mem_attrs (mem, &attrs);
1900 }
1901 
1902 /* Set the alignment of MEM to ALIGN bits.  */
1903 
1904 void
1905 set_mem_align (rtx mem, unsigned int align)
1906 {
1907   struct mem_attrs attrs;
1908 
1909   attrs = *get_mem_attrs (mem);
1910   attrs.align = align;
1911   set_mem_attrs (mem, &attrs);
1912 }
1913 
1914 /* Set the expr for MEM to EXPR.  */
1915 
1916 void
1917 set_mem_expr (rtx mem, tree expr)
1918 {
1919   struct mem_attrs attrs;
1920 
1921   attrs = *get_mem_attrs (mem);
1922   attrs.expr = expr;
1923   set_mem_attrs (mem, &attrs);
1924 }
1925 
1926 /* Set the offset of MEM to OFFSET.  */
1927 
1928 void
1929 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
1930 {
1931   struct mem_attrs attrs;
1932 
1933   attrs = *get_mem_attrs (mem);
1934   attrs.offset_known_p = true;
1935   attrs.offset = offset;
1936   set_mem_attrs (mem, &attrs);
1937 }
1938 
1939 /* Clear the offset of MEM.  */
1940 
1941 void
1942 clear_mem_offset (rtx mem)
1943 {
1944   struct mem_attrs attrs;
1945 
1946   attrs = *get_mem_attrs (mem);
1947   attrs.offset_known_p = false;
1948   set_mem_attrs (mem, &attrs);
1949 }
1950 
1951 /* Set the size of MEM to SIZE.  */
1952 
1953 void
1954 set_mem_size (rtx mem, HOST_WIDE_INT size)
1955 {
1956   struct mem_attrs attrs;
1957 
1958   attrs = *get_mem_attrs (mem);
1959   attrs.size_known_p = true;
1960   attrs.size = size;
1961   set_mem_attrs (mem, &attrs);
1962 }
1963 
1964 /* Clear the size of MEM.  */
1965 
1966 void
1967 clear_mem_size (rtx mem)
1968 {
1969   struct mem_attrs attrs;
1970 
1971   attrs = *get_mem_attrs (mem);
1972   attrs.size_known_p = false;
1973   set_mem_attrs (mem, &attrs);
1974 }
1975 
1976 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1977    and its address changed to ADDR.  (VOIDmode means don't change the mode.
1978    NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
1979    returned memory location is required to be valid.  The memory
1980    attributes are not changed.  */
1981 
1982 static rtx
1983 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1984 {
1985   addr_space_t as;
1986   rtx new_rtx;
1987 
1988   gcc_assert (MEM_P (memref));
1989   as = MEM_ADDR_SPACE (memref);
1990   if (mode == VOIDmode)
1991     mode = GET_MODE (memref);
1992   if (addr == 0)
1993     addr = XEXP (memref, 0);
1994   if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1995       && (!validate || memory_address_addr_space_p (mode, addr, as)))
1996     return memref;
1997 
1998   if (validate)
1999     {
2000       if (reload_in_progress || reload_completed)
2001 	gcc_assert (memory_address_addr_space_p (mode, addr, as));
2002       else
2003 	addr = memory_address_addr_space (mode, addr, as);
2004     }
2005 
2006   if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2007     return memref;
2008 
2009   new_rtx = gen_rtx_MEM (mode, addr);
2010   MEM_COPY_ATTRIBUTES (new_rtx, memref);
2011   return new_rtx;
2012 }
2013 
2014 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2015    way we are changing MEMREF, so we only preserve the alias set.  */
2016 
2017 rtx
2018 change_address (rtx memref, enum machine_mode mode, rtx addr)
2019 {
2020   rtx new_rtx = change_address_1 (memref, mode, addr, 1);
2021   enum machine_mode mmode = GET_MODE (new_rtx);
2022   struct mem_attrs attrs, *defattrs;
2023 
2024   attrs = *get_mem_attrs (memref);
2025   defattrs = mode_mem_attrs[(int) mmode];
2026   attrs.expr = NULL_TREE;
2027   attrs.offset_known_p = false;
2028   attrs.size_known_p = defattrs->size_known_p;
2029   attrs.size = defattrs->size;
2030   attrs.align = defattrs->align;
2031 
2032   /* If there are no changes, just return the original memory reference.  */
2033   if (new_rtx == memref)
2034     {
2035       if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2036 	return new_rtx;
2037 
2038       new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2039       MEM_COPY_ATTRIBUTES (new_rtx, memref);
2040     }
2041 
2042   set_mem_attrs (new_rtx, &attrs);
2043   return new_rtx;
2044 }
2045 
2046 /* Return a memory reference like MEMREF, but with its mode changed
2047    to MODE and its address offset by OFFSET bytes.  If VALIDATE is
2048    nonzero, the memory address is forced to be valid.
2049    If ADJUST is zero, OFFSET is only used to update MEM_ATTRS and the
2050    caller is responsible for adjusting the MEMREF base register.  */
2051 
2052 rtx
2053 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2054 		  int validate, int adjust)
2055 {
2056   rtx addr = XEXP (memref, 0);
2057   rtx new_rtx;
2058   enum machine_mode address_mode;
2059   int pbits;
2060   struct mem_attrs attrs, *defattrs;
2061   unsigned HOST_WIDE_INT max_align;
2062 
2063   attrs = *get_mem_attrs (memref);
2064 
2065   /* If there are no changes, just return the original memory reference.  */
2066   if (mode == GET_MODE (memref) && !offset
2067       && (!validate || memory_address_addr_space_p (mode, addr,
2068 						    attrs.addrspace)))
2069     return memref;
2070 
2071   /* ??? Prefer to create garbage instead of creating shared rtl.
2072      This may happen even if offset is nonzero -- consider
2073      (plus (plus reg reg) const_int) -- so do this always.  */
2074   addr = copy_rtx (addr);
2075 
2076   /* Convert a possibly large offset to a signed value within the
2077      range of the target address space.  */
2078   address_mode = targetm.addr_space.address_mode (attrs.addrspace);
2079   pbits = GET_MODE_BITSIZE (address_mode);
2080   if (HOST_BITS_PER_WIDE_INT > pbits)
2081     {
2082       int shift = HOST_BITS_PER_WIDE_INT - pbits;
2083       offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2084 		>> shift);
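      /* E.g. with a 64-bit HOST_WIDE_INT and a 32-bit address space,
	 SHIFT is 32 and an offset of 0xfffffffc narrows to -4.  */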
2085     }
2086 
2087   if (adjust)
2088     {
2089       /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2090 	 object, we can merge it into the LO_SUM.  */
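      /* An offset below the mode alignment only fills low-order zero bits
	 of the suitably aligned low part, so it cannot carry into the
	 HIGH part of the address.  */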
2091       if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2092 	  && offset >= 0
2093 	  && (unsigned HOST_WIDE_INT) offset
2094 	      < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2095 	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2096 			       plus_constant (XEXP (addr, 1), offset));
2097       else
2098 	addr = plus_constant (addr, offset);
2099     }
2100 
2101   new_rtx = change_address_1 (memref, mode, addr, validate);
2102 
2103   /* If the address is a REG, change_address_1 rightfully returns memref,
2104      but this would destroy memref's MEM_ATTRS.  */
2105   if (new_rtx == memref && offset != 0)
2106     new_rtx = copy_rtx (new_rtx);
2107 
2108   /* Compute the new values of the memory attributes due to this adjustment.
2109      We add the offsets and update the alignment.  */
2110   if (attrs.offset_known_p)
2111     attrs.offset += offset;
2112 
2113   /* Compute the new alignment by taking the MIN of the alignment and the
2114      lowest-order set bit in OFFSET, but don't change the alignment if
2115      OFFSET is zero.  */
2116   if (offset != 0)
2117     {
2118       max_align = (offset & -offset) * BITS_PER_UNIT;
2119       attrs.align = MIN (attrs.align, max_align);
2120     }
2121 
2122   /* We can compute the size in a number of ways.  */
2123   defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2124   if (defattrs->size_known_p)
2125     {
2126       attrs.size_known_p = true;
2127       attrs.size = defattrs->size;
2128     }
2129   else if (attrs.size_known_p)
2130     attrs.size -= offset;
2131 
2132   set_mem_attrs (new_rtx, &attrs);
2133 
2134   /* At some point, we should validate that this offset is within the object,
2135      if all the appropriate values are known.  */
2136   return new_rtx;
2137 }
2138 
2139 /* Return a memory reference like MEMREF, but with its mode changed
2140    to MODE and its address changed to ADDR, which is assumed to be
2141    MEMREF offset by OFFSET bytes.  If VALIDATE is
2142    nonzero, the memory address is forced to be valid.  */
2143 
2144 rtx
2145 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2146 			     HOST_WIDE_INT offset, int validate)
2147 {
2148   memref = change_address_1 (memref, VOIDmode, addr, validate);
2149   return adjust_address_1 (memref, mode, offset, validate, 0);
2150 }
2151 
2152 /* Return a memory reference like MEMREF, but whose address is changed by
2153    adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
2154    known to be in OFFSET (possibly 1).  */
2155 
2156 rtx
2157 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2158 {
2159   rtx new_rtx, addr = XEXP (memref, 0);
2160   enum machine_mode address_mode;
2161   struct mem_attrs attrs, *defattrs;
2162 
2163   attrs = *get_mem_attrs (memref);
2164   address_mode = targetm.addr_space.address_mode (attrs.addrspace);
2165   new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2166 
2167   /* At this point we don't know _why_ the address is invalid.  It
2168      could have secondary memory references, multiplies or anything.
2169 
2170      However, if we did go and rearrange things, we can wind up not
2171      being able to recognize the magic around pic_offset_table_rtx.
2172      This stuff is fragile, and is yet another example of why it is
2173      bad to expose PIC machinery too early.  */
2174   if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2175 				     attrs.addrspace)
2176       && GET_CODE (addr) == PLUS
2177       && XEXP (addr, 0) == pic_offset_table_rtx)
2178     {
2179       addr = force_reg (GET_MODE (addr), addr);
2180       new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2181     }
2182 
2183   update_temp_slot_address (XEXP (memref, 0), new_rtx);
2184   new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2185 
2186   /* If there are no changes, just return the original memory reference.  */
2187   if (new_rtx == memref)
2188     return new_rtx;
2189 
2190   /* Update the alignment to reflect the offset.  Reset the offset, which
2191      we don't know.  */
2192   defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2193   attrs.offset_known_p = false;
2194   attrs.size_known_p = defattrs->size_known_p;
2195   attrs.size = defattrs->size;
2196   attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2197   set_mem_attrs (new_rtx, &attrs);
2198   return new_rtx;
2199 }
2200 
2201 /* Return a memory reference like MEMREF, but with its address changed to
2202    ADDR.  The caller is asserting that the actual piece of memory pointed
2203    to is the same, just the form of the address is being changed, such as
2204    by putting something into a register.  */
2205 
2206 rtx
2207 replace_equiv_address (rtx memref, rtx addr)
2208 {
2209   /* change_address_1 copies the memory attribute structure without change
2210      and that's exactly what we want here.  */
2211   update_temp_slot_address (XEXP (memref, 0), addr);
2212   return change_address_1 (memref, VOIDmode, addr, 1);
2213 }
2214 
2215 /* Likewise, but the reference is not required to be valid.  */
2216 
2217 rtx
2218 replace_equiv_address_nv (rtx memref, rtx addr)
2219 {
2220   return change_address_1 (memref, VOIDmode, addr, 0);
2221 }
2222 
2223 /* Return a memory reference like MEMREF, but with its mode widened to
2224    MODE and offset by OFFSET.  This would be used by targets that e.g.
2225    cannot issue QImode memory operations and have to use SImode memory
2226    operations plus masking logic.  */
2227 
2228 rtx
2229 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2230 {
2231   rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
2232   struct mem_attrs attrs;
2233   unsigned int size = GET_MODE_SIZE (mode);
2234 
2235   /* If there are no changes, just return the original memory reference.  */
2236   if (new_rtx == memref)
2237     return new_rtx;
2238 
2239   attrs = *get_mem_attrs (new_rtx);
2240 
2241   /* If we don't know what offset we were at within the expression, then
2242      we can't know if we've overstepped the bounds.  */
2243   if (! attrs.offset_known_p)
2244     attrs.expr = NULL_TREE;
2245 
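  /* Walk up the MEM_EXPR while the widened access would overstep the
     recorded expression; e.g. widening a QImode access to a char field
     up to SImode oversteps the field, so we strip back to the
     containing structure.  */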
2246   while (attrs.expr)
2247     {
2248       if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2249 	{
2250 	  tree field = TREE_OPERAND (attrs.expr, 1);
2251 	  tree offset = component_ref_field_offset (attrs.expr);
2252 
2253 	  if (! DECL_SIZE_UNIT (field))
2254 	    {
2255 	      attrs.expr = NULL_TREE;
2256 	      break;
2257 	    }
2258 
2259 	  /* Is the field at least as large as the access?  If so, ok,
2260 	     otherwise strip back to the containing structure.  */
2261 	  if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2262 	      && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2263 	      && attrs.offset >= 0)
2264 	    break;
2265 
2266 	  if (! host_integerp (offset, 1))
2267 	    {
2268 	      attrs.expr = NULL_TREE;
2269 	      break;
2270 	    }
2271 
2272 	  attrs.expr = TREE_OPERAND (attrs.expr, 0);
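	  /* Step out to the containing record and add FIELD's byte
	     position within it to the recorded offset.  */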
2273 	  attrs.offset += tree_low_cst (offset, 1);
2274 	  attrs.offset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2275 			   / BITS_PER_UNIT);
2276 	}
2277       /* Similarly for the decl.  */
2278       else if (DECL_P (attrs.expr)
2279 	       && DECL_SIZE_UNIT (attrs.expr)
2280 	       && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2281 	       && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2282 	       && (! attrs.offset_known_p || attrs.offset >= 0))
2283 	break;
2284       else
2285 	{
2286 	  /* The widened memory access overflows the expression, which means
2287 	     that it could alias another expression.  Zap it.  */
2288 	  attrs.expr = NULL_TREE;
2289 	  break;
2290 	}
2291     }
2292 
2293   if (! attrs.expr)
2294     attrs.offset_known_p = false;
2295 
2296   /* The widened memory may alias other stuff, so zap the alias set.  */
2297   /* ??? Maybe use get_alias_set on any remaining expression.  */
2298   attrs.alias = 0;
2299   attrs.size_known_p = true;
2300   attrs.size = size;
2301   set_mem_attrs (new_rtx, &attrs);
2302   return new_rtx;
2303 }
2304 
2305 /* A fake decl that is used as the MEM_EXPR of spill slots.  */
2306 static GTY(()) tree spill_slot_decl;
2307 
2308 tree
2309 get_spill_slot_decl (bool force_build_p)
2310 {
2311   tree d = spill_slot_decl;
2312   rtx rd;
2313   struct mem_attrs attrs;
2314 
2315   if (d || !force_build_p)
2316     return d;
2317 
2318   d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2319 		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
2320   DECL_ARTIFICIAL (d) = 1;
2321   DECL_IGNORED_P (d) = 1;
2322   TREE_USED (d) = 1;
2323   spill_slot_decl = d;
2324 
2325   rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2326   MEM_NOTRAP_P (rd) = 1;
2327   attrs = *mode_mem_attrs[(int) BLKmode];
2328   attrs.alias = new_alias_set ();
2329   attrs.expr = d;
2330   set_mem_attrs (rd, &attrs);
2331   SET_DECL_RTL (d, rd);
2332 
2333   return d;
2334 }
2335 
2336 /* Given MEM, a result from assign_stack_local, fill in the memory
2337    attributes as appropriate for a register allocator spill slot.
2338    These slots are not aliasable by other memory.  We arrange for
2339    them all to use a single MEM_EXPR, so that the aliasing code can
2340    work properly in the case of shared spill slots.  */
2341 
2342 void
2343 set_mem_attrs_for_spill (rtx mem)
2344 {
2345   struct mem_attrs attrs;
2346   rtx addr;
2347 
2348   attrs = *get_mem_attrs (mem);
2349   attrs.expr = get_spill_slot_decl (true);
2350   attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2351   attrs.addrspace = ADDR_SPACE_GENERIC;
2352 
2353   /* We expect the incoming memory to be of the form:
2354 	(mem:MODE (plus (reg sfp) (const_int offset)))
2355      with perhaps the plus missing for offset = 0.  */
2356   addr = XEXP (mem, 0);
2357   attrs.offset_known_p = true;
2358   attrs.offset = 0;
2359   if (GET_CODE (addr) == PLUS
2360       && CONST_INT_P (XEXP (addr, 1)))
2361     attrs.offset = INTVAL (XEXP (addr, 1));
2362 
2363   set_mem_attrs (mem, &attrs);
2364   MEM_NOTRAP_P (mem) = 1;
2365 }
2366 
2367 /* Return a newly created CODE_LABEL rtx with a unique label number.  */
2368 
2369 rtx
2370 gen_label_rtx (void)
2371 {
2372   return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2373 			     NULL, label_num++, NULL);
2374 }
2375 
2376 /* For procedure integration.  */
2377 
2378 /* Install new pointers to the first and last insns in the chain.
2379    Also, set cur_insn_uid to one higher than the last in use.
2380    Used for an inlined procedure after copying the insn chain.  */
2381 
2382 void
2383 set_new_first_and_last_insn (rtx first, rtx last)
2384 {
2385   rtx insn;
2386 
2387   set_first_insn (first);
2388   set_last_insn (last);
2389   cur_insn_uid = 0;
2390 
2391   if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2392     {
2393       int debug_count = 0;
2394 
2395       cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2396       cur_debug_insn_uid = 0;
2397 
2398       for (insn = first; insn; insn = NEXT_INSN (insn))
2399 	if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2400 	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2401 	else
2402 	  {
2403 	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2404 	    if (DEBUG_INSN_P (insn))
2405 	      debug_count++;
2406 	  }
2407 
2408       if (debug_count)
2409 	cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2410       else
2411 	cur_debug_insn_uid++;
2412     }
2413   else
2414     for (insn = first; insn; insn = NEXT_INSN (insn))
2415       cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2416 
2417   cur_insn_uid++;
2418 }
2419 
2420 /* Go through all the RTL insn bodies and copy any invalid shared
2421    structure.  This routine should only be called once.  */
2422 
2423 static void
2424 unshare_all_rtl_1 (rtx insn)
2425 {
2426   /* Unshare just about everything else.  */
2427   unshare_all_rtl_in_chain (insn);
2428 
2429   /* Make sure the addresses of stack slots found outside the insn chain
2430      (such as, in DECL_RTL of a variable) are not shared
2431      with the insn chain.
2432 
2433      This special care is necessary when the stack slot MEM does not
2434      actually appear in the insn chain.  If it does appear, its address
2435      is unshared from all else at that point.  */
2436   stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2437 }
2438 
2439 /* Go through all the RTL insn bodies and copy any invalid shared
2440    structure, again.  This is a fairly expensive thing to do, so it
2441    should be done sparingly.  */
2442 
2443 void
2444 unshare_all_rtl_again (rtx insn)
2445 {
2446   rtx p;
2447   tree decl;
2448 
2449   for (p = insn; p; p = NEXT_INSN (p))
2450     if (INSN_P (p))
2451       {
2452 	reset_used_flags (PATTERN (p));
2453 	reset_used_flags (REG_NOTES (p));
2454 	if (CALL_P (p))
2455 	  reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2456       }
2457 
2458   /* Make sure that virtual stack slots are not shared.  */
2459   set_used_decls (DECL_INITIAL (cfun->decl));
2460 
2461   /* Make sure that virtual parameters are not shared.  */
2462   for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2463     set_used_flags (DECL_RTL (decl));
2464 
2465   reset_used_flags (stack_slot_list);
2466 
2467   unshare_all_rtl_1 (insn);
2468 }
2469 
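/* Entry point of the unshare pass: unshare everything in the current
   function's insn chain.  */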
2470 unsigned int
2471 unshare_all_rtl (void)
2472 {
2473   unshare_all_rtl_1 (get_insns ());
2474   return 0;
2475 }
2476 
2477 struct rtl_opt_pass pass_unshare_all_rtl =
2478 {
2479  {
2480   RTL_PASS,
2481   "unshare",                            /* name */
2482   NULL,                                 /* gate */
2483   unshare_all_rtl,                      /* execute */
2484   NULL,                                 /* sub */
2485   NULL,                                 /* next */
2486   0,                                    /* static_pass_number */
2487   TV_NONE,                              /* tv_id */
2488   0,                                    /* properties_required */
2489   0,                                    /* properties_provided */
2490   0,                                    /* properties_destroyed */
2491   0,                                    /* todo_flags_start */
2492   TODO_verify_rtl_sharing               /* todo_flags_finish */
2493  }
2494 };
2495 
2496 
2497 /* Check that ORIG is not marked when it should not be, and mark ORIG as
2498    in use.  Recursively does the same for subexpressions.  */
2499 
2500 static void
2501 verify_rtx_sharing (rtx orig, rtx insn)
2502 {
2503   rtx x = orig;
2504   int i;
2505   enum rtx_code code;
2506   const char *format_ptr;
2507 
2508   if (x == 0)
2509     return;
2510 
2511   code = GET_CODE (x);
2512 
2513   /* These types may be freely shared.  */
2514 
2515   switch (code)
2516     {
2517     case REG:
2518     case DEBUG_EXPR:
2519     case VALUE:
2520     case CONST_INT:
2521     case CONST_DOUBLE:
2522     case CONST_FIXED:
2523     case CONST_VECTOR:
2524     case SYMBOL_REF:
2525     case LABEL_REF:
2526     case CODE_LABEL:
2527     case PC:
2528     case CC0:
2529     case RETURN:
2530     case SIMPLE_RETURN:
2531     case SCRATCH:
2532       /* SCRATCH must be shared: each one represents a distinct value.  */
2533       return;
2534     case CLOBBER:
2535       if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2536 	return;
2537       break;
2538 
2539     case CONST:
2540       if (shared_const_p (orig))
2541 	return;
2542       break;
2543 
2544     case MEM:
2545       /* A MEM may be shared if its address is constant, or during/after reload.  */
2546       if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2547 	  || reload_completed || reload_in_progress)
2548 	return;
2549 
2550       break;
2551 
2552     default:
2553       break;
2554     }
2555 
2556   /* This rtx may not be shared.  If it has already been seen,
2557      report the invalid sharing.  */
2558 #ifdef ENABLE_CHECKING
2559   if (RTX_FLAG (x, used))
2560     {
2561       error ("invalid rtl sharing found in the insn");
2562       debug_rtx (insn);
2563       error ("shared rtx");
2564       debug_rtx (x);
2565       internal_error ("internal consistency failure");
2566     }
2567 #endif
2568   gcc_assert (!RTX_FLAG (x, used));
2569 
2570   RTX_FLAG (x, used) = 1;
2571 
2572   /* Now scan the subexpressions recursively.  */
2573 
2574   format_ptr = GET_RTX_FORMAT (code);
2575 
2576   for (i = 0; i < GET_RTX_LENGTH (code); i++)
2577     {
2578       switch (*format_ptr++)
2579 	{
2580 	case 'e':
2581 	  verify_rtx_sharing (XEXP (x, i), insn);
2582 	  break;
2583 
2584 	case 'E':
2585 	  if (XVEC (x, i) != NULL)
2586 	    {
2587 	      int j;
2588 	      int len = XVECLEN (x, i);
2589 
2590 	      for (j = 0; j < len; j++)
2591 		{
2592 		  /* We allow sharing of ASM_OPERANDS inside a single
2593 		     instruction.  */
2594 		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2595 		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2596 			  == ASM_OPERANDS))
2597 		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2598 		  else
2599 		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
2600 		}
2601 	    }
2602 	  break;
2603 	}
2604     }
2605   return;
2606 }
2607 
2608 /* Go through all the RTL insn bodies and check that there is no unexpected
2609    sharing in between the subexpressions.  */
2610 
2611 DEBUG_FUNCTION void
2612 verify_rtl_sharing (void)
2613 {
2614   rtx p;
2615 
2616   timevar_push (TV_VERIFY_RTL_SHARING);
2617 
2618   for (p = get_insns (); p; p = NEXT_INSN (p))
2619     if (INSN_P (p))
2620       {
2621 	reset_used_flags (PATTERN (p));
2622 	reset_used_flags (REG_NOTES (p));
2623 	if (CALL_P (p))
2624 	  reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2625 	if (GET_CODE (PATTERN (p)) == SEQUENCE)
2626 	  {
2627 	    int i;
2628 	    rtx q, sequence = PATTERN (p);
2629 
2630 	    for (i = 0; i < XVECLEN (sequence, 0); i++)
2631 	      {
2632 		q = XVECEXP (sequence, 0, i);
2633 		gcc_assert (INSN_P (q));
2634 		reset_used_flags (PATTERN (q));
2635 		reset_used_flags (REG_NOTES (q));
2636 		if (CALL_P (q))
2637 		  reset_used_flags (CALL_INSN_FUNCTION_USAGE (q));
2638 	      }
2639 	  }
2640       }
2641 
2642   for (p = get_insns (); p; p = NEXT_INSN (p))
2643     if (INSN_P (p))
2644       {
2645 	verify_rtx_sharing (PATTERN (p), p);
2646 	verify_rtx_sharing (REG_NOTES (p), p);
2647 	if (CALL_P (p))
2648 	  verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (p), p);
2649       }
2650 
2651   timevar_pop (TV_VERIFY_RTL_SHARING);
2652 }
2653 
2654 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2655    Assumes the mark bits are cleared at entry.  */
2656 
2657 void
2658 unshare_all_rtl_in_chain (rtx insn)
2659 {
2660   for (; insn; insn = NEXT_INSN (insn))
2661     if (INSN_P (insn))
2662       {
2663 	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2664 	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2665 	if (CALL_P (insn))
2666 	  CALL_INSN_FUNCTION_USAGE (insn)
2667 	    = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2668       }
2669 }
2670 
2671 /* Go through all virtual stack slots of a function and mark them as
2672    shared.  We never replace the DECL_RTLs themselves with a copy,
2673    but expressions mentioned in a DECL_RTL cannot be shared with
2674    expressions in the instruction stream.
2675 
2676    Note that reload may convert pseudo registers into memories in-place.
2677    Pseudo registers are always shared, but MEMs never are.  Thus if we
2678    reset the used flags on MEMs in the instruction stream, we must set
2679    them again on MEMs that appear in DECL_RTLs.  */
2680 
2681 static void
2682 set_used_decls (tree blk)
2683 {
2684   tree t;
2685 
2686   /* Mark decls.  */
2687   for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2688     if (DECL_RTL_SET_P (t))
2689       set_used_flags (DECL_RTL (t));
2690 
2691   /* Now process sub-blocks.  */
2692   for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2693     set_used_decls (t);
2694 }
2695 
2696 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2697    Recursively does the same for subexpressions.  Uses
2698    copy_rtx_if_shared_1 to reduce stack space.  */
2699 
2700 rtx
2701 copy_rtx_if_shared (rtx orig)
2702 {
2703   copy_rtx_if_shared_1 (&orig);
2704   return orig;
2705 }
2706 
2707 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2708    use.  Recursively does the same for subexpressions.  */
2709 
2710 static void
2711 copy_rtx_if_shared_1 (rtx *orig1)
2712 {
2713   rtx x;
2714   int i;
2715   enum rtx_code code;
2716   rtx *last_ptr;
2717   const char *format_ptr;
2718   int copied = 0;
2719   int length;
2720 
2721   /* Repeat is used to turn tail-recursion into iteration.  */
2722 repeat:
2723   x = *orig1;
2724 
2725   if (x == 0)
2726     return;
2727 
2728   code = GET_CODE (x);
2729 
2730   /* These types may be freely shared.  */
2731 
2732   switch (code)
2733     {
2734     case REG:
2735     case DEBUG_EXPR:
2736     case VALUE:
2737     case CONST_INT:
2738     case CONST_DOUBLE:
2739     case CONST_FIXED:
2740     case CONST_VECTOR:
2741     case SYMBOL_REF:
2742     case LABEL_REF:
2743     case CODE_LABEL:
2744     case PC:
2745     case CC0:
2746     case RETURN:
2747     case SIMPLE_RETURN:
2748     case SCRATCH:
2749       /* SCRATCH must be shared: each one represents a distinct value.  */
2750       return;
2751     case CLOBBER:
2752       if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2753 	return;
2754       break;
2755 
2756     case CONST:
2757       if (shared_const_p (x))
2758 	return;
2759       break;
2760 
2761     case DEBUG_INSN:
2762     case INSN:
2763     case JUMP_INSN:
2764     case CALL_INSN:
2765     case NOTE:
2766     case BARRIER:
2767       /* The chain of insns is not being copied.  */
2768       return;
2769 
2770     default:
2771       break;
2772     }
2773 
2774   /* This rtx may not be shared.  If it has already been seen,
2775      replace it with a copy of itself.  */
2776 
2777   if (RTX_FLAG (x, used))
2778     {
2779       x = shallow_copy_rtx (x);
2780       copied = 1;
2781     }
2782   RTX_FLAG (x, used) = 1;
2783 
2784   /* Now scan the subexpressions recursively.
2785      We can store any replaced subexpressions directly into X
2786      since we know X is not shared!  Any vectors in X
2787      must be copied if X was copied.  */
2788 
2789   format_ptr = GET_RTX_FORMAT (code);
2790   length = GET_RTX_LENGTH (code);
2791   last_ptr = NULL;
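  /* Recurse on each subexpression as the next one is seen, but remember
     the final one in LAST_PTR and handle it by looping back to REPEAT,
     turning the tail call into iteration.  */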
2792 
2793   for (i = 0; i < length; i++)
2794     {
2795       switch (*format_ptr++)
2796 	{
2797 	case 'e':
2798           if (last_ptr)
2799             copy_rtx_if_shared_1 (last_ptr);
2800 	  last_ptr = &XEXP (x, i);
2801 	  break;
2802 
2803 	case 'E':
2804 	  if (XVEC (x, i) != NULL)
2805 	    {
2806 	      int j;
2807 	      int len = XVECLEN (x, i);
2808 
2809               /* Copy the vector iff we copied the rtx and the length
2810 		 is nonzero.  */
2811 	      if (copied && len > 0)
2812 		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2813 
2814               /* Call recursively on all elements of the vector.  */
2815 	      for (j = 0; j < len; j++)
2816                 {
2817 		  if (last_ptr)
2818 		    copy_rtx_if_shared_1 (last_ptr);
2819                   last_ptr = &XVECEXP (x, i, j);
2820                 }
2821 	    }
2822 	  break;
2823 	}
2824     }
2825   *orig1 = x;
2826   if (last_ptr)
2827     {
2828       orig1 = last_ptr;
2829       goto repeat;
2830     }
2831   return;
2832 }
2833 
2834 /* Set the USED bit in X and its non-shareable subparts to FLAG.  */
2835 
2836 static void
2837 mark_used_flags (rtx x, int flag)
2838 {
2839   int i, j;
2840   enum rtx_code code;
2841   const char *format_ptr;
2842   int length;
2843 
2844   /* Repeat is used to turn tail-recursion into iteration.  */
2845 repeat:
2846   if (x == 0)
2847     return;
2848 
2849   code = GET_CODE (x);
2850 
2851   /* These types may be freely shared, so we needn't do any resetting
2852      for them.  */
2853 
2854   switch (code)
2855     {
2856     case REG:
2857     case DEBUG_EXPR:
2858     case VALUE:
2859     case CONST_INT:
2860     case CONST_DOUBLE:
2861     case CONST_FIXED:
2862     case CONST_VECTOR:
2863     case SYMBOL_REF:
2864     case CODE_LABEL:
2865     case PC:
2866     case CC0:
2867     case RETURN:
2868     case SIMPLE_RETURN:
2869       return;
2870 
2871     case DEBUG_INSN:
2872     case INSN:
2873     case JUMP_INSN:
2874     case CALL_INSN:
2875     case NOTE:
2876     case LABEL_REF:
2877     case BARRIER:
2878       /* The chain of insns is not being copied.  */
2879       return;
2880 
2881     default:
2882       break;
2883     }
2884 
2885   RTX_FLAG (x, used) = flag;
2886 
2887   format_ptr = GET_RTX_FORMAT (code);
2888   length = GET_RTX_LENGTH (code);
2889 
2890   for (i = 0; i < length; i++)
2891     {
2892       switch (*format_ptr++)
2893 	{
2894 	case 'e':
2895           if (i == length-1)
2896             {
2897               x = XEXP (x, i);
2898 	      goto repeat;
2899             }
2900 	  mark_used_flags (XEXP (x, i), flag);
2901 	  break;
2902 
2903 	case 'E':
2904 	  for (j = 0; j < XVECLEN (x, i); j++)
2905 	    mark_used_flags (XVECEXP (x, i, j), flag);
2906 	  break;
2907 	}
2908     }
2909 }
2910 
2911 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2912    to look for shared sub-parts.  */
2913 
2914 void
2915 reset_used_flags (rtx x)
2916 {
2917   mark_used_flags (x, 0);
2918 }
2919 
2920 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2921    to look for shared sub-parts.  */
2922 
2923 void
2924 set_used_flags (rtx x)
2925 {
2926   mark_used_flags (x, 1);
2927 }
2928 
2929 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2930    Return X or the rtx for the pseudo reg the value of X was copied into.
2931    OTHER must be valid as a SET_DEST.  */
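/* E.g. if OTHER is a register mentioned in X, storing into OTHER before
   X is used would change the value X computes, so X is first copied into
   a fresh pseudo register.  */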
2932 
2933 rtx
2934 make_safe_from (rtx x, rtx other)
2935 {
2936   while (1)
2937     switch (GET_CODE (other))
2938       {
2939       case SUBREG:
2940 	other = SUBREG_REG (other);
2941 	break;
2942       case STRICT_LOW_PART:
2943       case SIGN_EXTEND:
2944       case ZERO_EXTEND:
2945 	other = XEXP (other, 0);
2946 	break;
2947       default:
2948 	goto done;
2949       }
2950  done:
2951   if ((MEM_P (other)
2952        && ! CONSTANT_P (x)
2953        && !REG_P (x)
2954        && GET_CODE (x) != SUBREG)
2955       || (REG_P (other)
2956 	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
2957 	      || reg_mentioned_p (other, x))))
2958     {
2959       rtx temp = gen_reg_rtx (GET_MODE (x));
2960       emit_move_insn (temp, x);
2961       return temp;
2962     }
2963   return x;
2964 }
2965 
2966 /* Emission of insns (adding them to the doubly-linked list).  */
2967 
2968 /* Return the last insn emitted, even if it is in a sequence now pushed.  */
2969 
2970 rtx
2971 get_last_insn_anywhere (void)
2972 {
2973   struct sequence_stack *stack;
2974   if (get_last_insn ())
2975     return get_last_insn ();
2976   for (stack = seq_stack; stack; stack = stack->next)
2977     if (stack->last != 0)
2978       return stack->last;
2979   return 0;
2980 }
2981 
2982 /* Return the first nonnote insn emitted in current sequence or current
2983    function.  This routine looks inside SEQUENCEs.  */
2984 
2985 rtx
2986 get_first_nonnote_insn (void)
2987 {
2988   rtx insn = get_insns ();
2989 
2990   if (insn)
2991     {
2992       if (NOTE_P (insn))
2993 	for (insn = next_insn (insn);
2994 	     insn && NOTE_P (insn);
2995 	     insn = next_insn (insn))
2996 	  continue;
2997       else
2998 	{
2999 	  if (NONJUMP_INSN_P (insn)
3000 	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
3001 	    insn = XVECEXP (PATTERN (insn), 0, 0);
3002 	}
3003     }
3004 
3005   return insn;
3006 }
3007 
3008 /* Return the last nonnote insn emitted in current sequence or current
3009    function.  This routine looks inside SEQUENCEs.  */
3010 
3011 rtx
3012 get_last_nonnote_insn (void)
3013 {
3014   rtx insn = get_last_insn ();
3015 
3016   if (insn)
3017     {
3018       if (NOTE_P (insn))
3019 	for (insn = previous_insn (insn);
3020 	     insn && NOTE_P (insn);
3021 	     insn = previous_insn (insn))
3022 	  continue;
3023       else
3024 	{
3025 	  if (NONJUMP_INSN_P (insn)
3026 	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
3027 	    insn = XVECEXP (PATTERN (insn), 0,
3028 			    XVECLEN (PATTERN (insn), 0) - 1);
3029 	}
3030     }
3031 
3032   return insn;
3033 }
3034 
3035 /* Return the number of actual (non-debug) insns emitted in this
3036    function.  */
3037 
3038 int
3039 get_max_insn_count (void)
3040 {
3041   int n = cur_insn_uid;
3042 
3043   /* The table size must be stable across -g, to avoid codegen
3044      differences due to debug insns, and not be affected by
3045      -fmin-insn-uid, to avoid excessive table size and to simplify
3046      debugging of -fcompare-debug failures.  */
3047   if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3048     n -= cur_debug_insn_uid;
3049   else
3050     n -= MIN_NONDEBUG_INSN_UID;
3051 
3052   return n;
3053 }
3054 
3055 
3056 /* Return the next insn.  If it is a SEQUENCE, return the first insn
3057    of the sequence.  */
3058 
3059 rtx
3060 next_insn (rtx insn)
3061 {
3062   if (insn)
3063     {
3064       insn = NEXT_INSN (insn);
3065       if (insn && NONJUMP_INSN_P (insn)
3066 	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
3067 	insn = XVECEXP (PATTERN (insn), 0, 0);
3068     }
3069 
3070   return insn;
3071 }
3072 
3073 /* Return the previous insn.  If it is a SEQUENCE, return the last insn
3074    of the sequence.  */
3075 
3076 rtx
3077 previous_insn (rtx insn)
3078 {
3079   if (insn)
3080     {
3081       insn = PREV_INSN (insn);
3082       if (insn && NONJUMP_INSN_P (insn)
3083 	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
3084 	insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3085     }
3086 
3087   return insn;
3088 }
3089 
3090 /* Return the next insn after INSN that is not a NOTE.  This routine does not
3091    look inside SEQUENCEs.  */
3092 
3093 rtx
3094 next_nonnote_insn (rtx insn)
3095 {
3096   while (insn)
3097     {
3098       insn = NEXT_INSN (insn);
3099       if (insn == 0 || !NOTE_P (insn))
3100 	break;
3101     }
3102 
3103   return insn;
3104 }
3105 
3106 /* Return the next insn after INSN that is not a NOTE, but stop the
3107    search before we enter another basic block.  This routine does not
3108    look inside SEQUENCEs.  */
3109 
3110 rtx
3111 next_nonnote_insn_bb (rtx insn)
3112 {
3113   while (insn)
3114     {
3115       insn = NEXT_INSN (insn);
3116       if (insn == 0 || !NOTE_P (insn))
3117 	break;
3118       if (NOTE_INSN_BASIC_BLOCK_P (insn))
3119 	return NULL_RTX;
3120     }
3121 
3122   return insn;
3123 }
3124 
3125 /* Return the previous insn before INSN that is not a NOTE.  This routine does
3126    not look inside SEQUENCEs.  */
3127 
3128 rtx
3129 prev_nonnote_insn (rtx insn)
3130 {
3131   while (insn)
3132     {
3133       insn = PREV_INSN (insn);
3134       if (insn == 0 || !NOTE_P (insn))
3135 	break;
3136     }
3137 
3138   return insn;
3139 }
3140 
3141 /* Return the previous insn before INSN that is not a NOTE, but stop
3142    the search before we enter another basic block.  This routine does
3143    not look inside SEQUENCEs.  */
3144 
3145 rtx
3146 prev_nonnote_insn_bb (rtx insn)
3147 {
3148   while (insn)
3149     {
3150       insn = PREV_INSN (insn);
3151       if (insn == 0 || !NOTE_P (insn))
3152 	break;
3153       if (NOTE_INSN_BASIC_BLOCK_P (insn))
3154 	return NULL_RTX;
3155     }
3156 
3157   return insn;
3158 }
3159 
3160 /* Return the next insn after INSN that is not a DEBUG_INSN.  This
3161    routine does not look inside SEQUENCEs.  */
3162 
3163 rtx
3164 next_nondebug_insn (rtx insn)
3165 {
3166   while (insn)
3167     {
3168       insn = NEXT_INSN (insn);
3169       if (insn == 0 || !DEBUG_INSN_P (insn))
3170 	break;
3171     }
3172 
3173   return insn;
3174 }
3175 
3176 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3177    This routine does not look inside SEQUENCEs.  */
3178 
3179 rtx
3180 prev_nondebug_insn (rtx insn)
3181 {
3182   while (insn)
3183     {
3184       insn = PREV_INSN (insn);
3185       if (insn == 0 || !DEBUG_INSN_P (insn))
3186 	break;
3187     }
3188 
3189   return insn;
3190 }
3191 
3192 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3193    This routine does not look inside SEQUENCEs.  */
3194 
3195 rtx
3196 next_nonnote_nondebug_insn (rtx insn)
3197 {
3198   while (insn)
3199     {
3200       insn = NEXT_INSN (insn);
3201       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3202 	break;
3203     }
3204 
3205   return insn;
3206 }
3207 
3208 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3209    This routine does not look inside SEQUENCEs.  */
3210 
3211 rtx
3212 prev_nonnote_nondebug_insn (rtx insn)
3213 {
3214   while (insn)
3215     {
3216       insn = PREV_INSN (insn);
3217       if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3218 	break;
3219     }
3220 
3221   return insn;
3222 }
3223 
3224 /* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
3225    or 0, if there is none.  This routine does not look inside
3226    SEQUENCEs.  */
3227 
3228 rtx
3229 next_real_insn (rtx insn)
3230 {
3231   while (insn)
3232     {
3233       insn = NEXT_INSN (insn);
3234       if (insn == 0 || INSN_P (insn))
3235 	break;
3236     }
3237 
3238   return insn;
3239 }
3240 
3241 /* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
3242    or 0, if there is none.  This routine does not look inside
3243    SEQUENCEs.  */
3244 
3245 rtx
3246 prev_real_insn (rtx insn)
3247 {
3248   while (insn)
3249     {
3250       insn = PREV_INSN (insn);
3251       if (insn == 0 || INSN_P (insn))
3252 	break;
3253     }
3254 
3255   return insn;
3256 }
3257 
3258 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3259    This routine does not look inside SEQUENCEs.  */
3260 
3261 rtx
3262 last_call_insn (void)
3263 {
3264   rtx insn;
3265 
3266   for (insn = get_last_insn ();
3267        insn && !CALL_P (insn);
3268        insn = PREV_INSN (insn))
3269     ;
3270 
3271   return insn;
3272 }
3273 
3274 /* Return nonzero if INSN really does something: it is a CALL_INSN or a
3275    JUMP_INSN, or a NONJUMP_INSN that is not, after reload, a standalone
3276    USE or CLOBBER.  */
3277 
3278 int
3279 active_insn_p (const_rtx insn)
3280 {
3281   return (CALL_P (insn) || JUMP_P (insn)
3282 	  || (NONJUMP_INSN_P (insn)
3283 	      && (! reload_completed
3284 		  || (GET_CODE (PATTERN (insn)) != USE
3285 		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
3286 }
3287 
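/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */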
3288 rtx
3289 next_active_insn (rtx insn)
3290 {
3291   while (insn)
3292     {
3293       insn = NEXT_INSN (insn);
3294       if (insn == 0 || active_insn_p (insn))
3295 	break;
3296     }
3297 
3298   return insn;
3299 }
3300 
3301 /* Find the last insn before INSN that really does something.  This routine
3302    does not look inside SEQUENCEs.  After reload this also skips over
3303    standalone USE and CLOBBER insns.  */
3304 
3305 rtx
3306 prev_active_insn (rtx insn)
3307 {
3308   while (insn)
3309     {
3310       insn = PREV_INSN (insn);
3311       if (insn == 0 || active_insn_p (insn))
3312 	break;
3313     }
3314 
3315   return insn;
3316 }
3317 
3318 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none.  */
3319 
3320 rtx
3321 next_label (rtx insn)
3322 {
3323   while (insn)
3324     {
3325       insn = NEXT_INSN (insn);
3326       if (insn == 0 || LABEL_P (insn))
3327 	break;
3328     }
3329 
3330   return insn;
3331 }
3332 
3333 /* Return the last label to mark the same position as LABEL.  Return LABEL
3334    itself if it is null or any kind of return rtx.  */
3335 
3336 rtx
3337 skip_consecutive_labels (rtx label)
3338 {
3339   rtx insn;
3340 
3341   if (label && ANY_RETURN_P (label))
3342     return label;
3343 
3344   for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3345     if (LABEL_P (insn))
3346       label = insn;
3347 
3348   return label;
3349 }
3350 
3351 #ifdef HAVE_cc0
3352 /* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
3353    and REG_CC_USER notes so we can find it.  */
3354 
3355 void
3356 link_cc0_insns (rtx insn)
3357 {
3358   rtx user = next_nonnote_insn (insn);
3359 
3360   if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3361     user = XVECEXP (PATTERN (user), 0, 0);
3362 
3363   add_reg_note (user, REG_CC_SETTER, insn);
3364   add_reg_note (insn, REG_CC_USER, user);
3365 }
3366 
3367 /* Return the next insn that uses CC0 after INSN, which is assumed to
3368    set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3369    applied to the result of this function should yield INSN).
3370 
3371    Normally, this is simply the next insn.  However, if a REG_CC_USER note
3372    is present, it contains the insn that uses CC0.
3373 
3374    Return 0 if we can't find the insn.  */
3375 
3376 rtx
3377 next_cc0_user (rtx insn)
3378 {
3379   rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3380 
3381   if (note)
3382     return XEXP (note, 0);
3383 
3384   insn = next_nonnote_insn (insn);
3385   if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3386     insn = XVECEXP (PATTERN (insn), 0, 0);
3387 
3388   if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3389     return insn;
3390 
3391   return 0;
3392 }
3393 
3394 /* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
3395    note, it is the previous insn.  */
3396 
3397 rtx
3398 prev_cc0_setter (rtx insn)
3399 {
3400   rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3401 
3402   if (note)
3403     return XEXP (note, 0);
3404 
3405   insn = prev_nonnote_insn (insn);
3406   gcc_assert (sets_cc0_p (PATTERN (insn)));
3407 
3408   return insn;
3409 }
3410 #endif
3411 
3412 #ifdef AUTO_INC_DEC
3413 /* For_each_rtx callback: return 1 if *XP is an autoinc of the register DATA.  */
3414 
3415 static int
3416 find_auto_inc (rtx *xp, void *data)
3417 {
3418   rtx x = *xp;
3419   rtx reg = (rtx) data;
3420 
3421   if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3422     return 0;
3423 
3424   switch (GET_CODE (x))
3425     {
3426       case PRE_DEC:
3427       case PRE_INC:
3428       case POST_DEC:
3429       case POST_INC:
3430       case PRE_MODIFY:
3431       case POST_MODIFY:
3432 	if (rtx_equal_p (reg, XEXP (x, 0)))
3433 	  return 1;
3434 	break;
3435 
3436       default:
3437 	gcc_unreachable ();
3438     }
3439   return -1;
3440 }
3441 #endif
3442 
3443 /* Increment the label use counts for all labels present in X.  */
3444 
3445 static void
3446 mark_label_nuses (rtx x)
3447 {
3448   enum rtx_code code;
3449   int i, j;
3450   const char *fmt;
3451 
3452   code = GET_CODE (x);
3453   if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3454     LABEL_NUSES (XEXP (x, 0))++;
3455 
3456   fmt = GET_RTX_FORMAT (code);
3457   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3458     {
3459       if (fmt[i] == 'e')
3460 	mark_label_nuses (XEXP (x, i));
3461       else if (fmt[i] == 'E')
3462 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3463 	  mark_label_nuses (XVECEXP (x, i, j));
3464     }
3465 }
3466 
3467 
3468 /* Try splitting insns that can be split for better scheduling.
3469    PAT is the pattern which we may be able to split.
3470    TRIAL is the insn providing PAT.
3471    LAST is nonzero if we should return the last insn of the sequence produced.
3472 
3473    If this routine succeeds in splitting, it returns the first or last
3474    replacement insn depending on the value of LAST.  Otherwise, it
3475    returns TRIAL.  If the insn to be returned can be split, it will be.  */
3476 
3477 rtx
3478 try_split (rtx pat, rtx trial, int last)
3479 {
3480   rtx before = PREV_INSN (trial);
3481   rtx after = NEXT_INSN (trial);
3482   int has_barrier = 0;
3483   rtx note, seq, tem;
3484   int probability;
3485   rtx insn_last, insn;
3486   int njumps = 0;
3487 
3488   /* We're not good at redistributing frame information.  */
3489   if (RTX_FRAME_RELATED_P (trial))
3490     return trial;
3491 
3492   if (any_condjump_p (trial)
3493       && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3494     split_branch_probability = INTVAL (XEXP (note, 0));
3495   probability = split_branch_probability;
3496 
3497   seq = split_insns (pat, trial);
3498 
3499   split_branch_probability = -1;
3500 
3501   /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3502      We may need to handle this specially.  */
3503   if (after && BARRIER_P (after))
3504     {
3505       has_barrier = 1;
3506       after = NEXT_INSN (after);
3507     }
3508 
3509   if (!seq)
3510     return trial;
3511 
3512   /* Avoid infinite loop if any insn of the result matches
3513      the original pattern.  */
3514   insn_last = seq;
3515   while (1)
3516     {
3517       if (INSN_P (insn_last)
3518 	  && rtx_equal_p (PATTERN (insn_last), pat))
3519 	return trial;
3520       if (!NEXT_INSN (insn_last))
3521 	break;
3522       insn_last = NEXT_INSN (insn_last);
3523     }
3524 
3525   /* We will be adding the new sequence to the function.  The splitters
3526      may have introduced invalid RTL sharing, so unshare the sequence now.  */
3527   unshare_all_rtl_in_chain (seq);
3528 
3529   /* Mark labels.  */
3530   for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3531     {
3532       if (JUMP_P (insn))
3533 	{
3534 	  mark_jump_label (PATTERN (insn), insn, 0);
3535 	  njumps++;
3536 	  if (probability != -1
3537 	      && any_condjump_p (insn)
3538 	      && !find_reg_note (insn, REG_BR_PROB, 0))
3539 	    {
3540 	      /* We can preserve the REG_BR_PROB notes only if exactly
3541 		 one jump is created; otherwise the machine description
3542 		 is responsible for this step, using the
3543 		 split_branch_probability variable.  */
3544 	      gcc_assert (njumps == 1);
3545 	      add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
3546 	    }
3547 	}
3548     }
3549 
3550   /* If we are splitting a CALL_INSN, look for the CALL_INSN
3551      in SEQ and copy any additional information across.  */
3552   if (CALL_P (trial))
3553     {
3554       for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3555 	if (CALL_P (insn))
3556 	  {
3557 	    rtx next, *p;
3558 
3559 	    /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3560 	       target may have explicitly specified.  */
3561 	    p = &CALL_INSN_FUNCTION_USAGE (insn);
3562 	    while (*p)
3563 	      p = &XEXP (*p, 1);
3564 	    *p = CALL_INSN_FUNCTION_USAGE (trial);
3565 
3566 	    /* If the old call was a sibling call, the new one must
3567 	       be too.  */
3568 	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3569 
3570 	    /* If the new call is the last instruction in the sequence,
3571 	       it will effectively replace the old call in situ.  Otherwise
3572 	       we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3573 	       so that it comes immediately after the new call.  */
3574 	    if (NEXT_INSN (insn))
3575 	      for (next = NEXT_INSN (trial);
3576 		   next && NOTE_P (next);
3577 		   next = NEXT_INSN (next))
3578 		if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3579 		  {
3580 		    remove_insn (next);
3581 		    add_insn_after (next, insn, NULL);
3582 		    break;
3583 		  }
3584 	  }
3585     }
3586 
3587   /* Copy notes, particularly those related to the CFG.  */
3588   for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3589     {
3590       switch (REG_NOTE_KIND (note))
3591 	{
3592 	case REG_EH_REGION:
3593 	  copy_reg_eh_region_note_backward (note, insn_last, NULL);
3594 	  break;
3595 
3596 	case REG_NORETURN:
3597 	case REG_SETJMP:
3598 	case REG_TM:
3599 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3600 	    {
3601 	      if (CALL_P (insn))
3602 		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3603 	    }
3604 	  break;
3605 
3606 	case REG_NON_LOCAL_GOTO:
3607 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3608 	    {
3609 	      if (JUMP_P (insn))
3610 		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3611 	    }
3612 	  break;
3613 
3614 #ifdef AUTO_INC_DEC
3615 	case REG_INC:
3616 	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3617 	    {
3618 	      rtx reg = XEXP (note, 0);
3619 	      if (!FIND_REG_INC_NOTE (insn, reg)
3620 		  && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3621 		add_reg_note (insn, REG_INC, reg);
3622 	    }
3623 	  break;
3624 #endif
3625 
3626 	case REG_ARGS_SIZE:
3627 	  fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3628 	  break;
3629 
3630 	default:
3631 	  break;
3632 	}
3633     }
3634 
3635   /* If there are LABELS inside the split insns, increment the
3636      usage count so we don't delete the label.  */
3637   if (INSN_P (trial))
3638     {
3639       insn = insn_last;
3640       while (insn != NULL_RTX)
3641 	{
3642 	  /* JUMP_P insns have already been "marked" above.  */
3643 	  if (NONJUMP_INSN_P (insn))
3644 	    mark_label_nuses (PATTERN (insn));
3645 
3646 	  insn = PREV_INSN (insn);
3647 	}
3648     }
3649 
3650   tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3651 
3652   delete_insn (trial);
3653   if (has_barrier)
3654     emit_barrier_after (tem);
3655 
3656   /* Recursively call try_split for each new insn created; by the
3657      time control returns here that insn will be fully split, so
3658      set LAST and continue from the insn after the one returned.
3659      We can't use next_active_insn here since AFTER may be a note.
3660      Ignore deleted insns, which can occur if not optimizing.  */
3661   for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3662     if (! INSN_DELETED_P (tem) && INSN_P (tem))
3663       tem = try_split (PATTERN (tem), tem, 1);
3664 
3665   /* Return either the first or the last insn, depending on which was
3666      requested.  */
3667   return last
3668     ? (after ? PREV_INSN (after) : get_last_insn ())
3669     : NEXT_INSN (before);
3670 }
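
/* A minimal usage sketch, not taken from this file: a hypothetical
   driver that splits every insn in the current chain (the real driver
   is split_all_insns in recog.c, which also maintains the CFG):

	rtx insn;
	for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	  if (INSN_P (insn))
	    insn = try_split (PATTERN (insn), insn, 1);

   With LAST == 1, try_split returns the last insn of the split
   sequence, so the loop resumes after the replacement insns.  */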
3671 
3672 /* Make and return an INSN rtx, initializing all its slots.
3673    Store PATTERN in the pattern slots.  */
3674 
3675 rtx
3676 make_insn_raw (rtx pattern)
3677 {
3678   rtx insn;
3679 
3680   insn = rtx_alloc (INSN);
3681 
3682   INSN_UID (insn) = cur_insn_uid++;
3683   PATTERN (insn) = pattern;
3684   INSN_CODE (insn) = -1;
3685   REG_NOTES (insn) = NULL;
3686   INSN_LOCATOR (insn) = curr_insn_locator ();
3687   BLOCK_FOR_INSN (insn) = NULL;
3688 
3689 #ifdef ENABLE_RTL_CHECKING
3690   if (insn
3691       && INSN_P (insn)
3692       && (returnjump_p (insn)
3693 	  || (GET_CODE (pattern) == SET
3694 	      && SET_DEST (pattern) == pc_rtx)))
3695     {
3696       warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3697       debug_rtx (insn);
3698     }
3699 #endif
3700 
3701   return insn;
3702 }
3703 
3704 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */
3705 
3706 rtx
3707 make_debug_insn_raw (rtx pattern)
3708 {
3709   rtx insn;
3710 
3711   insn = rtx_alloc (DEBUG_INSN);
3712   INSN_UID (insn) = cur_debug_insn_uid++;
3713   if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3714     INSN_UID (insn) = cur_insn_uid++;
3715 
3716   PATTERN (insn) = pattern;
3717   INSN_CODE (insn) = -1;
3718   REG_NOTES (insn) = NULL;
3719   INSN_LOCATOR (insn) = curr_insn_locator ();
3720   BLOCK_FOR_INSN (insn) = NULL;
3721 
3722   return insn;
3723 }
3724 
3725 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
3726 
3727 rtx
3728 make_jump_insn_raw (rtx pattern)
3729 {
3730   rtx insn;
3731 
3732   insn = rtx_alloc (JUMP_INSN);
3733   INSN_UID (insn) = cur_insn_uid++;
3734 
3735   PATTERN (insn) = pattern;
3736   INSN_CODE (insn) = -1;
3737   REG_NOTES (insn) = NULL;
3738   JUMP_LABEL (insn) = NULL;
3739   INSN_LOCATOR (insn) = curr_insn_locator ();
3740   BLOCK_FOR_INSN (insn) = NULL;
3741 
3742   return insn;
3743 }
3744 
3745 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
3746 
3747 static rtx
3748 make_call_insn_raw (rtx pattern)
3749 {
3750   rtx insn;
3751 
3752   insn = rtx_alloc (CALL_INSN);
3753   INSN_UID (insn) = cur_insn_uid++;
3754 
3755   PATTERN (insn) = pattern;
3756   INSN_CODE (insn) = -1;
3757   REG_NOTES (insn) = NULL;
3758   CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3759   INSN_LOCATOR (insn) = curr_insn_locator ();
3760   BLOCK_FOR_INSN (insn) = NULL;
3761 
3762   return insn;
3763 }
3764 
3765 /* Add INSN to the end of the doubly-linked list.
3766    INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */
3767 
3768 void
3769 add_insn (rtx insn)
3770 {
3771   PREV_INSN (insn) = get_last_insn ();
3772   NEXT_INSN (insn) = 0;
3773
3774   if (NULL != get_last_insn ())
3775     NEXT_INSN (get_last_insn ()) = insn;
3776 
3777   if (NULL == get_insns ())
3778     set_first_insn (insn);
3779 
3780   set_last_insn (insn);
3781 }
3782 
3783 /* Add INSN into the doubly-linked list after insn AFTER.  This and
3784    the next should be the only functions called to insert an insn once
3785    delay slots have been filled since only they know how to update a
3786    SEQUENCE.  */
3787 
3788 void
3789 add_insn_after (rtx insn, rtx after, basic_block bb)
3790 {
3791   rtx next = NEXT_INSN (after);
3792 
3793   gcc_assert (!optimize || !INSN_DELETED_P (after));
3794 
3795   NEXT_INSN (insn) = next;
3796   PREV_INSN (insn) = after;
3797 
3798   if (next)
3799     {
3800       PREV_INSN (next) = insn;
3801       if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3802 	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3803     }
3804   else if (get_last_insn () == after)
3805     set_last_insn (insn);
3806   else
3807     {
3808       struct sequence_stack *stack = seq_stack;
3809       /* Scan all pending sequences too.  */
3810       for (; stack; stack = stack->next)
3811 	if (after == stack->last)
3812 	  {
3813 	    stack->last = insn;
3814 	    break;
3815 	  }
3816 
3817       gcc_assert (stack);
3818     }
3819 
3820   if (!BARRIER_P (after)
3821       && !BARRIER_P (insn)
3822       && (bb = BLOCK_FOR_INSN (after)))
3823     {
3824       set_block_for_insn (insn, bb);
3825       if (INSN_P (insn))
3826 	df_insn_rescan (insn);
3827       /* Should not happen, as the first insn in the BB is always
3828 	 either a NOTE or a LABEL.  */
3829       if (BB_END (bb) == after
3830 	  /* Avoid clobbering of structure when creating new BB.  */
3831 	  && !BARRIER_P (insn)
3832 	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
3833 	BB_END (bb) = insn;
3834     }
3835 
3836   NEXT_INSN (after) = insn;
3837   if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3838     {
3839       rtx sequence = PATTERN (after);
3840       NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3841     }
3842 }
3843 
3844 /* Add INSN into the doubly-linked list before insn BEFORE.  This and
3845    the previous should be the only functions called to insert an insn
3846    once delay slots have been filled since only they know how to
3847    update a SEQUENCE.  If BB is NULL, an attempt is made to infer the
3848    BB from BEFORE.  */
3849 
3850 void
3851 add_insn_before (rtx insn, rtx before, basic_block bb)
3852 {
3853   rtx prev = PREV_INSN (before);
3854 
3855   gcc_assert (!optimize || !INSN_DELETED_P (before));
3856 
3857   PREV_INSN (insn) = prev;
3858   NEXT_INSN (insn) = before;
3859 
3860   if (prev)
3861     {
3862       NEXT_INSN (prev) = insn;
3863       if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3864 	{
3865 	  rtx sequence = PATTERN (prev);
3866 	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3867 	}
3868     }
3869   else if (get_insns () == before)
3870     set_first_insn (insn);
3871   else
3872     {
3873       struct sequence_stack *stack = seq_stack;
3874       /* Scan all pending sequences too.  */
3875       for (; stack; stack = stack->next)
3876 	if (before == stack->first)
3877 	  {
3878 	    stack->first = insn;
3879 	    break;
3880 	  }
3881 
3882       gcc_assert (stack);
3883     }
3884 
3885   if (!bb
3886       && !BARRIER_P (before)
3887       && !BARRIER_P (insn))
3888     bb = BLOCK_FOR_INSN (before);
3889 
3890   if (bb)
3891     {
3892       set_block_for_insn (insn, bb);
3893       if (INSN_P (insn))
3894 	df_insn_rescan (insn);
3895       /* Should not happen, as the first insn in the BB is always either
3896 	 a NOTE or a LABEL.  */
3897       gcc_assert (BB_HEAD (bb) != insn
3898 		  /* Avoid clobbering of structure when creating new BB.  */
3899 		  || BARRIER_P (insn)
3900 		  || NOTE_INSN_BASIC_BLOCK_P (insn));
3901     }
3902 
3903   PREV_INSN (before) = insn;
3904   if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3905     PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3906 }
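
/* Why these two insertion routines must understand SEQUENCEs: after
   delay-slot filling, a branch and its delay insns occupy a single
   chain element whose PATTERN is a SEQUENCE, and the first and last
   insns inside that vector carry the PREV_INSN/NEXT_INSN links into
   the surrounding chain.  Inserting next to such an element therefore
   also patches the inner links, which is what the XVECEXP updates in
   add_insn_after and add_insn_before above accomplish.  */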
3907 
3908 
3909 /* Replace INSN with a deleted-instruction note.  */
3910 
3911 void
3912 set_insn_deleted (rtx insn)
3913 {
3914   df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3915   PUT_CODE (insn, NOTE);
3916   NOTE_KIND (insn) = NOTE_INSN_DELETED;
3917 }
3918 
3919 
3920 /* Remove an insn from its doubly-linked list.  This function knows how
3921    to handle sequences.  */
3922 void
3923 remove_insn (rtx insn)
3924 {
3925   rtx next = NEXT_INSN (insn);
3926   rtx prev = PREV_INSN (insn);
3927   basic_block bb;
3928 
3929   /* Later in the code, the block will be marked dirty.  */
3930   df_insn_delete (NULL, INSN_UID (insn));
3931 
3932   if (prev)
3933     {
3934       NEXT_INSN (prev) = next;
3935       if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3936 	{
3937 	  rtx sequence = PATTERN (prev);
3938 	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3939 	}
3940     }
3941   else if (get_insns () == insn)
3942     {
3943       if (next)
3944         PREV_INSN (next) = NULL;
3945       set_first_insn (next);
3946     }
3947   else
3948     {
3949       struct sequence_stack *stack = seq_stack;
3950       /* Scan all pending sequences too.  */
3951       for (; stack; stack = stack->next)
3952 	if (insn == stack->first)
3953 	  {
3954 	    stack->first = next;
3955 	    break;
3956 	  }
3957 
3958       gcc_assert (stack);
3959     }
3960 
3961   if (next)
3962     {
3963       PREV_INSN (next) = prev;
3964       if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3965 	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3966     }
3967   else if (get_last_insn () == insn)
3968     set_last_insn (prev);
3969   else
3970     {
3971       struct sequence_stack *stack = seq_stack;
3972       /* Scan all pending sequences too.  */
3973       for (; stack; stack = stack->next)
3974 	if (insn == stack->last)
3975 	  {
3976 	    stack->last = prev;
3977 	    break;
3978 	  }
3979 
3980       gcc_assert (stack);
3981     }
3982   if (!BARRIER_P (insn)
3983       && (bb = BLOCK_FOR_INSN (insn)))
3984     {
3985       if (NONDEBUG_INSN_P (insn))
3986 	df_set_bb_dirty (bb);
3987       if (BB_HEAD (bb) == insn)
3988 	{
3989 	  /* Never ever delete the basic block note without deleting whole
3990 	     basic block.  */
3991 	  gcc_assert (!NOTE_P (insn));
3992 	  BB_HEAD (bb) = next;
3993 	}
3994       if (BB_END (bb) == insn)
3995 	BB_END (bb) = prev;
3996     }
3997 }
3998 
3999 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */
4000 
4001 void
4002 add_function_usage_to (rtx call_insn, rtx call_fusage)
4003 {
4004   gcc_assert (call_insn && CALL_P (call_insn));
4005 
4006   /* Put the register usage information on the CALL.  If there is already
4007      some usage information, put ours at the end.  */
4008   if (CALL_INSN_FUNCTION_USAGE (call_insn))
4009     {
4010       rtx link;
4011 
4012       for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4013 	   link = XEXP (link, 1))
4014 	;
4015 
4016       XEXP (link, 1) = call_fusage;
4017     }
4018   else
4019     CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4020 }
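
/* A hedged sketch of the usual caller pattern: USE/CLOBBER expressions
   are accumulated with helpers such as use_reg (see expr.c) and then
   attached in one step; the hard register number 0 below is purely
   illustrative:

	rtx fusage = NULL_RTX;
	use_reg (&fusage, gen_rtx_REG (Pmode, 0));
	add_function_usage_to (call_insn, fusage);  */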
4021 
4022 /* Delete all insns made since FROM.
4023    FROM becomes the new last instruction.  */
4024 
4025 void
4026 delete_insns_since (rtx from)
4027 {
4028   if (from == 0)
4029     set_first_insn (0);
4030   else
4031     NEXT_INSN (from) = 0;
4032   set_last_insn (from);
4033 }
4034 
4035 /* This function is deprecated; please use sequences instead.
4036 
4037    Move a consecutive bunch of insns to a different place in the chain.
4038    The insns to be moved are those between FROM and TO.
4039    They are moved to a new position after the insn AFTER.
4040    AFTER must not be FROM or TO or any insn in between.
4041 
4042    This function does not know about SEQUENCEs and hence should not be
4043    called after delay-slot filling has been done.  */
4044 
4045 void
4046 reorder_insns_nobb (rtx from, rtx to, rtx after)
4047 {
4048 #ifdef ENABLE_CHECKING
4049   rtx x;
4050   for (x = from; x != to; x = NEXT_INSN (x))
4051     gcc_assert (after != x);
4052   gcc_assert (after != to);
4053 #endif
4054 
4055   /* Splice this bunch out of where it is now.  */
4056   if (PREV_INSN (from))
4057     NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4058   if (NEXT_INSN (to))
4059     PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4060   if (get_last_insn () == to)
4061     set_last_insn (PREV_INSN (from));
4062   if (get_insns () == from)
4063     set_first_insn (NEXT_INSN (to));
4064 
4065   /* Make the new neighbors point to it and it to them.  */
4066   if (NEXT_INSN (after))
4067     PREV_INSN (NEXT_INSN (after)) = to;
4068 
4069   NEXT_INSN (to) = NEXT_INSN (after);
4070   PREV_INSN (from) = after;
4071   NEXT_INSN (after) = from;
4072   if (after == get_last_insn ())
4073     set_last_insn (to);
4074 }
4075 
4076 /* Same as function above, but take care to update BB boundaries.  */
4077 void
4078 reorder_insns (rtx from, rtx to, rtx after)
4079 {
4080   rtx prev = PREV_INSN (from);
4081   basic_block bb, bb2;
4082 
4083   reorder_insns_nobb (from, to, after);
4084 
4085   if (!BARRIER_P (after)
4086       && (bb = BLOCK_FOR_INSN (after)))
4087     {
4088       rtx x;
4089       df_set_bb_dirty (bb);
4090 
4091       if (!BARRIER_P (from)
4092 	  && (bb2 = BLOCK_FOR_INSN (from)))
4093 	{
4094 	  if (BB_END (bb2) == to)
4095 	    BB_END (bb2) = prev;
4096 	  df_set_bb_dirty (bb2);
4097 	}
4098 
4099       if (BB_END (bb) == after)
4100 	BB_END (bb) = to;
4101 
4102       for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4103 	if (!BARRIER_P (x))
4104 	  df_insn_change_bb (x, bb);
4105     }
4106 }
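
/* For example, moving the contiguous run FIRST..LAST so that it
   follows ANCHOR while keeping BB_HEAD/BB_END consistent (the names
   are illustrative):

	reorder_insns (first, last, anchor);

   reorder_insns_nobb is only appropriate when no basic-block
   information exists yet or the caller repairs it afterwards.  */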
4107 
4108 
4109 /* Emit insn(s) of given code and pattern
4110    at a specified place within the doubly-linked list.
4111 
4112    All of the emit_foo global entry points accept an object
4113    X which is either an insn list or a PATTERN of a single
4114    instruction.
4115 
4116    There are thus a few canonical ways to generate code and
4117    emit it at a specific place in the instruction stream.  For
4118    example, consider the instruction named SPOT and the fact that
4119    we would like to emit some instructions before SPOT.  We might
4120    do it like this:
4121 
4122 	start_sequence ();
4123 	... emit the new instructions ...
4124 	insns_head = get_insns ();
4125 	end_sequence ();
4126 
4127 	emit_insn_before (insns_head, SPOT);
4128 
4129    It used to be common to generate SEQUENCE rtl instead, but that
4130    is a relic of the past which no longer occurs.  The reason is that
4131    SEQUENCE rtl results in heavily fragmented RTL memory, since the SEQUENCE
4132    generated would almost certainly die right after it was created.  */
4133 
4134 static rtx
4135 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4136                            rtx (*make_raw) (rtx))
4137 {
4138   rtx insn;
4139 
4140   gcc_assert (before);
4141 
4142   if (x == NULL_RTX)
4143     return last;
4144 
4145   switch (GET_CODE (x))
4146     {
4147     case DEBUG_INSN:
4148     case INSN:
4149     case JUMP_INSN:
4150     case CALL_INSN:
4151     case CODE_LABEL:
4152     case BARRIER:
4153     case NOTE:
4154       insn = x;
4155       while (insn)
4156 	{
4157 	  rtx next = NEXT_INSN (insn);
4158 	  add_insn_before (insn, before, bb);
4159 	  last = insn;
4160 	  insn = next;
4161 	}
4162       break;
4163 
4164 #ifdef ENABLE_RTL_CHECKING
4165     case SEQUENCE:
4166       gcc_unreachable ();
4167       break;
4168 #endif
4169 
4170     default:
4171       last = (*make_raw) (x);
4172       add_insn_before (last, before, bb);
4173       break;
4174     }
4175 
4176   return last;
4177 }
4178 
4179 /* Make X be output before the instruction BEFORE.  */
4180 
4181 rtx
4182 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4183 {
4184   return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4185 }
4186 
4187 /* Make an instruction with body X and code JUMP_INSN
4188    and output it before the instruction BEFORE.  */
4189 
4190 rtx
4191 emit_jump_insn_before_noloc (rtx x, rtx before)
4192 {
4193   return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4194 				    make_jump_insn_raw);
4195 }
4196 
4197 /* Make an instruction with body X and code CALL_INSN
4198    and output it before the instruction BEFORE.  */
4199 
4200 rtx
4201 emit_call_insn_before_noloc (rtx x, rtx before)
4202 {
4203   return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4204 				    make_call_insn_raw);
4205 }
4206 
4207 /* Make an instruction with body X and code DEBUG_INSN
4208    and output it before the instruction BEFORE.  */
4209 
4210 rtx
4211 emit_debug_insn_before_noloc (rtx x, rtx before)
4212 {
4213   return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4214 				    make_debug_insn_raw);
4215 }
4216 
4217 /* Make an insn of code BARRIER
4218    and output it before the insn BEFORE.  */
4219 
4220 rtx
4221 emit_barrier_before (rtx before)
4222 {
4223   rtx insn = rtx_alloc (BARRIER);
4224 
4225   INSN_UID (insn) = cur_insn_uid++;
4226 
4227   add_insn_before (insn, before, NULL);
4228   return insn;
4229 }
4230 
4231 /* Emit the label LABEL before the insn BEFORE.  */
4232 
4233 rtx
4234 emit_label_before (rtx label, rtx before)
4235 {
4236   /* This can be called twice for the same label as a result of the
4237      confusion that follows a syntax error!  So make it harmless.  */
4238   if (INSN_UID (label) == 0)
4239     {
4240       INSN_UID (label) = cur_insn_uid++;
4241       add_insn_before (label, before, NULL);
4242     }
4243 
4244   return label;
4245 }
4246 
4247 /* Emit a note of subtype SUBTYPE before the insn BEFORE.  */
4248 
4249 rtx
4250 emit_note_before (enum insn_note subtype, rtx before)
4251 {
4252   rtx note = rtx_alloc (NOTE);
4253   INSN_UID (note) = cur_insn_uid++;
4254   NOTE_KIND (note) = subtype;
4255   BLOCK_FOR_INSN (note) = NULL;
4256   memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4257 
4258   add_insn_before (note, before, NULL);
4259   return note;
4260 }
4261 
4262 /* Helper for emit_insn_after; handles lists of instructions
4263    efficiently.  */
4264 
4265 static rtx
4266 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4267 {
4268   rtx last;
4269   rtx after_after;
4270   if (!bb && !BARRIER_P (after))
4271     bb = BLOCK_FOR_INSN (after);
4272 
4273   if (bb)
4274     {
4275       df_set_bb_dirty (bb);
4276       for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4277 	if (!BARRIER_P (last))
4278 	  {
4279 	    set_block_for_insn (last, bb);
4280 	    df_insn_rescan (last);
4281 	  }
4282       if (!BARRIER_P (last))
4283 	{
4284 	  set_block_for_insn (last, bb);
4285 	  df_insn_rescan (last);
4286 	}
4287       if (BB_END (bb) == after)
4288 	BB_END (bb) = last;
4289     }
4290   else
4291     for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4292       continue;
4293 
4294   after_after = NEXT_INSN (after);
4295 
4296   NEXT_INSN (after) = first;
4297   PREV_INSN (first) = after;
4298   NEXT_INSN (last) = after_after;
4299   if (after_after)
4300     PREV_INSN (after_after) = last;
4301 
4302   if (after == get_last_insn ())
4303     set_last_insn (last);
4304 
4305   return last;
4306 }
4307 
4308 static rtx
4309 emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4310 			  rtx (*make_raw)(rtx))
4311 {
4312   rtx last = after;
4313 
4314   gcc_assert (after);
4315 
4316   if (x == NULL_RTX)
4317     return last;
4318 
4319   switch (GET_CODE (x))
4320     {
4321     case DEBUG_INSN:
4322     case INSN:
4323     case JUMP_INSN:
4324     case CALL_INSN:
4325     case CODE_LABEL:
4326     case BARRIER:
4327     case NOTE:
4328       last = emit_insn_after_1 (x, after, bb);
4329       break;
4330 
4331 #ifdef ENABLE_RTL_CHECKING
4332     case SEQUENCE:
4333       gcc_unreachable ();
4334       break;
4335 #endif
4336 
4337     default:
4338       last = (*make_raw) (x);
4339       add_insn_after (last, after, bb);
4340       break;
4341     }
4342 
4343   return last;
4344 }
4345 
4346 /* Make X be output after the insn AFTER and set its basic block.  If
4347    BB is NULL, an attempt is made to infer the BB from AFTER.  */
4348 
4349 rtx
4350 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4351 {
4352   return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4353 }
4354 
4355 
4356 /* Make an insn of code JUMP_INSN with body X
4357    and output it after the insn AFTER.  */
4358 
4359 rtx
4360 emit_jump_insn_after_noloc (rtx x, rtx after)
4361 {
4362   return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4363 }
4364 
4365 /* Make an instruction with body X and code CALL_INSN
4366    and output it after the instruction AFTER.  */
4367 
4368 rtx
4369 emit_call_insn_after_noloc (rtx x, rtx after)
4370 {
4371   return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4372 }
4373 
4374 /* Make an instruction with body X and code DEBUG_INSN
4375    and output it after the instruction AFTER.  */
4376 
4377 rtx
4378 emit_debug_insn_after_noloc (rtx x, rtx after)
4379 {
4380   return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4381 }
4382 
4383 /* Make an insn of code BARRIER
4384    and output it after the insn AFTER.  */
4385 
4386 rtx
4387 emit_barrier_after (rtx after)
4388 {
4389   rtx insn = rtx_alloc (BARRIER);
4390 
4391   INSN_UID (insn) = cur_insn_uid++;
4392 
4393   add_insn_after (insn, after, NULL);
4394   return insn;
4395 }
4396 
4397 /* Emit the label LABEL after the insn AFTER.  */
4398 
4399 rtx
4400 emit_label_after (rtx label, rtx after)
4401 {
4402   /* This can be called twice for the same label
4403      as a result of the confusion that follows a syntax error!
4404      So make it harmless.  */
4405   if (INSN_UID (label) == 0)
4406     {
4407       INSN_UID (label) = cur_insn_uid++;
4408       add_insn_after (label, after, NULL);
4409     }
4410 
4411   return label;
4412 }
4413 
4414 /* Emit a note of subtype SUBTYPE after the insn AFTER.  */
4415 
4416 rtx
4417 emit_note_after (enum insn_note subtype, rtx after)
4418 {
4419   rtx note = rtx_alloc (NOTE);
4420   INSN_UID (note) = cur_insn_uid++;
4421   NOTE_KIND (note) = subtype;
4422   BLOCK_FOR_INSN (note) = NULL;
4423   memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4424   add_insn_after (note, after, NULL);
4425   return note;
4426 }
4427 
4428 /* Insert PATTERN after AFTER, setting its INSN_LOCATOR to LOC.
4429    MAKE_RAW indicates how to turn PATTERN into a real insn.  */
4430 
4431 static rtx
4432 emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4433 			   rtx (*make_raw) (rtx))
4434 {
4435   rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4436 
4437   if (pattern == NULL_RTX || !loc)
4438     return last;
4439 
4440   after = NEXT_INSN (after);
4441   while (1)
4442     {
4443       if (active_insn_p (after) && !INSN_LOCATOR (after))
4444 	INSN_LOCATOR (after) = loc;
4445       if (after == last)
4446 	break;
4447       after = NEXT_INSN (after);
4448     }
4449   return last;
4450 }
4451 
4452 /* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
4453    into a real insn.  SKIP_DEBUG_INSNS indicates whether DEBUG_INSNs are
4454    skipped when picking the insn whose location is copied.  */
4455 
4456 static rtx
4457 emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4458 		    rtx (*make_raw) (rtx))
4459 {
4460   rtx prev = after;
4461 
4462   if (skip_debug_insns)
4463     while (DEBUG_INSN_P (prev))
4464       prev = PREV_INSN (prev);
4465 
4466   if (INSN_P (prev))
4467     return emit_pattern_after_setloc (pattern, after, INSN_LOCATOR (prev),
4468 				      make_raw);
4469   else
4470     return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4471 }
4472 
4473 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
4474 rtx
4475 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4476 {
4477   return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4478 }
4479 
4480 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
4481 rtx
4482 emit_insn_after (rtx pattern, rtx after)
4483 {
4484   return emit_pattern_after (pattern, after, true, make_insn_raw);
4485 }
4486 
4487 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
4488 rtx
4489 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4490 {
4491   return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4492 }
4493 
4494 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
4495 rtx
4496 emit_jump_insn_after (rtx pattern, rtx after)
4497 {
4498   return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4499 }
4500 
4501 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
4502 rtx
4503 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4504 {
4505   return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4506 }
4507 
4508 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
4509 rtx
4510 emit_call_insn_after (rtx pattern, rtx after)
4511 {
4512   return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4513 }
4514 
4515 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
4516 rtx
4517 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4518 {
4519   return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4520 }
4521 
4522 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
4523 rtx
4524 emit_debug_insn_after (rtx pattern, rtx after)
4525 {
4526   return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4527 }
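
/* A typical use of this emit_*_after family, with illustrative
   operands: emit a register copy directly after ANCHOR, taking the
   INSN_LOCATOR from the nearest non-debug insn at or before ANCHOR:

	emit_insn_after (gen_move_insn (dest, src), anchor);  */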
4528 
4529 /* Insert PATTERN before BEFORE, setting its INSN_LOCATOR to LOC.
4530    MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
4531    indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4532    CALL_INSN, etc.  */
4533 
4534 static rtx
4535 emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4536 			    rtx (*make_raw) (rtx))
4537 {
4538   rtx first = PREV_INSN (before);
4539   rtx last = emit_pattern_before_noloc (pattern, before,
4540                                         insnp ? before : NULL_RTX,
4541                                         NULL, make_raw);
4542 
4543   if (pattern == NULL_RTX || !loc)
4544     return last;
4545 
4546   if (!first)
4547     first = get_insns ();
4548   else
4549     first = NEXT_INSN (first);
4550   while (1)
4551     {
4552       if (active_insn_p (first) && !INSN_LOCATOR (first))
4553 	INSN_LOCATOR (first) = loc;
4554       if (first == last)
4555 	break;
4556       first = NEXT_INSN (first);
4557     }
4558   return last;
4559 }
4560 
4561 /* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
4562    into a real insn.  SKIP_DEBUG_INSNS indicates whether DEBUG_INSNs are
4563    skipped when picking the insn whose location is copied.  INSNP says
4564    if PATTERN is meant for an INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */
4565 
4566 static rtx
4567 emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4568 		     bool insnp, rtx (*make_raw) (rtx))
4569 {
4570   rtx next = before;
4571 
4572   if (skip_debug_insns)
4573     while (DEBUG_INSN_P (next))
4574       next = PREV_INSN (next);
4575 
4576   if (INSN_P (next))
4577     return emit_pattern_before_setloc (pattern, before, INSN_LOCATOR (next),
4578 				       insnp, make_raw);
4579   else
4580     return emit_pattern_before_noloc (pattern, before,
4581                                       insnp ? before : NULL_RTX,
4582                                       NULL, make_raw);
4583 }
4584 
4585 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
4586 rtx
4587 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4588 {
4589   return emit_pattern_before_setloc (pattern, before, loc, true,
4590 				     make_insn_raw);
4591 }
4592 
4593 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
4594 rtx
4595 emit_insn_before (rtx pattern, rtx before)
4596 {
4597   return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4598 }
4599 
4600 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
4601 rtx
4602 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4603 {
4604   return emit_pattern_before_setloc (pattern, before, loc, false,
4605 				     make_jump_insn_raw);
4606 }
4607 
4608 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
4609 rtx
4610 emit_jump_insn_before (rtx pattern, rtx before)
4611 {
4612   return emit_pattern_before (pattern, before, true, false,
4613 			      make_jump_insn_raw);
4614 }
4615 
4616 /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
4617 rtx
4618 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4619 {
4620   return emit_pattern_before_setloc (pattern, before, loc, false,
4621 				     make_call_insn_raw);
4622 }
4623 
4624 /* Like emit_call_insn_before_noloc,
4625    but set insn_locator according to BEFORE.  */
4626 rtx
4627 emit_call_insn_before (rtx pattern, rtx before)
4628 {
4629   return emit_pattern_before (pattern, before, true, false,
4630 			      make_call_insn_raw);
4631 }
4632 
4633 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
4634 rtx
4635 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4636 {
4637   return emit_pattern_before_setloc (pattern, before, loc, false,
4638 				     make_debug_insn_raw);
4639 }
4640 
4641 /* Like emit_debug_insn_before_noloc,
4642    but set insn_locator according to BEFORE.  */
4643 rtx
4644 emit_debug_insn_before (rtx pattern, rtx before)
4645 {
4646   return emit_pattern_before (pattern, before, false, false,
4647 			      make_debug_insn_raw);
4648 }
4649 
4650 /* Take X and emit it at the end of the doubly-linked
4651    INSN list.
4652 
4653    Returns the last insn emitted.  */
4654 
4655 rtx
4656 emit_insn (rtx x)
4657 {
4658   rtx last = get_last_insn ();
4659   rtx insn;
4660 
4661   if (x == NULL_RTX)
4662     return last;
4663 
4664   switch (GET_CODE (x))
4665     {
4666     case DEBUG_INSN:
4667     case INSN:
4668     case JUMP_INSN:
4669     case CALL_INSN:
4670     case CODE_LABEL:
4671     case BARRIER:
4672     case NOTE:
4673       insn = x;
4674       while (insn)
4675 	{
4676 	  rtx next = NEXT_INSN (insn);
4677 	  add_insn (insn);
4678 	  last = insn;
4679 	  insn = next;
4680 	}
4681       break;
4682 
4683 #ifdef ENABLE_RTL_CHECKING
4684     case SEQUENCE:
4685       gcc_unreachable ();
4686       break;
4687 #endif
4688 
4689     default:
4690       last = make_insn_raw (x);
4691       add_insn (last);
4692       break;
4693     }
4694 
4695   return last;
4696 }
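
/* The common case, with illustrative operands:

	emit_insn (gen_rtx_SET (VOIDmode, dest, src));

   A bare SET reaches the default branch above, is wrapped in a fresh
   INSN by make_insn_raw, and is appended to the current chain.  */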
4697 
4698 /* Make an insn of code DEBUG_INSN with pattern X
4699    and add it to the end of the doubly-linked list.  */
4700 
4701 rtx
4702 emit_debug_insn (rtx x)
4703 {
4704   rtx last = get_last_insn ();
4705   rtx insn;
4706 
4707   if (x == NULL_RTX)
4708     return last;
4709 
4710   switch (GET_CODE (x))
4711     {
4712     case DEBUG_INSN:
4713     case INSN:
4714     case JUMP_INSN:
4715     case CALL_INSN:
4716     case CODE_LABEL:
4717     case BARRIER:
4718     case NOTE:
4719       insn = x;
4720       while (insn)
4721 	{
4722 	  rtx next = NEXT_INSN (insn);
4723 	  add_insn (insn);
4724 	  last = insn;
4725 	  insn = next;
4726 	}
4727       break;
4728 
4729 #ifdef ENABLE_RTL_CHECKING
4730     case SEQUENCE:
4731       gcc_unreachable ();
4732       break;
4733 #endif
4734 
4735     default:
4736       last = make_debug_insn_raw (x);
4737       add_insn (last);
4738       break;
4739     }
4740 
4741   return last;
4742 }
4743 
4744 /* Make an insn of code JUMP_INSN with pattern X
4745    and add it to the end of the doubly-linked list.  */
4746 
4747 rtx
4748 emit_jump_insn (rtx x)
4749 {
4750   rtx last = NULL_RTX, insn;
4751 
4752   switch (GET_CODE (x))
4753     {
4754     case DEBUG_INSN:
4755     case INSN:
4756     case JUMP_INSN:
4757     case CALL_INSN:
4758     case CODE_LABEL:
4759     case BARRIER:
4760     case NOTE:
4761       insn = x;
4762       while (insn)
4763 	{
4764 	  rtx next = NEXT_INSN (insn);
4765 	  add_insn (insn);
4766 	  last = insn;
4767 	  insn = next;
4768 	}
4769       break;
4770 
4771 #ifdef ENABLE_RTL_CHECKING
4772     case SEQUENCE:
4773       gcc_unreachable ();
4774       break;
4775 #endif
4776 
4777     default:
4778       last = make_jump_insn_raw (x);
4779       add_insn (last);
4780       break;
4781     }
4782 
4783   return last;
4784 }
4785 
4786 /* Make an insn of code CALL_INSN with pattern X
4787    and add it to the end of the doubly-linked list.  */
4788 
4789 rtx
4790 emit_call_insn (rtx x)
4791 {
4792   rtx insn;
4793 
4794   switch (GET_CODE (x))
4795     {
4796     case DEBUG_INSN:
4797     case INSN:
4798     case JUMP_INSN:
4799     case CALL_INSN:
4800     case CODE_LABEL:
4801     case BARRIER:
4802     case NOTE:
4803       insn = emit_insn (x);
4804       break;
4805 
4806 #ifdef ENABLE_RTL_CHECKING
4807     case SEQUENCE:
4808       gcc_unreachable ();
4809       break;
4810 #endif
4811 
4812     default:
4813       insn = make_call_insn_raw (x);
4814       add_insn (insn);
4815       break;
4816     }
4817 
4818   return insn;
4819 }
4820 
4821 /* Add the label LABEL to the end of the doubly-linked list.  */
4822 
4823 rtx
4824 emit_label (rtx label)
4825 {
4826   /* This can be called twice for the same label
4827      as a result of the confusion that follows a syntax error!
4828      So make it harmless.  */
4829   if (INSN_UID (label) == 0)
4830     {
4831       INSN_UID (label) = cur_insn_uid++;
4832       add_insn (label);
4833     }
4834   return label;
4835 }
4836 
4837 /* Make an insn of code BARRIER
4838    and add it to the end of the doubly-linked list.  */
4839 
4840 rtx
4841 emit_barrier (void)
4842 {
4843   rtx barrier = rtx_alloc (BARRIER);
4844   INSN_UID (barrier) = cur_insn_uid++;
4845   add_insn (barrier);
4846   return barrier;
4847 }
4848 
4849 /* Emit a copy of note ORIG.  */
4850 
4851 rtx
4852 emit_note_copy (rtx orig)
4853 {
4854   rtx note;
4855 
4856   note = rtx_alloc (NOTE);
4857 
4858   INSN_UID (note) = cur_insn_uid++;
4859   NOTE_DATA (note) = NOTE_DATA (orig);
4860   NOTE_KIND (note) = NOTE_KIND (orig);
4861   BLOCK_FOR_INSN (note) = NULL;
4862   add_insn (note);
4863 
4864   return note;
4865 }
4866 
4867 /* Make an insn of code NOTE with kind KIND
4868    and add it to the end of the doubly-linked list.  */
4869 
4870 rtx
4871 emit_note (enum insn_note kind)
4872 {
4873   rtx note;
4874 
4875   note = rtx_alloc (NOTE);
4876   INSN_UID (note) = cur_insn_uid++;
4877   NOTE_KIND (note) = kind;
4878   memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4879   BLOCK_FOR_INSN (note) = NULL;
4880   add_insn (note);
4881   return note;
4882 }
4883 
4884 /* Emit a clobber of lvalue X.  */
4885 
4886 rtx
4887 emit_clobber (rtx x)
4888 {
4889   /* CONCATs should not appear in the insn stream.  */
4890   if (GET_CODE (x) == CONCAT)
4891     {
4892       emit_clobber (XEXP (x, 0));
4893       return emit_clobber (XEXP (x, 1));
4894     }
4895   return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4896 }
4897 
4898 /* Return a sequence of insns to clobber lvalue X.  */
4899 
4900 rtx
4901 gen_clobber (rtx x)
4902 {
4903   rtx seq;
4904 
4905   start_sequence ();
4906   emit_clobber (x);
4907   seq = get_insns ();
4908   end_sequence ();
4909   return seq;
4910 }
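
/* Illustrative uses (the operand name is hypothetical): an expander
   may clobber a result register before initializing it piecewise, or
   collect the clobber into a detached sequence:

	emit_clobber (target);
	seq = gen_clobber (target);  */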
4911 
4912 /* Emit a use of rvalue X.  */
4913 
4914 rtx
4915 emit_use (rtx x)
4916 {
4917   /* CONCATs should not appear in the insn stream.  */
4918   if (GET_CODE (x) == CONCAT)
4919     {
4920       emit_use (XEXP (x, 0));
4921       return emit_use (XEXP (x, 1));
4922     }
4923   return emit_insn (gen_rtx_USE (VOIDmode, x));
4924 }
4925 
4926 /* Return a sequence of insns to use rvalue X.  */
4927 
4928 rtx
4929 gen_use (rtx x)
4930 {
4931   rtx seq;
4932 
4933   start_sequence ();
4934   emit_use (x);
4935   seq = get_insns ();
4936   end_sequence ();
4937   return seq;
4938 }
4939 
4940 /* Cause the next statement to emit a line note even if the line number
4941    has not changed.  */
4942 
4943 void
4944 force_next_line_note (void)
4945 {
4946   last_location = -1;
4947 }
4948 
4949 /* Place a note of KIND on insn INSN with DATUM as the datum.  If a
4950    note of this type already exists, its datum is replaced with DATUM.  */
4951 
4952 rtx
4953 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4954 {
4955   rtx note = find_reg_note (insn, kind, NULL_RTX);
4956 
4957   switch (kind)
4958     {
4959     case REG_EQUAL:
4960     case REG_EQUIV:
4961       /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4962 	 has multiple sets (some callers assume single_set
4963 	 means the insn only has one set, when in fact it
4964 	 means the insn only has one * useful * set).  */
4965       if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4966 	{
4967 	  gcc_assert (!note);
4968 	  return NULL_RTX;
4969 	}
4970 
4971       /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4972 	 It serves no useful purpose and breaks eliminate_regs.  */
4973       if (GET_CODE (datum) == ASM_OPERANDS)
4974 	return NULL_RTX;
4975 
4976       if (note)
4977 	{
4978 	  XEXP (note, 0) = datum;
4979 	  df_notes_rescan (insn);
4980 	  return note;
4981 	}
4982       break;
4983 
4984     default:
4985       if (note)
4986 	{
4987 	  XEXP (note, 0) = datum;
4988 	  return note;
4989 	}
4990       break;
4991     }
4992 
4993   add_reg_note (insn, kind, datum);
4994 
4995   switch (kind)
4996     {
4997     case REG_EQUAL:
4998     case REG_EQUIV:
4999       df_notes_rescan (insn);
5000       break;
5001     default:
5002       break;
5003     }
5004 
5005   return REG_NOTES (insn);
5006 }
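
/* For instance, recording that INSN's single set computes a known
   constant (the value is illustrative):

	set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   An existing REG_EQUAL note on INSN has its datum overwritten, so
   at most one note of the kind survives.  */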
5007 
5008 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */
5009 rtx
5010 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5011 {
5012   rtx set = single_set (insn);
5013 
5014   if (set && SET_DEST (set) == dst)
5015     return set_unique_reg_note (insn, kind, datum);
5016   return NULL_RTX;
5017 }
5018 
5019 /* Return an indication of which type of insn should have X as a body.
5020    The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */
5021 
5022 static enum rtx_code
5023 classify_insn (rtx x)
5024 {
5025   if (LABEL_P (x))
5026     return CODE_LABEL;
5027   if (GET_CODE (x) == CALL)
5028     return CALL_INSN;
5029   if (ANY_RETURN_P (x))
5030     return JUMP_INSN;
5031   if (GET_CODE (x) == SET)
5032     {
5033       if (SET_DEST (x) == pc_rtx)
5034 	return JUMP_INSN;
5035       else if (GET_CODE (SET_SRC (x)) == CALL)
5036 	return CALL_INSN;
5037       else
5038 	return INSN;
5039     }
5040   if (GET_CODE (x) == PARALLEL)
5041     {
5042       int j;
5043       for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5044 	if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5045 	  return CALL_INSN;
5046 	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5047 		 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5048 	  return JUMP_INSN;
5049 	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5050 		 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5051 	  return CALL_INSN;
5052     }
5053   return INSN;
5054 }
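
/* Sketch of the mapping: a body of (set (pc) ...) classifies as
   JUMP_INSN, (call ...) or (set ... (call ...)) as CALL_INSN, a
   label as CODE_LABEL, and anything unrecognized defaults to INSN;
   PARALLELs are classified by scanning their elements as above.  */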
5055 
5056 /* Emit the rtl pattern X as an appropriate kind of insn.
5057    If X is a label, it is simply added into the insn chain.  */
5058 
5059 rtx
5060 emit (rtx x)
5061 {
5062   enum rtx_code code = classify_insn (x);
5063 
5064   switch (code)
5065     {
5066     case CODE_LABEL:
5067       return emit_label (x);
5068     case INSN:
5069       return emit_insn (x);
5070     case JUMP_INSN:
5071       {
5072 	rtx insn = emit_jump_insn (x);
5073 	if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5074 	  return emit_barrier ();
5075 	return insn;
5076       }
5077     case CALL_INSN:
5078       return emit_call_insn (x);
5079     case DEBUG_INSN:
5080       return emit_debug_insn (x);
5081     default:
5082       gcc_unreachable ();
5083     }
5084 }
5085 
5086 /* Space for free sequence stack entries.  */
5087 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5088 
5089 /* Begin emitting insns to a sequence.  If this sequence will contain
5090    something that might cause the compiler to pop arguments to function
5091    calls (because those pops have previously been deferred; see
5092    INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5093    before calling this function.  That will ensure that the deferred
5094    pops are not accidentally emitted in the middle of this sequence.  */
5095 
5096 void
5097 start_sequence (void)
5098 {
5099   struct sequence_stack *tem;
5100 
5101   if (free_sequence_stack != NULL)
5102     {
5103       tem = free_sequence_stack;
5104       free_sequence_stack = tem->next;
5105     }
5106   else
5107     tem = ggc_alloc_sequence_stack ();
5108 
5109   tem->next = seq_stack;
5110   tem->first = get_insns ();
5111   tem->last = get_last_insn ();
5112 
5113   seq_stack = tem;
5114 
5115   set_first_insn (0);
5116   set_last_insn (0);
5117 }
5118 
5119 /* Set up the insn chain starting with FIRST as the current sequence,
5120    saving the previously current one.  See the documentation for
5121    start_sequence for more information about how to use this function.  */
5122 
5123 void
5124 push_to_sequence (rtx first)
5125 {
5126   rtx last;
5127 
5128   start_sequence ();
5129 
5130   for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5131     ;
5132 
5133   set_first_insn (first);
5134   set_last_insn (last);
5135 }
5136 
5137 /* Like push_to_sequence, but take the last insn as an argument to avoid
5138    looping through the list.  */
5139 
5140 void
5141 push_to_sequence2 (rtx first, rtx last)
5142 {
5143   start_sequence ();
5144 
5145   set_first_insn (first);
5146   set_last_insn (last);
5147 }
5148 
5149 /* Set up the outer-level insn chain
5150    as the current sequence, saving the previously current one.  */
5151 
5152 void
5153 push_topmost_sequence (void)
5154 {
5155   struct sequence_stack *stack, *top = NULL;
5156 
5157   start_sequence ();
5158 
5159   for (stack = seq_stack; stack; stack = stack->next)
5160     top = stack;
5161 
5162   set_first_insn (top->first);
5163   set_last_insn (top->last);
5164 }
5165 
5166 /* After emitting to the outer-level insn chain, update the outer-level
5167    insn chain, and restore the previous saved state.  */
5168 
5169 void
5170 pop_topmost_sequence (void)
5171 {
5172   struct sequence_stack *stack, *top = NULL;
5173 
5174   for (stack = seq_stack; stack; stack = stack->next)
5175     top = stack;
5176 
5177   top->first = get_insns ();
5178   top->last = get_last_insn ();
5179 
5180   end_sequence ();
5181 }
5182 
5183 /* After emitting to a sequence, restore previous saved state.
5184 
5185    To get the contents of the sequence just made, you must call
5186    `get_insns' *before* calling here.
5187 
5188    If the compiler might have deferred popping arguments while
5189    generating this sequence, and this sequence will not be immediately
5190    inserted into the instruction stream, use do_pending_stack_adjust
5191    before calling get_insns.  That will ensure that the deferred
5192    pops are inserted into this sequence, and not into some random
5193    location in the instruction stream.  See INHIBIT_DEFER_POP for more
5194    information about deferred popping of arguments.  */
5195 
5196 void
5197 end_sequence (void)
5198 {
5199   struct sequence_stack *tem = seq_stack;
5200 
5201   set_first_insn (tem->first);
5202   set_last_insn (tem->last);
5203   seq_stack = tem->next;
5204 
5205   memset (tem, 0, sizeof (*tem));
5206   tem->next = free_sequence_stack;
5207   free_sequence_stack = tem;
5208 }
5209 
5210 /* Return 1 if currently emitting into a sequence.  */
5211 
5212 int
5213 in_sequence_p (void)
5214 {
5215   return seq_stack != 0;
5216 }
5217 
5218 /* Put the various virtual registers into REGNO_REG_RTX.  */
5219 
5220 static void
5221 init_virtual_regs (void)
5222 {
5223   regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5224   regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5225   regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5226   regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5227   regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5228   regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5229     = virtual_preferred_stack_boundary_rtx;
5230 }
5231 
5232 
5233 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
5234 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5235 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5236 static int copy_insn_n_scratches;
5237 
5238 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5239    copied an ASM_OPERANDS.
5240    In that case, it is the original input-operand vector.  */
5241 static rtvec orig_asm_operands_vector;
5242 
5243 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5244    copied an ASM_OPERANDS.
5245    In that case, it is the copied input-operand vector.  */
5246 static rtvec copy_asm_operands_vector;
5247 
5248 /* Likewise for the constraints vector.  */
5249 static rtvec orig_asm_constraints_vector;
5250 static rtvec copy_asm_constraints_vector;
5251 
5252 /* Recursively create a new copy of an rtx for copy_insn.
5253    This function differs from copy_rtx in that it handles SCRATCHes and
5254    ASM_OPERANDs properly.
5255    Normally, this function is not used directly; use copy_insn as front end.
5256    However, you could first copy an insn pattern with copy_insn and then use
5257    this function afterwards to properly copy any REG_NOTEs containing
5258    SCRATCHes.  */
5259 
5260 rtx
5261 copy_insn_1 (rtx orig)
5262 {
5263   rtx copy;
5264   int i, j;
5265   RTX_CODE code;
5266   const char *format_ptr;
5267 
5268   if (orig == NULL)
5269     return NULL;
5270 
5271   code = GET_CODE (orig);
5272 
5273   switch (code)
5274     {
5275     case REG:
5276     case DEBUG_EXPR:
5277     case CONST_INT:
5278     case CONST_DOUBLE:
5279     case CONST_FIXED:
5280     case CONST_VECTOR:
5281     case SYMBOL_REF:
5282     case CODE_LABEL:
5283     case PC:
5284     case CC0:
5285     case RETURN:
5286     case SIMPLE_RETURN:
5287       return orig;
5288     case CLOBBER:
5289       if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
5290 	return orig;
5291       break;
5292 
5293     case SCRATCH:
5294       for (i = 0; i < copy_insn_n_scratches; i++)
5295 	if (copy_insn_scratch_in[i] == orig)
5296 	  return copy_insn_scratch_out[i];
5297       break;
5298 
5299     case CONST:
5300       if (shared_const_p (orig))
5301 	return orig;
5302       break;
5303 
5304       /* A MEM with a constant address is not sharable.  The problem is that
5305 	 the constant address may need to be reloaded.  If the mem is shared,
5306 	 then reloading one copy of this mem will cause all copies to appear
5307 	 to have been reloaded.  */
5308 
5309     default:
5310       break;
5311     }
5312 
5313   /* Copy the various flags, fields, and other information.  We assume
5314      that all fields need copying, and then clear the fields that should
5315      not be copied.  That is the sensible default behavior, and forces
5316      us to explicitly document why we are *not* copying a flag.  */
5317   copy = shallow_copy_rtx (orig);
5318 
5319   /* We do not copy the USED flag, which is used as a mark bit during
5320      walks over the RTL.  */
5321   RTX_FLAG (copy, used) = 0;
5322 
5323   /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
5324   if (INSN_P (orig))
5325     {
5326       RTX_FLAG (copy, jump) = 0;
5327       RTX_FLAG (copy, call) = 0;
5328       RTX_FLAG (copy, frame_related) = 0;
5329     }
5330 
5331   format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5332 
5333   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5334     switch (*format_ptr++)
5335       {
5336       case 'e':
5337 	if (XEXP (orig, i) != NULL)
5338 	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5339 	break;
5340 
5341       case 'E':
5342       case 'V':
5343 	if (XVEC (orig, i) == orig_asm_constraints_vector)
5344 	  XVEC (copy, i) = copy_asm_constraints_vector;
5345 	else if (XVEC (orig, i) == orig_asm_operands_vector)
5346 	  XVEC (copy, i) = copy_asm_operands_vector;
5347 	else if (XVEC (orig, i) != NULL)
5348 	  {
5349 	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5350 	    for (j = 0; j < XVECLEN (copy, i); j++)
5351 	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5352 	  }
5353 	break;
5354 
5355       case 't':
5356       case 'w':
5357       case 'i':
5358       case 's':
5359       case 'S':
5360       case 'u':
5361       case '0':
5362 	/* These are left unchanged.  */
5363 	break;
5364 
5365       default:
5366 	gcc_unreachable ();
5367       }
5368 
5369   if (code == SCRATCH)
5370     {
5371       i = copy_insn_n_scratches++;
5372       gcc_assert (i < MAX_RECOG_OPERANDS);
5373       copy_insn_scratch_in[i] = orig;
5374       copy_insn_scratch_out[i] = copy;
5375     }
5376   else if (code == ASM_OPERANDS)
5377     {
5378       orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5379       copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5380       orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5381       copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5382     }
5383 
5384   return copy;
5385 }
5386 
5387 /* Create a new copy of an rtx.
5388    This function differs from copy_rtx in that it handles SCRATCHes and
5389    ASM_OPERANDs properly.
5390    INSN doesn't really have to be a full INSN; it could be just the
5391    pattern.  */
5392 rtx
5393 copy_insn (rtx insn)
5394 {
5395   copy_insn_n_scratches = 0;
5396   orig_asm_operands_vector = 0;
5397   orig_asm_constraints_vector = 0;
5398   copy_asm_operands_vector = 0;
5399   copy_asm_constraints_vector = 0;
5400   return copy_insn_1 (insn);
5401 }
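
/* The usual idiom for re-emitting an existing insn's pattern without
   creating invalid rtl sharing (INSN here is illustrative):

	emit_insn (copy_insn (PATTERN (insn)));  */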
5402 
5403 /* Initialize data structures and variables in this file
5404    before generating rtl for each function.  */
5405 
5406 void
5407 init_emit (void)
5408 {
5409   set_first_insn (NULL);
5410   set_last_insn (NULL);
5411   if (MIN_NONDEBUG_INSN_UID)
5412     cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5413   else
5414     cur_insn_uid = 1;
5415   cur_debug_insn_uid = 1;
5416   reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5417   last_location = UNKNOWN_LOCATION;
5418   first_label_num = label_num;
5419   seq_stack = NULL;
5420 
5421   /* Init the tables that describe all the pseudo regs.  */
5422 
5423   crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5424 
5425   crtl->emit.regno_pointer_align
5426     = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5427 
5428   regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);
5429 
5430   /* Put copies of all the hard registers into regno_reg_rtx.  */
5431   memcpy (regno_reg_rtx,
5432 	  initial_regno_reg_rtx,
5433 	  FIRST_PSEUDO_REGISTER * sizeof (rtx));
5434 
5435   /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
5436   init_virtual_regs ();
5437 
5438   /* Indicate that the virtual registers and stack locations are
5439      all pointers.  */
5440   REG_POINTER (stack_pointer_rtx) = 1;
5441   REG_POINTER (frame_pointer_rtx) = 1;
5442   REG_POINTER (hard_frame_pointer_rtx) = 1;
5443   REG_POINTER (arg_pointer_rtx) = 1;
5444 
5445   REG_POINTER (virtual_incoming_args_rtx) = 1;
5446   REG_POINTER (virtual_stack_vars_rtx) = 1;
5447   REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5448   REG_POINTER (virtual_outgoing_args_rtx) = 1;
5449   REG_POINTER (virtual_cfa_rtx) = 1;
5450 
5451 #ifdef STACK_BOUNDARY
5452   REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5453   REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5454   REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5455   REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5456 
5457   REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5458   REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5459   REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5460   REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5461   REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5462 #endif
5463 
5464 #ifdef INIT_EXPANDERS
5465   INIT_EXPANDERS;
5466 #endif
5467 }
5468 
5469 /* Generate a vector constant for mode MODE and constant value CONSTANT.  */
5470 
5471 static rtx
5472 gen_const_vector (enum machine_mode mode, int constant)
5473 {
5474   rtx tem;
5475   rtvec v;
5476   int units, i;
5477   enum machine_mode inner;
5478 
5479   units = GET_MODE_NUNITS (mode);
5480   inner = GET_MODE_INNER (mode);
5481 
5482   gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5483 
5484   v = rtvec_alloc (units);
5485 
5486   /* We need to call this function after we set the scalar const_tiny_rtx
5487      entries.  */
5488   gcc_assert (const_tiny_rtx[constant][(int) inner]);
5489 
5490   for (i = 0; i < units; ++i)
5491     RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5492 
5493   tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5494   return tem;
5495 }
5496 
5497 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5498    all elements are zero, and the one vector when all elements are one.  */
5499 rtx
5500 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5501 {
5502   enum machine_mode inner = GET_MODE_INNER (mode);
5503   int nunits = GET_MODE_NUNITS (mode);
5504   rtx x;
5505   int i;
5506 
5507   /* Check to see if all of the elements have the same value.  */
5508   x = RTVEC_ELT (v, nunits - 1);
5509   for (i = nunits - 2; i >= 0; i--)
5510     if (RTVEC_ELT (v, i) != x)
5511       break;
5512 
5513   /* If the values are all the same, check to see if we can use one of the
5514      standard constant vectors.  */
5515   if (i == -1)
5516     {
5517       if (x == CONST0_RTX (inner))
5518 	return CONST0_RTX (mode);
5519       else if (x == CONST1_RTX (inner))
5520 	return CONST1_RTX (mode);
5521       else if (x == CONSTM1_RTX (inner))
5522 	return CONSTM1_RTX (mode);
5523     }
5524 
5525   return gen_rtx_raw_CONST_VECTOR (mode, v);
5526 }
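
/* For example, a vector of zeros folds to the shared constant; the
   V4SImode used below is illustrative and target-dependent:

	rtvec v = rtvec_alloc (4);
	int i;
	for (i = 0; i < 4; i++)
	  RTVEC_ELT (v, i) = const0_rtx;
	x = gen_rtx_CONST_VECTOR (V4SImode, v);

   Here X ends up being CONST0_RTX (V4SImode) itself rather than a
   freshly allocated CONST_VECTOR.  */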
5527 
5528 /* Initialize global register information required by all functions.  */
5529 
5530 void
5531 init_emit_regs (void)
5532 {
5533   int i;
5534   enum machine_mode mode;
5535   mem_attrs *attrs;
5536 
5537   /* Reset register attributes.  */
5538   htab_empty (reg_attrs_htab);
5539 
5540   /* We need reg_raw_mode, so initialize the modes now.  */
5541   init_reg_modes_target ();
5542 
5543   /* Assign register numbers to the globally defined register rtx.  */
5544   pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
5545   ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
5546   simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
5547   cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
5548   stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5549   frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5550   hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5551   arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5552   virtual_incoming_args_rtx =
5553     gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5554   virtual_stack_vars_rtx =
5555     gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5556   virtual_stack_dynamic_rtx =
5557     gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5558   virtual_outgoing_args_rtx =
5559     gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5560   virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5561   virtual_preferred_stack_boundary_rtx =
5562     gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5563 
5564   /* Initialize RTL for commonly used hard registers.  These are
5565      copied into regno_reg_rtx as we begin to compile each function.  */
5566   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5567     initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5568 
5569 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5570   return_address_pointer_rtx
5571     = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5572 #endif
5573 
5574   if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5575     pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5576   else
5577     pic_offset_table_rtx = NULL_RTX;
5578 
5579   for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5580     {
5581       mode = (enum machine_mode) i;
5582       attrs = ggc_alloc_cleared_mem_attrs ();
5583       attrs->align = BITS_PER_UNIT;
5584       attrs->addrspace = ADDR_SPACE_GENERIC;
5585       if (mode != BLKmode)
5586 	{
5587 	  attrs->size_known_p = true;
5588 	  attrs->size = GET_MODE_SIZE (mode);
5589 	  if (STRICT_ALIGNMENT)
5590 	    attrs->align = GET_MODE_ALIGNMENT (mode);
5591 	}
5592       mode_mem_attrs[i] = attrs;
5593     }
5594 }
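
/* After init_emit_regs, mode_mem_attrs[MODE] supplies the default
   attributes for a MEM of that mode: generic address space, size equal
   to GET_MODE_SIZE (MODE) for non-BLKmode modes, and byte alignment
   unless STRICT_ALIGNMENT promotes it to the mode's natural alignment.
   So on a strict-alignment target the SImode entry would typically
   report a known size of 4 and 32-bit alignment.  */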

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
     hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
				      const_fixed_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
				    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

  /* Compute the byte, word, and double-precision float modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
	  && double_mode == VOIDmode)
	double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
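
  /* ptr_mode and Pmode usually coincide, but they can differ: Pmode is
     the mode the target uses for addresses, while ptr_mode matches the
     C-level pointer size.  A target with 64-bit address registers but
     32-bit pointers, for instance, would end up with Pmode == DImode and
     ptr_mode == SImode here.  */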

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end, which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here, since that routine would try to
     look up these very variables before they are initialized.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
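
  /* Sharing the small constants means pointer equality suffices to test
     them.  For example, after this point both

       GEN_INT (0) == const0_rtx
       GEN_INT (-1) == constm1_rtx

     hold, because GEN_INT returns the cached rtx for any value in
     [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT].  */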

  REAL_VALUE_FROM_INT (dconst0,   0,  0, double_mode);
  REAL_VALUE_FROM_INT (dconst1,   1,  0, double_mode);
  REAL_VALUE_FROM_INT (dconst2,   2,  0, double_mode);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
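
  /* Halving by decrementing the exponent works because the internal real
     format keeps a normalized significand f with 0.5 <= f < 1, so the
     value is f * 2^e.  dconst1 is 0.5 * 2^1; dropping the exponent by
     one gives 0.5 * 2^0, i.e. exactly 0.5, with the significand
     untouched.  */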

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }
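
  /* These tables back the CONST0_RTX, CONST1_RTX, CONST2_RTX and
     CONSTM1_RTX macros in rtl.h; e.g. CONST1_RTX (SFmode) simply expands
     to const_tiny_rtx[1][(int) SFmode], so every request for 1.0 in
     SFmode returns the same shared CONST_DOUBLE built above.  Row 3,
     filled in next, holds the -1 constants.  */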

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] =
	CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] =
	CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] =
	CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (mode), mode);

      /* Store the fixed-point value 1: shift 1 left by the number of
	 fractional bits so that it lands in the integer part.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
		     2 * HOST_BITS_PER_WIDE_INT,
		     &FCONST1 (mode).data.low,
		     &FCONST1 (mode).data.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] =
	CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (mode), mode);
    }
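
  /* A worked example of the shift above: for a signed accumulator mode
     with, say, 15 fractional bits, the constant 1 needs bit 15 set, so
     FCONST1 ends up holding 1 << 15 == 0x8000 in its low word -- the
     fixed-point encoding of 1.0 for that mode.  */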

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] =
	CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (mode), mode);

      /* Store the fixed-point value 1, again as 1 shifted left by the
	 number of fractional bits.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
		     2 * HOST_BITS_PER_WIDE_INT,
		     &FCONST1 (mode).data.low,
		     &FCONST1 (mode).data.high,
		     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] =
	CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;
}

/* Produce an exact duplicate of insn INSN after AFTER, taking care to
   update any libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND, since mark_jump_label
     will make those.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
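
/* A typical use (illustrative; CURR and PREV stand for insns the caller
   already has) is duplicating an instruction while keeping its notes and
   frame-related bits intact:

     rtx copy = emit_copy_of_insn_after (curr, prev);

   after which COPY sits immediately after PREV in the chain with the
   same INSN_CODE, locator and REG_NOTES as CURR.  */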

static GTY((deletable)) rtx
  hard_reg_clobbers[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
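
/* The table above memoizes one CLOBBER per (mode, hard register) pair,
   so repeated requests share a single rtx.  For instance (register
   number 0 is an arbitrary example), a backend that needs to clobber
   hard register 0 in word_mode could write

     emit_insn (gen_hard_reg_clobber (word_mode, 0));

   and every such call reuses the same CLOBBER object.  */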

#include "gt-emit-rtl.h"