/* Emit RTL for the GNU C-Compiler expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains the functions `gen_rtx', `gen_reg_rtx'
   and `gen_label_rtx' that are the usual ways of creating rtl
   expressions for most purposes.

   It also has the functions for creating insns and linking
   them in the doubly-linked chain.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines use `gen_rtx' to make
   the individual rtx's of the pattern; what is machine dependent
   is the kind of rtx's they make and what arguments they use.  */

#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "real.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */


/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static int label_num = 1;

/* Highest label number in current function.
   Zero means use the value of label_num instead.
   This is nonzero only when belatedly compiling an inline function.  */

static int last_label_num;

/* Value label_num had when set_new_first_and_last_label_number was called.
   If label_num has not changed since then, last_label_num is valid.  */

static int base_label_num;

/* Nonzero means do not generate NOTEs for source line numbers.  */

static int no_line_numbers;

/* Commonly used rtx's, so that we only need space for one copy.
   These are initialized once for the entire compilation.
   All of these are unique; no other rtx-object will be equal to any
   of these.  */

rtx global_rtl[GR_MAX];

/* Commonly used RTL for hard registers.  These objects are not necessarily
   unique, so we allocate them separately from global_rtl.  They are
   initialized once per compilation unit, then copied into regno_reg_rtx
   at the beginning of each function.  */
static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;

/* All references to the following fixed hard registers go through
   these unique rtl objects.  On machines where the frame-pointer and
   arg-pointer are the same register, they use the same unique object.

   After register allocation, other rtl objects which used to be pseudo-regs
   may be clobbered to refer to the frame-pointer register.
   But references that were originally to the frame-pointer can be
   distinguished from the others because they contain frame_pointer_rtx.

   When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
   tricky: until register elimination has taken place hard_frame_pointer_rtx
   should be used if it is being set, and frame_pointer_rtx otherwise.  After
   register elimination hard_frame_pointer_rtx should always be used.
   On machines where the two registers are the same (most machines), these
   are the same.

   In an inline procedure, the stack and frame pointer rtxs may not be
   used for anything else.  */
rtx struct_value_rtx;		/* (REG:Pmode STRUCT_VALUE_REGNUM) */
rtx struct_value_incoming_rtx;	/* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
rtx static_chain_rtx;		/* (REG:Pmode STATIC_CHAIN_REGNUM) */
rtx static_chain_incoming_rtx;	/* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
rtx pic_offset_table_rtx;	/* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */

/* This is used to implement __builtin_return_address for some machines.
   See for instance the MIPS port.  */
rtx return_address_pointer_rtx;	/* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

#define first_insn (cfun->emit->x_first_insn)
#define last_insn (cfun->emit->x_last_insn)
#define cur_insn_uid (cfun->emit->x_cur_insn_uid)
#define last_linenum (cfun->emit->x_last_linenum)
#define last_filename (cfun->emit->x_last_filename)
#define first_label_num (cfun->emit->x_first_label_num)

static rtx make_jump_insn_raw		PARAMS ((rtx));
static rtx make_call_insn_raw		PARAMS ((rtx));
static rtx find_line_note		PARAMS ((rtx));
static rtx change_address_1		PARAMS ((rtx, enum machine_mode, rtx,
						 int));
static void unshare_all_rtl_1		PARAMS ((rtx));
static void unshare_all_decls		PARAMS ((tree));
static void reset_used_decls		PARAMS ((tree));
static void mark_label_nuses		PARAMS ((rtx));
static hashval_t const_int_htab_hash    PARAMS ((const void *));
static int const_int_htab_eq            PARAMS ((const void *,
						 const void *));
static hashval_t const_double_htab_hash PARAMS ((const void *));
static int const_double_htab_eq		PARAMS ((const void *,
						 const void *));
static rtx lookup_const_double		PARAMS ((rtx));
static hashval_t mem_attrs_htab_hash    PARAMS ((const void *));
static int mem_attrs_htab_eq            PARAMS ((const void *,
						 const void *));
static mem_attrs *get_mem_attrs		PARAMS ((HOST_WIDE_INT, tree, rtx,
						 rtx, unsigned int,
						 enum machine_mode));
static tree component_ref_for_mem_expr	PARAMS ((tree));
static rtx gen_const_vector_0		PARAMS ((enum machine_mode));
static void copy_rtx_if_shared_1	PARAMS ((rtx *orig));

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;

/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (x)
     const void *x;
{
  return (hashval_t) INTVAL ((struct rtx_def *) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (x, y)
     const void *x;
     const void *y;
{
  return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (x)
     const void *x;
{
  rtx value = (rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (x, y)
     const void *x;
     const void *y;
{
  rtx a = (rtx)x, b = (rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (x)
     const void *x;
{
  mem_attrs *p = (mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) p->expr);
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (x, y)
     const void *x;
     const void *y;
{
  mem_attrs *p = (mem_attrs *) x;
  mem_attrs *q = (mem_attrs *) y;

  return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
	  && p->size == q->size && p->align == q->align);
}

/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias, expr, offset, size, align, mode)
     HOST_WIDE_INT alias;
     tree expr;
     rtx offset;
     rtx size;
     unsigned int align;
     enum machine_mode mode;
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc (sizeof (mem_attrs));
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return *slot;
}

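/* Illustrative sketch (not compiled into GCC; the argument values are
   invented): because the table above hash-conses mem_attrs records, two
   MEMs with identical attributes share a single record, so pointer
   equality suffices to compare them:

     mem_attrs *a = get_mem_attrs (set, decl, off, sz, 32, SImode);
     mem_attrs *b = get_mem_attrs (set, decl, off, sz, 32, SImode);
     -- here a == b.

   A call whose arguments all match the MEM_* defaults for MODE simply
   returns 0 without allocating anything.  */
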
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (mode, regno)
     enum machine_mode mode;
     int regno;
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (mode, arg)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     HOST_WIDE_INT arg;
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}

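/* A consequence of the sharing above (illustration only; the values are
   arbitrary examples): CONST_INTs can be compared by pointer equality.

     GEN_INT (0) == const0_rtx                -- preallocated array
     GEN_INT (123456) == GEN_INT (123456)     -- interned in const_int_htab

   Values in [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT] come from the
   const_int_rtx array; larger ones are created on demand and looked up
   in the hash table thereafter.  */
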
rtx
gen_int_mode (c, mode)
     HOST_WIDE_INT c;
     enum machine_mode mode;
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}

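/* For example (the value is chosen only for illustration), gen_int_mode
   truncates C to the width of MODE and sign-extends the result, so

     gen_int_mode (0x1ff, QImode)

   yields the same shared rtx as GEN_INT (-1): 0x1ff truncated to 8 bits
   is 0xff, whose sign bit is set.  */
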
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (real)
     rtx real;
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (value, mode)
     REAL_VALUE_TYPE value;
     enum machine_mode mode;
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  memcpy (&CONST_DOUBLE_LOW (real), &value, sizeof (REAL_VALUE_TYPE));

  return lookup_const_double (real);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (i0, i1, mode)
     HOST_WIDE_INT i0, i1;
     enum machine_mode mode;
{
  rtx value;
  unsigned int i;

  if (mode != VOIDmode)
    {
      int width;
      if (GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT
	  /* We can get a 0 for an error mark.  */
	  && GET_MODE_CLASS (mode) != MODE_VECTOR_INT
	  && GET_MODE_CLASS (mode) != MODE_VECTOR_FLOAT)
	abort ();

      /* We clear out all bits that don't belong in MODE, unless they and
	 our sign bit are all one.  So we get either a reasonable negative
	 value or a reasonable unsigned value for this mode.  */
      width = GET_MODE_BITSIZE (mode);
      if (width < HOST_BITS_PER_WIDE_INT
	  && ((i0 & ((HOST_WIDE_INT) (-1) << (width - 1)))
	      != ((HOST_WIDE_INT) (-1) << (width - 1))))
	i0 &= ((HOST_WIDE_INT) 1 << width) - 1, i1 = 0;
      else if (width == HOST_BITS_PER_WIDE_INT
	       && ! (i1 == ~0 && i0 < 0))
	i1 = 0;
      else if (width > 2 * HOST_BITS_PER_WIDE_INT)
	/* We cannot represent this value as a constant.  */
	abort ();

      /* If this would be an entire word for the target, but is not for
	 the host, then sign-extend on the host so that the number will
	 look the same way on the host that it would on the target.

	 For example, when building a 64 bit alpha hosted 32 bit sparc
	 targeted compiler, then we want the 32 bit unsigned value -1 to be
	 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
	 The latter confuses the sparc backend.  */

      if (width < HOST_BITS_PER_WIDE_INT
	  && (i0 & ((HOST_WIDE_INT) 1 << (width - 1))))
	i0 |= ((HOST_WIDE_INT) (-1) << width);

      /* If MODE fits within HOST_BITS_PER_WIDE_INT, always use a
	 CONST_INT.

	 ??? Strictly speaking, this is wrong if we create a CONST_INT for
	 a large unsigned constant with the size of MODE being
	 HOST_BITS_PER_WIDE_INT and later try to interpret that constant
	 in a wider mode.  In that case we will mis-interpret it as a
	 negative number.

	 Unfortunately, the only alternative is to make a CONST_DOUBLE for
	 any constant in any mode if it is an unsigned constant larger
	 than the maximum signed integer in an int on the host.  However,
	 doing this will break everyone that always expects to see a
	 CONST_INT for SImode and smaller.

	 We have always been making CONST_INTs in this case, so nothing
	 new is being broken.  */

      if (width <= HOST_BITS_PER_WIDE_INT)
	i1 = (i0 < 0) ? ~(HOST_WIDE_INT) 0 : 0;
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}

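/* A worked example of the normalization above (assuming a host where
   HOST_BITS_PER_WIDE_INT is 64; the values are illustrative only):

     immed_double_const (0xffffffff, 0, SImode)

   first masks I0 to the 32 bits of SImode, then sign-extends it on the
   host, giving I0 = -1 and I1 = ~0, so the result is simply
   GEN_INT (-1).  A pair that does not fit in one word, such as
   immed_double_const (0, 1, TImode), produces a VOIDmode CONST_DOUBLE
   holding both words.  */
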
rtx
gen_rtx_REG (mode, regno)
     enum machine_mode mode;
     unsigned int regno;
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == PIC_OFFSET_TABLE_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.   Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

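/* As a consequence of the sharing above (illustration only): outside of
   reload, requesting one of the special fixed registers in Pmode hands
   back the unique global object, e.g.

     gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) == stack_pointer_rtx

   so explicit references to these registers can be recognized by
   pointer comparison.  */
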
rtx
gen_rtx_MEM (mode, addr)
     enum machine_mode mode;
     rtx addr;
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

rtx
gen_rtx_SUBREG (mode, reg, offset)
     enum machine_mode mode;
     rtx reg;
     int offset;
{
  /* This is the most common failure type.
     Catch it early so we can see who does it.  */
  if ((offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  /* This check isn't usable right now because combine will
     throw arbitrary crap like a CALL into a SUBREG in
     gen_lowpart_for_combine so we must just eat it.  */
#if 0
  /* Check for this too.  */
  if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
    abort ();
#endif
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG; otherwise generate a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (mode, reg)
     enum machine_mode mode;
     rtx reg;
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

/* rtx gen_rtx (code, mode, [element1, ..., elementn])
**
**	    This routine generates an RTX of the size specified by
**	<code>, which is an RTX code.   The RTX structure is initialized
**	from the arguments <element1> through <elementn>, which are
**	interpreted according to the specific RTX type's format.   The
**	special machine mode associated with the rtx (if any) is specified
**	in <mode>.
**
**	    gen_rtx can be invoked in a way which resembles the lisp-like
**	rtx it will generate.   For example, the following rtx structure:
**
**	      (plus:QI (mem:QI (reg:SI 1))
**		       (mem:QI (plus:SI (reg:SI 2) (reg:SI 3))))
**
**		...would be generated by the following C code:
**
**		gen_rtx (PLUS, QImode,
**		    gen_rtx (MEM, QImode,
**			gen_rtx (REG, SImode, 1)),
**		    gen_rtx (MEM, QImode,
**			gen_rtx (PLUS, SImode,
**			    gen_rtx (REG, SImode, 2),
**			    gen_rtx (REG, SImode, 3)))),
*/

/*VARARGS2*/
rtx
gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
{
  int i;		/* Array indices...			*/
  const char *fmt;	/* Current rtx's format...		*/
  rtx rt_val;		/* RTX to return to caller...		*/

  VA_OPEN (p, mode);
  VA_FIXEDARG (p, enum rtx_code, code);
  VA_FIXEDARG (p, enum machine_mode, mode);

  switch (code)
    {
    case CONST_INT:
      rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
      break;

    case CONST_DOUBLE:
      {
	HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
	HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);

	rt_val = immed_double_const (arg0, arg1, mode);
      }
      break;

    case REG:
      rt_val = gen_rtx_REG (mode, va_arg (p, int));
      break;

    case MEM:
      rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
      break;

    default:
      rt_val = rtx_alloc (code);	/* Allocate the storage space.  */
      rt_val->mode = mode;		/* Store the machine mode...  */

      fmt = GET_RTX_FORMAT (code);	/* Find the right format...  */
      for (i = 0; i < GET_RTX_LENGTH (code); i++)
	{
	  switch (*fmt++)
	    {
	    case '0':		/* Unused field.  */
	      break;

	    case 'i':		/* An integer?  */
	      XINT (rt_val, i) = va_arg (p, int);
	      break;

	    case 'w':		/* A wide integer? */
	      XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
	      break;

	    case 's':		/* A string?  */
	      XSTR (rt_val, i) = va_arg (p, char *);
	      break;

	    case 'e':		/* An expression?  */
	    case 'u':		/* An insn?  Same except when printing.  */
	      XEXP (rt_val, i) = va_arg (p, rtx);
	      break;

	    case 'E':		/* An RTX vector?  */
	      XVEC (rt_val, i) = va_arg (p, rtvec);
	      break;

	    case 'b':           /* A bitmap? */
	      XBITMAP (rt_val, i) = va_arg (p, bitmap);
	      break;

	    case 't':           /* A tree? */
	      XTREE (rt_val, i) = va_arg (p, tree);
	      break;

	    default:
	      abort ();
	    }
	}
      break;
    }

  VA_CLOSE (p);
  return rt_val;
}

/* gen_rtvec (n, [rt1, ..., rtn])
**
**	    This routine creates an rtvec and stores within it the
**	pointers to rtx's which are its arguments.
*/

/*VARARGS1*/
rtvec
gen_rtvec VPARAMS ((int n, ...))
{
  int i, save_n;
  rtx *vector;

  VA_OPEN (p, n);
  VA_FIXEDARG (p, int, n);

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...	*/

  vector = (rtx *) alloca (n * sizeof (rtx));

  for (i = 0; i < n; i++)
    vector[i] = va_arg (p, rtx);

  /* The definition of VA_* in K&R C causes `n' to go out of scope.  */
  save_n = n;
  VA_CLOSE (p);

  return gen_rtvec_v (save_n, vector);
}

rtvec
gen_rtvec_v (n, argp)
     int n;
     rtx *argp;
{
  int i;
  rtvec rt_val;

  if (n == 0)
    return NULL_RTVEC;		/* Don't allocate an empty rtvec...	*/

  rt_val = rtvec_alloc (n);	/* Allocate an rtvec...			*/

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

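/* Hypothetical usage sketch (the names x and y are invented): both
   entry points build the same two-element vector, as might be used for
   the body of a PARALLEL:

     rtvec v1 = gen_rtvec (2, x, y);

     rtx ops[2];
     ops[0] = x, ops[1] = y;
     rtvec v2 = gen_rtvec_v (2, ops);  */
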
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (mode)
     enum machine_mode mode;
{
  struct function *f = cfun;
  rtx val;

  /* Don't let anything called after initial flow analysis create new
     registers.  */
  if (no_new_pseudos)
    abort ();

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, regno_decl, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == f->emit->regno_pointer_align_length)
    {
      int old_size = f->emit->regno_pointer_align_length;
      char *new;
      rtx *new1;
      tree *new2;

      new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
      memset (new + old_size, 0, old_size);
      f->emit->regno_pointer_align = (unsigned char *) new;

      new1 = (rtx *) ggc_realloc (f->emit->x_regno_reg_rtx,
				  old_size * 2 * sizeof (rtx));
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      new2 = (tree *) ggc_realloc (f->emit->regno_decl,
				   old_size * 2 * sizeof (tree));
      memset (new2 + old_size, 0, old_size * sizeof (tree));
      f->emit->regno_decl = new2;

      f->emit->regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}

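/* Illustration (the register numbers are invented): with
   generating_concat_p set, a complex-mode request is split into two
   independent pseudos,

     gen_reg_rtx (DCmode)  =>  (concat:DC (reg:DF 100) (reg:DF 101))

   while a scalar request such as gen_reg_rtx (SImode) returns a single
   (reg:SI 102) and grows the regno_reg_rtx table as needed.  */
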
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (reg)
     rtx reg;
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else if (GET_CODE (reg) == REG)
    REG_USERVAR_P (reg) = 1;
  else
    abort ();
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (reg, align)
     rtx reg;
     int align;
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num ()
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num ()
{
  if (last_label_num && label_num == base_label_num)
    return last_label_num;
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num ()
{
  return first_label_num;
}

/* Return the final regno of X, which is a SUBREG of a hard
   register.  */
int
subreg_hard_regno (x, check_mode)
     rtx x;
     int check_mode;
{
  enum machine_mode mode = GET_MODE (x);
  unsigned int byte_offset, base_regno, final_regno;
  rtx reg = SUBREG_REG (x);

  /* This is where we attempt to catch illegal subregs
     created by the compiler.  */
  if (GET_CODE (x) != SUBREG
      || GET_CODE (reg) != REG)
    abort ();
  base_regno = REGNO (reg);
  if (base_regno >= FIRST_PSEUDO_REGISTER)
    abort ();
  if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
    abort ();
#ifdef ENABLE_CHECKING
  if (!subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
			  	      SUBREG_BYTE (x), mode))
    abort ();
#endif
  /* Catch non-congruent offsets too.  */
  byte_offset = SUBREG_BYTE (x);
  if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
    abort ();

  final_regno = subreg_regno (x);

  return final_regno;
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int msize = GET_MODE_SIZE (mode);
  int xsize = GET_MODE_SIZE (GET_MODE (x));
  int offset = 0;

  if (GET_MODE (x) == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if (GET_MODE (x) != VOIDmode
      && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
	  > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE (x) != VOIDmode && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, GET_MODE (x));

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR)
    return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
  else if ((GET_MODE_CLASS (mode) == MODE_VECTOR_INT
	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
	   && GET_MODE (x) == VOIDmode)
    return simplify_gen_subreg (mode, x, int_mode_for_mode (mode), offset);
  /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
     from the low-order part of the constant.  */
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_MODE (x) == VOIDmode
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
    {
      /* If MODE is twice the host word size, X is already the desired
	 representation.  Otherwise, if MODE is wider than a word, we can't
	 do this.  If MODE is exactly a word, return just one CONST_INT.  */

      if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
	return x;
      else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
	return 0;
      else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
	return (GET_CODE (x) == CONST_INT ? x
		: GEN_INT (CONST_DOUBLE_LOW (x)));
      else
	{
	  /* MODE must be narrower than HOST_BITS_PER_WIDE_INT.  */
	  HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
			       : CONST_DOUBLE_LOW (x));

	  /* Sign extend to HOST_WIDE_INT.  */
	  val = trunc_int_for_mode (val, mode);

	  return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
		  : GEN_INT (val));
	}
    }

  /* The floating-point emulator can handle all conversions between
     FP and integer operands.  This simplifies reload because it
     doesn't have to deal with constructs like (subreg:DI
     (const_double:SF ...)) or (subreg:DF (const_int ...)).  */
  /* Single-precision floats are always 32 bits and double-precision
     floats are always 64 bits.  */

  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 32
	   && GET_CODE (x) == CONST_INT)
    {
      REAL_VALUE_TYPE r;
      long i = INTVAL (x);

      real_from_target (&r, &i, mode);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) == 64
	   && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
	   && GET_MODE (x) == VOIDmode)
    {
      REAL_VALUE_TYPE r;
      HOST_WIDE_INT low, high;
      long i[2];

      if (GET_CODE (x) == CONST_INT)
	{
	  low = INTVAL (x);
	  high = low >> (HOST_BITS_PER_WIDE_INT - 1);
	}
      else
	{
	  low = CONST_DOUBLE_LOW (x);
	  high = CONST_DOUBLE_HIGH (x);
	}

      if (HOST_BITS_PER_WIDE_INT > 32)
	high = low >> 31 >> 1;

      /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
	 target machine.  */
      if (WORDS_BIG_ENDIAN)
	i[0] = high, i[1] = low;
      else
	i[0] = low, i[1] = high;

      real_from_target (&r, i, mode);
      return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
    }
  else if ((GET_MODE_CLASS (mode) == MODE_INT
	    || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
	   && GET_CODE (x) == CONST_DOUBLE
	   && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
    {
      REAL_VALUE_TYPE r;
      long i[4];  /* Only the low 32 bits of each 'long' are used.  */
      int endian = WORDS_BIG_ENDIAN ? 1 : 0;

      /* Convert 'r' into an array of four 32-bit words in target word
         order.  */
      REAL_VALUE_FROM_CONST_DOUBLE (r, x);
      switch (GET_MODE_BITSIZE (GET_MODE (x)))
	{
	case 32:
	  REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
	  i[1] = 0;
	  i[2] = 0;
	  i[3 - 3 * endian] = 0;
	  break;
	case 64:
	  REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
	  i[2 - 2 * endian] = 0;
	  i[3 - 2 * endian] = 0;
	  break;
	case 96:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
	  i[3 - 3 * endian] = 0;
	  break;
	case 128:
	  REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
	  break;
	default:
	  abort ();
	}
      /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
	 and return it.  */
#if HOST_BITS_PER_WIDE_INT == 32
      return immed_double_const (i[3 * endian], i[1 + endian], mode);
#else
      if (HOST_BITS_PER_WIDE_INT != 64)
	abort ();

      return immed_double_const ((((unsigned long) i[3 * endian])
				  | ((HOST_WIDE_INT) i[1 + endian] << 32)),
				 (((unsigned long) i[2 - endian])
				  | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
				 mode);
#endif
    }

  /* Otherwise, we can't do this.  */
  return 0;
}

/* Return the real part (which has mode MODE) of a complex value X.
   This always comes at the low address in memory.  */

rtx
gen_realpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN
      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
      && REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access real part of complex value in hard register");
  else if (WORDS_BIG_ENDIAN)
    return gen_highpart (mode, x);
  else
    return gen_lowpart (mode, x);
}

/* Return the imaginary part (which has mode MODE) of a complex value X.
   This always comes at the high address in memory.  */

rtx
gen_imagpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (WORDS_BIG_ENDIAN)
    return gen_lowpart (mode, x);
  else if (! WORDS_BIG_ENDIAN
	   && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
	   && REG_P (x)
	   && REGNO (x) < FIRST_PSEUDO_REGISTER)
    internal_error
      ("can't access imaginary part of complex value in hard register");
  else
    return gen_highpart (mode, x);
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the real part of the complex value in its containing reg.
   Complex values are always stored with the real part in the first word,
   regardless of WORDS_BIG_ENDIAN.  */

int
subreg_realpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    abort ();

  return ((unsigned int) SUBREG_BYTE (x)
	  < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
}

/* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
   return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
   least-significant part of X.
   MODE specifies how big a part of X to return;
   it usually should not be larger than a word.
   If X is a MEM whose address is a QUEUED, the value may be so also.  */

rtx
gen_lowpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  rtx result = gen_lowpart_common (mode, x);

  if (result)
    return result;
  else if (GET_CODE (x) == REG)
    {
      /* Must be a hard reg that's not valid in MODE.  */
      result = gen_lowpart_common (mode, copy_to_reg (x));
      if (result == 0)
	abort ();
      return result;
    }
  else if (GET_CODE (x) == MEM)
    {
      /* The only additional case we can do is MEM.  */
      int offset = 0;
      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));

      if (BYTES_BIG_ENDIAN)
	/* Adjust the address so that the address-after-the-data
	   is unchanged.  */
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));

      return adjust_address (x, mode, offset);
    }
  else if (GET_CODE (x) == ADDRESSOF)
    return gen_lowpart (mode, force_reg (GET_MODE (x), x));
  else
    abort ();
}

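/* Illustration only (the register number is invented): on a
   little-endian target, taking the SImode low part of a DImode pseudo

     gen_lowpart (SImode, gen_rtx_REG (DImode, 100))

   yields (subreg:SI (reg:DI 100) 0); a big-endian target gets byte
   offset 4 instead.  For a constant, gen_lowpart (QImode,
   GEN_INT (0x1234)) just extracts the low byte, GEN_INT (0x34).  */
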
/* Like `gen_lowpart', but refer to the most significant part.
   This is used to access the imaginary part of a complex number.  */

rtx
gen_highpart (mode, x)
     enum machine_mode mode;
     rtx x;
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  if (msize > UNITS_PER_WORD
      && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
    abort ();

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (result != NULL_RTX && GET_CODE (result) == MEM)
    result = validize_mem (result);

  if (!result)
    abort ();
  return result;
}

/* Like gen_highpart, but accept the mode of the EXP operand in case EXP
   can be a VOIDmode constant.  */
rtx
gen_highpart_mode (outermode, innermode, exp)
     enum machine_mode outermode, innermode;
     rtx exp;
{
  if (GET_MODE (exp) != VOIDmode)
    {
      if (GET_MODE (exp) != innermode)
	abort ();
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return offset in bytes to get OUTERMODE low part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_lowpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

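/* Worked example: for the SImode low part of a DImode value,
   difference = 8 - 4 = 4, so the offset is 0 on a little-endian target
   and 4 on a big-endian one (with both WORDS_BIG_ENDIAN and
   BYTES_BIG_ENDIAN set, the low-order part lies at the higher
   address).  */
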
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (outermode, innermode)
     enum machine_mode outermode, innermode;
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
    abort ();

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

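/* The mirror image of the example above: the SImode high part of a
   DImode value lies at byte offset 4 on a little-endian target and at
   byte offset 0 on a big-endian one.  */
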
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (x)
     rtx x;
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}


/* Helper routine for all the constant cases of operand_subword.
   Some places invoke this directly.  */

rtx
constant_subword (op, offset, mode)
     rtx op;
     int offset;
     enum machine_mode mode;
{
  int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  HOST_WIDE_INT val;

  /* If OP is already an integer word, return it.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
    return op;

  /* The output is some bits, the width of the target machine's word.
     A wider-word host can surely hold them in a CONST_INT. A narrower-word
     host can't.  */
  if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
      && GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 64
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[2];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_DOUBLE (rv, k);

      /* We handle 32-bit and >= 64-bit words here.  Note that the order in
	 which the words are written depends on the word endianness.
	 ??? This is a potential portability problem and should
	 be fixed at some point.

	 We must exercise caution with the sign bit.  By definition there
	 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
	 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
	 So we explicitly mask and sign-extend as necessary.  */
      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset == 0)
	{
	  val = k[! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else if (BITS_PER_WORD == 16)
	{
	  val = k[offset >> 1];
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	  return GEN_INT (val);
	}
      else
	abort ();
    }
  else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
	   && GET_MODE_CLASS (mode) == MODE_FLOAT
	   && GET_MODE_BITSIZE (mode) > 64
	   && GET_CODE (op) == CONST_DOUBLE)
    {
      long k[4];
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);

      if (BITS_PER_WORD == 32)
	{
	  val = k[offset];
	  val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
	  return GEN_INT (val);
	}
#if HOST_BITS_PER_WIDE_INT >= 64
      else if (BITS_PER_WORD >= 64 && offset <= 1)
	{
	  val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
	  val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
	  val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
	  return GEN_INT (val);
	}
#endif
      else
	abort ();
    }

  /* Single word float is a little harder, since single- and double-word
     values often do not have the same high-order bits.  We have already
     verified that we want the only defined word of the single-word value.  */
  if (GET_MODE_CLASS (mode) == MODE_FLOAT
      && GET_MODE_BITSIZE (mode) == 32
      && GET_CODE (op) == CONST_DOUBLE)
    {
      long l;
      REAL_VALUE_TYPE rv;

      REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
      REAL_VALUE_TO_TARGET_SINGLE (rv, l);

      /* Sign extend from known 32-bit value to HOST_WIDE_INT.  */
      val = l;
      val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;

      if (BITS_PER_WORD == 16)
	{
	  if ((offset & 1) == ! WORDS_BIG_ENDIAN)
	    val >>= 16;
	  val = ((val & 0xffff) ^ 0x8000) - 0x8000;
	}

      return GEN_INT (val);
    }

  /* The only remaining cases that we can handle are integers.
     Convert to proper endianness now since these cases need it.
     At this point, offset == 0 means the low-order word.

     We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
     in general.  However, if OP is (const_int 0), we can just return
     it for any word.  */

  if (op == const0_rtx)
    return op;

  if (GET_MODE_CLASS (mode) != MODE_INT
      || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
      || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
    return 0;

  if (WORDS_BIG_ENDIAN)
    offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;

  /* Find out which word on the host machine this value is in and get
     it from the constant.  */
  val = (offset / size_ratio == 0
	 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
	 : (GET_CODE (op) == CONST_INT
	    ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));

  /* Get the value we want into the low bits of val.  */
  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
    val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));

  val = trunc_int_for_mode (val, word_mode);

  return GEN_INT (val);
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.  Most uses of this function can now be
   replaced by simplify_subreg.  */

rtx
operand_subword (op, offset, validate_address, mode)
     rtx op;
     unsigned int offset;
     int validate_address;
     enum machine_mode mode;
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  if (mode == VOIDmode)
    abort ();

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (GET_CODE (op) == MEM)
    {
      rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new;

      else if (reload_completed)
	{
	  if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
	    return 0;
	}
      else
	return replace_equiv_address (new, XEXP (new, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}

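/* Sketch of typical use (assuming a target whose word_mode is SImode;
   OP is an invented DImode operand): splitting OP into its two words,

     rtx lo = operand_subword (op, 0, 1, DImode);
     rtx hi = operand_subword (op, 1, 1, DImode);

   yields the word at the low address in LO and the other word in HI;
   each call returns 0 if the requested word cannot be extracted.  */
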
/* Similar to `operand_subword', but never return 0.  If we can't extract
   the required subword, put OP into a register and try again.  If that fails,
   abort.  We always validate the address in this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (op, offset, mode)
     rtx op;
     unsigned int offset;
     enum machine_mode mode;
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
	 to a pseudo register.  */
      if (GET_CODE (op) == REG)
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  if (result == 0)
    abort ();

  return result;
}

/* Given a compare instruction, swap the operands.
   A test instruction is changed into a compare of 0 against the operand.  */

void
reverse_comparison (insn)
     rtx insn;
{
  rtx body = PATTERN (insn);
  rtx comp;

  if (GET_CODE (body) == SET)
    comp = SET_SRC (body);
  else
    comp = SET_SRC (XVECEXP (body, 0, 0));

  if (GET_CODE (comp) == COMPARE)
    {
      rtx op0 = XEXP (comp, 0);
      rtx op1 = XEXP (comp, 1);
      XEXP (comp, 0) = op1;
      XEXP (comp, 1) = op0;
    }
  else
    {
      rtx new = gen_rtx_COMPARE (VOIDmode,
				 CONST0_RTX (GET_MODE (comp)), comp);
      if (GET_CODE (body) == SET)
	SET_SRC (body) = new;
      else
	SET_SRC (XVECEXP (body, 0, 0)) = new;
    }
}

1650 /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1651    or (2) a component ref of something variable.  Represent the latter with
1652    a NULL expression.  */
1653 
1654 static tree
1655 component_ref_for_mem_expr (ref)
1656      tree ref;
1657 {
1658   tree inner = TREE_OPERAND (ref, 0);
1659 
1660   if (TREE_CODE (inner) == COMPONENT_REF)
1661     inner = component_ref_for_mem_expr (inner);
1662   else
1663     {
1664       tree placeholder_ptr = 0;
1665 
1666       /* Now remove any conversions: they don't change what the underlying
1667 	 object is.  Likewise for SAVE_EXPR.  Also handle PLACEHOLDER_EXPR.  */
1668       while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1669 	     || TREE_CODE (inner) == NON_LVALUE_EXPR
1670 	     || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1671 	     || TREE_CODE (inner) == SAVE_EXPR
1672 	     || TREE_CODE (inner) == PLACEHOLDER_EXPR)
1673 	if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
1674 	  inner = find_placeholder (inner, &placeholder_ptr);
1675 	else
1676 	  inner = TREE_OPERAND (inner, 0);
1677 
1678       if (! DECL_P (inner))
1679 	inner = NULL_TREE;
1680     }
1681 
1682   if (inner == TREE_OPERAND (ref, 0))
1683     return ref;
1684   else
1685     return build (COMPONENT_REF, TREE_TYPE (ref), inner,
1686 		  TREE_OPERAND (ref, 1));
1687 }
1688 
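/* Editor's note (an illustrative reading, not from the source): for a C
   reference like `s.a.b' where `s' is a DECL, the COMPONENT_REF chain is
   returned unchanged.  For something like `f ().a.b', the base is variable,
   so the innermost operand becomes NULL_TREE -- recording only that field
   `b' of field `a' of *some* object is accessed.  */
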
1689 /* Given REF, a MEM, and T, either the type of REF or the expression
1690    corresponding to REF, set the memory attributes.  OBJECTP is nonzero
1691    if we are making a new object of this type.  BITPOS is nonzero if
1692    there is an offset outstanding on T that will be applied later.  */
1693 
1694 void
1695 set_mem_attributes_minus_bitpos (ref, t, objectp, bitpos)
1696      rtx ref;
1697      tree t;
1698      int objectp;
1699      HOST_WIDE_INT bitpos;
1700 {
1701   HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1702   tree expr = MEM_EXPR (ref);
1703   rtx offset = MEM_OFFSET (ref);
1704   rtx size = MEM_SIZE (ref);
1705   unsigned int align = MEM_ALIGN (ref);
1706   HOST_WIDE_INT apply_bitpos = 0;
1707   tree type;
1708 
1709   /* It can happen that type_for_mode was given a mode for which there
1710      is no language-level type; in that case it returns NULL, which we
1711      can see here.  */
1712   if (t == NULL_TREE)
1713     return;
1714 
1715   type = TYPE_P (t) ? t : TREE_TYPE (t);
1716 
1717   /* If we have already set DECL_RTL = ref, get_alias_set will get the
1718      wrong answer, as it assumes that DECL_RTL already has the right alias
1719      info.  Callers should not set DECL_RTL until after the call to
1720      set_mem_attributes.  */
1721   if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1722     abort ();
1723 
1724   /* Get the alias set from the expression or type (perhaps using a
1725      front-end routine) and use it.  */
1726   alias = get_alias_set (t);
1727 
1728   MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
1729   MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1730   RTX_UNCHANGING_P (ref)
1731     |= ((lang_hooks.honor_readonly
1732 	 && (TYPE_READONLY (type) || TREE_READONLY (t)))
1733 	|| (! TYPE_P (t) && TREE_CONSTANT (t)));
1734 
1735   /* If we are making an object of this type, or if this is a DECL, we know
1736      that it is a scalar if the type is not an aggregate.  */
1737   if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1738     MEM_SCALAR_P (ref) = 1;
1739 
1740   /* We can set the alignment from the type if we are making an object,
1741      this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
1742   if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1743     align = MAX (align, TYPE_ALIGN (type));
1744 
1745   /* If the size is known, we can set that.  */
1746   if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1747     size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1748 
1749   /* If T is not a type, we may be able to deduce some more information about
1750      the expression.  */
1751   if (! TYPE_P (t))
1752     {
1753       maybe_set_unchanging (ref, t);
1754       if (TREE_THIS_VOLATILE (t))
1755 	MEM_VOLATILE_P (ref) = 1;
1756 
1757       /* Now remove any conversions: they don't change what the underlying
1758 	 object is.  Likewise for SAVE_EXPR.  */
1759       while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1760 	     || TREE_CODE (t) == NON_LVALUE_EXPR
1761 	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
1762 	     || TREE_CODE (t) == SAVE_EXPR)
1763 	t = TREE_OPERAND (t, 0);
1764 
1765       /* If this expression can't be addressed (e.g., it contains a reference
1766 	 to a non-addressable field), show we don't change its alias set.  */
1767       if (! can_address_p (t))
1768 	MEM_KEEP_ALIAS_SET_P (ref) = 1;
1769 
1770       /* If this is a decl, set the attributes of the MEM from it.  */
1771       if (DECL_P (t))
1772 	{
1773 	  expr = t;
1774 	  offset = const0_rtx;
1775 	  apply_bitpos = bitpos;
1776 	  size = (DECL_SIZE_UNIT (t)
1777 		  && host_integerp (DECL_SIZE_UNIT (t), 1)
1778 		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1779 	  align = DECL_ALIGN (t);
1780 	}
1781 
1782       /* If this is a constant, we know the alignment.  */
1783       else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
1784 	{
1785 	  align = TYPE_ALIGN (type);
1786 #ifdef CONSTANT_ALIGNMENT
1787 	  align = CONSTANT_ALIGNMENT (t, align);
1788 #endif
1789 	}
1790 
1791       /* If this is a field reference and not a bit-field, record it.  */
1792   /* ??? There is some information that can be gleaned from bit-fields,
1793 	 such as the word offset in the structure that might be modified.
1794 	 But skip it for now.  */
1795       else if (TREE_CODE (t) == COMPONENT_REF
1796 	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1797 	{
1798 	  expr = component_ref_for_mem_expr (t);
1799 	  offset = const0_rtx;
1800 	  apply_bitpos = bitpos;
1801 	  /* ??? Any reason the field size would be different than
1802 	     the size we got from the type?  */
1803 	}
1804 
1805       /* If this is an array reference, look for an outer field reference.  */
1806       else if (TREE_CODE (t) == ARRAY_REF)
1807 	{
1808 	  tree off_tree = size_zero_node;
1809 	  /* We can't modify t, because we use it at the end of the
1810 	     function.  */
1811 	  tree t2 = t;
1812 
1813 	  do
1814 	    {
1815 	      tree index = TREE_OPERAND (t2, 1);
1816 	      tree array = TREE_OPERAND (t2, 0);
1817 	      tree domain = TYPE_DOMAIN (TREE_TYPE (array));
1818 	      tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
1819 	      tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
1820 
1821 	      /* We assume all arrays have sizes that are a multiple of a byte.
1822 		 First subtract the lower bound, if any, in the type of the
1823 		 index, then convert to sizetype and multiply by the size of the
1824 		 array element.  */
1825 	      if (low_bound != 0 && ! integer_zerop (low_bound))
1826 		index = fold (build (MINUS_EXPR, TREE_TYPE (index),
1827 				     index, low_bound));
1828 
1829 	      /* If the index has a self-referential type, pass it to a
1830 		 WITH_RECORD_EXPR; if the component size is self-referential,
1831 		 pass our component to one.  */
1832 	      if (! TREE_CONSTANT (index)
1833 		  && contains_placeholder_p (index))
1834 		index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, t2);
1835 	      if (! TREE_CONSTANT (unit_size)
1836 		  && contains_placeholder_p (unit_size))
1837 		unit_size = build (WITH_RECORD_EXPR, sizetype,
1838 				   unit_size, array);
1839 
1840 	      off_tree
1841 		= fold (build (PLUS_EXPR, sizetype,
1842 			       fold (build (MULT_EXPR, sizetype,
1843 					    index,
1844 					    unit_size)),
1845 			       off_tree));
1846 	      t2 = TREE_OPERAND (t2, 0);
1847 	    }
1848 	  while (TREE_CODE (t2) == ARRAY_REF);
1849 
1850 	  if (DECL_P (t2))
1851 	    {
1852 	      expr = t2;
1853 	      offset = NULL;
1854 	      if (host_integerp (off_tree, 1))
1855 		{
1856 		  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1857 		  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1858 		  align = DECL_ALIGN (t2);
1859 		  if (aoff && aoff < align)
1860 	            align = aoff;
1861 		  offset = GEN_INT (ioff);
1862 		  apply_bitpos = bitpos;
1863 		}
1864 	    }
1865 	  else if (TREE_CODE (t2) == COMPONENT_REF)
1866 	    {
1867 	      expr = component_ref_for_mem_expr (t2);
1868 	      if (host_integerp (off_tree, 1))
1869 		{
1870 		  offset = GEN_INT (tree_low_cst (off_tree, 1));
1871 		  apply_bitpos = bitpos;
1872 		}
1873 	      /* ??? Any reason the field size would be different than
1874 		 the size we got from the type?  */
1875 	    }
1876 	  else if (flag_argument_noalias > 1
1877 		   && TREE_CODE (t2) == INDIRECT_REF
1878 		   && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
1879 	    {
1880 	      expr = t2;
1881 	      offset = NULL;
1882 	    }
1883 	}
1884 
1885       /* If this is a Fortran indirect argument reference, record the
1886 	 parameter decl.  */
1887       else if (flag_argument_noalias > 1
1888 	       && TREE_CODE (t) == INDIRECT_REF
1889 	       && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1890 	{
1891 	  expr = t;
1892 	  offset = NULL;
1893 	}
1894     }
1895 
1896   /* If we modified OFFSET based on T, then subtract the outstanding
1897      bit position offset.  Similarly, increase the size of the accessed
1898      object to contain the negative offset.  */
1899   if (apply_bitpos)
1900     {
1901       offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1902       if (size)
1903 	size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1904     }
1905 
1906   /* Now set the attributes we computed above.  */
1907   MEM_ATTRS (ref)
1908     = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1909 
1910   /* If this is already known to be a scalar or aggregate, we are done.  */
1911   if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1912     return;
1913 
1914   /* If it is a reference into an aggregate, this is part of an aggregate.
1915      Otherwise we don't know.  */
1916   else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1917 	   || TREE_CODE (t) == ARRAY_RANGE_REF
1918 	   || TREE_CODE (t) == BIT_FIELD_REF)
1919     MEM_IN_STRUCT_P (ref) = 1;
1920 }
1921 
1922 void
1923 set_mem_attributes (ref, t, objectp)
1924      rtx ref;
1925      tree t;
1926      int objectp;
1927 {
1928   set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1929 }
1930 
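/* Editor's sketch of a hypothetical caller (not part of this file): the
   usual pattern when expanding a declaration is to build the MEM, set its
   attributes from the tree, and only then install it as DECL_RTL -- per
   the ordering requirement enforced by the abort above:

     rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
     set_mem_attributes (mem, decl, 1);
     SET_DECL_RTL (decl, mem);  */
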
1931 /* Set the alias set of MEM to SET.  */
1932 
1933 void
1934 set_mem_alias_set (mem, set)
1935      rtx mem;
1936      HOST_WIDE_INT set;
1937 {
1938 #ifdef ENABLE_CHECKING
1939   /* If the new and old alias sets don't conflict, something is wrong.  */
1940   if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
1941     abort ();
1942 #endif
1943 
1944   MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1945 				   MEM_SIZE (mem), MEM_ALIGN (mem),
1946 				   GET_MODE (mem));
1947 }
1948 
1949 /* Set the alignment of MEM to ALIGN bits.  */
1950 
1951 void
1952 set_mem_align (mem, align)
1953      rtx mem;
1954      unsigned int align;
1955 {
1956   MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1957 				   MEM_OFFSET (mem), MEM_SIZE (mem), align,
1958 				   GET_MODE (mem));
1959 }
1960 
1961 /* Set the expr for MEM to EXPR.  */
1962 
1963 void
1964 set_mem_expr (mem, expr)
1965      rtx mem;
1966      tree expr;
1967 {
1968   MEM_ATTRS (mem)
1969     = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1970 		     MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1971 }
1972 
1973 /* Set the offset of MEM to OFFSET.  */
1974 
1975 void
1976 set_mem_offset (mem, offset)
1977      rtx mem, offset;
1978 {
1979   MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1980 				   offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1981 				   GET_MODE (mem));
1982 }
1983 
1984 /* Set the size of MEM to SIZE.  */
1985 
1986 void
1987 set_mem_size (mem, size)
1988      rtx mem, size;
1989 {
1990   MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1991 				   MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1992 				   GET_MODE (mem));
1993 }
1994 
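/* Editor's illustration (hypothetical values): since MEM_ATTRS is a single
   shared structure, each setter above rebuilds it with one field changed.
   A caller that has just proven a stricter alignment and a known size
   might write:

     set_mem_align (mem, BIGGEST_ALIGNMENT);
     set_mem_size (mem, GEN_INT (16));

   Each call builds (or refinds, via the mem_attrs hash table) one
   mem_attrs record.  */
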
1995 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1996    and its address changed to ADDR.  (VOIDmode means don't change the mode.
1997    NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
1998    returned memory location is required to be valid.  The memory
1999    attributes are not changed.  */
2000 
2001 static rtx
2002 change_address_1 (memref, mode, addr, validate)
2003      rtx memref;
2004      enum machine_mode mode;
2005      rtx addr;
2006      int validate;
2007 {
2008   rtx new;
2009 
2010   if (GET_CODE (memref) != MEM)
2011     abort ();
2012   if (mode == VOIDmode)
2013     mode = GET_MODE (memref);
2014   if (addr == 0)
2015     addr = XEXP (memref, 0);
2016 
2017   if (validate)
2018     {
2019       if (reload_in_progress || reload_completed)
2020 	{
2021 	  if (! memory_address_p (mode, addr))
2022 	    abort ();
2023 	}
2024       else
2025 	addr = memory_address (mode, addr);
2026     }
2027 
2028   if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2029     return memref;
2030 
2031   new = gen_rtx_MEM (mode, addr);
2032   MEM_COPY_ATTRIBUTES (new, memref);
2033   return new;
2034 }
2035 
2036 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2037    way we are changing MEMREF, so we only preserve the alias set.  */
2038 
2039 rtx
2040 change_address (memref, mode, addr)
2041      rtx memref;
2042      enum machine_mode mode;
2043      rtx addr;
2044 {
2045   rtx new = change_address_1 (memref, mode, addr, 1);
2046   enum machine_mode mmode = GET_MODE (new);
2047 
2048   MEM_ATTRS (new)
2049     = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
2050 		     mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
2051 		     (mmode == BLKmode ? BITS_PER_UNIT
2052 		      : GET_MODE_ALIGNMENT (mmode)),
2053 		     mmode);
2054 
2055   return new;
2056 }
2057 
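/* Editor's sketch (names hypothetical): change_address suits retargeting
   a BLKmode reference at a new, unrelated address, e.g.

     rtx dst = change_address (orig_mem, VOIDmode, new_addr_reg);

   Only the alias set survives; the expression, offset and recorded size
   are dropped, since nothing says how the new address relates to the old
   memory.  */
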
2058 /* Return a memory reference like MEMREF, but with its mode changed
2059    to MODE and its address offset by OFFSET bytes.  If VALIDATE is
2060    nonzero, the memory address is forced to be valid.
2061    If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
2062    and caller is responsible for adjusting MEMREF base register.  */
2063 
2064 rtx
2065 adjust_address_1 (memref, mode, offset, validate, adjust)
2066      rtx memref;
2067      enum machine_mode mode;
2068      HOST_WIDE_INT offset;
2069      int validate, adjust;
2070 {
2071   rtx addr = XEXP (memref, 0);
2072   rtx new;
2073   rtx memoffset = MEM_OFFSET (memref);
2074   rtx size = 0;
2075   unsigned int memalign = MEM_ALIGN (memref);
2076 
2077   /* ??? Prefer to create garbage instead of creating shared rtl.
2078      This may happen even if offset is nonzero -- consider
2079      (plus (plus reg reg) const_int) -- so do this always.  */
2080   addr = copy_rtx (addr);
2081 
2082   if (adjust)
2083     {
2084       /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2085 	 object, we can merge it into the LO_SUM.  */
2086       if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2087 	  && offset >= 0
2088 	  && (unsigned HOST_WIDE_INT) offset
2089 	      < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2090 	addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
2091 			       plus_constant (XEXP (addr, 1), offset));
2092       else
2093 	addr = plus_constant (addr, offset);
2094     }
2095 
2096   new = change_address_1 (memref, mode, addr, validate);
2097 
2098   /* Compute the new values of the memory attributes due to this adjustment.
2099      We add the offsets and update the alignment.  */
2100   if (memoffset)
2101     memoffset = GEN_INT (offset + INTVAL (memoffset));
2102 
2103   /* Compute the new alignment by taking the MIN of the alignment and the
2104      lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2105      is zero.  */
2106   if (offset != 0)
2107     memalign
2108       = MIN (memalign,
2109 	     (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2110 
2111   /* We can compute the size in a number of ways.  */
2112   if (GET_MODE (new) != BLKmode)
2113     size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
2114   else if (MEM_SIZE (memref))
2115     size = plus_constant (MEM_SIZE (memref), -offset);
2116 
2117   MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2118 				   memoffset, size, memalign, GET_MODE (new));
2119 
2120   /* At some point, we should validate that this offset is within the object,
2121      if all the appropriate values are known.  */
2122   return new;
2123 }
2124 
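/* Editor's illustration -- the adjust_address/adjust_address_nv macros
   (believed to wrap this function from expr.h) are the usual entry points.
   Pulling the second SImode word out of a hypothetical DImode MEM:

     rtx word1 = adjust_address (dimem, SImode, 4);

   MEM_OFFSET grows by 4, MEM_ALIGN drops to at most 32 bits, and the new
   size comes from the new mode.  */
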
2125 /* Return a memory reference like MEMREF, but with its mode changed
2126    to MODE and its address changed to ADDR, which is assumed to be
2127    MEMREF offset by OFFSET bytes.  If VALIDATE is
2128    nonzero, the memory address is forced to be valid.  */
2129 
2130 rtx
2131 adjust_automodify_address_1 (memref, mode, addr, offset, validate)
2132      rtx memref;
2133      enum machine_mode mode;
2134      rtx addr;
2135      HOST_WIDE_INT offset;
2136      int validate;
2137 {
2138   memref = change_address_1 (memref, VOIDmode, addr, validate);
2139   return adjust_address_1 (memref, mode, offset, validate, 0);
2140 }
2141 
2142 /* Return a memory reference like MEMREF, but whose address is changed by
2143    adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
2144    known to be in OFFSET (possibly 1).  */
2145 
2146 rtx
2147 offset_address (memref, offset, pow2)
2148      rtx memref;
2149      rtx offset;
2150      HOST_WIDE_INT pow2;
2151 {
2152   rtx new, addr = XEXP (memref, 0);
2153 
2154   new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2155 
2156   /* At this point we don't know _why_ the address is invalid.  It
2157      could have secondary memory references, multiplies or anything.
2158 
2159      However, if we did go and rearrange things, we can wind up not
2160      being able to recognize the magic around pic_offset_table_rtx.
2161      This stuff is fragile, and is yet another example of why it is
2162      bad to expose PIC machinery too early.  */
2163   if (! memory_address_p (GET_MODE (memref), new)
2164       && GET_CODE (addr) == PLUS
2165       && XEXP (addr, 0) == pic_offset_table_rtx)
2166     {
2167       addr = force_reg (GET_MODE (addr), addr);
2168       new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2169     }
2170 
2171   update_temp_slot_address (XEXP (memref, 0), new);
2172   new = change_address_1 (memref, VOIDmode, new, 1);
2173 
2174   /* Update the alignment to reflect the offset.  Reset the offset, which
2175      we don't know.  */
2176   MEM_ATTRS (new)
2177     = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2178 		     MIN (MEM_ALIGN (memref),
2179 			  (unsigned HOST_WIDE_INT) pow2 * BITS_PER_UNIT),
2180 		     GET_MODE (new));
2181   return new;
2182 }
2183 
2184 /* Return a memory reference like MEMREF, but with its address changed to
2185    ADDR.  The caller is asserting that the actual piece of memory pointed
2186    to is the same, just the form of the address is being changed, such as
2187    by putting something into a register.  */
2188 
2189 rtx
2190 replace_equiv_address (memref, addr)
2191      rtx memref;
2192      rtx addr;
2193 {
2194   /* change_address_1 copies the memory attribute structure without change
2195      and that's exactly what we want here.  */
2196   update_temp_slot_address (XEXP (memref, 0), addr);
2197   return change_address_1 (memref, VOIDmode, addr, 1);
2198 }
2199 
2200 /* Likewise, but the reference is not required to be valid.  */
2201 
2202 rtx
2203 replace_equiv_address_nv (memref, addr)
2204      rtx memref;
2205      rtx addr;
2206 {
2207   return change_address_1 (memref, VOIDmode, addr, 0);
2208 }
2209 
2210 /* Return a memory reference like MEMREF, but with its mode widened to
2211    MODE and offset by OFFSET.  This would be used by targets that e.g.
2212    cannot issue QImode memory operations and have to use SImode memory
2213    operations plus masking logic.  */
2214 
2215 rtx
2216 widen_memory_access (memref, mode, offset)
2217      rtx memref;
2218      enum machine_mode mode;
2219      HOST_WIDE_INT offset;
2220 {
2221   rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2222   tree expr = MEM_EXPR (new);
2223   rtx memoffset = MEM_OFFSET (new);
2224   unsigned int size = GET_MODE_SIZE (mode);
2225 
2226   /* If we don't know what offset we were at within the expression, then
2227      we can't know if we've overstepped the bounds.  */
2228   if (! memoffset)
2229     expr = NULL_TREE;
2230 
2231   while (expr)
2232     {
2233       if (TREE_CODE (expr) == COMPONENT_REF)
2234 	{
2235 	  tree field = TREE_OPERAND (expr, 1);
2236 
2237 	  if (! DECL_SIZE_UNIT (field))
2238 	    {
2239 	      expr = NULL_TREE;
2240 	      break;
2241 	    }
2242 
2243 	  /* Is the field at least as large as the access?  If so, ok,
2244 	     otherwise strip back to the containing structure.  */
2245 	  if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2246 	      && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2247 	      && INTVAL (memoffset) >= 0)
2248 	    break;
2249 
2250 	  if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2251 	    {
2252 	      expr = NULL_TREE;
2253 	      break;
2254 	    }
2255 
2256 	  expr = TREE_OPERAND (expr, 0);
2257 	  memoffset = (GEN_INT (INTVAL (memoffset)
2258 		       + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2259 		       + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2260 		          / BITS_PER_UNIT)));
2261 	}
2262       /* Similarly for the decl.  */
2263       else if (DECL_P (expr)
2264 	       && DECL_SIZE_UNIT (expr)
2265 	       && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2266 	       && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2267 	       && (! memoffset || INTVAL (memoffset) >= 0))
2268 	break;
2269       else
2270 	{
2271 	  /* The widened memory access overflows the expression, which means
2272 	     that it could alias another expression.  Zap it.  */
2273 	  expr = NULL_TREE;
2274 	  break;
2275 	}
2276     }
2277 
2278   if (! expr)
2279     memoffset = NULL_RTX;
2280 
2281   /* The widened memory may alias other stuff, so zap the alias set.  */
2282   /* ??? Maybe use get_alias_set on any remaining expression.  */
2283 
2284   MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2285 				   MEM_ALIGN (new), mode);
2286 
2287   return new;
2288 }
2289 
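/* Editor's sketch of a hypothetical target scenario: a port without byte
   loads might widen a QImode reference to the containing word,

     rtx wide = widen_memory_access (qimem, SImode, 0);

   then mask and shift the result.  The loop above walks outward through
   COMPONENT_REFs so that the recorded MEM_EXPR still covers the whole
   widened access, or is dropped when it cannot.  */
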
2290 /* Return a newly created CODE_LABEL rtx with a unique label number.  */
2291 
2292 rtx
2293 gen_label_rtx ()
2294 {
2295   return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2296 		  	     NULL, label_num++, NULL);
2297 }
2298 
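/* Editor's illustration of the standard emit-time idiom (names are
   hypothetical; gen_beq exists only on targets with a beq pattern):

     rtx label = gen_label_rtx ();
     emit_jump_insn (gen_beq (label));      branch to the label
     ...
     emit_label (label);                    bind it here

   The label number comes from the compilation-wide label_num above.  */
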
2299 /* For procedure integration.  */
2300 
2301 /* Install new pointers to the first and last insns in the chain.
2302    Also, set cur_insn_uid to one higher than the last in use.
2303    Used for an inline-procedure after copying the insn chain.  */
2304 
2305 void
2306 set_new_first_and_last_insn (first, last)
2307      rtx first, last;
2308 {
2309   rtx insn;
2310 
2311   first_insn = first;
2312   last_insn = last;
2313   cur_insn_uid = 0;
2314 
2315   for (insn = first; insn; insn = NEXT_INSN (insn))
2316     cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2317 
2318   cur_insn_uid++;
2319 }
2320 
2321 /* Set the range of label numbers found in the current function.
2322    This is used when belatedly compiling an inline function.  */
2323 
2324 void
2325 set_new_first_and_last_label_num (first, last)
2326      int first, last;
2327 {
2328   base_label_num = label_num;
2329   first_label_num = first;
2330   last_label_num = last;
2331 }
2332 
2333 /* Set the last label number found in the current function.
2334    This is used when belatedly compiling an inline function.  */
2335 
2336 void
2337 set_new_last_label_num (last)
2338      int last;
2339 {
2340   base_label_num = label_num;
2341   last_label_num = last;
2342 }
2343 
2344 /* Restore all variables describing the current status from the structure *P.
2345    This is used after a nested function.  */
2346 
2347 void
2348 restore_emit_status (p)
2349      struct function *p ATTRIBUTE_UNUSED;
2350 {
2351   last_label_num = 0;
2352 }
2353 
2354 /* Go through all the RTL insn bodies and copy any invalid shared
2355    structure.  This routine should only be called once.  */
2356 
2357 void
2358 unshare_all_rtl (fndecl, insn)
2359      tree fndecl;
2360      rtx insn;
2361 {
2362   tree decl;
2363 
2364   /* Make sure that virtual parameters are not shared.  */
2365   for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2366     SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2367 
2368   /* Make sure that virtual stack slots are not shared.  */
2369   unshare_all_decls (DECL_INITIAL (fndecl));
2370 
2371   /* Unshare just about everything else.  */
2372   unshare_all_rtl_1 (insn);
2373 
2374   /* Make sure the addresses of stack slots found outside the insn chain
2375      (such as, in DECL_RTL of a variable) are not shared
2376      with the insn chain.
2377 
2378      This special care is necessary when the stack slot MEM does not
2379      actually appear in the insn chain.  If it does appear, its address
2380      is unshared from all else at that point.  */
2381   stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2382 }
2383 
2384 /* Go through all the RTL insn bodies and copy any invalid shared
2385    structure, again.  This is a fairly expensive thing to do so it
2386    should be done sparingly.  */
2387 
2388 void
2389 unshare_all_rtl_again (insn)
2390      rtx insn;
2391 {
2392   rtx p;
2393   tree decl;
2394 
2395   for (p = insn; p; p = NEXT_INSN (p))
2396     if (INSN_P (p))
2397       {
2398 	reset_used_flags (PATTERN (p));
2399 	reset_used_flags (REG_NOTES (p));
2400 	reset_used_flags (LOG_LINKS (p));
2401       }
2402 
2403   /* Make sure that virtual stack slots are not shared.  */
2404   reset_used_decls (DECL_INITIAL (cfun->decl));
2405 
2406   /* Make sure that virtual parameters are not shared.  */
2407   for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2408     reset_used_flags (DECL_RTL (decl));
2409 
2410   reset_used_flags (stack_slot_list);
2411 
2412   unshare_all_rtl (cfun->decl, insn);
2413 }
2414 
2415 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2416    Assumes the mark bits are cleared at entry.  */
2417 
2418 static void
2419 unshare_all_rtl_1 (insn)
2420      rtx insn;
2421 {
2422   for (; insn; insn = NEXT_INSN (insn))
2423     if (INSN_P (insn))
2424       {
2425 	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2426 	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2427 	LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2428       }
2429 }
2430 
2431 /* Go through all virtual stack slots of a function and copy any
2432    shared structure.  */
2433 static void
2434 unshare_all_decls (blk)
2435      tree blk;
2436 {
2437   tree t;
2438 
2439   /* Copy shared decls.  */
2440   for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2441     if (DECL_RTL_SET_P (t))
2442       SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2443 
2444   /* Now process sub-blocks.  */
2445   for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2446     unshare_all_decls (t);
2447 }
2448 
2449 /* Go through all virtual stack slots of a function and mark them as
2450    not shared.  */
2451 static void
2452 reset_used_decls (blk)
2453      tree blk;
2454 {
2455   tree t;
2456 
2457   /* Mark decls.  */
2458   for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2459     if (DECL_RTL_SET_P (t))
2460       reset_used_flags (DECL_RTL (t));
2461 
2462   /* Now process sub-blocks.  */
2463   for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2464     reset_used_decls (t);
2465 }
2466 
2467 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2468    placed in the result directly, rather than being copied.  MAY_SHARE is
2469    either a MEM or an EXPR_LIST of MEMs.  */
2470 
2471 rtx
2472 copy_most_rtx (orig, may_share)
2473      rtx orig;
2474      rtx may_share;
2475 {
2476   rtx copy;
2477   int i, j;
2478   RTX_CODE code;
2479   const char *format_ptr;
2480 
2481   if (orig == may_share
2482       || (GET_CODE (may_share) == EXPR_LIST
2483 	  && in_expr_list_p (may_share, orig)))
2484     return orig;
2485 
2486   code = GET_CODE (orig);
2487 
2488   switch (code)
2489     {
2490     case REG:
2491     case QUEUED:
2492     case CONST_INT:
2493     case CONST_DOUBLE:
2494     case CONST_VECTOR:
2495     case SYMBOL_REF:
2496     case CODE_LABEL:
2497     case PC:
2498     case CC0:
2499       return orig;
2500     default:
2501       break;
2502     }
2503 
2504   copy = rtx_alloc (code);
2505   PUT_MODE (copy, GET_MODE (orig));
2506   RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
2507   RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
2508   RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
2509   RTX_FLAG (copy, integrated) = RTX_FLAG (orig, integrated);
2510   RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
2511 
2512   format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2513 
2514   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2515     {
2516       switch (*format_ptr++)
2517 	{
2518 	case 'e':
2519 	  XEXP (copy, i) = XEXP (orig, i);
2520 	  if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2521 	    XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2522 	  break;
2523 
2524 	case 'u':
2525 	  XEXP (copy, i) = XEXP (orig, i);
2526 	  break;
2527 
2528 	case 'E':
2529 	case 'V':
2530 	  XVEC (copy, i) = XVEC (orig, i);
2531 	  if (XVEC (orig, i) != NULL)
2532 	    {
2533 	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2534 	      for (j = 0; j < XVECLEN (copy, i); j++)
2535 		XVECEXP (copy, i, j)
2536 		  = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2537 	    }
2538 	  break;
2539 
2540 	case 'w':
2541 	  XWINT (copy, i) = XWINT (orig, i);
2542 	  break;
2543 
2544 	case 'n':
2545 	case 'i':
2546 	  XINT (copy, i) = XINT (orig, i);
2547 	  break;
2548 
2549 	case 't':
2550 	  XTREE (copy, i) = XTREE (orig, i);
2551 	  break;
2552 
2553 	case 's':
2554 	case 'S':
2555 	  XSTR (copy, i) = XSTR (orig, i);
2556 	  break;
2557 
2558 	case '0':
2559 	  /* Copy this through the wide int field; that's safest.  */
2560 	  X0WINT (copy, i) = X0WINT (orig, i);
2561 	  break;
2562 
2563 	default:
2564 	  abort ();
2565 	}
2566     }
2567   return copy;
2568 }
2569 
2570 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2571    Recursively does the same for subexpressions.  */
2572 
2573 rtx
2574 copy_rtx_if_shared (orig)
2575      rtx orig;
2576 {
2577   copy_rtx_if_shared_1 (&orig);
2578   return orig;
2579 }
2580 
2581 static void
2582 copy_rtx_if_shared_1 (orig1)
2583      rtx *orig1;
2584 {
2585   rtx x;
2586   int i;
2587   enum rtx_code code;
2588   rtx *last_ptr;
2589   const char *format_ptr;
2590   int copied = 0;
2591   int length;
2592 
2593   /* Repeat is used to turn tail-recursion into iteration.  */
2594 repeat:
2595   x = *orig1;
2596 
2597   if (x == 0)
2598     return;
2599 
2600   code = GET_CODE (x);
2601 
2602   /* These types may be freely shared.  */
2603 
2604   switch (code)
2605     {
2606     case REG:
2607     case QUEUED:
2608     case CONST_INT:
2609     case CONST_DOUBLE:
2610     case CONST_VECTOR:
2611     case SYMBOL_REF:
2612     case CODE_LABEL:
2613     case PC:
2614     case CC0:
2615     case SCRATCH:
2616       /* SCRATCH must be shared because each one represents a distinct value.  */
2617       return;
2618 
2619     case CONST:
2620       /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
2621 	 a LABEL_REF, it isn't sharable.  */
2622       if (GET_CODE (XEXP (x, 0)) == PLUS
2623 	  && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2624 	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2625 	return;
2626       break;
2627 
2628     case INSN:
2629     case JUMP_INSN:
2630     case CALL_INSN:
2631     case NOTE:
2632     case BARRIER:
2633       /* The chain of insns is not being copied.  */
2634       return;
2635 
2636     case MEM:
2637       /* A MEM is allowed to be shared if its address is constant.
2638 
2639 	 We used to allow sharing of MEMs which referenced
2640 	 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2641 	 that can lose.  instantiate_virtual_regs will not unshare
2642 	 the MEMs, and combine may change the structure of the address
2643 	 because it looks safe and profitable in one context, but
2644 	 in some other context it creates unrecognizable RTL.  */
2645       if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2646 	return;
2647 
2648       break;
2649 
2650     default:
2651       break;
2652     }
2653 
2654   /* This rtx may not be shared.  If it has already been seen,
2655      replace it with a copy of itself.  */
2656 
2657   if (RTX_FLAG (x, used))
2658     {
2659       rtx copy;
2660 
2661       copy = rtx_alloc (code);
2662       memcpy (copy, x,
2663 	     (sizeof (*copy) - sizeof (copy->fld)
2664 	      + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2665       x = copy;
2666       copied = 1;
2667     }
2668   RTX_FLAG (x, used) = 1;
2669 
2670   /* Now scan the subexpressions recursively.
2671      We can store any replaced subexpressions directly into X
2672      since we know X is not shared!  Any vectors in X
2673      must be copied if X was copied.  */
2674 
2675   format_ptr = GET_RTX_FORMAT (code);
2676   length = GET_RTX_LENGTH (code);
2677   last_ptr = NULL;
2678 
2679   for (i = 0; i < length; i++)
2680     {
2681       switch (*format_ptr++)
2682 	{
2683 	case 'e':
2684           if (last_ptr)
2685             copy_rtx_if_shared_1 (last_ptr);
2686 	  last_ptr = &XEXP (x, i);
2687 	  break;
2688 
2689 	case 'E':
2690 	  if (XVEC (x, i) != NULL)
2691 	    {
2692 	      int j;
2693 	      int len = XVECLEN (x, i);
2694 
2695               /* Copy the vector iff we copied the rtx and the length is nonzero.  */
2696 	      if (copied && len > 0)
2697 		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2698 
2699               /* Call recursively on everything inside the vector.  */
2700 	      for (j = 0; j < len; j++)
2701                 {
2702 		  if (last_ptr)
2703 		    copy_rtx_if_shared_1 (last_ptr);
2704                   last_ptr = &XVECEXP (x, i, j);
2705                 }
2706 	    }
2707 	  break;
2708 	}
2709     }
2710   *orig1 = x;
2711   if (last_ptr)
2712     {
2713       orig1 = last_ptr;
2714       goto repeat;
2715     }
2716   return;
2717 }
2718 
2719 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2720    to look for shared sub-parts.  */
2721 
2722 void
2723 reset_used_flags (x)
2724      rtx x;
2725 {
2726   int i, j;
2727   enum rtx_code code;
2728   const char *format_ptr;
2729   int length;
2730 
2731   /* Repeat is used to turn tail-recursion into iteration.  */
2732 repeat:
2733   if (x == 0)
2734     return;
2735 
2736   code = GET_CODE (x);
2737 
2738   /* These types may be freely shared so we needn't do any resetting
2739      for them.  */
2740 
2741   switch (code)
2742     {
2743     case REG:
2744     case QUEUED:
2745     case CONST_INT:
2746     case CONST_DOUBLE:
2747     case CONST_VECTOR:
2748     case SYMBOL_REF:
2749     case CODE_LABEL:
2750     case PC:
2751     case CC0:
2752       return;
2753 
2754     case INSN:
2755     case JUMP_INSN:
2756     case CALL_INSN:
2757     case NOTE:
2758     case LABEL_REF:
2759     case BARRIER:
2760       /* The chain of insns is not being copied.  */
2761       return;
2762 
2763     default:
2764       break;
2765     }
2766 
2767   RTX_FLAG (x, used) = 0;
2768 
2769   format_ptr = GET_RTX_FORMAT (code);
2770   length = GET_RTX_LENGTH (code);
2771 
2772   for (i = 0; i < length; i++)
2773     {
2774       switch (*format_ptr++)
2775 	{
2776 	case 'e':
2777           if (i == length-1)
2778             {
2779               x = XEXP (x, i);
2780 	      goto repeat;
2781             }
2782 	  reset_used_flags (XEXP (x, i));
2783 	  break;
2784 
2785 	case 'E':
2786 	  for (j = 0; j < XVECLEN (x, i); j++)
2787 	    reset_used_flags (XVECEXP (x, i, j));
2788 	  break;
2789 	}
2790     }
2791 }
2792 
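/* Editor's note on the idiom (cf. unshare_all_rtl_again above): the used
   bits are the mark phase for the copy phase.  To unshare one pattern:

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   The first walk clears the marks; the second copies any subexpression it
   reaches a second time.  */
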
2793 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2794    Return X or the rtx for the pseudo reg the value of X was copied into.
2795    OTHER must be valid as a SET_DEST.  */
2796 
2797 rtx
2798 make_safe_from (x, other)
2799      rtx x, other;
2800 {
2801   while (1)
2802     switch (GET_CODE (other))
2803       {
2804       case SUBREG:
2805 	other = SUBREG_REG (other);
2806 	break;
2807       case STRICT_LOW_PART:
2808       case SIGN_EXTEND:
2809       case ZERO_EXTEND:
2810 	other = XEXP (other, 0);
2811 	break;
2812       default:
2813 	goto done;
2814       }
2815  done:
2816   if ((GET_CODE (other) == MEM
2817        && ! CONSTANT_P (x)
2818        && GET_CODE (x) != REG
2819        && GET_CODE (x) != SUBREG)
2820       || (GET_CODE (other) == REG
2821 	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
2822 	      || reg_mentioned_p (other, x))))
2823     {
2824       rtx temp = gen_reg_rtx (GET_MODE (x));
2825       emit_move_insn (temp, x);
2826       return temp;
2827     }
2828   return x;
2829 }
2830 
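/* Editor's sketch (a hypothetical expander fragment): a typical use is
   protecting a source value before a store that might clobber it:

     x = make_safe_from (x, target);
     emit_move_insn (target, gen_rtx_PLUS (mode, x, y));

   Roughly, if TARGET could be modified while X is still needed, X is
   copied into a fresh pseudo first.  */
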
2831 /* Emission of insns (adding them to the doubly-linked list).  */
2832 
2833 /* Return the first insn of the current sequence or current function.  */
2834 
2835 rtx
2836 get_insns ()
2837 {
2838   return first_insn;
2839 }
2840 
2841 /* Specify a new insn as the first in the chain.  */
2842 
2843 void
2844 set_first_insn (insn)
2845      rtx insn;
2846 {
2847   if (PREV_INSN (insn) != 0)
2848     abort ();
2849   first_insn = insn;
2850 }
2851 
2852 /* Return the last insn emitted in current sequence or current function.  */
2853 
2854 rtx
2855 get_last_insn ()
2856 {
2857   return last_insn;
2858 }
2859 
2860 /* Specify a new insn as the last in the chain.  */
2861 
2862 void
2863 set_last_insn (insn)
2864      rtx insn;
2865 {
2866   if (NEXT_INSN (insn) != 0)
2867     abort ();
2868   last_insn = insn;
2869 }
2870 
2871 /* Return the last insn emitted, even if it is in a sequence now pushed.  */
2872 
2873 rtx
2874 get_last_insn_anywhere ()
2875 {
2876   struct sequence_stack *stack;
2877   if (last_insn)
2878     return last_insn;
2879   for (stack = seq_stack; stack; stack = stack->next)
2880     if (stack->last != 0)
2881       return stack->last;
2882   return 0;
2883 }
2884 
2885 /* Return the first nonnote insn emitted in current sequence or current
2886    function.  This routine looks inside SEQUENCEs.  */
2887 
2888 rtx
2889 get_first_nonnote_insn ()
2890 {
2891   rtx insn = first_insn;
2892 
2893   if (insn)
2894     {
2895       if (NOTE_P (insn))
2896 	for (insn = next_insn (insn);
2897 	     insn && NOTE_P (insn);
2898 	     insn = next_insn (insn))
2899 	  continue;
2900       else
2901 	{
2902 	  if (GET_CODE (insn) == INSN
2903 	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
2904 	    insn = XVECEXP (PATTERN (insn), 0, 0);
2905 	}
2906     }
2907 
2908   return insn;
2909 }
2910 
2911 /* Return the last nonnote insn emitted in current sequence or current
2912    function.  This routine looks inside SEQUENCEs.  */
2913 
2914 rtx
2915 get_last_nonnote_insn ()
2916 {
2917   rtx insn = last_insn;
2918 
2919   if (insn)
2920     {
2921       if (NOTE_P (insn))
2922 	for (insn = previous_insn (insn);
2923 	     insn && NOTE_P (insn);
2924 	     insn = previous_insn (insn))
2925 	  continue;
2926       else
2927 	{
2928 	  if (GET_CODE (insn) == INSN
2929 	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
2930 	    insn = XVECEXP (PATTERN (insn), 0,
2931 			    XVECLEN (PATTERN (insn), 0) - 1);
2932 	}
2933     }
2934 
2935   return insn;
2936 }
2937 
2938 /* Return a number larger than any instruction's uid in this function.  */
2939 
2940 int
2941 get_max_uid ()
2942 {
2943   return cur_insn_uid;
2944 }
2945 
2946 /* Renumber instructions so that no instruction UIDs are wasted.  */
2947 
2948 void
2949 renumber_insns (stream)
2950      FILE *stream;
2951 {
2952   rtx insn;
2953 
2954   /* If we're not supposed to renumber instructions, don't.  */
2955   if (!flag_renumber_insns)
2956     return;
2957 
2958   /* If there aren't that many instructions, then it's not really
2959      worth renumbering them.  */
2960   if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2961     return;
2962 
2963   cur_insn_uid = 1;
2964 
2965   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2966     {
2967       if (stream)
2968 	fprintf (stream, "Renumbering insn %d to %d\n",
2969 		 INSN_UID (insn), cur_insn_uid);
2970       INSN_UID (insn) = cur_insn_uid++;
2971     }
2972 }
2973 
2974 /* Return the next insn.  If it is a SEQUENCE, return the first insn
2975    of the sequence.  */
2976 
2977 rtx
2978 next_insn (insn)
2979      rtx insn;
2980 {
2981   if (insn)
2982     {
2983       insn = NEXT_INSN (insn);
2984       if (insn && GET_CODE (insn) == INSN
2985 	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
2986 	insn = XVECEXP (PATTERN (insn), 0, 0);
2987     }
2988 
2989   return insn;
2990 }
2991 
2992 /* Return the previous insn.  If it is a SEQUENCE, return the last insn
2993    of the sequence.  */
2994 
2995 rtx
2996 previous_insn (insn)
2997      rtx insn;
2998 {
2999   if (insn)
3000     {
3001       insn = PREV_INSN (insn);
3002       if (insn && GET_CODE (insn) == INSN
3003 	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
3004 	insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3005     }
3006 
3007   return insn;
3008 }
3009 
3010 /* Return the next insn after INSN that is not a NOTE.  This routine does not
3011    look inside SEQUENCEs.  */
3012 
3013 rtx
3014 next_nonnote_insn (insn)
3015      rtx insn;
3016 {
3017   while (insn)
3018     {
3019       insn = NEXT_INSN (insn);
3020       if (insn == 0 || GET_CODE (insn) != NOTE)
3021 	break;
3022     }
3023 
3024   return insn;
3025 }
3026 
3027 /* Return the previous insn before INSN that is not a NOTE.  This routine does
3028    not look inside SEQUENCEs.  */
3029 
3030 rtx
3031 prev_nonnote_insn (insn)
3032      rtx insn;
3033 {
3034   while (insn)
3035     {
3036       insn = PREV_INSN (insn);
3037       if (insn == 0 || GET_CODE (insn) != NOTE)
3038 	break;
3039     }
3040 
3041   return insn;
3042 }
3043 
3044 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3045    or 0, if there is none.  This routine does not look inside
3046    SEQUENCEs.  */
3047 
3048 rtx
3049 next_real_insn (insn)
3050      rtx insn;
3051 {
3052   while (insn)
3053     {
3054       insn = NEXT_INSN (insn);
3055       if (insn == 0 || GET_CODE (insn) == INSN
3056 	  || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
3057 	break;
3058     }
3059 
3060   return insn;
3061 }
3062 
3063 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3064    or 0, if there is none.  This routine does not look inside
3065    SEQUENCEs.  */
3066 
3067 rtx
3068 prev_real_insn (insn)
3069      rtx insn;
3070 {
3071   while (insn)
3072     {
3073       insn = PREV_INSN (insn);
3074       if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
3075 	  || GET_CODE (insn) == JUMP_INSN)
3076 	break;
3077     }
3078 
3079   return insn;
3080 }
3081 
3082 /* Return nonzero if INSN really does something: CALL_INSNs and JUMP_INSNs
3083    always do; an ordinary INSN does unless, after reload has completed, its
3084    pattern is just a USE or CLOBBER.  */
3085 
3086 int
3087 active_insn_p (insn)
3088      rtx insn;
3089 {
3090   return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
3091 	  || (GET_CODE (insn) == INSN
3092 	      && (! reload_completed
3093 		  || (GET_CODE (PATTERN (insn)) != USE
3094 		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
3095 }
3096 
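/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  Until reload has completed, this is the
   same as next_real_insn.  */
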
3097 rtx
3098 next_active_insn (insn)
3099      rtx insn;
3100 {
3101   while (insn)
3102     {
3103       insn = NEXT_INSN (insn);
3104       if (insn == 0 || active_insn_p (insn))
3105 	break;
3106     }
3107 
3108   return insn;
3109 }
3110 
3111 /* Find the last insn before INSN that really does something.  This routine
3112    does not look inside SEQUENCEs.  Until reload has completed, this is the
3113    same as prev_real_insn.  */
3114 
3115 rtx
3116 prev_active_insn (insn)
3117      rtx insn;
3118 {
3119   while (insn)
3120     {
3121       insn = PREV_INSN (insn);
3122       if (insn == 0 || active_insn_p (insn))
3123 	break;
3124     }
3125 
3126   return insn;
3127 }
3128 
3129 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none.  */
3130 
3131 rtx
3132 next_label (insn)
3133      rtx insn;
3134 {
3135   while (insn)
3136     {
3137       insn = NEXT_INSN (insn);
3138       if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3139 	break;
3140     }
3141 
3142   return insn;
3143 }
3144 
3145 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none.  */
3146 
3147 rtx
3148 prev_label (insn)
3149      rtx insn;
3150 {
3151   while (insn)
3152     {
3153       insn = PREV_INSN (insn);
3154       if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3155 	break;
3156     }
3157 
3158   return insn;
3159 }
3160 
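/* Editor's illustration (a common scan loop, not from this file):

     rtx insn;
     for (insn = get_insns (); insn; insn = next_nonnote_insn (insn))
       if (active_insn_p (insn))
         ...

   Use next_insn/previous_insn instead when delay-slot SEQUENCEs must be
   entered; the *_nonnote, *_real and *_active variants do not look inside
   them.  */
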
3161 #ifdef HAVE_cc0
3162 /* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
3163    and REG_CC_USER notes so we can find it.  */
3164 
3165 void
3166 link_cc0_insns (insn)
3167      rtx insn;
3168 {
3169   rtx user = next_nonnote_insn (insn);
3170 
3171   if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3172     user = XVECEXP (PATTERN (user), 0, 0);
3173 
3174   REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3175 					REG_NOTES (user));
3176   REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3177 }
3178 
3179 /* Return the next insn that uses CC0 after INSN, which is assumed to
3180    set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3181    applied to the result of this function should yield INSN).
3182 
3183    Normally, this is simply the next insn.  However, if a REG_CC_USER note
3184    is present, it contains the insn that uses CC0.
3185 
3186    Return 0 if we can't find the insn.  */
3187 
3188 rtx
3189 next_cc0_user (insn)
3190      rtx insn;
3191 {
3192   rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3193 
3194   if (note)
3195     return XEXP (note, 0);
3196 
3197   insn = next_nonnote_insn (insn);
3198   if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3199     insn = XVECEXP (PATTERN (insn), 0, 0);
3200 
3201   if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3202     return insn;
3203 
3204   return 0;
3205 }
3206 
3207 /* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
3208    note, it is the previous insn.  */
3209 
3210 rtx
3211 prev_cc0_setter (insn)
3212      rtx insn;
3213 {
3214   rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3215 
3216   if (note)
3217     return XEXP (note, 0);
3218 
3219   insn = prev_nonnote_insn (insn);
3220   if (! sets_cc0_p (PATTERN (insn)))
3221     abort ();
3222 
3223   return insn;
3224 }
3225 #endif
3226 
3227 /* Increment the label uses for all labels present in rtx.  */
3228 
3229 static void
3230 mark_label_nuses (x)
3231      rtx x;
3232 {
3233   enum rtx_code code;
3234   int i, j;
3235   const char *fmt;
3236 
3237   code = GET_CODE (x);
3238   if (code == LABEL_REF)
3239     LABEL_NUSES (XEXP (x, 0))++;
3240 
3241   fmt = GET_RTX_FORMAT (code);
3242   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3243     {
3244       if (fmt[i] == 'e')
3245 	mark_label_nuses (XEXP (x, i));
3246       else if (fmt[i] == 'E')
3247 	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3248 	  mark_label_nuses (XVECEXP (x, i, j));
3249     }
3250 }
3251 
3252 
3253 /* Try splitting insns that can be split for better scheduling.
3254    PAT is the pattern which might split.
3255    TRIAL is the insn providing PAT.
3256    LAST is nonzero if we should return the last insn of the sequence produced.
3257 
3258    If this routine succeeds in splitting, it returns the first or last
3259    replacement insn depending on the value of LAST.  Otherwise, it
3260    returns TRIAL.  If the insn to be returned can be split, it will be.  */
3261 
3262 rtx
3263 try_split (pat, trial, last)
3264      rtx pat, trial;
3265      int last;
3266 {
3267   rtx before = PREV_INSN (trial);
3268   rtx after = NEXT_INSN (trial);
3269   int has_barrier = 0;
3270   rtx tem;
3271   rtx note, seq;
3272   int probability;
3273   rtx insn_last, insn;
3274   int njumps = 0;
3275 
3276   if (any_condjump_p (trial)
3277       && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3278     split_branch_probability = INTVAL (XEXP (note, 0));
3279   probability = split_branch_probability;
3280 
3281   seq = split_insns (pat, trial);
3282 
3283   split_branch_probability = -1;
3284 
3285   /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3286      We may need to handle this specially.  */
3287   if (after && GET_CODE (after) == BARRIER)
3288     {
3289       has_barrier = 1;
3290       after = NEXT_INSN (after);
3291     }
3292 
3293   if (!seq)
3294     return trial;
3295 
3296   /* Avoid an infinite loop if any insn of the result matches
3297      the original pattern.  */
3298   insn_last = seq;
3299   while (1)
3300     {
3301       if (INSN_P (insn_last)
3302 	  && rtx_equal_p (PATTERN (insn_last), pat))
3303 	return trial;
3304       if (!NEXT_INSN (insn_last))
3305 	break;
3306       insn_last = NEXT_INSN (insn_last);
3307     }
3308 
3309   /* Mark labels.  */
3310   for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3311     {
3312       if (GET_CODE (insn) == JUMP_INSN)
3313 	{
3314 	  mark_jump_label (PATTERN (insn), insn, 0);
3315 	  njumps++;
3316 	  if (probability != -1
3317 	      && any_condjump_p (insn)
3318 	      && !find_reg_note (insn, REG_BR_PROB, 0))
3319 	    {
3320 	      /* We can preserve the REG_BR_PROB notes only if exactly
3321 		 one jump is created, otherwise the machine description
3322 		 is responsible for this step using
3323 		 split_branch_probability variable.  */
3324 	      if (njumps != 1)
3325 		abort ();
3326 	      REG_NOTES (insn)
3327 		= gen_rtx_EXPR_LIST (REG_BR_PROB,
3328 				     GEN_INT (probability),
3329 				     REG_NOTES (insn));
3330 	    }
3331 	}
3332     }
3333 
3334   /* If we are splitting a CALL_INSN, look for the CALL_INSN
3335      in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it.  */
3336   if (GET_CODE (trial) == CALL_INSN)
3337     {
3338       for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3339 	if (GET_CODE (insn) == CALL_INSN)
3340 	  {
3341 	    CALL_INSN_FUNCTION_USAGE (insn)
3342 	      = CALL_INSN_FUNCTION_USAGE (trial);
3343 	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3344 	  }
3345     }
3346 
3347   /* Copy notes, particularly those related to the CFG.  */
3348   for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3349     {
3350       switch (REG_NOTE_KIND (note))
3351 	{
3352 	case REG_EH_REGION:
3353 	  insn = insn_last;
3354 	  while (insn != NULL_RTX)
3355 	    {
3356 	      if (GET_CODE (insn) == CALL_INSN
3357 		  || (flag_non_call_exceptions
3358 		      && may_trap_p (PATTERN (insn))))
3359 		REG_NOTES (insn)
3360 		  = gen_rtx_EXPR_LIST (REG_EH_REGION,
3361 				       XEXP (note, 0),
3362 				       REG_NOTES (insn));
3363 	      insn = PREV_INSN (insn);
3364 	    }
3365 	  break;
3366 
3367 	case REG_NORETURN:
3368 	case REG_SETJMP:
3369 	case REG_ALWAYS_RETURN:
3370 	  insn = insn_last;
3371 	  while (insn != NULL_RTX)
3372 	    {
3373 	      if (GET_CODE (insn) == CALL_INSN)
3374 		REG_NOTES (insn)
3375 		  = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3376 				       XEXP (note, 0),
3377 				       REG_NOTES (insn));
3378 	      insn = PREV_INSN (insn);
3379 	    }
3380 	  break;
3381 
3382 	case REG_NON_LOCAL_GOTO:
3383 	  insn = insn_last;
3384 	  while (insn != NULL_RTX)
3385 	    {
3386 	      if (GET_CODE (insn) == JUMP_INSN)
3387 		REG_NOTES (insn)
3388 		  = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3389 				       XEXP (note, 0),
3390 				       REG_NOTES (insn));
3391 	      insn = PREV_INSN (insn);
3392 	    }
3393 	  break;
3394 
3395 	default:
3396 	  break;
3397 	}
3398     }
3399 
3400   /* If there are LABELs inside the split insns, increment the
3401      usage count so we don't delete the label.  */
3402   if (GET_CODE (trial) == INSN)
3403     {
3404       insn = insn_last;
3405       while (insn != NULL_RTX)
3406 	{
3407 	  if (GET_CODE (insn) == INSN)
3408 	    mark_label_nuses (PATTERN (insn));
3409 
3410 	  insn = PREV_INSN (insn);
3411 	}
3412     }
3413 
3414   tem = emit_insn_after_scope (seq, trial, INSN_SCOPE (trial));
3415 
3416   delete_insn (trial);
3417   if (has_barrier)
3418     emit_barrier_after (tem);
3419 
3420   /* Recursively call try_split for each new insn created; by the
3421      time control returns here that insn will be fully split, so
3422      set LAST and continue from the insn after the one returned.
3423      We can't use next_active_insn here since AFTER may be a note.
3424      Ignore deleted insns, which can occur if not optimizing.  */
3425   for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3426     if (! INSN_DELETED_P (tem) && INSN_P (tem))
3427       tem = try_split (PATTERN (tem), tem, 1);
3428 
3429   /* Return either the first or the last insn, depending on which was
3430      requested.  */
3431   return last
3432     ? (after ? PREV_INSN (after) : last_insn)
3433     : NEXT_INSN (before);
3434 }
3435 
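/* Editor's sketch of a hypothetical caller: a pass that wants an insn
   maximally split can write

     insn = try_split (PATTERN (insn), insn, 1);

   and continue from the returned (last) insn; if no define_split matched,
   the original insn comes back unchanged.  */
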
3436 /* Make and return an INSN rtx, initializing all its slots.
3437    Store PATTERN in the pattern slots.  */
3438 
3439 rtx
3440 make_insn_raw (pattern)
3441      rtx pattern;
3442 {
3443   rtx insn;
3444 
3445   insn = rtx_alloc (INSN);
3446 
3447   INSN_UID (insn) = cur_insn_uid++;
3448   PATTERN (insn) = pattern;
3449   INSN_CODE (insn) = -1;
3450   LOG_LINKS (insn) = NULL;
3451   REG_NOTES (insn) = NULL;
3452   INSN_SCOPE (insn) = NULL;
3453   BLOCK_FOR_INSN (insn) = NULL;
3454 
3455 #ifdef ENABLE_RTL_CHECKING
3456   if (insn
3457       && INSN_P (insn)
3458       && (returnjump_p (insn)
3459 	  || (GET_CODE (insn) == SET
3460 	      && SET_DEST (insn) == pc_rtx)))
3461     {
3462       warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3463       debug_rtx (insn);
3464     }
3465 #endif
3466 
3467   return insn;
3468 }
3469 
3470 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */
3471 
3472 static rtx
3473 make_jump_insn_raw (pattern)
3474      rtx pattern;
3475 {
3476   rtx insn;
3477 
3478   insn = rtx_alloc (JUMP_INSN);
3479   INSN_UID (insn) = cur_insn_uid++;
3480 
3481   PATTERN (insn) = pattern;
3482   INSN_CODE (insn) = -1;
3483   LOG_LINKS (insn) = NULL;
3484   REG_NOTES (insn) = NULL;
3485   JUMP_LABEL (insn) = NULL;
3486   INSN_SCOPE (insn) = NULL;
3487   BLOCK_FOR_INSN (insn) = NULL;
3488 
3489   return insn;
3490 }
3491 
3492 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */
3493 
3494 static rtx
3495 make_call_insn_raw (pattern)
3496      rtx pattern;
3497 {
3498   rtx insn;
3499 
3500   insn = rtx_alloc (CALL_INSN);
3501   INSN_UID (insn) = cur_insn_uid++;
3502 
3503   PATTERN (insn) = pattern;
3504   INSN_CODE (insn) = -1;
3505   LOG_LINKS (insn) = NULL;
3506   REG_NOTES (insn) = NULL;
3507   CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3508   INSN_SCOPE (insn) = NULL;
3509   BLOCK_FOR_INSN (insn) = NULL;
3510 
3511   return insn;
3512 }
3513 
3514 /* Add INSN to the end of the doubly-linked list.
3515    INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */
3516 
3517 void
3518 add_insn (insn)
3519      rtx insn;
3520 {
3521   PREV_INSN (insn) = last_insn;
3522   NEXT_INSN (insn) = 0;
3523 
3524   if (NULL != last_insn)
3525     NEXT_INSN (last_insn) = insn;
3526 
3527   if (NULL == first_insn)
3528     first_insn = insn;
3529 
3530   last_insn = insn;
3531 }
3532 
3533 /* Add INSN into the doubly-linked list after insn AFTER.  This and
3534    the next should be the only functions called to insert an insn once
3535    delay slots have been filled since only they know how to update a
3536    SEQUENCE.  */
3537 
3538 void
3539 add_insn_after (insn, after)
3540      rtx insn, after;
3541 {
3542   rtx next = NEXT_INSN (after);
3543   basic_block bb;
3544 
3545   if (optimize && INSN_DELETED_P (after))
3546     abort ();
3547 
3548   NEXT_INSN (insn) = next;
3549   PREV_INSN (insn) = after;
3550 
3551   if (next)
3552     {
3553       PREV_INSN (next) = insn;
3554       if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3555 	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3556     }
3557   else if (last_insn == after)
3558     last_insn = insn;
3559   else
3560     {
3561       struct sequence_stack *stack = seq_stack;
3562       /* Scan all pending sequences too.  */
3563       for (; stack; stack = stack->next)
3564 	if (after == stack->last)
3565 	  {
3566 	    stack->last = insn;
3567 	    break;
3568 	  }
3569 
3570       if (stack == 0)
3571 	abort ();
3572     }
3573 
3574   if (GET_CODE (after) != BARRIER
3575       && GET_CODE (insn) != BARRIER
3576       && (bb = BLOCK_FOR_INSN (after)))
3577     {
3578       set_block_for_insn (insn, bb);
3579       if (INSN_P (insn))
3580 	bb->flags |= BB_DIRTY;
3581       /* Should not happen as first in the BB is always
3582 	 either NOTE or LABEL.  */
3583       if (bb->end == after
3584 	  /* Avoid clobbering of structure when creating new BB.  */
3585 	  && GET_CODE (insn) != BARRIER
3586 	  && (GET_CODE (insn) != NOTE
3587 	      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3588 	bb->end = insn;
3589     }
3590 
3591   NEXT_INSN (after) = insn;
3592   if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3593     {
3594       rtx sequence = PATTERN (after);
3595       NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3596     }
3597 }
3598 
3599 /* Add INSN into the doubly-linked list before insn BEFORE.  This and
3600    the previous should be the only functions called to insert an insn once
3601    delay slots have been filled since only they know how to update a
3602    SEQUENCE.  */
3603 
3604 void
3605 add_insn_before (insn, before)
3606      rtx insn, before;
3607 {
3608   rtx prev = PREV_INSN (before);
3609   basic_block bb;
3610 
3611   if (optimize && INSN_DELETED_P (before))
3612     abort ();
3613 
3614   PREV_INSN (insn) = prev;
3615   NEXT_INSN (insn) = before;
3616 
3617   if (prev)
3618     {
3619       NEXT_INSN (prev) = insn;
3620       if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3621 	{
3622 	  rtx sequence = PATTERN (prev);
3623 	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3624 	}
3625     }
3626   else if (first_insn == before)
3627     first_insn = insn;
3628   else
3629     {
3630       struct sequence_stack *stack = seq_stack;
3631       /* Scan all pending sequences too.  */
3632       for (; stack; stack = stack->next)
3633 	if (before == stack->first)
3634 	  {
3635 	    stack->first = insn;
3636 	    break;
3637 	  }
3638 
3639       if (stack == 0)
3640 	abort ();
3641     }
3642 
3643   if (GET_CODE (before) != BARRIER
3644       && GET_CODE (insn) != BARRIER
3645       && (bb = BLOCK_FOR_INSN (before)))
3646     {
3647       set_block_for_insn (insn, bb);
3648       if (INSN_P (insn))
3649 	bb->flags |= BB_DIRTY;
3650       /* Should not happen as first in the BB is always
3651 	 either NOTE or LABEL.  */
3652       if (bb->head == insn
3653 	  /* Avoid clobbering of structure when creating new BB.  */
3654 	  && GET_CODE (insn) != BARRIER
3655 	  && (GET_CODE (insn) != NOTE
3656 	      || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3657 	abort ();
3658     }
3659 
3660   PREV_INSN (before) = insn;
3661   if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3662     PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3663 }
3664 
3665 /* Remove an insn from its doubly-linked list.  This function knows how
3666    to handle sequences.  */
3667 void
3668 remove_insn (insn)
3669      rtx insn;
3670 {
3671   rtx next = NEXT_INSN (insn);
3672   rtx prev = PREV_INSN (insn);
3673   basic_block bb;
3674 
3675   if (prev)
3676     {
3677       NEXT_INSN (prev) = next;
3678       if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3679 	{
3680 	  rtx sequence = PATTERN (prev);
3681 	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3682 	}
3683     }
3684   else if (first_insn == insn)
3685     first_insn = next;
3686   else
3687     {
3688       struct sequence_stack *stack = seq_stack;
3689       /* Scan all pending sequences too.  */
3690       for (; stack; stack = stack->next)
3691 	if (insn == stack->first)
3692 	  {
3693 	    stack->first = next;
3694 	    break;
3695 	  }
3696 
3697       if (stack == 0)
3698 	abort ();
3699     }
3700 
3701   if (next)
3702     {
3703       PREV_INSN (next) = prev;
3704       if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3705 	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3706     }
3707   else if (last_insn == insn)
3708     last_insn = prev;
3709   else
3710     {
3711       struct sequence_stack *stack = seq_stack;
3712       /* Scan all pending sequences too.  */
3713       for (; stack; stack = stack->next)
3714 	if (insn == stack->last)
3715 	  {
3716 	    stack->last = prev;
3717 	    break;
3718 	  }
3719 
3720       if (stack == 0)
3721 	abort ();
3722     }
3723   if (GET_CODE (insn) != BARRIER
3724       && (bb = BLOCK_FOR_INSN (insn)))
3725     {
3726       if (INSN_P (insn))
3727 	bb->flags |= BB_DIRTY;
3728       if (bb->head == insn)
3729 	{
3730 	  /* Never ever delete the basic block note without deleting whole
3731 	     basic block.  */
3732 	  if (GET_CODE (insn) == NOTE)
3733 	    abort ();
3734 	  bb->head = next;
3735 	}
3736       if (bb->end == insn)
3737 	bb->end = prev;
3738     }
3739 }
3740 
3741 /* Delete all insns made since FROM.
3742    FROM becomes the new last instruction.  */
3743 
3744 void
3745 delete_insns_since (from)
3746      rtx from;
3747 {
3748   if (from == 0)
3749     first_insn = 0;
3750   else
3751     NEXT_INSN (from) = 0;
3752   last_insn = from;
3753 }
3754 
3755 /* This function is deprecated, please use sequences instead.
3756 
3757    Move a consecutive bunch of insns to a different place in the chain.
3758    The insns to be moved are those between FROM and TO.
3759    They are moved to a new position after the insn AFTER.
3760    AFTER must not be FROM or TO or any insn in between.
3761 
3762    This function does not know about SEQUENCEs and hence should not be
3763    called after delay-slot filling has been done.  */
3764 
3765 void
3766 reorder_insns_nobb (from, to, after)
3767      rtx from, to, after;
3768 {
3769   /* Splice this bunch out of where it is now.  */
3770   if (PREV_INSN (from))
3771     NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3772   if (NEXT_INSN (to))
3773     PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3774   if (last_insn == to)
3775     last_insn = PREV_INSN (from);
3776   if (first_insn == from)
3777     first_insn = NEXT_INSN (to);
3778 
3779   /* Make the new neighbors point to it and it to them.  */
3780   if (NEXT_INSN (after))
3781     PREV_INSN (NEXT_INSN (after)) = to;
3782 
3783   NEXT_INSN (to) = NEXT_INSN (after);
3784   PREV_INSN (from) = after;
3785   NEXT_INSN (after) = from;
3786   if (after == last_insn)
3787     last_insn = to;
3788 }
3789 
3790 /* Same as function above, but take care to update BB boundaries.  */
3791 void
3792 reorder_insns (from, to, after)
3793      rtx from, to, after;
3794 {
3795   rtx prev = PREV_INSN (from);
3796   basic_block bb, bb2;
3797 
3798   reorder_insns_nobb (from, to, after);
3799 
3800   if (GET_CODE (after) != BARRIER
3801       && (bb = BLOCK_FOR_INSN (after)))
3802     {
3803       rtx x;
3804       bb->flags |= BB_DIRTY;
3805 
3806       if (GET_CODE (from) != BARRIER
3807 	  && (bb2 = BLOCK_FOR_INSN (from)))
3808 	{
3809 	  if (bb2->end == to)
3810 	    bb2->end = prev;
3811 	  bb2->flags |= BB_DIRTY;
3812 	}
3813 
3814       if (bb->end == after)
3815 	bb->end = to;
3816 
3817       for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3818 	set_block_for_insn (x, bb);
3819     }
3820 }
3821 
3822 /* Return the line note insn preceding INSN.  */
3823 
3824 static rtx
3825 find_line_note (insn)
3826      rtx insn;
3827 {
3828   if (no_line_numbers)
3829     return 0;
3830 
3831   for (; insn; insn = PREV_INSN (insn))
3832     if (GET_CODE (insn) == NOTE
3833 	&& NOTE_LINE_NUMBER (insn) >= 0)
3834       break;
3835 
3836   return insn;
3837 }
3838 
3839 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3840    of the moved insns when debugging.  This may insert a note between AFTER
3841    and FROM, and another one after TO.  */
3842 
3843 void
3844 reorder_insns_with_line_notes (from, to, after)
3845      rtx from, to, after;
3846 {
3847   rtx from_line = find_line_note (from);
3848   rtx after_line = find_line_note (after);
3849 
3850   reorder_insns (from, to, after);
3851 
3852   if (from_line == after_line)
3853     return;
3854 
3855   if (from_line)
3856     emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3857 			  NOTE_LINE_NUMBER (from_line),
3858 			  after);
3859   if (after_line)
3860     emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3861 			  NOTE_LINE_NUMBER (after_line),
3862 			  to);
3863 }
3864 
3865 /* Remove unnecessary notes from the instruction stream.  */
3866 
3867 void
3868 remove_unnecessary_notes ()
3869 {
3870   rtx block_stack = NULL_RTX;
3871   rtx eh_stack = NULL_RTX;
3872   rtx insn;
3873   rtx next;
3874   rtx tmp;
3875 
3876   /* We must not remove the first instruction in the function because
3877      the compiler depends on the first instruction being a note.  */
3878   for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3879     {
3880       /* Remember what's next.  */
3881       next = NEXT_INSN (insn);
3882 
3883       /* We're only interested in notes.  */
3884       if (GET_CODE (insn) != NOTE)
3885 	continue;
3886 
3887       switch (NOTE_LINE_NUMBER (insn))
3888 	{
3889 	case NOTE_INSN_DELETED:
3890 	case NOTE_INSN_LOOP_END_TOP_COND:
3891 	  remove_insn (insn);
3892 	  break;
3893 
3894 	case NOTE_INSN_EH_REGION_BEG:
3895 	  eh_stack = alloc_INSN_LIST (insn, eh_stack);
3896 	  break;
3897 
3898 	case NOTE_INSN_EH_REGION_END:
3899 	  /* Too many end notes.  */
3900 	  if (eh_stack == NULL_RTX)
3901 	    abort ();
3902 	  /* Mismatched nesting.  */
3903 	  if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3904 	    abort ();
3905 	  tmp = eh_stack;
3906 	  eh_stack = XEXP (eh_stack, 1);
3907 	  free_INSN_LIST_node (tmp);
3908 	  break;
3909 
3910 	case NOTE_INSN_BLOCK_BEG:
3911 	  /* By now, all notes indicating lexical blocks should have
3912 	     NOTE_BLOCK filled in.  */
3913 	  if (NOTE_BLOCK (insn) == NULL_TREE)
3914 	    abort ();
3915 	  block_stack = alloc_INSN_LIST (insn, block_stack);
3916 	  break;
3917 
3918 	case NOTE_INSN_BLOCK_END:
3919 	  /* Too many end notes.  */
3920 	  if (block_stack == NULL_RTX)
3921 	    abort ();
3922 	  /* Mismatched nesting.  */
3923 	  if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3924 	    abort ();
3925 	  tmp = block_stack;
3926 	  block_stack = XEXP (block_stack, 1);
3927 	  free_INSN_LIST_node (tmp);
3928 
3929 	  /* Scan back to see if there are any non-note instructions
3930 	     between INSN and the beginning of this block.  If not,
3931 	     then there is no PC range in the generated code that will
3932 	     actually be in this block, so there's no point in
3933 	     remembering the existence of the block.  */
3934 	  for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
3935 	    {
3936 	      /* This block contains a real instruction.  Note that we
3937 		 don't include labels; if the only thing in the block
3938 		 is a label, then there are still no PC values that
3939 		 lie within the block.  */
3940 	      if (INSN_P (tmp))
3941 		break;
3942 
3943 	      /* We're only interested in NOTEs.  */
3944 	      if (GET_CODE (tmp) != NOTE)
3945 		continue;
3946 
3947 	      if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3948 		{
3949 		  /* We just verified that this BLOCK matches us with
3950 		     the block_stack check above.  Never delete the
3951 		     BLOCK for the outermost scope of the function; we
3952 		     can refer to names from that scope even if the
3953 		     block notes are messed up.  */
3954 		  if (! is_body_block (NOTE_BLOCK (insn))
3955 		      && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3956 		    {
3957 		      remove_insn (tmp);
3958 		      remove_insn (insn);
3959 		    }
3960 		  break;
3961 		}
3962 	      else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3963 		/* There's a nested block.  We need to leave the
3964 		   current block in place since otherwise the debugger
3965 		   wouldn't be able to show symbols from our block in
3966 		   the nested block.  */
3967 		break;
3968 	    }
3969 	}
3970     }
3971 
3972   /* Too many begin notes.  */
3973   if (block_stack || eh_stack)
3974     abort ();
3975 }
3976 
3977 
3978 /* Emit insn(s) of given code and pattern
3979    at a specified place within the doubly-linked list.
3980 
3981    All of the emit_foo global entry points accept an object
3982    X which is either an insn list or a PATTERN of a single
3983    instruction.
3984 
3985    There are thus a few canonical ways to generate code and
3986    emit it at a specific place in the instruction stream.  For
3987    example, consider the instruction named SPOT and the fact that
3988    we would like to emit some instructions before SPOT.  We might
3989    do it like this:
3990 
3991 	start_sequence ();
3992 	... emit the new instructions ...
3993 	insns_head = get_insns ();
3994 	end_sequence ();
3995 
3996 	emit_insn_before (insns_head, SPOT);
3997 
3998    It used to be common to generate SEQUENCE rtl instead, but that
3999    is a relic of the past which no longer occurs.  The reason is that
4000    SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4001    generated would almost certainly die right after it was created.  */
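4002 
4002 /* As a concrete sketch of that pattern (illustrative only; TARGET and
4002    SPOT are placeholder rtxs, not names from this file), clearing a
4002    register just before SPOT would look like:
4002 
4002 	start_sequence ();
4002 	emit_insn (gen_rtx_SET (VOIDmode, target, const0_rtx));
4002 	insns_head = get_insns ();
4002 	end_sequence ();
4002 
4002 	emit_insn_before (insns_head, spot);  */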
4002 
4003 /* Make X be output before the instruction BEFORE.  */
4004 
4005 rtx
4006 emit_insn_before (x, before)
4007      rtx x, before;
4008 {
4009   rtx last = before;
4010   rtx insn;
4011 
4012 #ifdef ENABLE_RTL_CHECKING
4013   if (before == NULL_RTX)
4014     abort ();
4015 #endif
4016 
4017   if (x == NULL_RTX)
4018     return last;
4019 
4020   switch (GET_CODE (x))
4021     {
4022     case INSN:
4023     case JUMP_INSN:
4024     case CALL_INSN:
4025     case CODE_LABEL:
4026     case BARRIER:
4027     case NOTE:
4028       insn = x;
4029       while (insn)
4030 	{
4031 	  rtx next = NEXT_INSN (insn);
4032 	  add_insn_before (insn, before);
4033 	  last = insn;
4034 	  insn = next;
4035 	}
4036       break;
4037 
4038 #ifdef ENABLE_RTL_CHECKING
4039     case SEQUENCE:
4040       abort ();
4041       break;
4042 #endif
4043 
4044     default:
4045       last = make_insn_raw (x);
4046       add_insn_before (last, before);
4047       break;
4048     }
4049 
4050   return last;
4051 }
4052 
4053 /* Make an instruction with body X and code JUMP_INSN
4054    and output it before the instruction BEFORE.  */
4055 
4056 rtx
4057 emit_jump_insn_before (x, before)
4058      rtx x, before;
4059 {
4060   rtx insn, last = NULL_RTX;
4061 
4062 #ifdef ENABLE_RTL_CHECKING
4063   if (before == NULL_RTX)
4064     abort ();
4065 #endif
4066 
4067   switch (GET_CODE (x))
4068     {
4069     case INSN:
4070     case JUMP_INSN:
4071     case CALL_INSN:
4072     case CODE_LABEL:
4073     case BARRIER:
4074     case NOTE:
4075       insn = x;
4076       while (insn)
4077 	{
4078 	  rtx next = NEXT_INSN (insn);
4079 	  add_insn_before (insn, before);
4080 	  last = insn;
4081 	  insn = next;
4082 	}
4083       break;
4084 
4085 #ifdef ENABLE_RTL_CHECKING
4086     case SEQUENCE:
4087       abort ();
4088       break;
4089 #endif
4090 
4091     default:
4092       last = make_jump_insn_raw (x);
4093       add_insn_before (last, before);
4094       break;
4095     }
4096 
4097   return last;
4098 }
4099 
4100 /* Make an instruction with body X and code CALL_INSN
4101    and output it before the instruction BEFORE.  */
4102 
4103 rtx
4104 emit_call_insn_before (x, before)
4105      rtx x, before;
4106 {
4107   rtx last = NULL_RTX, insn;
4108 
4109 #ifdef ENABLE_RTL_CHECKING
4110   if (before == NULL_RTX)
4111     abort ();
4112 #endif
4113 
4114   switch (GET_CODE (x))
4115     {
4116     case INSN:
4117     case JUMP_INSN:
4118     case CALL_INSN:
4119     case CODE_LABEL:
4120     case BARRIER:
4121     case NOTE:
4122       insn = x;
4123       while (insn)
4124 	{
4125 	  rtx next = NEXT_INSN (insn);
4126 	  add_insn_before (insn, before);
4127 	  last = insn;
4128 	  insn = next;
4129 	}
4130       break;
4131 
4132 #ifdef ENABLE_RTL_CHECKING
4133     case SEQUENCE:
4134       abort ();
4135       break;
4136 #endif
4137 
4138     default:
4139       last = make_call_insn_raw (x);
4140       add_insn_before (last, before);
4141       break;
4142     }
4143 
4144   return last;
4145 }
4146 
4147 /* Make an insn of code BARRIER
4148    and output it before the insn BEFORE.  */
4149 
4150 rtx
4151 emit_barrier_before (before)
4152      rtx before;
4153 {
4154   rtx insn = rtx_alloc (BARRIER);
4155 
4156   INSN_UID (insn) = cur_insn_uid++;
4157 
4158   add_insn_before (insn, before);
4159   return insn;
4160 }
4161 
4162 /* Emit the label LABEL before the insn BEFORE.  */
4163 
4164 rtx
4165 emit_label_before (label, before)
4166      rtx label, before;
4167 {
4168   /* This can be called twice for the same label as a result of the
4169      confusion that follows a syntax error!  So make it harmless.  */
4170   if (INSN_UID (label) == 0)
4171     {
4172       INSN_UID (label) = cur_insn_uid++;
4173       add_insn_before (label, before);
4174     }
4175 
4176   return label;
4177 }
4178 
4179 /* Emit a note of subtype SUBTYPE before the insn BEFORE.  */
4180 
4181 rtx
4182 emit_note_before (subtype, before)
4183      int subtype;
4184      rtx before;
4185 {
4186   rtx note = rtx_alloc (NOTE);
4187   INSN_UID (note) = cur_insn_uid++;
4188   NOTE_SOURCE_FILE (note) = 0;
4189   NOTE_LINE_NUMBER (note) = subtype;
4190   BLOCK_FOR_INSN (note) = NULL;
4191 
4192   add_insn_before (note, before);
4193   return note;
4194 }
4195 
4196 /* Helper for emit_insn_after, handles lists of instructions
4197    efficiently.  */
4198 
4199 static rtx emit_insn_after_1 PARAMS ((rtx, rtx));
4200 
4201 static rtx
4202 emit_insn_after_1 (first, after)
4203      rtx first, after;
4204 {
4205   rtx last;
4206   rtx after_after;
4207   basic_block bb;
4208 
4209   if (GET_CODE (after) != BARRIER
4210       && (bb = BLOCK_FOR_INSN (after)))
4211     {
4212       bb->flags |= BB_DIRTY;
4213       for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4214 	if (GET_CODE (last) != BARRIER)
4215 	  set_block_for_insn (last, bb);
4216       if (GET_CODE (last) != BARRIER)
4217 	set_block_for_insn (last, bb);
4218       if (bb->end == after)
4219 	bb->end = last;
4220     }
4221   else
4222     for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4223       continue;
4224 
4225   after_after = NEXT_INSN (after);
4226 
4227   NEXT_INSN (after) = first;
4228   PREV_INSN (first) = after;
4229   NEXT_INSN (last) = after_after;
4230   if (after_after)
4231     PREV_INSN (after_after) = last;
4232 
4233   if (after == last_insn)
4234     last_insn = last;
4235   return last;
4236 }
4237 
4238 /* Make X be output after the insn AFTER.  */
4239 
4240 rtx
4241 emit_insn_after (x, after)
4242      rtx x, after;
4243 {
4244   rtx last = after;
4245 
4246 #ifdef ENABLE_RTL_CHECKING
4247   if (after == NULL_RTX)
4248     abort ();
4249 #endif
4250 
4251   if (x == NULL_RTX)
4252     return last;
4253 
4254   switch (GET_CODE (x))
4255     {
4256     case INSN:
4257     case JUMP_INSN:
4258     case CALL_INSN:
4259     case CODE_LABEL:
4260     case BARRIER:
4261     case NOTE:
4262       last = emit_insn_after_1 (x, after);
4263       break;
4264 
4265 #ifdef ENABLE_RTL_CHECKING
4266     case SEQUENCE:
4267       abort ();
4268       break;
4269 #endif
4270 
4271     default:
4272       last = make_insn_raw (x);
4273       add_insn_after (last, after);
4274       break;
4275     }
4276 
4277   return last;
4278 }
4279 
4280 /* Similar to emit_insn_after, except that line notes are to be inserted so
4281    as to act as if this insn were at FROM.  */
4282 
4283 void
4284 emit_insn_after_with_line_notes (x, after, from)
4285      rtx x, after, from;
4286 {
4287   rtx from_line = find_line_note (from);
4288   rtx after_line = find_line_note (after);
4289   rtx insn = emit_insn_after (x, after);
4290 
4291   if (from_line)
4292     emit_line_note_after (NOTE_SOURCE_FILE (from_line),
4293 			  NOTE_LINE_NUMBER (from_line),
4294 			  after);
4295 
4296   if (after_line)
4297     emit_line_note_after (NOTE_SOURCE_FILE (after_line),
4298 			  NOTE_LINE_NUMBER (after_line),
4299 			  insn);
4300 }
4301 
4302 /* Make an insn of code JUMP_INSN with body X
4303    and output it after the insn AFTER.  */
4304 
4305 rtx
4306 emit_jump_insn_after (x, after)
4307      rtx x, after;
4308 {
4309   rtx last;
4310 
4311 #ifdef ENABLE_RTL_CHECKING
4312   if (after == NULL_RTX)
4313     abort ();
4314 #endif
4315 
4316   switch (GET_CODE (x))
4317     {
4318     case INSN:
4319     case JUMP_INSN:
4320     case CALL_INSN:
4321     case CODE_LABEL:
4322     case BARRIER:
4323     case NOTE:
4324       last = emit_insn_after_1 (x, after);
4325       break;
4326 
4327 #ifdef ENABLE_RTL_CHECKING
4328     case SEQUENCE:
4329       abort ();
4330       break;
4331 #endif
4332 
4333     default:
4334       last = make_jump_insn_raw (x);
4335       add_insn_after (last, after);
4336       break;
4337     }
4338 
4339   return last;
4340 }
4341 
4342 /* Make an instruction with body X and code CALL_INSN
4343    and output it after the instruction AFTER.  */
4344 
4345 rtx
4346 emit_call_insn_after (x, after)
4347      rtx x, after;
4348 {
4349   rtx last;
4350 
4351 #ifdef ENABLE_RTL_CHECKING
4352   if (after == NULL_RTX)
4353     abort ();
4354 #endif
4355 
4356   switch (GET_CODE (x))
4357     {
4358     case INSN:
4359     case JUMP_INSN:
4360     case CALL_INSN:
4361     case CODE_LABEL:
4362     case BARRIER:
4363     case NOTE:
4364       last = emit_insn_after_1 (x, after);
4365       break;
4366 
4367 #ifdef ENABLE_RTL_CHECKING
4368     case SEQUENCE:
4369       abort ();
4370       break;
4371 #endif
4372 
4373     default:
4374       last = make_call_insn_raw (x);
4375       add_insn_after (last, after);
4376       break;
4377     }
4378 
4379   return last;
4380 }
4381 
4382 /* Make an insn of code BARRIER
4383    and output it after the insn AFTER.  */
4384 
4385 rtx
4386 emit_barrier_after (after)
4387      rtx after;
4388 {
4389   rtx insn = rtx_alloc (BARRIER);
4390 
4391   INSN_UID (insn) = cur_insn_uid++;
4392 
4393   add_insn_after (insn, after);
4394   return insn;
4395 }
4396 
4397 /* Emit the label LABEL after the insn AFTER.  */
4398 
4399 rtx
4400 emit_label_after (label, after)
4401      rtx label, after;
4402 {
4403   /* This can be called twice for the same label
4404      as a result of the confusion that follows a syntax error!
4405      So make it harmless.  */
4406   if (INSN_UID (label) == 0)
4407     {
4408       INSN_UID (label) = cur_insn_uid++;
4409       add_insn_after (label, after);
4410     }
4411 
4412   return label;
4413 }
4414 
4415 /* Emit a note of subtype SUBTYPE after the insn AFTER.  */
4416 
4417 rtx
4418 emit_note_after (subtype, after)
4419      int subtype;
4420      rtx after;
4421 {
4422   rtx note = rtx_alloc (NOTE);
4423   INSN_UID (note) = cur_insn_uid++;
4424   NOTE_SOURCE_FILE (note) = 0;
4425   NOTE_LINE_NUMBER (note) = subtype;
4426   BLOCK_FOR_INSN (note) = NULL;
4427   add_insn_after (note, after);
4428   return note;
4429 }
4430 
4431 /* Emit a line note for FILE and LINE after the insn AFTER.  */
4432 
4433 rtx
4434 emit_line_note_after (file, line, after)
4435      const char *file;
4436      int line;
4437      rtx after;
4438 {
4439   rtx note;
4440 
4441   if (no_line_numbers && line > 0)
4442     {
4443       cur_insn_uid++;
4444       return 0;
4445     }
4446 
4447   note = rtx_alloc (NOTE);
4448   INSN_UID (note) = cur_insn_uid++;
4449   NOTE_SOURCE_FILE (note) = file;
4450   NOTE_LINE_NUMBER (note) = line;
4451   BLOCK_FOR_INSN (note) = NULL;
4452   add_insn_after (note, after);
4453   return note;
4454 }
4455 
4456 /* Like emit_insn_after, but set INSN_SCOPE according to SCOPE.  */
4457 rtx
4458 emit_insn_after_scope (pattern, after, scope)
4459      rtx pattern, after;
4460      tree scope;
4461 {
4462   rtx last = emit_insn_after (pattern, after);
4463 
4464   after = NEXT_INSN (after);
4465   while (1)
4466     {
4467       if (active_insn_p (after))
4468 	INSN_SCOPE (after) = scope;
4469       if (after == last)
4470 	break;
4471       after = NEXT_INSN (after);
4472     }
4473   return last;
4474 }
4475 
4476 /* Like emit_jump_insn_after, but set INSN_SCOPE according to SCOPE.  */
4477 rtx
4478 emit_jump_insn_after_scope (pattern, after, scope)
4479      rtx pattern, after;
4480      tree scope;
4481 {
4482   rtx last = emit_jump_insn_after (pattern, after);
4483 
4484   after = NEXT_INSN (after);
4485   while (1)
4486     {
4487       if (active_insn_p (after))
4488 	INSN_SCOPE (after) = scope;
4489       if (after == last)
4490 	break;
4491       after = NEXT_INSN (after);
4492     }
4493   return last;
4494 }
4495 
4496 /* Like emit_call_insn_after, but set INSN_SCOPE according to SCOPE.  */
4497 rtx
4498 emit_call_insn_after_scope (pattern, after, scope)
4499      rtx pattern, after;
4500      tree scope;
4501 {
4502   rtx last = emit_call_insn_after (pattern, after);
4503 
4504   after = NEXT_INSN (after);
4505   while (1)
4506     {
4507       if (active_insn_p (after))
4508 	INSN_SCOPE (after) = scope;
4509       if (after == last)
4510 	break;
4511       after = NEXT_INSN (after);
4512     }
4513   return last;
4514 }
4515 
4516 /* Like emit_insn_before, but set INSN_SCOPE according to SCOPE.  */
4517 rtx
4518 emit_insn_before_scope (pattern, before, scope)
4519      rtx pattern, before;
4520      tree scope;
4521 {
4522   rtx first = PREV_INSN (before);
4523   rtx last = emit_insn_before (pattern, before);
4524 
4525   first = NEXT_INSN (first);
4526   while (1)
4527     {
4528       if (active_insn_p (first))
4529 	INSN_SCOPE (first) = scope;
4530       if (first == last)
4531 	break;
4532       first = NEXT_INSN (first);
4533     }
4534   return last;
4535 }
4536 
4537 /* Take X and emit it at the end of the doubly-linked
4538    INSN list.
4539 
4540    Returns the last insn emitted.  */
4541 
4542 rtx
4543 emit_insn (x)
4544      rtx x;
4545 {
4546   rtx last = last_insn;
4547   rtx insn;
4548 
4549   if (x == NULL_RTX)
4550     return last;
4551 
4552   switch (GET_CODE (x))
4553     {
4554     case INSN:
4555     case JUMP_INSN:
4556     case CALL_INSN:
4557     case CODE_LABEL:
4558     case BARRIER:
4559     case NOTE:
4560       insn = x;
4561       while (insn)
4562 	{
4563 	  rtx next = NEXT_INSN (insn);
4564 	  add_insn (insn);
4565 	  last = insn;
4566 	  insn = next;
4567 	}
4568       break;
4569 
4570 #ifdef ENABLE_RTL_CHECKING
4571     case SEQUENCE:
4572       abort ();
4573       break;
4574 #endif
4575 
4576     default:
4577       last = make_insn_raw (x);
4578       add_insn (last);
4579       break;
4580     }
4581 
4582   return last;
4583 }
4584 
4585 /* Make an insn of code JUMP_INSN with pattern X
4586    and add it to the end of the doubly-linked list.  */
4587 
4588 rtx
4589 emit_jump_insn (x)
4590      rtx x;
4591 {
4592   rtx last = NULL_RTX, insn;
4593 
4594   switch (GET_CODE (x))
4595     {
4596     case INSN:
4597     case JUMP_INSN:
4598     case CALL_INSN:
4599     case CODE_LABEL:
4600     case BARRIER:
4601     case NOTE:
4602       insn = x;
4603       while (insn)
4604 	{
4605 	  rtx next = NEXT_INSN (insn);
4606 	  add_insn (insn);
4607 	  last = insn;
4608 	  insn = next;
4609 	}
4610       break;
4611 
4612 #ifdef ENABLE_RTL_CHECKING
4613     case SEQUENCE:
4614       abort ();
4615       break;
4616 #endif
4617 
4618     default:
4619       last = make_jump_insn_raw (x);
4620       add_insn (last);
4621       break;
4622     }
4623 
4624   return last;
4625 }
4626 
4627 /* Make an insn of code CALL_INSN with pattern X
4628    and add it to the end of the doubly-linked list.  */
4629 
4630 rtx
4631 emit_call_insn (x)
4632      rtx x;
4633 {
4634   rtx insn;
4635 
4636   switch (GET_CODE (x))
4637     {
4638     case INSN:
4639     case JUMP_INSN:
4640     case CALL_INSN:
4641     case CODE_LABEL:
4642     case BARRIER:
4643     case NOTE:
4644       insn = emit_insn (x);
4645       break;
4646 
4647 #ifdef ENABLE_RTL_CHECKING
4648     case SEQUENCE:
4649       abort ();
4650       break;
4651 #endif
4652 
4653     default:
4654       insn = make_call_insn_raw (x);
4655       add_insn (insn);
4656       break;
4657     }
4658 
4659   return insn;
4660 }
4661 
4662 /* Add the label LABEL to the end of the doubly-linked list.  */
4663 
4664 rtx
4665 emit_label (label)
4666      rtx label;
4667 {
4668   /* This can be called twice for the same label
4669      as a result of the confusion that follows a syntax error!
4670      So make it harmless.  */
4671   if (INSN_UID (label) == 0)
4672     {
4673       INSN_UID (label) = cur_insn_uid++;
4674       add_insn (label);
4675     }
4676   return label;
4677 }
4678 
4679 /* Make an insn of code BARRIER
4680    and add it to the end of the doubly-linked list.  */
4681 
4682 rtx
4683 emit_barrier ()
4684 {
4685   rtx barrier = rtx_alloc (BARRIER);
4686   INSN_UID (barrier) = cur_insn_uid++;
4687   add_insn (barrier);
4688   return barrier;
4689 }
4690 
4691 /* Make an insn of code NOTE
4692    with data-fields specified by FILE and LINE
4693    and add it to the end of the doubly-linked list,
4694    but only if line-numbers are desired for debugging info.  */
4695 
4696 rtx
4697 emit_line_note (file, line)
4698      const char *file;
4699      int line;
4700 {
4701   set_file_and_line_for_stmt (file, line);
4702 
4703 #if 0
4704   if (no_line_numbers)
4705     return 0;
4706 #endif
4707 
4708   return emit_note (file, line);
4709 }
4710 
4711 /* Make an insn of code NOTE
4712    with data-fields specified by FILE and LINE
4713    and add it to the end of the doubly-linked list.
4714    If it is a line-number NOTE, omit it if it matches the previous one.  */
4715 
4716 rtx
4717 emit_note (file, line)
4718      const char *file;
4719      int line;
4720 {
4721   rtx note;
4722 
4723   if (line > 0)
4724     {
4725       if (file && last_filename && !strcmp (file, last_filename)
4726 	  && line == last_linenum)
4727 	return 0;
4728       last_filename = file;
4729       last_linenum = line;
4730     }
4731 
4732   if (no_line_numbers && line > 0)
4733     {
4734       cur_insn_uid++;
4735       return 0;
4736     }
4737 
4738   note = rtx_alloc (NOTE);
4739   INSN_UID (note) = cur_insn_uid++;
4740   NOTE_SOURCE_FILE (note) = file;
4741   NOTE_LINE_NUMBER (note) = line;
4742   BLOCK_FOR_INSN (note) = NULL;
4743   add_insn (note);
4744   return note;
4745 }
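4746 
4746 /* A hedged sketch of that filtering: after
4746 
4746 	emit_note ("foo.c", 10);
4746 	note = emit_note ("foo.c", 10);
4746 
4746    the second call returns 0 and emits nothing, since the file and
4746    line match last_filename and last_linenum; emit_line_note_force
4746    below defeats the filtering by resetting last_linenum.  */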
4746 
4747 /* Emit a NOTE, and don't omit it even if LINE is the previous note.  */
4748 
4749 rtx
4750 emit_line_note_force (file, line)
4751      const char *file;
4752      int line;
4753 {
4754   last_linenum = -1;
4755   return emit_line_note (file, line);
4756 }
4757 
4758 /* Cause next statement to emit a line note even if the line number
4759    has not changed.  This is used at the beginning of a function.  */
4760 
4761 void
4762 force_next_line_note ()
4763 {
4764   last_linenum = -1;
4765 }
4766 
4767 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4768    note of this type already exists, remove it first.  */
4769 
4770 rtx
4771 set_unique_reg_note (insn, kind, datum)
4772      rtx insn;
4773      enum reg_note kind;
4774      rtx datum;
4775 {
4776   rtx note = find_reg_note (insn, kind, NULL_RTX);
4777 
4778   switch (kind)
4779     {
4780     case REG_EQUAL:
4781     case REG_EQUIV:
4782       /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4783 	 has multiple sets (some callers assume single_set
4784 	 means the insn only has one set, when in fact it
4785 	 means the insn only has one * useful * set).  */
4786       if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4787 	{
4788 	  if (note)
4789 	    abort ();
4790 	  return NULL_RTX;
4791 	}
4792 
4793       /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4794 	 It serves no useful purpose and breaks eliminate_regs.  */
4795       if (GET_CODE (datum) == ASM_OPERANDS)
4796 	return NULL_RTX;
4797       break;
4798 
4799     default:
4800       break;
4801     }
4802 
4803   if (note)
4804     {
4805       XEXP (note, 0) = datum;
4806       return note;
4807     }
4808 
4809   REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4810   return REG_NOTES (insn);
4811 }
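4812 
4812 /* Typical use, as a sketch (INSN and the constant are placeholders):
4812 
4812 	set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));
4812 
4812    records that the insn's single useful set computes the value 42,
4812    replacing any REG_EQUAL note already on the insn rather than
4812    adding a duplicate.  */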
4812 
4813 /* Return an indication of which type of insn should have X as a body.
4814    The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */
4815 
4816 enum rtx_code
4817 classify_insn (x)
4818      rtx x;
4819 {
4820   if (GET_CODE (x) == CODE_LABEL)
4821     return CODE_LABEL;
4822   if (GET_CODE (x) == CALL)
4823     return CALL_INSN;
4824   if (GET_CODE (x) == RETURN)
4825     return JUMP_INSN;
4826   if (GET_CODE (x) == SET)
4827     {
4828       if (SET_DEST (x) == pc_rtx)
4829 	return JUMP_INSN;
4830       else if (GET_CODE (SET_SRC (x)) == CALL)
4831 	return CALL_INSN;
4832       else
4833 	return INSN;
4834     }
4835   if (GET_CODE (x) == PARALLEL)
4836     {
4837       int j;
4838       for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4839 	if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4840 	  return CALL_INSN;
4841 	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4842 		 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4843 	  return JUMP_INSN;
4844 	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4845 		 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4846 	  return CALL_INSN;
4847     }
4848   return INSN;
4849 }
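4850 
4850 /* For example (a sketch; LABEL is a placeholder CODE_LABEL), a
4850    pattern that sets the program counter classifies as a jump:
4850 
4850 	x = gen_rtx_SET (VOIDmode, pc_rtx,
4850 			 gen_rtx_LABEL_REF (VOIDmode, label));
4850 
4850    and classify_insn (x) returns JUMP_INSN.  */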
4850 
4851 /* Emit the rtl pattern X as an appropriate kind of insn.
4852    If X is a label, it is simply added into the insn chain.  */
4853 
4854 rtx
4855 emit (x)
4856      rtx x;
4857 {
4858   enum rtx_code code = classify_insn (x);
4859 
4860   if (code == CODE_LABEL)
4861     return emit_label (x);
4862   else if (code == INSN)
4863     return emit_insn (x);
4864   else if (code == JUMP_INSN)
4865     {
4866       rtx insn = emit_jump_insn (x);
4867       if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4868 	return emit_barrier ();
4869       return insn;
4870     }
4871   else if (code == CALL_INSN)
4872     return emit_call_insn (x);
4873   else
4874     abort ();
4875 }
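4876 
4876 /* So, as a sketch (assuming the target supports a bare RETURN
4876    pattern), emit (gen_rtx (RETURN, VOIDmode)) classifies the pattern
4876    as a JUMP_INSN, emits it, and, because RETURN is an unconditional
4876    transfer, returns the BARRIER emitted after it.  */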
4876 
4877 /* Space for free sequence stack entries.  */
4878 static GTY ((deletable (""))) struct sequence_stack *free_sequence_stack;
4879 
4880 /* Begin emitting insns to a sequence which can be packaged in an
4881    RTL_EXPR.  If this sequence will contain something that might cause
4882    the compiler to pop arguments to function calls (because those
4883    pops have previously been deferred; see INHIBIT_DEFER_POP for more
4884    details), use do_pending_stack_adjust before calling this function.
4885    That will ensure that the deferred pops are not accidentally
4886    emitted in the middle of this sequence.  */
4887 
4888 void
4889 start_sequence ()
4890 {
4891   struct sequence_stack *tem;
4892 
4893   if (free_sequence_stack != NULL)
4894     {
4895       tem = free_sequence_stack;
4896       free_sequence_stack = tem->next;
4897     }
4898   else
4899     tem = (struct sequence_stack *) ggc_alloc (sizeof (struct sequence_stack));
4900 
4901   tem->next = seq_stack;
4902   tem->first = first_insn;
4903   tem->last = last_insn;
4904   tem->sequence_rtl_expr = seq_rtl_expr;
4905 
4906   seq_stack = tem;
4907 
4908   first_insn = 0;
4909   last_insn = 0;
4910 }
4911 
4912 /* Similarly, but indicate that this sequence will be placed in T, an
4913    RTL_EXPR.  See the documentation for start_sequence for more
4914    information about how to use this function.  */
4915 
4916 void
4917 start_sequence_for_rtl_expr (t)
4918      tree t;
4919 {
4920   start_sequence ();
4921 
4922   seq_rtl_expr = t;
4923 }
4924 
4925 /* Set up the insn chain starting with FIRST as the current sequence,
4926    saving the previously current one.  See the documentation for
4927    start_sequence for more information about how to use this function.  */
4928 
4929 void
4930 push_to_sequence (first)
4931      rtx first;
4932 {
4933   rtx last;
4934 
4935   start_sequence ();
4936 
4937   for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4938 
4939   first_insn = first;
4940   last_insn = last;
4941 }
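4942 
4942 /* A common use, sketched (SAVED is a placeholder insn chain built
4942    earlier): append more insns to an existing chain, then take the
4942    extended chain back:
4942 
4942 	push_to_sequence (saved);
4942 	emit_insn (pat);
4942 	saved = get_insns ();
4942 	end_sequence ();  */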
4942 
4943 /* Set up the insn chain from FIRST to LAST as the current sequence.  */
4944 
4945 void
4946 push_to_full_sequence (first, last)
4947      rtx first, last;
4948 {
4949   start_sequence ();
4950   first_insn = first;
4951   last_insn = last;
4952   /* We really should have the end of the insn chain here.  */
4953   if (last && NEXT_INSN (last))
4954     abort ();
4955 }
4956 
4957 /* Set up the outer-level insn chain
4958    as the current sequence, saving the previously current one.  */
4959 
4960 void
4961 push_topmost_sequence ()
4962 {
4963   struct sequence_stack *stack, *top = NULL;
4964 
4965   start_sequence ();
4966 
4967   for (stack = seq_stack; stack; stack = stack->next)
4968     top = stack;
4969 
4970   first_insn = top->first;
4971   last_insn = top->last;
4972   seq_rtl_expr = top->sequence_rtl_expr;
4973 }
4974 
4975 /* After emitting to the outer-level insn chain, update the outer-level
4976    insn chain, and restore the previous saved state.  */
4977 
4978 void
4979 pop_topmost_sequence ()
4980 {
4981   struct sequence_stack *stack, *top = NULL;
4982 
4983   for (stack = seq_stack; stack; stack = stack->next)
4984     top = stack;
4985 
4986   top->first = first_insn;
4987   top->last = last_insn;
4988   /* ??? Why don't we save seq_rtl_expr here?  */
4989 
4990   end_sequence ();
4991 }
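4992 
4992 /* Together, these two let code running inside a nested sequence emit
4992    into the function's outer insn chain, e.g. (sketch):
4992 
4992 	push_topmost_sequence ();
4992 	emit_insn (pat);
4992 	pop_topmost_sequence ();  */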
4992 
4993 /* After emitting to a sequence, restore previous saved state.
4994 
4995    To get the contents of the sequence just made, you must call
4996    `get_insns' *before* calling here.
4997 
4998    If the compiler might have deferred popping arguments while
4999    generating this sequence, and this sequence will not be immediately
5000    inserted into the instruction stream, use do_pending_stack_adjust
5001    before calling get_insns.  That will ensure that the deferred
5002    pops are inserted into this sequence, and not into some random
5003    location in the instruction stream.  See INHIBIT_DEFER_POP for more
5004    information about deferred popping of arguments.  */
5005 
5006 void
5007 end_sequence ()
5008 {
5009   struct sequence_stack *tem = seq_stack;
5010 
5011   first_insn = tem->first;
5012   last_insn = tem->last;
5013   seq_rtl_expr = tem->sequence_rtl_expr;
5014   seq_stack = tem->next;
5015 
5016   memset (tem, 0, sizeof (*tem));
5017   tem->next = free_sequence_stack;
5018   free_sequence_stack = tem;
5019 }
5020 
5021 /* This works like end_sequence, but records the old sequence in FIRST
5022    and LAST.  */
5023 
5024 void
5025 end_full_sequence (first, last)
5026      rtx *first, *last;
5027 {
5028   *first = first_insn;
5029   *last = last_insn;
5030   end_sequence ();
5031 }
5032 
5033 /* Return 1 if currently emitting into a sequence.  */
5034 
5035 int
5036 in_sequence_p ()
5037 {
5038   return seq_stack != 0;
5039 }
5040 
5041 /* Put the various virtual registers into REGNO_REG_RTX.  */
5042 
5043 void
5044 init_virtual_regs (es)
5045      struct emit_status *es;
5046 {
5047   rtx *ptr = es->x_regno_reg_rtx;
5048   ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5049   ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5050   ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5051   ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5052   ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5053 }
5054 
5055 
5056 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
5057 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5058 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5059 static int copy_insn_n_scratches;
5060 
5061 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5062    copied an ASM_OPERANDS.
5063    In that case, it is the original input-operand vector.  */
5064 static rtvec orig_asm_operands_vector;
5065 
5066 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5067    copied an ASM_OPERANDS.
5068    In that case, it is the copied input-operand vector.  */
5069 static rtvec copy_asm_operands_vector;
5070 
5071 /* Likewise for the constraints vector.  */
5072 static rtvec orig_asm_constraints_vector;
5073 static rtvec copy_asm_constraints_vector;
5074 
5075 /* Recursively create a new copy of an rtx for copy_insn.
5076    This function differs from copy_rtx in that it handles SCRATCHes and
5077    ASM_OPERANDs properly.
5078    Normally, this function is not used directly; use copy_insn as front end.
5079    However, you could first copy an insn pattern with copy_insn and then use
5080    this function afterwards to properly copy any REG_NOTEs containing
5081    SCRATCHes.  */
5082 
5083 rtx
5084 copy_insn_1 (orig)
5085      rtx orig;
5086 {
5087   rtx copy;
5088   int i, j;
5089   RTX_CODE code;
5090   const char *format_ptr;
5091 
5092   code = GET_CODE (orig);
5093 
5094   switch (code)
5095     {
5096     case REG:
5097     case QUEUED:
5098     case CONST_INT:
5099     case CONST_DOUBLE:
5100     case CONST_VECTOR:
5101     case SYMBOL_REF:
5102     case CODE_LABEL:
5103     case PC:
5104     case CC0:
5105     case ADDRESSOF:
5106       return orig;
5107 
5108     case SCRATCH:
5109       for (i = 0; i < copy_insn_n_scratches; i++)
5110 	if (copy_insn_scratch_in[i] == orig)
5111 	  return copy_insn_scratch_out[i];
5112       break;
5113 
5114     case CONST:
5115       /* CONST can be shared if it contains a SYMBOL_REF.  If it contains
5116 	 a LABEL_REF, it isn't sharable.  */
5117       if (GET_CODE (XEXP (orig, 0)) == PLUS
5118 	  && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
5119 	  && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
5120 	return orig;
5121       break;
5122 
5123       /* A MEM with a constant address is not sharable.  The problem is that
5124 	 the constant address may need to be reloaded.  If the mem is shared,
5125 	 then reloading one copy of this mem will cause all copies to appear
5126 	 to have been reloaded.  */
5127 
5128     default:
5129       break;
5130     }
5131 
5132   copy = rtx_alloc (code);
5133 
5134   /* Copy the various flags, and other information.  We assume that
5135      all fields need copying, and then clear the fields that should
5136      not be copied.  That is the sensible default behavior, and forces
5137      us to explicitly document why we are *not* copying a flag.  */
5138   memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
5139 
5140   /* We do not copy the USED flag, which is used as a mark bit during
5141      walks over the RTL.  */
5142   RTX_FLAG (copy, used) = 0;
5143 
5144   /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
5145   if (GET_RTX_CLASS (code) == 'i')
5146     {
5147       RTX_FLAG (copy, jump) = 0;
5148       RTX_FLAG (copy, call) = 0;
5149       RTX_FLAG (copy, frame_related) = 0;
5150     }
5151 
5152   format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5153 
5154   for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5155     {
5156       copy->fld[i] = orig->fld[i];
5157       switch (*format_ptr++)
5158 	{
5159 	case 'e':
5160 	  if (XEXP (orig, i) != NULL)
5161 	    XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5162 	  break;
5163 
5164 	case 'E':
5165 	case 'V':
5166 	  if (XVEC (orig, i) == orig_asm_constraints_vector)
5167 	    XVEC (copy, i) = copy_asm_constraints_vector;
5168 	  else if (XVEC (orig, i) == orig_asm_operands_vector)
5169 	    XVEC (copy, i) = copy_asm_operands_vector;
5170 	  else if (XVEC (orig, i) != NULL)
5171 	    {
5172 	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5173 	      for (j = 0; j < XVECLEN (copy, i); j++)
5174 		XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5175 	    }
5176 	  break;
5177 
5178 	case 't':
5179 	case 'w':
5180 	case 'i':
5181 	case 's':
5182 	case 'S':
5183 	case 'u':
5184 	case '0':
5185 	  /* These are left unchanged.  */
5186 	  break;
5187 
5188 	default:
5189 	  abort ();
5190 	}
5191     }
5192 
5193   if (code == SCRATCH)
5194     {
5195       i = copy_insn_n_scratches++;
5196       if (i >= MAX_RECOG_OPERANDS)
5197 	abort ();
5198       copy_insn_scratch_in[i] = orig;
5199       copy_insn_scratch_out[i] = copy;
5200     }
5201   else if (code == ASM_OPERANDS)
5202     {
5203       orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5204       copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5205       orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5206       copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5207     }
5208 
5209   return copy;
5210 }
5211 
5212 /* Create a new copy of an rtx.
5213    This function differs from copy_rtx in that it handles SCRATCHes and
5214    ASM_OPERANDs properly.
5215    INSN doesn't really have to be a full INSN; it could be just the
5216    pattern.  */
5217 rtx
5218 copy_insn (insn)
5219      rtx insn;
5220 {
5221   copy_insn_n_scratches = 0;
5222   orig_asm_operands_vector = 0;
5223   orig_asm_constraints_vector = 0;
5224   copy_asm_operands_vector = 0;
5225   copy_asm_constraints_vector = 0;
5226   return copy_insn_1 (insn);
5227 }
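5228 
5228 /* Following the comment above copy_insn_1, a full copy of an insn's
5228    pattern together with its notes might look like (sketch):
5228 
5228 	new_pat = copy_insn (PATTERN (insn));
5228 	new_notes = copy_insn_1 (REG_NOTES (insn));
5228 
5228    The second call reuses the SCRATCH map accumulated by the first,
5228    so a SCRATCH mentioned by both the pattern and a note stays shared
5228    in the copy.  */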
5228 
5229 /* Initialize data structures and variables in this file
5230    before generating rtl for each function.  */
5231 
5232 void
5233 init_emit ()
5234 {
5235   struct function *f = cfun;
5236 
5237   f->emit = (struct emit_status *) ggc_alloc (sizeof (struct emit_status));
5238   first_insn = NULL;
5239   last_insn = NULL;
5240   seq_rtl_expr = NULL;
5241   cur_insn_uid = 1;
5242   reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5243   last_linenum = 0;
5244   last_filename = 0;
5245   first_label_num = label_num;
5246   last_label_num = 0;
5247   seq_stack = NULL;
5248 
5249   /* Init the tables that describe all the pseudo regs.  */
5250 
5251   f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5252 
5253   f->emit->regno_pointer_align
5254     = (unsigned char *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5255 					   * sizeof (unsigned char));
5256 
5257   regno_reg_rtx
5258     = (rtx *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5259 				 * sizeof (rtx));
5260 
5261   f->emit->regno_decl
5262     = (tree *) ggc_alloc_cleared (f->emit->regno_pointer_align_length
5263 				  * sizeof (tree));
5264 
5265   /* Put copies of all the hard registers into regno_reg_rtx.  */
5266   memcpy (regno_reg_rtx,
5267 	  static_regno_reg_rtx,
5268 	  FIRST_PSEUDO_REGISTER * sizeof (rtx));
5269 
5270   /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
5271   init_virtual_regs (f->emit);
5272 
5273   /* Indicate that the virtual registers and stack locations are
5274      all pointers.  */
5275   REG_POINTER (stack_pointer_rtx) = 1;
5276   REG_POINTER (frame_pointer_rtx) = 1;
5277   REG_POINTER (hard_frame_pointer_rtx) = 1;
5278   REG_POINTER (arg_pointer_rtx) = 1;
5279 
5280   REG_POINTER (virtual_incoming_args_rtx) = 1;
5281   REG_POINTER (virtual_stack_vars_rtx) = 1;
5282   REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5283   REG_POINTER (virtual_outgoing_args_rtx) = 1;
5284   REG_POINTER (virtual_cfa_rtx) = 1;
5285 
5286 #ifdef STACK_BOUNDARY
5287   REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5288   REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5289   REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5290   REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5291 
5292   REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5293   REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5294   REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5295   REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5296   REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5297 #endif
5298 
5299 #ifdef INIT_EXPANDERS
5300   INIT_EXPANDERS;
5301 #endif
5302 }
5303 
5304 /* Generate the constant 0.  */
5305 
5306 static rtx
5307 gen_const_vector_0 (mode)
5308      enum machine_mode mode;
5309 {
5310   rtx tem;
5311   rtvec v;
5312   int units, i;
5313   enum machine_mode inner;
5314 
5315   units = GET_MODE_NUNITS (mode);
5316   inner = GET_MODE_INNER (mode);
5317 
5318   v = rtvec_alloc (units);
5319 
5320   /* CONST0_RTX (inner) must already be set before this function is called.  */
5321   if (!CONST0_RTX (inner))
5322     abort ();
5323 
5324   for (i = 0; i < units; ++i)
5325     RTVEC_ELT (v, i) = CONST0_RTX (inner);
5326 
5327   tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5328   return tem;
5329 }
5330 
5331 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the shared
5332    zero vector when all elements are zero.  */
5333 rtx
5334 gen_rtx_CONST_VECTOR (mode, v)
5335      enum machine_mode mode;
5336      rtvec v;
5337 {
5338   rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
5339   int i;
5340 
5341   for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
5342     if (RTVEC_ELT (v, i) != inner_zero)
5343       return gen_rtx_raw_CONST_VECTOR (mode, v);
5344   return CONST0_RTX (mode);
5345 }
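5346 
5346 /* So, as a sketch (assuming V4SImode is supported), an all-zero
5346    vector built through this function collapses to the shared
5346    constant:
5346 
5346 	rtvec v = rtvec_alloc (4);
5346 	for (i = 0; i < 4; i++)
5346 	  RTVEC_ELT (v, i) = const0_rtx;
5346 	x = gen_rtx_CONST_VECTOR (V4SImode, v);
5346 
5346    Here x is CONST0_RTX (V4SImode), not a fresh rtx.  */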
5346 
5347 /* Create some permanent unique rtl objects shared between all functions.
5348    LINE_NUMBERS is nonzero if line numbers are to be generated.  */
5349 
5350 void
5351 init_emit_once (line_numbers)
5352      int line_numbers;
5353 {
5354   int i;
5355   enum machine_mode mode;
5356   enum machine_mode double_mode;
5357 
5358   /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5359      tables.  */
5360   const_int_htab = htab_create (37, const_int_htab_hash,
5361 				const_int_htab_eq, NULL);
5362 
5363   const_double_htab = htab_create (37, const_double_htab_hash,
5364 				   const_double_htab_eq, NULL);
5365 
5366   mem_attrs_htab = htab_create (37, mem_attrs_htab_hash,
5367 				mem_attrs_htab_eq, NULL);
5368 
5369   no_line_numbers = ! line_numbers;
5370 
5371   /* Compute the word and byte modes.  */
5372 
5373   byte_mode = VOIDmode;
5374   word_mode = VOIDmode;
5375   double_mode = VOIDmode;
5376 
5377   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5378        mode = GET_MODE_WIDER_MODE (mode))
5379     {
5380       if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5381 	  && byte_mode == VOIDmode)
5382 	byte_mode = mode;
5383 
5384       if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5385 	  && word_mode == VOIDmode)
5386 	word_mode = mode;
5387     }
5388 
5389   for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5390        mode = GET_MODE_WIDER_MODE (mode))
5391     {
5392       if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5393 	  && double_mode == VOIDmode)
5394 	double_mode = mode;
5395     }
5396 
5397   ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5398 
5399   /* Assign register numbers to the globally defined register rtx.
5400      This must be done at runtime because the register number field
5401      is in a union and some compilers can't initialize unions.  */
5402 
5403   pc_rtx = gen_rtx (PC, VOIDmode);
5404   cc0_rtx = gen_rtx (CC0, VOIDmode);
5405   stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5406   frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5407   if (hard_frame_pointer_rtx == 0)
5408     hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5409 					  HARD_FRAME_POINTER_REGNUM);
5410   if (arg_pointer_rtx == 0)
5411     arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5412   virtual_incoming_args_rtx =
5413     gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5414   virtual_stack_vars_rtx =
5415     gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5416   virtual_stack_dynamic_rtx =
5417     gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5418   virtual_outgoing_args_rtx =
5419     gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5420   virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5421 
5422   /* Initialize RTL for commonly used hard registers.  These are
5423      copied into regno_reg_rtx as we begin to compile each function.  */
5424   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5425     static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5426 
5427 #ifdef INIT_EXPANDERS
5428   /* This is to initialize {init|mark|free}_machine_status before the first
5429      call to push_function_context_to.  This is needed by the Chill front
5430      end which calls push_function_context_to before the first call to
5431      init_function_start.  */
5432   INIT_EXPANDERS;
5433 #endif
5434 
5435   /* Create the unique rtx's for certain rtx codes and operand values.  */
5436 
5437   /* Don't use gen_rtx here, since in this case gen_rtx itself
5438      would try to consult these very variables.  */
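  /* The table is biased by MAX_SAVED_CONST_INT: the shared CONST_INT
     for value N lands in const_int_rtx[N + MAX_SAVED_CONST_INT], the
     slot that the const0_rtx and const1_rtx macros of rtl.h index.  */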
5439   for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5440     const_int_rtx[i + MAX_SAVED_CONST_INT] =
5441       gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5442 
5443   if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5444       && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5445     const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5446   else
5447     const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
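  /* On most targets STORE_FLAG_VALUE is 1, so const_true_rtx ends up
     being the shared const1_rtx object.  */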
5448 
5449   REAL_VALUE_FROM_INT (dconst0,   0,  0, double_mode);
5450   REAL_VALUE_FROM_INT (dconst1,   1,  0, double_mode);
5451   REAL_VALUE_FROM_INT (dconst2,   2,  0, double_mode);
5452   REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
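  /* The two integer arguments above are the low and high words of a
     double-word two's-complement value, so (-1, -1) denotes -1.  */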
5453 
5454   for (i = 0; i <= 2; i++)
5455     {
5456       REAL_VALUE_TYPE *r =
5457 	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5458 
5459       for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5460 	   mode = GET_MODE_WIDER_MODE (mode))
5461 	const_tiny_rtx[i][(int) mode] =
5462 	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5463 
5464       const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5465 
5466       for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5467 	   mode = GET_MODE_WIDER_MODE (mode))
5468 	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5469 
5470       for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5471 	   mode != VOIDmode;
5472 	   mode = GET_MODE_WIDER_MODE (mode))
5473 	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5474     }
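  /* const_tiny_rtx now backs the CONST0_RTX, CONST1_RTX and CONST2_RTX
     macros of rtl.h for the integer and floating modes filled in
     above.  */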
5475 
5476   for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5477        mode != VOIDmode;
5478        mode = GET_MODE_WIDER_MODE (mode))
5479     const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5480 
5481   for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5482        mode != VOIDmode;
5483        mode = GET_MODE_WIDER_MODE (mode))
5484     const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5485 
5486   for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5487     if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5488       const_tiny_rtx[0][i] = const0_rtx;
5489 
5490   const_tiny_rtx[0][(int) BImode] = const0_rtx;
5491   if (STORE_FLAG_VALUE == 1)
5492     const_tiny_rtx[1][(int) BImode] = const1_rtx;
5493 
5494 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5495   return_address_pointer_rtx
5496     = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5497 #endif
5498 
5499 #ifdef STRUCT_VALUE
5500   struct_value_rtx = STRUCT_VALUE;
5501 #else
5502   struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
5503 #endif
5504 
5505 #ifdef STRUCT_VALUE_INCOMING
5506   struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
5507 #else
5508 #ifdef STRUCT_VALUE_INCOMING_REGNUM
5509   struct_value_incoming_rtx
5510     = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
5511 #else
5512   struct_value_incoming_rtx = struct_value_rtx;
5513 #endif
5514 #endif
5515 
5516 #ifdef STATIC_CHAIN_REGNUM
5517   static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5518 
5519 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5520   if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5521     static_chain_incoming_rtx
5522       = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5523   else
5524 #endif
5525     static_chain_incoming_rtx = static_chain_rtx;
5526 #endif
5527 
5528 #ifdef STATIC_CHAIN
5529   static_chain_rtx = STATIC_CHAIN;
5530 
5531 #ifdef STATIC_CHAIN_INCOMING
5532   static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5533 #else
5534   static_chain_incoming_rtx = static_chain_rtx;
5535 #endif
5536 #endif
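  /* When a STATIC_CHAIN expression is defined, it deliberately
     overrides the register-number-based values set just above.  */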
5537 
5538   if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5539     pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5540 }
5541 
5542 /* Query and clear/restore no_line_numbers.  This is used by the
5543    switch/case handling in stmt.c to give proper line numbers in
5544    warnings about unreachable code.  */
5545 
5546 int
5547 force_line_numbers ()
5548 {
5549   int old = no_line_numbers;
5550 
5551   no_line_numbers = 0;
5552   if (old)
5553     force_next_line_note ();
5554   return old;
5555 }
5556 
5557 void
5558 restore_line_number_status (old_value)
5559      int old_value;
5560 {
5561   no_line_numbers = old_value;
5562 }
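/* Illustrative sketch only: the pair above is meant to bracket a
   region of code, roughly the way the switch/case handling in stmt.c
   uses it:

       int old = force_line_numbers ();
       ...emit insns that need accurate line numbers...
       restore_line_number_status (old);
*/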
5563 
5564 /* Produce an exact duplicate of insn INSN after AFTER.
5565    Take care to update libcall regions if present.  */
5566 
5567 rtx
5568 emit_copy_of_insn_after (insn, after)
5569      rtx insn, after;
5570 {
5571   rtx new;
5572   rtx note1, note2, link;
5573 
5574   switch (GET_CODE (insn))
5575     {
5576     case INSN:
5577       new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5578       break;
5579 
5580     case JUMP_INSN:
5581       new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5582       break;
5583 
5584     case CALL_INSN:
5585       new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5586       if (CALL_INSN_FUNCTION_USAGE (insn))
5587 	CALL_INSN_FUNCTION_USAGE (new)
5588 	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5589       SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5590       CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5591       break;
5592 
5593     default:
5594       abort ();
5595     }
5596 
5597   /* Update LABEL_NUSES.  */
5598   mark_jump_label (PATTERN (new), new, 0);
5599 
5600   INSN_SCOPE (new) = INSN_SCOPE (insn);
5601 
5602   /* Copy all REG_NOTES except REG_LABEL, since the mark_jump_label
5603      call above has already made them.  */
5604   for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5605     if (REG_NOTE_KIND (link) != REG_LABEL)
5606       {
5607 	if (GET_CODE (link) == EXPR_LIST)
5608 	  REG_NOTES (new)
5609 	    = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5610 					      XEXP (link, 0),
5611 					      REG_NOTES (new)));
5612 	else
5613 	  REG_NOTES (new)
5614 	    = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5615 					      XEXP (link, 0),
5616 					      REG_NOTES (new)));
5617       }
5618 
5619   /* Fix the libcall sequences.  */
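  /* A region's first insn carries a REG_LIBCALL note pointing at its
     last insn, whose REG_RETVAL note points back at the first; both
     links must now refer to the copied insns.  */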
5620   if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5621     {
5622       rtx p = new;
5623       while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5624 	p = PREV_INSN (p);
5625       XEXP (note1, 0) = p;
5626       XEXP (note2, 0) = new;
5627     }
5628   return new;
5629 }
5630 
5631 #include "gt-emit-rtl.h"
5632